2 # Build many configurations of glibc.
3 # Copyright (C) 2016-2017 Free Software Foundation, Inc.
4 # This file is part of the GNU C Library.
6 # The GNU C Library is free software; you can redistribute it and/or
7 # modify it under the terms of the GNU Lesser General Public
8 # License as published by the Free Software Foundation; either
9 # version 2.1 of the License, or (at your option) any later version.
11 # The GNU C Library is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 # Lesser General Public License for more details.
16 # You should have received a copy of the GNU Lesser General Public
17 # License along with the GNU C Library; if not, see
18 # <http://www.gnu.org/licenses/>.
20 """Build many configurations of glibc.
22 This script takes as arguments a directory name (containing a src
23 subdirectory with sources of the relevant toolchain components) and a
24 description of what to do: 'checkout', to check out sources into that
25 directory, 'bot-cycle', to run a series of checkout and build steps,
26 'bot', to run 'bot-cycle' repeatedly, 'host-libraries', to build
27 libraries required by the toolchain, 'compilers', to build
28 cross-compilers for various configurations, or 'glibcs', to build
29 glibc for various configurations and run the compilation parts of the
30 testsuite. Subsequent arguments name the versions of components to
31 check out (<component>-<version>), for 'checkout', or, for actions
32 other than 'checkout' and 'bot-cycle', name configurations for which
33 compilers or glibc are to be built.
39 import email
.mime
.text
import multiprocessing
# Compatibility shim: os.cpu_count was added in Python 3.4.  Only
# install the fallback when it is actually missing, instead of
# unconditionally clobbering the standard-library implementation.
if not hasattr(os, 'cpu_count'):
    os.cpu_count = lambda: multiprocessing.cpu_count()
# Compatibility shim: re.fullmatch was added in Python 3.4.  Only
# install the fallback when it is missing; anchoring with \Z makes
# re.match behave like a full match.
if not hasattr(re, 'fullmatch'):
    re.fullmatch = lambda p, s, f=0: re.match(p + r"\Z", s, f)
66 class _CompletedProcess
:
67 def __init__(self
, args
, returncode
, stdout
=None, stderr
=None):
69 self
.returncode
= returncode
73 def _run(*popenargs
, input=None, timeout
=None, check
=False, **kwargs
):
74 assert(timeout
is None)
75 with subprocess
.Popen(*popenargs
, **kwargs
) as process
:
77 stdout
, stderr
= process
.communicate(input)
82 returncode
= process
.poll()
83 if check
and returncode
:
84 raise subprocess
.CalledProcessError(returncode
, popenargs
)
85 return _CompletedProcess(popenargs
, returncode
, stdout
, stderr
)
90 class Context(object):
91 """The global state associated with builds in a given directory."""
93 def __init__(self
, topdir
, parallelism
, keep
, replace_sources
, strip
,
95 """Initialize the context."""
97 self
.parallelism
= parallelism
99 self
.replace_sources
= replace_sources
101 self
.srcdir
= os
.path
.join(topdir
, 'src')
102 self
.versions_json
= os
.path
.join(self
.srcdir
, 'versions.json')
103 self
.build_state_json
= os
.path
.join(topdir
, 'build-state.json')
104 self
.bot_config_json
= os
.path
.join(topdir
, 'bot-config.json')
105 self
.installdir
= os
.path
.join(topdir
, 'install')
106 self
.host_libraries_installdir
= os
.path
.join(self
.installdir
,
108 self
.builddir
= os
.path
.join(topdir
, 'build')
109 self
.logsdir
= os
.path
.join(topdir
, 'logs')
110 self
.logsdir_old
= os
.path
.join(topdir
, 'logs-old')
111 self
.makefile
= os
.path
.join(self
.builddir
, 'Makefile')
112 self
.wrapper
= os
.path
.join(self
.builddir
, 'wrapper')
113 self
.save_logs
= os
.path
.join(self
.builddir
, 'save-logs')
114 self
.script_text
= self
.get_script_text()
115 if action
!= 'checkout':
116 self
.build_triplet
= self
.get_build_triplet()
117 self
.glibc_version
= self
.get_glibc_version()
119 self
.glibc_configs
= {}
120 self
.makefile_pieces
= ['.PHONY: all\n']
121 self
.add_all_configs()
122 self
.load_versions_json()
123 self
.load_build_state_json()
124 self
.status_log_list
= []
125 self
.email_warning
= False
127 def get_script_text(self
):
128 """Return the text of this script."""
129 with
open(sys
.argv
[0], 'r') as f
:
133 """Re-execute this script with the same arguments."""
135 os
.execv(sys
.executable
, [sys
.executable
] + sys
.argv
)
137 def get_build_triplet(self
):
138 """Determine the build triplet with config.guess."""
139 config_guess
= os
.path
.join(self
.component_srcdir('gcc'),
141 cg_out
= subprocess
.run([config_guess
], stdout
=subprocess
.PIPE
,
142 check
=True, universal_newlines
=True).stdout
143 return cg_out
.rstrip()
145 def get_glibc_version(self
):
146 """Determine the glibc version number (major.minor)."""
147 version_h
= os
.path
.join(self
.component_srcdir('glibc'), 'version.h')
148 with
open(version_h
, 'r') as f
:
149 lines
= f
.readlines()
150 starttext
= '#define VERSION "'
152 if l
.startswith(starttext
):
153 l
= l
[len(starttext
):]
155 m
= re
.fullmatch('([0-9]+)\.([0-9]+)[.0-9]*', l
)
156 return '%s.%s' % m
.group(1, 2)
157 print('error: could not determine glibc version')
160 def add_all_configs(self
):
161 """Add all known glibc build configurations."""
162 # On architectures missing __builtin_trap support, these
163 # options may be needed as a workaround; see
164 # <https://gcc.gnu.org/bugzilla/show_bug.cgi?id=70216> for SH.
165 no_isolate
= ('-fno-isolate-erroneous-paths-dereference'
166 ' -fno-isolate-erroneous-paths-attribute')
167 self
.add_config(arch
='aarch64',
169 self
.add_config(arch
='aarch64_be',
171 self
.add_config(arch
='alpha',
173 self
.add_config(arch
='arm',
174 os_name
='linux-gnueabi')
175 self
.add_config(arch
='armeb',
176 os_name
='linux-gnueabi')
177 self
.add_config(arch
='armeb',
178 os_name
='linux-gnueabi',
180 gcc_cfg
=['--with-arch=armv7-a'])
181 self
.add_config(arch
='arm',
182 os_name
='linux-gnueabihf')
183 self
.add_config(arch
='armeb',
184 os_name
='linux-gnueabihf')
185 self
.add_config(arch
='armeb',
186 os_name
='linux-gnueabihf',
188 gcc_cfg
=['--with-arch=armv7-a'])
189 self
.add_config(arch
='hppa',
191 self
.add_config(arch
='ia64',
193 first_gcc_cfg
=['--with-system-libunwind'])
194 self
.add_config(arch
='m68k',
196 gcc_cfg
=['--disable-multilib'])
197 self
.add_config(arch
='m68k',
200 gcc_cfg
=['--with-arch=cf', '--disable-multilib'])
201 self
.add_config(arch
='microblaze',
203 gcc_cfg
=['--disable-multilib'])
204 self
.add_config(arch
='microblazeel',
206 gcc_cfg
=['--disable-multilib'])
207 self
.add_config(arch
='mips64',
209 gcc_cfg
=['--with-mips-plt'],
210 glibcs
=[{'variant': 'n32'},
212 'ccopts': '-mabi=32'},
214 'ccopts': '-mabi=64'}])
215 self
.add_config(arch
='mips64',
218 gcc_cfg
=['--with-mips-plt', '--with-float=soft'],
219 glibcs
=[{'variant': 'n32-soft',
220 'cfg': ['--without-fp']},
223 'ccopts': '-mabi=32',
224 'cfg': ['--without-fp']},
225 {'variant': 'n64-soft',
226 'ccopts': '-mabi=64',
227 'cfg': ['--without-fp']}])
228 self
.add_config(arch
='mips64',
231 gcc_cfg
=['--with-mips-plt', '--with-nan=2008',
232 '--with-arch-64=mips64r2',
233 '--with-arch-32=mips32r2'],
234 glibcs
=[{'variant': 'n32-nan2008'},
235 {'variant': 'nan2008',
237 'ccopts': '-mabi=32'},
238 {'variant': 'n64-nan2008',
239 'ccopts': '-mabi=64'}])
240 self
.add_config(arch
='mips64',
242 variant
='nan2008-soft',
243 gcc_cfg
=['--with-mips-plt', '--with-nan=2008',
244 '--with-arch-64=mips64r2',
245 '--with-arch-32=mips32r2',
246 '--with-float=soft'],
247 glibcs
=[{'variant': 'n32-nan2008-soft',
248 'cfg': ['--without-fp']},
249 {'variant': 'nan2008-soft',
251 'ccopts': '-mabi=32',
252 'cfg': ['--without-fp']},
253 {'variant': 'n64-nan2008-soft',
254 'ccopts': '-mabi=64',
255 'cfg': ['--without-fp']}])
256 self
.add_config(arch
='mips64el',
258 gcc_cfg
=['--with-mips-plt'],
259 glibcs
=[{'variant': 'n32'},
261 'ccopts': '-mabi=32'},
263 'ccopts': '-mabi=64'}])
264 self
.add_config(arch
='mips64el',
267 gcc_cfg
=['--with-mips-plt', '--with-float=soft'],
268 glibcs
=[{'variant': 'n32-soft',
269 'cfg': ['--without-fp']},
272 'ccopts': '-mabi=32',
273 'cfg': ['--without-fp']},
274 {'variant': 'n64-soft',
275 'ccopts': '-mabi=64',
276 'cfg': ['--without-fp']}])
277 self
.add_config(arch
='mips64el',
280 gcc_cfg
=['--with-mips-plt', '--with-nan=2008',
281 '--with-arch-64=mips64r2',
282 '--with-arch-32=mips32r2'],
283 glibcs
=[{'variant': 'n32-nan2008'},
284 {'variant': 'nan2008',
286 'ccopts': '-mabi=32'},
287 {'variant': 'n64-nan2008',
288 'ccopts': '-mabi=64'}])
289 self
.add_config(arch
='mips64el',
291 variant
='nan2008-soft',
292 gcc_cfg
=['--with-mips-plt', '--with-nan=2008',
293 '--with-arch-64=mips64r2',
294 '--with-arch-32=mips32r2',
295 '--with-float=soft'],
296 glibcs
=[{'variant': 'n32-nan2008-soft',
297 'cfg': ['--without-fp']},
298 {'variant': 'nan2008-soft',
300 'ccopts': '-mabi=32',
301 'cfg': ['--without-fp']},
302 {'variant': 'n64-nan2008-soft',
303 'ccopts': '-mabi=64',
304 'cfg': ['--without-fp']}])
305 self
.add_config(arch
='nios2',
307 self
.add_config(arch
='powerpc',
309 gcc_cfg
=['--disable-multilib', '--enable-secureplt'],
310 extra_glibcs
=[{'variant': 'power4',
311 'ccopts': '-mcpu=power4',
312 'cfg': ['--with-cpu=power4']}])
313 self
.add_config(arch
='powerpc',
316 gcc_cfg
=['--disable-multilib', '--with-float=soft',
317 '--enable-secureplt'],
318 glibcs
=[{'variant': 'soft', 'cfg': ['--without-fp']}])
319 self
.add_config(arch
='powerpc64',
321 gcc_cfg
=['--disable-multilib', '--enable-secureplt'])
322 self
.add_config(arch
='powerpc64le',
324 gcc_cfg
=['--disable-multilib', '--enable-secureplt'])
325 self
.add_config(arch
='powerpc',
326 os_name
='linux-gnuspe',
327 gcc_cfg
=['--disable-multilib', '--enable-secureplt',
328 '--enable-e500-double'],
329 glibcs
=[{'cfg': ['--without-fp']}])
330 self
.add_config(arch
='powerpc',
331 os_name
='linux-gnuspe',
333 gcc_cfg
=['--disable-multilib', '--enable-secureplt'],
334 glibcs
=[{'variant': 'e500v1', 'cfg': ['--without-fp']}])
335 self
.add_config(arch
='s390x',
338 {'arch': 's390', 'ccopts': '-m31'}])
339 self
.add_config(arch
='sh3',
341 glibcs
=[{'ccopts': no_isolate
}])
342 self
.add_config(arch
='sh3eb',
344 glibcs
=[{'ccopts': no_isolate
}])
345 self
.add_config(arch
='sh4',
347 glibcs
=[{'ccopts': no_isolate
}])
348 self
.add_config(arch
='sh4eb',
350 glibcs
=[{'ccopts': no_isolate
}])
351 self
.add_config(arch
='sh4',
354 gcc_cfg
=['--without-fp'],
355 glibcs
=[{'variant': 'soft',
356 'cfg': ['--without-fp'],
357 'ccopts': no_isolate
}])
358 self
.add_config(arch
='sh4eb',
361 gcc_cfg
=['--without-fp'],
362 glibcs
=[{'variant': 'soft',
363 'cfg': ['--without-fp'],
364 'ccopts': no_isolate
}])
365 self
.add_config(arch
='sparc64',
369 'ccopts': '-m32 -mlong-double-128'}])
370 self
.add_config(arch
='tilegx',
373 {'variant': '32', 'ccopts': '-m32'}])
374 self
.add_config(arch
='tilegxbe',
377 {'variant': '32', 'ccopts': '-m32'}])
378 self
.add_config(arch
='tilepro',
380 self
.add_config(arch
='x86_64',
382 gcc_cfg
=['--with-multilib-list=m64,m32,mx32'],
384 {'variant': 'x32', 'ccopts': '-mx32'},
385 {'arch': 'i686', 'ccopts': '-m32 -march=i686'}],
386 extra_glibcs
=[{'variant': 'disable-multi-arch',
387 'cfg': ['--disable-multi-arch']},
388 {'variant': 'disable-multi-arch',
390 'ccopts': '-m32 -march=i686',
391 'cfg': ['--disable-multi-arch']},
393 'ccopts': '-m32 -march=i486'},
395 'ccopts': '-m32 -march=i586'}])
397 def add_config(self
, **args
):
398 """Add an individual build configuration."""
399 cfg
= Config(self
, **args
)
400 if cfg
.name
in self
.configs
:
401 print('error: duplicate config %s' % cfg
.name
)
403 self
.configs
[cfg
.name
] = cfg
404 for c
in cfg
.all_glibcs
:
405 if c
.name
in self
.glibc_configs
:
406 print('error: duplicate glibc config %s' % c
.name
)
408 self
.glibc_configs
[c
.name
] = c
410 def component_srcdir(self
, component
):
411 """Return the source directory for a given component, e.g. gcc."""
412 return os
.path
.join(self
.srcdir
, component
)
414 def component_builddir(self
, action
, config
, component
, subconfig
=None):
415 """Return the directory to use for a build."""
418 assert subconfig
is None
419 return os
.path
.join(self
.builddir
, action
, component
)
420 if subconfig
is None:
421 return os
.path
.join(self
.builddir
, action
, config
, component
)
423 # glibc build as part of compiler build.
424 return os
.path
.join(self
.builddir
, action
, config
, component
,
427 def compiler_installdir(self
, config
):
428 """Return the directory in which to install a compiler."""
429 return os
.path
.join(self
.installdir
, 'compilers', config
)
431 def compiler_bindir(self
, config
):
432 """Return the directory in which to find compiler binaries."""
433 return os
.path
.join(self
.compiler_installdir(config
), 'bin')
435 def compiler_sysroot(self
, config
):
436 """Return the sysroot directory for a compiler."""
437 return os
.path
.join(self
.compiler_installdir(config
), 'sysroot')
439 def glibc_installdir(self
, config
):
440 """Return the directory in which to install glibc."""
441 return os
.path
.join(self
.installdir
, 'glibcs', config
)
443 def run_builds(self
, action
, configs
):
444 """Run the requested builds."""
445 if action
== 'checkout':
446 self
.checkout(configs
)
448 if action
== 'bot-cycle':
450 print('error: configurations specified for bot-cycle')
456 print('error: configurations specified for bot')
460 if action
== 'host-libraries' and configs
:
461 print('error: configurations specified for host-libraries')
463 self
.clear_last_build_state(action
)
464 build_time
= datetime
.datetime
.utcnow()
465 if action
== 'host-libraries':
466 build_components
= ('gmp', 'mpfr', 'mpc')
469 self
.build_host_libraries()
470 elif action
== 'compilers':
471 build_components
= ('binutils', 'gcc', 'glibc', 'linux')
472 old_components
= ('gmp', 'mpfr', 'mpc')
473 old_versions
= self
.build_state
['host-libraries']['build-versions']
474 self
.build_compilers(configs
)
476 build_components
= ('glibc',)
477 old_components
= ('gmp', 'mpfr', 'mpc', 'binutils', 'gcc', 'linux')
478 old_versions
= self
.build_state
['compilers']['build-versions']
479 self
.build_glibcs(configs
)
483 # Partial build, do not update stored state.
486 for k
in build_components
:
487 if k
in self
.versions
:
488 build_versions
[k
] = {'version': self
.versions
[k
]['version'],
489 'revision': self
.versions
[k
]['revision']}
490 for k
in old_components
:
491 if k
in old_versions
:
492 build_versions
[k
] = {'version': old_versions
[k
]['version'],
493 'revision': old_versions
[k
]['revision']}
494 self
.update_build_state(action
, build_time
, build_versions
)
497 def remove_dirs(*args
):
498 """Remove directories and their contents if they exist."""
500 shutil
.rmtree(dir, ignore_errors
=True)
503 def remove_recreate_dirs(*args
):
504 """Remove directories if they exist, and create them as empty."""
505 Context
.remove_dirs(*args
)
507 os
.makedirs(dir, exist_ok
=True)
509 def add_makefile_cmdlist(self
, target
, cmdlist
, logsdir
):
510 """Add makefile text for a list of commands."""
511 commands
= cmdlist
.makefile_commands(self
.wrapper
, logsdir
)
512 self
.makefile_pieces
.append('all: %s\n.PHONY: %s\n%s:\n%s\n' %
513 (target
, target
, target
, commands
))
514 self
.status_log_list
.extend(cmdlist
.status_logs(logsdir
))
516 def write_files(self
):
517 """Write out the Makefile and wrapper script."""
518 mftext
= ''.join(self
.makefile_pieces
)
519 with
open(self
.makefile
, 'w') as f
:
529 'prev_status=$prev_base-status.txt\n'
530 'this_status=$this_base-status.txt\n'
531 'this_log=$this_base-log.txt\n'
532 'date > "$this_log"\n'
533 'echo >> "$this_log"\n'
534 'echo "Description: $desc" >> "$this_log"\n'
535 'printf "%s" "Command:" >> "$this_log"\n'
536 'for word in "$@"; do\n'
537 ' if expr "$word" : "[]+,./0-9@A-Z_a-z-]\\\\{1,\\\\}\\$" > /dev/null; then\n'
538 ' printf " %s" "$word"\n'
541 ' printf "%s" "$word" | sed -e "s/\'/\'\\\\\\\\\'\'/"\n'
544 'done >> "$this_log"\n'
545 'echo >> "$this_log"\n'
546 'echo "Directory: $dir" >> "$this_log"\n'
547 'echo "Path addition: $path" >> "$this_log"\n'
548 'echo >> "$this_log"\n'
551 ' echo >> "$this_log"\n'
552 ' echo "$1: $desc" > "$this_status"\n'
553 ' echo "$1: $desc" >> "$this_log"\n'
554 ' echo >> "$this_log"\n'
555 ' date >> "$this_log"\n'
556 ' echo "$1: $desc"\n'
561 ' if [ "$1" != "0" ]; then\n'
562 ' record_status FAIL\n'
565 'if [ "$prev_base" ] && ! grep -q "^PASS" "$prev_status"; then\n'
566 ' record_status UNRESOLVED\n'
568 'if [ "$dir" ]; then\n'
570 ' check_error "$?"\n'
572 'if [ "$path" ]; then\n'
573 ' PATH=$path:$PATH\n'
575 '"$@" < /dev/null >> "$this_log" 2>&1\n'
577 'record_status PASS\n')
578 with
open(self
.wrapper
, 'w') as f
:
579 f
.write(wrapper_text
)
581 mode_exec
= (stat
.S_IRWXU|stat
.S_IRGRP|stat
.S_IXGRP|
582 stat
.S_IROTH|stat
.S_IXOTH
)
583 os
.chmod(self
.wrapper
, mode_exec
)
586 'if ! [ -f tests.sum ]; then\n'
587 ' echo "No test summary available."\n'
592 ' echo "Contents of $1:"\n'
596 ' echo "End of contents of $1."\n'
599 'save_file tests.sum\n'
600 'non_pass_tests=$(grep -v "^PASS: " tests.sum | sed -e "s/^PASS: //")\n'
601 'for t in $non_pass_tests; do\n'
602 ' if [ -f "$t.out" ]; then\n'
603 ' save_file "$t.out"\n'
606 with
open(self
.save_logs
, 'w') as f
:
607 f
.write(save_logs_text
)
608 os
.chmod(self
.save_logs
, mode_exec
)
611 """Do the actual build."""
612 cmd
= ['make', '-j%d' % self
.parallelism
]
613 subprocess
.run(cmd
, cwd
=self
.builddir
, check
=True)
615 def build_host_libraries(self
):
616 """Build the host libraries."""
617 installdir
= self
.host_libraries_installdir
618 builddir
= os
.path
.join(self
.builddir
, 'host-libraries')
619 logsdir
= os
.path
.join(self
.logsdir
, 'host-libraries')
620 self
.remove_recreate_dirs(installdir
, builddir
, logsdir
)
621 cmdlist
= CommandList('host-libraries', self
.keep
)
622 self
.build_host_library(cmdlist
, 'gmp')
623 self
.build_host_library(cmdlist
, 'mpfr',
624 ['--with-gmp=%s' % installdir
])
625 self
.build_host_library(cmdlist
, 'mpc',
626 ['--with-gmp=%s' % installdir
,
627 '--with-mpfr=%s' % installdir
])
628 cmdlist
.add_command('done', ['touch', os
.path
.join(installdir
, 'ok')])
629 self
.add_makefile_cmdlist('host-libraries', cmdlist
, logsdir
)
631 def build_host_library(self
, cmdlist
, lib
, extra_opts
=None):
632 """Build one host library."""
633 srcdir
= self
.component_srcdir(lib
)
634 builddir
= self
.component_builddir('host-libraries', None, lib
)
635 installdir
= self
.host_libraries_installdir
636 cmdlist
.push_subdesc(lib
)
637 cmdlist
.create_use_dir(builddir
)
638 cfg_cmd
= [os
.path
.join(srcdir
, 'configure'),
639 '--prefix=%s' % installdir
,
642 cfg_cmd
.extend (extra_opts
)
643 cmdlist
.add_command('configure', cfg_cmd
)
644 cmdlist
.add_command('build', ['make'])
645 cmdlist
.add_command('check', ['make', 'check'])
646 cmdlist
.add_command('install', ['make', 'install'])
647 cmdlist
.cleanup_dir()
648 cmdlist
.pop_subdesc()
650 def build_compilers(self
, configs
):
651 """Build the compilers."""
653 self
.remove_dirs(os
.path
.join(self
.builddir
, 'compilers'))
654 self
.remove_dirs(os
.path
.join(self
.installdir
, 'compilers'))
655 self
.remove_dirs(os
.path
.join(self
.logsdir
, 'compilers'))
656 configs
= sorted(self
.configs
.keys())
658 self
.configs
[c
].build()
660 def build_glibcs(self
, configs
):
661 """Build the glibcs."""
663 self
.remove_dirs(os
.path
.join(self
.builddir
, 'glibcs'))
664 self
.remove_dirs(os
.path
.join(self
.installdir
, 'glibcs'))
665 self
.remove_dirs(os
.path
.join(self
.logsdir
, 'glibcs'))
666 configs
= sorted(self
.glibc_configs
.keys())
668 self
.glibc_configs
[c
].build()
670 def load_versions_json(self
):
671 """Load information about source directory versions."""
672 if not os
.access(self
.versions_json
, os
.F_OK
):
675 with
open(self
.versions_json
, 'r') as f
:
676 self
.versions
= json
.load(f
)
678 def store_json(self
, data
, filename
):
679 """Store information in a JSON file."""
680 filename_tmp
= filename
+ '.tmp'
681 with
open(filename_tmp
, 'w') as f
:
682 json
.dump(data
, f
, indent
=2, sort_keys
=True)
683 os
.rename(filename_tmp
, filename
)
685 def store_versions_json(self
):
686 """Store information about source directory versions."""
687 self
.store_json(self
.versions
, self
.versions_json
)
689 def set_component_version(self
, component
, version
, explicit
, revision
):
690 """Set the version information for a component."""
691 self
.versions
[component
] = {'version': version
,
692 'explicit': explicit
,
693 'revision': revision
}
694 self
.store_versions_json()
696 def checkout(self
, versions
):
697 """Check out the desired component versions."""
698 default_versions
= {'binutils': 'vcs-2.28',
700 'glibc': 'vcs-mainline',
706 explicit_versions
= {}
709 for k
in default_versions
.keys():
713 if k
in use_versions
:
714 print('error: multiple versions for %s' % k
)
717 explicit_versions
[k
] = True
721 print('error: unknown component in %s' % v
)
723 for k
in default_versions
.keys():
724 if k
not in use_versions
:
725 if k
in self
.versions
and self
.versions
[k
]['explicit']:
726 use_versions
[k
] = self
.versions
[k
]['version']
727 explicit_versions
[k
] = True
729 use_versions
[k
] = default_versions
[k
]
730 explicit_versions
[k
] = False
731 os
.makedirs(self
.srcdir
, exist_ok
=True)
732 for k
in sorted(default_versions
.keys()):
733 update
= os
.access(self
.component_srcdir(k
), os
.F_OK
)
736 k
in self
.versions
and
737 v
!= self
.versions
[k
]['version']):
738 if not self
.replace_sources
:
739 print('error: version of %s has changed from %s to %s, '
740 'use --replace-sources to check out again' %
741 (k
, self
.versions
[k
]['version'], v
))
743 shutil
.rmtree(self
.component_srcdir(k
))
745 if v
.startswith('vcs-'):
746 revision
= self
.checkout_vcs(k
, v
[4:], update
)
748 self
.checkout_tar(k
, v
, update
)
750 self
.set_component_version(k
, v
, explicit_versions
[k
], revision
)
751 if self
.get_script_text() != self
.script_text
:
752 # Rerun the checkout process in case the updated script
753 # uses different default versions or new components.
756 def checkout_vcs(self
, component
, version
, update
):
757 """Check out the given version of the given component from version
758 control. Return a revision identifier."""
759 if component
== 'binutils':
760 git_url
= 'git://sourceware.org/git/binutils-gdb.git'
761 if version
== 'mainline':
762 git_branch
= 'master'
764 trans
= str.maketrans({'.': '_'})
765 git_branch
= 'binutils-%s-branch' % version
.translate(trans
)
766 return self
.git_checkout(component
, git_url
, git_branch
, update
)
767 elif component
== 'gcc':
768 if version
== 'mainline':
771 trans
= str.maketrans({'.': '_'})
772 branch
= 'branches/gcc-%s-branch' % version
.translate(trans
)
773 svn_url
= 'svn://gcc.gnu.org/svn/gcc/%s' % branch
774 return self
.gcc_checkout(svn_url
, update
)
775 elif component
== 'glibc':
776 git_url
= 'git://sourceware.org/git/glibc.git'
777 if version
== 'mainline':
778 git_branch
= 'master'
780 git_branch
= 'release/%s/master' % version
781 r
= self
.git_checkout(component
, git_url
, git_branch
, update
)
782 self
.fix_glibc_timestamps()
785 print('error: component %s coming from VCS' % component
)
788 def git_checkout(self
, component
, git_url
, git_branch
, update
):
789 """Check out a component from git. Return a commit identifier."""
791 subprocess
.run(['git', 'remote', 'prune', 'origin'],
792 cwd
=self
.component_srcdir(component
), check
=True)
793 subprocess
.run(['git', 'pull', '-q'],
794 cwd
=self
.component_srcdir(component
), check
=True)
796 subprocess
.run(['git', 'clone', '-q', '-b', git_branch
, git_url
,
797 self
.component_srcdir(component
)], check
=True)
798 r
= subprocess
.run(['git', 'rev-parse', 'HEAD'],
799 cwd
=self
.component_srcdir(component
),
800 stdout
=subprocess
.PIPE
,
801 check
=True, universal_newlines
=True).stdout
804 def fix_glibc_timestamps(self
):
805 """Fix timestamps in a glibc checkout."""
806 # Ensure that builds do not try to regenerate generated files
807 # in the source tree.
808 srcdir
= self
.component_srcdir('glibc')
809 for dirpath
, dirnames
, filenames
in os
.walk(srcdir
):
811 if (f
== 'configure' or
812 f
== 'preconfigure' or
813 f
.endswith('-kw.h')):
814 to_touch
= os
.path
.join(dirpath
, f
)
815 subprocess
.run(['touch', to_touch
], check
=True)
817 def gcc_checkout(self
, svn_url
, update
):
818 """Check out GCC from SVN. Return the revision number."""
820 subprocess
.run(['svn', 'co', '-q', svn_url
,
821 self
.component_srcdir('gcc')], check
=True)
822 subprocess
.run(['contrib/gcc_update', '--silent'],
823 cwd
=self
.component_srcdir('gcc'), check
=True)
824 r
= subprocess
.run(['svnversion', self
.component_srcdir('gcc')],
825 stdout
=subprocess
.PIPE
,
826 check
=True, universal_newlines
=True).stdout
829 def checkout_tar(self
, component
, version
, update
):
830 """Check out the given version of the given component from a
834 url_map
= {'binutils': 'https://ftp.gnu.org/gnu/binutils/binutils-%(version)s.tar.bz2',
835 'gcc': 'https://ftp.gnu.org/gnu/gcc/gcc-%(version)s/gcc-%(version)s.tar.bz2',
836 'gmp': 'https://ftp.gnu.org/gnu/gmp/gmp-%(version)s.tar.xz',
837 'linux': 'https://www.kernel.org/pub/linux/kernel/v4.x/linux-%(version)s.tar.xz',
838 'mpc': 'https://ftp.gnu.org/gnu/mpc/mpc-%(version)s.tar.gz',
839 'mpfr': 'https://ftp.gnu.org/gnu/mpfr/mpfr-%(version)s.tar.xz'}
840 if component
not in url_map
:
841 print('error: component %s coming from tarball' % component
)
843 url
= url_map
[component
] % {'version': version
}
844 filename
= os
.path
.join(self
.srcdir
, url
.split('/')[-1])
845 response
= urllib
.request
.urlopen(url
)
846 data
= response
.read()
847 with
open(filename
, 'wb') as f
:
849 subprocess
.run(['tar', '-C', self
.srcdir
, '-x', '-f', filename
],
851 os
.rename(os
.path
.join(self
.srcdir
, '%s-%s' % (component
, version
)),
852 self
.component_srcdir(component
))
855 def load_build_state_json(self
):
856 """Load information about the state of previous builds."""
857 if os
.access(self
.build_state_json
, os
.F_OK
):
858 with
open(self
.build_state_json
, 'r') as f
:
859 self
.build_state
= json
.load(f
)
861 self
.build_state
= {}
862 for k
in ('host-libraries', 'compilers', 'glibcs'):
863 if k
not in self
.build_state
:
864 self
.build_state
[k
] = {}
865 if 'build-time' not in self
.build_state
[k
]:
866 self
.build_state
[k
]['build-time'] = ''
867 if 'build-versions' not in self
.build_state
[k
]:
868 self
.build_state
[k
]['build-versions'] = {}
869 if 'build-results' not in self
.build_state
[k
]:
870 self
.build_state
[k
]['build-results'] = {}
871 if 'result-changes' not in self
.build_state
[k
]:
872 self
.build_state
[k
]['result-changes'] = {}
873 if 'ever-passed' not in self
.build_state
[k
]:
874 self
.build_state
[k
]['ever-passed'] = []
876 def store_build_state_json(self
):
877 """Store information about the state of previous builds."""
878 self
.store_json(self
.build_state
, self
.build_state_json
)
880 def clear_last_build_state(self
, action
):
881 """Clear information about the state of part of the build."""
882 # We clear the last build time and versions when starting a
883 # new build. The results of the last build are kept around,
884 # as comparison is still meaningful if this build is aborted
885 # and a new one started.
886 self
.build_state
[action
]['build-time'] = ''
887 self
.build_state
[action
]['build-versions'] = {}
888 self
.store_build_state_json()
890 def update_build_state(self
, action
, build_time
, build_versions
):
891 """Update the build state after a build."""
892 build_time
= build_time
.replace(microsecond
=0)
893 self
.build_state
[action
]['build-time'] = str(build_time
)
894 self
.build_state
[action
]['build-versions'] = build_versions
896 for log
in self
.status_log_list
:
897 with
open(log
, 'r') as f
:
899 log_text
= log_text
.rstrip()
900 m
= re
.fullmatch('([A-Z]+): (.*)', log_text
)
902 test_name
= m
.group(2)
903 assert test_name
not in build_results
904 build_results
[test_name
] = result
905 old_build_results
= self
.build_state
[action
]['build-results']
906 self
.build_state
[action
]['build-results'] = build_results
908 all_tests
= set(old_build_results
.keys()) |
set(build_results
.keys())
910 if t
in old_build_results
:
911 old_res
= old_build_results
[t
]
913 old_res
= '(New test)'
914 if t
in build_results
:
915 new_res
= build_results
[t
]
917 new_res
= '(Test removed)'
918 if old_res
!= new_res
:
919 result_changes
[t
] = '%s -> %s' % (old_res
, new_res
)
920 self
.build_state
[action
]['result-changes'] = result_changes
921 old_ever_passed
= {t
for t
in self
.build_state
[action
]['ever-passed']
922 if t
in build_results
}
923 new_passes
= {t
for t
in build_results
if build_results
[t
] == 'PASS'}
924 self
.build_state
[action
]['ever-passed'] = sorted(old_ever_passed |
926 self
.store_build_state_json()
928 def load_bot_config_json(self
):
929 """Load bot configuration."""
930 with
open(self
.bot_config_json
, 'r') as f
:
931 self
.bot_config
= json
.load(f
)
933 def part_build_old(self
, action
, delay
):
934 """Return whether the last build for a given action was at least a
935 given number of seconds ago, or does not have a time recorded."""
936 old_time_str
= self
.build_state
[action
]['build-time']
939 old_time
= datetime
.datetime
.strptime(old_time_str
,
941 new_time
= datetime
.datetime
.utcnow()
942 delta
= new_time
- old_time
943 return delta
.total_seconds() >= delay
946 """Run a single round of checkout and builds."""
947 print('Bot cycle starting %s.' % str(datetime
.datetime
.utcnow()))
948 self
.load_bot_config_json()
949 actions
= ('host-libraries', 'compilers', 'glibcs')
950 self
.bot_run_self(['--replace-sources'], 'checkout')
951 self
.load_versions_json()
952 if self
.get_script_text() != self
.script_text
:
953 print('Script changed, re-execing.')
954 # On script change, all parts of the build should be rerun.
956 self
.clear_last_build_state(a
)
958 check_components
= {'host-libraries': ('gmp', 'mpfr', 'mpc'),
959 'compilers': ('binutils', 'gcc', 'glibc', 'linux'),
960 'glibcs': ('glibc',)}
963 build_vers
= self
.build_state
[a
]['build-versions']
964 must_build
[a
] = False
965 if not self
.build_state
[a
]['build-time']:
969 for c
in check_components
[a
]:
971 old_vers
[c
] = build_vers
[c
]
972 new_vers
[c
] = {'version': self
.versions
[c
]['version'],
973 'revision': self
.versions
[c
]['revision']}
974 if new_vers
== old_vers
:
975 print('Versions for %s unchanged.' % a
)
977 print('Versions changed or rebuild forced for %s.' % a
)
978 if a
== 'compilers' and not self
.part_build_old(
979 a
, self
.bot_config
['compilers-rebuild-delay']):
980 print('Not requiring rebuild of compilers this soon.')
983 if must_build
['host-libraries']:
984 must_build
['compilers'] = True
985 if must_build
['compilers']:
986 must_build
['glibcs'] = True
989 print('Must rebuild %s.' % a
)
990 self
.clear_last_build_state(a
)
992 print('No need to rebuild %s.' % a
)
993 if os
.access(self
.logsdir
, os
.F_OK
):
994 shutil
.rmtree(self
.logsdir_old
, ignore_errors
=True)
995 shutil
.copytree(self
.logsdir
, self
.logsdir_old
)
998 build_time
= datetime
.datetime
.utcnow()
999 print('Rebuilding %s at %s.' % (a
, str(build_time
)))
1000 self
.bot_run_self([], a
)
1001 self
.load_build_state_json()
1002 self
.bot_build_mail(a
, build_time
)
1003 print('Bot cycle done at %s.' % str(datetime
.datetime
.utcnow()))
1005 def bot_build_mail(self
, action
, build_time
):
1006 """Send email with the results of a build."""
1007 if not ('email-from' in self
.bot_config
and
1008 'email-server' in self
.bot_config
and
1009 'email-subject' in self
.bot_config
and
1010 'email-to' in self
.bot_config
):
1011 if not self
.email_warning
:
1012 print("Email not configured, not sending.")
1013 self
.email_warning
= True
1016 build_time
= build_time
.replace(microsecond
=0)
1017 subject
= (self
.bot_config
['email-subject'] %
1019 'build-time': str(build_time
)})
1020 results
= self
.build_state
[action
]['build-results']
1021 changes
= self
.build_state
[action
]['result-changes']
1022 ever_passed
= set(self
.build_state
[action
]['ever-passed'])
1023 versions
= self
.build_state
[action
]['build-versions']
1024 new_regressions
= {k
for k
in changes
if changes
[k
] == 'PASS -> FAIL'}
1025 all_regressions
= {k
for k
in ever_passed
if results
[k
] == 'FAIL'}
1026 all_fails
= {k
for k
in results
if results
[k
] == 'FAIL'}
1028 new_reg_list
= sorted(['FAIL: %s' % k
for k
in new_regressions
])
1029 new_reg_text
= ('New regressions:\n\n%s\n\n' %
1030 '\n'.join(new_reg_list
))
1034 all_reg_list
= sorted(['FAIL: %s' % k
for k
in all_regressions
])
1035 all_reg_text
= ('All regressions:\n\n%s\n\n' %
1036 '\n'.join(all_reg_list
))
1040 all_fail_list
= sorted(['FAIL: %s' % k
for k
in all_fails
])
1041 all_fail_text
= ('All failures:\n\n%s\n\n' %
1042 '\n'.join(all_fail_list
))
1046 changes_list
= sorted(changes
.keys())
1047 changes_list
= ['%s: %s' % (changes
[k
], k
) for k
in changes_list
]
1048 changes_text
= ('All changed results:\n\n%s\n\n' %
1049 '\n'.join(changes_list
))
1052 results_text
= (new_reg_text
+ all_reg_text
+ all_fail_text
+
1054 if not results_text
:
1055 results_text
= 'Clean build with unchanged results.\n\n'
1056 versions_list
= sorted(versions
.keys())
1057 versions_list
= ['%s: %s (%s)' % (k
, versions
[k
]['version'],
1058 versions
[k
]['revision'])
1059 for k
in versions_list
]
1060 versions_text
= ('Component versions for this build:\n\n%s\n' %
1061 '\n'.join(versions_list
))
1062 body_text
= results_text
+ versions_text
1063 msg
= email
.mime
.text
.MIMEText(body_text
)
1064 msg
['Subject'] = subject
1065 msg
['From'] = self
.bot_config
['email-from']
1066 msg
['To'] = self
.bot_config
['email-to']
1067 msg
['Message-ID'] = email
.utils
.make_msgid()
1068 msg
['Date'] = email
.utils
.format_datetime(datetime
.datetime
.utcnow())
1069 with smtplib
.SMTP(self
.bot_config
['email-server']) as s
:
1072 def bot_run_self(self
, opts
, action
, check
=True):
1073 """Run a copy of this script with given options."""
1074 cmd
= [sys
.executable
, sys
.argv
[0], '--keep=none',
1075 '-j%d' % self
.parallelism
]
1077 cmd
.extend([self
.topdir
, action
])
1079 subprocess
.run(cmd
, check
=check
)
1082 """Run repeated rounds of checkout and builds."""
1084 self
.load_bot_config_json()
1085 if not self
.bot_config
['run']:
1086 print('Bot exiting by request.')
1088 self
.bot_run_self([], 'bot-cycle', check
=False)
1089 self
.load_bot_config_json()
1090 if not self
.bot_config
['run']:
1091 print('Bot exiting by request.')
1093 time
.sleep(self
.bot_config
['delay'])
1094 if self
.get_script_text() != self
.script_text
:
1095 print('Script changed, bot re-execing.')
class Config(object):
    """A configuration for building a compiler and associated libraries."""

    def __init__(self, ctx, arch, os_name, variant=None, gcc_cfg=None,
                 first_gcc_cfg=None, glibcs=None, extra_glibcs=None):
        """Initialize a Config object.

        ctx is the global build Context; arch/os_name form the target
        triplet; variant distinguishes multiple configurations of the
        same triplet; gcc_cfg/first_gcc_cfg are extra GCC configure
        options; glibcs/extra_glibcs are lists of keyword dicts used to
        construct Glibc objects (extra_glibcs are built but not used to
        build the compiler itself).
        """
        self.ctx = ctx
        self.arch = arch
        self.os = os_name
        self.variant = variant
        if variant is None:
            self.name = '%s-%s' % (arch, os_name)
        else:
            self.name = '%s-%s-%s' % (arch, os_name, variant)
        self.triplet = '%s-glibc-%s' % (arch, os_name)
        if gcc_cfg is None:
            self.gcc_cfg = []
        else:
            self.gcc_cfg = gcc_cfg
        if first_gcc_cfg is None:
            self.first_gcc_cfg = []
        else:
            self.first_gcc_cfg = first_gcc_cfg
        if glibcs is None:
            # By default, build exactly one glibc matching this config.
            glibcs = [{'variant': variant}]
        if extra_glibcs is None:
            extra_glibcs = []
        glibcs = [Glibc(self, **g) for g in glibcs]
        extra_glibcs = [Glibc(self, **g) for g in extra_glibcs]
        self.all_glibcs = glibcs + extra_glibcs
        self.compiler_glibcs = glibcs
        self.installdir = ctx.compiler_installdir(self.name)
        self.bindir = ctx.compiler_bindir(self.name)
        self.sysroot = ctx.compiler_sysroot(self.name)
        self.builddir = os.path.join(ctx.builddir, 'compilers', self.name)
        self.logsdir = os.path.join(ctx.logsdir, 'compilers', self.name)
1136 def component_builddir(self
, component
):
1137 """Return the directory to use for a (non-glibc) build."""
1138 return self
.ctx
.component_builddir('compilers', self
.name
, component
)
1141 """Generate commands to build this compiler."""
1142 self
.ctx
.remove_recreate_dirs(self
.installdir
, self
.builddir
,
1144 cmdlist
= CommandList('compilers-%s' % self
.name
, self
.ctx
.keep
)
1145 cmdlist
.add_command('check-host-libraries',
1147 os
.path
.join(self
.ctx
.host_libraries_installdir
,
1149 cmdlist
.use_path(self
.bindir
)
1150 self
.build_cross_tool(cmdlist
, 'binutils', 'binutils',
1152 '--disable-libdecnumber',
1153 '--disable-readline',
1155 if self
.os
.startswith('linux'):
1156 self
.install_linux_headers(cmdlist
)
1157 self
.build_gcc(cmdlist
, True)
1158 for g
in self
.compiler_glibcs
:
1159 cmdlist
.push_subdesc('glibc')
1160 cmdlist
.push_subdesc(g
.name
)
1161 g
.build_glibc(cmdlist
, True)
1162 cmdlist
.pop_subdesc()
1163 cmdlist
.pop_subdesc()
1164 self
.build_gcc(cmdlist
, False)
1165 cmdlist
.add_command('done', ['touch',
1166 os
.path
.join(self
.installdir
, 'ok')])
1167 self
.ctx
.add_makefile_cmdlist('compilers-%s' % self
.name
, cmdlist
,
1170 def build_cross_tool(self
, cmdlist
, tool_src
, tool_build
, extra_opts
=None):
1171 """Build one cross tool."""
1172 srcdir
= self
.ctx
.component_srcdir(tool_src
)
1173 builddir
= self
.component_builddir(tool_build
)
1174 cmdlist
.push_subdesc(tool_build
)
1175 cmdlist
.create_use_dir(builddir
)
1176 cfg_cmd
= [os
.path
.join(srcdir
, 'configure'),
1177 '--prefix=%s' % self
.installdir
,
1178 '--build=%s' % self
.ctx
.build_triplet
,
1179 '--host=%s' % self
.ctx
.build_triplet
,
1180 '--target=%s' % self
.triplet
,
1181 '--with-sysroot=%s' % self
.sysroot
]
1183 cfg_cmd
.extend(extra_opts
)
1184 cmdlist
.add_command('configure', cfg_cmd
)
1185 cmdlist
.add_command('build', ['make'])
1186 # Parallel "make install" for GCC has race conditions that can
1187 # cause it to fail; see
1188 # <https://gcc.gnu.org/bugzilla/show_bug.cgi?id=42980>. Such
1189 # problems are not known for binutils, but doing the
1190 # installation in parallel within a particular toolchain build
1191 # (as opposed to installation of one toolchain from
1192 # build-many-glibcs.py running in parallel to the installation
1193 # of other toolchains being built) is not known to be
1194 # significantly beneficial, so it is simplest just to disable
1195 # parallel install for cross tools here.
1196 cmdlist
.add_command('install', ['make', '-j1', 'install'])
1197 cmdlist
.cleanup_dir()
1198 cmdlist
.pop_subdesc()
1200 def install_linux_headers(self
, cmdlist
):
1201 """Install Linux kernel headers."""
1202 arch_map
= {'aarch64': 'arm64',
1212 'microblaze': 'microblaze',
1215 'powerpc': 'powerpc',
1223 if self
.arch
.startswith(k
):
1224 linux_arch
= arch_map
[k
]
1226 assert linux_arch
is not None
1227 srcdir
= self
.ctx
.component_srcdir('linux')
1228 builddir
= self
.component_builddir('linux')
1229 headers_dir
= os
.path
.join(self
.sysroot
, 'usr')
1230 cmdlist
.push_subdesc('linux')
1231 cmdlist
.create_use_dir(builddir
)
1232 cmdlist
.add_command('install-headers',
1233 ['make', '-C', srcdir
, 'O=%s' % builddir
,
1234 'ARCH=%s' % linux_arch
,
1235 'INSTALL_HDR_PATH=%s' % headers_dir
,
1237 cmdlist
.cleanup_dir()
1238 cmdlist
.pop_subdesc()
1240 def build_gcc(self
, cmdlist
, bootstrap
):
1242 # libsanitizer commonly breaks because of glibc header
1243 # changes, or on unusual targets. libssp is of little
1244 # relevance with glibc's own stack checking support.
1245 cfg_opts
= list(self
.gcc_cfg
)
1246 cfg_opts
+= ['--disable-libsanitizer', '--disable-libssp']
1247 host_libs
= self
.ctx
.host_libraries_installdir
1248 cfg_opts
+= ['--with-gmp=%s' % host_libs
,
1249 '--with-mpfr=%s' % host_libs
,
1250 '--with-mpc=%s' % host_libs
]
1252 tool_build
= 'gcc-first'
1253 # Building a static-only, C-only compiler that is
1254 # sufficient to build glibc. Various libraries and
1255 # features that may require libc headers must be disabled.
1256 # When configuring with a sysroot, --with-newlib is
1257 # required to define inhibit_libc (to stop some parts of
1258 # libgcc including libc headers); --without-headers is not
1260 cfg_opts
+= ['--enable-languages=c', '--disable-shared',
1261 '--disable-threads',
1262 '--disable-libatomic',
1263 '--disable-decimal-float',
1265 '--disable-libgomp',
1268 '--disable-libquadmath',
1269 '--without-headers', '--with-newlib',
1270 '--with-glibc-version=%s' % self
.ctx
.glibc_version
1272 cfg_opts
+= self
.first_gcc_cfg
1275 cfg_opts
+= ['--enable-languages=c,c++', '--enable-shared',
1277 self
.build_cross_tool(cmdlist
, 'gcc', tool_build
, cfg_opts
)
class Glibc(object):
    """A configuration for building glibc."""

    def __init__(self, compiler, arch=None, os_name=None, variant=None,
                 cfg=None, ccopts=None):
        """Initialize a Glibc object.

        compiler is the Config used to build this glibc; arch/os_name
        default to the compiler's; variant distinguishes multiple
        glibcs per triplet; cfg is extra configure options; ccopts is
        extra options appended to the compiler command.
        """
        self.ctx = compiler.ctx
        self.compiler = compiler
        if arch is None:
            self.arch = compiler.arch
        else:
            self.arch = arch
        if os_name is None:
            self.os = compiler.os
        else:
            self.os = os_name
        self.variant = variant
        if variant is None:
            self.name = '%s-%s' % (self.arch, self.os)
        else:
            self.name = '%s-%s-%s' % (self.arch, self.os, variant)
        self.triplet = '%s-glibc-%s' % (self.arch, self.os)
        if cfg is None:
            self.cfg = []
        else:
            self.cfg = cfg
        self.ccopts = ccopts
1308 def tool_name(self
, tool
):
1309 """Return the name of a cross-compilation tool."""
1310 ctool
= '%s-%s' % (self
.compiler
.triplet
, tool
)
1311 if self
.ccopts
and (tool
== 'gcc' or tool
== 'g++'):
1312 ctool
= '%s %s' % (ctool
, self
.ccopts
)
1316 """Generate commands to build this glibc."""
1317 builddir
= self
.ctx
.component_builddir('glibcs', self
.name
, 'glibc')
1318 installdir
= self
.ctx
.glibc_installdir(self
.name
)
1319 logsdir
= os
.path
.join(self
.ctx
.logsdir
, 'glibcs', self
.name
)
1320 self
.ctx
.remove_recreate_dirs(installdir
, builddir
, logsdir
)
1321 cmdlist
= CommandList('glibcs-%s' % self
.name
, self
.ctx
.keep
)
1322 cmdlist
.add_command('check-compilers',
1324 os
.path
.join(self
.compiler
.installdir
, 'ok')])
1325 cmdlist
.use_path(self
.compiler
.bindir
)
1326 self
.build_glibc(cmdlist
, False)
1327 self
.ctx
.add_makefile_cmdlist('glibcs-%s' % self
.name
, cmdlist
,
1330 def build_glibc(self
, cmdlist
, for_compiler
):
1331 """Generate commands to build this glibc, either as part of a compiler
1332 build or with the bootstrapped compiler (and in the latter case, run
1334 srcdir
= self
.ctx
.component_srcdir('glibc')
1336 builddir
= self
.ctx
.component_builddir('compilers',
1337 self
.compiler
.name
, 'glibc',
1339 installdir
= self
.compiler
.sysroot
1340 srcdir_copy
= self
.ctx
.component_builddir('compilers',
1345 builddir
= self
.ctx
.component_builddir('glibcs', self
.name
,
1347 installdir
= self
.ctx
.glibc_installdir(self
.name
)
1348 srcdir_copy
= self
.ctx
.component_builddir('glibcs', self
.name
,
1350 cmdlist
.create_use_dir(builddir
)
1351 # glibc builds write into the source directory, and even if
1352 # not intentionally there is a risk of bugs that involve
1353 # writing into the working directory. To avoid possible
1354 # concurrency issues, copy the source directory.
1355 cmdlist
.create_copy_dir(srcdir
, srcdir_copy
)
1356 cfg_cmd
= [os
.path
.join(srcdir_copy
, 'configure'),
1359 '--build=%s' % self
.ctx
.build_triplet
,
1360 '--host=%s' % self
.triplet
,
1361 'CC=%s' % self
.tool_name('gcc'),
1362 'CXX=%s' % self
.tool_name('g++'),
1363 'AR=%s' % self
.tool_name('ar'),
1364 'AS=%s' % self
.tool_name('as'),
1365 'LD=%s' % self
.tool_name('ld'),
1366 'NM=%s' % self
.tool_name('nm'),
1367 'OBJCOPY=%s' % self
.tool_name('objcopy'),
1368 'OBJDUMP=%s' % self
.tool_name('objdump'),
1369 'RANLIB=%s' % self
.tool_name('ranlib'),
1370 'READELF=%s' % self
.tool_name('readelf'),
1371 'STRIP=%s' % self
.tool_name('strip')]
1373 cmdlist
.add_command('configure', cfg_cmd
)
1374 cmdlist
.add_command('build', ['make'])
1375 cmdlist
.add_command('install', ['make', 'install',
1376 'install_root=%s' % installdir
])
1377 # GCC uses paths such as lib/../lib64, so make sure lib
1378 # directories always exist.
1379 cmdlist
.add_command('mkdir-lib', ['mkdir', '-p',
1380 os
.path
.join(installdir
, 'lib'),
1381 os
.path
.join(installdir
,
1383 if not for_compiler
:
1385 cmdlist
.add_command('strip',
1387 ('%s %s/lib*/*.so' %
1388 (self
.tool_name('strip'), installdir
))])
1389 cmdlist
.add_command('check', ['make', 'check'])
1390 cmdlist
.add_command('save-logs', [self
.ctx
.save_logs
],
1392 cmdlist
.cleanup_dir('cleanup-src', srcdir_copy
)
1393 cmdlist
.cleanup_dir()
class Command(object):
    """A command run in the build process."""

    def __init__(self, desc, num, dir, path, command, always_run=False):
        """Initialize a Command object.

        desc is a human-readable description; num a sequence number
        used to order log files; dir the working directory (or None);
        path a PATH prepend (or None); command the argv list;
        always_run marks cleanup commands run even after failures.
        """
        self.dir = dir
        self.path = path
        self.desc = desc
        # Log file base: zero-padded sequence number plus the
        # description with spaces mapped to hyphens.
        trans = str.maketrans({' ': '-'})
        self.logbase = '%03d-%s' % (num, desc.translate(trans))
        self.command = command
        self.always_run = always_run
1410 def shell_make_quote_string(s
):
1411 """Given a string not containing a newline, quote it for use by the
1413 assert '\n' not in s
1414 if re
.fullmatch('[]+,./0-9@A-Z_a-z-]+', s
):
1416 strans
= str.maketrans({"'": "'\\''"})
1417 s
= "'%s'" % s
.translate(strans
)
1418 mtrans
= str.maketrans({'$': '$$'})
1419 return s
.translate(mtrans
)
1422 def shell_make_quote_list(l
, translate_make
):
1423 """Given a list of strings not containing newlines, quote them for use
1424 by the shell and make, returning a single string. If translate_make
1425 is true and the first string is 'make', change it to $(MAKE)."""
1426 l
= [Command
.shell_make_quote_string(s
) for s
in l
]
1427 if translate_make
and l
[0] == 'make':
1431 def shell_make_quote(self
):
1432 """Return this command quoted for the shell and make."""
1433 return self
.shell_make_quote_list(self
.command
, True)
class CommandList(object):
    """A list of commands run in the build process."""

    def __init__(self, desc, keep):
        """Initialize a CommandList object.

        desc is the toplevel description for this list; keep is the
        build-directory retention policy ('none', 'all' or 'failed').
        """
        self.cmdlist = []
        self.dir = None
        self.path = None
        self.desc = [desc]
        self.keep = keep
1447 def desc_txt(self
, desc
):
1448 """Return the description to use for a command."""
1449 return '%s %s' % (' '.join(self
.desc
), desc
)
1451 def use_dir(self
, dir):
1452 """Set the default directory for subsequent commands."""
1455 def use_path(self
, path
):
1456 """Set a directory to be prepended to the PATH for subsequent
1460 def push_subdesc(self
, subdesc
):
1461 """Set the default subdescription for subsequent commands (e.g., the
1462 name of a component being built, within the series of commands
1464 self
.desc
.append(subdesc
)
1466 def pop_subdesc(self
):
1467 """Pop a subdescription from the list of descriptions."""
1470 def create_use_dir(self
, dir):
1471 """Remove and recreate a directory and use it for subsequent
1473 self
.add_command_dir('rm', None, ['rm', '-rf', dir])
1474 self
.add_command_dir('mkdir', None, ['mkdir', '-p', dir])
1477 def create_copy_dir(self
, src
, dest
):
1478 """Remove a directory and recreate it as a copy from the given
1480 self
.add_command_dir('copy-rm', None, ['rm', '-rf', dest
])
1481 parent
= os
.path
.dirname(dest
)
1482 self
.add_command_dir('copy-mkdir', None, ['mkdir', '-p', parent
])
1483 self
.add_command_dir('copy', None, ['cp', '-a', src
, dest
])
1485 def add_command_dir(self
, desc
, dir, command
, always_run
=False):
1486 """Add a command to run in a given directory."""
1487 cmd
= Command(self
.desc_txt(desc
), len(self
.cmdlist
), dir, self
.path
,
1488 command
, always_run
)
1489 self
.cmdlist
.append(cmd
)
1491 def add_command(self
, desc
, command
, always_run
=False):
1492 """Add a command to run in the default directory."""
1493 cmd
= Command(self
.desc_txt(desc
), len(self
.cmdlist
), self
.dir,
1494 self
.path
, command
, always_run
)
1495 self
.cmdlist
.append(cmd
)
1497 def cleanup_dir(self
, desc
='cleanup', dir=None):
1498 """Clean up a build directory. If no directory is specified, the
1499 default directory is cleaned up and ceases to be the default
1504 if self
.keep
!= 'all':
1505 self
.add_command_dir(desc
, None, ['rm', '-rf', dir],
1506 always_run
=(self
.keep
== 'none'))
1508 def makefile_commands(self
, wrapper
, logsdir
):
1509 """Return the sequence of commands in the form of text for a Makefile.
1510 The given wrapper script takes arguments: base of logs for
1511 previous command, or empty; base of logs for this command;
1512 description; directory; PATH addition; the command itself."""
1513 # prev_base is the base of the name for logs of the previous
1514 # command that is not always-run (that is, a build command,
1515 # whose failure should stop subsequent build commands from
1516 # being run, as opposed to a cleanup command, which is run
1517 # even if previous commands failed).
1520 for c
in self
.cmdlist
:
1521 ctxt
= c
.shell_make_quote()
1522 if prev_base
and not c
.always_run
:
1523 prev_log
= os
.path
.join(logsdir
, prev_base
)
1526 this_log
= os
.path
.join(logsdir
, c
.logbase
)
1527 if not c
.always_run
:
1528 prev_base
= c
.logbase
1537 prelims
= [wrapper
, prev_log
, this_log
, c
.desc
, dir, path
]
1538 prelim_txt
= Command
.shell_make_quote_list(prelims
, False)
1539 cmds
.append('\t@%s %s' % (prelim_txt
, ctxt
))
1540 return '\n'.join(cmds
)
1542 def status_logs(self
, logsdir
):
1543 """Return the list of log files with command status."""
1544 return [os
.path
.join(logsdir
, '%s-status.txt' % c
.logbase
)
1545 for c
in self
.cmdlist
]
1549 """Return an argument parser for this module."""
1550 parser
= argparse
.ArgumentParser(description
=__doc__
)
1551 parser
.add_argument('-j', dest
='parallelism',
1552 help='Run this number of jobs in parallel',
1553 type=int, default
=os
.cpu_count())
1554 parser
.add_argument('--keep', dest
='keep',
1555 help='Whether to keep all build directories, '
1556 'none or only those from failed builds',
1557 default
='none', choices
=('none', 'all', 'failed'))
1558 parser
.add_argument('--replace-sources', action
='store_true',
1559 help='Remove and replace source directories '
1560 'with the wrong version of a component')
1561 parser
.add_argument('--strip', action
='store_true',
1562 help='Strip installed glibc libraries')
1563 parser
.add_argument('topdir',
1564 help='Toplevel working directory')
1565 parser
.add_argument('action',
1567 choices
=('checkout', 'bot-cycle', 'bot',
1568 'host-libraries', 'compilers', 'glibcs'))
1569 parser
.add_argument('configs',
1570 help='Versions to check out or configurations to build',
1576 """The main entry point."""
1577 parser
= get_parser()
1578 opts
= parser
.parse_args(argv
)
1579 topdir
= os
.path
.abspath(opts
.topdir
)
1580 ctx
= Context(topdir
, opts
.parallelism
, opts
.keep
, opts
.replace_sources
,
1581 opts
.strip
, opts
.action
)
1582 ctx
.run_builds(opts
.action
, opts
.configs
)
if __name__ == '__main__':
    main(sys.argv[1:])