# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
##########################################################################
# This is a collection of helper tools to get stuff done in NSS.
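#
# Usage sketch (assuming this helper is the NSS "mach" script at the
# repository root and is invoked directly; adjust the name to however this
# file is installed). The sub-command names correspond to the parsers built
# in parse_arguments() below:
#
#   ./mach build -- -c -v        # everything after -- is passed to build.sh
#   ./mach tests ssl_gtests      # run one suite through tests/all.sh
#   ./mach coverage ssl_gtests   # sanitizer-coverage build plus symcov report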

import argparse
import fnmatch
import io
import json
import os
import platform
import shutil
import subprocess
import sys
import tarfile
import tempfile

import requests
import yaml

from hashlib import sha256

DEVNULL = open(os.devnull, 'wb')
cwd = os.path.dirname(os.path.abspath(__file__))


def run_tests(test, cycles="standard", env={}, silent=False):
    domsuf = os.getenv('DOMSUF', "localdomain")
    host = os.getenv('HOST', "localhost")

    # Assemble the environment tests/all.sh expects: the caller-provided env
    # plus the test selection, cycles, and host configuration.
    os_env = os.environ.copy()
    os_env.update(env)
    os_env.update({"NSS_TESTS": test, "NSS_CYCLES": cycles,
                   "DOMSUF": domsuf, "HOST": host})

    command = cwd + "/tests/all.sh"
    stdout = stderr = DEVNULL if silent else None
    subprocess.check_call(command, env=os_env, stdout=stdout, stderr=stderr)
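
# For example, run_tests("ssl_gtests", silent=True) invokes <repo>/tests/all.sh
# with the ssl_gtests suite selected in its environment and all output discarded.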


class coverityAction(argparse.Action):

    def get_coverity_remote_cfg(self):
        secret_name = 'project/relman/coverity-nss'
        secrets_url = 'http://taskcluster/secrets/v1/secret/{}'.format(secret_name)

        print('Using symbol upload token from the secrets service: "{}"'.format(secrets_url))

        res = requests.get(secrets_url)
        res.raise_for_status()
        secret = res.json()
        cov_config = secret['secret'] if 'secret' in secret else None

        if cov_config is None:
            print('Ill-formatted secret for Coverity. Aborting analysis.')
            return None

        return cov_config

    def get_coverity_local_cfg(self, path):
        try:
            with open(path) as file_handler:
                config = yaml.safe_load(file_handler)
        except (OSError, yaml.YAMLError):
            print('Unable to load coverity config from {}'.format(path))
            return None
        return config

    def get_cov_config(self, path):
        cov_config = None
        if self.local_config:
            cov_config = self.get_coverity_local_cfg(path)
        else:
            cov_config = self.get_coverity_remote_cfg()

        if cov_config is None:
            print('Unable to load Coverity config.')
            return 1

        self.cov_analysis_url = cov_config.get('package_url')
        self.cov_package_name = cov_config.get('package_name')
        self.cov_url = cov_config.get('server_url')
        self.cov_port = cov_config.get('server_port')
        self.cov_auth = cov_config.get('auth_key')
        self.cov_package_ver = cov_config.get('package_ver')
        self.cov_full_stack = cov_config.get('full_stack', False)

        return 0
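
    # A local config file passed via --config is YAML with (at least) the keys
    # read above; a minimal sketch (all values are placeholders, not real
    # endpoints or credentials):
    #
    #   package_url: https://example.invalid/coverity/cov-analysis-linux64.tar.gz
    #   package_name: cov-analysis-linux64-2019.03
    #   package_ver: 2019.03
    #   server_url: coverity.example.invalid
    #   server_port: 8443
    #   auth_key: <authentication key>
    #   full_stack: false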

    def download_coverity(self):
        if self.cov_url is None or self.cov_port is None or self.cov_analysis_url is None or self.cov_auth is None:
            print('Missing Coverity config options!')
            return

        # Template for coverity.conf; the %s placeholders are filled in below.
        COVERITY_CONFIG = '''
        {
          "type": "Coverity configuration",
          "format_version": 1,
          "settings": {
            "server": {
              "host": "%s",
              "port": %s,
              "on_new_cert" : "trust",
              "auth_key_file": "%s"
            },
            "cov_run_desktop": {
              "build_cmd": ["%s"],
              "clean_cmd": ["%s", "-cc"],
            }
          }
        }
        '''

        # Generate the coverity.conf and auth files
        build_cmd = os.path.join(cwd, 'build.sh')
        cov_auth_path = os.path.join(self.cov_state_path, 'auth')
        cov_setup_path = os.path.join(self.cov_state_path, 'coverity.conf')
        cov_conf = COVERITY_CONFIG % (self.cov_url, self.cov_port, cov_auth_path,
                                      build_cmd, build_cmd)
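
        # Placeholder order in COVERITY_CONFIG: server URL, server port, path to
        # the auth key file, then build.sh twice (build command and clean command).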

        def download(artifact_url, target):
            # Fetch the Coverity package and unpack it into the target directory.
            resp = requests.get(artifact_url, verify=False, stream=True)
            resp.raise_for_status()

            # Extract archive into destination
            with tarfile.open(fileobj=io.BytesIO(resp.content)) as tar:
                tar.extractall(target)

        download(self.cov_analysis_url, self.cov_state_path)

        with open(cov_auth_path, 'w') as f:
            f.write(self.cov_auth)

        # Modify its permissions to 600
        os.chmod(cov_auth_path, 0o600)

        with open(cov_setup_path, 'a') as f:
            f.write(cov_conf)

    def setup_coverity(self, config_path, storage_path=None, force_download=True):
        rc = self.get_cov_config(config_path)
        if rc != 0:
            return rc

        if storage_path is None:
            # If storage_path is None we set the context of the coverity into the cwd.
            storage_path = cwd

        self.cov_state_path = os.path.join(storage_path, "coverity")

        if force_download is True or not os.path.exists(self.cov_state_path):
            shutil.rmtree(self.cov_state_path, ignore_errors=True)
            os.mkdir(self.cov_state_path)

            # Download everything that we need for Coverity from our private instance
            self.download_coverity()

        self.cov_path = os.path.join(self.cov_state_path, self.cov_package_name)
        self.cov_run_desktop = os.path.join(self.cov_path, 'bin', 'cov-run-desktop')
        self.cov_translate = os.path.join(self.cov_path, 'bin', 'cov-translate')
        self.cov_configure = os.path.join(self.cov_path, 'bin', 'cov-configure')
        self.cov_work_path = os.path.join(self.cov_state_path, 'data-coverity')
        self.cov_idir_path = os.path.join(self.cov_work_path, self.cov_package_ver, 'idir')

        if not os.path.exists(self.cov_path) or \
           not os.path.exists(self.cov_run_desktop) or \
           not os.path.exists(self.cov_translate) or \
           not os.path.exists(self.cov_configure):
            print('Missing Coverity in {}'.format(self.cov_path))
            return 1

        return 0
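
    # After a successful download the state directory looks roughly like:
    #   <storage>/coverity/<package_name>/bin/cov-run-desktop (plus cov-translate
    #   and cov-configure), and <storage>/coverity/data-coverity/<package_ver>/idir
    # for intermediate analysis data.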

    def run_process(self, args, cwd=cwd):
        proc = subprocess.Popen(args, cwd=cwd)
        status = None
        while status is None:
            try:
                status = proc.wait()
            except KeyboardInterrupt:
                # Keep waiting so that Ctrl+C does not orphan the child process.
                pass
        return status

    def cov_is_file_in_source(self, abs_path):
        # Resolve symlinks so the path can be compared against the checkout root;
        # anything outside the checkout is reported as None.
        if os.path.islink(abs_path):
            abs_path = os.path.realpath(abs_path)
        return abs_path if abs_path.startswith(cwd) else None

    def dump_cov_artifact(self, cov_results, source, output):
        def relpath(path):
            '''Build path relative to repository root'''
            if path.startswith(cwd):
                return os.path.relpath(path, cwd)
            return path

        # Parse Coverity json into structured issues
        with open(cov_results) as f:
            result = json.load(f)

        # Parse the issues to a standard json format
        issues_dict = {'files': {}}

        files_list = issues_dict['files']

        def build_element(issue):
            # We look only for main event
            event_path = next((event for event in issue['events'] if event['main'] is True), None)

            dict_issue = {
                'line': issue['mainEventLineNumber'],
                'flag': issue['checkerName'],
                'message': event_path['eventDescription'],
                'category': issue['checkerProperties']['category'],
                'stateOnServer': issue['stateOnServer'],
                'extra': {
                    'stack': []
                }
            }

            # Embed all events into extra message
            for event in issue['events']:
                dict_issue['extra']['stack'].append({'file_path': relpath(event['strippedFilePathname']),
                                                     'line_number': event['lineNumber'],
                                                     'path_type': event['eventTag'],
                                                     'description': event['eventDescription']})

            return dict_issue

        for issue in result['issues']:
            path = self.cov_is_file_in_source(issue['strippedMainEventFilePathname'])
            if path is None:
                # Since we skip a result we should log it
                print('Skipping CID: {0} from file: {1} since it\'s not related with the current patch.'.format(
                    issue['stateOnServer']['cid'], issue['strippedMainEventFilePathname']))
                continue

            # If path does not start with `cwd` skip it
            if not path.startswith(cwd):
                continue

            if path in files_list:
                files_list[path]['warnings'].append(build_element(issue))
            else:
                files_list[path] = {'warnings': [build_element(issue)]}

        with open(output, 'w') as f:
            json.dump(issues_dict, f)
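
    # The artifact written above has the shape (sketch):
    #   {"files": {"<path>": {"warnings": [{"line": ..., "flag": "<checker>",
    #    "message": ..., "category": ..., "stateOnServer": ...,
    #    "extra": {"stack": [...]}}]}}}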

    def mutate_paths(self, paths):
        # Normalize every entry to an absolute path, in place.
        for index in range(len(paths)):
            paths[index] = os.path.abspath(paths[index])

    def __call__(self, parser, args, paths, option_string=None):
        self.local_config = True
        config_path = args.config
        storage_path = args.storage

        if len(paths) == 0:
            print('No files have been specified for analysis, running Coverity on the entire project.')

        self.mutate_paths(paths)

        if config_path is None:
            self.local_config = False
            print('No coverity config path has been specified, so running in automation.')
            if 'NSS_AUTOMATION' not in os.environ:
                print('Coverity based static-analysis cannot be run outside automation.')
                return

        rc = self.setup_coverity(config_path, storage_path, args.force)
        if rc != 0:
            return

        # First run cov-run-desktop --setup in order to setup the analysis env
        cmd = [self.cov_run_desktop, '--setup']
        print('Running {} --setup'.format(self.cov_run_desktop))

        rc = self.run_process(args=cmd, cwd=self.cov_path)
        if rc != 0:
            print('Running {} --setup failed!'.format(self.cov_run_desktop))
            return

        cov_result = os.path.join(self.cov_state_path, 'cov-results.json')

        # Once the capture is performed we need to do the actual Coverity Desktop analysis
        if len(paths):
            cmd = [self.cov_run_desktop, '--json-output-v6', cov_result] + paths
        else:
            cmd = [self.cov_run_desktop, '--json-output-v6', cov_result, '--analyze-captured-source']

        print('Running Coverity Analysis for {}'.format(cmd))

        rc = self.run_process(cmd, cwd=self.cov_state_path)
        if rc != 0:
            print('Coverity Analysis failed!')

        # On automation, like try, we want to build an artifact with the results.
        if 'NSS_AUTOMATION' in os.environ:
            self.dump_cov_artifact(cov_result, cov_result, "/home/worker/nss/coverity/coverity.json")
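
# A local run of the Coverity action might look like (a sketch; the
# 'static-analysis' sub-command name matches the parser defined below in
# parse_arguments(), and coverity.yaml is a hypothetical local config file):
#   ./mach static-analysis --config coverity.yaml lib/ssl/ssl3con.c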


class cfAction(argparse.Action):
    docker_command = None
    restorecon = None

    def __call__(self, parser, args, values, option_string=None):
        self.setDockerCommand(args)

        if values:
            files = [os.path.relpath(os.path.abspath(x), start=cwd) for x in values]
        else:
            files = self.modifiedFiles()

        # First check if we can run docker.
        try:
            with open(os.devnull, "w") as f:
                subprocess.check_call(
                    self.docker_command + ["images"], stdout=f)
        except Exception:
            self.docker_command = None

        if self.docker_command is None:
            print("warning: running clang-format directly, which isn't guaranteed to be correct")
            command = [cwd + "/automation/clang-format/run_clang_format.sh"] + files
            subprocess.call(command)
            return

        files = [os.path.join('/home/worker/nss', x) for x in files]
        docker_image = 'clang-format-service:latest'
        cf_docker_folder = cwd + "/automation/clang-format"

        # Build the image if necessary.
        if self.filesChanged(cf_docker_folder):
            self.buildImage(docker_image, cf_docker_folder)

        # Check if we have the docker image.
        try:
            command = self.docker_command + [
                "image", "inspect", "clang-format-service:latest"
            ]
            with open(os.devnull, "w") as f:
                subprocess.check_call(command, stdout=f)
        except subprocess.CalledProcessError:
            print("I have to build the docker image first.")
            self.buildImage(docker_image, cf_docker_folder)

        command = self.docker_command + [
            'run', '-v', cwd + ':/home/worker/nss:Z', '--rm', '-ti', docker_image
        ]
        # The clang format script returns 1 if something's to do. We don't
        # care.
        subprocess.call(command + files)
        if self.restorecon is not None:
            subprocess.call([self.restorecon, '-R', cwd])
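
    # The container invocation built above ends up looking like (sketch, with
    # sudo prepended unless suppressed):
    #   docker run -v <repo>:/home/worker/nss:Z --rm -ti clang-format-service:latest \
    #       /home/worker/nss/<each file to format>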

    def filesChanged(self, path):
        hash = sha256()
        for dirname, dirnames, files in os.walk(path):
            for file in files:
                with open(os.path.join(dirname, file), "rb") as f:
                    hash.update(f.read())
        chk_file = cwd + "/.chk"
        old_chk = ""
        new_chk = hash.hexdigest()
        if os.path.exists(chk_file):
            with open(chk_file) as f:
                old_chk = f.readline()
        if old_chk != new_chk:
            with open(chk_file, "w+") as f:
                f.write(new_chk)
            return True
        return False
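
    # filesChanged() doubles as a cache update: it hashes every file under the
    # given path, compares the digest with the one stored in <repo>/.chk, and
    # rewrites that file whenever the contents changed, returning True in that case.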

    def buildImage(self, docker_image, cf_docker_folder):
        command = self.docker_command + [
            "build", "-t", docker_image, cf_docker_folder
        ]
        subprocess.check_call(command)

    def setDockerCommand(self, args):
        if platform.system() == "Linux":
            self.restorecon = shutil.which("restorecon")
        dcmd = shutil.which("docker")
        if dcmd is not None:
            self.docker_command = [dcmd]
            if not args.noroot:
                self.docker_command = ["sudo"] + self.docker_command
        else:
            self.docker_command = None

    def modifiedFiles(self):
        files = []
        if os.path.exists(os.path.join(cwd, '.hg')):
            st = subprocess.Popen(['hg', 'status', '-m', '-a'],
                                  cwd=cwd, stdout=subprocess.PIPE, universal_newlines=True)
            for line in iter(st.stdout.readline, ''):
                files += [line[2:].rstrip()]
        elif os.path.exists(os.path.join(cwd, '.git')):
            st = subprocess.Popen(['git', 'status', '--porcelain'],
                                  cwd=cwd, stdout=subprocess.PIPE, universal_newlines=True)
            for line in iter(st.stdout.readline, ''):
                if line[1] == 'M' or line[1] != 'D' and \
                   (line[0] == 'M' or line[0] == 'A' or
                        line[0] == 'C' or line[0] == 'U'):
                    files += [line[3:].rstrip()]
                elif line[0] == 'R':
                    files += [line[line.index(' -> ', 4) + 4:].rstrip()]
        else:
            print('Warning: neither mercurial nor git detected!')

        def isFormatted(x):
            return x[-2:] == '.c' or x[-3:] == '.cc' or x[-2:] == '.h'
        return [x for x in files if isFormatted(x)]
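
    # For example, a `git status --porcelain` line " M lib/ssl/ssl3con.c" is
    # picked up via the modified-in-worktree branch, while
    # "R  old_name.c -> new_name.c" contributes the post-rename path.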


class buildAction(argparse.Action):

    def __call__(self, parser, args, values, option_string=None):
        subprocess.check_call([cwd + "/build.sh"] + values)


class testAction(argparse.Action):

    def __call__(self, parser, args, values, option_string=None):
        run_tests(values)


class covAction(argparse.Action):

    def runSslGtests(self, outdir):
        env = {
            "GTESTFILTER": "*",  # Prevent parallel test runs.
            "ASAN_OPTIONS": "coverage=1:coverage_dir=" + outdir,
            "NSS_DEFAULT_DB_TYPE": "dbm"
        }

        run_tests("ssl_gtests", env=env, silent=True)
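
    # With ASAN_OPTIONS=coverage=1:coverage_dir=<outdir>, the sanitizer runtime
    # drops ssl_gtest.<pid>.sancov files into <outdir>; findSanCovFile() below
    # picks up the first one that matches.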

    def findSanCovFile(self, outdir):
        for file in os.listdir(outdir):
            if fnmatch.fnmatch(file, 'ssl_gtest.*.sancov'):
                return os.path.join(outdir, file)
        return None

    def __call__(self, parser, args, values, option_string=None):
        outdir = args.outdir
        print("Output directory: " + outdir)

        print("\nBuild with coverage sanitizers...\n")
        sancov_args = "edge,no-prune,trace-pc-guard,trace-cmp"
        subprocess.check_call([
            os.path.join(cwd, "build.sh"), "-c", "--clang", "--asan", "--enable-legacy-db",
            "--sancov=" + sancov_args
        ])

        print("\nRun ssl_gtests to get a coverage report...")
        self.runSslGtests(outdir)

        sancov_file = self.findSanCovFile(outdir)
        if not sancov_file:
            print("Couldn't find .sancov file.")
            sys.exit(1)

        symcov_file = os.path.join(outdir, "ssl_gtest.symcov")
        out = open(symcov_file, 'wb')
        # Don't exit immediately on error
        symbol_retcode = subprocess.call([
            "sancov",
            "-blacklist=" + os.path.join(cwd, ".sancov-blacklist"),
            "-symbolize", sancov_file,
            os.path.join(cwd, "../dist/Debug/bin/ssl_gtest")
        ], stdout=out)
        out.close()

        print("\nCopying ssl_gtests to artifacts...")
        shutil.copyfile(os.path.join(cwd, "../dist/Debug/bin/ssl_gtest"),
                        os.path.join(outdir, "ssl_gtest"))

        print("\nCoverage report: " + symcov_file)
        if symbol_retcode > 0:
            print("sancov failed to symbolize with return code {}".format(symbol_retcode))
            sys.exit(symbol_retcode)


class commandsAction(argparse.Action):
    commands = []

    def __call__(self, parser, args, values, option_string=None):
        for c in commandsAction.commands:
            print(c)


def parse_arguments():
    parser = argparse.ArgumentParser(
        description='NSS helper script. ' +
        'Make sure to separate sub-command arguments with --.')
    subparsers = parser.add_subparsers()

    parser_build = subparsers.add_parser(
        'build', help='All arguments are passed to build.sh')
    parser_build.add_argument(
        'build_args', nargs='*', help="build arguments", action=buildAction)

    parser_cf = subparsers.add_parser(
        'clang-format',
        help="""
    By default this runs against any files that you have modified. If
    there are no modified files, it checks everything.
    """)
    parser_cf.add_argument(
        '--noroot', help='On linux, suppress the use of \'sudo\' for running docker.',
        action='store_true')
    parser_cf.add_argument(
        '<files>', nargs='*',
        help="Specify files or directories to run clang-format on",
        action=cfAction)

    parser_sa = subparsers.add_parser(
        'static-analysis',
        help="""
    Run static-analysis tools based on Coverity.

    By default this runs only on automation and provides a list of issues that
    are present only locally.
    """)
    parser_sa.add_argument(
        '--config', help='Path to Coverity config file. Only used for local runs.',
        default=None)
    parser_sa.add_argument(
        '--storage', help="""
    Path where to store Coverity binaries and results. If none, the base repository will be used.
    """,
        default=None)
    parser_sa.add_argument(
        '--force', help='Force the re-download of the coverity artefact.',
        action='store_true')
    parser_sa.add_argument(
        '<files>', nargs='*',
        help="Specify files to run Coverity on. If no files are specified the analysis will check the entire project.",
        action=coverityAction)

    parser_test = subparsers.add_parser(
        'tests', help='Run tests through tests/all.sh.')
    tests = [
        "cipher", "lowhash", "chains", "cert", "dbtests", "tools", "fips",
        "sdr", "crmf", "smime", "ssl", "ocsp", "merge", "pkits", "ec",
        "gtests", "ssl_gtests", "bogo", "interop", "policy"
    ]
    parser_test.add_argument(
        'test', choices=tests, help="Available tests", action=testAction)

    parser_cov = subparsers.add_parser(
        'coverage', help='Generate coverage report')
    cov_modules = ["ssl_gtests"]
    parser_cov.add_argument(
        '--outdir', help='Output directory for coverage report data.',
        default=tempfile.mkdtemp())
    parser_cov.add_argument(
        'module', choices=cov_modules, help="Available coverage modules",
        action=covAction)

    parser_commands = subparsers.add_parser(
        'mach-commands',
        help="list commands")
    parser_commands.add_argument(
        'mach_commands', nargs='*',
        action=commandsAction)

    commandsAction.commands = [c for c in subparsers.choices]
    return parser.parse_args()


if __name__ == '__main__':
    parse_arguments()