#   Copyright (C) 2001-2017 Free Software Foundation, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GCC; see the file COPYING3.  If not see
# <http://www.gnu.org/licenses/>.

# This script was submitted by Janis Johnson <janis187@us.ibm.com>.

# Test the functionality and optionally, performance improvement, of
# programs compiled with profile-directed optimizations.  Compile and
# run a test with profile options, compile it with options using the
# profile feedback, and then run the test again.  Optionally compile
# and run a third time without the profile-directed optimization and
# compare timing results of the program with normal optimization and
# with the profile-directed optimization.  Each test is run using
# multiple sets of optimization and/or code generation options in
# addition to the profiling and feedback options.

# If perf_ext is defined and the performance value for the
# profile-directed test run is nonzero then the performance check will
# be done.
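
# An including .exp file is expected to set the variables checked below and
# then call profopt-execute for each testcase.  A rough, illustrative sketch
# (the option values and glob pattern are examples, not requirements of this
# script):
#
#   set tool gcc
#   set prof_ext "gcda"
#   set profile_option "-fprofile-generate"
#   set feedback_option "-fprofile-use"
#   foreach src [lsort [glob -nocomplain $srcdir/$subdir/*.c]] {
#       profopt-execute $src
#   }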

global PROFOPT_OPTIONS perf_delta

# The including .exp file must define these.
global tool profile_option feedback_option prof_ext profile_wrapper
if ![info exists tool] {
    error "Tool is not specified."
}
if ![info exists prof_ext] {
    error "No profile data file extensions specified."
}

# The maximum performance degradation can be defined in the including file.
if ![info exists perf_delta] {
    set perf_delta 4
}
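
# perf_delta is a percentage: with the default of 4, a profile-directed run
# may be up to 4% slower than the normally optimized run (for example, 1040
# vs. 1000 time units) before the perf check below reports a failure.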

# The default option list can be overridden by
# PROFOPT_OPTIONS="{ { list1 } ... { list2 } }"

if ![info exists PROFOPT_OPTIONS] {
    set PROFOPT_OPTIONS [list \
        { -g } \
        { -O0 } \
        { -O1 } \
        { -O2 } \
        { -O3 } \
        { -O3 -g } \
        { -Os } ]
}
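
# For example, a site file or board description could restrict the matrix to
# two option sets (purely illustrative):
#
#   set PROFOPT_OPTIONS [list { -O2 } { -O3 -funroll-loops }]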

#
# profopt-cleanup -- remove profiling or performance results files.
#
# TESTCASE is the name of the test
# EXTLIST is a list of extensions of files to remove
#
proc profopt-cleanup { testcase extlist } {
    set basename [file tail $testcase]
    set base [file rootname $basename]
    foreach ext $extlist {
        set files [glob -nocomplain $base.$ext]
        if { $files != "" } {
            eval "remote_file build delete $files"
        }
    }
}

#
# profopt-target-cleanup -- remove profiling result files.
#
# DIR is the name of the directory
# TESTCASE is the name of the test
# EXT is the extension of files to remove
#
proc profopt-target-cleanup { dir testcase ext } {
    global additional_sources_used

    set basename [file tail $testcase]
    set base [file rootname $basename]
    set file "$dir/$base.$ext"
    eval "remote_file target delete $file"
    # Also delete the files generated for any additional sources used by
    # the test.
    if [info exists additional_sources_used] {
        foreach srcfile $additional_sources_used {
            set basename [file tail $srcfile]
            set base [file rootname $basename]
            set file "$dir/$base.$ext"
            eval "remote_file target delete $file"
        }
    }
}

#
# profopt-perf-value -- get performance value for a test
#
# TESTCASE is the name of the test
# PERF_EXT is the extension of the performance result file
# OPTSTR is the string of compiler options
#
proc profopt-perf-value { testcase perf_ext optstr } {
    set basename [file tail $testcase]
    set base [file rootname $basename]
    set files [glob -nocomplain $base.$perf_ext]
    # The file doesn't exist; let the caller decide if that's a problem.
    if { $files == "" } {
        return -1
    }
    remote_upload host $base.$perf_ext $base.$perf_ext
    set fd [open $base.$perf_ext r]
    gets $fd line
    close $fd

    set val -2
    if [regexp "TIME" $line] {
        if [regexp "TIME -1" $line] {
            fail "$testcase perf check: no consistent time available, $optstr"
        } elseif ![regexp "(\[0-9\]+)" "$line" val] {
            # Report problems with an existing file.
            fail "$testcase perf check: file $base.$perf_ext has wrong format, $optstr"
        }
    } else {
        # Report problems with an existing file.
        fail "$testcase perf check: file $base.$perf_ext has wrong format, $optstr"
    }

    profopt-cleanup $testcase $perf_ext
    return $val
}
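
# For illustration: the performance file is expected to contain a line like
#   TIME 1234
# in which case 1234 is returned; "TIME -1" means no consistent time was
# available.  A return value of -1 means the file did not exist and -2 means
# it could not be parsed, so callers skip the perf comparison for
# non-positive values.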

#
# dg-final-generate -- process code to run after the profile-generate step
#
# ARGS is the line number of the directive followed by the commands.
#
proc dg-final-generate { args } {
    global generate_final_code

    if { [llength $args] > 2 } {
        error "[lindex $args 0]: too many arguments"
        return
    }
    append generate_final_code "[lindex $args 1]\n"
}

#
# dg-final-use -- process code to run after the profile-use step
#
# ARGS is the line number of the directive followed by the commands.
#
proc dg-final-use { args } {
    global use_final_code

    if { [llength $args] > 2 } {
        error "[lindex $args 0]: too many arguments"
        return
    }
    append use_final_code "[lindex $args 1]\n"
}

#
# dg-final-use-not-autofdo -- process code to run after the profile-use step,
# but only if not running autofdo
#
# ARGS is the line number of the directive followed by the commands.
#
proc dg-final-use-not-autofdo { args } {
    global use_final_code
    global run_autofdo

    if { [llength $args] > 2 } {
        error "[lindex $args 0]: too many arguments"
        return
    }

    # Ignore the directive when the autofdo variant is being run.
    if { $run_autofdo == 1 } {
        return
    }

    append use_final_code "[lindex $args 1]\n"
}

#
# dg-final-use-autofdo -- process code to run after the profile-use step,
# but only if running autofdo
#
# ARGS is the line number of the directive followed by the commands.
#
proc dg-final-use-autofdo { args } {
    global use_final_code
    global run_autofdo

    if { [llength $args] > 2 } {
        error "[lindex $args 0]: too many arguments"
        return
    }

    # Ignore the directive unless the autofdo variant is being run.
    if { $run_autofdo != 1 } {
        return
    }

    append use_final_code "[lindex $args 1]\n"
}

#
# profopt-final-code -- run final code
#
# WHICH is "generate" or "use".
# FINAL_CODE is the TCL code to run.
# NAME is the name of the test, for error messages.
#
proc profopt-final-code { which final_code name } {
    # This is copied from dg-test in dg.exp of DejaGnu.
    regsub -all "\\\\(\[{}\])" $final_code "\\1" final_code
    proc profopt-final-proc { args } $final_code
    if [catch "profopt-final-proc $name" errmsg] {
        perror "$name: error executing dg-final-${which}: $errmsg"
        unresolved "$name: Error executing dg-final-${which}: $errmsg"
    }
}

#
# profopt-get-options -- process test directives
#
# SRC is the full pathname of the testcase.
#
proc profopt-get-options { src } {
    # dg-options sets a variable called dg-extra-tool-flags.
    set dg-extra-tool-flags ""

    # dg-require-* sets dg-do-what.
    upvar dg-do-what dg-do-what

    # current_compiler_flags reads tool_flags from the same stack frame
    # as dg-extra-tool-flags.
    set tool_flags ""

    set tmp [dg-get-options $src]
    foreach op $tmp {
        set cmd [lindex $op 0]
        if { ![string compare "dg-options" $cmd] \
             || ![string compare "dg-additional-options" $cmd] \
             || ![string compare "dg-add-options" $cmd] \
             || ![string compare "dg-skip-if" $cmd] \
             || ![string compare "dg-final-generate" $cmd] \
             || ![string compare "dg-final-use" $cmd] \
             || ![string compare "dg-final-use-not-autofdo" $cmd] \
             || ![string compare "dg-final-use-autofdo" $cmd] \
             || ![string compare "dg-additional-sources" $cmd] \
             || [string match "dg-require-*" $cmd] } {
            set status [catch "$op" errmsg]
            if { $status != 0 } {
                perror "$src: $errmsg for \"$op\"\n"
                unresolved "$src: $errmsg for \"$op\""
            }
        } else {
            # Ignore unrecognized dg- commands, but warn about them.
            warning "profopt.exp does not support $cmd"
        }
    }

    # Return flags to use for compiling the primary source file and for
    # linking.
    return ${dg-extra-tool-flags}
}
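
# The dg-final-generate and dg-final-use* directives recognized above are
# written in the testcase itself.  A rough, illustrative sketch (the dump
# names and patterns are placeholders, not taken from a real test):
#
#   /* { dg-options "-fdump-tree-optimized" } */
#   /* { dg-final-generate { scan-tree-dump "expected-pattern" "optimized" } } */
#   /* { dg-final-use { scan-tree-dump "expected-pattern" "optimized" } } */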

#
# auto-profopt-execute -- compile for auto profiling and then feedback,
# then normal.  SRC is the full path name of the testcase.
#
proc auto-profopt-execute { src } {
    global profile_wrapper
    global profile_option
    global feedback_option
    global run_autofdo
    global srcdir

    if { ! [check_profiling_available "-fauto-profile"] } {
        regsub "(?q)$srcdir/" $src "" testcase
        unsupported "$testcase -fauto-profile"
        return
    }
    set profile_wrapper [profopt-perf-wrapper]
    set profile_option "-g"
    set feedback_option "-fauto-profile"
    set run_autofdo 1
    profopt-execute $src
    unset profile_wrapper
    unset profile_option
    unset feedback_option
    unset run_autofdo
}
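
# A driver that wants the autofdo variant calls this wrapper instead of
# profopt-execute directly; an illustrative sketch (the glob pattern is an
# example, not a requirement):
#
#   foreach src [lsort [glob -nocomplain $srcdir/$subdir/*.c]] {
#       auto-profopt-execute $src
#   }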

#
# profopt-execute -- compile for profiling and then feedback, then normal
#
# SRC is the full pathname of the testcase.
#
proc profopt-execute { src } {
    global srcdir tmpdir
    global PROFOPT_OPTIONS
    global tool profile_option feedback_option prof_ext perf_ext perf_delta
    global profile_wrapper run_autofdo ld_library_path
    global generate_final_code use_final_code
    global testname_with_flags

    if ![info exists profile_option] {
        error "No profile option specified for first compile."
    }
    if ![info exists feedback_option] {
        error "No feedback option specified for second compile."
    }
    if ![info exists profile_wrapper] {
        set profile_wrapper ""
    }
    if ![info exists run_autofdo] {
        set run_autofdo 0
    }

    # Use the default option list or one defined for a set of tests.
    if ![info exists PROFOPT_OPTIONS] {
        error "PROFOPT_OPTIONS is not defined"
    }
    set prof_option_list $PROFOPT_OPTIONS

    regsub "(?q)$srcdir/" $src "" testcase
    # If we couldn't rip $srcdir out of `src' then just do the best we can.
    # The point is to reduce the unnecessary noise in the logs.  Don't strip
    # out too much because different testcases with the same name can
    # confuse `test_names'.
    if [string match "/*" $testcase] {
        set testcase "[file tail [file dirname $src]]/[file tail $src]"
    }

    # Several procedures access the name of the test with torture flags,
    # normally defined in dg-test.  Profile optimization tests don't
    # use dg-test, so define it here to make it accessible via
    # testname-for-summary.
    set testname_with_flags $testcase

    set executable $tmpdir/[file tail [file rootname $src].x]
    set basename [file tail $testcase]
    set base [file rootname $basename]

    set count 0
    foreach option $prof_option_list {
        set execname1 "${executable}${count}1"
        set execname2 "${executable}${count}2"
        set execname3 "${executable}${count}3"
        incr count

        remote_file build delete $execname1
        remote_file build delete $execname2
        remote_file build delete $execname3
        verbose "Testing $testcase, $option" 1

        # Remove old performance data files.
        if [info exists perf_ext] {
            profopt-cleanup $testcase $perf_ext
        }

        # Process test directives.
        set generate_final_code ""
        set use_final_code ""
        set dg-do-what [list "run" "" P]
        set extra_flags [profopt-get-options $src]
        if { [lindex ${dg-do-what} 1 ] == "N" } {
            unsupported "$testcase"
            unset testname_with_flags
            verbose "$src not supported on this target, skipping it" 3
            return
        }

        # Schedule removal of dump files et al.
        # Do this before the call below destroys additional_sources.
        append use_final_code [schedule-cleanups "$option $extra_flags"]
        set extra_options [dg-additional-files-options "" "$src"]

        # Remove old profiling data files.  Make sure additional_sources_used
        # is valid, by running it after dg-additional-files-options.
        foreach ext $prof_ext {
            profopt-target-cleanup $tmpdir $base $ext
            profopt-target-cleanup $tmpdir $base "perf.data"
        }

        # Tree profiling requires TLS runtime support, which may need
        # additional compile options.
        if { [string first "-fprofile-generate" $profile_option] >= 0 } {
            set extra_flags [add_options_for_tls $extra_flags]
        }

        # Compile for profiling.

        set options "$extra_options"
        lappend options "additional_flags=$option $extra_flags $profile_option"
        set optstr "$option $profile_option"
        set comp_output [${tool}_target_compile "$src" "$execname1" executable $options]
        if ![${tool}_check_compile "$testcase compilation" $optstr $execname1 $comp_output] {
            unresolved "$testcase execution, $optstr"
            unresolved "$testcase compilation, $option $feedback_option"
            unresolved "$testcase execution, $option $feedback_option"
            continue
        }

        # Run the profiled test.

        if { $run_autofdo == 1 } {
            # Run under the profile wrapper to collect an autofdo profile.
            if { ![info exists ld_library_path] } {
                set ld_library_path ""
            }
            set orig_ld_library_path "[getenv LD_LIBRARY_PATH]"
            setenv LD_LIBRARY_PATH "$ld_library_path:$orig_ld_library_path"
            verbose -log "Running $profile_wrapper -o $tmpdir/$base.perf.data $execname1"
            set id [remote_spawn "" "$profile_wrapper -o $tmpdir/$base.perf.data $execname1" "readonly"]
            setenv LD_LIBRARY_PATH $orig_ld_library_path
            if { $id < 0 } {
                warning "Failed to run profiler"
                set status "fail"
            } else {
                set result [remote_wait "" 300]
                set status [lindex $result 0]
                verbose "perf result $result"
                if { $status == 0 } {
                    set status "pass"
                } else {
                    set status "fail"
                }
            }
        } else {
            set result [${tool}_load $execname1 "" ""]
            set status [lindex $result 0]
        }

        # Make sure the profile data was generated, and fail if not.
        if { $status == "pass" } {
            # For autofdo, convert the perf data into gcov-format profile
            # data with create_gcov.
            if { $run_autofdo == 1 } {
                set bprefix "afdo."
                set cmd "create_gcov --binary $execname1 --profile=$tmpdir/$base.perf.data -gcov_version=1 --gcov=$tmpdir/$bprefix$base.$ext"
                verbose "Running $cmd"
                set id [remote_spawn "" $cmd]
                if { $id < 0 } {
                    unsupported "$testcase -fauto-profile: cannot run create_gcov"
                    set status "fail"
                    continue
                }
                set status [remote_wait "" 300]
                set status "pass"
            } else {
                set bprefix ""
            }

            set missing_file 0
            foreach ext $prof_ext {
                remote_upload target $tmpdir/$bprefix$base.$ext
                set files [glob -nocomplain $bprefix$base.$ext]
                if { $files == "" } {
                    set missing_file 1
                    set status "fail"
                    fail "$testcase execution: file $bprefix$base.$ext does not exist, $option $profile_option"
                }
            }
            if { $missing_file == 0 } {
                $status "$testcase execution, $optstr"
            }
        }

        # If there is dg-final code to execute for the generate step, do it
        # even if the test failed; it might clean up temporary files.
        if ![string match $generate_final_code ""] {
            profopt-final-code "generate" $generate_final_code $testcase
        }

        remote_file build delete $execname1

        # Quit this round if it failed.
        if { $status != "pass" } {
            unresolved "$testcase compilation, $option $feedback_option"
            unresolved "$testcase execution, $option $feedback_option"
            continue
        }

        # Compile with feedback-directed optimizations.

        set options "$extra_options"
        lappend options "additional_flags=$option $extra_flags $feedback_option"
        set optstr "$option $feedback_option"
        # For autofdo, point -fauto-profile at the converted profile data.
        if { [string first "-fauto-profile" $options] >= 0 } {
            set options [regsub -- "-fauto-profile" $options "-fauto-profile=$tmpdir/$bprefix$base.$ext"]
        }
        set comp_output [${tool}_target_compile "$src" "$execname2" "executable" $options]

        # Prune warnings we know are unwanted.
        set comp_output [prune_warnings $comp_output]

        if ![${tool}_check_compile "$testcase compilation" $optstr $execname2 $comp_output] {
            unresolved "$testcase execution, $optstr"
            continue
        }

        # Run the profile-directed optimized test.

        set result [${tool}_load "$execname2" "" ""]
        set status [lindex $result 0]
        $status "$testcase execution, $optstr"

        # If there is dg-final code to execute for the use step, do it.
        if ![string match $use_final_code ""] {
            profopt-final-code "use" $use_final_code $testcase
        }

        # Remove the profiling data files.
        foreach ext $prof_ext {
            profopt-target-cleanup $tmpdir "$bprefix$base" $ext
            profopt-target-cleanup $tmpdir $base "perf.data"
            profopt-target-cleanup $tmpdir $base "gcda.imports"
        }

        if { $status != "pass" } {
            continue
        }

        # If the test is not expected to produce performance data then
        # we are done with this set of options.
        if ![info exists perf_ext] {
            remote_file build delete $execname2
            continue
        }

        # Get the performance data from the test built with
        # profile-directed optimization.  If the file doesn't exist or if
        # the value is zero, skip the performance comparison.
        set val2 [profopt-perf-value $testcase $perf_ext $optstr]
        if { $val2 <= 0 } {
            remote_file build delete $execname2
            continue
        }

        # Compile with normal optimizations.

        set options "$extra_options"
        lappend options "additional_flags=$option"
        set optstr "$option"
        set comp_output [${tool}_target_compile "$src" "$execname3" "executable" $options]
        if ![${tool}_check_compile "$testcase compilation" $optstr $execname3 $comp_output] {
            unresolved "$testcase execution, $optstr"
            unresolved "$testcase perf check, $optstr"
            continue
        }

        # Run the test with normal optimizations.

        set result [${tool}_load "$execname3" "" ""]
        set status [lindex $result 0]
        $status "$testcase execution, $optstr"
        if { $status != "pass" } {
            unresolved "$testcase perf check, $optstr"
            continue
        }

        # Get the performance data from the test built with normal
        # optimization.
        set val1 [profopt-perf-value $testcase $perf_ext $optstr]
        if { $val1 < 0 } {
            if { $val1 == -1 } {
                # The data file existed with the profile-directed
                # optimization so this one should, too.
                fail "$testcase perf check: file $base.$perf_ext does not exist, $optstr"
            }
            continue
        }

        # Compare results of the two runs and fail if the time with the
        # profile-directed optimization is significantly more than the time
        # without it.
        set status "pass"
        if { $val2 > $val1 } {
            # Check for a performance degradation outside of allowable limits.
            if { [expr $val2 - $val1] > [expr [expr $val1 * $perf_delta] / 100] } {
                set status "fail"
            }
        }
        if { $status == "fail" } {
            fail "$testcase perf check: orig: $val1 new: $val2, $optstr"
        } else {
            $status "$testcase perf check, $optstr"
            verbose "$testcase orig: $val1 new: $val2, $optstr" 2
        }
        remote_file build delete $execname2
        remote_file build delete $execname3
    }

    unset testname_with_flags
}