[official-gcc/graphite-test-results.git] / gcc / testsuite / lib / profopt.exp
# Copyright (C) 2001, 2004, 2005, 2007, 2008 Free Software Foundation, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GCC; see the file COPYING3.  If not see
# <http://www.gnu.org/licenses/>.

# This script was submitted by Janis Johnson <janis187@us.ibm.com>.

# Test the functionality and, optionally, the performance improvement of
# programs compiled with profile-directed optimizations.  Compile and
# run a test with profile options, compile it with options using the
# profile feedback, and then run the test again.  Optionally compile
# and run a third time without the profile-directed optimization and
# compare timing results of the program with normal optimization and
# with the profile-directed optimization.  Each test is run using
# multiple sets of optimization and/or code generation options in
# addition to the profiling and feedback options.

# If perf_ext is defined and the performance value for the
# profile-directed test run is nonzero then the performance check will
# be done.
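
# A minimal sketch of how an including .exp file typically drives this
# library; the option and extension values below are illustrative, not
# requirements of profopt.exp:
#
#   set tool gcc
#   set prof_ext "gcda"
#   set profile_option "-fprofile-generate"
#   set feedback_option "-fprofile-use"
#   load_lib profopt.exp
#   foreach src [lsort [glob -nocomplain $srcdir/$subdir/*.c]] {
#       profopt-execute $src
#   }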

load_lib dg.exp
load_lib gcc-dg.exp

global PROFOPT_OPTIONS perf_delta

# The including .exp file must define these.
global tool profile_option feedback_option prof_ext
if ![info exists tool] {
    error "Tool is not specified."
}
if ![info exists prof_ext] {
    error "No profile data file extensions specified."
}

# The maximum performance degradation can be defined in the including file.
if ![info exists perf_delta] {
    set perf_delta 4
}

# The default option list can be overridden by
# PROFOPT_OPTIONS="{ { list1 } ... { list2 } }".

if ![info exists PROFOPT_OPTIONS] {
    set PROFOPT_OPTIONS [list \
        { -g } \
        { -O0 } \
        { -O1 } \
        { -O2 } \
        { -O3 } \
        { -O3 -g } \
        { -Os } ]
}

set prof_option_list $PROFOPT_OPTIONS
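
# For example, a caller that only wants to exercise two optimization
# levels could define this before loading profopt.exp (an illustrative
# override, not a default):
#
#   set PROFOPT_OPTIONS [list { -O2 } { -O3 }]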

#
# profopt-cleanup -- remove profiling or performance results files.
#
# TESTCASE is the name of the test
# EXTLIST is the list of extensions of files to remove
#
proc profopt-cleanup { testcase extlist } {
    set basename [file tail $testcase]
    set base [file rootname $basename]
    foreach ext $extlist {
        set files [glob -nocomplain $base.$ext]
        if { $files != "" } {
            eval "remote_file build delete $files"
        }
    }
}

#
# profopt-perf-value -- get performance value for a test
#
# TESTCASE is the name of the test
# PERF_EXT is the extension of the performance result file
# OPTSTR is the string of compiler options
#
proc profopt-perf-value { testcase perf_ext optstr } {
    set basename [file tail $testcase]
    set base [file rootname $basename]
    set files [glob -nocomplain $base.$perf_ext]
    # The file doesn't exist; let the caller decide if that's a problem.
    if { $files == "" } {
        return -2
    }
    remote_upload host $base.$perf_ext $base.$perf_ext
    set fd [open $base.$perf_ext r]
    gets $fd line
    set val -2
    if [regexp "TIME" $line] {
        if [regexp "TIME -1" $line] {
            fail "$testcase perf check: no consistent time available, $optstr"
            set val -1
        } elseif ![regexp "(\[0-9\]+)" "$line" val] {
            set val -2
        }
    }
    # Report problems with an existing file.
    if { $val == -2 } {
        fail "$testcase perf check: file $base.$perf_ext has wrong format, $optstr"
    }
    close $fd
    profopt-cleanup $testcase $perf_ext
    return $val
}
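
# For reference, profopt-perf-value expects the first line of the
# performance file to look like "TIME 1234" (an arbitrary sample value);
# "TIME -1" means no consistent time was available, and any other
# content is reported above as a wrong-format failure.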

#
# dg-final-generate -- process code to run after the profile-generate step
#
# ARGS is the line number of the directive followed by the commands.
#
proc dg-final-generate { args } {
    global generate_final_code

    if { [llength $args] > 2 } {
        error "[lindex $args 0]: too many arguments"
        return
    }
    append generate_final_code "[lindex $args 1]\n"
}

#
# dg-final-use -- process code to run after the profile-use step
#
# ARGS is the line number of the directive followed by the commands.
#
proc dg-final-use { args } {
    global use_final_code

    if { [llength $args] > 2 } {
        error "[lindex $args 0]: too many arguments"
        return
    }
    append use_final_code "[lindex $args 1]\n"
}
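
# A testcase uses these directives the same way it would use dg-final.
# For instance, a hypothetical test might request a dump scan only after
# the feedback-directed compile:
#
#   /* { dg-final-use { scan-tree-dump "foo" "optimized" } } */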

#
# profopt-final-code -- run final code
#
# WHICH is "generate" or "use".
# FINAL_CODE is the TCL code to run.
# NAME is the name of the test, for error messages.
#
proc profopt-final-code { which final_code name } {
    # This is copied from dg-test in dg.exp of DejaGnu.
    regsub -all "\\\\(\[{}\])" $final_code "\\1" final_code
    proc profopt-final-proc { args } $final_code
    if [catch "profopt-final-proc $name" errmsg] {
        perror "$name: error executing dg-final-${which}: $errmsg"
        unresolved "$name: Error executing dg-final-${which}: $errmsg"
    }
}

#
# profopt-get-options -- process test directives
#
# SRC is the full pathname of the testcase.
#
proc profopt-get-options { src } {
    # dg-options sets a variable called dg-extra-tool-flags.
    set dg-extra-tool-flags ""

    # dg-require-* sets dg-do-what.
    upvar dg-do-what dg-do-what

    # current_compiler_flags reads tool_flags from the same stack frame
    # as dg-extra-tool-flags.
    set tool_flags ""

    set tmp [dg-get-options $src]
    foreach op $tmp {
        set cmd [lindex $op 0]
        if { ![string compare "dg-options" $cmd] \
             || ![string compare "dg-skip-if" $cmd] \
             || ![string compare "dg-final-generate" $cmd] \
             || ![string compare "dg-final-use" $cmd] \
             || ![string compare "dg-additional-sources" $cmd] \
             || [string match "dg-require-*" $cmd] } {
            set status [catch "$op" errmsg]
            if { $status != 0 } {
                perror "$src: $errmsg for \"$op\"\n"
                unresolved "$src: $errmsg for \"$op\""
                return
            }
        } else {
            # Ignore unrecognized dg- commands, but warn about them.
            warning "profopt.exp does not support $cmd"
        }
    }

    # Return flags to use for compiling the primary source file and for
    # linking.
    return ${dg-extra-tool-flags}
}

#
# profopt-execute -- compile for profiling and then feedback, then normal
#
# SRC is the full pathname of the testcase.
#
proc profopt-execute { src } {
    global srcdir tmpdir
    global prof_option_list
    global tool profile_option feedback_option prof_ext perf_ext perf_delta
    global generate_final_code use_final_code
    global verbose

    if ![info exists profile_option] {
        error "No profile option specified for first compile."
    }
    if ![info exists feedback_option] {
        error "No feedback option specified for second compile."
    }

    regsub "(?q)$srcdir/" $src "" testcase
    # If we couldn't rip $srcdir out of `src' then just do the best we can.
    # The point is to reduce the unnecessary noise in the logs.  Don't strip
    # out too much because different testcases with the same name can confuse
    # `test-tool'.
    if [string match "/*" $testcase] {
        set testcase "[file tail [file dirname $src]]/[file tail $src]"
    }

    set executable $tmpdir/[file tail [file rootname $src].x]
    set basename [file tail $testcase]
    set base [file rootname $basename]

    set count 0
    foreach option $prof_option_list {
        set execname1 "${executable}${count}1"
        set execname2 "${executable}${count}2"
        set execname3 "${executable}${count}3"
        incr count

        remote_file build delete $execname1
        remote_file build delete $execname2
        remote_file build delete $execname3
        verbose "Testing $testcase, $option" 1

        # Remove old profiling and performance data files.
        foreach ext $prof_ext {
            remote_file target delete $tmpdir/$base.$ext
        }
        if [info exists perf_ext] {
            profopt-cleanup $testcase $perf_ext
        }

        # Process test directives.

        set generate_final_code ""
        set use_final_code ""
        set dg-do-what [list "run" "" P]
        set extra_flags [profopt-get-options $src]
        if { [lindex ${dg-do-what} 1] == "N" } {
            unsupported "$src"
            verbose "$src not supported on this target, skipping it" 3
            return
        }

        set extra_options [dg-additional-files-options "" "$src"]

        # Compile for profiling.

        set options "$extra_options"
        lappend options "additional_flags=$option $extra_flags $profile_option"
        set optstr "$option $profile_option"
        set comp_output [${tool}_target_compile "$src" "$execname1" executable $options]
        if ![${tool}_check_compile "$testcase compilation" $optstr $execname1 $comp_output] {
            unresolved "$testcase execution, $optstr"
            unresolved "$testcase compilation, $option $feedback_option"
            unresolved "$testcase execution, $option $feedback_option"
            continue
        }

        # Run the profiled test.

        set result [${tool}_load $execname1 "" ""]
        set status [lindex $result 0]
        set missing_file 0
        # Make sure the profile data was generated, and fail if not.
        if { $status == "pass" } {
            foreach ext $prof_ext {
                remote_upload target $tmpdir/$base.$ext
                set files [glob -nocomplain $base.$ext]
                if { $files == "" } {
                    set status "fail"
                    set missing_file 1
                    fail "$testcase execution: file $base.$ext does not exist, $option $profile_option"
                }
            }
        }
        if { $missing_file == 0 } {
            $status "$testcase execution, $optstr"
        }

        # If there is dg-final code to execute for the generate step, do it
        # even if it failed; it might clean up temporary files.
        if ![string match $generate_final_code ""] {
            profopt-final-code "generate" $generate_final_code $testcase
        }

        remote_file build delete $execname1

        # Quit for this round if it failed.
        if { $status != "pass" } {
            unresolved "$testcase compilation, $option $feedback_option"
            unresolved "$testcase execution, $option $feedback_option"
            continue
        }

        # Compile with feedback-directed optimizations.

        set options "$extra_options"
        lappend options "additional_flags=$option $extra_flags $feedback_option"
        set optstr "$option $feedback_option"
        set comp_output [${tool}_target_compile "$src" "$execname2" "executable" $options]
        if ![${tool}_check_compile "$testcase compilation" $optstr $execname2 $comp_output] {
            unresolved "$testcase execution, $optstr"
            continue
        }

        # Run the profile-directed optimized test.

        set result [${tool}_load "$execname2" "" ""]
        set status [lindex $result 0]
        $status "$testcase execution, $optstr"

        # If there is dg-final code to execute for the use step, do it.
        if ![string match $use_final_code ""] {
            profopt-final-code "use" $use_final_code $testcase
        }

        # Remove the profiling data files.
        foreach ext $prof_ext {
            remote_file target delete $tmpdir/$base.$ext
        }

        if { $status != "pass" } {
            continue
        }

        # If the test is not expected to produce performance data then
        # we're done now.
        if ![info exists perf_ext] {
            remote_file build delete $execname2
            continue
        }

        # Get the performance data from the test built with
        # profile-directed optimization.  If the file doesn't exist or if
        # the value is zero, skip the performance comparison.
        set val2 [profopt-perf-value $testcase $perf_ext $optstr]
        if { $val2 <= 0 } {
            remote_file build delete $execname2
            continue
        }

        # Compile with normal optimizations.

        set options "$extra_options"
        lappend options "additional_flags=$option"
        set optstr "$option"
        set comp_output [${tool}_target_compile "$src" "$execname3" "executable" $options]
        if ![${tool}_check_compile "$testcase compilation" $optstr $execname3 $comp_output] {
            unresolved "$testcase execution, $optstr"
            unresolved "$testcase perf check, $optstr"
            continue
        }

        # Run the test with normal optimizations.

        set result [${tool}_load "$execname3" "" ""]
        set status [lindex $result 0]
        $status "$testcase execution, $optstr"
        if { $status != "pass" } {
            unresolved "$testcase perf check, $optstr"
            continue
        }

        # Get the performance data from the test built with normal
        # optimization.
        set val1 [profopt-perf-value $testcase $perf_ext $optstr]
        if { $val1 < 0 } {
            if { $val1 == -2 } {
                # The data file existed with the profile-directed
                # optimization so this one should, too.
                fail "$testcase perf check: file $base.$perf_ext does not exist, $optstr"
            }
            continue
        }

        # Compare results of the two runs and fail if the time with the
        # profile-directed optimization is significantly more than the time
        # without it.
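        # For example, with the default perf_delta of 4 a baseline time
        # of 1000 allows a profile-directed time of at most 1040; any
        # larger value fails the check below.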
        set status "pass"
        if { $val2 > $val1 } {
            # Check for a performance degradation outside of allowable limits.
            if { [expr $val2 - $val1] > [expr [expr $val1 * $perf_delta] / 100] } {
                set status "fail"
            }
        }
        if { $status == "fail" } {
            fail "$testcase perf check: orig: $val1 new: $val2, $optstr"
        } else {
            $status "$testcase perf check, $optstr"
            verbose "$testcase orig: $val1 new: $val2, $optstr" 2
            remote_file build delete $execname2
            remote_file build delete $execname3
        }
    }
}