/* Calculate branch probabilities, and basic block execution counts.
   Copyright (C) 1990, 1991, 1992, 1993, 1994, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
   Contributed by James E. Wilson, UC Berkeley/Cygnus Support;
   based on some ideas from Dain Samples of UC Berkeley.
   Further mangling by Bob Manson, Cygnus Support.
   Converted to use trees by Dale Johannesen, Apple Computer.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* Generate basic block profile instrumentation and auxiliary files.
   Tree-based version.  See profile.c for overview.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "toplev.h"
#include "coverage.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "timevar.h"
#include "value-prof.h"
#include "ggc.h"

/* Do initialization work for the edge profiler.  */

static void
tree_init_edge_profiler (void)
{
}

/* Output instructions as GIMPLE trees to increment the edge
   execution count, and insert them on E.  We rely on
   bsi_insert_on_edge to preserve the order.  */

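/* A sketch of the sequence built below, where PROF_tmp1 and PROF_tmp2 are
   the temporaries created with create_tmp_var and "counter" is the
   GCOV_COUNTER_ARCS slot selected by EDGENO:

       PROF_tmp1 = counter;
       PROF_tmp2 = PROF_tmp1 + 1;
       counter = PROF_tmp2;

   The load/add/store split keeps each statement a simple GIMPLE assignment.  */
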
static void
tree_gen_edge_profiler (int edgeno, edge e)
{
  tree gcov_type_node = get_gcov_type ();
  tree tmp1 = create_tmp_var (gcov_type_node, "PROF");
  tree tmp2 = create_tmp_var (gcov_type_node, "PROF");
  tree ref = tree_coverage_counter_ref (GCOV_COUNTER_ARCS, edgeno);
  tree stmt1 = build (MODIFY_EXPR, gcov_type_node, tmp1, ref);
  tree stmt2 = build (MODIFY_EXPR, gcov_type_node, tmp2,
                      build (PLUS_EXPR, gcov_type_node,
                             tmp1, integer_one_node));
  tree stmt3 = build (MODIFY_EXPR, gcov_type_node, ref, tmp2);
  bsi_insert_on_edge (e, stmt1);
  bsi_insert_on_edge (e, stmt2);
  bsi_insert_on_edge (e, stmt3);
}

/* Output instructions as GIMPLE trees to increment the interval histogram
   counter.  VALUE is the expression whose value is profiled.  TAG is the
   tag of the section for counters, BASE is offset of the counter position.  */

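/* Layout of the interval histogram assumed below: counters 0 .. steps - 1
   count the individual values int_start .. int_start + steps - 1, counter
   [steps] collects everything at or above the top of the range, and counter
   [steps + 1] collects everything below int_start.  */
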
static void
tree_gen_interval_profiler (histogram_value value, unsigned tag, unsigned base)
{
  tree op, op1, op2, op1copy, op2copy;
  tree tmp1, tmp2, tmp3, val, index;
  tree label_decl2, label_decl3, label_decl4, label_decl5, label_decl6;
  edge e12, e23, e34, e45, e56;
  tree label2, label3, label4, label5, label6;
  tree stmt1, stmt2, stmt3, stmt4;
  /* Initializations are to prevent bogus uninitialized warnings.  */
  tree bb1end = NULL_TREE, bb2end = NULL_TREE, bb3end = NULL_TREE;
  tree bb4end = NULL_TREE, bb5end = NULL_TREE;
  tree ref = tree_coverage_counter_ref (tag, base), ref2;
  basic_block bb2, bb3, bb4, bb5, bb6;
  tree stmt = value->hvalue.tree.stmt;
  block_stmt_iterator bsi = bsi_for_stmt (stmt);
  basic_block bb = bb_for_stmt (stmt);
  tree gcov_type_node = get_gcov_type ();
  tree optype;

  op = stmt;
  if (TREE_CODE (stmt) == RETURN_EXPR
      && TREE_OPERAND (stmt, 0)
      && TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR)
    op = TREE_OPERAND (stmt, 0);
  /* op == MODIFY_EXPR */
  op = TREE_OPERAND (op, 1);
  /* op == TRUNC_DIV or TRUNC_MOD */
  op1 = TREE_OPERAND (op, 0);
  op2 = TREE_OPERAND (op, 1);
  optype = TREE_TYPE (op);

  /* Blocks used below:
     1 = original, 2 = second compare, 3 = normal case (neither more
     nor less), 4 = more, 5 = less, 6 = end.  */
  label_decl2 = create_artificial_label ();
  label_decl3 = create_artificial_label ();
  label_decl4 = create_artificial_label ();
  label_decl5 = create_artificial_label ();
  label_decl6 = create_artificial_label ();

  /* Do not evaluate op1 or op2 more than once.  Probably
     volatile loads are the only things that could cause
     a problem, but this is harmless in any case.  */
  op1copy = create_tmp_var (optype, "PROF");
  op2copy = create_tmp_var (optype, "PROF");
  stmt1 = build2 (MODIFY_EXPR, optype, op1copy, op1);
  stmt2 = build2 (MODIFY_EXPR, optype, op2copy, op2);
  TREE_OPERAND (op, 0) = op1copy;
  TREE_OPERAND (op, 1) = op2copy;

  val = create_tmp_var (optype, "PROF");
  stmt3 = build2 (MODIFY_EXPR, optype, val,
                  build2 (TRUNC_DIV_EXPR, optype, op1copy, op2copy));
  stmt4 = build2 (MODIFY_EXPR, optype, val,
                  build2 (MINUS_EXPR, optype, val,
                          build_int_cst (optype, value->hdata.intvl.int_start)));
  bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt2, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt3, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt4, BSI_SAME_STMT);

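  /* At this point VAL holds (op1 / op2) - int_start, i.e. the tentative
     histogram bucket before it is range-checked below.  */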
  index = create_tmp_var (gcov_type_node, "PROF");

  /* Check for too big.  */
  stmt1 = build3 (COND_EXPR, void_type_node,
                  build2 (GE_EXPR, boolean_type_node, val,
                          build_int_cst (optype, value->hdata.intvl.steps)),
                  build1 (GOTO_EXPR, void_type_node, label_decl4),
                  build1 (GOTO_EXPR, void_type_node, label_decl2));
  bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
  bb1end = stmt1;

  /* Check for too small.  */
  label2 = build1 (LABEL_EXPR, void_type_node, label_decl2);
  bsi_insert_before (&bsi, label2, BSI_SAME_STMT);
  stmt1 = build3 (COND_EXPR, void_type_node,
                  build2 (LT_EXPR, boolean_type_node, val, integer_zero_node),
                  build1 (GOTO_EXPR, void_type_node, label_decl5),
                  build1 (GOTO_EXPR, void_type_node, label_decl3));
  bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
  bb2end = stmt1;

  /* Normal case, within range.  */
  label3 = build1 (LABEL_EXPR, void_type_node, label_decl3);
  bsi_insert_before (&bsi, label3, BSI_SAME_STMT);
  stmt1 = build2 (MODIFY_EXPR, gcov_type_node, index,
                  build1 (NOP_EXPR, gcov_type_node, val));
  bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
  bb3end = stmt1;

  /* Too big.  */
  label4 = build1 (LABEL_EXPR, void_type_node, label_decl4);
  stmt1 = build2 (MODIFY_EXPR, gcov_type_node, index,
                  build_int_cst (gcov_type_node, value->hdata.intvl.steps));
  bsi_insert_before (&bsi, label4, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
  bb4end = stmt1;

  /* Too small.  */
  label5 = build1 (LABEL_EXPR, void_type_node, label_decl5);
  stmt1 = build2 (MODIFY_EXPR, gcov_type_node, index,
                  build_int_cst (gcov_type_node,
                                 value->hdata.intvl.steps + 1));
  bsi_insert_before (&bsi, label5, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
  bb5end = stmt1;

  /* Increment appropriate counter.  */
  label6 = build1 (LABEL_EXPR, void_type_node, label_decl6);
  bsi_insert_before (&bsi, label6, BSI_SAME_STMT);

  tmp1 = create_tmp_var (gcov_type_node, "PROF");
  tmp2 = create_tmp_var (gcov_type_node, "PROF");
  tmp3 = create_tmp_var (gcov_type_node, "PROF");
  stmt1 = build2 (MODIFY_EXPR, gcov_type_node, tmp1,
                  build2 (PLUS_EXPR, gcov_type_node, index,
                          TREE_OPERAND (ref, 1)));
  TREE_OPERAND (ref, 1) = tmp1;
  /* Make a copy to avoid sharing complaints.  */
  ref2 = build4 (ARRAY_REF, TREE_TYPE (ref), TREE_OPERAND (ref, 0),
                 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2),
                 TREE_OPERAND (ref, 3));

  stmt2 = build2 (MODIFY_EXPR, gcov_type_node, tmp2, ref);
  stmt3 = build2 (MODIFY_EXPR, gcov_type_node, tmp3,
                  build2 (PLUS_EXPR, gcov_type_node, tmp2, integer_one_node));
  stmt4 = build2 (MODIFY_EXPR, gcov_type_node, ref2, tmp3);
  bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt2, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt3, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt4, BSI_SAME_STMT);

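  /* In effect the statements above perform counters[base + index]++ for
     this tag's counter section: the constant index operand of the counter
     ARRAY_REF is replaced by the run-time sum in tmp1, and the unshared
     copy ref2 is used on the left-hand side of the store.  */
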
  /* Now fix up the CFG.  */
  /* 1->2,4; 2->3,5; 3->6; 4->6; 5->6 */
  e12 = split_block (bb, bb1end);
  bb2 = e12->dest;
  e23 = split_block (bb2, bb2end);
  bb3 = e23->dest;
  e34 = split_block (bb3, bb3end);
  bb4 = e34->dest;
  e45 = split_block (bb4, bb4end);
  bb5 = e45->dest;
  e56 = split_block (bb5, bb5end);
  bb6 = e56->dest;

  e12->flags &= ~EDGE_FALLTHRU;
  e12->flags |= EDGE_FALSE_VALUE;
  make_edge (bb, bb4, EDGE_TRUE_VALUE);
  e23->flags &= ~EDGE_FALLTHRU;
  e23->flags |= EDGE_FALSE_VALUE;
  make_edge (bb2, bb5, EDGE_TRUE_VALUE);
  remove_edge (e34);
  make_edge (bb3, bb6, EDGE_FALLTHRU);
  remove_edge (e45);
  make_edge (bb4, bb6, EDGE_FALLTHRU);
}

/* Output instructions as GIMPLE trees to increment the power of two histogram
   counter.  VALUE is the expression whose value is profiled.  TAG is the tag
   of the section for counters, BASE is offset of the counter position.  */

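/* Shape of the histogram maintained below: counter 0 counts denominators
   that are not positive powers of two; a denominator equal to 2^K bumps
   counter K + 1, because the loop below shifts the value right until it
   reaches zero, incrementing the index once per shift.  */
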
static void
tree_gen_pow2_profiler (histogram_value value, unsigned tag, unsigned base)
{
  tree op;
  tree tmp1, tmp2, tmp3;
  tree index, denom;
  tree label_decl1 = create_artificial_label ();
  tree label_decl2 = create_artificial_label ();
  tree label_decl3 = create_artificial_label ();
  tree label1, label2, label3;
  tree stmt1, stmt2, stmt3, stmt4;
  tree bb1end, bb2end, bb3end;
  tree ref = tree_coverage_counter_ref (tag, base), ref2;
  basic_block bb2, bb3, bb4;
  tree stmt = value->hvalue.tree.stmt;
  block_stmt_iterator bsi = bsi_for_stmt (stmt);
  basic_block bb = bb_for_stmt (stmt);
  tree gcov_type_node = get_gcov_type ();
  tree optype, optypesigned, optypeunsigned;

  op = stmt;
  if (TREE_CODE (stmt) == RETURN_EXPR
      && TREE_OPERAND (stmt, 0)
      && TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR)
    op = TREE_OPERAND (stmt, 0);
  /* op == MODIFY_EXPR */
  op = TREE_OPERAND (op, 1);
  /* op == TRUNC_DIV or TRUNC_MOD */
  op = TREE_OPERAND (op, 1);
  /* op == denominator */
  optype = TREE_TYPE (op);
  if (TYPE_UNSIGNED (optype))
    {
      /* Right shift must be unsigned.  */
      optypeunsigned = optype;
      optypesigned = build_distinct_type_copy (optype);
      TYPE_UNSIGNED (optypesigned) = false;
    }
  else
    {
      /* Compare to zero must be signed.  */
      optypesigned = optype;
      optypeunsigned = build_distinct_type_copy (optype);
      TYPE_UNSIGNED (optypeunsigned) = true;
    }

  /* Set up variables and check if denominator is negative when considered
     as signed.  */
  index = create_tmp_var (gcov_type_node, "PROF");
  denom = create_tmp_var (optype, "PROF");
  stmt1 = build2 (MODIFY_EXPR, gcov_type_node, index, integer_zero_node);
  stmt2 = build2 (MODIFY_EXPR, optype, denom, op);
  if (optypesigned == optype)
    {
      tmp1 = denom;
      stmt3 = NULL_TREE;
    }
  else
    {
      tmp1 = create_tmp_var (optypesigned, "PROF");
      stmt3 = build2 (MODIFY_EXPR, optypesigned, tmp1,
                      build1 (NOP_EXPR, optypesigned, denom));
    }
  stmt4 = build3 (COND_EXPR, void_type_node,
                  build2 (LE_EXPR, boolean_type_node, tmp1, integer_zero_node),
                  build1 (GOTO_EXPR, void_type_node, label_decl3),
                  build1 (GOTO_EXPR, void_type_node, label_decl1));
  bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt2, BSI_SAME_STMT);
  if (stmt3)
    bsi_insert_before (&bsi, stmt3, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt4, BSI_SAME_STMT);
  bb1end = stmt4;

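  /* When the operand type is unsigned, the NOP_EXPR above reinterprets
     DENOM in the signed copy of its type, so the LE_EXPR against zero
     rejects zero and anything with the sign bit set before the shift
     loop is entered.  */
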
  /* Nonnegative.  Check if denominator is power of 2.  */
  label1 = build1 (LABEL_EXPR, void_type_node, label_decl1);
  tmp1 = create_tmp_var (optype, "PROF");
  tmp2 = create_tmp_var (optype, "PROF");
  stmt1 = build2 (MODIFY_EXPR, optype, tmp1,
                  build2 (PLUS_EXPR, optype, denom, integer_minus_one_node));
  stmt2 = build2 (MODIFY_EXPR, optype, tmp2,
                  build2 (BIT_AND_EXPR, optype, tmp1, denom));
  stmt3 = build3 (COND_EXPR, void_type_node,
                  build2 (NE_EXPR, boolean_type_node, tmp2, integer_zero_node),
                  build1 (GOTO_EXPR, void_type_node, label_decl3),
                  build1 (GOTO_EXPR, void_type_node, label_decl2));
  bsi_insert_before (&bsi, label1, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt2, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt3, BSI_SAME_STMT);
  bb2end = stmt3;

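  /* The check above is the usual (x & (x - 1)) == 0 idiom: subtracting one
     from a power of two clears its single set bit, so the AND is zero; any
     other positive value keeps at least one bit in common with its
     predecessor.  */
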
  /* Loop.  Increment index, shift denominator, repeat if denominator nonzero.  */
  label2 = build1 (LABEL_EXPR, void_type_node, label_decl2);
  stmt1 = build2 (MODIFY_EXPR, gcov_type_node, index,
                  build2 (PLUS_EXPR, gcov_type_node, index, integer_one_node));
  if (optypeunsigned == optype)
    {
      tmp1 = denom;
      stmt2 = NULL_TREE;
    }
  else
    {
      tmp1 = create_tmp_var (optypeunsigned, "PROF");
      stmt2 = build2 (MODIFY_EXPR, optypeunsigned, tmp1,
                      build1 (NOP_EXPR, optypeunsigned, denom));
    }
  stmt3 = build2 (MODIFY_EXPR, optype, denom,
                  build2 (RSHIFT_EXPR, optype, tmp1, integer_one_node));
  stmt4 = build3 (COND_EXPR, void_type_node,
                  build2 (NE_EXPR, boolean_type_node, denom, integer_zero_node),
                  build1 (GOTO_EXPR, void_type_node, label_decl2),
                  build1 (GOTO_EXPR, void_type_node, label_decl3));
  bsi_insert_before (&bsi, label2, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
  if (stmt2)
    bsi_insert_before (&bsi, stmt2, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt3, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt4, BSI_SAME_STMT);
  bb3end = stmt4;

  /* Increment the appropriate counter.  */
  label3 = build1 (LABEL_EXPR, void_type_node, label_decl3);
  tmp1 = create_tmp_var (gcov_type_node, "PROF");
  tmp2 = create_tmp_var (gcov_type_node, "PROF");
  tmp3 = create_tmp_var (gcov_type_node, "PROF");
  stmt1 = build2 (MODIFY_EXPR, gcov_type_node, tmp1,
                  build2 (PLUS_EXPR, gcov_type_node,
                          index, TREE_OPERAND (ref, 1)));
  TREE_OPERAND (ref, 1) = tmp1;
  /* Make a copy to avoid sharing complaints.  */
  ref2 = build4 (ARRAY_REF, TREE_TYPE (ref), TREE_OPERAND (ref, 0),
                 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
  stmt2 = build2 (MODIFY_EXPR, gcov_type_node, tmp2, ref);
  stmt3 = build2 (MODIFY_EXPR, gcov_type_node, tmp3,
                  build2 (PLUS_EXPR, gcov_type_node, tmp2, integer_one_node));
  stmt4 = build2 (MODIFY_EXPR, gcov_type_node, ref2, tmp3);
  bsi_insert_before (&bsi, label3, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt2, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt3, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt4, BSI_SAME_STMT);

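  /* Block layout produced by the splits below: BB ends with the sign check
     (true edge to BB4), BB2 ends with the power-of-two check (its true edge,
     i.e. not a power of two, also goes to BB4), BB3 is the shift loop with a
     true edge back to itself, and BB4 bumps the selected counter.  */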
  /* Now fix up the CFG.  */
  bb2 = (split_block (bb, bb1end))->dest;
  bb3 = (split_block (bb2, bb2end))->dest;
  bb4 = (split_block (bb3, bb3end))->dest;

  EDGE_SUCC (bb, 0)->flags &= ~EDGE_FALLTHRU;
  EDGE_SUCC (bb, 0)->flags |= EDGE_FALSE_VALUE;
  make_edge (bb, bb4, EDGE_TRUE_VALUE);

  EDGE_SUCC (bb2, 0)->flags &= ~EDGE_FALLTHRU;
  EDGE_SUCC (bb2, 0)->flags |= EDGE_FALSE_VALUE;
  make_edge (bb2, bb4, EDGE_TRUE_VALUE);

  EDGE_SUCC (bb3, 0)->flags &= ~EDGE_FALLTHRU;
  EDGE_SUCC (bb3, 0)->flags |= EDGE_FALSE_VALUE;
  make_edge (bb3, bb3, EDGE_TRUE_VALUE);
}

/* Output instructions as GIMPLE trees for code to find the most common value.
   VALUE is the expression whose value is profiled.  TAG is the tag of the
   section for counters, BASE is offset of the counter position.  */

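/* Three counters are used for each profiled site: counter 0 holds the
   current candidate value, counter 1 is the vote count maintained below
   (incremented on a match, decremented on a mismatch, and the candidate is
   replaced when it reaches zero), and counter 2 counts all executions.  */
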
static void
tree_gen_one_value_profiler (histogram_value value, unsigned tag, unsigned base)
{
  tree op;
  tree tmp1, tmp2, tmp3;
  tree label_decl1 = create_artificial_label ();
  tree label_decl2 = create_artificial_label ();
  tree label_decl3 = create_artificial_label ();
  tree label_decl4 = create_artificial_label ();
  tree label_decl5 = create_artificial_label ();
  tree label1, label2, label3, label4, label5;
  tree stmt1, stmt2, stmt3, stmt4;
  tree bb1end, bb2end, bb3end, bb4end, bb5end;
  tree ref1 = tree_coverage_counter_ref (tag, base);
  tree ref2 = tree_coverage_counter_ref (tag, base + 1);
  tree ref3 = tree_coverage_counter_ref (tag, base + 2);
  basic_block bb2, bb3, bb4, bb5, bb6;
  tree stmt = value->hvalue.tree.stmt;
  block_stmt_iterator bsi = bsi_for_stmt (stmt);
  basic_block bb = bb_for_stmt (stmt);
  tree gcov_type_node = get_gcov_type ();
  tree optype;

  op = stmt;
  if (TREE_CODE (stmt) == RETURN_EXPR
      && TREE_OPERAND (stmt, 0)
      && TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR)
    op = TREE_OPERAND (stmt, 0);
  /* op == MODIFY_EXPR */
  op = TREE_OPERAND (op, 1);
  /* op == TRUNC_DIV or TRUNC_MOD */
  op = TREE_OPERAND (op, 1);
  /* op == denominator */
  optype = TREE_TYPE (op);

  /* Check if the stored value matches.  */
  tmp1 = create_tmp_var (gcov_type_node, "PROF");
  tmp2 = create_tmp_var (optype, "PROF");
  tmp3 = create_tmp_var (optype, "PROF");
  stmt1 = build2 (MODIFY_EXPR, gcov_type_node, tmp1, ref1);
  stmt2 = build2 (MODIFY_EXPR, optype, tmp2,
                  build1 (NOP_EXPR, optype, tmp1));
  stmt3 = build2 (MODIFY_EXPR, optype, tmp3, op);
  stmt4 = build3 (COND_EXPR, void_type_node,
                  build2 (EQ_EXPR, boolean_type_node, tmp2, tmp3),
                  build1 (GOTO_EXPR, void_type_node, label_decl4),
                  build1 (GOTO_EXPR, void_type_node, label_decl1));
  bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt2, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt3, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt4, BSI_SAME_STMT);
  bb1end = stmt4;

  /* Does not match; check whether the counter is zero.  */
  label1 = build1 (LABEL_EXPR, void_type_node, label_decl1);
  tmp1 = create_tmp_var (gcov_type_node, "PROF");
  stmt1 = build2 (MODIFY_EXPR, gcov_type_node, tmp1, ref2);
  stmt2 = build3 (COND_EXPR, void_type_node,
                  build2 (EQ_EXPR, boolean_type_node, tmp1, integer_zero_node),
                  build1 (GOTO_EXPR, void_type_node, label_decl3),
                  build1 (GOTO_EXPR, void_type_node, label_decl2));
  bsi_insert_before (&bsi, label1, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt2, BSI_SAME_STMT);
  bb2end = stmt2;

  /* Counter is not zero yet, decrement.  */
  label2 = build1 (LABEL_EXPR, void_type_node, label_decl2);
  tmp1 = create_tmp_var (gcov_type_node, "PROF");
  tmp2 = create_tmp_var (gcov_type_node, "PROF");
  stmt1 = build2 (MODIFY_EXPR, gcov_type_node, tmp1, ref2);
  stmt2 = build2 (MODIFY_EXPR, gcov_type_node, tmp2,
                  build (MINUS_EXPR, gcov_type_node, tmp1, integer_one_node));
  stmt3 = build2 (MODIFY_EXPR, gcov_type_node, ref2, tmp2);
  bsi_insert_before (&bsi, label2, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt2, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt3, BSI_SAME_STMT);
  bb3end = stmt3;

  /* Counter was zero, store new value.  */
  label3 = build1 (LABEL_EXPR, void_type_node, label_decl3);
  tmp1 = create_tmp_var (optype, "PROF");
  tmp2 = create_tmp_var (gcov_type_node, "PROF");
  stmt1 = build2 (MODIFY_EXPR, optype, tmp1, op);
  stmt2 = build2 (MODIFY_EXPR, gcov_type_node, tmp2,
                  build1 (NOP_EXPR, gcov_type_node, tmp1));
  stmt3 = build2 (MODIFY_EXPR, gcov_type_node, ref1, tmp2);
  bsi_insert_before (&bsi, label3, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt2, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt3, BSI_SAME_STMT);
  bb4end = stmt3;

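  /* No branch is emitted here: this block falls through into the increment
     below, so a freshly stored candidate starts out with a count of one.  */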
  /* Increment counter.  */
  label4 = build1 (LABEL_EXPR, void_type_node, label_decl4);
  tmp1 = create_tmp_var (gcov_type_node, "PROF");
  tmp2 = create_tmp_var (gcov_type_node, "PROF");
  stmt1 = build2 (MODIFY_EXPR, gcov_type_node, tmp1, ref2);
  stmt2 = build2 (MODIFY_EXPR, gcov_type_node, tmp2,
                  build (PLUS_EXPR, gcov_type_node, tmp1, integer_one_node));
  stmt3 = build2 (MODIFY_EXPR, gcov_type_node, ref2, tmp2);
  bsi_insert_before (&bsi, label4, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt2, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt3, BSI_SAME_STMT);
  bb5end = stmt3;

  /* Increment the counter of all executions; this seems redundant given
     that we have counts for edges in cfg, but it may happen that some
     optimization will change the counts for the block (either because
     it is unable to update them correctly, or because it will duplicate
     the block or its part).  */
  label5 = build1 (LABEL_EXPR, void_type_node, label_decl5);
  tmp1 = create_tmp_var (gcov_type_node, "PROF");
  tmp2 = create_tmp_var (gcov_type_node, "PROF");
  stmt1 = build2 (MODIFY_EXPR, gcov_type_node, tmp1, ref3);
  stmt2 = build2 (MODIFY_EXPR, gcov_type_node, tmp2,
                  build (PLUS_EXPR, gcov_type_node, tmp1, integer_one_node));
  stmt3 = build2 (MODIFY_EXPR, gcov_type_node, ref3, tmp2);
  bsi_insert_before (&bsi, label5, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt2, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt3, BSI_SAME_STMT);

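  /* Block roles after the splits below: BB compares against the stored
     candidate (a match jumps to BB5), BB2 tests the counter for zero (zero
     jumps to BB4), BB3 decrements the counter and then skips to BB6, BB4
     stores the new candidate and falls through into BB5, BB5 increments the
     counter, and BB6 bumps the all-executions counter.  */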
  /* Now fix up the CFG.  */
  bb2 = (split_block (bb, bb1end))->dest;
  bb3 = (split_block (bb2, bb2end))->dest;
  bb4 = (split_block (bb3, bb3end))->dest;
  bb5 = (split_block (bb4, bb4end))->dest;
  bb6 = (split_block (bb5, bb5end))->dest;

  EDGE_SUCC (bb, 0)->flags &= ~EDGE_FALLTHRU;
  EDGE_SUCC (bb, 0)->flags |= EDGE_FALSE_VALUE;
  make_edge (bb, bb5, EDGE_TRUE_VALUE);

  EDGE_SUCC (bb2, 0)->flags &= ~EDGE_FALLTHRU;
  EDGE_SUCC (bb2, 0)->flags |= EDGE_FALSE_VALUE;
  make_edge (bb2, bb4, EDGE_TRUE_VALUE);

  remove_edge (EDGE_SUCC (bb3, 0));
  make_edge (bb3, bb6, EDGE_FALLTHRU);
}

/* Output instructions as GIMPLE trees for code to find the most common value
   of a difference between two evaluations of an expression.
   VALUE is the expression whose value is profiled.  TAG is the tag of the
   section for counters, BASE is offset of the counter position.  */

static void
tree_gen_const_delta_profiler (histogram_value value ATTRIBUTE_UNUSED,
                               unsigned tag ATTRIBUTE_UNUSED,
                               unsigned base ATTRIBUTE_UNUSED)
{
  /* FIXME implement this.  */
#ifdef ENABLE_CHECKING
  internal_error ("unimplemented functionality");
#endif
  gcc_unreachable ();
}

/* Return 1 if tree-based profiling is in effect, else 0.
   If it is, set up hooks for tree-based profiling.
   Gate for pass_tree_profile.  */

static bool
do_tree_profiling (void)
{
  if (flag_tree_based_profiling
      && (profile_arc_flag || flag_test_coverage || flag_branch_probabilities))
    {
      tree_register_profile_hooks ();
      tree_register_value_prof_hooks ();
      return true;
    }
  return false;
}

/* Return the file on which profile dump output goes, if any.  */

static FILE *tree_profile_dump_file (void) {
  return dump_file;
}

static void
tree_profiling (void)
{
  branch_prob ();
  if (flag_branch_probabilities
      && flag_profile_values
      && flag_value_profile_transformations)
    value_profile_transformations ();
  /* The above could hose dominator info.  Currently there is
     none coming in, this is a safety valve.  It should be
     easy to adjust it, if and when there is some.  */
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);
}

struct tree_opt_pass pass_tree_profile =
{
  "tree_profile",                       /* name */
  do_tree_profiling,                    /* gate */
  tree_profiling,                       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_BRANCH_PROB,                       /* tv_id */
  PROP_gimple_leh | PROP_cfg,           /* properties_required */
  PROP_gimple_leh | PROP_cfg,           /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_stmts,                    /* todo_flags_finish */
  0                                     /* letter */
};

struct profile_hooks tree_profile_hooks =
{
  tree_init_edge_profiler,       /* init_edge_profiler */
  tree_gen_edge_profiler,        /* gen_edge_profiler */
  tree_gen_interval_profiler,    /* gen_interval_profiler */
  tree_gen_pow2_profiler,        /* gen_pow2_profiler */
  tree_gen_one_value_profiler,   /* gen_one_value_profiler */
  tree_gen_const_delta_profiler, /* gen_const_delta_profiler */
  tree_profile_dump_file         /* profile_dump_file */
};