/* Utilities for ipa analysis.
   Copyright (C) 2005-2015 Free Software Foundation, Inc.
   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "double-int.h"
#include "tree.h"
#include "fold-const.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "gimple.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "splay-tree.h"
#include "plugin-api.h"
#include "cgraph.h"
#include "ipa-utils.h"
#include "ipa-reference.h"
#include "diagnostic.h"
#include "lto-streamer.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "ipa-inline.h"

/* Debugging function for postorder and inorder code.  NOTE is a string
   that is printed before the nodes are printed.  ORDER is an array of
   cgraph_nodes that has COUNT useful nodes in it.  */

void
ipa_print_order (FILE* out,
                 const char *note,
                 struct cgraph_node** order,
                 int count)
{
  int i;
  fprintf (out, "\n\n ordered call graph: %s\n", note);

  for (i = count - 1; i >= 0; i--)
    order[i]->dump (out);
  fprintf (out, "\n");
  fflush (out);
}

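/* Example usage (a sketch): after filling ORDER with N useful nodes, the
   result can be dumped for debugging with

     ipa_print_order (stderr, "reduced", order, n);
*/

/* Environment threaded through the recursive SCC walk in searchc below.  */
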
struct searchc_env {
  struct cgraph_node **stack;
  int stack_size;
  struct cgraph_node **result;
  int order_pos;
  splay_tree nodes_marked_new;
  bool reduce;
  bool allow_overwritable;
  int count;
};

/* This is an implementation of Tarjan's strongly connected region
   finder as reprinted in Aho, Hopcroft and Ullman's The Design and
   Analysis of Computer Algorithms (1974), pages 192-193.  This version
   has been customized for cgraph_nodes.  The env parameter is because
   it is recursive and there are no nested functions here.  This
   function should only be called from itself or
   ipa_reduced_postorder.  ENV is a stack env and would be
   unnecessary if C had nested functions.  V is the node to start
   searching from.  */

static void
searchc (struct searchc_env* env, struct cgraph_node *v,
         bool (*ignore_edge) (struct cgraph_edge *))
{
  struct cgraph_edge *edge;
  struct ipa_dfs_info *v_info = (struct ipa_dfs_info *) v->aux;

  /* Mark node as old.  */
  v_info->new_node = false;
  splay_tree_remove (env->nodes_marked_new, v->uid);

  v_info->dfn_number = env->count;
  v_info->low_link = env->count;
  env->count++;
  env->stack[(env->stack_size)++] = v;
  v_info->on_stack = true;

  for (edge = v->callees; edge; edge = edge->next_callee)
    {
      struct ipa_dfs_info * w_info;
      enum availability avail;
      struct cgraph_node *w = edge->callee->ultimate_alias_target (&avail);

      if (!w || (ignore_edge && ignore_edge (edge)))
        continue;

      if (w->aux
          && (avail > AVAIL_INTERPOSABLE
              || (env->allow_overwritable && avail == AVAIL_INTERPOSABLE)))
        {
          w_info = (struct ipa_dfs_info *) w->aux;
          if (w_info->new_node)
            {
              searchc (env, w, ignore_edge);
              v_info->low_link =
                (v_info->low_link < w_info->low_link) ?
                v_info->low_link : w_info->low_link;
            }
          else
            if ((w_info->dfn_number < v_info->dfn_number)
                && (w_info->on_stack))
              v_info->low_link =
                (w_info->dfn_number < v_info->low_link) ?
                w_info->dfn_number : v_info->low_link;
        }
    }

  if (v_info->low_link == v_info->dfn_number)
    {
      struct cgraph_node *last = NULL;
      struct cgraph_node *x;
      struct ipa_dfs_info *x_info;
      do
        {
          x = env->stack[--(env->stack_size)];
          x_info = (struct ipa_dfs_info *) x->aux;
          x_info->on_stack = false;
          x_info->scc_no = v_info->dfn_number;

          if (env->reduce)
            {
              x_info->next_cycle = last;
              last = x;
            }
          else
            env->result[env->order_pos++] = x;
        }
      while (v != x);
      if (env->reduce)
        env->result[env->order_pos++] = v;
    }
}

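/* A small worked example (illustrative only): for a call graph with
   edges a -> b, b -> c, c -> a and d -> a, the nodes a, b and c form one
   strongly connected component.  With REDUCE set, searchc emits a single
   representative for {a, b, c} whose next_cycle chain links the
   remaining members, followed (in postorder) by d.  */
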
/* Topsort the call graph by caller relation.  Put the result in ORDER.

   The REDUCE flag is true if you want the cycles reduced to single nodes.
   You can use ipa_get_nodes_in_cycle to obtain a vector containing all real
   call graph nodes in a reduced node.

   Set ALLOW_OVERWRITABLE if nodes with such availability should be included.
   IGNORE_EDGE, if non-NULL, is a hook that may make some edges insignificant
   for the topological sort.  */

int
ipa_reduced_postorder (struct cgraph_node **order,
                       bool reduce, bool allow_overwritable,
                       bool (*ignore_edge) (struct cgraph_edge *))
{
  struct cgraph_node *node;
  struct searchc_env env;
  splay_tree_node result;
  env.stack = XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  env.stack_size = 0;
  env.result = order;
  env.order_pos = 0;
  env.nodes_marked_new = splay_tree_new (splay_tree_compare_ints, 0, 0);
  env.count = 1;
  env.reduce = reduce;
  env.allow_overwritable = allow_overwritable;

  FOR_EACH_DEFINED_FUNCTION (node)
    {
      enum availability avail = node->get_availability ();

      if (avail > AVAIL_INTERPOSABLE
          || (allow_overwritable
              && (avail == AVAIL_INTERPOSABLE)))
        {
          /* Reuse the info if it is already there.  */
          struct ipa_dfs_info *info = (struct ipa_dfs_info *) node->aux;
          if (!info)
            info = XCNEW (struct ipa_dfs_info);
          info->new_node = true;
          info->on_stack = false;
          info->next_cycle = NULL;
          node->aux = info;

          splay_tree_insert (env.nodes_marked_new,
                             (splay_tree_key)node->uid,
                             (splay_tree_value)node);
        }
      else
        node->aux = NULL;
    }
  result = splay_tree_min (env.nodes_marked_new);
  while (result)
    {
      node = (struct cgraph_node *)result->value;
      searchc (&env, node, ignore_edge);
      result = splay_tree_min (env.nodes_marked_new);
    }
  splay_tree_delete (env.nodes_marked_new);
  free (env.stack);

  return env.order_pos;
}

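/* Example usage (a sketch, using only functions from this file):

     struct cgraph_node **order = XCNEWVEC (struct cgraph_node *,
                                            symtab->cgraph_count);
     int n = ipa_reduced_postorder (order, true, false, NULL);
     for (int i = 0; i < n; i++)
       ...process order[i]...;
     ipa_free_postorder_info ();
     free (order);
*/
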
/* Deallocate all ipa_dfs_info structures pointed to by the aux pointer
   of call graph nodes.  */

void
ipa_free_postorder_info (void)
{
  struct cgraph_node *node;
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      /* Get rid of the aux information.  */
      if (node->aux)
        {
          free (node->aux);
          node->aux = NULL;
        }
    }
}

/* Get the set of nodes for the cycle in the reduced call graph starting
   from NODE.  */

vec<cgraph_node *>
ipa_get_nodes_in_cycle (struct cgraph_node *node)
{
  vec<cgraph_node *> v = vNULL;
  struct ipa_dfs_info *node_dfs_info;
  while (node)
    {
      v.safe_push (node);
      node_dfs_info = (struct ipa_dfs_info *) node->aux;
      node = node_dfs_info->next_cycle;
    }
  return v;
}

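/* Example (a sketch): after ipa_reduced_postorder with REDUCE set, the
   members of the cycle represented by order[i] can be visited with

     vec<cgraph_node *> cycle = ipa_get_nodes_in_cycle (order[i]);
     unsigned j;
     cgraph_node *n;
     FOR_EACH_VEC_ELT (cycle, j, n)
       ...;
     cycle.release ();
*/
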
/* Return true iff the CS is an edge within a strongly connected component as
   computed by ipa_reduced_postorder.  */

bool
ipa_edge_within_scc (struct cgraph_edge *cs)
{
  struct ipa_dfs_info *caller_dfs = (struct ipa_dfs_info *) cs->caller->aux;
  struct ipa_dfs_info *callee_dfs;
  struct cgraph_node *callee = cs->callee->function_symbol ();

  callee_dfs = (struct ipa_dfs_info *) callee->aux;
  return (caller_dfs
          && callee_dfs
          && caller_dfs->scc_no == callee_dfs->scc_no);
}

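/* Note that ipa_edge_within_scc reads the ipa_dfs_info records that
   ipa_reduced_postorder leaves in the node aux pointers, so its result
   is only meaningful between that call and the matching
   ipa_free_postorder_info.  */

/* Stack entry for the iterative depth-first walk in ipa_reverse_postorder.  */
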
struct postorder_stack
{
  struct cgraph_node *node;
  struct cgraph_edge *edge;
  int ref;
};

/* Fill array order with all nodes with output flag set in the reverse
   topological order.  Return the number of elements in the array.
   FIXME: While walking, consider aliases, too.  */

int
ipa_reverse_postorder (struct cgraph_node **order)
{
  struct cgraph_node *node, *node2;
  int stack_size = 0;
  int order_pos = 0;
  struct cgraph_edge *edge;
  int pass;
  struct ipa_ref *ref = NULL;

  struct postorder_stack *stack =
    XCNEWVEC (struct postorder_stack, symtab->cgraph_count);

  /* We have to deal with cycles nicely, so use a depth first traversal
     output algorithm.  Ignore the fact that some functions won't need
     to be output and put them into order as well, so we get dependencies
     right through inline functions.  */
  FOR_EACH_FUNCTION (node)
    node->aux = NULL;
  for (pass = 0; pass < 2; pass++)
    FOR_EACH_FUNCTION (node)
      if (!node->aux
          && (pass
              || (!node->address_taken
                  && !node->global.inlined_to
                  && !node->alias && !node->thunk.thunk_p
                  && !node->only_called_directly_p ())))
        {
          stack_size = 0;
          stack[stack_size].node = node;
          stack[stack_size].edge = node->callers;
          stack[stack_size].ref = 0;
          node->aux = (void *)(size_t)1;
          while (stack_size >= 0)
            {
              while (true)
                {
                  node2 = NULL;
                  while (stack[stack_size].edge && !node2)
                    {
                      edge = stack[stack_size].edge;
                      node2 = edge->caller;
                      stack[stack_size].edge = edge->next_caller;
                      /* Break possible cycles involving always-inline
                         functions by ignoring edges from always-inline
                         functions to non-always-inline functions.  */
                      if (DECL_DISREGARD_INLINE_LIMITS (edge->caller->decl)
                          && !DECL_DISREGARD_INLINE_LIMITS
                               (edge->callee->function_symbol ()->decl))
                        node2 = NULL;
                    }
                  for (; stack[stack_size].node->iterate_referring (
                                                   stack[stack_size].ref,
                                                   ref) && !node2;
                       stack[stack_size].ref++)
                    {
                      if (ref->use == IPA_REF_ALIAS)
                        node2 = dyn_cast <cgraph_node *> (ref->referring);
                    }
                  if (!node2)
                    break;
                  if (!node2->aux)
                    {
                      stack[++stack_size].node = node2;
                      stack[stack_size].edge = node2->callers;
                      stack[stack_size].ref = 0;
                      node2->aux = (void *)(size_t)1;
                    }
                }
              order[order_pos++] = stack[stack_size--].node;
            }
        }
  free (stack);
  FOR_EACH_FUNCTION (node)
    node->aux = NULL;
  return order_pos;
}

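/* Example usage (a sketch):

     struct cgraph_node **order = XCNEWVEC (struct cgraph_node *,
                                            symtab->cgraph_count);
     int n = ipa_reverse_postorder (order);
     for (int i = 0; i < n; i++)
       ...process order[i]...;
     free (order);
*/
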
/* Given a memory reference T, will return the variable at the bottom
   of the access.  Unlike get_base_address, this will recurse through
   INDIRECT_REFS.  */

tree
get_base_var (tree t)
{
  while (!SSA_VAR_P (t)
         && (!CONSTANT_CLASS_P (t))
         && TREE_CODE (t) != LABEL_DECL
         && TREE_CODE (t) != FUNCTION_DECL
         && TREE_CODE (t) != CONST_DECL
         && TREE_CODE (t) != CONSTRUCTOR)
    {
      t = TREE_OPERAND (t, 0);
    }
  return t;
}

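/* For example, for the reference a.b.c this strips the first operand at
   each level and returns the VAR_DECL a, while for an indirect reference
   such as *p it walks through the indirection and returns the pointer p
   itself, which is what distinguishes it from get_base_address.  */
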
/* SRC and DST are going to be merged.  Take SRC's profile and merge it into
   DST so it is not going to be lost.  Possibly destroy SRC's body on the way
   unless PRESERVE_BODY is set.  */

void
ipa_merge_profiles (struct cgraph_node *dst,
                    struct cgraph_node *src,
                    bool preserve_body)
{
  tree oldsrcdecl = src->decl;
  struct function *srccfun, *dstcfun;
  bool match = true;

  if (!src->definition
      || !dst->definition)
    return;
  if (src->frequency < dst->frequency)
    src->frequency = dst->frequency;

  /* Time profiles are merged.  */
  if (dst->tp_first_run > src->tp_first_run && src->tp_first_run)
    dst->tp_first_run = src->tp_first_run;

  if (src->profile_id && !dst->profile_id)
    dst->profile_id = src->profile_id;

  if (!dst->count)
    return;
  if (symtab->dump_file)
    {
      fprintf (symtab->dump_file, "Merging profiles of %s/%i to %s/%i\n",
               xstrdup_for_dump (src->name ()), src->order,
               xstrdup_for_dump (dst->name ()), dst->order);
    }
  dst->count += src->count;

  /* This is ugly.  We need to get both function bodies into memory.
     If the declaration is merged, we need to duplicate it to be able
     to load the body that is being replaced.  This makes the symbol table
     temporarily inconsistent.  */
  if (src->decl == dst->decl)
    {
      struct lto_in_decl_state temp;
      struct lto_in_decl_state *state;

      /* We are going to move the decl, so we want to remove its file decl
         data and link it with the new decl.  */
      temp.fn_decl = src->decl;
      lto_in_decl_state **slot
        = src->lto_file_data->function_decl_states->find_slot (&temp,
                                                               NO_INSERT);
      state = *slot;
      src->lto_file_data->function_decl_states->clear_slot (slot);
      gcc_assert (state);

      /* Duplicate the decl and be sure it does not link into body of DST.  */
      src->decl = copy_node (src->decl);
      DECL_STRUCT_FUNCTION (src->decl) = NULL;
      DECL_ARGUMENTS (src->decl) = NULL;
      DECL_INITIAL (src->decl) = NULL;
      DECL_RESULT (src->decl) = NULL;

      /* Associate the decl state with the new declaration, so the LTO
         streamer can look it up.  */
      state->fn_decl = src->decl;
      slot
        = src->lto_file_data->function_decl_states->find_slot (state, INSERT);
      gcc_assert (!*slot);
      *slot = state;
    }

  src->get_untransformed_body ();
  dst->get_untransformed_body ();
  srccfun = DECL_STRUCT_FUNCTION (src->decl);
  dstcfun = DECL_STRUCT_FUNCTION (dst->decl);
  if (n_basic_blocks_for_fn (srccfun)
      != n_basic_blocks_for_fn (dstcfun))
    {
      if (symtab->dump_file)
        fprintf (symtab->dump_file,
                 "Giving up; number of basic blocks mismatch.\n");
      match = false;
    }
  else if (last_basic_block_for_fn (srccfun)
           != last_basic_block_for_fn (dstcfun))
    {
      if (symtab->dump_file)
        fprintf (symtab->dump_file,
                 "Giving up; last block mismatch.\n");
      match = false;
    }
  else
    {
      basic_block srcbb, dstbb;

      FOR_ALL_BB_FN (srcbb, srccfun)
        {
          unsigned int i;

          dstbb = BASIC_BLOCK_FOR_FN (dstcfun, srcbb->index);
          if (dstbb == NULL)
            {
              if (symtab->dump_file)
                fprintf (symtab->dump_file,
                         "No matching block for bb %i.\n",
                         srcbb->index);
              match = false;
              break;
            }
          if (EDGE_COUNT (srcbb->succs) != EDGE_COUNT (dstbb->succs))
            {
              if (symtab->dump_file)
                fprintf (symtab->dump_file,
                         "Edge count mismatch for bb %i.\n",
                         srcbb->index);
              match = false;
              break;
            }
          for (i = 0; i < EDGE_COUNT (srcbb->succs); i++)
            {
              edge srce = EDGE_SUCC (srcbb, i);
              edge dste = EDGE_SUCC (dstbb, i);
              if (srce->dest->index != dste->dest->index)
                {
                  if (symtab->dump_file)
                    fprintf (symtab->dump_file,
                             "Succ edge mismatch for bb %i.\n",
                             srce->dest->index);
                  match = false;
                }
            }
        }
    }
  if (match)
    {
      struct cgraph_edge *e, *e2;
      basic_block srcbb, dstbb;

      /* TODO: merge also statement histograms.  */
      FOR_ALL_BB_FN (srcbb, srccfun)
        {
          unsigned int i;

          dstbb = BASIC_BLOCK_FOR_FN (dstcfun, srcbb->index);
          dstbb->count += srcbb->count;
          for (i = 0; i < EDGE_COUNT (srcbb->succs); i++)
            {
              edge srce = EDGE_SUCC (srcbb, i);
              edge dste = EDGE_SUCC (dstbb, i);
              dste->count += srce->count;
            }
        }
      push_cfun (dstcfun);
      counts_to_freqs ();
      compute_function_frequency ();
      pop_cfun ();
      for (e = dst->callees; e; e = e->next_callee)
        {
          if (e->speculative)
            continue;
          e->count = gimple_bb (e->call_stmt)->count;
          e->frequency = compute_call_stmt_bb_frequency
                            (dst->decl,
                             gimple_bb (e->call_stmt));
        }
      for (e = dst->indirect_calls, e2 = src->indirect_calls; e;
           e2 = (e2 ? e2->next_callee : NULL), e = e->next_callee)
        {
          gcov_type count = gimple_bb (e->call_stmt)->count;
          int freq = compute_call_stmt_bb_frequency
                        (dst->decl,
                         gimple_bb (e->call_stmt));
          /* When the call is speculative, we need to re-distribute
             probabilities the same way as they were.  This is not really
             correct because in the other copy the speculation may differ;
             but probably it is not really worth the effort.  */
          if (e->speculative)
            {
              cgraph_edge *direct, *indirect;
              cgraph_edge *direct2 = NULL, *indirect2 = NULL;
              struct ipa_ref *ref;

              e->speculative_call_info (direct, indirect, ref);
              gcc_assert (e == indirect);
              if (e2 && e2->speculative)
                e2->speculative_call_info (direct2, indirect2, ref);
              if (indirect->count || direct->count)
                {
                  /* We should mismatch earlier if there is no matching
                     indirect edge.  */
                  if (!e2)
                    {
                      if (dump_file)
                        fprintf (dump_file,
                                 "Mismatch in merging indirect edges\n");
                    }
                  else if (!e2->speculative)
                    indirect->count += e2->count;
                  else if (e2->speculative)
                    {
                      if (DECL_ASSEMBLER_NAME (direct2->callee->decl)
                          != DECL_ASSEMBLER_NAME (direct->callee->decl))
                        {
                          if (direct2->count >= direct->count)
                            {
                              direct->redirect_callee (direct2->callee);
                              indirect->count += indirect2->count
                                                 + direct->count;
                              direct->count = direct2->count;
                            }
                          else
                            indirect->count += indirect2->count
                                               + direct2->count;
                        }
                      else
                        {
                          direct->count += direct2->count;
                          indirect->count += indirect2->count;
                        }
                    }
                  int prob = RDIV (direct->count * REG_BR_PROB_BASE,
                                   direct->count + indirect->count);
                  direct->frequency = RDIV (freq * prob, REG_BR_PROB_BASE);
                  indirect->frequency = RDIV (freq * (REG_BR_PROB_BASE - prob),
                                              REG_BR_PROB_BASE);
                }
              else
                /* At the moment we should have only profile feedback based
                   speculations when merging.  */
                gcc_assert (!e2 || !e2->speculative);
            }
          else if (e2 && e2->speculative)
            {
              cgraph_edge *direct, *indirect;
              struct ipa_ref *ref;

              e2->speculative_call_info (direct, indirect, ref);
              e->count = count;
              e->frequency = freq;
              int prob = RDIV (direct->count * REG_BR_PROB_BASE, e->count);
              e->make_speculative (direct->callee, direct->count,
                                   RDIV (freq * prob, REG_BR_PROB_BASE));
            }
          else
            {
              e->count = count;
              e->frequency = freq;
            }
        }
      if (!preserve_body)
        src->release_body ();
      inline_update_overall_summary (dst);
    }
  /* TODO: if there is no match, we can scale up.  */
  src->decl = oldsrcdecl;
}

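/* Example (a sketch): a pass that proved SRC and DST semantically
   equivalent and is about to discard SRC would call

     ipa_merge_profiles (dst, src, /*preserve_body=*/false);

   so that SRC's execution counts survive in DST before SRC's body is
   released.  */
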
/* Return true if a call to DEST is known to be a self-recursive call
   within FUNC.  */

bool
recursive_call_p (tree func, tree dest)
{
  struct cgraph_node *dest_node = cgraph_node::get_create (dest);
  struct cgraph_node *cnode = cgraph_node::get_create (func);

  return dest_node->semantically_equivalent_p (cnode);
}
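
/* Example (a sketch): when scanning a call statement STMT in a pass over
   the current function, direct self-recursion can be detected with

     tree callee = gimple_call_fndecl (stmt);
     if (callee && recursive_call_p (current_function_decl, callee))
       ...
*/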