/* Utilities for ipa analysis.
   Copyright (C) 2005-2018 Free Software Foundation, Inc.
   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "alloc-pool.h"
#include "cgraph.h"
#include "lto-streamer.h"
#include "dumpfile.h"
#include "splay-tree.h"
#include "ipa-utils.h"
#include "symbol-summary.h"
#include "tree-vrp.h"
#include "ipa-prop.h"
#include "ipa-fnsummary.h"

/* Debugging function for postorder and inorder code.  NOTE is a string
   that is printed before the nodes are printed.  ORDER is an array of
   cgraph_nodes that has COUNT useful nodes in it.  */

void
ipa_print_order (FILE* out,
                 const char *note,
                 struct cgraph_node** order,
                 int count)
{
  int i;
  fprintf (out, "\n\n ordered call graph: %s\n", note);

  for (i = count - 1; i >= 0; i--)
    order[i]->dump (out);
  fprintf (out, "\n");
  fflush (out);
}

struct searchc_env {
  struct cgraph_node **stack;
  struct cgraph_node **result;
  int stack_size;
  int order_pos;
  splay_tree nodes_marked_new;
  bool reduce;
  bool allow_overwritable;
  int count;
};

/* This is an implementation of Tarjan's strongly connected region
   finder as reprinted in Aho, Hopcroft and Ullman's The Design and
   Analysis of Computer Algorithms (1974) pages 192-193.  This version
   has been customized for cgraph_nodes.  The env parameter is because
   it is recursive and there are no nested functions here.  This
   function should only be called from itself or
   ipa_reduced_postorder.  ENV is a stack env and would be
   unnecessary if C had nested functions.  V is the node to start
   searching from.  */

static void
searchc (struct searchc_env* env, struct cgraph_node *v,
         bool (*ignore_edge) (struct cgraph_edge *))
{
  struct cgraph_edge *edge;
  struct ipa_dfs_info *v_info = (struct ipa_dfs_info *) v->aux;

  /* Mark the node as old.  */
  v_info->new_node = false;
  splay_tree_remove (env->nodes_marked_new, v->get_uid ());

  v_info->dfn_number = env->count;
  v_info->low_link = env->count;
  (env->count)++;
  env->stack[(env->stack_size)++] = v;
  v_info->on_stack = true;

  for (edge = v->callees; edge; edge = edge->next_callee)
    {
      struct ipa_dfs_info * w_info;
      enum availability avail;
      struct cgraph_node *w = edge->callee->ultimate_alias_target (&avail);

      if (!w || (ignore_edge && ignore_edge (edge)))
        continue;

      if (w->aux
          && (avail > AVAIL_INTERPOSABLE
              || (env->allow_overwritable && avail == AVAIL_INTERPOSABLE)))
        {
          w_info = (struct ipa_dfs_info *) w->aux;
          if (w_info->new_node)
            {
              searchc (env, w, ignore_edge);
              v_info->low_link =
                (v_info->low_link < w_info->low_link) ?
                v_info->low_link : w_info->low_link;
            }
          else
            if ((w_info->dfn_number < v_info->dfn_number)
                && (w_info->on_stack))
              v_info->low_link =
                (w_info->dfn_number < v_info->low_link) ?
                w_info->dfn_number : v_info->low_link;
        }
    }

  if (v_info->low_link == v_info->dfn_number)
    {
      struct cgraph_node *last = NULL;
      struct cgraph_node *x;
      struct ipa_dfs_info *x_info;
      do {
        x = env->stack[--(env->stack_size)];
        x_info = (struct ipa_dfs_info *) x->aux;
        x_info->on_stack = false;
        x_info->scc_no = v_info->dfn_number;

        if (env->reduce)
          {
            x_info->next_cycle = last;
            last = x;
          }
        else
          env->result[env->order_pos++] = x;
      }
      while (v != x);
      if (env->reduce)
        env->result[env->order_pos++] = v;
    }
}

/* Topsort the call graph by caller relation.  Put the result in ORDER.

   The REDUCE flag is true if you want the cycles reduced to single nodes.
   You can use ipa_get_nodes_in_cycle to obtain a vector containing all real
   call graph nodes in a reduced node.

   Set ALLOW_OVERWRITABLE if nodes with such availability should be included.
   IGNORE_EDGE, if non-NULL, is a hook that may make some edges insignificant
   for the topological sort.  */

int
ipa_reduced_postorder (struct cgraph_node **order,
                       bool reduce, bool allow_overwritable,
                       bool (*ignore_edge) (struct cgraph_edge *))
{
  struct cgraph_node *node;
  struct searchc_env env;
  splay_tree_node result;
  env.stack = XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  env.stack_size = 0;
  env.result = order;
  env.order_pos = 0;
  env.nodes_marked_new = splay_tree_new (splay_tree_compare_ints, 0, 0);
  env.count = 1;
  env.reduce = reduce;
  env.allow_overwritable = allow_overwritable;

  FOR_EACH_DEFINED_FUNCTION (node)
    {
      enum availability avail = node->get_availability ();

      if (avail > AVAIL_INTERPOSABLE
          || (allow_overwritable
              && (avail == AVAIL_INTERPOSABLE)))
        {
          /* Reuse the info if it is already there.  */
          struct ipa_dfs_info *info = (struct ipa_dfs_info *) node->aux;
          if (!info)
            info = XCNEW (struct ipa_dfs_info);
          info->new_node = true;
          info->on_stack = false;
          info->next_cycle = NULL;
          node->aux = info;

          splay_tree_insert (env.nodes_marked_new,
                             (splay_tree_key)node->get_uid (),
                             (splay_tree_value)node);
        }
      else
        node->aux = NULL;
    }
  result = splay_tree_min (env.nodes_marked_new);
  while (result)
    {
      node = (struct cgraph_node *)result->value;
      searchc (&env, node, ignore_edge);
      result = splay_tree_min (env.nodes_marked_new);
    }
  splay_tree_delete (env.nodes_marked_new);
  free (env.stack);

  return env.order_pos;
}
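
#if 0
/* Illustrative sketch only (not part of the original file): a typical
   driver loop over the reduced postorder.  The buffer sizing, the NULL
   IGNORE_EDGE hook and the flag values are assumptions for the example,
   not the only way callers use this interface.  */

static void
example_walk_reduced_postorder (void)
{
  struct cgraph_node **order
    = XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int n = ipa_reduced_postorder (order, true, false, NULL);

  for (int i = 0; i < n; i++)
    {
      /* With REDUCE set, each entry stands for a whole strongly connected
         component; ipa_get_nodes_in_cycle recovers its members.  */
      vec<cgraph_node *> cycle = ipa_get_nodes_in_cycle (order[i]);
      cycle.release ();
    }

  ipa_free_postorder_info ();
  free (order);
}
#endif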

/* Deallocate all ipa_dfs_info structures pointed to by the aux pointer of
   call graph nodes.  */

void
ipa_free_postorder_info (void)
{
  struct cgraph_node *node;
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      /* Get rid of the aux information.  */
      if (node->aux)
        {
          free (node->aux);
          node->aux = NULL;
        }
    }
}

/* Get the set of nodes for the cycle in the reduced call graph starting
   from NODE.  */

vec<cgraph_node *>
ipa_get_nodes_in_cycle (struct cgraph_node *node)
{
  vec<cgraph_node *> v = vNULL;
  struct ipa_dfs_info *node_dfs_info;
  while (node)
    {
      v.safe_push (node);
      node_dfs_info = (struct ipa_dfs_info *) node->aux;
      node = node_dfs_info->next_cycle;
    }
  return v;
}

/* Return true iff the CS is an edge within a strongly connected component as
   computed by ipa_reduced_postorder.  */

bool
ipa_edge_within_scc (struct cgraph_edge *cs)
{
  struct ipa_dfs_info *caller_dfs = (struct ipa_dfs_info *) cs->caller->aux;
  struct ipa_dfs_info *callee_dfs;
  struct cgraph_node *callee = cs->callee->function_symbol ();

  callee_dfs = (struct ipa_dfs_info *) callee->aux;
  return (caller_dfs
          && callee_dfs
          && caller_dfs->scc_no == callee_dfs->scc_no);
}
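
#if 0
/* Illustrative sketch only (not part of the original file): count the
   call edges of NODE that stay inside its strongly connected component.
   It assumes ipa_reduced_postorder has been run and its aux data has not
   yet been released with ipa_free_postorder_info.  */

static int
example_count_scc_internal_edges (struct cgraph_node *node)
{
  int n = 0;
  for (struct cgraph_edge *e = node->callees; e; e = e->next_callee)
    if (ipa_edge_within_scc (e))
      n++;
  return n;
}
#endif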

struct postorder_stack
{
  struct cgraph_node *node;
  struct cgraph_edge *edge;
  int ref;
};

/* Fill array ORDER with all nodes with output flag set in the reverse
   topological order.  Return the number of elements in the array.
   FIXME: While walking, consider aliases, too.  */

int
ipa_reverse_postorder (struct cgraph_node **order)
{
  struct cgraph_node *node, *node2;
  int stack_size = 0;
  int order_pos = 0;
  struct cgraph_edge *edge;
  int pass;
  struct ipa_ref *ref = NULL;

  struct postorder_stack *stack =
    XCNEWVEC (struct postorder_stack, symtab->cgraph_count);

  /* We have to deal with cycles nicely, so use a depth first traversal
     output algorithm.  Ignore the fact that some functions won't need
     to be output and put them into order as well, so we get dependencies
     right through inline functions.  */
  FOR_EACH_FUNCTION (node)
    node->aux = NULL;
  for (pass = 0; pass < 2; pass++)
    FOR_EACH_FUNCTION (node)
      if (!node->aux
          && (pass
              || (!node->address_taken
                  && !node->global.inlined_to
                  && !node->alias && !node->thunk.thunk_p
                  && !node->only_called_directly_p ())))
        {
          stack_size = 0;
          stack[stack_size].node = node;
          stack[stack_size].edge = node->callers;
          stack[stack_size].ref = 0;
          node->aux = (void *)(size_t)1;
          while (stack_size >= 0)
            {
              while (true)
                {
                  node2 = NULL;
                  while (stack[stack_size].edge && !node2)
                    {
                      edge = stack[stack_size].edge;
                      node2 = edge->caller;
                      stack[stack_size].edge = edge->next_caller;
                      /* Break possible cycles involving always-inline
                         functions by ignoring edges from always-inline
                         functions to non-always-inline functions.  */
                      if (DECL_DISREGARD_INLINE_LIMITS (edge->caller->decl)
                          && !DECL_DISREGARD_INLINE_LIMITS
                          (edge->callee->function_symbol ()->decl))
                        node2 = NULL;
                    }
                  for (; stack[stack_size].node->iterate_referring (
                                                   stack[stack_size].ref,
                                                   ref) && !node2;
                       stack[stack_size].ref++)
                    {
                      if (ref->use == IPA_REF_ALIAS)
                        node2 = dyn_cast <cgraph_node *> (ref->referring);
                    }
                  if (!node2)
                    break;
                  if (!node2->aux)
                    {
                      stack[++stack_size].node = node2;
                      stack[stack_size].edge = node2->callers;
                      stack[stack_size].ref = 0;
                      node2->aux = (void *)(size_t)1;
                    }
                }
              order[order_pos++] = stack[stack_size--].node;
            }
        }
  free (stack);
  FOR_EACH_FUNCTION (node)
    node->aux = NULL;
  return order_pos;
}
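
#if 0
/* Illustrative sketch only (not part of the original file): how a caller
   might consume ipa_reverse_postorder.  Sizing the array by
   symtab->cgraph_count mirrors the function's own stack allocation; the
   dump action is just a placeholder.  */

static void
example_walk_reverse_postorder (void)
{
  struct cgraph_node **order
    = XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int n = ipa_reverse_postorder (order);

  /* Visit the functions in the computed order.  */
  for (int i = 0; i < n; i++)
    order[i]->dump (stderr);

  free (order);
}
#endif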

/* Given a memory reference T, will return the variable at the bottom
   of the access.  Unlike get_base_address, this will recurse through
   INDIRECT_REFS.  */

tree
get_base_var (tree t)
{
  while (!SSA_VAR_P (t)
         && (!CONSTANT_CLASS_P (t))
         && TREE_CODE (t) != LABEL_DECL
         && TREE_CODE (t) != FUNCTION_DECL
         && TREE_CODE (t) != CONST_DECL
         && TREE_CODE (t) != CONSTRUCTOR)
    {
      t = TREE_OPERAND (t, 0);
    }
  return t;
}
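
#if 0
/* Illustrative sketch only (not part of the original file): a trivial
   predicate built on get_base_var, testing whether a memory reference
   REF bottoms out at a declaration.  The helper name is hypothetical.  */

static bool
example_ref_based_on_decl_p (tree ref)
{
  tree base = get_base_var (ref);
  return base != NULL_TREE && DECL_P (base);
}
#endif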

/* SRC and DST are going to be merged.  Take SRC's profile and merge it into
   DST so it is not going to be lost.  Possibly destroy SRC's body on the way
   unless PRESERVE_BODY is set.  */

void
ipa_merge_profiles (struct cgraph_node *dst,
                    struct cgraph_node *src,
                    bool preserve_body)
{
  tree oldsrcdecl = src->decl;
  struct function *srccfun, *dstcfun;
  bool match = true;

  if (!src->definition
      || !dst->definition)
    return;
  if (src->frequency < dst->frequency)
    src->frequency = dst->frequency;

  /* Time profiles are merged.  */
  if (dst->tp_first_run > src->tp_first_run && src->tp_first_run)
    dst->tp_first_run = src->tp_first_run;

  if (src->profile_id && !dst->profile_id)
    dst->profile_id = src->profile_id;

  /* FIXME: when we merge in an unknown profile, we ought to set counts as
     unsafe.  */
  if (!src->count.initialized_p ()
      || !(src->count.ipa () == src->count))
    return;
  if (symtab->dump_file)
    {
      fprintf (symtab->dump_file, "Merging profiles of %s to %s\n",
               src->dump_name (), dst->dump_name ());
    }
  if (dst->count.initialized_p () && dst->count.ipa () == dst->count)
    dst->count += src->count.ipa ();
  else
    dst->count = src->count.ipa ();

  /* This is ugly.  We need to get both function bodies into memory.
     If declaration is merged, we need to duplicate it to be able
     to load body that is being replaced.  This makes symbol table
     temporarily inconsistent.  */
  if (src->decl == dst->decl)
    {
      struct lto_in_decl_state temp;
      struct lto_in_decl_state *state;

      /* We are going to move the decl; we want to remove its file decl data
         and link these with the new decl.  */
      temp.fn_decl = src->decl;
      lto_in_decl_state **slot
        = src->lto_file_data->function_decl_states->find_slot (&temp,
                                                               NO_INSERT);
      state = *slot;
      src->lto_file_data->function_decl_states->clear_slot (slot);

      /* Duplicate the decl and be sure it does not link into body of DST.  */
      src->decl = copy_node (src->decl);
      DECL_STRUCT_FUNCTION (src->decl) = NULL;
      DECL_ARGUMENTS (src->decl) = NULL;
      DECL_INITIAL (src->decl) = NULL;
      DECL_RESULT (src->decl) = NULL;

      /* Associate the decl state with the new declaration, so the LTO
         streamer can look it up.  */
      state->fn_decl = src->decl;
      slot
        = src->lto_file_data->function_decl_states->find_slot (state, INSERT);
      gcc_assert (!*slot);
      *slot = state;
    }
  src->get_untransformed_body ();
  dst->get_untransformed_body ();
  srccfun = DECL_STRUCT_FUNCTION (src->decl);
  dstcfun = DECL_STRUCT_FUNCTION (dst->decl);
  if (n_basic_blocks_for_fn (srccfun)
      != n_basic_blocks_for_fn (dstcfun))
    {
      if (symtab->dump_file)
        fprintf (symtab->dump_file,
                 "Giving up; number of basic blocks mismatch.\n");
      match = false;
    }
  else if (last_basic_block_for_fn (srccfun)
           != last_basic_block_for_fn (dstcfun))
    {
      if (symtab->dump_file)
        fprintf (symtab->dump_file,
                 "Giving up; last block mismatch.\n");
      match = false;
    }
  else
    {
      basic_block srcbb, dstbb;

      FOR_ALL_BB_FN (srcbb, srccfun)
        {
          unsigned int i;

          dstbb = BASIC_BLOCK_FOR_FN (dstcfun, srcbb->index);
          if (dstbb == NULL)
            {
              if (symtab->dump_file)
                fprintf (symtab->dump_file,
                         "No matching block for bb %i.\n",
                         srcbb->index);
              match = false;
              break;
            }
          if (EDGE_COUNT (srcbb->succs) != EDGE_COUNT (dstbb->succs))
            {
              if (symtab->dump_file)
                fprintf (symtab->dump_file,
                         "Edge count mismatch for bb %i.\n",
                         srcbb->index);
              match = false;
              break;
            }
          for (i = 0; i < EDGE_COUNT (srcbb->succs); i++)
            {
              edge srce = EDGE_SUCC (srcbb, i);
              edge dste = EDGE_SUCC (dstbb, i);
              if (srce->dest->index != dste->dest->index)
                {
                  if (symtab->dump_file)
                    fprintf (symtab->dump_file,
                             "Succ edge mismatch for bb %i.\n",
                             srce->dest->index);
                  match = false;
                  break;
                }
            }
        }
    }
  if (match)
    {
      struct cgraph_edge *e, *e2;
      basic_block srcbb, dstbb;

      /* TODO: merge also statement histograms.  */
      FOR_ALL_BB_FN (srcbb, srccfun)
        {
          unsigned int i;

          dstbb = BASIC_BLOCK_FOR_FN (dstcfun, srcbb->index);

          /* Either sum the profiles if both are IPA and not global0, or
             pick the more informative one (that is, nonzero IPA if the other
             is uninitialized, guessed or global0).  */
          if (!dstbb->count.ipa ().initialized_p ()
              || (dstbb->count.ipa () == profile_count::zero ()
                  && (srcbb->count.ipa ().initialized_p ()
                      && !(srcbb->count.ipa () == profile_count::zero ()))))
            {
              dstbb->count = srcbb->count;
              for (i = 0; i < EDGE_COUNT (srcbb->succs); i++)
                {
                  edge srce = EDGE_SUCC (srcbb, i);
                  edge dste = EDGE_SUCC (dstbb, i);
                  if (srce->probability.initialized_p ())
                    dste->probability = srce->probability;
                }
            }
          else if (srcbb->count.ipa ().initialized_p ()
                   && !(srcbb->count.ipa () == profile_count::zero ()))
            {
              for (i = 0; i < EDGE_COUNT (srcbb->succs); i++)
                {
                  edge srce = EDGE_SUCC (srcbb, i);
                  edge dste = EDGE_SUCC (dstbb, i);
                  dste->probability =
                    dste->probability
                      * dstbb->count.probability_in (dstbb->count
                                                     + srcbb->count)
                    + srce->probability
                      * srcbb->count.probability_in (dstbb->count
                                                     + srcbb->count);
                }
              dstbb->count += srcbb->count;
            }
        }
      push_cfun (dstcfun);
      update_max_bb_count ();
      compute_function_frequency ();
      pop_cfun ();
      for (e = dst->callees; e; e = e->next_callee)
        {
          if (e->speculative)
            continue;
          e->count = gimple_bb (e->call_stmt)->count;
        }
      for (e = dst->indirect_calls, e2 = src->indirect_calls; e;
           e2 = (e2 ? e2->next_callee : NULL), e = e->next_callee)
        {
          profile_count count = gimple_bb (e->call_stmt)->count;
          /* When the call is speculative, we need to re-distribute the
             probabilities the same way as they were.  This is not really
             correct because in the other copy the speculation may differ;
             but probably it is not really worth the effort.  */
          if (e->speculative)
            {
              cgraph_edge *direct, *indirect;
              cgraph_edge *direct2 = NULL, *indirect2 = NULL;
              ipa_ref *ref;

              e->speculative_call_info (direct, indirect, ref);
              gcc_assert (e == indirect);
              if (e2 && e2->speculative)
                e2->speculative_call_info (direct2, indirect2, ref);
              if (indirect->count > profile_count::zero ()
                  || direct->count > profile_count::zero ())
                {
                  /* We should mismatch earlier if there is no matching
                     indirect edge.  */
                  if (!e2)
                    {
                      if (dump_file)
                        fprintf (dump_file,
                                 "Mismatch in merging indirect edges\n");
                    }
                  else if (!e2->speculative)
                    indirect->count += e2->count;
                  else if (e2->speculative)
                    {
                      if (DECL_ASSEMBLER_NAME (direct2->callee->decl)
                          != DECL_ASSEMBLER_NAME (direct->callee->decl))
                        {
                          if (direct2->count >= direct->count)
                            {
                              direct->redirect_callee (direct2->callee);
                              indirect->count += indirect2->count
                                                 + direct->count;
                              direct->count = direct2->count;
                            }
                          else
                            indirect->count += indirect2->count
                                               + direct2->count;
                        }
                      else
                        {
                          direct->count += direct2->count;
                          indirect->count += indirect2->count;
                        }
                    }
                }
              else
                /* At the moment we should have only profile feedback based
                   speculations when merging.  */
                gcc_unreachable ();
            }
          else if (e2 && e2->speculative)
            {
              cgraph_edge *direct, *indirect;
              ipa_ref *ref;

              e2->speculative_call_info (direct, indirect, ref);
              e->count = count;
              e->make_speculative (direct->callee, direct->count);
            }
          else
            e->count = count;
        }
      if (!preserve_body)
        src->release_body ();
      ipa_update_overall_fn_summary (dst);
    }
  /* TODO: if there is no match, we can scale up.  */
  src->decl = oldsrcdecl;
}

/* Return true if the call to DEST is known to be a self-recursive call
   within FUNC.  */

bool
recursive_call_p (tree func, tree dest)
{
  struct cgraph_node *dest_node = cgraph_node::get_create (dest);
  struct cgraph_node *cnode = cgraph_node::get_create (func);
  ipa_ref *alias;
  enum availability avail;

  gcc_assert (!cnode->alias);
  if (cnode != dest_node->ultimate_alias_target (&avail))
    return false;
  if (avail >= AVAIL_AVAILABLE)
    return true;
  if (!dest_node->semantically_equivalent_p (cnode))
    return false;
  /* If there is only one way to call the function or we know all of them
     are semantically equivalent, we can still consider the call recursive.  */
  FOR_EACH_ALIAS (cnode, alias)
    if (!dest_node->semantically_equivalent_p (alias->referring))
      return false;
  return true;
}
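
#if 0
/* Illustrative sketch only (not part of the original file): the typical
   shape of a caller of recursive_call_p while scanning a function body,
   asking whether a direct call from the current function calls back into
   it.  STMT is a hypothetical gcall for the example.  */

static bool
example_call_is_self_recursive_p (gcall *stmt)
{
  tree callee = gimple_call_fndecl (stmt);
  return callee && recursive_call_p (current_function_decl, callee);
}
#endif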