/* Utilities for ipa analysis.
   Copyright (C) 2005-2022 Free Software Foundation, Inc.
   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "alloc-pool.h"
#include "cgraph.h"
#include "lto-streamer.h"
#include "dumpfile.h"
#include "splay-tree.h"
#include "ipa-utils.h"
#include "symbol-summary.h"
#include "tree-vrp.h"
#include "ipa-prop.h"
#include "ipa-fnsummary.h"

/* Debugging function for postorder and inorder code.  NOTE is a string
   that is printed before the nodes are printed.  ORDER is an array of
   cgraph_nodes that has COUNT useful nodes in it.  */

void
ipa_print_order (FILE* out,
                 const char * note,
                 struct cgraph_node** order,
                 int count)
{
  int i;
  fprintf (out, "\n\n ordered call graph: %s\n", note);

  for (i = count - 1; i >= 0; i--)
    order[i]->dump (out);
  fprintf (out, "\n");
  fflush (out);
}

struct searchc_env {
  struct cgraph_node **stack;
  struct cgraph_node **result;
  int stack_size;
  int order_pos;
  splay_tree nodes_marked_new;
  bool reduce;
  int count;
};

/* This is an implementation of Tarjan's strongly connected region
   finder as reprinted in Aho, Hopcroft and Ullman's The Design and
   Analysis of Computer Algorithms (1974) pages 192-193.  This version
   has been customized for cgraph_nodes.  The env parameter is because
   it is recursive and there are no nested functions here.  This
   function should only be called from itself or
   ipa_reduced_postorder.  ENV is a stack env and would be
   unnecessary if C had nested functions.  V is the node to start
   searching from.  */

static void
searchc (struct searchc_env* env, struct cgraph_node *v,
         bool (*ignore_edge) (struct cgraph_edge *))
{
  struct cgraph_edge *edge;
  struct ipa_dfs_info *v_info = (struct ipa_dfs_info *) v->aux;

  /* Mark the node as old.  */
  v_info->new_node = false;
  splay_tree_remove (env->nodes_marked_new, v->get_uid ());

  v_info->dfn_number = env->count;
  v_info->low_link = env->count;
  env->count++;
  env->stack[(env->stack_size)++] = v;
  v_info->on_stack = true;

  for (edge = v->callees; edge; edge = edge->next_callee)
    {
      struct ipa_dfs_info * w_info;
      enum availability avail;
      struct cgraph_node *w = edge->callee->ultimate_alias_target (&avail);

      if (!w || (ignore_edge && ignore_edge (edge)))
        continue;

      if (w->aux
          && (avail >= AVAIL_INTERPOSABLE))
        {
          w_info = (struct ipa_dfs_info *) w->aux;
          if (w_info->new_node)
            {
              searchc (env, w, ignore_edge);
              v_info->low_link =
                (v_info->low_link < w_info->low_link) ?
                v_info->low_link : w_info->low_link;
            }
          else
            if ((w_info->dfn_number < v_info->dfn_number)
                && (w_info->on_stack))
              v_info->low_link =
                (w_info->dfn_number < v_info->low_link) ?
                w_info->dfn_number : v_info->low_link;
        }
    }

  if (v_info->low_link == v_info->dfn_number)
    {
      struct cgraph_node *last = NULL;
      struct cgraph_node *x;
      struct ipa_dfs_info *x_info;
      do {
        x = env->stack[--(env->stack_size)];
        x_info = (struct ipa_dfs_info *) x->aux;
        x_info->on_stack = false;
        x_info->scc_no = v_info->dfn_number;

        if (env->reduce)
          {
            x_info->next_cycle = last;
            last = x;
          }
        else
          env->result[env->order_pos++] = x;
      }
      while (v != x);
      if (env->reduce)
        env->result[env->order_pos++] = v;
    }
}

/* Topsort the call graph by caller relation.  Put the result in ORDER.

   The REDUCE flag is true if you want the cycles reduced to single nodes.
   You can use ipa_get_nodes_in_cycle to obtain a vector containing all real
   call graph nodes in a reduced node.

   Nodes with AVAIL_INTERPOSABLE availability are included in the walk.
   IGNORE_EDGE, if non-NULL, is a hook that may make some edges insignificant
   for the topological sort.  */

int
ipa_reduced_postorder (struct cgraph_node **order,
                       bool reduce,
                       bool (*ignore_edge) (struct cgraph_edge *))
{
  struct cgraph_node *node;
  struct searchc_env env;
  splay_tree_node result;
  env.stack = XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  env.stack_size = 0;
  env.result = order;
  env.order_pos = 0;
  env.nodes_marked_new = splay_tree_new (splay_tree_compare_ints, 0, 0);
  env.count = 1;
  env.reduce = reduce;

  FOR_EACH_DEFINED_FUNCTION (node)
    {
      enum availability avail = node->get_availability ();

      if (avail > AVAIL_INTERPOSABLE
          || avail == AVAIL_INTERPOSABLE)
        {
          /* Reuse the info if it is already there.  */
          struct ipa_dfs_info *info = (struct ipa_dfs_info *) node->aux;
          if (!info)
            info = XCNEW (struct ipa_dfs_info);
          info->new_node = true;
          info->on_stack = false;
          info->next_cycle = NULL;
          node->aux = info;

          splay_tree_insert (env.nodes_marked_new,
                             (splay_tree_key)node->get_uid (),
                             (splay_tree_value)node);
        }
      else
        node->aux = NULL;
    }
  result = splay_tree_min (env.nodes_marked_new);
  while (result)
    {
      node = (struct cgraph_node *)result->value;
      searchc (&env, node, ignore_edge);
      result = splay_tree_min (env.nodes_marked_new);
    }
  splay_tree_delete (env.nodes_marked_new);
  free (env.stack);

  return env.order_pos;
}
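
/* Usage sketch (an illustrative example, not called anywhere in this file):
   a typical IPA pass builds the reduced postorder, walks it, and releases
   the aux data afterwards.  ORDER must have room for symtab->cgraph_count
   entries.

     cgraph_node **order = XCNEWVEC (struct cgraph_node *,
                                     symtab->cgraph_count);
     int n = ipa_reduced_postorder (order, true, NULL);
     for (int i = 0; i < n; i++)
       {
         // order[i] is the representative of one (possibly reduced) SCC.
       }
     ipa_free_postorder_info ();
     free (order);  */
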
/* Deallocate all ipa_dfs_info structures pointed to by the aux pointer of
   call graph nodes.  */

void
ipa_free_postorder_info (void)
{
  struct cgraph_node *node;
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      /* Get rid of the aux information.  */
      if (node->aux)
        {
          free (node->aux);
          node->aux = NULL;
        }
    }
}

/* Get the set of nodes for the cycle in the reduced call graph starting
   from NODE.  */

vec<cgraph_node *>
ipa_get_nodes_in_cycle (struct cgraph_node *node)
{
  vec<cgraph_node *> v = vNULL;
  struct ipa_dfs_info *node_dfs_info;
  while (node)
    {
      v.safe_push (node);
      node_dfs_info = (struct ipa_dfs_info *) node->aux;
      node = node_dfs_info->next_cycle;
    }
  return v;
}
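
/* Usage sketch (illustrative only): after ipa_reduced_postorder with REDUCE
   set, all members of a reduced SCC can be visited like this.

     vec<cgraph_node *> cycle = ipa_get_nodes_in_cycle (order[i]);
     unsigned j;
     cgraph_node *member;
     FOR_EACH_VEC_ELT (cycle, j, member)
       {
         // MEMBER is one real call graph node of the SCC.
       }
     cycle.release ();  */
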
/* Return true iff the CS is an edge within a strongly connected component as
   computed by ipa_reduced_postorder.  */

bool
ipa_edge_within_scc (struct cgraph_edge *cs)
{
  struct ipa_dfs_info *caller_dfs = (struct ipa_dfs_info *) cs->caller->aux;
  struct ipa_dfs_info *callee_dfs;
  struct cgraph_node *callee = cs->callee->function_symbol ();

  callee_dfs = (struct ipa_dfs_info *) callee->aux;
  return (caller_dfs
          && callee_dfs
          && caller_dfs->scc_no == callee_dfs->scc_no);
}

struct postorder_stack
{
  struct cgraph_node *node;
  struct cgraph_edge *edge;
  int ref;
};

/* Fill array order with all nodes with output flag set in the reverse
   topological order.  Return the number of elements in the array.
   FIXME: While walking, consider aliases, too.  */

int
ipa_reverse_postorder (struct cgraph_node **order)
{
  struct cgraph_node *node, *node2;
  int stack_size = 0;
  int order_pos = 0;
  struct cgraph_edge *edge;
  int pass;
  struct ipa_ref *ref = NULL;

  struct postorder_stack *stack =
    XCNEWVEC (struct postorder_stack, symtab->cgraph_count);

  /* We have to deal with cycles nicely, so use a depth first traversal
     output algorithm.  Ignore the fact that some functions won't need
     to be output and put them into order as well, so we get dependencies
     right through inline functions.  */
  FOR_EACH_FUNCTION (node)
    node->aux = NULL;
  for (pass = 0; pass < 2; pass++)
    FOR_EACH_FUNCTION (node)
      if (!node->aux
          && (pass
              || (!node->address_taken
                  && !node->inlined_to
                  && !node->alias && !node->thunk
                  && !node->only_called_directly_p ())))
        {
          stack_size = 0;
          stack[stack_size].node = node;
          stack[stack_size].edge = node->callers;
          stack[stack_size].ref = 0;
          node->aux = (void *)(size_t)1;
          while (stack_size >= 0)
            {
              while (true)
                {
                  node2 = NULL;
                  while (stack[stack_size].edge && !node2)
                    {
                      edge = stack[stack_size].edge;
                      node2 = edge->caller;
                      stack[stack_size].edge = edge->next_caller;
                      /* Break possible cycles involving always-inline
                         functions by ignoring edges from always-inline
                         functions to non-always-inline functions.  */
                      if (DECL_DISREGARD_INLINE_LIMITS (edge->caller->decl)
                          && !DECL_DISREGARD_INLINE_LIMITS
                              (edge->callee->function_symbol ()->decl))
                        node2 = NULL;
                    }
                  for (; stack[stack_size].node->iterate_referring (
                                                     stack[stack_size].ref,
                                                     ref) && !node2;
                       stack[stack_size].ref++)
                    {
                      if (ref->use == IPA_REF_ALIAS)
                        node2 = dyn_cast <cgraph_node *> (ref->referring);
                    }
                  if (!node2)
                    break;
                  if (!node2->aux)
                    {
                      stack[++stack_size].node = node2;
                      stack[stack_size].edge = node2->callers;
                      stack[stack_size].ref = 0;
                      node2->aux = (void *)(size_t)1;
                    }
                }
              order[order_pos++] = stack[stack_size--].node;
            }
        }
  free (stack);
  FOR_EACH_FUNCTION (node)
    node->aux = NULL;
  return order_pos;
}
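
/* Usage sketch (illustrative only): callers allocate ORDER with room for
   every call graph node.

     cgraph_node **order = XCNEWVEC (struct cgraph_node *,
                                     symtab->cgraph_count);
     int n = ipa_reverse_postorder (order);
     for (int i = 0; i < n; i++)
       {
         // A function's callers appear before it in ORDER.
       }
     free (order);  */
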
/* Given a memory reference T, will return the variable at the bottom
   of the access.  Unlike get_base_address, this will recurse through
   INDIRECT_REFS.  */

tree
get_base_var (tree t)
{
  while (!SSA_VAR_P (t)
         && (!CONSTANT_CLASS_P (t))
         && TREE_CODE (t) != LABEL_DECL
         && TREE_CODE (t) != FUNCTION_DECL
         && TREE_CODE (t) != CONST_DECL
         && TREE_CODE (t) != CONSTRUCTOR)
    {
      t = TREE_OPERAND (t, 0);
    }
  return t;
}
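
/* For example (illustrative): for a reference like a.b[i].c, both
   get_base_address and get_base_var return the declaration of `a'; but for
   an access through a pointer such as p->x, this function keeps stripping
   operand 0 through the MEM_REF and returns the SSA name or variable `p'
   underneath, where get_base_address stops at the MEM_REF itself.  */
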
/* Scale the profile of calls in NODE by the ratio NODE->count/ORIG_COUNT.  */

void
scale_ipa_profile_for_fn (struct cgraph_node *node, profile_count orig_count)
{
  profile_count to = node->count;
  profile_count::adjust_for_ipa_scaling (&to, &orig_count);
  struct cgraph_edge *e;

  for (e = node->callees; e; e = e->next_callee)
    e->count = e->count.apply_scale (to, orig_count);
  for (e = node->indirect_calls; e; e = e->next_callee)
    e->count = e->count.apply_scale (to, orig_count);
}
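
/* For instance (illustrative), if NODE->count is 50 and ORIG_COUNT is 100,
   every outgoing edge count is halved, keeping the edge counts consistent
   with the new function-level count.  */
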
/* SRC and DST are going to be merged.  Take SRC's profile and merge it into
   DST so it is not going to be lost.  Possibly destroy SRC's body on the way
   unless PRESERVE_BODY is set.  */

void
ipa_merge_profiles (struct cgraph_node *dst,
                    struct cgraph_node *src,
                    bool preserve_body)
{
  tree oldsrcdecl = src->decl;
  struct function *srccfun, *dstcfun;
  bool match = true;
  bool copy_counts = false;

  if (!src->definition
      || !dst->definition)
    return;

  if (src->frequency < dst->frequency)
    src->frequency = dst->frequency;

  /* Time profiles are merged.  */
  if (dst->tp_first_run > src->tp_first_run && src->tp_first_run)
    dst->tp_first_run = src->tp_first_run;

  if (src->profile_id && !dst->profile_id)
    dst->profile_id = src->profile_id;

  /* Merging zero profile to dst is no-op.  */
  if (src->count.ipa () == profile_count::zero ())
    return;

  /* FIXME: when we merge in an unknown profile, we ought to set the counts
     as unsafe.  */
  if (!src->count.initialized_p ()
      || !(src->count.ipa () == src->count))
    return;
  profile_count orig_count = dst->count;

  /* Either sum the profiles if both are IPA and not global0, or
     pick the more informative one (that is, nonzero IPA if the other is
     uninitialized, guessed or global0).  */
  if ((dst->count.ipa ().nonzero_p ()
       || src->count.ipa ().nonzero_p ())
      && dst->count.ipa ().initialized_p ()
      && src->count.ipa ().initialized_p ())
    dst->count = dst->count.ipa () + src->count.ipa ();
  else if (dst->count.ipa ().initialized_p ())
    ;
  else if (src->count.ipa ().initialized_p ())
    {
      copy_counts = true;
      dst->count = src->count.ipa ();
    }

  /* If no updating is needed, return early.  */
  if (dst->count == orig_count)
    return;

  if (symtab->dump_file)
    {
      fprintf (symtab->dump_file, "Merging profiles of %s count:",
               src->dump_name ());
      src->count.dump (symtab->dump_file);
      fprintf (symtab->dump_file, " to %s count:",
               dst->dump_name ());
      orig_count.dump (symtab->dump_file);
      fprintf (symtab->dump_file, " resulting count:");
      dst->count.dump (symtab->dump_file);
      fprintf (symtab->dump_file, "\n");
    }

  /* First handle functions with no gimple body.  */
  if (dst->thunk || dst->alias
      || src->thunk || src->alias)
    {
      scale_ipa_profile_for_fn (dst, orig_count);
      return;
    }

  /* This is ugly.  We need to get both function bodies into memory.
     If the declaration is merged, we need to duplicate it to be able
     to load the body that is being replaced.  This makes the symbol table
     temporarily inconsistent.  */
  if (src->decl == dst->decl)
    {
      struct lto_in_decl_state temp;
      struct lto_in_decl_state *state;

      /* We are going to move the decl; we want to remove its file decl data
         and link it with the new decl.  */
      temp.fn_decl = src->decl;
      lto_in_decl_state **slot
        = src->lto_file_data->function_decl_states->find_slot (&temp,
                                                               NO_INSERT);
      state = *slot;
      src->lto_file_data->function_decl_states->clear_slot (slot);
      gcc_assert (state);

      /* Duplicate the decl and be sure it does not link into the body of
         DST.  */
      src->decl = copy_node (src->decl);
      DECL_STRUCT_FUNCTION (src->decl) = NULL;
      DECL_ARGUMENTS (src->decl) = NULL;
      DECL_INITIAL (src->decl) = NULL;
      DECL_RESULT (src->decl) = NULL;

      /* Associate the decl state with the new declaration, so the LTO
         streamer can look it up.  */
      state->fn_decl = src->decl;
      slot
        = src->lto_file_data->function_decl_states->find_slot (state, INSERT);
      gcc_assert (!*slot);
      *slot = state;
    }
  src->get_untransformed_body ();
  dst->get_untransformed_body ();
  srccfun = DECL_STRUCT_FUNCTION (src->decl);
  dstcfun = DECL_STRUCT_FUNCTION (dst->decl);
  if (n_basic_blocks_for_fn (srccfun)
      != n_basic_blocks_for_fn (dstcfun))
    {
      if (symtab->dump_file)
        fprintf (symtab->dump_file,
                 "Giving up; number of basic blocks mismatch.\n");
      match = false;
    }
  else if (last_basic_block_for_fn (srccfun)
           != last_basic_block_for_fn (dstcfun))
    {
      if (symtab->dump_file)
        fprintf (symtab->dump_file,
                 "Giving up; last block mismatch.\n");
      match = false;
    }
  else
    {
      basic_block srcbb, dstbb;
      struct cgraph_edge *e, *e2;

      for (e = dst->callees, e2 = src->callees; e && e2 && match;
           e2 = e2->next_callee, e = e->next_callee)
        {
          if (gimple_bb (e->call_stmt)->index
              != gimple_bb (e2->call_stmt)->index)
            {
              if (symtab->dump_file)
                fprintf (symtab->dump_file,
                         "Giving up; call stmt mismatch.\n");
              match = false;
            }
        }
      if (e || e2)
        {
          if (symtab->dump_file)
            fprintf (symtab->dump_file,
                     "Giving up; number of calls differs.\n");
          match = false;
        }
      for (e = dst->indirect_calls, e2 = src->indirect_calls; e && e2 && match;
           e2 = e2->next_callee, e = e->next_callee)
        {
          if (gimple_bb (e->call_stmt)->index
              != gimple_bb (e2->call_stmt)->index)
            {
              if (symtab->dump_file)
                fprintf (symtab->dump_file,
                         "Giving up; indirect call stmt mismatch.\n");
              match = false;
            }
        }
      if (e || e2)
        {
          if (symtab->dump_file)
            fprintf (symtab->dump_file,
                     "Giving up; number of indirect calls differs.\n");
          match = false;
        }

      if (match)
        FOR_ALL_BB_FN (srcbb, srccfun)
          {
            unsigned int i;

            dstbb = BASIC_BLOCK_FOR_FN (dstcfun, srcbb->index);
            if (dstbb == NULL)
              {
                if (symtab->dump_file)
                  fprintf (symtab->dump_file,
                           "No matching block for bb %i.\n",
                           srcbb->index);
                match = false;
                break;
              }
            if (EDGE_COUNT (srcbb->succs) != EDGE_COUNT (dstbb->succs))
              {
                if (symtab->dump_file)
                  fprintf (symtab->dump_file,
                           "Edge count mismatch for bb %i.\n",
                           srcbb->index);
                match = false;
                break;
              }
            for (i = 0; i < EDGE_COUNT (srcbb->succs); i++)
              {
                edge srce = EDGE_SUCC (srcbb, i);
                edge dste = EDGE_SUCC (dstbb, i);
                if (srce->dest->index != dste->dest->index)
                  {
                    if (symtab->dump_file)
                      fprintf (symtab->dump_file,
                               "Succ edge mismatch for bb %i.\n",
                               srce->dest->index);
                    match = false;
                    break;
                  }
              }
          }
    }
  if (match)
    {
      struct cgraph_edge *e, *e2;
      basic_block srcbb, dstbb;

      /* The function and global profiles may be out of sync.  First scale
         them the same way fixup_cfg would.  */
      profile_count srcnum = src->count;
      profile_count srcden = ENTRY_BLOCK_PTR_FOR_FN (srccfun)->count;
      bool srcscale = srcnum.initialized_p () && !(srcnum == srcden);
      profile_count dstnum = orig_count;
      profile_count dstden = ENTRY_BLOCK_PTR_FOR_FN (dstcfun)->count;
      bool dstscale = !copy_counts
                      && dstnum.initialized_p () && !(dstnum == dstden);

      /* TODO: merge also statement histograms.  */
      FOR_ALL_BB_FN (srcbb, srccfun)
        {
          unsigned int i;

          dstbb = BASIC_BLOCK_FOR_FN (dstcfun, srcbb->index);

          profile_count srccount = srcbb->count;
          if (srcscale)
            srccount = srccount.apply_scale (srcnum, srcden);
          if (dstscale)
            dstbb->count = dstbb->count.apply_scale (dstnum, dstden);

          if (copy_counts)
            {
              dstbb->count = srccount;
              for (i = 0; i < EDGE_COUNT (srcbb->succs); i++)
                {
                  edge srce = EDGE_SUCC (srcbb, i);
                  edge dste = EDGE_SUCC (dstbb, i);
                  if (srce->probability.initialized_p ())
                    dste->probability = srce->probability;
                }
            }
          else
            {
              for (i = 0; i < EDGE_COUNT (srcbb->succs); i++)
                {
                  edge srce = EDGE_SUCC (srcbb, i);
                  edge dste = EDGE_SUCC (dstbb, i);
                  dste->probability =
                    dste->probability * dstbb->count.ipa ().probability_in
                                          (dstbb->count.ipa ()
                                           + srccount.ipa ())
                    + srce->probability * srcbb->count.ipa ().probability_in
                                            (dstbb->count.ipa ()
                                             + srccount.ipa ());
                }
              dstbb->count = dstbb->count.ipa () + srccount.ipa ();
            }
        }
      push_cfun (dstcfun);
      update_max_bb_count ();
      compute_function_frequency ();
      pop_cfun ();
      for (e = dst->callees; e; e = e->next_callee)
        {
          if (e->speculative)
            continue;
          e->count = gimple_bb (e->call_stmt)->count;
        }
      for (e = dst->indirect_calls, e2 = src->indirect_calls; e;
           e2 = (e2 ? e2->next_callee : NULL), e = e->next_callee)
        {
          if (!e->speculative && !e2->speculative)
            {
              /* FIXME: we need to also merge ipa-profile histograms
                 because with LTO merging happens from lto-symtab before
                 these are converted to indirect edges.  */
              e->count = gimple_bb (e->call_stmt)->count;
              continue;
            }

          /* When copying, just remove all speculations on DST and then copy
             one from SRC.  */
          if (copy_counts)
            {
              while (e->speculative)
                cgraph_edge::resolve_speculation (e, NULL);
              e->count = gimple_bb (e->call_stmt)->count;
              if (e2->speculative)
                {
                  for (cgraph_edge *e3 = e2->first_speculative_call_target ();
                       e3;
                       e3 = e3->next_speculative_call_target ())
                    {
                      cgraph_edge *ns;
                      ns = e->make_speculative
                             (dyn_cast <cgraph_node *>
                                (e3->speculative_call_target_ref ()->referred),
                              e3->count, e3->speculative_id);
                      /* The target may differ from the ref (for example it
                         may be redirected to a local alias).  */
                      ns->redirect_callee (e3->callee);
                    }
                }
              continue;
            }

          /* Iterate all speculations in SRC, see if corresponding ones exist
             in DST and if so, sum the counts.  Otherwise create new
             speculations.  */
          int max_spec = 0;
          for (cgraph_edge *e3 = e->first_speculative_call_target ();
               e3;
               e3 = e3->next_speculative_call_target ())
            if (e3->speculative_id > max_spec)
              max_spec = e3->speculative_id;
          for (cgraph_edge *e3 = e2->first_speculative_call_target ();
               e3;
               e3 = e3->next_speculative_call_target ())
            {
              cgraph_edge *te
                = e->speculative_call_for_target
                    (dyn_cast <cgraph_node *>
                       (e3->speculative_call_target_ref ()->referred));
              if (te)
                te->count = te->count + e3->count;
              else
                {
                  e->count = e->count + e3->count;
                  cgraph_edge *ns;
                  ns = e->make_speculative
                         (dyn_cast <cgraph_node *>
                            (e3->speculative_call_target_ref ()
                             ->referred),
                          e3->count,
                          e3->speculative_id + max_spec + 1);
                  /* The target may differ from the ref (for example it
                     may be redirected to a local alias).  */
                  ns->redirect_callee (e3->callee);
                }
            }
        }
      if (!preserve_body)
        src->release_body ();
      /* Update the summary.  */
      compute_fn_summary (dst, 0);
    }
  /* We can't update the CFG profile, but we can scale the IPA profile.  The
     CFG will be scaled according to dst->count after IPA passes.  */
  else
    scale_ipa_profile_for_fn (dst, orig_count);
  src->decl = oldsrcdecl;
}
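
/* Usage sketch (illustrative only): passes that unify two function bodies,
   such as ICF, merge the profile before removing the duplicate:

     ipa_merge_profiles (kept_node, duplicate_node, false);
     duplicate_node->remove ();

   kept_node and duplicate_node are hypothetical names for the surviving
   and the discarded cgraph_node.  */
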
/* Return true if a call to DEST is known to be a self-recursive call
   within FUNC.  */

bool
recursive_call_p (tree func, tree dest)
{
  struct cgraph_node *dest_node = cgraph_node::get_create (dest);
  struct cgraph_node *cnode = cgraph_node::get_create (func);
  ipa_ref *alias;
  enum availability avail;

  gcc_assert (!cnode->alias);
  if (cnode != dest_node->ultimate_alias_target (&avail))
    return false;
  if (avail >= AVAIL_AVAILABLE)
    return true;
  if (!dest_node->semantically_equivalent_p (cnode))
    return false;
  /* If there is only one way to call the function or we know all of them
     are semantically equivalent, we can still consider the call recursive.  */
  FOR_EACH_ALIAS (cnode, alias)
    if (!dest_node->semantically_equivalent_p (alias->referring))
      return false;
  return true;
}
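
/* Usage sketch (illustrative only): a gimple walker can test whether a call
   statement calls its own containing function:

     if (tree fndecl = gimple_call_fndecl (call_stmt))
       if (recursive_call_p (current_function_decl, fndecl))
         ;  // The call is self-recursive.

   call_stmt is a hypothetical gcall* in scope at that point.  */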