/* Callgraph construction.
   Copyright (C) 2003-2013 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
23 #include "coretypes.h"
26 #include "tree-flow.h"
27 #include "langhooks.h"
28 #include "pointer-set.h"
35 #include "tree-pass.h"
36 #include "ipa-utils.h"
39 #include "ipa-inline.h"
41 /* Context of record_reference. */
42 struct record_reference_ctx
45 struct varpool_node
*varpool_node
;
48 /* Walk tree and record all calls and references to functions/variables.
49 Called via walk_tree: TP is pointer to tree to be examined.
50 When DATA is non-null, record references to callgraph.
54 record_reference (tree
*tp
, int *walk_subtrees
, void *data
)
58 struct record_reference_ctx
*ctx
= (struct record_reference_ctx
*)data
;
60 t
= canonicalize_constructor_val (t
, NULL
);
66 switch (TREE_CODE (t
))
75 /* Record dereferences to the functions. This makes the
76 functions reachable unconditionally. */
77 decl
= get_base_var (*tp
);
78 if (TREE_CODE (decl
) == FUNCTION_DECL
)
80 struct cgraph_node
*node
= cgraph_get_create_real_symbol_node (decl
);
82 cgraph_mark_address_taken_node (node
);
83 ipa_record_reference ((symtab_node
)ctx
->varpool_node
,
88 if (TREE_CODE (decl
) == VAR_DECL
)
90 struct varpool_node
*vnode
= varpool_node_for_decl (decl
);
91 ipa_record_reference ((symtab_node
)ctx
->varpool_node
,
99 /* Save some cycles by not walking types and declaration as we
100 won't find anything useful there anyway. */
101 if (IS_TYPE_OR_DECL_P (*tp
))
112 /* Record references to typeinfos in the type list LIST. */
115 record_type_list (struct cgraph_node
*node
, tree list
)
117 for (; list
; list
= TREE_CHAIN (list
))
119 tree type
= TREE_VALUE (list
);
122 type
= lookup_type_for_runtime (type
);
124 if (TREE_CODE (type
) == ADDR_EXPR
)
126 type
= TREE_OPERAND (type
, 0);
127 if (TREE_CODE (type
) == VAR_DECL
)
129 struct varpool_node
*vnode
= varpool_node_for_decl (type
);
130 ipa_record_reference ((symtab_node
)node
,
138 /* Record all references we will introduce by producing EH tables
142 record_eh_tables (struct cgraph_node
*node
, struct function
*fun
)
146 if (DECL_FUNCTION_PERSONALITY (node
->symbol
.decl
))
148 struct cgraph_node
*per_node
;
150 per_node
= cgraph_get_create_real_symbol_node (DECL_FUNCTION_PERSONALITY (node
->symbol
.decl
));
151 ipa_record_reference ((symtab_node
)node
, (symtab_node
)per_node
, IPA_REF_ADDR
, NULL
);
152 cgraph_mark_address_taken_node (per_node
);
155 i
= fun
->eh
->region_tree
;
164 case ERT_MUST_NOT_THROW
:
170 for (c
= i
->u
.eh_try
.first_catch
; c
; c
= c
->next_catch
)
171 record_type_list (node
, c
->type_list
);
175 case ERT_ALLOWED_EXCEPTIONS
:
176 record_type_list (node
, i
->u
.allowed
.type_list
);
179 /* If there are sub-regions, process them. */
182 /* If there are peers, process them. */
183 else if (i
->next_peer
)
185 /* Otherwise, step back up the tree to the next peer. */
194 while (i
->next_peer
== NULL
);
200 /* Computes the frequency of the call statement so that it can be stored in
201 cgraph_edge. BB is the basic block of the call statement. */
203 compute_call_stmt_bb_frequency (tree decl
, basic_block bb
)
205 int entry_freq
= ENTRY_BLOCK_PTR_FOR_FUNCTION
206 (DECL_STRUCT_FUNCTION (decl
))->frequency
;
207 int freq
= bb
->frequency
;
209 if (profile_status_for_function (DECL_STRUCT_FUNCTION (decl
)) == PROFILE_ABSENT
)
210 return CGRAPH_FREQ_BASE
;
213 entry_freq
= 1, freq
++;
215 freq
= freq
* CGRAPH_FREQ_BASE
/ entry_freq
;
216 if (freq
> CGRAPH_FREQ_MAX
)
217 freq
= CGRAPH_FREQ_MAX
;
223 bool cgraph_pre_profiling_inlining_done
= false;
225 /* Return true if E is a fake indirect call edge. */
228 cgraph_is_fake_indirect_call_edge (struct cgraph_edge
*e
)
230 return !e
->call_stmt
;
234 /* Add fake cgraph edges from NODE to its indirect call callees
235 using profile data. */
238 add_fake_indirect_call_edges (struct cgraph_node
*node
)
240 unsigned n_counts
, i
;
241 gcov_type
*ic_counts
;
243 /* Enable this only for LIPO for now. */
244 if (!L_IPO_COMP_MODE
)
247 if (cgraph_pre_profiling_inlining_done
)
251 = get_coverage_counts_no_warn (DECL_STRUCT_FUNCTION (node
->symbol
.decl
),
252 GCOV_COUNTER_ICALL_TOPNV
, &n_counts
);
257 gcc_assert ((n_counts
% GCOV_ICALL_TOPN_NCOUNTS
) == 0);
258 gcc_assert (!flag_auto_profile
);
260 /* After the early_inline_1 before value profile transformation,
261 functions that are indirect call targets may have their bodies
262 removed (extern inline functions or functions from aux modules,
263 functions in comdat etc) if all direct callsites are inlined. This
264 will lead to missing inline opportunities after profile based
265 indirect call promotion. The solution is to add fake edges to
266 indirect call targets. Note that such edges are not associated
267 with actual indirect call sites because it is not possible to
268 reliably match pre-early-inline indirect callsites with indirect
269 call profile counters which are from post-early inline function body. */
271 for (i
= 0; i
< n_counts
;
272 i
+= GCOV_ICALL_TOPN_NCOUNTS
, ic_counts
+= GCOV_ICALL_TOPN_NCOUNTS
)
274 gcov_type val1
, val2
, count1
, count2
;
275 struct cgraph_node
*direct_call1
= 0, *direct_call2
= 0;
278 count1
= ic_counts
[2];
280 count2
= ic_counts
[4];
282 if (val1
== 0 || count1
== 0)
285 direct_call1
= find_func_by_global_id (val1
, false);
288 tree decl
= direct_call1
->symbol
.decl
;
289 cgraph_create_edge (node
,
290 cgraph_get_create_node (decl
),
295 if (val2
== 0 || count2
== 0)
297 direct_call2
= find_func_by_global_id (val2
, false);
300 tree decl
= direct_call2
->symbol
.decl
;
301 cgraph_create_edge (node
,
302 cgraph_get_create_node (decl
),
310 /* This can be implemented as an IPA pass that must be first one
311 before any unreachable node elimination. */
314 cgraph_add_fake_indirect_call_edges (void)
316 struct cgraph_node
*node
;
318 /* Enable this only for LIPO for now. */
319 if (!L_IPO_COMP_MODE
)
322 FOR_EACH_DEFINED_FUNCTION (node
)
324 if (!gimple_has_body_p (node
->symbol
.decl
))
326 add_fake_indirect_call_edges (node
);
330 /* Remove zero count fake edges added for the purpose of ensuring
331 the right processing order. This should be called after all
334 cgraph_remove_zero_count_fake_edges (void)
336 struct cgraph_node
*node
;
338 /* Enable this only for LIPO for now. */
339 if (!L_IPO_COMP_MODE
)
342 FOR_EACH_DEFINED_FUNCTION (node
)
344 if (!gimple_has_body_p (node
->symbol
.decl
))
347 struct cgraph_edge
*e
, *f
;
348 for (e
= node
->callees
; e
; e
= f
)
351 if (!e
->call_stmt
&& !e
->count
&& !e
->frequency
)
352 cgraph_remove_edge (e
);
358 record_reference_to_real_target_from_alias (struct cgraph_node
*alias
)
360 if (!L_IPO_COMP_MODE
|| !cgraph_pre_profiling_inlining_done
)
363 /* Need to add a reference to the resolved node in LIPO
364 mode to avoid the real node from eliminated */
365 if (alias
->alias
&& alias
->analyzed
)
367 struct cgraph_node
*target
, *real_target
;
369 target
= cgraph_alias_aliased_node (alias
);
370 real_target
= cgraph_lipo_get_resolved_node (target
->symbol
.decl
);
371 /* TODO: this make create duplicate entries in the reference list. */
372 if (real_target
!= target
)
373 ipa_record_reference ((symtab_node
)alias
, (symtab_node
)real_target
,
374 IPA_REF_ALIAS
, NULL
);
378 /* Mark address taken in STMT. */
381 mark_address (gimple stmt
, tree addr
, void *data
)
383 addr
= get_base_address (addr
);
384 if (TREE_CODE (addr
) == FUNCTION_DECL
)
386 struct cgraph_node
*node
= cgraph_get_create_real_symbol_node (addr
);
387 if (L_IPO_COMP_MODE
&& cgraph_pre_profiling_inlining_done
)
388 node
= cgraph_lipo_get_resolved_node (addr
);
390 cgraph_mark_address_taken_node (node
);
391 ipa_record_reference ((symtab_node
)data
,
394 record_reference_to_real_target_from_alias (node
);
396 else if (addr
&& TREE_CODE (addr
) == VAR_DECL
397 && (TREE_STATIC (addr
) || DECL_EXTERNAL (addr
)))
399 struct varpool_node
*vnode
= varpool_node_for_decl (addr
);
401 ipa_record_reference ((symtab_node
)data
,
404 if (L_IPO_COMP_MODE
&& cgraph_pre_profiling_inlining_done
)
406 struct varpool_node
*rvnode
= real_varpool_node (addr
);
408 ipa_record_reference ((symtab_node
)data
,
417 /* Mark load of T. */
420 mark_load (gimple stmt
, tree t
, void *data
)
422 t
= get_base_address (t
);
423 if (t
&& TREE_CODE (t
) == FUNCTION_DECL
)
425 /* ??? This can happen on platforms with descriptors when these are
426 directly manipulated in the code. Pretend that it's an address. */
427 struct cgraph_node
*node
= cgraph_get_create_real_symbol_node (t
);
428 cgraph_mark_address_taken_node (node
);
429 ipa_record_reference ((symtab_node
)data
,
433 else if (t
&& TREE_CODE (t
) == VAR_DECL
434 && (TREE_STATIC (t
) || DECL_EXTERNAL (t
)))
436 struct varpool_node
*vnode
= varpool_node_for_decl (t
);
438 ipa_record_reference ((symtab_node
)data
,
442 if (L_IPO_COMP_MODE
&& cgraph_pre_profiling_inlining_done
)
444 struct varpool_node
*rvnode
= real_varpool_node (t
);
446 ipa_record_reference ((symtab_node
)data
,
454 /* Mark store of T. */
457 mark_store (gimple stmt
, tree t
, void *data
)
459 t
= get_base_address (t
);
460 if (t
&& TREE_CODE (t
) == VAR_DECL
461 && (TREE_STATIC (t
) || DECL_EXTERNAL (t
)))
463 struct varpool_node
*vnode
= varpool_node_for_decl (t
);
465 ipa_record_reference ((symtab_node
)data
,
467 IPA_REF_STORE
, stmt
);
468 if (L_IPO_COMP_MODE
&& cgraph_pre_profiling_inlining_done
)
470 struct varpool_node
*rvnode
= real_varpool_node (t
);
472 ipa_record_reference ((symtab_node
)data
,
480 /* Create cgraph edges for function calls.
481 Also look for functions and variables having addresses taken. */
484 build_cgraph_edges (void)
487 struct cgraph_node
*node
= cgraph_get_node (current_function_decl
);
488 struct pointer_set_t
*visited_nodes
= pointer_set_create ();
489 gimple_stmt_iterator gsi
;
493 /* Create the callgraph edges and record the nodes referenced by the function.
497 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
499 gimple stmt
= gsi_stmt (gsi
);
502 if (is_gimple_call (stmt
))
504 int freq
= compute_call_stmt_bb_frequency (current_function_decl
,
506 decl
= gimple_call_fndecl (stmt
);
508 cgraph_create_edge (node
, cgraph_get_create_node (decl
),
509 stmt
, bb
->count
, freq
);
511 cgraph_create_indirect_edge (node
, stmt
,
512 gimple_call_flags (stmt
),
515 walk_stmt_load_store_addr_ops (stmt
, node
, mark_load
,
516 mark_store
, mark_address
);
517 if (gimple_code (stmt
) == GIMPLE_OMP_PARALLEL
518 && gimple_omp_parallel_child_fn (stmt
))
520 tree fn
= gimple_omp_parallel_child_fn (stmt
);
521 ipa_record_reference ((symtab_node
)node
,
522 (symtab_node
)cgraph_get_create_real_symbol_node (fn
),
525 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
)
527 tree fn
= gimple_omp_task_child_fn (stmt
);
529 ipa_record_reference ((symtab_node
)node
,
530 (symtab_node
) cgraph_get_create_real_symbol_node (fn
),
532 fn
= gimple_omp_task_copy_fn (stmt
);
534 ipa_record_reference ((symtab_node
)node
,
535 (symtab_node
)cgraph_get_create_real_symbol_node (fn
),
539 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
540 walk_stmt_load_store_addr_ops (gsi_stmt (gsi
), node
,
541 mark_load
, mark_store
, mark_address
);
545 /* Look for initializers of constant variables and private statics. */
546 FOR_EACH_LOCAL_DECL (cfun
, ix
, decl
)
547 if (TREE_CODE (decl
) == VAR_DECL
548 && (TREE_STATIC (decl
) && !DECL_EXTERNAL (decl
))
549 && !DECL_HAS_VALUE_EXPR_P (decl
))
550 varpool_finalize_decl (decl
);
551 record_eh_tables (node
, cfun
);
553 pointer_set_destroy (visited_nodes
);
557 struct gimple_opt_pass pass_build_cgraph_edges
=
561 "*build_cgraph_edges", /* name */
562 OPTGROUP_NONE
, /* optinfo_flags */
564 build_cgraph_edges
, /* execute */
567 0, /* static_pass_number */
569 PROP_cfg
, /* properties_required */
570 0, /* properties_provided */
571 0, /* properties_destroyed */
572 0, /* todo_flags_start */
573 0 /* todo_flags_finish */
577 /* Record references to functions and other variables present in the
578 initial value of DECL, a variable.
579 When ONLY_VARS is true, we mark needed only variables, not functions. */
582 record_references_in_initializer (tree decl
, bool only_vars
)
584 struct pointer_set_t
*visited_nodes
= pointer_set_create ();
585 struct varpool_node
*node
= varpool_node_for_decl (decl
);
586 struct record_reference_ctx ctx
= {false, NULL
};
588 ctx
.varpool_node
= node
;
589 ctx
.only_vars
= only_vars
;
590 walk_tree (&DECL_INITIAL (decl
), record_reference
,
591 &ctx
, visited_nodes
);
592 pointer_set_destroy (visited_nodes
);
595 /* In LIPO mode, before tree_profiling, the call graph edge
596 needs to be built with the original target node to make
597 sure consistent early inline decisions between profile
598 generate and profile use. After tree-profiling, the target
599 needs to be set to the resolved node so that ipa-inline
600 sees the definitions. */
601 #include "gimple-pretty-print.h"
603 lipo_fixup_cgraph_edge_call_target (gimple stmt
)
606 gcc_assert (is_gimple_call (stmt
));
608 decl
= gimple_call_fndecl (stmt
);
611 struct cgraph_node
*real_callee
;
612 real_callee
= cgraph_lipo_get_resolved_node (decl
);
614 if (decl
!= real_callee
->symbol
.decl
)
618 gcc_assert (!real_callee
->clone
.combined_args_to_skip
);
619 gimple_call_set_fndecl (stmt
, real_callee
->symbol
.decl
);
621 lp_nr
= lookup_stmt_eh_lp (stmt
);
622 if (lp_nr
!= 0 && !stmt_could_throw_p (stmt
))
623 remove_stmt_from_eh_lp (stmt
);
628 /* Rebuild cgraph edges for current function node. This needs to be run after
629 passes that don't update the cgraph. */
632 rebuild_cgraph_edges (void)
635 struct cgraph_node
*node
= cgraph_get_node (current_function_decl
);
636 gimple_stmt_iterator gsi
;
638 cgraph_node_remove_callees (node
);
639 ipa_remove_all_references (&node
->symbol
.ref_list
);
641 node
->count
= ENTRY_BLOCK_PTR
->count
;
642 node
->max_bb_count
= 0;
646 if (bb
->count
> node
->max_bb_count
)
647 node
->max_bb_count
= bb
->count
;
648 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
650 gimple stmt
= gsi_stmt (gsi
);
653 if (is_gimple_call (stmt
))
655 int freq
= compute_call_stmt_bb_frequency (current_function_decl
,
657 decl
= gimple_call_fndecl (stmt
);
660 struct cgraph_node
*callee
= cgraph_get_create_node (decl
);
662 record_reference_to_real_target_from_alias (callee
);
663 cgraph_create_edge (node
, callee
, stmt
,
667 cgraph_create_indirect_edge (node
, stmt
,
668 gimple_call_flags (stmt
),
671 walk_stmt_load_store_addr_ops (stmt
, node
, mark_load
,
672 mark_store
, mark_address
);
675 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
676 walk_stmt_load_store_addr_ops (gsi_stmt (gsi
), node
,
677 mark_load
, mark_store
, mark_address
);
680 add_fake_indirect_call_edges (node
);
681 record_eh_tables (node
, cfun
);
682 gcc_assert (!node
->global
.inlined_to
);
687 /* Rebuild cgraph edges for current function node. This needs to be run after
688 passes that don't update the cgraph. */
691 cgraph_rebuild_references (void)
694 struct cgraph_node
*node
= cgraph_get_node (current_function_decl
);
695 gimple_stmt_iterator gsi
;
697 ipa_remove_all_references (&node
->symbol
.ref_list
);
699 node
->count
= ENTRY_BLOCK_PTR
->count
;
703 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
705 gimple stmt
= gsi_stmt (gsi
);
707 walk_stmt_load_store_addr_ops (stmt
, node
, mark_load
,
708 mark_store
, mark_address
);
711 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
712 walk_stmt_load_store_addr_ops (gsi_stmt (gsi
), node
,
713 mark_load
, mark_store
, mark_address
);
715 record_eh_tables (node
, cfun
);
718 struct gimple_opt_pass pass_rebuild_cgraph_edges
=
722 "*rebuild_cgraph_edges", /* name */
723 OPTGROUP_NONE
, /* optinfo_flags */
725 rebuild_cgraph_edges
, /* execute */
728 0, /* static_pass_number */
729 TV_CGRAPH
, /* tv_id */
730 PROP_cfg
, /* properties_required */
731 0, /* properties_provided */
732 0, /* properties_destroyed */
733 0, /* todo_flags_start */
734 0, /* todo_flags_finish */
738 /* Defined in tree-optimize.c */
739 extern bool cgraph_callee_edges_final_cleanup
;
742 remove_cgraph_callee_edges (void)
744 /* The -freorder-functions=* needs the call-graph preserved till
746 if (cgraph_callee_edges_final_cleanup
747 && (flag_reorder_functions
> 1))
750 cgraph_node_remove_callees (cgraph_get_node (current_function_decl
));
/* Pass descriptor for remove_cgraph_callee_edges above.
   NOTE(review): this initializer is truncated in the visible chunk --
   its closing braces and any remaining fields lie beyond this view;
   do not assume the field list shown here is complete.  */
754 struct gimple_opt_pass pass_remove_cgraph_callee_edges
=
758 "*remove_cgraph_callee_edges", /* name */
759 OPTGROUP_NONE
, /* optinfo_flags */
761 remove_cgraph_callee_edges
, /* execute */
764 0, /* static_pass_number */
766 0, /* properties_required */
767 0, /* properties_provided */
768 0, /* properties_destroyed */
769 0, /* todo_flags_start */
770 0, /* todo_flags_finish */