/* Callgraph construction.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tree-flow.h"
#include "langhooks.h"
#include "pointer-set.h"
#include "cgraph.h"
#include "intl.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ipa-utils.h"
#include "except.h"
/* Assumed available in this LIPO-patched tree: coverage counters and
   the L_IPO machinery used below.  */
#include "coverage.h"
#include "l-ipo.h"
/* Context of record_reference.  */
struct record_reference_ctx
{
  bool only_vars;
  struct varpool_node *varpool_node;
};
/* Walk tree and record all calls and references to functions/variables.
   Called via walk_tree: TP is pointer to tree to be examined.
   When DATA is non-null, record references to callgraph.  */

static tree
record_reference (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  tree decl;
  struct record_reference_ctx *ctx = (struct record_reference_ctx *) data;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case FUNCTION_DECL:
      gcc_unreachable ();
      break;

    case FDESC_EXPR:
    case ADDR_EXPR:
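      /* Example: walking the initializer of
           static void (*fp) (void) = foo;
         arrives here as an ADDR_EXPR wrapping the FUNCTION_DECL foo.  */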
      /* Record dereferences to the functions.  This makes the
         functions reachable unconditionally.  */
      decl = get_base_var (*tp);
      if (TREE_CODE (decl) == FUNCTION_DECL)
        {
          if (!ctx->only_vars)
            cgraph_mark_address_taken_node (cgraph_node (decl));
          ipa_record_reference (NULL, ctx->varpool_node,
                                cgraph_node (decl), NULL,
                                IPA_REF_ADDR, NULL);
        }
      if (TREE_CODE (decl) == VAR_DECL)
        {
          struct varpool_node *vnode = varpool_node (decl);
          if (lang_hooks.callgraph.analyze_expr)
            lang_hooks.callgraph.analyze_expr (&decl, walk_subtrees);
          varpool_mark_needed_node (vnode);
          if (vnode->alias && vnode->extra_name)
            vnode = vnode->extra_name;
          ipa_record_reference (NULL, ctx->varpool_node,
                                NULL, vnode,
                                IPA_REF_ADDR, NULL);
        }
      *walk_subtrees = 0;
      break;

    default:
      /* Save some cycles by not walking types and declarations, as we
         won't find anything useful there anyway.  */
      if (IS_TYPE_OR_DECL_P (*tp))
        {
          *walk_subtrees = 0;
          break;
        }

      if ((unsigned int) TREE_CODE (t) >= LAST_AND_UNUSED_TREE_CODE)
        return lang_hooks.callgraph.analyze_expr (tp, walk_subtrees);
      break;
    }

  return NULL_TREE;
}
/* Record references to typeinfos in the type list LIST.  */

static void
record_type_list (struct cgraph_node *node, tree list)
{
  for (; list; list = TREE_CHAIN (list))
    {
      tree type = TREE_VALUE (list);

      if (TYPE_P (type))
        type = lookup_type_for_runtime (type);
      STRIP_NOPS (type);
      if (TREE_CODE (type) == ADDR_EXPR)
        {
          type = TREE_OPERAND (type, 0);
          if (TREE_CODE (type) == VAR_DECL)
            {
              struct varpool_node *vnode = varpool_node (type);
              varpool_mark_needed_node (vnode);
              ipa_record_reference (node, NULL,
                                    NULL, vnode,
                                    IPA_REF_ADDR, NULL);
            }
        }
    }
}
/* Record all references we will introduce by producing EH tables
   for NODE.  */

static void
record_eh_tables (struct cgraph_node *node, struct function *fun)
{
  eh_region i;

  if (DECL_FUNCTION_PERSONALITY (node->decl))
    ipa_record_reference (node, NULL,
                          cgraph_node (DECL_FUNCTION_PERSONALITY (node->decl)),
                          NULL, IPA_REF_ADDR, NULL);

  i = fun->eh->region_tree;
  if (!i)
    return;

  while (1)
    {
      switch (i->type)
        {
        case ERT_CLEANUP:
        case ERT_MUST_NOT_THROW:
          break;

        case ERT_TRY:
          {
            eh_catch c;
            for (c = i->u.eh_try.first_catch; c; c = c->next_catch)
              record_type_list (node, c->type_list);
          }
          break;

        case ERT_ALLOWED_EXCEPTIONS:
          record_type_list (node, i->u.allowed.type_list);
          break;
        }

      /* If there are sub-regions, process them.  */
      if (i->inner)
        i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
        i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
        {
          do
            {
              i = i->outer;
              if (i == NULL)
                return;
            }
          while (i->next_peer == NULL);
          i = i->next_peer;
        }
    }
}
/* Reset inlining information of all incoming call edges of NODE.  */

void
reset_inline_failed (struct cgraph_node *node)
{
  struct cgraph_edge *e;

  for (e = node->callers; e; e = e->next_caller)
    {
      e->callee->global.inlined_to = NULL;
      if (!node->analyzed)
        e->inline_failed = CIF_BODY_NOT_AVAILABLE;
      else if (node->local.redefined_extern_inline)
        e->inline_failed = CIF_REDEFINED_EXTERN_INLINE;
      else if (!node->local.inlinable)
        e->inline_failed = CIF_FUNCTION_NOT_INLINABLE;
      else if (e->call_stmt_cannot_inline_p)
        e->inline_failed = CIF_MISMATCHED_ARGUMENTS;
      else
        e->inline_failed = CIF_FUNCTION_NOT_CONSIDERED;
    }
}
/* Computes the frequency of the call statement so that it can be stored in
   cgraph_edge.  BB is the basic block of the call statement.  */
int
compute_call_stmt_bb_frequency (tree decl, basic_block bb)
{
  int entry_freq = ENTRY_BLOCK_PTR_FOR_FUNCTION
                     (DECL_STRUCT_FUNCTION (decl))->frequency;
  int freq = bb->frequency;

  if (profile_status_for_function (DECL_STRUCT_FUNCTION (decl)) == PROFILE_ABSENT)
    return CGRAPH_FREQ_BASE;

  if (!entry_freq)
    entry_freq = 1, freq++;
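  /* Scale so that the entry block corresponds to CGRAPH_FREQ_BASE;
     e.g. a block executed half as often as the entry block gets
     CGRAPH_FREQ_BASE / 2.  */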
  freq = freq * CGRAPH_FREQ_BASE / entry_freq;
  if (freq > CGRAPH_FREQ_MAX)
    freq = CGRAPH_FREQ_MAX;

  return freq;
}
bool cgraph_pre_profiling_inlining_done = false;
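/* The flag above is set elsewhere once the pre-profiling (early) inline
   phase has completed; add_fake_indirect_call_edges and
   rebuild_cgraph_edges below change behavior based on it.  */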
/* Return true if E is a fake indirect call edge.  */

bool
cgraph_is_fake_indirect_call_edge (struct cgraph_edge *e)
{
  return !e->call_stmt;
}
/* Add fake cgraph edges from NODE to its indirect call callees
   using profile data.  */

static void
add_fake_indirect_call_edges (struct cgraph_node *node)
{
  unsigned n_counts, i;
  gcov_type *ic_counts;

  /* Enable this only for LIPO for now.  */
  if (!L_IPO_COMP_MODE)
    return;

  if (cgraph_pre_profiling_inlining_done)
    return;

  ic_counts
    = get_coverage_counts_no_warn (DECL_STRUCT_FUNCTION (node->decl),
                                   GCOV_COUNTER_ICALL_TOPNV, &n_counts);
  if (!ic_counts)
    return;

  gcc_assert ((n_counts % GCOV_ICALL_TOPN_NCOUNTS) == 0);
  /* After early_inline_1 and before the value profile transformations,
     functions that are indirect call targets may have their bodies
     removed (extern inline functions, functions from aux modules,
     functions in comdats, etc.) if all direct call sites are inlined.
     This leads to missed inlining opportunities after profile-based
     indirect call promotion.  The solution is to add fake edges to the
     indirect call targets.  Note that such edges are not associated
     with the actual indirect call sites, because pre-early-inline
     indirect call sites cannot be reliably matched with the indirect
     call profile counters, which come from the post-early-inline
     function body.  */
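  /* Layout assumption for each GCOV_ICALL_TOPN_NCOUNTS-long record:
     slot 0 identifies the call site, and slots 1/2 and 3/4 hold the
     two hottest (target global id, call count) pairs, read below.  */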
  for (i = 0; i < n_counts;
       i += GCOV_ICALL_TOPN_NCOUNTS, ic_counts += GCOV_ICALL_TOPN_NCOUNTS)
    {
      gcov_type val1, val2, count1, count2;
      struct cgraph_node *direct_call1 = 0, *direct_call2 = 0;

      val1 = ic_counts[1];
      count1 = ic_counts[2];
      val2 = ic_counts[3];
      count2 = ic_counts[4];

      if (val1 == 0 || count1 == 0)
        continue;
      direct_call1 = find_func_by_global_id (val1);
      if (direct_call1)
        {
          tree decl = direct_call1->decl;
          cgraph_create_edge (node,
                              cgraph_node (decl),
                              NULL /* call_stmt: none, this is a fake edge  */,
                              count1, 0 /* freq */, 0 /* loop nest */);
        }

      if (val2 == 0 || count2 == 0)
        continue;
      direct_call2 = find_func_by_global_id (val2);
      if (direct_call2)
        {
          tree decl = direct_call2->decl;
          cgraph_create_edge (node,
                              cgraph_node (decl),
                              NULL /* call_stmt */,
                              count2, 0 /* freq */, 0 /* loop nest */);
        }
    }
}
/* This could be implemented as an IPA pass that must run before any
   unreachable-node elimination.  */

void
cgraph_add_fake_indirect_call_edges (void)
{
  struct cgraph_node *node;

  /* Enable this only for LIPO for now.  */
  if (!L_IPO_COMP_MODE)
    return;

  for (node = cgraph_nodes; node; node = node->next)
    {
      if (node->analyzed && (node->needed || node->reachable))
        add_fake_indirect_call_edges (node);
    }
}
/* Remove zero count fake edges added for the purpose of ensuring
   the right processing order.  This should be called after all
   profile transformations are done.  */

void
cgraph_remove_zero_count_fake_edges (void)
{
  struct cgraph_node *node;

  /* Enable this only for LIPO for now.  */
  if (!L_IPO_COMP_MODE)
    return;

  for (node = cgraph_nodes; node; node = node->next)
    {
      if (node->analyzed && (node->needed || node->reachable))
        {
          struct cgraph_edge *e, *f;
          for (e = node->callees; e; e = f)
            {
              f = e->next_callee;
              if (!e->call_stmt && !e->count && !e->loop_nest
                  && !e->frequency)
                cgraph_remove_edge (e);
            }
        }
    }
}
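/* mark_address, mark_load and mark_store below are the visit_addr,
   visit_load and visit_store callbacks passed to
   walk_stmt_load_store_addr_ops; each records the reference it found
   and returns false, leaving the walker's accumulated result false.  */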
/* Mark address taken in STMT.  */

static bool
mark_address (gimple stmt, tree addr, void *data)
{
  addr = get_base_address (addr);
  if (TREE_CODE (addr) == FUNCTION_DECL)
    {
      struct cgraph_node *node = cgraph_node (addr);
      cgraph_mark_address_taken_node (node);
      ipa_record_reference ((struct cgraph_node *)data, NULL,
                            node, NULL,
                            IPA_REF_ADDR, stmt);
    }
  else if (addr && TREE_CODE (addr) == VAR_DECL
           && (TREE_STATIC (addr) || DECL_EXTERNAL (addr)))
    {
      struct varpool_node *vnode = varpool_node (addr);
      int walk_subtrees;

      if (lang_hooks.callgraph.analyze_expr)
        lang_hooks.callgraph.analyze_expr (&addr, &walk_subtrees);
      varpool_mark_needed_node (vnode);
      if (vnode->alias && vnode->extra_name)
        vnode = vnode->extra_name;
      ipa_record_reference ((struct cgraph_node *)data, NULL,
                            NULL, vnode,
                            IPA_REF_ADDR, stmt);
    }

  return false;
}
/* Mark load of T.  */

static bool
mark_load (gimple stmt, tree t, void *data)
{
  t = get_base_address (t);
  if (t && TREE_CODE (t) == FUNCTION_DECL)
    {
      /* ??? This can happen on platforms with descriptors when these are
         directly manipulated in the code.  Pretend that it's an address.  */
      struct cgraph_node *node = cgraph_node (t);
      cgraph_mark_address_taken_node (node);
      ipa_record_reference ((struct cgraph_node *)data, NULL,
                            node, NULL,
                            IPA_REF_ADDR, stmt);
    }
  else if (t && TREE_CODE (t) == VAR_DECL
           && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
    {
      struct varpool_node *vnode = varpool_node (t);
      int walk_subtrees;

      if (lang_hooks.callgraph.analyze_expr)
        lang_hooks.callgraph.analyze_expr (&t, &walk_subtrees);
      varpool_mark_needed_node (vnode);
      if (vnode->alias && vnode->extra_name)
        vnode = vnode->extra_name;
      ipa_record_reference ((struct cgraph_node *)data, NULL,
                            NULL, vnode,
                            IPA_REF_LOAD, stmt);
    }
  return false;
}
/* Mark store of T.  */

static bool
mark_store (gimple stmt, tree t, void *data)
{
  t = get_base_address (t);
  if (t && TREE_CODE (t) == VAR_DECL
      && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
    {
      struct varpool_node *vnode = varpool_node (t);
      int walk_subtrees;

      if (lang_hooks.callgraph.analyze_expr)
        lang_hooks.callgraph.analyze_expr (&t, &walk_subtrees);
      varpool_mark_needed_node (vnode);
      if (vnode->alias && vnode->extra_name)
        vnode = vnode->extra_name;
      ipa_record_reference ((struct cgraph_node *)data, NULL,
                            NULL, vnode,
                            IPA_REF_STORE, stmt);
    }
  return false;
}
/* Create cgraph edges for function calls.
   Also look for functions and variables having addresses taken.  */

static unsigned int
build_cgraph_edges (void)
{
  basic_block bb;
  struct cgraph_node *node = cgraph_node (current_function_decl);
  struct pointer_set_t *visited_nodes = pointer_set_create ();
  gimple_stmt_iterator gsi;
  tree decl;
  unsigned ix;
  /* Create the callgraph edges and record the nodes referenced by the
     function body.  */
  FOR_EACH_BB (bb)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          tree decl;

          if (is_gimple_call (stmt))
            {
              int freq = compute_call_stmt_bb_frequency (current_function_decl,
                                                         bb);
              decl = gimple_call_fndecl (stmt);
              if (decl)
                cgraph_create_edge (node, cgraph_node (decl), stmt,
                                    bb->count, freq,
                                    bb->loop_depth);
              else
                cgraph_create_indirect_edge (node, stmt,
                                             gimple_call_flags (stmt),
                                             bb->count, freq,
                                             bb->loop_depth);
            }
          walk_stmt_load_store_addr_ops (stmt, node, mark_load,
                                         mark_store, mark_address);
          if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
              && gimple_omp_parallel_child_fn (stmt))
            {
              tree fn = gimple_omp_parallel_child_fn (stmt);
              ipa_record_reference (node, NULL, cgraph_node (fn),
                                    NULL, IPA_REF_ADDR, stmt);
            }
          if (gimple_code (stmt) == GIMPLE_OMP_TASK)
            {
              tree fn = gimple_omp_task_child_fn (stmt);
              if (fn)
                ipa_record_reference (node, NULL, cgraph_node (fn),
                                      NULL, IPA_REF_ADDR, stmt);
              fn = gimple_omp_task_copy_fn (stmt);
              if (fn)
                ipa_record_reference (node, NULL, cgraph_node (fn),
                                      NULL, IPA_REF_ADDR, stmt);
            }
        }
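      /* PHI arguments may carry invariant addresses (e.g. &var for a
         static variable) flowing in from predecessor blocks, so scan
         them for references as well.  */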
      for (gsi = gsi_start (phi_nodes (bb)); !gsi_end_p (gsi); gsi_next (&gsi))
        walk_stmt_load_store_addr_ops (gsi_stmt (gsi), node,
                                       mark_load, mark_store, mark_address);
    }
  /* Look for initializers of constant variables and private statics.  */
  FOR_EACH_LOCAL_DECL (cfun, ix, decl)
    if (TREE_CODE (decl) == VAR_DECL
        && (TREE_STATIC (decl) && !DECL_EXTERNAL (decl)))
      varpool_finalize_decl (decl);
  record_eh_tables (node, cfun);

  pointer_set_destroy (visited_nodes);
  return 0;
}
struct gimple_opt_pass pass_build_cgraph_edges =
{
 {
  GIMPLE_PASS,
  "*build_cgraph_edges",		/* name */
  NULL,					/* gate */
  build_cgraph_edges,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 }
};
/* Record references to functions and other variables present in the
   initial value of DECL, a variable.
   When ONLY_VARS is true, we mark needed only variables, not functions.  */

void
record_references_in_initializer (tree decl, bool only_vars)
{
  struct pointer_set_t *visited_nodes = pointer_set_create ();
  struct varpool_node *node = varpool_node (decl);
  struct record_reference_ctx ctx = {false, NULL};

  ctx.varpool_node = node;
  ctx.only_vars = only_vars;
  walk_tree (&DECL_INITIAL (decl), record_reference,
             &ctx, visited_nodes);
  pointer_set_destroy (visited_nodes);
}
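/* For example, given
     static int x;
     static int *p = &x;
   the walk above records an IPA_REF_ADDR reference from p's varpool
   node to x.  */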
/* Rebuild cgraph edges for current function node.  This needs to be run after
   passes that don't update the cgraph.  */

unsigned int
rebuild_cgraph_edges (void)
{
  basic_block bb;
  struct cgraph_node *node = cgraph_node (current_function_decl);
  gimple_stmt_iterator gsi;

  cgraph_node_remove_callees (node);
  ipa_remove_all_references (&node->ref_list);

  node->count = ENTRY_BLOCK_PTR->count;
  node->max_bb_count = 0;

  FOR_EACH_BB (bb)
    {
      if (bb->count > node->max_bb_count)
        node->max_bb_count = bb->count;
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
602 gimple stmt
= gsi_stmt (gsi
);
605 if (is_gimple_call (stmt
))
607 int freq
= compute_call_stmt_bb_frequency (current_function_decl
,
609 decl
= gimple_call_fndecl (stmt
);
612 struct cgraph_node
*callee
;
613 /* In LIPO mode, before tree_profiling, the call graph edge
614 needs to be built with the original target node to make
615 sure consistent early inline decisions between profile generate
616 and profile use. After tree-profiling, the target needs to be
617 set to the resolved node so that ipa-inline sees the definitions. */
618 if (L_IPO_COMP_MODE
&& cgraph_pre_profiling_inlining_done
)
619 callee
= cgraph_lipo_get_resolved_node (decl
);
621 callee
= cgraph_node (decl
);
622 cgraph_create_edge (node
, callee
, stmt
,
625 if (L_IPO_COMP_MODE
&& cgraph_pre_profiling_inlining_done
626 && decl
!= callee
->decl
)
627 gimple_call_set_fndecl (stmt
, callee
->decl
);
630 cgraph_create_indirect_edge (node
, stmt
,
631 gimple_call_flags (stmt
),
635 walk_stmt_load_store_addr_ops (stmt
, node
, mark_load
,
636 mark_store
, mark_address
);
      for (gsi = gsi_start (phi_nodes (bb)); !gsi_end_p (gsi); gsi_next (&gsi))
        walk_stmt_load_store_addr_ops (gsi_stmt (gsi), node,
                                       mark_load, mark_store, mark_address);
    }
  add_fake_indirect_call_edges (node);
  record_eh_tables (node, cfun);
  gcc_assert (!node->global.inlined_to);

  return 0;
}
/* Rebuild cgraph references for the current function node.  This needs to be
   run after passes that don't update the cgraph.  */

void
cgraph_rebuild_references (void)
{
  basic_block bb;
  struct cgraph_node *node = cgraph_node (current_function_decl);
  gimple_stmt_iterator gsi;

  ipa_remove_all_references (&node->ref_list);

  node->count = ENTRY_BLOCK_PTR->count;

  FOR_EACH_BB (bb)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);

          walk_stmt_load_store_addr_ops (stmt, node, mark_load,
                                         mark_store, mark_address);
        }
      for (gsi = gsi_start (phi_nodes (bb)); !gsi_end_p (gsi); gsi_next (&gsi))
        walk_stmt_load_store_addr_ops (gsi_stmt (gsi), node,
                                       mark_load, mark_store, mark_address);
    }
  record_eh_tables (node, cfun);
}
struct gimple_opt_pass pass_rebuild_cgraph_edges =
{
 {
  GIMPLE_PASS,
  "*rebuild_cgraph_edges",		/* name */
  NULL,					/* gate */
  rebuild_cgraph_edges,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_CGRAPH,				/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0,					/* todo_flags_finish */
 }
};
/* Defined in tree-optimize.c.  */
extern bool cgraph_callee_edges_final_cleanup;

static unsigned int
remove_cgraph_callee_edges (void)
{
  /* The -freorder-functions=* and -fcallgraph-profiles-sections
     options need the call graph preserved till pass_final.  */
  if (cgraph_callee_edges_final_cleanup
      && (flag_callgraph_profiles_sections
          || (flag_reorder_functions > 1)))
    return 0;

  cgraph_node_remove_callees (cgraph_node (current_function_decl));
  return 0;
}
struct gimple_opt_pass pass_remove_cgraph_callee_edges =
{
 {
  GIMPLE_PASS,
  "*remove_cgraph_callee_edges",	/* name */
  NULL,					/* gate */
  remove_cgraph_callee_edges,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0,					/* todo_flags_finish */
 }
};