/* Callgraph construction.
   Copyright (C) 2003-2014 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "pointer-set.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "langhooks.h"
#include "intl.h"
#include "tree-pass.h"
#include "ipa-utils.h"
#include "except.h"
#include "ipa-inline.h"

/* Context of record_reference.  */
struct record_reference_ctx
{
  bool only_vars;
  class varpool_node *varpool_node;
};

/* Walk tree and record all calls and references to functions/variables.
   Called via walk_tree: TP is pointer to tree to be examined.
   When DATA is non-null, record references to callgraph.  */

static tree
record_reference (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  tree decl;
  struct record_reference_ctx *ctx = (struct record_reference_ctx *)data;

  t = canonicalize_constructor_val (t, NULL);
  if (!t)
    t = *tp;
  else if (t != *tp)
    *tp = t;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case FUNCTION_DECL:
      gcc_unreachable ();
      break;

    case FDESC_EXPR:
    case ADDR_EXPR:
      /* Record dereferences to the functions.  This makes the
         functions reachable unconditionally.  */
      decl = get_base_var (*tp);
      if (TREE_CODE (decl) == FUNCTION_DECL)
        {
          struct cgraph_node *node = cgraph_get_create_node (decl);
          if (!ctx->only_vars)
            cgraph_mark_address_taken_node (node);
          ipa_record_reference (ctx->varpool_node,
                                node,
                                IPA_REF_ADDR, NULL);
        }

      if (TREE_CODE (decl) == VAR_DECL)
        {
          varpool_node *vnode = varpool_node_for_decl (decl);
          ipa_record_reference (ctx->varpool_node,
                                vnode,
                                IPA_REF_ADDR, NULL);
        }
      *walk_subtrees = 0;
      break;

    default:
      /* Save some cycles by not walking types and declarations as we
         won't find anything useful there anyway.  */
      if (IS_TYPE_OR_DECL_P (*tp))
        {
          *walk_subtrees = 0;
          break;
        }
      break;
    }

  return NULL_TREE;
}
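
/* record_reference is only used as a walk_tree callback; see
   record_references_in_initializer below, which walks DECL_INITIAL of a
   variable with a visited-node pointer set so that shared subtrees of the
   initializer are examined only once.  */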

/* Record references to typeinfos in the type list LIST.  */

static void
record_type_list (struct cgraph_node *node, tree list)
{
  for (; list; list = TREE_CHAIN (list))
    {
      tree type = TREE_VALUE (list);

      if (TYPE_P (type))
        type = lookup_type_for_runtime (type);
      STRIP_NOPS (type);
      if (TREE_CODE (type) == ADDR_EXPR)
        {
          type = TREE_OPERAND (type, 0);
          if (TREE_CODE (type) == VAR_DECL)
            {
              varpool_node *vnode = varpool_node_for_decl (type);
              ipa_record_reference (node,
                                    vnode,
                                    IPA_REF_ADDR, NULL);
            }
        }
    }
}

/* Record all references we will introduce by producing EH tables
   for NODE.  */

static void
record_eh_tables (struct cgraph_node *node, struct function *fun)
{
  eh_region i;

  if (DECL_FUNCTION_PERSONALITY (node->decl))
    {
      tree per_decl = DECL_FUNCTION_PERSONALITY (node->decl);
      struct cgraph_node *per_node = cgraph_get_create_node (per_decl);

      ipa_record_reference (node, per_node, IPA_REF_ADDR, NULL);
      cgraph_mark_address_taken_node (per_node);
    }

  i = fun->eh->region_tree;
  if (!i)
    return;

  while (1)
    {
      switch (i->type)
        {
        case ERT_CLEANUP:
        case ERT_MUST_NOT_THROW:
          break;

        case ERT_TRY:
          {
            eh_catch c;
            for (c = i->u.eh_try.first_catch; c; c = c->next_catch)
              record_type_list (node, c->type_list);
          }
          break;

        case ERT_ALLOWED_EXCEPTIONS:
          record_type_list (node, i->u.allowed.type_list);
          break;
        }
      /* If there are sub-regions, process them.  */
      if (i->inner)
        i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
        i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
        {
          do
            {
              i = i->outer;
              if (i == NULL)
                return;
            }
          while (i->next_peer == NULL);
          i = i->next_peer;
        }
    }
}
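
/* For example, a catch clause for a specific type contributes that type to
   the region's type list; record_type_list above maps it through
   lookup_type_for_runtime (typically the address of the typeinfo object)
   and records an IPA_REF_ADDR reference to the underlying variable, keeping
   the typeinfo alive for the EH tables.  */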

/* Computes the frequency of the call statement so that it can be stored in
   cgraph_edge.  BB is the basic block of the call statement.  */
int
compute_call_stmt_bb_frequency (tree decl, basic_block bb)
{
  int entry_freq = ENTRY_BLOCK_PTR_FOR_FN
                     (DECL_STRUCT_FUNCTION (decl))->frequency;
  int freq = bb->frequency;

  if (profile_status_for_fn (DECL_STRUCT_FUNCTION (decl)) == PROFILE_ABSENT)
    return CGRAPH_FREQ_BASE;

  if (!entry_freq)
    entry_freq = 1, freq++;

  freq = freq * CGRAPH_FREQ_BASE / entry_freq;
  if (freq > CGRAPH_FREQ_MAX)
    freq = CGRAPH_FREQ_MAX;

  return freq;
}
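
/* The result is scaled so that CGRAPH_FREQ_BASE corresponds to a block
   executed as often as the function entry block: for example, a call in a
   block with half the entry block's frequency yields CGRAPH_FREQ_BASE / 2,
   and anything above CGRAPH_FREQ_MAX is clamped to CGRAPH_FREQ_MAX.  */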

/* Mark address taken in STMT.  */

static bool
mark_address (gimple stmt, tree addr, tree, void *data)
{
  addr = get_base_address (addr);
  if (TREE_CODE (addr) == FUNCTION_DECL)
    {
      struct cgraph_node *node = cgraph_get_create_node (addr);
      cgraph_mark_address_taken_node (node);
      ipa_record_reference ((symtab_node *)data,
                            node,
                            IPA_REF_ADDR, stmt);
    }
  else if (addr && TREE_CODE (addr) == VAR_DECL
           && (TREE_STATIC (addr) || DECL_EXTERNAL (addr)))
    {
      varpool_node *vnode = varpool_node_for_decl (addr);

      ipa_record_reference ((symtab_node *)data,
                            vnode,
                            IPA_REF_ADDR, stmt);
    }

  return false;
}

/* Mark load of T.  */

static bool
mark_load (gimple stmt, tree t, tree, void *data)
{
  t = get_base_address (t);
  if (t && TREE_CODE (t) == FUNCTION_DECL)
    {
      /* ??? This can happen on platforms with descriptors when these are
         directly manipulated in the code.  Pretend that it's an address.  */
      struct cgraph_node *node = cgraph_get_create_node (t);
      cgraph_mark_address_taken_node (node);
      ipa_record_reference ((symtab_node *)data,
                            node,
                            IPA_REF_ADDR, stmt);
    }
  else if (t && TREE_CODE (t) == VAR_DECL
           && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
    {
      varpool_node *vnode = varpool_node_for_decl (t);

      ipa_record_reference ((symtab_node *)data,
                            vnode,
                            IPA_REF_LOAD, stmt);
    }
  return false;
}

/* Mark store of T.  */

static bool
mark_store (gimple stmt, tree t, tree, void *data)
{
  t = get_base_address (t);
  if (t && TREE_CODE (t) == VAR_DECL
      && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
    {
      varpool_node *vnode = varpool_node_for_decl (t);

      ipa_record_reference ((symtab_node *)data,
                            vnode,
                            IPA_REF_STORE, stmt);
    }
  return false;
}
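
/* mark_load, mark_store and mark_address are the callbacks handed to
   walk_stmt_load_store_addr_ops below: loads of static or external variables
   become IPA_REF_LOAD references, stores become IPA_REF_STORE, and taking an
   address becomes IPA_REF_ADDR, additionally marking functions as having
   their address taken.  */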

/* Record all references from NODE that are taken in statement STMT.  */
void
ipa_record_stmt_references (struct cgraph_node *node, gimple stmt)
{
  walk_stmt_load_store_addr_ops (stmt, node, mark_load, mark_store,
                                 mark_address);
}

/* Create cgraph edges for function calls.
   Also look for functions and variables having addresses taken.  */

static unsigned int
build_cgraph_edges (void)
{
  basic_block bb;
  struct cgraph_node *node = cgraph_get_node (current_function_decl);
  struct pointer_set_t *visited_nodes = pointer_set_create ();
  gimple_stmt_iterator gsi;
  tree decl;
  unsigned ix;

  /* Create the callgraph edges and record the nodes referenced by the function
     body.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          tree decl;

          if (is_gimple_debug (stmt))
            continue;

          if (is_gimple_call (stmt))
            {
              int freq = compute_call_stmt_bb_frequency (current_function_decl,
                                                         bb);
              decl = gimple_call_fndecl (stmt);
              if (decl)
                cgraph_create_edge (node, cgraph_get_create_node (decl),
                                    stmt, bb->count, freq);
              else if (gimple_call_internal_p (stmt))
                ;
              else
                cgraph_create_indirect_edge (node, stmt,
                                             gimple_call_flags (stmt),
                                             bb->count, freq);
            }
          ipa_record_stmt_references (node, stmt);
          if (gimple_code (stmt) == GIMPLE_OACC_PARALLEL
              && gimple_oacc_parallel_child_fn (stmt))
            {
              tree fn = gimple_oacc_parallel_child_fn (stmt);
              ipa_record_reference (node,
                                    cgraph_get_create_node (fn),
                                    IPA_REF_ADDR, stmt);
            }
          else if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
                   && gimple_omp_parallel_child_fn (stmt))
            {
              tree fn = gimple_omp_parallel_child_fn (stmt);
              ipa_record_reference (node,
                                    cgraph_get_create_node (fn),
                                    IPA_REF_ADDR, stmt);
            }
          else if (gimple_code (stmt) == GIMPLE_OMP_TASK)
            {
              tree fn = gimple_omp_task_child_fn (stmt);
              if (fn)
                ipa_record_reference (node,
                                      cgraph_get_create_node (fn),
                                      IPA_REF_ADDR, stmt);
              fn = gimple_omp_task_copy_fn (stmt);
              if (fn)
                ipa_record_reference (node,
                                      cgraph_get_create_node (fn),
                                      IPA_REF_ADDR, stmt);
            }
        }
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        ipa_record_stmt_references (node, gsi_stmt (gsi));
    }

  /* Look for initializers of constant variables and private statics.  */
  FOR_EACH_LOCAL_DECL (cfun, ix, decl)
    if (TREE_CODE (decl) == VAR_DECL
        && (TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
        && !DECL_HAS_VALUE_EXPR_P (decl))
      varpool_finalize_decl (decl);
  record_eh_tables (node, cfun);

  pointer_set_destroy (visited_nodes);
  return 0;
}
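
/* build_cgraph_edges is not called directly; it is wrapped by the
   pass_build_cgraph_edges pass defined below and runs once per function.  */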

namespace {

const pass_data pass_data_build_cgraph_edges =
{
  GIMPLE_PASS, /* type */
  "*build_cgraph_edges", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cgraph_edges : public gimple_opt_pass
{
public:
  pass_build_cgraph_edges (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cgraph_edges, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return build_cgraph_edges (); }

}; // class pass_build_cgraph_edges

} // anon namespace

gimple_opt_pass *
make_pass_build_cgraph_edges (gcc::context *ctxt)
{
  return new pass_build_cgraph_edges (ctxt);
}

/* Record references to functions and other variables present in the
   initial value of DECL, a variable.
   When ONLY_VARS is true, we mark needed only variables, not functions.  */

void
record_references_in_initializer (tree decl, bool only_vars)
{
  struct pointer_set_t *visited_nodes = pointer_set_create ();
  varpool_node *node = varpool_node_for_decl (decl);
  struct record_reference_ctx ctx = {false, NULL};

  ctx.varpool_node = node;
  ctx.only_vars = only_vars;
  walk_tree (&DECL_INITIAL (decl), record_reference,
             &ctx, visited_nodes);
  pointer_set_destroy (visited_nodes);
}
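
/* ONLY_VARS is forwarded to record_reference through the context structure;
   it suppresses marking functions as address taken while still recording the
   variable and function references found in the initializer.  */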

/* Rebuild cgraph edges for current function node.  This needs to be run after
   passes that don't update the cgraph.  */

unsigned int
rebuild_cgraph_edges (void)
{
  basic_block bb;
  struct cgraph_node *node = cgraph_get_node (current_function_decl);
  gimple_stmt_iterator gsi;

  cgraph_node_remove_callees (node);
  ipa_remove_all_references (&node->ref_list);

  node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;

  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          tree decl;

          if (is_gimple_call (stmt))
            {
              int freq = compute_call_stmt_bb_frequency (current_function_decl,
                                                         bb);
              decl = gimple_call_fndecl (stmt);
              if (decl)
                cgraph_create_edge (node, cgraph_get_create_node (decl), stmt,
                                    bb->count, freq);
              else if (gimple_call_internal_p (stmt))
                ;
              else
                cgraph_create_indirect_edge (node, stmt,
                                             gimple_call_flags (stmt),
                                             bb->count, freq);
            }
          ipa_record_stmt_references (node, stmt);
        }
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        ipa_record_stmt_references (node, gsi_stmt (gsi));
    }
  record_eh_tables (node, cfun);
  gcc_assert (!node->global.inlined_to);

  return 0;
}

/* Rebuild cgraph references for current function node.  This needs to be run
   after passes that don't update the cgraph.  */

void
cgraph_rebuild_references (void)
{
  basic_block bb;
  struct cgraph_node *node = cgraph_get_node (current_function_decl);
  gimple_stmt_iterator gsi;
  struct ipa_ref *ref;
  int i;

  /* Keep speculative references for further cgraph edge expansion.  */
  for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref);)
    if (!ref->speculative)
      ipa_remove_reference (ref);
    else
      i++;

  node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;

  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        ipa_record_stmt_references (node, gsi_stmt (gsi));
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        ipa_record_stmt_references (node, gsi_stmt (gsi));
    }
  record_eh_tables (node, cfun);
}
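
/* Unlike rebuild_cgraph_edges above, this variant leaves the call edges
   alone and only recomputes the reference lists, preserving speculative
   references.  */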

namespace {

const pass_data pass_data_rebuild_cgraph_edges =
{
  GIMPLE_PASS, /* type */
  "*rebuild_cgraph_edges", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_CGRAPH, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_rebuild_cgraph_edges : public gimple_opt_pass
{
public:
  pass_rebuild_cgraph_edges (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_rebuild_cgraph_edges, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_rebuild_cgraph_edges (m_ctxt); }
  unsigned int execute () { return rebuild_cgraph_edges (); }

}; // class pass_rebuild_cgraph_edges

} // anon namespace

gimple_opt_pass *
make_pass_rebuild_cgraph_edges (gcc::context *ctxt)
{
  return new pass_rebuild_cgraph_edges (ctxt);
}

static unsigned int
remove_cgraph_callee_edges (void)
{
  struct cgraph_node *node = cgraph_get_node (current_function_decl);
  cgraph_node_remove_callees (node);
  ipa_remove_all_references (&node->ref_list);
  return 0;
}

namespace {

const pass_data pass_data_remove_cgraph_callee_edges =
{
  GIMPLE_PASS, /* type */
  "*remove_cgraph_callee_edges", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_remove_cgraph_callee_edges : public gimple_opt_pass
{
public:
  pass_remove_cgraph_callee_edges (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_remove_cgraph_callee_edges, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () {
    return new pass_remove_cgraph_callee_edges (m_ctxt);
  }
  unsigned int execute () { return remove_cgraph_callee_edges (); }

}; // class pass_remove_cgraph_callee_edges

} // anon namespace

gimple_opt_pass *
make_pass_remove_cgraph_callee_edges (gcc::context *ctxt)
{
  return new pass_remove_cgraph_callee_edges (ctxt);
}