/* Callgraph construction.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
24 #include "coretypes.h"
27 #include "tree-flow.h"
28 #include "langhooks.h"
29 #include "pointer-set.h"
33 #include "tree-pass.h"
34 #include "ipa-utils.h"
37 /* Context of record_reference. */
38 struct record_reference_ctx
41 struct varpool_node
*varpool_node
;
44 /* Walk tree and record all calls and references to functions/variables.
45 Called via walk_tree: TP is pointer to tree to be examined.
46 When DATA is non-null, record references to callgraph.
50 record_reference (tree
*tp
, int *walk_subtrees
, void *data
)
54 struct record_reference_ctx
*ctx
= (struct record_reference_ctx
*)data
;
56 switch (TREE_CODE (t
))
65 /* Record dereferences to the functions. This makes the
66 functions reachable unconditionally. */
67 decl
= get_base_var (*tp
);
68 if (TREE_CODE (decl
) == FUNCTION_DECL
)
71 cgraph_mark_address_taken_node (cgraph_node (decl
));
72 ipa_record_reference (NULL
, ctx
->varpool_node
,
73 cgraph_node (decl
), NULL
,
77 if (TREE_CODE (decl
) == VAR_DECL
)
79 struct varpool_node
*vnode
= varpool_node (decl
);
80 if (lang_hooks
.callgraph
.analyze_expr
)
81 lang_hooks
.callgraph
.analyze_expr (&decl
, walk_subtrees
);
82 varpool_mark_needed_node (vnode
);
83 if (vnode
->alias
&& vnode
->extra_name
)
84 vnode
= vnode
->extra_name
;
85 ipa_record_reference (NULL
, ctx
->varpool_node
,
93 /* Save some cycles by not walking types and declaration as we
94 won't find anything useful there anyway. */
95 if (IS_TYPE_OR_DECL_P (*tp
))
101 if ((unsigned int) TREE_CODE (t
) >= LAST_AND_UNUSED_TREE_CODE
)
102 return lang_hooks
.callgraph
.analyze_expr (tp
, walk_subtrees
);
109 /* Record references to typeinfos in the type list LIST. */
112 record_type_list (struct cgraph_node
*node
, tree list
)
114 for (; list
; list
= TREE_CHAIN (list
))
116 tree type
= TREE_VALUE (list
);
119 type
= lookup_type_for_runtime (type
);
121 if (TREE_CODE (type
) == ADDR_EXPR
)
123 type
= TREE_OPERAND (type
, 0);
124 if (TREE_CODE (type
) == VAR_DECL
)
126 struct varpool_node
*vnode
= varpool_node (type
);
127 varpool_mark_needed_node (vnode
);
128 ipa_record_reference (node
, NULL
,
136 /* Record all references we will introduce by producing EH tables
140 record_eh_tables (struct cgraph_node
*node
, struct function
*fun
)
144 i
= fun
->eh
->region_tree
;
153 case ERT_MUST_NOT_THROW
:
159 for (c
= i
->u
.eh_try
.first_catch
; c
; c
= c
->next_catch
)
160 record_type_list (node
, c
->type_list
);
164 case ERT_ALLOWED_EXCEPTIONS
:
165 record_type_list (node
, i
->u
.allowed
.type_list
);
168 /* If there are sub-regions, process them. */
171 /* If there are peers, process them. */
172 else if (i
->next_peer
)
174 /* Otherwise, step back up the tree to the next peer. */
183 while (i
->next_peer
== NULL
);
189 /* Reset inlining information of all incoming call edges of NODE. */
192 reset_inline_failed (struct cgraph_node
*node
)
194 struct cgraph_edge
*e
;
196 for (e
= node
->callers
; e
; e
= e
->next_caller
)
198 e
->callee
->global
.inlined_to
= NULL
;
200 e
->inline_failed
= CIF_BODY_NOT_AVAILABLE
;
201 else if (node
->local
.redefined_extern_inline
)
202 e
->inline_failed
= CIF_REDEFINED_EXTERN_INLINE
;
203 else if (!node
->local
.inlinable
)
204 e
->inline_failed
= CIF_FUNCTION_NOT_INLINABLE
;
205 else if (e
->call_stmt_cannot_inline_p
)
206 e
->inline_failed
= CIF_MISMATCHED_ARGUMENTS
;
208 e
->inline_failed
= CIF_FUNCTION_NOT_CONSIDERED
;
212 /* Computes the frequency of the call statement so that it can be stored in
213 cgraph_edge. BB is the basic block of the call statement. */
215 compute_call_stmt_bb_frequency (tree decl
, basic_block bb
)
217 int entry_freq
= ENTRY_BLOCK_PTR_FOR_FUNCTION
218 (DECL_STRUCT_FUNCTION (decl
))->frequency
;
219 int freq
= bb
->frequency
;
221 if (profile_status_for_function (DECL_STRUCT_FUNCTION (decl
)) == PROFILE_ABSENT
)
222 return CGRAPH_FREQ_BASE
;
225 entry_freq
= 1, freq
++;
227 freq
= freq
* CGRAPH_FREQ_BASE
/ entry_freq
;
228 if (freq
> CGRAPH_FREQ_MAX
)
229 freq
= CGRAPH_FREQ_MAX
;
234 /* Mark address taken in STMT. */
237 mark_address (gimple stmt ATTRIBUTE_UNUSED
, tree addr
,
238 void *data ATTRIBUTE_UNUSED
)
240 if (TREE_CODE (addr
) == FUNCTION_DECL
)
242 struct cgraph_node
*node
= cgraph_node (addr
);
243 cgraph_mark_address_taken_node (node
);
244 ipa_record_reference ((struct cgraph_node
*)data
, NULL
,
250 addr
= get_base_address (addr
);
251 if (addr
&& TREE_CODE (addr
) == VAR_DECL
252 && (TREE_STATIC (addr
) || DECL_EXTERNAL (addr
)))
254 struct varpool_node
*vnode
= varpool_node (addr
);
257 if (lang_hooks
.callgraph
.analyze_expr
)
258 lang_hooks
.callgraph
.analyze_expr (&addr
, &walk_subtrees
);
259 varpool_mark_needed_node (vnode
);
260 if (vnode
->alias
&& vnode
->extra_name
)
261 vnode
= vnode
->extra_name
;
262 ipa_record_reference ((struct cgraph_node
*)data
, NULL
,
271 /* Mark load of T. */
274 mark_load (gimple stmt ATTRIBUTE_UNUSED
, tree t
,
275 void *data ATTRIBUTE_UNUSED
)
277 t
= get_base_address (t
);
278 if (t
&& TREE_CODE (t
) == VAR_DECL
279 && (TREE_STATIC (t
) || DECL_EXTERNAL (t
)))
281 struct varpool_node
*vnode
= varpool_node (t
);
284 if (lang_hooks
.callgraph
.analyze_expr
)
285 lang_hooks
.callgraph
.analyze_expr (&t
, &walk_subtrees
);
286 varpool_mark_needed_node (vnode
);
287 if (vnode
->alias
&& vnode
->extra_name
)
288 vnode
= vnode
->extra_name
;
289 ipa_record_reference ((struct cgraph_node
*)data
, NULL
,
296 /* Mark store of T. */
299 mark_store (gimple stmt ATTRIBUTE_UNUSED
, tree t
,
300 void *data ATTRIBUTE_UNUSED
)
302 t
= get_base_address (t
);
303 if (t
&& TREE_CODE (t
) == VAR_DECL
304 && (TREE_STATIC (t
) || DECL_EXTERNAL (t
)))
306 struct varpool_node
*vnode
= varpool_node (t
);
309 if (lang_hooks
.callgraph
.analyze_expr
)
310 lang_hooks
.callgraph
.analyze_expr (&t
, &walk_subtrees
);
311 varpool_mark_needed_node (vnode
);
312 if (vnode
->alias
&& vnode
->extra_name
)
313 vnode
= vnode
->extra_name
;
314 ipa_record_reference ((struct cgraph_node
*)data
, NULL
,
316 IPA_REF_STORE
, NULL
);
321 /* Create cgraph edges for function calls.
322 Also look for functions and variables having addresses taken. */
325 build_cgraph_edges (void)
328 struct cgraph_node
*node
= cgraph_node (current_function_decl
);
329 struct pointer_set_t
*visited_nodes
= pointer_set_create ();
330 gimple_stmt_iterator gsi
;
334 /* Create the callgraph edges and record the nodes referenced by the function.
338 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
340 gimple stmt
= gsi_stmt (gsi
);
343 if (is_gimple_call (stmt
))
345 int freq
= compute_call_stmt_bb_frequency (current_function_decl
,
347 decl
= gimple_call_fndecl (stmt
);
349 cgraph_create_edge (node
, cgraph_node (decl
), stmt
,
353 cgraph_create_indirect_edge (node
, stmt
,
354 gimple_call_flags (stmt
),
358 walk_stmt_load_store_addr_ops (stmt
, node
, mark_load
,
359 mark_store
, mark_address
);
360 if (gimple_code (stmt
) == GIMPLE_OMP_PARALLEL
361 && gimple_omp_parallel_child_fn (stmt
))
363 tree fn
= gimple_omp_parallel_child_fn (stmt
);
364 ipa_record_reference (node
, NULL
, cgraph_node (fn
),
365 NULL
, IPA_REF_ADDR
, stmt
);
367 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
)
369 tree fn
= gimple_omp_task_child_fn (stmt
);
371 ipa_record_reference (node
, NULL
, cgraph_node (fn
),
372 NULL
, IPA_REF_ADDR
, stmt
);
373 fn
= gimple_omp_task_copy_fn (stmt
);
375 ipa_record_reference (node
, NULL
, cgraph_node (fn
),
376 NULL
, IPA_REF_ADDR
, stmt
);
379 for (gsi
= gsi_start (phi_nodes (bb
)); !gsi_end_p (gsi
); gsi_next (&gsi
))
380 walk_stmt_load_store_addr_ops (gsi_stmt (gsi
), node
,
381 mark_load
, mark_store
, mark_address
);
384 /* Look for initializers of constant variables and private statics. */
385 FOR_EACH_LOCAL_DECL (cfun
, ix
, decl
)
386 if (TREE_CODE (decl
) == VAR_DECL
387 && (TREE_STATIC (decl
) && !DECL_EXTERNAL (decl
)))
388 varpool_finalize_decl (decl
);
389 record_eh_tables (node
, cfun
);
391 pointer_set_destroy (visited_nodes
);
395 struct gimple_opt_pass pass_build_cgraph_edges
=
399 "*build_cgraph_edges", /* name */
401 build_cgraph_edges
, /* execute */
404 0, /* static_pass_number */
406 PROP_cfg
, /* properties_required */
407 0, /* properties_provided */
408 0, /* properties_destroyed */
409 0, /* todo_flags_start */
410 0 /* todo_flags_finish */
414 /* Record references to functions and other variables present in the
415 initial value of DECL, a variable.
416 When ONLY_VARS is true, we mark needed only variables, not functions. */
419 record_references_in_initializer (tree decl
, bool only_vars
)
421 struct pointer_set_t
*visited_nodes
= pointer_set_create ();
422 struct varpool_node
*node
= varpool_node (decl
);
423 struct record_reference_ctx ctx
= {false, NULL
};
425 ctx
.varpool_node
= node
;
426 ctx
.only_vars
= only_vars
;
427 walk_tree (&DECL_INITIAL (decl
), record_reference
,
428 &ctx
, visited_nodes
);
429 pointer_set_destroy (visited_nodes
);
432 /* Rebuild cgraph edges for current function node. This needs to be run after
433 passes that don't update the cgraph. */
436 rebuild_cgraph_edges (void)
439 struct cgraph_node
*node
= cgraph_node (current_function_decl
);
440 gimple_stmt_iterator gsi
;
442 cgraph_node_remove_callees (node
);
443 ipa_remove_all_references (&node
->ref_list
);
445 node
->count
= ENTRY_BLOCK_PTR
->count
;
449 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
451 gimple stmt
= gsi_stmt (gsi
);
454 if (is_gimple_call (stmt
))
456 int freq
= compute_call_stmt_bb_frequency (current_function_decl
,
458 decl
= gimple_call_fndecl (stmt
);
460 cgraph_create_edge (node
, cgraph_node (decl
), stmt
,
464 cgraph_create_indirect_edge (node
, stmt
,
465 gimple_call_flags (stmt
),
469 walk_stmt_load_store_addr_ops (stmt
, node
, mark_load
,
470 mark_store
, mark_address
);
473 for (gsi
= gsi_start (phi_nodes (bb
)); !gsi_end_p (gsi
); gsi_next (&gsi
))
474 walk_stmt_load_store_addr_ops (gsi_stmt (gsi
), node
,
475 mark_load
, mark_store
, mark_address
);
477 record_eh_tables (node
, cfun
);
478 gcc_assert (!node
->global
.inlined_to
);
483 /* Rebuild cgraph edges for current function node. This needs to be run after
484 passes that don't update the cgraph. */
487 cgraph_rebuild_references (void)
490 struct cgraph_node
*node
= cgraph_node (current_function_decl
);
491 gimple_stmt_iterator gsi
;
493 ipa_remove_all_references (&node
->ref_list
);
495 node
->count
= ENTRY_BLOCK_PTR
->count
;
499 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
501 gimple stmt
= gsi_stmt (gsi
);
503 walk_stmt_load_store_addr_ops (stmt
, node
, mark_load
,
504 mark_store
, mark_address
);
507 for (gsi
= gsi_start (phi_nodes (bb
)); !gsi_end_p (gsi
); gsi_next (&gsi
))
508 walk_stmt_load_store_addr_ops (gsi_stmt (gsi
), node
,
509 mark_load
, mark_store
, mark_address
);
511 record_eh_tables (node
, cfun
);
514 struct gimple_opt_pass pass_rebuild_cgraph_edges
=
518 "*rebuild_cgraph_edges", /* name */
520 rebuild_cgraph_edges
, /* execute */
523 0, /* static_pass_number */
525 PROP_cfg
, /* properties_required */
526 0, /* properties_provided */
527 0, /* properties_destroyed */
528 0, /* todo_flags_start */
529 0, /* todo_flags_finish */
535 remove_cgraph_callee_edges (void)
537 cgraph_node_remove_callees (cgraph_node (current_function_decl
));
541 struct gimple_opt_pass pass_remove_cgraph_callee_edges
=
545 "*remove_cgraph_callee_edges", /* name */
547 remove_cgraph_callee_edges
, /* execute */
550 0, /* static_pass_number */
552 0, /* properties_required */
553 0, /* properties_provided */
554 0, /* properties_destroyed */
555 0, /* todo_flags_start */
556 0, /* todo_flags_finish */