1 /* Callgraph construction.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
27 #include "tree-flow.h"
28 #include "langhooks.h"
29 #include "pointer-set.h"
33 #include "tree-pass.h"
34 #include "ipa-utils.h"
37 /* Context of record_reference. */
38 struct record_reference_ctx
/* NOTE(review): extraction gap — the struct's opening brace and its
   "bool only_vars;" member (assigned in record_references_in_initializer
   elsewhere in this file) are missing from this view.  */
41 struct varpool_node
/* The variable whose DECL_INITIAL is being walked; references found
   during the walk are attributed to this varpool node.  */
*varpool_node
;
44 /* Walk tree and record all calls and references to functions/variables.
45 Called via walk_tree: TP is pointer to tree to be examined.
46 When DATA is non-null, record references to callgraph.
/* NOTE(review): extraction gaps throughout this function — the return
   type, the local declarations of T and DECL, the switch's case labels,
   braces, breaks, and returns are missing from this view.  The visible
   tokens are kept byte-identical.  */
50 record_reference (tree
*tp
, int *walk_subtrees
, void *data
)
/* DATA is the record_reference_ctx supplied by the walk_tree caller.  */
54 struct record_reference_ctx
*ctx
= (struct record_reference_ctx
*)data
;
56 switch (TREE_CODE (t
))
/* Presumably the ADDR_EXPR case follows — TODO confirm against the
   unabridged source.  */
65 /* Record dereferences to the functions. This makes the
66 functions reachable unconditionally. */
67 decl
= get_base_var (*tp
);
68 if (TREE_CODE (decl
) == FUNCTION_DECL
)
/* Address of a function taken: mark the cgraph node address-taken and
   record an IPA reference from the enclosing variable.  */
71 cgraph_mark_address_taken_node (cgraph_node (decl
))
;
72 ipa_record_reference (NULL
, ctx
->varpool_node
,
73 cgraph_node (decl
), NULL
,
/* NOTE(review): the closing arguments of the call above, and the
   intervening code up to the VAR_DECL check, are not visible.  */
77 if (TREE_CODE (decl
) == VAR_DECL
)
79 struct varpool_node
*vnode
= varpool_node (decl
);
/* Let the front end analyze the expression first, if it wants to.  */
80 if (lang_hooks
.callgraph
.analyze_expr
)
81 lang_hooks
.callgraph
.analyze_expr (&decl
, walk_subtrees
);
82 varpool_mark_needed_node (vnode
);
/* Redirect an extra-name alias to the underlying variable.  */
83 if (vnode
->alias
&& vnode
->extra_name
)
84 vnode
= vnode
->extra_name
;
85 ipa_record_reference (NULL
, ctx
->varpool_node
,
93 /* Save some cycles by not walking types and declaration as we
94 won't find anything useful there anyway. */
95 if (IS_TYPE_OR_DECL_P (*tp
))
/* Codes beyond the common tree codes are front-end specific; hand them
   to the language hook for analysis.  */
101 if ((unsigned int) TREE_CODE (t
) >= LAST_AND_UNUSED_TREE_CODE
)
102 return lang_hooks
.callgraph
.analyze_expr (tp
, walk_subtrees
);
109 /* Record references to typeinfos in the type list LIST. */
/* NOTE(review): extraction gaps — return type, braces, and the trailing
   arguments of ipa_record_reference are missing from this view.  */
112 record_type_list (struct cgraph_node
*node
, tree list
)
/* Walk the TREE_LIST chain of handled types.  */
114 for (; list
; list
= TREE_CHAIN (list
))
116 tree type
= TREE_VALUE (list
);
/* Map the front-end type to the runtime typeinfo object used in the
   EH tables.  */
119 type
= lookup_type_for_runtime (type
);
121 if (TREE_CODE (type
) == ADDR_EXPR
)
123 type
= TREE_OPERAND (type
, 0);
124 if (TREE_CODE (type
) == VAR_DECL
)
/* The typeinfo variable must survive: mark it needed and record a
   reference from NODE.  */
126 struct varpool_node
*vnode
= varpool_node (type
);
127 varpool_mark_needed_node (vnode
);
128 ipa_record_reference (node
, NULL
,
136 /* Record all references we will introduce by producing EH tables
/* NOTE(review): heavily abridged — the declaration of the region
   iterator I, the catch iterator C, the switch statement, most case
   labels, and the tree-walk loop structure are missing from this
   view.  The visible tokens are kept byte-identical.  */
140 record_eh_tables (struct cgraph_node
*node
, struct function
*fun
)
/* Start the walk at the root of FUN's EH region tree.  */
144 i
= fun
->eh
->region_tree
;
153 case ERT_MUST_NOT_THROW
:
/* A try region: record the type lists of all its catch handlers.  */
159 for (c
= i
->u
.eh_try
.first_catch
; c
; c
= c
->next_catch
)
160 record_type_list (node
, c
->type_list
);
164 case ERT_ALLOWED_EXCEPTIONS
:
165 record_type_list (node
, i
->u
.allowed
.type_list
);
168 /* If there are sub-regions, process them. */
171 /* If there are peers, process them. */
172 else if (i
->next_peer
)
174 /* Otherwise, step back up the tree to the next peer. */
183 while (i
->next_peer
== NULL
);
189 /* Reset inlining information of all incoming call edges of NODE. */
/* NOTE(review): the return type, braces, and the condition guarding the
   first inline_failed assignment (a body-availability check, judging by
   CIF_BODY_NOT_AVAILABLE) are missing from this view.  */
192 reset_inline_failed (struct cgraph_node
*node
)
194 struct cgraph_edge
*e
;
/* For every caller edge, clear stale inlining state and classify why
   the call is (currently) not inlined.  */
196 for (e
= node
->callers
; e
; e
= e
->next_caller
)
198 e
->callee
->global
.inlined_to
= NULL
;
200 e
->inline_failed
= CIF_BODY_NOT_AVAILABLE
;
201 else if (node
->local
.redefined_extern_inline
)
202 e
->inline_failed
= CIF_REDEFINED_EXTERN_INLINE
;
203 else if (!node
->local
.inlinable
)
204 e
->inline_failed
= CIF_FUNCTION_NOT_INLINABLE
;
205 else if (e
->call_stmt_cannot_inline_p
)
206 e
->inline_failed
= CIF_MISMATCHED_ARGUMENTS
;
/* Fallback: inlinable, just not chosen by the heuristics yet.  */
208 e
->inline_failed
= CIF_FUNCTION_NOT_CONSIDERED
;
212 /* Computes the frequency of the call statement so that it can be stored in
213 cgraph_edge. BB is the basic block of the call statement. */
215 compute_call_stmt_bb_frequency (tree decl
, basic_block bb
)
/* Entry-block frequency of DECL's function: the normalization base.  */
217 int entry_freq
= ENTRY_BLOCK_PTR_FOR_FUNCTION
218 (DECL_STRUCT_FUNCTION (decl
))->frequency
;
219 int freq
= bb
->frequency
;
/* Without any profile information, use the neutral base frequency.  */
221 if (profile_status_for_function (DECL_STRUCT_FUNCTION (decl
)) == PROFILE_ABSENT
)
222 return CGRAPH_FREQ_BASE
;
/* NOTE(review): the guard for this adjustment (presumably a zero check
   on entry_freq, avoiding division by zero below) is missing from this
   view — TODO confirm.  */
225 entry_freq
= 1, freq
++;
/* Scale BB's frequency relative to the entry block, clamping at
   CGRAPH_FREQ_MAX.  */
227 freq
= freq
* CGRAPH_FREQ_BASE
/ entry_freq
;
228 if (freq
> CGRAPH_FREQ_MAX
)
229 freq
= CGRAPH_FREQ_MAX
;
234 /* Mark address taken in STMT. */
/* Callback for walk_stmt_load_store_addr_ops; DATA is the caller's
   cgraph node.  NOTE(review): the return type, braces, returns, and
   the trailing arguments of the ipa_record_reference calls are missing
   from this view.  */
237 mark_address (gimple stmt ATTRIBUTE_UNUSED
, tree addr
,
238 void *data ATTRIBUTE_UNUSED
)
240 if (TREE_CODE (addr
) == FUNCTION_DECL
)
/* Address of a function: mark it address-taken and record the
   reference from the calling function.  */
242 struct cgraph_node
*node
= cgraph_node (addr
);
243 cgraph_mark_address_taken_node (node
);
244 ipa_record_reference ((struct cgraph_node
*)data
, NULL
,
/* Otherwise look for a static/external variable at the base of the
   address expression.  */
250 addr
= get_base_address (addr
);
251 if (addr
&& TREE_CODE (addr
) == VAR_DECL
252 && (TREE_STATIC (addr
) || DECL_EXTERNAL (addr
)))
254 struct varpool_node
*vnode
= varpool_node (addr
);
/* Give the front end a chance to analyze the expression.  */
257 if (lang_hooks
.callgraph
.analyze_expr
)
258 lang_hooks
.callgraph
.analyze_expr (&addr
, &walk_subtrees
);
259 varpool_mark_needed_node (vnode
);
/* Redirect an extra-name alias to the underlying variable.  */
260 if (vnode
->alias
&& vnode
->extra_name
)
261 vnode
= vnode
->extra_name
;
262 ipa_record_reference ((struct cgraph_node
*)data
, NULL
,
271 /* Mark load of T. */
/* Callback for walk_stmt_load_store_addr_ops; records a load reference
   from the function in DATA to any static/external variable read.
   NOTE(review): return type, braces, returns, and the trailing
   arguments of ipa_record_reference are missing from this view.  */
274 mark_load (gimple stmt ATTRIBUTE_UNUSED
, tree t
,
275 void *data ATTRIBUTE_UNUSED
)
277 t
= get_base_address (t
);
278 if (t
&& TREE_CODE (t
) == VAR_DECL
279 && (TREE_STATIC (t
) || DECL_EXTERNAL (t
)))
281 struct varpool_node
*vnode
= varpool_node (t
);
/* Give the front end a chance to analyze the expression.  */
284 if (lang_hooks
.callgraph
.analyze_expr
)
285 lang_hooks
.callgraph
.analyze_expr (&t
, &walk_subtrees
);
286 varpool_mark_needed_node (vnode
);
/* Redirect an extra-name alias to the underlying variable.  */
287 if (vnode
->alias
&& vnode
->extra_name
)
288 vnode
= vnode
->extra_name
;
289 ipa_record_reference ((struct cgraph_node
*)data
, NULL
,
296 /* Mark store of T. */
/* Callback for walk_stmt_load_store_addr_ops; records an IPA_REF_STORE
   reference from the function in DATA to any static/external variable
   written.  NOTE(review): return type, braces, and returns are missing
   from this view.  */
299 mark_store (gimple stmt ATTRIBUTE_UNUSED
, tree t
,
300 void *data ATTRIBUTE_UNUSED
)
302 t
= get_base_address (t
);
303 if (t
&& TREE_CODE (t
) == VAR_DECL
304 && (TREE_STATIC (t
) || DECL_EXTERNAL (t
)))
306 struct varpool_node
*vnode
= varpool_node (t
);
/* Give the front end a chance to analyze the expression.  */
309 if (lang_hooks
.callgraph
.analyze_expr
)
310 lang_hooks
.callgraph
.analyze_expr (&t
, &walk_subtrees
);
311 varpool_mark_needed_node (vnode
);
/* Redirect an extra-name alias to the underlying variable.  */
312 if (vnode
->alias
&& vnode
->extra_name
)
313 vnode
= vnode
->extra_name
;
314 ipa_record_reference ((struct cgraph_node
*)data
, NULL
,
316 IPA_REF_STORE
, NULL
);
321 /* Create cgraph edges for function calls.
322 Also look for functions and variables having addresses taken. */
/* Pass entry point for the current function (cfun/current_function_decl).
   NOTE(review): extraction gaps — the return type, the basic_block BB
   loop (FOR_EACH_BB, judging by the uses of "bb" below), the locals
   DECL and IX, many braces, and the trailing arguments of several calls
   are missing from this view.  */
325 build_cgraph_edges (void)
328 struct cgraph_node
*node
= cgraph_node (current_function_decl
);
329 struct pointer_set_t
*visited_nodes
= pointer_set_create ();
330 gimple_stmt_iterator gsi
;
334 /* Create the callgraph edges and record the nodes referenced by the function.
338 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
340 gimple stmt
= gsi_stmt (gsi
);
/* Direct calls get a cgraph edge; calls with no known fndecl become
   indirect edges.  */
343 if (is_gimple_call (stmt
))
345 int freq
= compute_call_stmt_bb_frequency (current_function_decl
,
347 decl
= gimple_call_fndecl (stmt
);
349 cgraph_create_edge (node
, cgraph_node (decl
), stmt
,
353 cgraph_create_indirect_edge (node
, stmt
,
354 gimple_call_flags (stmt
),
/* Record every load, store, and address-taken operand of the
   statement via the mark_* callbacks above.  */
358 walk_stmt_load_store_addr_ops (stmt
, node
, mark_load
,
359 mark_store
, mark_address
);
/* OMP outlined child/copy functions are referenced only implicitly,
   so mark them needed explicitly.  */
360 if (gimple_code (stmt
) == GIMPLE_OMP_PARALLEL
361 && gimple_omp_parallel_child_fn (stmt
))
363 tree fn
= gimple_omp_parallel_child_fn (stmt
);
364 cgraph_mark_needed_node (cgraph_node (fn
));
366 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
)
368 tree fn
= gimple_omp_task_child_fn (stmt
);
370 cgraph_mark_needed_node (cgraph_node (fn
));
371 fn
= gimple_omp_task_copy_fn (stmt
);
373 cgraph_mark_needed_node (cgraph_node (fn
));
/* PHI arguments can also reference variables.  */
376 for (gsi
= gsi_start (phi_nodes (bb
)); !gsi_end_p (gsi
); gsi_next (&gsi
))
377 walk_stmt_load_store_addr_ops (gsi_stmt (gsi
), node
,
378 mark_load
, mark_store
, mark_address
);
381 /* Look for initializers of constant variables and private statics. */
382 FOR_EACH_LOCAL_DECL (cfun
, ix
, decl
)
383 if (TREE_CODE (decl
) == VAR_DECL
384 && (TREE_STATIC (decl
) && !DECL_EXTERNAL (decl
)))
385 varpool_finalize_decl (decl
);
386 record_eh_tables (node
, cfun
);
388 pointer_set_destroy (visited_nodes
);
392 struct gimple_opt_pass pass_build_cgraph_edges
=
/* Pass descriptor wiring build_cgraph_edges into the pass manager.
   NOTE(review): several initializer fields (pass type, gate, sub/next,
   tv_id, closing braces) are missing from this view.  */
396 "*build_cgraph_edges", /* name */
398 build_cgraph_edges
, /* execute */
401 0, /* static_pass_number */
403 PROP_cfg
, /* properties_required */
404 0, /* properties_provided */
405 0, /* properties_destroyed */
406 0, /* todo_flags_start */
407 0 /* todo_flags_finish */
411 /* Record references to functions and other variables present in the
412 initial value of DECL, a variable.
413 When ONLY_VARS is true, we mark needed only variables, not functions. */
/* NOTE(review): the return type and braces are missing from this
   view.  */
416 record_references_in_initializer (tree decl
, bool only_vars
)
/* visited_nodes lets walk_tree skip subtrees it has already seen.  */
418 struct pointer_set_t
*visited_nodes
= pointer_set_create ();
419 struct varpool_node
*node
= varpool_node (decl
);
420 struct record_reference_ctx ctx
= {false, NULL
};
/* Attribute every reference found to DECL's varpool node.  */
422 ctx
.varpool_node
= node
;
423 ctx
.only_vars
= only_vars
;
424 walk_tree (&DECL_INITIAL (decl
), record_reference
,
425 &ctx
, visited_nodes
);
426 pointer_set_destroy (visited_nodes
);
429 /* Rebuild cgraph edges for current function node. This needs to be run after
430 passes that don't update the cgraph. */
/* NOTE(review): extraction gaps — the return type, the basic_block BB
   loop, the local DECL, braces, the final return, and trailing call
   arguments are missing from this view.  */
433 rebuild_cgraph_edges (void)
436 struct cgraph_node
*node
= cgraph_node (current_function_decl
);
437 gimple_stmt_iterator gsi
;
/* Drop the stale outgoing edges and IPA references before
   reconstructing them from the current GIMPLE body.  */
439 cgraph_node_remove_callees (node
);
440 ipa_remove_all_references (&node
->ref_list
);
/* Refresh the node's execution count from the entry block.  */
442 node
->count
= ENTRY_BLOCK_PTR
->count
;
446 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
448 gimple stmt
= gsi_stmt (gsi
);
/* Same direct/indirect edge creation as in build_cgraph_edges.  */
451 if (is_gimple_call (stmt
))
453 int freq
= compute_call_stmt_bb_frequency (current_function_decl
,
455 decl
= gimple_call_fndecl (stmt
);
457 cgraph_create_edge (node
, cgraph_node (decl
), stmt
,
461 cgraph_create_indirect_edge (node
, stmt
,
462 gimple_call_flags (stmt
),
466 walk_stmt_load_store_addr_ops (stmt
, node
, mark_load
,
467 mark_store
, mark_address
);
/* PHI arguments can also reference variables.  */
470 for (gsi
= gsi_start (phi_nodes (bb
)); !gsi_end_p (gsi
); gsi_next (&gsi
))
471 walk_stmt_load_store_addr_ops (gsi_stmt (gsi
), node
,
472 mark_load
, mark_store
, mark_address
);
474 record_eh_tables (node
, cfun
);
/* A function being rebuilt must not still be marked as inlined into
   another node.  */
475 gcc_assert (!node
->global
.inlined_to
);
480 /* Rebuild cgraph edges for current function node. This needs to be run after
481 passes that don't update the cgraph. */
/* Like rebuild_cgraph_edges, but only the IPA references are rebuilt —
   note there is no cgraph_node_remove_callees and no edge creation
   here.  NOTE(review): return type, the BB loop, and braces are missing
   from this view.  */
484 cgraph_rebuild_references (void)
487 struct cgraph_node
*node
= cgraph_node (current_function_decl
);
488 gimple_stmt_iterator gsi
;
490 ipa_remove_all_references (&node
->ref_list
);
/* Refresh the node's execution count from the entry block.  */
492 node
->count
= ENTRY_BLOCK_PTR
->count
;
496 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
498 gimple stmt
= gsi_stmt (gsi
);
500 walk_stmt_load_store_addr_ops (stmt
, node
, mark_load
,
501 mark_store
, mark_address
);
/* PHI arguments can also reference variables.  */
504 for (gsi
= gsi_start (phi_nodes (bb
)); !gsi_end_p (gsi
); gsi_next (&gsi
))
505 walk_stmt_load_store_addr_ops (gsi_stmt (gsi
), node
,
506 mark_load
, mark_store
, mark_address
);
508 record_eh_tables (node
, cfun
);
511 struct gimple_opt_pass pass_rebuild_cgraph_edges
=
/* Pass descriptor wiring rebuild_cgraph_edges into the pass manager.
   NOTE(review): several initializer fields (pass type, gate, sub/next,
   tv_id, closing braces) are missing from this view.  */
515 "*rebuild_cgraph_edges", /* name */
517 rebuild_cgraph_edges
, /* execute */
520 0, /* static_pass_number */
522 PROP_cfg
, /* properties_required */
523 0, /* properties_provided */
524 0, /* properties_destroyed */
525 0, /* todo_flags_start */
526 0, /* todo_flags_finish */
/* Remove all outgoing call edges of the current function's cgraph node.
   NOTE(review): the return type, braces, and the function's return
   statement are missing from this view.  */
532 remove_cgraph_callee_edges (void)
534 cgraph_node_remove_callees (cgraph_node (current_function_decl
));
538 struct gimple_opt_pass pass_remove_cgraph_callee_edges
=
542 "*remove_cgraph_callee_edges", /* name */
544 remove_cgraph_callee_edges
, /* execute */
547 0, /* static_pass_number */
549 0, /* properties_required */
550 0, /* properties_provided */
551 0, /* properties_destroyed */
552 0, /* todo_flags_start */
553 0, /* todo_flags_finish */