Merge trunk version 190524 into gupc branch.
[official-gcc.git] / gcc / cgraphbuild.c
blobce8f2ee080a798f8fe1a1efab09aa4e8dedbeef0
1 /* Callgraph construction.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "tree-flow.h"
28 #include "langhooks.h"
29 #include "pointer-set.h"
30 #include "cgraph.h"
31 #include "intl.h"
32 #include "gimple.h"
33 #include "tree-pass.h"
34 #include "ipa-utils.h"
35 #include "except.h"
36 #include "ipa-inline.h"
/* Context of record_reference.  Passed through walk_tree via its DATA
   pointer while scanning a variable initializer.  */
struct record_reference_ctx
{
  /* When true, record only references to variables, not functions.  */
  bool only_vars;
  /* The variable whose initializer is being walked; all recorded
     references originate from this varpool node.  */
  struct varpool_node *varpool_node;
};
/* Walk tree and record all calls and references to functions/variables.
   Called via walk_tree: TP is pointer to tree to be examined.
   When DATA is non-null, record references to callgraph.
   Returns NULL_TREE always (never aborts the walk).  */

static tree
record_reference (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  tree decl;
  struct record_reference_ctx *ctx = (struct record_reference_ctx *)data;

  /* Fold the value to canonical form; when folding produced a different
     tree, write it back so later consumers see the canonical value.  */
  t = canonicalize_constructor_val (t, NULL);
  if (!t)
    t = *tp;
  else if (t != *tp)
    *tp = t;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case FUNCTION_DECL:
      /* Bare decls should have been wrapped (e.g. in ADDR_EXPR) by
	 canonicalization; reaching here indicates an upstream bug.  */
      gcc_unreachable ();
      break;

    case FDESC_EXPR:
    case ADDR_EXPR:
      /* Record dereferences to the functions.  This makes the
	 functions reachable unconditionally.  */
      decl = get_base_var (*tp);
      if (TREE_CODE (decl) == FUNCTION_DECL)
	{
	  struct cgraph_node *node = cgraph_get_create_node (decl);
	  /* In only_vars mode we still record the reference but do not
	     mark the function's address as taken.  */
	  if (!ctx->only_vars)
	    cgraph_mark_address_taken_node (node);
	  ipa_record_reference ((symtab_node)ctx->varpool_node,
				(symtab_node)node,
				IPA_REF_ADDR, NULL);
	}

      if (TREE_CODE (decl) == VAR_DECL)
	{
	  struct varpool_node *vnode = varpool_node (decl);
	  ipa_record_reference ((symtab_node)ctx->varpool_node,
				(symtab_node)vnode,
				IPA_REF_ADDR, NULL);
	}
      /* The base decl has been handled; no need to descend further.  */
      *walk_subtrees = 0;
      break;

    default:
      /* Save some cycles by not walking types and declaration as we
	 won't find anything useful there anyway.  */
      if (IS_TYPE_OR_DECL_P (*tp))
	{
	  *walk_subtrees = 0;
	  break;
	}
      break;
    }

  return NULL_TREE;
}
109 /* Record references to typeinfos in the type list LIST. */
111 static void
112 record_type_list (struct cgraph_node *node, tree list)
114 for (; list; list = TREE_CHAIN (list))
116 tree type = TREE_VALUE (list);
118 if (TYPE_P (type))
119 type = lookup_type_for_runtime (type);
120 STRIP_NOPS (type);
121 if (TREE_CODE (type) == ADDR_EXPR)
123 type = TREE_OPERAND (type, 0);
124 if (TREE_CODE (type) == VAR_DECL)
126 struct varpool_node *vnode = varpool_node (type);
127 ipa_record_reference ((symtab_node)node,
128 (symtab_node)vnode,
129 IPA_REF_ADDR, NULL);
/* Record all references we will introduce by producing EH tables
   for NODE, whose body is FUN.  Covers the personality routine and all
   typeinfo objects mentioned in catch / allowed-exception lists.  */

static void
record_eh_tables (struct cgraph_node *node, struct function *fun)
{
  eh_region i;

  if (DECL_FUNCTION_PERSONALITY (node->symbol.decl))
    {
      struct cgraph_node *per_node;

      /* The personality routine is referenced from the unwind tables,
	 so keep it and treat its address as taken.  */
      per_node = cgraph_get_create_node (DECL_FUNCTION_PERSONALITY (node->symbol.decl));
      ipa_record_reference ((symtab_node)node, (symtab_node)per_node, IPA_REF_ADDR, NULL);
      cgraph_mark_address_taken_node (per_node);
    }

  i = fun->eh->region_tree;
  if (!i)
    return;

  /* Depth-first walk over the EH region tree without recursion.  */
  while (1)
    {
      switch (i->type)
	{
	case ERT_CLEANUP:
	case ERT_MUST_NOT_THROW:
	  /* These regions reference no typeinfos.  */
	  break;

	case ERT_TRY:
	  {
	    eh_catch c;
	    for (c = i->u.eh_try.first_catch; c; c = c->next_catch)
	      record_type_list (node, c->type_list);
	  }
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  record_type_list (node, i->u.allowed.type_list);
	  break;
	}
      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do
	    {
	      i = i->outer;
	      if (i == NULL)
		return;
	    }
	  while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}
197 /* Computes the frequency of the call statement so that it can be stored in
198 cgraph_edge. BB is the basic block of the call statement. */
200 compute_call_stmt_bb_frequency (tree decl, basic_block bb)
202 int entry_freq = ENTRY_BLOCK_PTR_FOR_FUNCTION
203 (DECL_STRUCT_FUNCTION (decl))->frequency;
204 int freq = bb->frequency;
206 if (profile_status_for_function (DECL_STRUCT_FUNCTION (decl)) == PROFILE_ABSENT)
207 return CGRAPH_FREQ_BASE;
209 if (!entry_freq)
210 entry_freq = 1, freq++;
212 freq = freq * CGRAPH_FREQ_BASE / entry_freq;
213 if (freq > CGRAPH_FREQ_MAX)
214 freq = CGRAPH_FREQ_MAX;
216 return freq;
219 /* Mark address taken in STMT. */
221 static bool
222 mark_address (gimple stmt, tree addr, void *data)
224 addr = get_base_address (addr);
225 if (TREE_CODE (addr) == FUNCTION_DECL)
227 struct cgraph_node *node = cgraph_get_create_node (addr);
228 cgraph_mark_address_taken_node (node);
229 ipa_record_reference ((symtab_node)data,
230 (symtab_node)node,
231 IPA_REF_ADDR, stmt);
233 else if (addr && TREE_CODE (addr) == VAR_DECL
234 && (TREE_STATIC (addr) || DECL_EXTERNAL (addr)))
236 struct varpool_node *vnode = varpool_node (addr);
238 ipa_record_reference ((symtab_node)data,
239 (symtab_node)vnode,
240 IPA_REF_ADDR, stmt);
243 return false;
246 /* Mark load of T. */
248 static bool
249 mark_load (gimple stmt, tree t, void *data)
251 t = get_base_address (t);
252 if (t && TREE_CODE (t) == FUNCTION_DECL)
254 /* ??? This can happen on platforms with descriptors when these are
255 directly manipulated in the code. Pretend that it's an address. */
256 struct cgraph_node *node = cgraph_get_create_node (t);
257 cgraph_mark_address_taken_node (node);
258 ipa_record_reference ((symtab_node)data,
259 (symtab_node)node,
260 IPA_REF_ADDR, stmt);
262 else if (t && TREE_CODE (t) == VAR_DECL
263 && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
265 struct varpool_node *vnode = varpool_node (t);
267 ipa_record_reference ((symtab_node)data,
268 (symtab_node)vnode,
269 IPA_REF_LOAD, stmt);
271 return false;
274 /* Mark store of T. */
276 static bool
277 mark_store (gimple stmt, tree t, void *data)
279 t = get_base_address (t);
280 if (t && TREE_CODE (t) == VAR_DECL
281 && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
283 struct varpool_node *vnode = varpool_node (t);
285 ipa_record_reference ((symtab_node)data,
286 (symtab_node)vnode,
287 IPA_REF_STORE, stmt);
289 return false;
292 /* Create cgraph edges for function calls.
293 Also look for functions and variables having addresses taken. */
295 static unsigned int
296 build_cgraph_edges (void)
298 basic_block bb;
299 struct cgraph_node *node = cgraph_get_node (current_function_decl);
300 struct pointer_set_t *visited_nodes = pointer_set_create ();
301 gimple_stmt_iterator gsi;
302 tree decl;
303 unsigned ix;
305 /* Create the callgraph edges and record the nodes referenced by the function.
306 body. */
307 FOR_EACH_BB (bb)
309 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
311 gimple stmt = gsi_stmt (gsi);
312 tree decl;
314 if (is_gimple_call (stmt))
316 int freq = compute_call_stmt_bb_frequency (current_function_decl,
317 bb);
318 decl = gimple_call_fndecl (stmt);
319 if (decl)
320 cgraph_create_edge (node, cgraph_get_create_node (decl),
321 stmt, bb->count, freq);
322 else
323 cgraph_create_indirect_edge (node, stmt,
324 gimple_call_flags (stmt),
325 bb->count, freq);
327 walk_stmt_load_store_addr_ops (stmt, node, mark_load,
328 mark_store, mark_address);
329 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
330 && gimple_omp_parallel_child_fn (stmt))
332 tree fn = gimple_omp_parallel_child_fn (stmt);
333 ipa_record_reference ((symtab_node)node,
334 (symtab_node)cgraph_get_create_node (fn),
335 IPA_REF_ADDR, stmt);
337 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
339 tree fn = gimple_omp_task_child_fn (stmt);
340 if (fn)
341 ipa_record_reference ((symtab_node)node,
342 (symtab_node) cgraph_get_create_node (fn),
343 IPA_REF_ADDR, stmt);
344 fn = gimple_omp_task_copy_fn (stmt);
345 if (fn)
346 ipa_record_reference ((symtab_node)node,
347 (symtab_node)cgraph_get_create_node (fn),
348 IPA_REF_ADDR, stmt);
351 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
352 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), node,
353 mark_load, mark_store, mark_address);
356 /* Look for initializers of constant variables and private statics. */
357 FOR_EACH_LOCAL_DECL (cfun, ix, decl)
358 if (TREE_CODE (decl) == VAR_DECL
359 && (TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
360 && !DECL_HAS_VALUE_EXPR_P (decl))
361 varpool_finalize_decl (decl);
362 record_eh_tables (node, cfun);
364 pointer_set_destroy (visited_nodes);
365 return 0;
/* Pass descriptor for the initial callgraph-edge construction pass; run
   once per function, requires a CFG.  */
struct gimple_opt_pass pass_build_cgraph_edges =
{
 {
  GIMPLE_PASS,
  "*build_cgraph_edges",		/* name */
  NULL,					/* gate */
  build_cgraph_edges,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 }
};
387 /* Record references to functions and other variables present in the
388 initial value of DECL, a variable.
389 When ONLY_VARS is true, we mark needed only variables, not functions. */
391 void
392 record_references_in_initializer (tree decl, bool only_vars)
394 struct pointer_set_t *visited_nodes = pointer_set_create ();
395 struct varpool_node *node = varpool_node (decl);
396 struct record_reference_ctx ctx = {false, NULL};
398 ctx.varpool_node = node;
399 ctx.only_vars = only_vars;
400 walk_tree (&DECL_INITIAL (decl), record_reference,
401 &ctx, visited_nodes);
402 pointer_set_destroy (visited_nodes);
/* Rebuild cgraph edges for current function node.  This needs to be run after
   passes that don't update the cgraph.  Returns 0 (no TODO flags).  */

unsigned int
rebuild_cgraph_edges (void)
{
  basic_block bb;
  struct cgraph_node *node = cgraph_get_node (current_function_decl);
  gimple_stmt_iterator gsi;

  /* Drop the stale edges and references; they are recreated from the
     current function body below.  */
  cgraph_node_remove_callees (node);
  ipa_remove_all_references (&node->symbol.ref_list);

  node->count = ENTRY_BLOCK_PTR->count;

  FOR_EACH_BB (bb)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree decl;

	  if (is_gimple_call (stmt))
	    {
	      int freq = compute_call_stmt_bb_frequency (current_function_decl,
							 bb);
	      decl = gimple_call_fndecl (stmt);
	      if (decl)
		cgraph_create_edge (node, cgraph_get_create_node (decl), stmt,
				    bb->count, freq);
	      else
		cgraph_create_indirect_edge (node, stmt,
					     gimple_call_flags (stmt),
					     bb->count, freq);
	    }
	  walk_stmt_load_store_addr_ops (stmt, node, mark_load,
					 mark_store, mark_address);
	}
      /* PHI arguments can also reference variables.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	walk_stmt_load_store_addr_ops (gsi_stmt (gsi), node,
				       mark_load, mark_store, mark_address);
    }
  record_eh_tables (node, cfun);
  /* Rebuilding is only meaningful on a toplevel (non-inlined) node.  */
  gcc_assert (!node->global.inlined_to);

  return 0;
}
/* Rebuild the IPA references (loads, stores, address-taken) of the current
   function node; unlike rebuild_cgraph_edges, call edges are left intact.
   This needs to be run after passes that don't update them.  */

void
cgraph_rebuild_references (void)
{
  basic_block bb;
  struct cgraph_node *node = cgraph_get_node (current_function_decl);
  gimple_stmt_iterator gsi;

  /* Drop stale references; they are recreated from the body below.  */
  ipa_remove_all_references (&node->symbol.ref_list);

  node->count = ENTRY_BLOCK_PTR->count;

  FOR_EACH_BB (bb)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);

	  walk_stmt_load_store_addr_ops (stmt, node, mark_load,
					 mark_store, mark_address);
	}
      /* PHI arguments can also reference variables.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	walk_stmt_load_store_addr_ops (gsi_stmt (gsi), node,
				       mark_load, mark_store, mark_address);
    }
  record_eh_tables (node, cfun);
}
/* Pass descriptor for rebuilding callgraph edges after cgraph-unaware
   passes; requires a CFG, timed under TV_CGRAPH.  */
struct gimple_opt_pass pass_rebuild_cgraph_edges =
{
 {
  GIMPLE_PASS,
  "*rebuild_cgraph_edges",		/* name */
  NULL,					/* gate */
  rebuild_cgraph_edges,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_CGRAPH,				/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0,					/* todo_flags_finish */
 }
};
505 static unsigned int
506 remove_cgraph_callee_edges (void)
508 cgraph_node_remove_callees (cgraph_get_node (current_function_decl));
509 return 0;
512 struct gimple_opt_pass pass_remove_cgraph_callee_edges =
515 GIMPLE_PASS,
516 "*remove_cgraph_callee_edges", /* name */
517 NULL, /* gate */
518 remove_cgraph_callee_edges, /* execute */
519 NULL, /* sub */
520 NULL, /* next */
521 0, /* static_pass_number */
522 TV_NONE, /* tv_id */
523 0, /* properties_required */
524 0, /* properties_provided */
525 0, /* properties_destroyed */
526 0, /* todo_flags_start */
527 0, /* todo_flags_finish */