gcc/cgraphbuild.c

/* Callgraph construction.
   Copyright (C) 2003-2013 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "pointer-set.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "langhooks.h"
#include "intl.h"
#include "tree-pass.h"
#include "ipa-utils.h"
#include "except.h"
#include "ipa-inline.h"

/* Context of record_reference.  */
struct record_reference_ctx
{
  bool only_vars;
  class varpool_node *varpool_node;
};

/* Walk tree and record all calls and references to functions/variables.
   Called via walk_tree: TP is pointer to tree to be examined.
   When DATA is non-null, record references to callgraph.  */

static tree
record_reference (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  tree decl;
  struct record_reference_ctx *ctx = (struct record_reference_ctx *)data;

  t = canonicalize_constructor_val (t, NULL);
  if (!t)
    t = *tp;
  else if (t != *tp)
    *tp = t;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case FUNCTION_DECL:
      gcc_unreachable ();
      break;

    case FDESC_EXPR:
    case ADDR_EXPR:
      /* Record dereferences to the functions.  This makes the
         functions reachable unconditionally.  */
      decl = get_base_var (*tp);
      if (TREE_CODE (decl) == FUNCTION_DECL)
        {
          struct cgraph_node *node = cgraph_get_create_node (decl);
          if (!ctx->only_vars)
            cgraph_mark_address_taken_node (node);
          ipa_record_reference (ctx->varpool_node,
                                node,
                                IPA_REF_ADDR, NULL);
        }

      if (TREE_CODE (decl) == VAR_DECL)
        {
          varpool_node *vnode = varpool_node_for_decl (decl);
          ipa_record_reference (ctx->varpool_node,
                                vnode,
                                IPA_REF_ADDR, NULL);
        }
      *walk_subtrees = 0;
      break;

    default:
      /* Save some cycles by not walking types and declarations as we
         won't find anything useful there anyway.  */
      if (IS_TYPE_OR_DECL_P (*tp))
        {
          *walk_subtrees = 0;
          break;
        }
      break;
    }

  return NULL_TREE;
}

/* Record references to typeinfos in the type list LIST.  */

static void
record_type_list (struct cgraph_node *node, tree list)
{
  for (; list; list = TREE_CHAIN (list))
    {
      tree type = TREE_VALUE (list);

      if (TYPE_P (type))
        type = lookup_type_for_runtime (type);
      STRIP_NOPS (type);
      if (TREE_CODE (type) == ADDR_EXPR)
        {
          type = TREE_OPERAND (type, 0);
          if (TREE_CODE (type) == VAR_DECL)
            {
              varpool_node *vnode = varpool_node_for_decl (type);
              ipa_record_reference (node,
                                    vnode,
                                    IPA_REF_ADDR, NULL);
            }
        }
    }
}

/* Record all references we will introduce by producing EH tables
   for NODE.  */

static void
record_eh_tables (struct cgraph_node *node, struct function *fun)
{
  eh_region i;

  if (DECL_FUNCTION_PERSONALITY (node->decl))
    {
      tree per_decl = DECL_FUNCTION_PERSONALITY (node->decl);
      struct cgraph_node *per_node = cgraph_get_create_node (per_decl);

      ipa_record_reference (node, per_node, IPA_REF_ADDR, NULL);
      cgraph_mark_address_taken_node (per_node);
    }

  i = fun->eh->region_tree;
  if (!i)
    return;
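
  /* Walk the whole EH region tree and record every type referenced by a
     catch handler or by an allowed-exceptions list.  */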
  while (1)
    {
      switch (i->type)
        {
        case ERT_CLEANUP:
        case ERT_MUST_NOT_THROW:
          break;

        case ERT_TRY:
          {
            eh_catch c;
            for (c = i->u.eh_try.first_catch; c; c = c->next_catch)
              record_type_list (node, c->type_list);
          }
          break;

        case ERT_ALLOWED_EXCEPTIONS:
          record_type_list (node, i->u.allowed.type_list);
          break;
        }
      /* If there are sub-regions, process them.  */
      if (i->inner)
        i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
        i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
        {
          do
            {
              i = i->outer;
              if (i == NULL)
                return;
            }
          while (i->next_peer == NULL);
          i = i->next_peer;
        }
    }
}

/* Computes the frequency of the call statement so that it can be stored in
   cgraph_edge.  BB is the basic block of the call statement.  */
int
compute_call_stmt_bb_frequency (tree decl, basic_block bb)
{
  int entry_freq = ENTRY_BLOCK_PTR_FOR_FN
                     (DECL_STRUCT_FUNCTION (decl))->frequency;
  int freq = bb->frequency;

  if (profile_status_for_fn (DECL_STRUCT_FUNCTION (decl)) == PROFILE_ABSENT)
    return CGRAPH_FREQ_BASE;

  if (!entry_freq)
    entry_freq = 1, freq++;
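
  /* Scale so that a call in a block exactly as frequent as the entry block
     yields CGRAPH_FREQ_BASE; e.g. a block half as frequent as the entry
     block yields CGRAPH_FREQ_BASE / 2.  The result is capped at
     CGRAPH_FREQ_MAX.  */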
  freq = freq * CGRAPH_FREQ_BASE / entry_freq;
  if (freq > CGRAPH_FREQ_MAX)
    freq = CGRAPH_FREQ_MAX;

  return freq;
}

/* Mark address taken in STMT.  */

static bool
mark_address (gimple stmt, tree addr, void *data)
{
  addr = get_base_address (addr);
  if (TREE_CODE (addr) == FUNCTION_DECL)
    {
      struct cgraph_node *node = cgraph_get_create_node (addr);
      cgraph_mark_address_taken_node (node);
      ipa_record_reference ((symtab_node *)data,
                            node,
                            IPA_REF_ADDR, stmt);
    }
  else if (addr && TREE_CODE (addr) == VAR_DECL
           && (TREE_STATIC (addr) || DECL_EXTERNAL (addr)))
    {
      varpool_node *vnode = varpool_node_for_decl (addr);

      ipa_record_reference ((symtab_node *)data,
                            vnode,
                            IPA_REF_ADDR, stmt);
    }

  return false;
}

/* Mark load of T.  */

static bool
mark_load (gimple stmt, tree t, void *data)
{
  t = get_base_address (t);
  if (t && TREE_CODE (t) == FUNCTION_DECL)
    {
      /* ??? This can happen on platforms with descriptors when these are
         directly manipulated in the code.  Pretend that it's an address.  */
      struct cgraph_node *node = cgraph_get_create_node (t);
      cgraph_mark_address_taken_node (node);
      ipa_record_reference ((symtab_node *)data,
                            node,
                            IPA_REF_ADDR, stmt);
    }
  else if (t && TREE_CODE (t) == VAR_DECL
           && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
    {
      varpool_node *vnode = varpool_node_for_decl (t);

      ipa_record_reference ((symtab_node *)data,
                            vnode,
                            IPA_REF_LOAD, stmt);
    }
  return false;
}

/* Mark store of T.  */

static bool
mark_store (gimple stmt, tree t, void *data)
{
  t = get_base_address (t);
  if (t && TREE_CODE (t) == VAR_DECL
      && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
    {
      varpool_node *vnode = varpool_node_for_decl (t);

      ipa_record_reference ((symtab_node *)data,
                            vnode,
                            IPA_REF_STORE, stmt);
    }
  return false;
}

/* Record all references from NODE that are taken in statement STMT.  */
void
ipa_record_stmt_references (struct cgraph_node *node, gimple stmt)
{
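  /* NODE is passed through as the DATA argument and cast back to a
     symtab_node by the mark_* callbacks above.  */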
  walk_stmt_load_store_addr_ops (stmt, node, mark_load, mark_store,
                                 mark_address);
}

/* Create cgraph edges for function calls.
   Also look for functions and variables having addresses taken.  */

static unsigned int
build_cgraph_edges (void)
{
  basic_block bb;
  struct cgraph_node *node = cgraph_get_node (current_function_decl);
  struct pointer_set_t *visited_nodes = pointer_set_create ();
  gimple_stmt_iterator gsi;
  tree decl;
  unsigned ix;

  /* Create the callgraph edges and record the nodes referenced by the
     function body.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          tree decl;

          if (is_gimple_debug (stmt))
            continue;

          if (is_gimple_call (stmt))
            {
              int freq = compute_call_stmt_bb_frequency (current_function_decl,
                                                         bb);
              decl = gimple_call_fndecl (stmt);
              if (decl)
                cgraph_create_edge (node, cgraph_get_create_node (decl),
                                    stmt, bb->count, freq);
              else if (gimple_call_internal_p (stmt))
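                /* Internal function calls are not represented in the
                   callgraph; no edge is created for them.  */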
                ;
              else
                cgraph_create_indirect_edge (node, stmt,
                                             gimple_call_flags (stmt),
                                             bb->count, freq);
            }
          ipa_record_stmt_references (node, stmt);
          if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
              && gimple_omp_parallel_child_fn (stmt))
            {
              tree fn = gimple_omp_parallel_child_fn (stmt);
              ipa_record_reference (node,
                                    cgraph_get_create_node (fn),
                                    IPA_REF_ADDR, stmt);
            }
          if (gimple_code (stmt) == GIMPLE_OMP_TASK)
            {
              tree fn = gimple_omp_task_child_fn (stmt);
              if (fn)
                ipa_record_reference (node,
                                      cgraph_get_create_node (fn),
                                      IPA_REF_ADDR, stmt);
              fn = gimple_omp_task_copy_fn (stmt);
              if (fn)
                ipa_record_reference (node,
                                      cgraph_get_create_node (fn),
                                      IPA_REF_ADDR, stmt);
            }
        }
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        ipa_record_stmt_references (node, gsi_stmt (gsi));
    }

  /* Look for initializers of constant variables and private statics.  */
  FOR_EACH_LOCAL_DECL (cfun, ix, decl)
    if (TREE_CODE (decl) == VAR_DECL
        && (TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
        && !DECL_HAS_VALUE_EXPR_P (decl))
      varpool_finalize_decl (decl);
  record_eh_tables (node, cfun);

  pointer_set_destroy (visited_nodes);
  return 0;
}

namespace {

const pass_data pass_data_build_cgraph_edges =
{
  GIMPLE_PASS, /* type */
  "*build_cgraph_edges", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cgraph_edges : public gimple_opt_pass
{
public:
  pass_build_cgraph_edges (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cgraph_edges, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return build_cgraph_edges (); }

}; // class pass_build_cgraph_edges

} // anon namespace

gimple_opt_pass *
make_pass_build_cgraph_edges (gcc::context *ctxt)
{
  return new pass_build_cgraph_edges (ctxt);
}

/* Record references to functions and other variables present in the
   initial value of DECL, a variable.
   When ONLY_VARS is true, we mark needed only variables, not functions.  */

void
record_references_in_initializer (tree decl, bool only_vars)
{
  struct pointer_set_t *visited_nodes = pointer_set_create ();
  varpool_node *node = varpool_node_for_decl (decl);
  struct record_reference_ctx ctx = {false, NULL};

  ctx.varpool_node = node;
  ctx.only_vars = only_vars;
  walk_tree (&DECL_INITIAL (decl), record_reference,
             &ctx, visited_nodes);
  pointer_set_destroy (visited_nodes);
}

/* Rebuild cgraph edges for current function node.  This needs to be run after
   passes that don't update the cgraph.  */

unsigned int
rebuild_cgraph_edges (void)
{
  basic_block bb;
  struct cgraph_node *node = cgraph_get_node (current_function_decl);
  gimple_stmt_iterator gsi;

  cgraph_node_remove_callees (node);
  ipa_remove_all_references (&node->ref_list);

  node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;

  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          tree decl;

          if (is_gimple_call (stmt))
            {
              int freq = compute_call_stmt_bb_frequency (current_function_decl,
                                                         bb);
              decl = gimple_call_fndecl (stmt);
              if (decl)
                cgraph_create_edge (node, cgraph_get_create_node (decl), stmt,
                                    bb->count, freq);
              else if (gimple_call_internal_p (stmt))
                ;
              else
                cgraph_create_indirect_edge (node, stmt,
                                             gimple_call_flags (stmt),
                                             bb->count, freq);
            }
          ipa_record_stmt_references (node, stmt);
        }
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        ipa_record_stmt_references (node, gsi_stmt (gsi));
    }
  record_eh_tables (node, cfun);
  gcc_assert (!node->global.inlined_to);

  return 0;
}

/* Rebuild cgraph references for the current function node, keeping
   speculative references.  This needs to be run after passes that don't
   update the cgraph.  */

void
cgraph_rebuild_references (void)
{
  basic_block bb;
  struct cgraph_node *node = cgraph_get_node (current_function_decl);
  gimple_stmt_iterator gsi;
  struct ipa_ref *ref;
  int i;

  /* Keep speculative references for further cgraph edge expansion.  */
  for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref);)
    if (!ref->speculative)
      ipa_remove_reference (ref);
    else
      i++;

  node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;

  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        ipa_record_stmt_references (node, gsi_stmt (gsi));
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        ipa_record_stmt_references (node, gsi_stmt (gsi));
    }
  record_eh_tables (node, cfun);
}

namespace {

const pass_data pass_data_rebuild_cgraph_edges =
{
  GIMPLE_PASS, /* type */
  "*rebuild_cgraph_edges", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_CGRAPH, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_rebuild_cgraph_edges : public gimple_opt_pass
{
public:
  pass_rebuild_cgraph_edges (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_rebuild_cgraph_edges, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_rebuild_cgraph_edges (m_ctxt); }
  unsigned int execute () { return rebuild_cgraph_edges (); }

}; // class pass_rebuild_cgraph_edges

} // anon namespace

gimple_opt_pass *
make_pass_rebuild_cgraph_edges (gcc::context *ctxt)
{
  return new pass_rebuild_cgraph_edges (ctxt);
}
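
/* Remove all callee edges and all references recorded for the current
   function node.  */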
static unsigned int
remove_cgraph_callee_edges (void)
{
  struct cgraph_node *node = cgraph_get_node (current_function_decl);
  cgraph_node_remove_callees (node);
  ipa_remove_all_references (&node->ref_list);
  return 0;
}

namespace {

const pass_data pass_data_remove_cgraph_callee_edges =
{
  GIMPLE_PASS, /* type */
  "*remove_cgraph_callee_edges", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_remove_cgraph_callee_edges : public gimple_opt_pass
{
public:
  pass_remove_cgraph_callee_edges (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_remove_cgraph_callee_edges, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () {
    return new pass_remove_cgraph_callee_edges (m_ctxt);
  }
  unsigned int execute () { return remove_cgraph_callee_edges (); }

}; // class pass_remove_cgraph_callee_edges

} // anon namespace

gimple_opt_pass *
make_pass_remove_cgraph_callee_edges (gcc::context *ctxt)
{
  return new pass_remove_cgraph_callee_edges (ctxt);
}