/* Callgraph construction.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "tree-flow.h"
28 #include "langhooks.h"
29 #include "pointer-set.h"
30 #include "cgraph.h"
31 #include "intl.h"
32 #include "gimple.h"
33 #include "toplev.h"
34 #include "gcov-io.h"
35 #include "coverage.h"
36 #include "tree-pass.h"
37 #include "ipa-utils.h"
38 #include "except.h"
39 #include "l-ipo.h"
/* Context of record_reference.  */
struct record_reference_ctx
{
  bool only_vars;
  struct varpool_node *varpool_node;
};

/* Walk tree and record all calls and references to functions/variables.
   Called via walk_tree: TP is pointer to tree to be examined.
   When DATA is non-null, record references to callgraph.  */

static tree
record_reference (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  tree decl;
  struct record_reference_ctx *ctx = (struct record_reference_ctx *)data;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case FUNCTION_DECL:
      gcc_unreachable ();
      break;

    case FDESC_EXPR:
    case ADDR_EXPR:
      /* Record dereferences to the functions.  This makes the
         functions reachable unconditionally.  */
      decl = get_base_var (*tp);
      if (TREE_CODE (decl) == FUNCTION_DECL)
        {
          if (!ctx->only_vars)
            cgraph_mark_address_taken_node (cgraph_node (decl));
          ipa_record_reference (NULL, ctx->varpool_node,
                                cgraph_node (decl), NULL,
                                IPA_REF_ADDR, NULL);
        }

      if (TREE_CODE (decl) == VAR_DECL)
        {
          struct varpool_node *vnode = varpool_node (decl);
          if (lang_hooks.callgraph.analyze_expr)
            lang_hooks.callgraph.analyze_expr (&decl, walk_subtrees);
          varpool_mark_needed_node (vnode);
          if (vnode->alias && vnode->extra_name)
            vnode = vnode->extra_name;
          ipa_record_reference (NULL, ctx->varpool_node,
                                NULL, vnode,
                                IPA_REF_ADDR, NULL);
        }
      *walk_subtrees = 0;
      break;

    default:
      /* Save some cycles by not walking types and declarations, as we
         won't find anything useful there anyway.  */
      if (IS_TYPE_OR_DECL_P (*tp))
        {
          *walk_subtrees = 0;
          break;
        }

      if ((unsigned int) TREE_CODE (t) >= LAST_AND_UNUSED_TREE_CODE)
        return lang_hooks.callgraph.analyze_expr (tp, walk_subtrees);
      break;
    }

  return NULL_TREE;
}

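/* For illustration (hypothetical declarations): given a static
   initializer such as

     int (*fp) (void) = &foo;
     int *ip = &some_global;

   walking the initializer reaches the ADDR_EXPRs; `foo' is marked
   address-taken in the callgraph (unless only_vars is set) and
   `some_global' is marked needed in the varpool, each with an
   IPA_REF_ADDR reference from the initialized variable's varpool
   node.  */
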
/* Record references to typeinfos in the type list LIST.  */

static void
record_type_list (struct cgraph_node *node, tree list)
{
  for (; list; list = TREE_CHAIN (list))
    {
      tree type = TREE_VALUE (list);

      if (TYPE_P (type))
        type = lookup_type_for_runtime (type);
      STRIP_NOPS (type);
      if (TREE_CODE (type) == ADDR_EXPR)
        {
          type = TREE_OPERAND (type, 0);
          if (TREE_CODE (type) == VAR_DECL)
            {
              struct varpool_node *vnode = varpool_node (type);
              varpool_mark_needed_node (vnode);
              ipa_record_reference (node, NULL,
                                    NULL, vnode,
                                    IPA_REF_ADDR, NULL);
            }
        }
    }
}

/* Record all references we will introduce by producing EH tables
   for NODE.  */

static void
record_eh_tables (struct cgraph_node *node, struct function *fun)
{
  eh_region i;

  if (DECL_FUNCTION_PERSONALITY (node->decl))
    ipa_record_reference (node, NULL,
                          cgraph_node (DECL_FUNCTION_PERSONALITY (node->decl)),
                          NULL, IPA_REF_ADDR, NULL);

  i = fun->eh->region_tree;
  if (!i)
    return;

  while (1)
    {
      switch (i->type)
        {
        case ERT_CLEANUP:
        case ERT_MUST_NOT_THROW:
          break;

        case ERT_TRY:
          {
            eh_catch c;
            for (c = i->u.eh_try.first_catch; c; c = c->next_catch)
              record_type_list (node, c->type_list);
          }
          break;

        case ERT_ALLOWED_EXCEPTIONS:
          record_type_list (node, i->u.allowed.type_list);
          break;
        }

      /* If there are sub-regions, process them.  */
      if (i->inner)
        i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
        i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
        {
          do
            {
              i = i->outer;
              if (i == NULL)
                return;
            }
          while (i->next_peer == NULL);
          i = i->next_peer;
        }
    }
}

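/* For illustration: a function body containing

     try { ... } catch (SomeType &) { ... }

   produces an ERT_TRY region whose catch handler lists SomeType's
   runtime type info.  The walk above visits every region in the tree
   (inner regions first, then peers, then back out) and records an
   IPA_REF_ADDR to each such typeinfo variable, keeping it alive for
   EH table emission.  (`SomeType' is a hypothetical type.)  */
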
/* Reset inlining information of all incoming call edges of NODE.  */

void
reset_inline_failed (struct cgraph_node *node)
{
  struct cgraph_edge *e;

  for (e = node->callers; e; e = e->next_caller)
    {
      e->callee->global.inlined_to = NULL;
      if (!node->analyzed)
        e->inline_failed = CIF_BODY_NOT_AVAILABLE;
      else if (node->local.redefined_extern_inline)
        e->inline_failed = CIF_REDEFINED_EXTERN_INLINE;
      else if (!node->local.inlinable)
        e->inline_failed = CIF_FUNCTION_NOT_INLINABLE;
      else if (e->call_stmt_cannot_inline_p)
        e->inline_failed = CIF_MISMATCHED_ARGUMENTS;
      else
        e->inline_failed = CIF_FUNCTION_NOT_CONSIDERED;
    }
}

/* Computes the frequency of the call statement so that it can be stored in
   cgraph_edge.  BB is the basic block of the call statement.  */

int
compute_call_stmt_bb_frequency (tree decl, basic_block bb)
{
  int entry_freq = ENTRY_BLOCK_PTR_FOR_FUNCTION
                     (DECL_STRUCT_FUNCTION (decl))->frequency;
  int freq = bb->frequency;

  if (profile_status_for_function (DECL_STRUCT_FUNCTION (decl)) == PROFILE_ABSENT)
    return CGRAPH_FREQ_BASE;

  if (!entry_freq)
    entry_freq = 1, freq++;

  freq = freq * CGRAPH_FREQ_BASE / entry_freq;
  if (freq > CGRAPH_FREQ_MAX)
    freq = CGRAPH_FREQ_MAX;

  return freq;
}

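/* Worked example: assuming CGRAPH_FREQ_BASE is 1000, a call in a block
   with frequency 400 inside a function whose entry block has frequency
   800 gets 400 * 1000 / 800 == 500, i.e. the call is estimated to
   execute half as often as the function is entered.  Results above
   CGRAPH_FREQ_MAX are clamped, and a zero entry frequency is bumped to
   1 to avoid division by zero.  */
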
bool cgraph_pre_profiling_inlining_done = false;

/* Return true if E is a fake indirect call edge.  */

bool
cgraph_is_fake_indirect_call_edge (struct cgraph_edge *e)
{
  return !e->call_stmt;
}

/* Add fake cgraph edges from NODE to its indirect call callees
   using profile data.  */

static void
add_fake_indirect_call_edges (struct cgraph_node *node)
{
  unsigned n_counts, i;
  gcov_type *ic_counts;

  /* Enable this only for LIPO for now.  */
  if (!L_IPO_COMP_MODE)
    return;

  if (cgraph_pre_profiling_inlining_done)
    return;

  ic_counts
    = get_coverage_counts_no_warn (DECL_STRUCT_FUNCTION (node->decl),
                                   GCOV_COUNTER_ICALL_TOPNV, &n_counts);

  if (!ic_counts)
    return;

  gcc_assert ((n_counts % GCOV_ICALL_TOPN_NCOUNTS) == 0);

  /* After early_inline_1 and before the value profile transformation,
     functions that are indirect call targets may have had their bodies
     removed (extern inline functions, functions from aux modules,
     functions in comdat, etc.) if all direct call sites were inlined.
     This leads to missed inline opportunities after profile-based
     indirect call promotion.  The solution is to add fake edges to the
     indirect call targets.  Note that such edges are not associated
     with actual indirect call sites, because it is not possible to
     reliably match pre-early-inline indirect call sites with indirect
     call profile counters, which come from the post-early-inline
     function body.  */

  for (i = 0; i < n_counts;
       i += GCOV_ICALL_TOPN_NCOUNTS, ic_counts += GCOV_ICALL_TOPN_NCOUNTS)
    {
      gcov_type val1, val2, count1, count2;
      struct cgraph_node *direct_call1 = 0, *direct_call2 = 0;

      val1 = ic_counts[1];
      count1 = ic_counts[2];
      val2 = ic_counts[3];
      count2 = ic_counts[4];

      if (val1 == 0 || count1 == 0)
        continue;

      direct_call1 = find_func_by_global_id (val1);
      if (direct_call1)
        {
          tree decl = direct_call1->decl;
          cgraph_create_edge (node,
                              cgraph_node (decl),
                              NULL,
                              count1, 0, 0);
        }

      if (val2 == 0 || count2 == 0)
        continue;
      direct_call2 = find_func_by_global_id (val2);
      if (direct_call2)
        {
          tree decl = direct_call2->decl;
          cgraph_create_edge (node,
                              cgraph_node (decl),
                              NULL,
                              count2, 0, 0);
        }
    }
}

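/* The indexing above presumes the GCOV_ICALL_TOPN_NCOUNTS-entry record
   layout produced by the LIPO indirect-call value profiler: entry [0]
   holds the call site key, entries [1]/[2] the hottest target's global
   function id and call count, and entries [3]/[4] the second-hottest
   target's.  The fake edges carry the profiled count, a NULL call
   statement, and zero frequency and loop nest.  */
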
/* This can be implemented as an IPA pass that must be the first one
   run before any unreachable node elimination.  */

void
cgraph_add_fake_indirect_call_edges (void)
{
  struct cgraph_node *node;

  /* Enable this only for LIPO for now.  */
  if (!L_IPO_COMP_MODE)
    return;

  for (node = cgraph_nodes; node; node = node->next)
    {
      if (node->analyzed && (node->needed || node->reachable))
        add_fake_indirect_call_edges (node);
    }
}

/* Remove zero count fake edges added for the purpose of ensuring
   the right processing order.  This should be called after all
   small ipa passes.  */

void
cgraph_remove_zero_count_fake_edges (void)
{
  struct cgraph_node *node;

  /* Enable this only for LIPO for now.  */
  if (!L_IPO_COMP_MODE)
    return;

  for (node = cgraph_nodes; node; node = node->next)
    {
      if (node->analyzed && (node->needed || node->reachable))
        {
          struct cgraph_edge *e, *f;
          for (e = node->callees; e; e = f)
            {
              f = e->next_callee;
              if (!e->call_stmt && !e->count && !e->loop_nest
                  && !e->frequency)
                cgraph_remove_edge (e);
            }
        }
    }
}

/* Mark address taken in STMT.  */

static bool
mark_address (gimple stmt, tree addr, void *data)
{
  addr = get_base_address (addr);
  if (TREE_CODE (addr) == FUNCTION_DECL)
    {
      struct cgraph_node *node = cgraph_node (addr);
      cgraph_mark_address_taken_node (node);
      ipa_record_reference ((struct cgraph_node *)data, NULL,
                            node, NULL,
                            IPA_REF_ADDR, stmt);
    }
  else if (addr && TREE_CODE (addr) == VAR_DECL
           && (TREE_STATIC (addr) || DECL_EXTERNAL (addr)))
    {
      struct varpool_node *vnode = varpool_node (addr);
      int walk_subtrees;

      if (lang_hooks.callgraph.analyze_expr)
        lang_hooks.callgraph.analyze_expr (&addr, &walk_subtrees);
      varpool_mark_needed_node (vnode);
      if (vnode->alias && vnode->extra_name)
        vnode = vnode->extra_name;
      ipa_record_reference ((struct cgraph_node *)data, NULL,
                            NULL, vnode,
                            IPA_REF_ADDR, stmt);
    }

  return false;
}

/* Mark load of T.  */

static bool
mark_load (gimple stmt, tree t, void *data)
{
  t = get_base_address (t);
  if (t && TREE_CODE (t) == FUNCTION_DECL)
    {
      /* ??? This can happen on platforms with descriptors when these are
         directly manipulated in the code.  Pretend that it's an address.  */
      struct cgraph_node *node = cgraph_node (t);
      cgraph_mark_address_taken_node (node);
      ipa_record_reference ((struct cgraph_node *)data, NULL,
                            node, NULL,
                            IPA_REF_ADDR, stmt);
    }
  else if (t && TREE_CODE (t) == VAR_DECL
           && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
    {
      struct varpool_node *vnode = varpool_node (t);
      int walk_subtrees;

      if (lang_hooks.callgraph.analyze_expr)
        lang_hooks.callgraph.analyze_expr (&t, &walk_subtrees);
      varpool_mark_needed_node (vnode);
      if (vnode->alias && vnode->extra_name)
        vnode = vnode->extra_name;
      ipa_record_reference ((struct cgraph_node *)data, NULL,
                            NULL, vnode,
                            IPA_REF_LOAD, stmt);
    }
  return false;
}

/* Mark store of T.  */

static bool
mark_store (gimple stmt, tree t, void *data)
{
  t = get_base_address (t);
  if (t && TREE_CODE (t) == VAR_DECL
      && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
    {
      struct varpool_node *vnode = varpool_node (t);
      int walk_subtrees;

      if (lang_hooks.callgraph.analyze_expr)
        lang_hooks.callgraph.analyze_expr (&t, &walk_subtrees);
      varpool_mark_needed_node (vnode);
      if (vnode->alias && vnode->extra_name)
        vnode = vnode->extra_name;
      ipa_record_reference ((struct cgraph_node *)data, NULL,
                            NULL, vnode,
                            IPA_REF_STORE, stmt);
    }
  return false;
}

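/* The three callbacks above are handed to walk_stmt_load_store_addr_ops,
   which classifies each statement operand and invokes mark_load on reads,
   mark_store on writes, and mark_address on operands whose address is
   taken; DATA carries the current cgraph node.  Illustrative
   (hypothetical declarations):

     x = y;        ->  mark_store on `x', mark_load on `y'
     fp = &foo;    ->  mark_store on `fp', mark_address on `foo'

   Only statics, externals and function decls are recorded; automatic
   variables never reach the varpool.  */
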
/* Create cgraph edges for function calls.
   Also look for functions and variables having addresses taken.  */

static unsigned int
build_cgraph_edges (void)
{
  basic_block bb;
  struct cgraph_node *node = cgraph_node (current_function_decl);
  struct pointer_set_t *visited_nodes = pointer_set_create ();
  gimple_stmt_iterator gsi;
  tree decl;
  unsigned ix;

  /* Create the callgraph edges and record the nodes referenced by the
     function body.  */
  FOR_EACH_BB (bb)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          tree decl;

          if (is_gimple_call (stmt))
            {
              int freq = compute_call_stmt_bb_frequency (current_function_decl,
                                                         bb);
              decl = gimple_call_fndecl (stmt);
              if (decl)
                cgraph_create_edge (node, cgraph_node (decl), stmt,
                                    bb->count, freq,
                                    bb->loop_depth);
              else
                cgraph_create_indirect_edge (node, stmt,
                                             gimple_call_flags (stmt),
                                             bb->count, freq,
                                             bb->loop_depth);
            }
          walk_stmt_load_store_addr_ops (stmt, node, mark_load,
                                         mark_store, mark_address);
          if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
              && gimple_omp_parallel_child_fn (stmt))
            {
              tree fn = gimple_omp_parallel_child_fn (stmt);
              ipa_record_reference (node, NULL, cgraph_node (fn),
                                    NULL, IPA_REF_ADDR, stmt);
            }
          if (gimple_code (stmt) == GIMPLE_OMP_TASK)
            {
              tree fn = gimple_omp_task_child_fn (stmt);
              if (fn)
                ipa_record_reference (node, NULL, cgraph_node (fn),
                                      NULL, IPA_REF_ADDR, stmt);
              fn = gimple_omp_task_copy_fn (stmt);
              if (fn)
                ipa_record_reference (node, NULL, cgraph_node (fn),
                                      NULL, IPA_REF_ADDR, stmt);
            }
        }
      for (gsi = gsi_start (phi_nodes (bb)); !gsi_end_p (gsi); gsi_next (&gsi))
        walk_stmt_load_store_addr_ops (gsi_stmt (gsi), node,
                                       mark_load, mark_store, mark_address);
    }

  /* Look for initializers of constant variables and private statics.  */
  FOR_EACH_LOCAL_DECL (cfun, ix, decl)
    if (TREE_CODE (decl) == VAR_DECL
        && (TREE_STATIC (decl) && !DECL_EXTERNAL (decl)))
      varpool_finalize_decl (decl);
  record_eh_tables (node, cfun);

  pointer_set_destroy (visited_nodes);
  return 0;
}

struct gimple_opt_pass pass_build_cgraph_edges =
{
 {
  GIMPLE_PASS,
  "*build_cgraph_edges",                /* name */
  NULL,                                 /* gate */
  build_cgraph_edges,                   /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_cfg,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};

/* Record references to functions and other variables present in the
   initial value of DECL, a variable.
   When ONLY_VARS is true, we mark needed only variables, not functions.  */

void
record_references_in_initializer (tree decl, bool only_vars)
{
  struct pointer_set_t *visited_nodes = pointer_set_create ();
  struct varpool_node *node = varpool_node (decl);
  struct record_reference_ctx ctx = {false, NULL};

  ctx.varpool_node = node;
  ctx.only_vars = only_vars;
  walk_tree (&DECL_INITIAL (decl), record_reference,
             &ctx, visited_nodes);
  pointer_set_destroy (visited_nodes);
}

/* Rebuild cgraph edges for the current function node.  This needs to be run
   after passes that don't update the cgraph.  */

unsigned int
rebuild_cgraph_edges (void)
{
  basic_block bb;
  struct cgraph_node *node = cgraph_node (current_function_decl);
  gimple_stmt_iterator gsi;

  cgraph_node_remove_callees (node);
  ipa_remove_all_references (&node->ref_list);

  node->count = ENTRY_BLOCK_PTR->count;
  node->max_bb_count = 0;

  FOR_EACH_BB (bb)
    {
      if (bb->count > node->max_bb_count)
        node->max_bb_count = bb->count;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          tree decl;

          if (is_gimple_call (stmt))
            {
              int freq = compute_call_stmt_bb_frequency (current_function_decl,
                                                         bb);
              decl = gimple_call_fndecl (stmt);
              if (decl)
                {
                  struct cgraph_node *callee;

                  /* In LIPO mode, before tree_profiling the call graph edge
                     needs to be built with the original target node, to keep
                     early inline decisions consistent between profile
                     generation and profile use.  After tree_profiling, the
                     target needs to be set to the resolved node so that
                     ipa-inline sees the definitions.  */
                  if (L_IPO_COMP_MODE && cgraph_pre_profiling_inlining_done)
                    callee = cgraph_lipo_get_resolved_node (decl);
                  else
                    callee = cgraph_node (decl);
                  cgraph_create_edge (node, callee, stmt,
                                      bb->count, freq,
                                      bb->loop_depth);
                  if (L_IPO_COMP_MODE && cgraph_pre_profiling_inlining_done
                      && decl != callee->decl)
                    gimple_call_set_fndecl (stmt, callee->decl);
                }
              else
                cgraph_create_indirect_edge (node, stmt,
                                             gimple_call_flags (stmt),
                                             bb->count, freq,
                                             bb->loop_depth);
            }
          walk_stmt_load_store_addr_ops (stmt, node, mark_load,
                                         mark_store, mark_address);
        }
      for (gsi = gsi_start (phi_nodes (bb)); !gsi_end_p (gsi); gsi_next (&gsi))
        walk_stmt_load_store_addr_ops (gsi_stmt (gsi), node,
                                       mark_load, mark_store, mark_address);
    }
  add_fake_indirect_call_edges (node);
  record_eh_tables (node, cfun);
  gcc_assert (!node->global.inlined_to);

  return 0;
}

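/* For example, a pass that rewrites call statements without maintaining
   the callgraph can schedule pass_rebuild_cgraph_edges (below) after
   itself instead of updating edges by hand; the rebuild discards all
   callee edges and references of the current node and recreates them,
   together with the profile counts, from the function body in one
   sweep.  */
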
/* Rebuild cgraph references for the current function node.  This needs to
   be run after passes that don't update the cgraph.  */

void
cgraph_rebuild_references (void)
{
  basic_block bb;
  struct cgraph_node *node = cgraph_node (current_function_decl);
  gimple_stmt_iterator gsi;

  ipa_remove_all_references (&node->ref_list);

  node->count = ENTRY_BLOCK_PTR->count;

  FOR_EACH_BB (bb)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);

          walk_stmt_load_store_addr_ops (stmt, node, mark_load,
                                         mark_store, mark_address);
        }
      for (gsi = gsi_start (phi_nodes (bb)); !gsi_end_p (gsi); gsi_next (&gsi))
        walk_stmt_load_store_addr_ops (gsi_stmt (gsi), node,
                                       mark_load, mark_store, mark_address);
    }
  record_eh_tables (node, cfun);
}

struct gimple_opt_pass pass_rebuild_cgraph_edges =
{
 {
  GIMPLE_PASS,
  "*rebuild_cgraph_edges",              /* name */
  NULL,                                 /* gate */
  rebuild_cgraph_edges,                 /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_CGRAPH,                            /* tv_id */
  PROP_cfg,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0,                                    /* todo_flags_finish */
 }
};

/* Defined in tree-optimize.c.  */
extern bool cgraph_callee_edges_final_cleanup;

static unsigned int
remove_cgraph_callee_edges (void)
{
  /* The -freorder-functions=* and -fcallgraph-profiles-sections options
     need the call graph preserved till pass_final.  */
  if (cgraph_callee_edges_final_cleanup
      && (flag_callgraph_profiles_sections
          || (flag_reorder_functions > 1)))
    return 0;

  cgraph_node_remove_callees (cgraph_node (current_function_decl));
  return 0;
}

struct gimple_opt_pass pass_remove_cgraph_callee_edges =
{
 {
  GIMPLE_PASS,
  "*remove_cgraph_callee_edges",        /* name */
  NULL,                                 /* gate */
  remove_cgraph_callee_edges,           /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0,                                    /* todo_flags_finish */
 }
};