1 /* Callgraph based interprocedural optimizations.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
3 2011 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This module implements the main driver of the compilation process as well as
23 a few basic interprocedural optimizers.
25 The main purpose of this file is to act as an interface between the
26 tree-based front ends and the back end (and middle end).
28 The front end is supposed to use the following functionality:
30 - cgraph_finalize_function
32 This function is called once the front end has parsed the whole body of the
33 function and it is certain that neither the body nor the declaration will change.
35 (There is one exception, needed for implementing the GCC extern inline
36 extension.)
38 - varpool_finalize_variable
40 This function has the same behavior as the above but is used for static
41 variables.
43 - cgraph_finalize_compilation_unit
45 This function is called once the (source level) compilation unit is
46 finalized and will no longer change.
48 The call-graph construction and local function analysis take
49 place here. Bodies of unreachable functions are released to
50 conserve memory usage.
52 The function can be called multiple times when multiple source level
53 compilation units are combined (such as in the C front end).
55 - cgraph_optimize
57 In unit-at-a-time compilation the intraprocedural analysis takes
58 place here. In particular, static functions whose address is never
59 taken are marked as local. The back end can then use this information to
60 modify calling conventions, do better inlining, or perform similar optimizations.
62 - cgraph_mark_needed_node
63 - varpool_mark_needed_node
65 When a function or variable is referenced in some hidden way, the call-graph
66 data structure must be updated accordingly by this function.
67 There should be little need to call this function and all the references
68 should be made explicit to the cgraph code. At present these functions are
69 used by the C++ front end to explicitly mark the keyed methods.
71 - analyze_expr callback
73 This function is responsible for lowering tree nodes not understood by
74 generic code into understandable ones or alternatively marking
75 callgraph and varpool nodes referenced by them as needed.
77 ??? On tree-ssa, genericizing should take place here and we would avoid the
78 need for these hooks (replacing them by a genericizing hook).
80 Analysis of all functions is deferred
81 to cgraph_finalize_compilation_unit and expansion to cgraph_optimize.
83 In cgraph_finalize_compilation_unit the reachable functions are
84 analyzed. During analysis the call-graph edges from reachable
85 functions are constructed and their destinations are marked as
86 reachable. References to functions and variables are discovered too,
87 and variables found to be needed are output to the assembly file. Via
88 the mark_referenced call in assemble_variable, functions referenced by
89 static variables are noticed too.
91 The intra-procedural information is produced and its existence is
92 indicated by global_info_ready. Once this flag is set it is impossible
93 to change a function from !reachable to reachable, and thus
94 assemble_variable no longer calls mark_referenced.
96 Finally the call-graph is topologically sorted and all reachable functions
97 that have not been completely inlined or are not external are output.
99 ??? It is possible that a reference to a function or variable is optimized
100 out. We cannot deal with this nicely because topological order is not
101 suitable for it. For tree-ssa we may consider another pass doing
102 optimization and re-discovering reachable functions.
104 ??? Reorganize code so variables are output very last and only if they
105 really have been referenced by the produced code, so we catch more cases
106 where the reference has been optimized out. */
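/* A minimal sketch of the sequence a front end is expected to follow, using
   the entry points listed above (illustrative only; real front ends differ in
   detail and in the exact varpool entry point they call):

       tree fndecl = ...;                        one parsed function body
       cgraph_finalize_function (fndecl, false);

       tree vardecl = ...;                       one parsed static variable
       varpool_finalize_variable (vardecl);

       cgraph_finalize_compilation_unit ();      whole unit parsed; this in
                                                 turn drives cgraph_optimize

   Declarations referenced only in hidden ways are additionally passed to
   cgraph_mark_needed_node or varpool_mark_needed_node.  */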
109 #include "config.h"
110 #include "system.h"
111 #include "coretypes.h"
112 #include "tm.h"
113 #include "tree.h"
114 #include "rtl.h"
115 #include "tree-flow.h"
116 #include "tree-inline.h"
117 #include "langhooks.h"
118 #include "pointer-set.h"
119 #include "toplev.h"
120 #include "flags.h"
121 #include "ggc.h"
122 #include "debug.h"
123 #include "target.h"
124 #include "cgraph.h"
125 #include "diagnostic.h"
126 #include "tree-pretty-print.h"
127 #include "gimple-pretty-print.h"
128 #include "timevar.h"
129 #include "params.h"
130 #include "fibheap.h"
131 #include "intl.h"
132 #include "function.h"
133 #include "ipa-prop.h"
134 #include "gimple.h"
135 #include "tree-iterator.h"
136 #include "tree-pass.h"
137 #include "tree-dump.h"
138 #include "output.h"
139 #include "coverage.h"
140 #include "plugin.h"
141 #include "ipa-inline.h"
143 static void cgraph_expand_all_functions (void);
144 static void cgraph_mark_functions_to_output (void);
145 static void cgraph_expand_function (struct cgraph_node *);
146 static void cgraph_output_pending_asms (void);
148 FILE *cgraph_dump_file;
150 /* Used for vtable lookup in thunk adjusting. */
151 static GTY (()) tree vtable_entry_type;
153 /* Determine if function DECL is needed. That is, whether it is visible
154 to something either outside this translation unit or to something magic
155 in the system configury. */
157 bool
158 cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
160 /* If the user told us it is used, then it must be so. */
161 if (node->local.externally_visible)
162 return true;
164 /* ??? If the assembler name is set by hand, it is possible to assemble
165 the name later after finalizing the function and the fact is noticed
166 in assemble_name then. This is arguably a bug. */
167 if (DECL_ASSEMBLER_NAME_SET_P (decl)
168 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
169 return true;
171 /* With -fkeep-inline-functions we are keeping all inline functions except
172 for extern inline ones. */
173 if (flag_keep_inline_functions
174 && DECL_DECLARED_INLINE_P (decl)
175 && !DECL_EXTERNAL (decl)
176 && !DECL_DISREGARD_INLINE_LIMITS (decl))
177 return true;
179 /* If we decided it was needed before, but at the time we didn't have
180 the body of the function available, then it's still needed. We have
181 to go back and re-check its dependencies now. */
182 if (node->needed)
183 return true;
185 /* Externally visible functions must be output. The exception is
186 COMDAT functions that must be output only when they are needed.
188 When not optimizing, also output the static functions (see
189 PR24561), but don't do so for always_inline functions, functions
190 declared inline, and nested functions. These were optimized out
191 in the original implementation and it is unclear whether we want
192 to change the behavior here. */
193 if (((TREE_PUBLIC (decl)
194 || (!optimize
195 && !DECL_DISREGARD_INLINE_LIMITS (decl)
196 && !DECL_DECLARED_INLINE_P (decl)
197 && !(DECL_CONTEXT (decl)
198 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)))
199 && !flag_whole_program
200 && !flag_lto)
201 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
202 return true;
204 return false;
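/* A brief illustration of the main case above (a sketch, not exhaustive):

       void pub (void) { }
       static void priv (void) { }

   pub is TREE_PUBLIC and neither COMDAT nor external, so with the
   whole-program and LTO flags off it is needed even if nothing in this unit
   calls it; priv is needed only if one of the other rules above applies (for
   example -fkeep-inline-functions on an inline definition, or a hand-set
   assembler name that is already TREE_SYMBOL_REFERENCED).  */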
207 /* Process CGRAPH_NEW_FUNCTIONS and perform the actions necessary to add these
208 functions into the callgraph so that they look like ordinary reachable
209 functions inserted into the callgraph already at construction time. */
211 bool
212 cgraph_process_new_functions (void)
214 bool output = false;
215 tree fndecl;
216 struct cgraph_node *node;
218 varpool_analyze_pending_decls ();
219 /* Note that this queue may grow as it is being processed, as the new
220 functions may generate new ones. */
221 while (cgraph_new_nodes)
223 node = cgraph_new_nodes;
224 fndecl = node->decl;
225 cgraph_new_nodes = cgraph_new_nodes->next_needed;
226 switch (cgraph_state)
228 case CGRAPH_STATE_CONSTRUCTION:
229 /* At construction time we just need to finalize function and move
230 it into reachable functions list. */
232 node->next_needed = NULL;
233 cgraph_finalize_function (fndecl, false);
234 cgraph_mark_reachable_node (node);
235 output = true;
236 cgraph_call_function_insertion_hooks (node);
237 break;
239 case CGRAPH_STATE_IPA:
240 case CGRAPH_STATE_IPA_SSA:
241 /* When IPA optimization has already started, do all essential
242 transformations that have already been performed on the whole
243 cgraph but not on this function. */
245 gimple_register_cfg_hooks ();
246 if (!node->analyzed)
247 cgraph_analyze_function (node);
248 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
249 current_function_decl = fndecl;
250 if ((cgraph_state == CGRAPH_STATE_IPA_SSA
251 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
252 /* When not optimizing, be sure we run early local passes anyway
253 to expand OMP. */
254 || !optimize)
255 execute_pass_list (pass_early_local_passes.pass.sub);
256 else
257 compute_inline_parameters (node, true);
258 free_dominance_info (CDI_POST_DOMINATORS);
259 free_dominance_info (CDI_DOMINATORS);
260 pop_cfun ();
261 current_function_decl = NULL;
262 cgraph_call_function_insertion_hooks (node);
263 break;
265 case CGRAPH_STATE_EXPANSION:
266 /* Functions created during expansion shall be compiled
267 directly. */
268 node->process = 0;
269 cgraph_call_function_insertion_hooks (node);
270 cgraph_expand_function (node);
271 break;
273 default:
274 gcc_unreachable ();
275 break;
277 varpool_analyze_pending_decls ();
279 return output;
282 /* As a GCC extension we allow redefinition of the function. The
283 semantics when the two bodies differ are not well defined.
284 We replace the old body with the new body, so in unit-at-a-time mode
285 we always use the new body, while in normal mode we may end up with
286 the old body inlined into some functions and the new body expanded and
287 inlined in others.
289 ??? It may make more sense to use one body for inlining and the other
290 body for expanding the function, but this is difficult to do. */
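/* For illustration, the gnu89 extern inline case the comment above has in
   mind looks roughly like this (a sketch):

       extern inline int sq (int x) { return x * x; }

       int sq (int x) { return x * x; }

   The second definition replaces the first in the callgraph via
   cgraph_reset_node below, so later analysis and expansion see only the
   new body.  */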
292 static void
293 cgraph_reset_node (struct cgraph_node *node)
295 /* If node->process is set, then we have already begun whole-unit analysis.
296 This is *not* testing for whether we've already emitted the function.
297 That case can be sort-of legitimately seen with real function redefinition
298 errors. I would argue that the front end should never present us with
299 such a case, but don't enforce that for now. */
300 gcc_assert (!node->process);
302 /* Reset our data structures so we can analyze the function again. */
303 memset (&node->local, 0, sizeof (node->local));
304 memset (&node->global, 0, sizeof (node->global));
305 memset (&node->rtl, 0, sizeof (node->rtl));
306 node->analyzed = false;
307 node->local.redefined_extern_inline = true;
308 node->local.finalized = false;
310 cgraph_node_remove_callees (node);
312 /* We may need to re-queue the node for assembling in case
313 we already processed it and ignored it as not needed, or got
314 a re-declaration in IMA mode. */
315 if (node->reachable)
317 struct cgraph_node *n;
319 for (n = cgraph_nodes_queue; n; n = n->next_needed)
320 if (n == node)
321 break;
322 if (!n)
323 node->reachable = 0;
327 static void
328 cgraph_lower_function (struct cgraph_node *node)
330 if (node->lowered)
331 return;
333 if (node->nested)
334 lower_nested_functions (node->decl);
335 gcc_assert (!node->nested);
337 tree_lowering_passes (node->decl);
338 node->lowered = true;
341 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
342 logic in effect. If NESTED is true, then our caller cannot stand to have
343 the garbage collector run at the moment. We would need to either create
344 a new GC context, or just not compile right now. */
346 void
347 cgraph_finalize_function (tree decl, bool nested)
349 struct cgraph_node *node = cgraph_get_create_node (decl);
351 if (node->local.finalized)
352 cgraph_reset_node (node);
354 notice_global_symbol (decl);
355 node->local.finalized = true;
356 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
358 if (cgraph_decide_is_function_needed (node, decl))
359 cgraph_mark_needed_node (node);
361 /* Since we reclaim unreachable nodes at the end of every language
362 level unit, we need to be conservative about possible entry points
363 there. */
364 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
365 || DECL_STATIC_CONSTRUCTOR (decl)
366 || DECL_STATIC_DESTRUCTOR (decl)
367 /* COMDAT virtual functions may be referenced by a vtable from
368 another compilation unit. Still we want to devirtualize calls
369 to those, so we need to analyze them.
370 FIXME: We should introduce may edges for this purpose and update
371 their handling in unreachable function removal and inliner too. */
372 || (DECL_VIRTUAL_P (decl) && (DECL_COMDAT (decl) || DECL_EXTERNAL (decl))))
373 cgraph_mark_reachable_node (node);
375 /* If we've not yet emitted decl, tell the debug info about it. */
376 if (!TREE_ASM_WRITTEN (decl))
377 (*debug_hooks->deferred_inline_function) (decl);
379 /* Possibly warn about unused parameters. */
380 if (warn_unused_parameter)
381 do_warn_unused_parameter (decl);
383 if (!nested)
384 ggc_collect ();
387 /* The C99 extern inline keywords allow changing the declaration after the
388 function has been finalized. We then need to re-decide whether to mark the
389 function as needed. */
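/* For example (a sketch of the C99 situation the comment above refers to):

       inline int twice (int x) { return 2 * x; }

       extern int twice (int x);

   The first declaration alone is only an inline definition, but the later
   extern declaration requires an external definition to be emitted, so the
   already finalized function must be re-checked and possibly marked as
   needed.  */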
391 void
392 cgraph_mark_if_needed (tree decl)
394 struct cgraph_node *node = cgraph_get_node (decl);
395 if (node->local.finalized && cgraph_decide_is_function_needed (node, decl))
396 cgraph_mark_needed_node (node);
399 /* Return TRUE if NODE2 is NODE or a (transitive) clone of NODE. */
400 static bool
401 clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
403 while (node != node2 && node2)
404 node2 = node2->clone_of;
405 return node2 != NULL;
408 /* Verify edge E count and frequency. */
410 static bool
411 verify_edge_count_and_frequency (struct cgraph_edge *e)
413 bool error_found = false;
414 if (e->count < 0)
416 error ("caller edge count is negative");
417 error_found = true;
419 if (e->frequency < 0)
421 error ("caller edge frequency is negative");
422 error_found = true;
424 if (e->frequency > CGRAPH_FREQ_MAX)
426 error ("caller edge frequency is too large");
427 error_found = true;
429 if (gimple_has_body_p (e->caller->decl)
430 && !e->caller->global.inlined_to
431 && (e->frequency
432 != compute_call_stmt_bb_frequency (e->caller->decl,
433 gimple_bb (e->call_stmt))))
435 error ("caller edge frequency %i does not match BB frequency %i",
436 e->frequency,
437 compute_call_stmt_bb_frequency (e->caller->decl,
438 gimple_bb (e->call_stmt)));
439 error_found = true;
441 return error_found;
444 /* Switch to THIS_CFUN if needed and print STMT to stderr. */
445 static void
446 cgraph_debug_gimple_stmt (struct function *this_cfun, gimple stmt)
448 /* debug_gimple_stmt needs correct cfun */
449 if (cfun != this_cfun)
450 set_cfun (this_cfun);
451 debug_gimple_stmt (stmt);
455 /* Verify the consistency of cgraph node NODE. */
455 DEBUG_FUNCTION void
456 verify_cgraph_node (struct cgraph_node *node)
458 struct cgraph_edge *e;
459 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
460 basic_block this_block;
461 gimple_stmt_iterator gsi;
462 bool error_found = false;
464 if (seen_error ())
465 return;
467 timevar_push (TV_CGRAPH_VERIFY);
468 for (e = node->callees; e; e = e->next_callee)
469 if (e->aux)
471 error ("aux field set for edge %s->%s",
472 identifier_to_locale (cgraph_node_name (e->caller)),
473 identifier_to_locale (cgraph_node_name (e->callee)));
474 error_found = true;
476 if (node->count < 0)
478 error ("execution count is negative");
479 error_found = true;
481 if (node->global.inlined_to && node->local.externally_visible)
483 error ("externally visible inline clone");
484 error_found = true;
486 if (node->global.inlined_to && node->address_taken)
488 error ("inline clone with address taken");
489 error_found = true;
491 if (node->global.inlined_to && node->needed)
493 error ("inline clone is needed");
494 error_found = true;
496 for (e = node->indirect_calls; e; e = e->next_callee)
498 if (e->aux)
500 error ("aux field set for indirect edge from %s",
501 identifier_to_locale (cgraph_node_name (e->caller)));
502 error_found = true;
504 if (!e->indirect_unknown_callee
505 || !e->indirect_info)
507 error ("An indirect edge from %s is not marked as indirect or has no "
508 "associated indirect_info, the corresponding statement is: ",
509 identifier_to_locale (cgraph_node_name (e->caller)));
510 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
511 error_found = true;
514 for (e = node->callers; e; e = e->next_caller)
516 if (verify_edge_count_and_frequency (e))
517 error_found = true;
518 if (!e->inline_failed)
520 if (node->global.inlined_to
521 != (e->caller->global.inlined_to
522 ? e->caller->global.inlined_to : e->caller))
524 error ("inlined_to pointer is wrong");
525 error_found = true;
527 if (node->callers->next_caller)
529 error ("multiple inline callers");
530 error_found = true;
533 else
534 if (node->global.inlined_to)
536 error ("inlined_to pointer set for noninline callers");
537 error_found = true;
540 for (e = node->indirect_calls; e; e = e->next_callee)
541 if (verify_edge_count_and_frequency (e))
542 error_found = true;
543 if (!node->callers && node->global.inlined_to)
545 error ("inlined_to pointer is set but no predecessors found");
546 error_found = true;
548 if (node->global.inlined_to == node)
550 error ("inlined_to pointer refers to itself");
551 error_found = true;
554 if (!cgraph_get_node (node->decl))
556 error ("node not found in cgraph_hash");
557 error_found = true;
560 if (node->clone_of)
562 struct cgraph_node *n;
563 for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
564 if (n == node)
565 break;
566 if (!n)
568 error ("node has wrong clone_of");
569 error_found = true;
572 if (node->clones)
574 struct cgraph_node *n;
575 for (n = node->clones; n; n = n->next_sibling_clone)
576 if (n->clone_of != node)
577 break;
578 if (n)
580 error ("node has wrong clone list");
581 error_found = true;
584 if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
586 error ("node is in clone list but it is not clone");
587 error_found = true;
589 if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
591 error ("node has wrong prev_clone pointer");
592 error_found = true;
594 if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
596 error ("double linked list of clones corrupted");
597 error_found = true;
599 if (node->same_comdat_group)
601 struct cgraph_node *n = node->same_comdat_group;
603 if (!DECL_ONE_ONLY (node->decl))
605 error ("non-DECL_ONE_ONLY node in a same_comdat_group list");
606 error_found = true;
608 if (n == node)
610 error ("node is alone in a comdat group");
611 error_found = true;
615 if (!n->same_comdat_group)
617 error ("same_comdat_group is not a circular list");
618 error_found = true;
619 break;
621 n = n->same_comdat_group;
623 while (n != node);
626 if (node->analyzed && gimple_has_body_p (node->decl)
627 && !TREE_ASM_WRITTEN (node->decl)
628 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
629 && !flag_wpa)
631 if (this_cfun->cfg)
633 /* The nodes we're interested in are never shared, so walk
634 the tree ignoring duplicates. */
635 struct pointer_set_t *visited_nodes = pointer_set_create ();
636 /* Reach the trees by walking over the CFG, and note the
637 enclosing basic-blocks in the call edges. */
638 FOR_EACH_BB_FN (this_block, this_cfun)
639 for (gsi = gsi_start_bb (this_block);
640 !gsi_end_p (gsi);
641 gsi_next (&gsi))
643 gimple stmt = gsi_stmt (gsi);
644 if (is_gimple_call (stmt))
646 struct cgraph_edge *e = cgraph_edge (node, stmt);
647 tree decl = gimple_call_fndecl (stmt);
648 if (e)
650 if (e->aux)
652 error ("shared call_stmt:");
653 cgraph_debug_gimple_stmt (this_cfun, stmt);
654 error_found = true;
656 if (!e->indirect_unknown_callee)
658 struct cgraph_node *n;
660 if (e->callee->same_body_alias)
662 error ("edge points to same body alias:");
663 debug_tree (e->callee->decl);
664 error_found = true;
666 else if (!e->callee->global.inlined_to
667 && decl
668 && cgraph_get_node (decl)
669 && (e->callee->former_clone_of
670 != cgraph_get_node (decl)->decl)
671 && !clone_of_p (cgraph_get_node (decl),
672 e->callee))
674 error ("edge points to wrong declaration:");
675 debug_tree (e->callee->decl);
676 fprintf (stderr," Instead of:");
677 debug_tree (decl);
678 error_found = true;
680 else if (decl
681 && (n = cgraph_get_node_or_alias (decl))
682 && (n->same_body_alias
683 && n->thunk.thunk_p))
685 error ("a call to thunk improperly represented "
686 "in the call graph:");
687 cgraph_debug_gimple_stmt (this_cfun, stmt);
688 error_found = true;
691 else if (decl)
693 error ("an indirect edge with unknown callee "
694 "corresponding to a call_stmt with "
695 "a known declaration:");
696 error_found = true;
697 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
699 e->aux = (void *)1;
701 else if (decl)
703 error ("missing callgraph edge for call stmt:");
704 cgraph_debug_gimple_stmt (this_cfun, stmt);
705 error_found = true;
709 pointer_set_destroy (visited_nodes);
711 else
712 /* No CFG available?! */
713 gcc_unreachable ();
715 for (e = node->callees; e; e = e->next_callee)
717 if (!e->aux)
719 error ("edge %s->%s has no corresponding call_stmt",
720 identifier_to_locale (cgraph_node_name (e->caller)),
721 identifier_to_locale (cgraph_node_name (e->callee)));
722 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
723 error_found = true;
725 e->aux = 0;
727 for (e = node->indirect_calls; e; e = e->next_callee)
729 if (!e->aux)
731 error ("an indirect edge from %s has no corresponding call_stmt",
732 identifier_to_locale (cgraph_node_name (e->caller)));
733 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
734 error_found = true;
736 e->aux = 0;
739 if (error_found)
741 dump_cgraph_node (stderr, node);
742 internal_error ("verify_cgraph_node failed");
744 timevar_pop (TV_CGRAPH_VERIFY);
747 /* Verify whole cgraph structure. */
748 DEBUG_FUNCTION void
749 verify_cgraph (void)
751 struct cgraph_node *node;
753 if (seen_error ())
754 return;
756 for (node = cgraph_nodes; node; node = node->next)
757 verify_cgraph_node (node);
760 /* Output all asm statements we have stored up to be output. */
762 static void
763 cgraph_output_pending_asms (void)
765 struct cgraph_asm_node *can;
767 if (seen_error ())
768 return;
770 for (can = cgraph_asm_nodes; can; can = can->next)
771 assemble_asm (can->asm_str);
772 cgraph_asm_nodes = NULL;
775 /* Analyze the function scheduled to be output. */
776 void
777 cgraph_analyze_function (struct cgraph_node *node)
779 tree save = current_function_decl;
780 tree decl = node->decl;
782 current_function_decl = decl;
783 push_cfun (DECL_STRUCT_FUNCTION (decl));
785 assign_assembler_name_if_neeeded (node->decl);
787 /* Make sure to gimplify bodies only once. While analyzing a
788 function we lower it, which will require gimplified nested
789 functions, so we can end up here with an already gimplified
790 body. */
791 if (!gimple_body (decl))
792 gimplify_function_tree (decl);
793 dump_function (TDI_generic, decl);
795 cgraph_lower_function (node);
796 node->analyzed = true;
798 pop_cfun ();
799 current_function_decl = save;
802 /* Process attributes common for vars and functions. */
804 static void
805 process_common_attributes (tree decl)
807 tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
809 if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
811 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
812 "%<weakref%> attribute should be accompanied with"
813 " an %<alias%> attribute");
814 DECL_WEAK (decl) = 0;
815 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
816 DECL_ATTRIBUTES (decl));
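/* For example (a sketch of what the check above accepts and warns about,
   assuming the usual GNU attribute syntax):

       static void ok (void) __attribute__ ((weakref ("real_target")));

       static void warned (void) __attribute__ ((weakref));

   The first form carries an alias target and passes through; the second has
   a weakref attribute with no accompanying alias, so the warning above fires
   and the weakref and DECL_WEAK markings are dropped.  */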
820 /* Look for externally_visible and used attributes and mark cgraph nodes
821 accordingly.
823 We cannot mark the nodes at the point the attributes are processed (in
824 handle_*_attribute) because the copy of the declarations available at that
825 point may not be canonical. For example, in:
827 void f();
828 void f() __attribute__((used));
830 the declaration we see in handle_used_attribute will be the second
831 declaration -- but the front end will subsequently merge that declaration
832 with the original declaration and discard the second declaration.
834 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
836 void f() {}
837 void f() __attribute__((externally_visible));
839 is valid.
841 So, we walk the nodes at the end of the translation unit, applying the
842 attributes at that point. */
844 static void
845 process_function_and_variable_attributes (struct cgraph_node *first,
846 struct varpool_node *first_var)
848 struct cgraph_node *node;
849 struct varpool_node *vnode;
851 for (node = cgraph_nodes; node != first; node = node->next)
853 tree decl = node->decl;
854 if (DECL_PRESERVE_P (decl))
855 cgraph_mark_needed_node (node);
856 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
857 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl))
858 && TREE_PUBLIC (node->decl))
860 if (node->local.finalized)
861 cgraph_mark_needed_node (node);
863 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
865 if (! TREE_PUBLIC (node->decl))
866 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
867 "%<externally_visible%>"
868 " attribute has effect only on public objects");
869 else if (node->local.finalized)
870 cgraph_mark_needed_node (node);
872 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
873 && node->local.finalized)
875 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
876 "%<weakref%> attribute ignored"
877 " because function is defined");
878 DECL_WEAK (decl) = 0;
879 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
880 DECL_ATTRIBUTES (decl));
882 process_common_attributes (decl);
884 for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
886 tree decl = vnode->decl;
887 if (DECL_PRESERVE_P (decl))
889 vnode->force_output = true;
890 if (vnode->finalized)
891 varpool_mark_needed_node (vnode);
893 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
894 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl))
895 && TREE_PUBLIC (vnode->decl))
897 if (vnode->finalized)
898 varpool_mark_needed_node (vnode);
900 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
902 if (! TREE_PUBLIC (vnode->decl))
903 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
904 "%<externally_visible%>"
905 " attribute has effect only on public objects");
906 else if (vnode->finalized)
907 varpool_mark_needed_node (vnode);
909 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
910 && vnode->finalized
911 && DECL_INITIAL (decl))
913 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
914 "%<weakref%> attribute ignored"
915 " because variable is initialized");
916 DECL_WEAK (decl) = 0;
917 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
918 DECL_ATTRIBUTES (decl));
920 process_common_attributes (decl);
924 /* Process the CGRAPH_NODES_NEEDED queue, analyze each function (and
925 transitively each reachable function) and build the cgraph.
926 The function can be called multiple times after inserting new nodes
927 into the beginning of the queue. Just the new part of the queue is re-scanned then. */
929 static void
930 cgraph_analyze_functions (void)
932 /* Keep track of already processed nodes when called multiple times for
933 intermodule optimization. */
934 static struct cgraph_node *first_analyzed;
935 struct cgraph_node *first_processed = first_analyzed;
936 static struct varpool_node *first_analyzed_var;
937 struct cgraph_node *node, *next;
939 bitmap_obstack_initialize (NULL);
940 process_function_and_variable_attributes (first_processed,
941 first_analyzed_var);
942 first_processed = cgraph_nodes;
943 first_analyzed_var = varpool_nodes;
944 varpool_analyze_pending_decls ();
945 if (cgraph_dump_file)
947 fprintf (cgraph_dump_file, "Initial entry points:");
948 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
949 if (node->needed)
950 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
951 fprintf (cgraph_dump_file, "\n");
953 cgraph_process_new_functions ();
955 /* Propagate reachability flag and lower representation of all reachable
956 functions. In the future, lowering will introduce new functions and
957 new entry points on the way (by template instantiation and virtual
958 method table generation for instance). */
959 while (cgraph_nodes_queue)
961 struct cgraph_edge *edge;
962 tree decl = cgraph_nodes_queue->decl;
964 node = cgraph_nodes_queue;
965 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
966 node->next_needed = NULL;
968 /* ??? It is possible to create an extern inline function and later use the
969 weak alias attribute to kill its body. See
970 gcc.c-torture/compile/20011119-1.c */
971 if (!DECL_STRUCT_FUNCTION (decl))
973 cgraph_reset_node (node);
974 continue;
977 if (!node->analyzed)
978 cgraph_analyze_function (node);
980 for (edge = node->callees; edge; edge = edge->next_callee)
981 if (!edge->callee->reachable)
982 cgraph_mark_reachable_node (edge->callee);
984 if (node->same_comdat_group)
986 for (next = node->same_comdat_group;
987 next != node;
988 next = next->same_comdat_group)
989 cgraph_mark_reachable_node (next);
992 /* If decl is a clone of an abstract function, mark that abstract
993 function so that we don't release its body. The DECL_INITIAL() of that
994 abstract function declaration will later be needed to output debug
995 info. */
996 if (DECL_ABSTRACT_ORIGIN (decl))
998 struct cgraph_node *origin_node;
999 origin_node = cgraph_get_node (DECL_ABSTRACT_ORIGIN (decl));
1000 origin_node->abstract_and_needed = true;
1003 /* We finalize local static variables while constructing the callgraph
1004 edges. Process their attributes too. */
1005 process_function_and_variable_attributes (first_processed,
1006 first_analyzed_var);
1007 first_processed = cgraph_nodes;
1008 first_analyzed_var = varpool_nodes;
1009 varpool_analyze_pending_decls ();
1010 cgraph_process_new_functions ();
1013 /* Collect entry points to the unit. */
1014 if (cgraph_dump_file)
1016 fprintf (cgraph_dump_file, "Unit entry points:");
1017 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1018 if (node->needed)
1019 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1020 fprintf (cgraph_dump_file, "\n\nInitial ");
1021 dump_cgraph (cgraph_dump_file);
1022 dump_varpool (cgraph_dump_file);
1025 if (cgraph_dump_file)
1026 fprintf (cgraph_dump_file, "\nReclaiming functions:");
1028 for (node = cgraph_nodes; node != first_analyzed; node = next)
1030 tree decl = node->decl;
1031 next = node->next;
1033 if (node->local.finalized && !gimple_has_body_p (decl))
1034 cgraph_reset_node (node);
1036 if (!node->reachable && gimple_has_body_p (decl))
1038 if (cgraph_dump_file)
1039 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1040 cgraph_remove_node (node);
1041 continue;
1043 else
1044 node->next_needed = NULL;
1045 gcc_assert (!node->local.finalized || gimple_has_body_p (decl));
1046 gcc_assert (node->analyzed == node->local.finalized);
1048 if (cgraph_dump_file)
1050 fprintf (cgraph_dump_file, "\n\nReclaimed ");
1051 dump_cgraph (cgraph_dump_file);
1052 dump_varpool (cgraph_dump_file);
1054 bitmap_obstack_release (NULL);
1055 first_analyzed = cgraph_nodes;
1056 ggc_collect ();
1060 /* Analyze the whole compilation unit once it is parsed completely. */
1062 void
1063 cgraph_finalize_compilation_unit (void)
1065 timevar_push (TV_CGRAPH);
1067 /* If we're here there's no current function anymore. Some frontends
1068 are lazy in clearing these. */
1069 current_function_decl = NULL;
1070 set_cfun (NULL);
1072 /* Do not skip analyzing the functions if there were errors; otherwise we
1073 miss diagnostics for the following functions. */
1075 /* Emit size functions we didn't inline. */
1076 finalize_size_functions ();
1078 /* Mark alias targets necessary and emit diagnostics. */
1079 finish_aliases_1 ();
1081 if (!quiet_flag)
1083 fprintf (stderr, "\nAnalyzing compilation unit\n");
1084 fflush (stderr);
1087 /* Gimplify and lower all functions, compute reachability and
1088 remove unreachable nodes. */
1089 cgraph_analyze_functions ();
1091 /* Mark alias targets necessary and emit diagnostics. */
1092 finish_aliases_1 ();
1094 /* Gimplify and lower thunks. */
1095 cgraph_analyze_functions ();
1097 /* Finally drive the pass manager. */
1098 cgraph_optimize ();
1100 timevar_pop (TV_CGRAPH);
1104 /* Figure out what functions we want to assemble. */
1106 static void
1107 cgraph_mark_functions_to_output (void)
1109 struct cgraph_node *node;
1110 #ifdef ENABLE_CHECKING
1111 bool check_same_comdat_groups = false;
1113 for (node = cgraph_nodes; node; node = node->next)
1114 gcc_assert (!node->process);
1115 #endif
1117 for (node = cgraph_nodes; node; node = node->next)
1119 tree decl = node->decl;
1120 struct cgraph_edge *e;
1122 gcc_assert (!node->process || node->same_comdat_group);
1123 if (node->process)
1124 continue;
1126 for (e = node->callers; e; e = e->next_caller)
1127 if (e->inline_failed)
1128 break;
1130 /* We need to output all local functions that are used and not
1131 always inlined, as well as those that are reachable from
1132 outside the current compilation unit. */
1133 if (node->analyzed
1134 && !node->global.inlined_to
1135 && (!cgraph_only_called_directly_p (node)
1136 || (e && node->reachable))
1137 && !TREE_ASM_WRITTEN (decl)
1138 && !DECL_EXTERNAL (decl))
1140 node->process = 1;
1141 if (node->same_comdat_group)
1143 struct cgraph_node *next;
1144 for (next = node->same_comdat_group;
1145 next != node;
1146 next = next->same_comdat_group)
1147 next->process = 1;
1150 else if (node->same_comdat_group)
1152 #ifdef ENABLE_CHECKING
1153 check_same_comdat_groups = true;
1154 #endif
1156 else
1158 /* We should've reclaimed all functions that are not needed. */
1159 #ifdef ENABLE_CHECKING
1160 if (!node->global.inlined_to
1161 && gimple_has_body_p (decl)
1162 /* FIXME: in an ltrans unit, when the offline copy is outside the partition but
1163 inline copies are inside it, we can end up not removing the body since we no
1164 longer have an analyzed node pointing to it. */
1165 && !node->in_other_partition
1166 && !DECL_EXTERNAL (decl))
1168 dump_cgraph_node (stderr, node);
1169 internal_error ("failed to reclaim unneeded function");
1171 #endif
1172 gcc_assert (node->global.inlined_to
1173 || !gimple_has_body_p (decl)
1174 || node->in_other_partition
1175 || DECL_EXTERNAL (decl));
1180 #ifdef ENABLE_CHECKING
1181 if (check_same_comdat_groups)
1182 for (node = cgraph_nodes; node; node = node->next)
1183 if (node->same_comdat_group && !node->process)
1185 tree decl = node->decl;
1186 if (!node->global.inlined_to
1187 && gimple_has_body_p (decl)
1188 /* FIXME: in an ltrans unit, when the offline copy is outside the partition but
1189 inline copies are inside it, we can end up not removing the body since we no
1190 longer have an analyzed node pointing to it. */
1191 && !node->in_other_partition
1192 && !DECL_EXTERNAL (decl))
1194 dump_cgraph_node (stderr, node);
1195 internal_error ("failed to reclaim unneeded function");
1198 #endif
1201 /* DECL is a FUNCTION_DECL. Initialize data structures so that DECL is a
1202 function in lowered GIMPLE form.
1204 Set current_function_decl and cfun to the newly constructed empty function body.
1205 Return the basic block in the function body. */
1207 static basic_block
1208 init_lowered_empty_function (tree decl)
1210 basic_block bb;
1212 current_function_decl = decl;
1213 allocate_struct_function (decl, false);
1214 gimple_register_cfg_hooks ();
1215 init_empty_tree_cfg ();
1216 init_tree_ssa (cfun);
1217 init_ssa_operands ();
1218 cfun->gimple_df->in_ssa_p = true;
1219 DECL_INITIAL (decl) = make_node (BLOCK);
1221 DECL_SAVED_TREE (decl) = error_mark_node;
1222 cfun->curr_properties |=
1223 (PROP_gimple_lcf | PROP_gimple_leh | PROP_cfg | PROP_referenced_vars |
1224 PROP_ssa);
1226 /* Create BB for body of the function and connect it properly. */
1227 bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
1228 make_edge (ENTRY_BLOCK_PTR, bb, 0);
1229 make_edge (bb, EXIT_BLOCK_PTR, 0);
1231 return bb;
1234 /* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1235 offset indicated by VIRTUAL_OFFSET, if that is
1236 non-null. THIS_ADJUSTING is nonzero for a this-adjusting thunk and
1237 zero for a result-adjusting thunk. */
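/* In plain C terms, the this-adjusting case below computes roughly
   (a sketch of the generated GIMPLE, not the literal statements):

       ptr = (char *) ptr + fixed_offset;
       if (virtual_offset)
         ptr = ptr + *(ptrdiff_t *) (*(char **) ptr + virtual_offset);

   i.e. the vcall offset is loaded from the object's vtable at VIRTUAL_OFFSET
   and added to the pointer as well.  For a result-adjusting thunk the fixed
   offset is applied after the virtual lookup instead.  */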
1239 static tree
1240 thunk_adjust (gimple_stmt_iterator * bsi,
1241 tree ptr, bool this_adjusting,
1242 HOST_WIDE_INT fixed_offset, tree virtual_offset)
1244 gimple stmt;
1245 tree ret;
1247 if (this_adjusting
1248 && fixed_offset != 0)
1250 stmt = gimple_build_assign (ptr,
1251 fold_build2_loc (input_location,
1252 POINTER_PLUS_EXPR,
1253 TREE_TYPE (ptr), ptr,
1254 size_int (fixed_offset)));
1255 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1258 /* If there's a virtual offset, look up that value in the vtable and
1259 adjust the pointer again. */
1260 if (virtual_offset)
1262 tree vtabletmp;
1263 tree vtabletmp2;
1264 tree vtabletmp3;
1265 tree offsettmp;
1267 if (!vtable_entry_type)
1269 tree vfunc_type = make_node (FUNCTION_TYPE);
1270 TREE_TYPE (vfunc_type) = integer_type_node;
1271 TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
1272 layout_type (vfunc_type);
1274 vtable_entry_type = build_pointer_type (vfunc_type);
1277 vtabletmp =
1278 create_tmp_var (build_pointer_type
1279 (build_pointer_type (vtable_entry_type)), "vptr");
1281 /* The vptr is always at offset zero in the object. */
1282 stmt = gimple_build_assign (vtabletmp,
1283 build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
1284 ptr));
1285 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1286 mark_symbols_for_renaming (stmt);
1287 find_referenced_vars_in (stmt);
1289 /* Form the vtable address. */
1290 vtabletmp2 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp)),
1291 "vtableaddr");
1292 stmt = gimple_build_assign (vtabletmp2,
1293 build_simple_mem_ref (vtabletmp));
1294 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1295 mark_symbols_for_renaming (stmt);
1296 find_referenced_vars_in (stmt);
1298 /* Find the entry with the vcall offset. */
1299 stmt = gimple_build_assign (vtabletmp2,
1300 fold_build2_loc (input_location,
1301 POINTER_PLUS_EXPR,
1302 TREE_TYPE (vtabletmp2),
1303 vtabletmp2,
1304 fold_convert (sizetype,
1305 virtual_offset)));
1306 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1308 /* Get the offset itself. */
1309 vtabletmp3 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp2)),
1310 "vcalloffset");
1311 stmt = gimple_build_assign (vtabletmp3,
1312 build_simple_mem_ref (vtabletmp2));
1313 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1314 mark_symbols_for_renaming (stmt);
1315 find_referenced_vars_in (stmt);
1317 /* Cast to sizetype. */
1318 offsettmp = create_tmp_var (sizetype, "offset");
1319 stmt = gimple_build_assign (offsettmp, fold_convert (sizetype, vtabletmp3));
1320 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1321 mark_symbols_for_renaming (stmt);
1322 find_referenced_vars_in (stmt);
1324 /* Adjust the `this' pointer. */
1325 ptr = fold_build2_loc (input_location,
1326 POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
1327 offsettmp);
1330 if (!this_adjusting
1331 && fixed_offset != 0)
1332 /* Adjust the pointer by the constant. */
1334 tree ptrtmp;
1336 if (TREE_CODE (ptr) == VAR_DECL)
1337 ptrtmp = ptr;
1338 else
1340 ptrtmp = create_tmp_var (TREE_TYPE (ptr), "ptr");
1341 stmt = gimple_build_assign (ptrtmp, ptr);
1342 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1343 mark_symbols_for_renaming (stmt);
1344 find_referenced_vars_in (stmt);
1346 ptr = fold_build2_loc (input_location,
1347 POINTER_PLUS_EXPR, TREE_TYPE (ptrtmp), ptrtmp,
1348 size_int (fixed_offset));
1351 /* Emit the statement and gimplify the adjustment expression. */
1352 ret = create_tmp_var (TREE_TYPE (ptr), "adjusted_this");
1353 stmt = gimple_build_assign (ret, ptr);
1354 mark_symbols_for_renaming (stmt);
1355 find_referenced_vars_in (stmt);
1356 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1358 return ret;
1361 /* Produce assembler for thunk NODE. */
1363 static void
1364 assemble_thunk (struct cgraph_node *node)
1366 bool this_adjusting = node->thunk.this_adjusting;
1367 HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
1368 HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
1369 tree virtual_offset = NULL;
1370 tree alias = node->thunk.alias;
1371 tree thunk_fndecl = node->decl;
1372 tree a = DECL_ARGUMENTS (thunk_fndecl);
1374 current_function_decl = thunk_fndecl;
1376 /* Ensure thunks are emitted in their correct sections. */
1377 resolve_unique_section (thunk_fndecl, 0, flag_function_sections);
1379 if (this_adjusting
1380 && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1381 virtual_value, alias))
1383 const char *fnname;
1384 tree fn_block;
1386 DECL_RESULT (thunk_fndecl)
1387 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1388 RESULT_DECL, 0, integer_type_node);
1389 fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
1391 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1392 create one. */
1393 fn_block = make_node (BLOCK);
1394 BLOCK_VARS (fn_block) = a;
1395 DECL_INITIAL (thunk_fndecl) = fn_block;
1396 init_function_start (thunk_fndecl);
1397 cfun->is_thunk = 1;
1398 assemble_start_function (thunk_fndecl, fnname);
1400 targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1401 fixed_offset, virtual_value, alias);
1403 assemble_end_function (thunk_fndecl, fnname);
1404 init_insn_lengths ();
1405 free_after_compilation (cfun);
1406 set_cfun (NULL);
1407 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1409 else
1411 tree restype;
1412 basic_block bb, then_bb, else_bb, return_bb;
1413 gimple_stmt_iterator bsi;
1414 int nargs = 0;
1415 tree arg;
1416 int i;
1417 tree resdecl;
1418 tree restmp = NULL;
1419 VEC(tree, heap) *vargs;
1421 gimple call;
1422 gimple ret;
1424 DECL_IGNORED_P (thunk_fndecl) = 1;
1425 bitmap_obstack_initialize (NULL);
1427 if (node->thunk.virtual_offset_p)
1428 virtual_offset = size_int (virtual_value);
1430 /* Build the return declaration for the function. */
1431 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1432 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1434 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1435 DECL_ARTIFICIAL (resdecl) = 1;
1436 DECL_IGNORED_P (resdecl) = 1;
1437 DECL_RESULT (thunk_fndecl) = resdecl;
1439 else
1440 resdecl = DECL_RESULT (thunk_fndecl);
1442 bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl);
1444 bsi = gsi_start_bb (bb);
1446 /* Build call to the function being thunked. */
1447 if (!VOID_TYPE_P (restype))
1449 if (!is_gimple_reg_type (restype))
1451 restmp = resdecl;
1452 add_local_decl (cfun, restmp);
1453 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
1455 else
1456 restmp = create_tmp_var_raw (restype, "retval");
1459 for (arg = a; arg; arg = DECL_CHAIN (arg))
1460 nargs++;
1461 vargs = VEC_alloc (tree, heap, nargs);
1462 if (this_adjusting)
1463 VEC_quick_push (tree, vargs,
1464 thunk_adjust (&bsi,
1465 a, 1, fixed_offset,
1466 virtual_offset));
1467 else
1468 VEC_quick_push (tree, vargs, a);
1469 for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
1470 VEC_quick_push (tree, vargs, arg);
1471 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
1472 VEC_free (tree, heap, vargs);
1473 gimple_call_set_cannot_inline (call, true);
1474 gimple_call_set_from_thunk (call, true);
1475 if (restmp)
1476 gimple_call_set_lhs (call, restmp);
1477 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
1478 mark_symbols_for_renaming (call);
1479 find_referenced_vars_in (call);
1480 update_stmt (call);
1482 if (restmp && !this_adjusting)
1484 tree true_label = NULL_TREE;
1486 if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
1488 gimple stmt;
1489 /* If the return type is a pointer, we need to
1490 protect against NULL. We know there will be an
1491 adjustment, because that's why we're emitting a
1492 thunk. */
1493 then_bb = create_basic_block (NULL, (void *) 0, bb);
1494 return_bb = create_basic_block (NULL, (void *) 0, then_bb);
1495 else_bb = create_basic_block (NULL, (void *) 0, else_bb);
1496 remove_edge (single_succ_edge (bb));
1497 true_label = gimple_block_label (then_bb);
1498 stmt = gimple_build_cond (NE_EXPR, restmp,
1499 build_zero_cst (TREE_TYPE (restmp)),
1500 NULL_TREE, NULL_TREE);
1501 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1502 make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1503 make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1504 make_edge (return_bb, EXIT_BLOCK_PTR, 0);
1505 make_edge (then_bb, return_bb, EDGE_FALLTHRU);
1506 make_edge (else_bb, return_bb, EDGE_FALLTHRU);
1507 bsi = gsi_last_bb (then_bb);
1510 restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
1511 fixed_offset, virtual_offset);
1512 if (true_label)
1514 gimple stmt;
1515 bsi = gsi_last_bb (else_bb);
1516 stmt = gimple_build_assign (restmp,
1517 build_zero_cst (TREE_TYPE (restmp)));
1518 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1519 bsi = gsi_last_bb (return_bb);
1522 else
1523 gimple_call_set_tail (call, true);
1525 /* Build return value. */
1526 ret = gimple_build_return (restmp);
1527 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
1529 delete_unreachable_blocks ();
1530 update_ssa (TODO_update_ssa);
1532 cgraph_remove_same_body_alias (node);
1533 /* Since we want to emit the thunk, we explicitly mark its name as
1534 referenced. */
1535 cgraph_add_new_function (thunk_fndecl, true);
1536 bitmap_obstack_release (NULL);
1538 current_function_decl = NULL;
1541 /* Expand function specified by NODE. */
1543 static void
1544 cgraph_expand_function (struct cgraph_node *node)
1546 tree decl = node->decl;
1548 /* We ought to not compile any inline clones. */
1549 gcc_assert (!node->global.inlined_to);
1551 announce_function (decl);
1552 node->process = 0;
1553 if (node->same_body)
1555 struct cgraph_node *alias, *next;
1556 bool saved_alias = node->alias;
1557 for (alias = node->same_body;
1558 alias && alias->next; alias = alias->next)
1560 /* Walk aliases in the order they were created; it is possible that
1561 thunks refer to the aliases made earlier. */
1562 for (; alias; alias = next)
1564 next = alias->previous;
1565 if (!alias->thunk.thunk_p)
1566 assemble_alias (alias->decl,
1567 DECL_ASSEMBLER_NAME (alias->thunk.alias));
1568 else
1569 assemble_thunk (alias);
1571 node->alias = saved_alias;
1572 cgraph_process_new_functions ();
1575 gcc_assert (node->lowered);
1577 /* Generate RTL for the body of DECL. */
1578 tree_rest_of_compilation (decl);
1580 /* Make sure that BE didn't give up on compiling. */
1581 gcc_assert (TREE_ASM_WRITTEN (decl));
1582 current_function_decl = NULL;
1583 gcc_assert (!cgraph_preserve_function_body_p (node));
1584 cgraph_release_function_body (node);
1585 /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
1586 points to the dead function body. */
1587 cgraph_node_remove_callees (node);
1589 cgraph_function_flags_ready = true;
1592 /* Return true when the call on edge E should be inlined; otherwise *REASON holds why inlining failed. */
1594 bool
1595 cgraph_inline_p (struct cgraph_edge *e, cgraph_inline_failed_t *reason)
1597 *reason = e->inline_failed;
1598 return !e->inline_failed;
1603 /* Expand all functions that must be output.
1605 Attempt to topologically sort the nodes so that a function is output when
1606 all called functions have already been assembled, allowing data to be
1607 propagated across the callgraph. Use a stack to get smaller distance
1608 between a function and its callees (later we may choose to use a more
1609 sophisticated algorithm for function reordering; we will likely want
1610 to use subsections to make the output functions appear in top-down
1611 order). */
1613 static void
1614 cgraph_expand_all_functions (void)
1616 struct cgraph_node *node;
1617 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1618 int order_pos, new_order_pos = 0;
1619 int i;
1621 order_pos = cgraph_postorder (order);
1622 gcc_assert (order_pos == cgraph_n_nodes);
1624 /* The garbage collector may remove inline clones we eliminate during
1625 optimization, so we must be sure not to reference them. */
1626 for (i = 0; i < order_pos; i++)
1627 if (order[i]->process)
1628 order[new_order_pos++] = order[i];
1630 for (i = new_order_pos - 1; i >= 0; i--)
1632 node = order[i];
1633 if (node->process)
1635 gcc_assert (node->reachable);
1636 node->process = 0;
1637 cgraph_expand_function (node);
1640 cgraph_process_new_functions ();
1642 free (order);
1646 /* This is used to sort the node types by the cgraph order number. */
1648 enum cgraph_order_sort_kind
1650 ORDER_UNDEFINED = 0,
1651 ORDER_FUNCTION,
1652 ORDER_VAR,
1653 ORDER_ASM
1656 struct cgraph_order_sort
1658 enum cgraph_order_sort_kind kind;
1659 union
1661 struct cgraph_node *f;
1662 struct varpool_node *v;
1663 struct cgraph_asm_node *a;
1664 } u;
1667 /* Output all functions, variables, and asm statements in the order
1668 according to their order fields, which is the order in which they
1669 appeared in the file. This implements -fno-toplevel-reorder. In
1670 this mode we may output functions and variables which don't really
1671 need to be output. */
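/* For instance, with -fno-toplevel-reorder a unit such as

       int x = 1;
       asm (".globl marker\nmarker:");
       static int y = 2;

   is emitted with x, the toplevel asm, and y in exactly this source order
   (using the order fields), whereas the normal path may reorder them and
   drop y if it is unreferenced.  (A sketch; the asm text is illustrative.)  */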
1673 static void
1674 cgraph_output_in_order (void)
1676 int max;
1677 struct cgraph_order_sort *nodes;
1678 int i;
1679 struct cgraph_node *pf;
1680 struct varpool_node *pv;
1681 struct cgraph_asm_node *pa;
1683 max = cgraph_order;
1684 nodes = XCNEWVEC (struct cgraph_order_sort, max);
1686 varpool_analyze_pending_decls ();
1688 for (pf = cgraph_nodes; pf; pf = pf->next)
1690 if (pf->process)
1692 i = pf->order;
1693 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1694 nodes[i].kind = ORDER_FUNCTION;
1695 nodes[i].u.f = pf;
1699 for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
1701 i = pv->order;
1702 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1703 nodes[i].kind = ORDER_VAR;
1704 nodes[i].u.v = pv;
1707 for (pa = cgraph_asm_nodes; pa; pa = pa->next)
1709 i = pa->order;
1710 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1711 nodes[i].kind = ORDER_ASM;
1712 nodes[i].u.a = pa;
1715 /* In -fno-toplevel-reorder mode we output all statics; mark them as needed. */
1716 for (i = 0; i < max; ++i)
1718 if (nodes[i].kind == ORDER_VAR)
1720 varpool_mark_needed_node (nodes[i].u.v);
1723 varpool_empty_needed_queue ();
1725 for (i = 0; i < max; ++i)
1726 if (nodes[i].kind == ORDER_VAR)
1727 varpool_finalize_named_section_flags (nodes[i].u.v);
1729 for (i = 0; i < max; ++i)
1731 switch (nodes[i].kind)
1733 case ORDER_FUNCTION:
1734 nodes[i].u.f->process = 0;
1735 cgraph_expand_function (nodes[i].u.f);
1736 break;
1738 case ORDER_VAR:
1739 varpool_assemble_decl (nodes[i].u.v);
1740 break;
1742 case ORDER_ASM:
1743 assemble_asm (nodes[i].u.a->asm_str);
1744 break;
1746 case ORDER_UNDEFINED:
1747 break;
1749 default:
1750 gcc_unreachable ();
1754 cgraph_asm_nodes = NULL;
1755 free (nodes);
1758 /* Return true when the function body of NODE still needs to be kept around
1759 for later re-use. */
1760 bool
1761 cgraph_preserve_function_body_p (struct cgraph_node *node)
1763 gcc_assert (cgraph_global_info_ready);
1764 gcc_assert (!node->same_body_alias);
1766 /* Look if there is any clone around. */
1767 if (node->clones)
1768 return true;
1769 return false;
1772 static void
1773 ipa_passes (void)
1775 set_cfun (NULL);
1776 current_function_decl = NULL;
1777 gimple_register_cfg_hooks ();
1778 bitmap_obstack_initialize (NULL);
1780 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);
1782 if (!in_lto_p)
1784 execute_ipa_pass_list (all_small_ipa_passes);
1785 if (seen_error ())
1786 return;
1789 /* If pass_all_early_optimizations was not scheduled, the state of
1790 the cgraph will not be properly updated. Update it now. */
1791 if (cgraph_state < CGRAPH_STATE_IPA_SSA)
1792 cgraph_state = CGRAPH_STATE_IPA_SSA;
1794 if (!in_lto_p)
1796 /* Generate coverage variables and constructors. */
1797 coverage_finish ();
1799 /* Process new functions added. */
1800 set_cfun (NULL);
1801 current_function_decl = NULL;
1802 cgraph_process_new_functions ();
1804 execute_ipa_summary_passes
1805 ((struct ipa_opt_pass_d *) all_regular_ipa_passes);
1808 /* Some targets need to handle LTO assembler output specially. */
1809 if (flag_generate_lto)
1810 targetm.asm_out.lto_start ();
1812 execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);
1814 if (!in_lto_p)
1815 ipa_write_summaries ();
1817 if (flag_generate_lto)
1818 targetm.asm_out.lto_end ();
1820 if (!flag_ltrans)
1821 execute_ipa_pass_list (all_regular_ipa_passes);
1822 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
1824 bitmap_obstack_release (NULL);
1828 /* Perform simple optimizations based on callgraph. */
1830 void
1831 cgraph_optimize (void)
1833 if (seen_error ())
1834 return;
1836 #ifdef ENABLE_CHECKING
1837 verify_cgraph ();
1838 #endif
1840 /* The front end may output common variables after the unit has been finalized.
1841 It is safe to deal with them here as they are always zero-initialized. */
1842 varpool_analyze_pending_decls ();
1844 timevar_push (TV_CGRAPHOPT);
1845 if (pre_ipa_mem_report)
1847 fprintf (stderr, "Memory consumption before IPA\n");
1848 dump_memory_report (false);
1850 if (!quiet_flag)
1851 fprintf (stderr, "Performing interprocedural optimizations\n");
1852 cgraph_state = CGRAPH_STATE_IPA;
1854 /* Don't run the IPA passes if there were any error or sorry messages. */
1855 if (!seen_error ())
1856 ipa_passes ();
1858 /* Do nothing else if any IPA pass found errors. */
1859 if (seen_error ())
1861 timevar_pop (TV_CGRAPHOPT);
1862 return;
1865 /* This pass removes bodies of extern inline functions we never inlined.
1866 Do this later so other IPA passes see what is really going on. */
1867 cgraph_remove_unreachable_nodes (false, dump_file);
1868 cgraph_global_info_ready = true;
1869 if (cgraph_dump_file)
1871 fprintf (cgraph_dump_file, "Optimized ");
1872 dump_cgraph (cgraph_dump_file);
1873 dump_varpool (cgraph_dump_file);
1875 if (post_ipa_mem_report)
1877 fprintf (stderr, "Memory consumption after IPA\n");
1878 dump_memory_report (false);
1880 timevar_pop (TV_CGRAPHOPT);
1882 /* Output everything. */
1883 (*debug_hooks->assembly_start) ();
1884 if (!quiet_flag)
1885 fprintf (stderr, "Assembling functions:\n");
1886 #ifdef ENABLE_CHECKING
1887 verify_cgraph ();
1888 #endif
1890 cgraph_materialize_all_clones ();
1891 cgraph_mark_functions_to_output ();
1893 cgraph_state = CGRAPH_STATE_EXPANSION;
1894 if (!flag_toplevel_reorder)
1895 cgraph_output_in_order ();
1896 else
1898 cgraph_output_pending_asms ();
1900 cgraph_expand_all_functions ();
1901 varpool_remove_unreferenced_decls ();
1903 varpool_assemble_pending_decls ();
1905 cgraph_process_new_functions ();
1906 cgraph_state = CGRAPH_STATE_FINISHED;
1908 if (cgraph_dump_file)
1910 fprintf (cgraph_dump_file, "\nFinal ");
1911 dump_cgraph (cgraph_dump_file);
1912 dump_varpool (cgraph_dump_file);
1914 #ifdef ENABLE_CHECKING
1915 verify_cgraph ();
1916 /* Double check that all inline clones are gone and that all
1917 function bodies have been released from memory. */
1918 if (!seen_error ())
1920 struct cgraph_node *node;
1921 bool error_found = false;
1923 for (node = cgraph_nodes; node; node = node->next)
1924 if (node->analyzed
1925 && (node->global.inlined_to
1926 || gimple_has_body_p (node->decl)))
1928 error_found = true;
1929 dump_cgraph_node (stderr, node);
1931 if (error_found)
1932 internal_error ("nodes with unreleased memory found");
1934 #endif
1937 void
1938 init_cgraph (void)
1940 if (!cgraph_dump_file)
1941 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
1944 /* The edges representing the callers of the NEW_VERSION node were
1945 fixed by cgraph_function_versioning (); now the call_expr in their
1946 respective tree code should be updated to call the NEW_VERSION. */
1948 static void
1949 update_call_expr (struct cgraph_node *new_version)
1951 struct cgraph_edge *e;
1953 gcc_assert (new_version);
1955 /* Update the call expr on the edges to call the new version. */
1956 for (e = new_version->callers; e; e = e->next_caller)
1958 struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
1959 gimple_call_set_fndecl (e->call_stmt, new_version->decl);
1960 maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
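
/* A minimal sketch of the same callers/next_caller chain walked above; a
   hypothetical helper like this (not part of this file's interface) could
   count the call sites that update_call_expr would rewrite for NODE.

     static int
     example_count_callers (struct cgraph_node *node)
     {
       struct cgraph_edge *e;
       int n = 0;

       for (e = node->callers; e; e = e->next_caller)
         n++;
       return n;
     }
*/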

/* Create a new cgraph node which is the new version of
   OLD_VERSION node.  REDIRECT_CALLERS holds the caller
   edges which should be redirected to point to
   NEW_VERSION.  All the callee edges of OLD_VERSION
   are cloned to the new version node.  Return the new
   version node.

   If non-NULL, BBS_TO_COPY determines which basic blocks
   are copied, to prevent duplication of calls that are dead
   in the clone.  */

static struct cgraph_node *
cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
                                 tree new_decl,
                                 VEC(cgraph_edge_p,heap) *redirect_callers,
                                 bitmap bbs_to_copy)
{
  struct cgraph_node *new_version;
  struct cgraph_edge *e;
  unsigned i;

  gcc_assert (old_version);

  new_version = cgraph_create_node (new_decl);

  new_version->analyzed = true;
  new_version->local = old_version->local;
  new_version->local.externally_visible = false;
  new_version->local.local = true;
  new_version->global = old_version->global;
  new_version->rtl = old_version->rtl;
  new_version->reachable = true;
  new_version->count = old_version->count;

  for (e = old_version->callees; e; e = e->next_callee)
    if (!bbs_to_copy
        || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
      cgraph_clone_edge (e, new_version, e->call_stmt,
                         e->lto_stmt_uid, REG_BR_PROB_BASE,
                         CGRAPH_FREQ_BASE,
                         true);
  for (e = old_version->indirect_calls; e; e = e->next_callee)
    if (!bbs_to_copy
        || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
      cgraph_clone_edge (e, new_version, e->call_stmt,
                         e->lto_stmt_uid, REG_BR_PROB_BASE,
                         CGRAPH_FREQ_BASE,
                         true);
  FOR_EACH_VEC_ELT (cgraph_edge_p, redirect_callers, i, e)
    {
      /* Redirect calls to the old version node to point to its new
         version.  */
      cgraph_redirect_edge_callee (e, new_version);
    }

  return new_version;
}
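
/* A minimal sketch, assuming a hypothetical caller that wants every
   existing call site moved to the clone: one way to build the
   REDIRECT_CALLERS vector expected above and by cgraph_function_versioning
   below, where NODE stands for the node being versioned.

     VEC (cgraph_edge_p, heap) *redirect_callers = NULL;
     struct cgraph_edge *e;

     for (e = node->callers; e; e = e->next_caller)
       VEC_safe_push (cgraph_edge_p, heap, redirect_callers, e);
*/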

/* Perform function versioning.
   Function versioning includes copying of the tree and
   a callgraph update (creating a new cgraph node and updating
   its callees and callers).

   The REDIRECT_CALLERS vector contains the edges to be redirected
   to the new version.

   TREE_MAP is a mapping of tree nodes we want to replace with
   new ones (according to results of prior analysis).
   OLD_VERSION_NODE is the node that is versioned.

   It returns the new version's cgraph node.
   If non-NULL, ARGS_TO_SKIP determines which function parameters
   to remove from the new version.
   If non-NULL, BBS_TO_COPY determines which basic blocks to copy.
   If non-NULL, NEW_ENTRY_BLOCK determines the new entry BB of the clone.
   CLONE_NAME is the suffix passed to clone_function_name when building
   the new version's name.  */

struct cgraph_node *
cgraph_function_versioning (struct cgraph_node *old_version_node,
                            VEC(cgraph_edge_p,heap) *redirect_callers,
                            VEC (ipa_replace_map_p,gc)* tree_map,
                            bitmap args_to_skip,
                            bitmap bbs_to_copy,
                            basic_block new_entry_block,
                            const char *clone_name)
{
  tree old_decl = old_version_node->decl;
  struct cgraph_node *new_version_node = NULL;
  tree new_decl;

  if (!tree_versionable_function_p (old_decl))
    return NULL;

  gcc_assert (old_version_node->local.can_change_signature || !args_to_skip);

  /* Make a new FUNCTION_DECL tree node for the new version.  */
  if (!args_to_skip)
    new_decl = copy_node (old_decl);
  else
    new_decl = build_function_decl_skip_args (old_decl, args_to_skip);

  /* Generate a new name for the new version.  */
  DECL_NAME (new_decl) = clone_function_name (old_decl, clone_name);
  SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
  SET_DECL_RTL (new_decl, NULL);

  /* Create the new version's call-graph node
     and update the edges of the new node.  */
  new_version_node =
    cgraph_copy_node_for_versioning (old_version_node, new_decl,
                                     redirect_callers, bbs_to_copy);

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
                            bbs_to_copy, new_entry_block);

  /* Update the new version's properties.
     Make the new version visible only within this translation unit.  Make
     sure it is not weak either.
     ??? We cannot use COMDAT linkage because there is no
     ABI support for this.  */
  cgraph_make_decl_local (new_version_node->decl);
  DECL_VIRTUAL_P (new_version_node->decl) = 0;
  new_version_node->local.externally_visible = 0;
  new_version_node->local.local = 1;
  new_version_node->lowered = true;

  /* Update the call_expr on the edges to call the new version node.  */
  update_call_expr (new_version_node);

  cgraph_call_function_insertion_hooks (new_version_node);
  return new_version_node;
}
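
/* A minimal usage sketch, assuming a hypothetical pass that wants a local
   clone of NODE with no parameters removed, no replacement map, and
   REDIRECT_CALLERS collected as sketched earlier; the helper name and the
   clone suffix are illustrative only.

     static struct cgraph_node *
     example_clone_function (struct cgraph_node *node,
                             VEC (cgraph_edge_p, heap) *redirect_callers)
     {
       struct cgraph_node *clone;

       clone = cgraph_function_versioning (node, redirect_callers,
                                           NULL, NULL, NULL, NULL,
                                           "example_clone");
       VEC_free (cgraph_edge_p, heap, redirect_callers);
       return clone;
     }
*/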

/* Given a virtual clone, turn it into an actual clone.  */
static void
cgraph_materialize_clone (struct cgraph_node *node)
{
  bitmap_obstack_initialize (NULL);
  node->former_clone_of = node->clone_of->decl;
  if (node->clone_of->former_clone_of)
    node->former_clone_of = node->clone_of->former_clone_of;
  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->clone_of->decl, node->decl,
                            node->clone.tree_map, true,
                            node->clone.args_to_skip, NULL, NULL);
  if (cgraph_dump_file)
    {
      dump_function_to_file (node->clone_of->decl, cgraph_dump_file,
                             dump_flags);
      dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
    }

  /* The function is no longer a clone.  */
  if (node->next_sibling_clone)
    node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
  if (node->prev_sibling_clone)
    node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
  else
    node->clone_of->clones = node->next_sibling_clone;
  node->next_sibling_clone = NULL;
  node->prev_sibling_clone = NULL;
  if (!node->clone_of->analyzed && !node->clone_of->clones)
    {
      cgraph_release_function_body (node->clone_of);
      cgraph_node_remove_callees (node->clone_of);
      ipa_remove_all_references (&node->clone_of->ref_list);
    }
  node->clone_of = NULL;
  bitmap_obstack_release (NULL);
}
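
/* Illustrative sketch (hypothetical helper): the sibling-clone links
   unlinked above form a list rooted at clone_of->clones, so the virtual
   clones still attached to NODE can be visited like this.

     static int
     example_count_virtual_clones (struct cgraph_node *node)
     {
       struct cgraph_node *clone;
       int count = 0;

       for (clone = node->clones; clone; clone = clone->next_sibling_clone)
         count++;
       return count;
     }
*/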

/* If necessary, change the function declaration in the call statement
   associated with E so that it corresponds to the edge callee.  */

gimple
cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *e)
{
  tree decl = gimple_call_fndecl (e->call_stmt);
  gimple new_stmt;
  gimple_stmt_iterator gsi;
  bool gsi_computed = false;
#ifdef ENABLE_CHECKING
  struct cgraph_node *node;
#endif

  if (e->indirect_unknown_callee
      || decl == e->callee->decl
      /* Don't update call from same body alias to the real function.  */
      || (decl && cgraph_get_node (decl) == cgraph_get_node (e->callee->decl)))
    return e->call_stmt;

#ifdef ENABLE_CHECKING
  if (decl)
    {
      node = cgraph_get_node (decl);
      gcc_assert (!node || !node->clone.combined_args_to_skip);
    }
#endif

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "updating call of %s/%i -> %s/%i: ",
               cgraph_node_name (e->caller), e->caller->uid,
               cgraph_node_name (e->callee), e->callee->uid);
      print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
      if (e->callee->clone.combined_args_to_skip)
        {
          fprintf (cgraph_dump_file, " combined args to skip: ");
          dump_bitmap (cgraph_dump_file,
                       e->callee->clone.combined_args_to_skip);
        }
    }

  if (e->indirect_info
      && e->indirect_info->thunk_delta != 0
      && (!e->callee->clone.combined_args_to_skip
          || !bitmap_bit_p (e->callee->clone.combined_args_to_skip, 0)))
    {
      if (cgraph_dump_file)
        fprintf (cgraph_dump_file, "  Thunk delta is "
                 HOST_WIDE_INT_PRINT_DEC "\n", e->indirect_info->thunk_delta);
      gsi = gsi_for_stmt (e->call_stmt);
      gsi_computed = true;
      gimple_adjust_this_by_delta (&gsi,
                                   build_int_cst (sizetype,
                                                  e->indirect_info->thunk_delta));
      e->indirect_info->thunk_delta = 0;
    }

  if (e->callee->clone.combined_args_to_skip)
    {
      int lp_nr;

      new_stmt
        = gimple_call_copy_skip_args (e->call_stmt,
                                      e->callee->clone.combined_args_to_skip);
      gimple_call_set_fndecl (new_stmt, e->callee->decl);

      if (gimple_vdef (new_stmt)
          && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
        SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;

      if (!gsi_computed)
        gsi = gsi_for_stmt (e->call_stmt);
      gsi_replace (&gsi, new_stmt, false);
      /* We need to defer cleaning EH info on the new statement to
         fixup-cfg.  We may not have dominator information at this point
         and thus would end up with unreachable blocks and have no way
         to communicate that we need to run CFG cleanup then.  */
      lp_nr = lookup_stmt_eh_lp (e->call_stmt);
      if (lp_nr != 0)
        {
          remove_stmt_from_eh_lp (e->call_stmt);
          add_stmt_to_eh_lp (new_stmt, lp_nr);
        }
    }
  else
    {
      new_stmt = e->call_stmt;
      gimple_call_set_fndecl (new_stmt, e->callee->decl);
      update_stmt (new_stmt);
    }

  cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt, new_stmt);

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "  updated to:");
      print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
    }
  return new_stmt;
}
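
/* A minimal sketch, assuming a hypothetical pass whose function bodies are
   in memory: every outgoing call statement of NODE can be brought in sync
   with its edge callee by running the redirection above over all callee
   edges.

     static void
     example_update_call_stmts (struct cgraph_node *node)
     {
       struct cgraph_edge *e;

       for (e = node->callees; e; e = e->next_callee)
         cgraph_redirect_edge_call_stmt_to_callee (e);
     }
*/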

/* Once all functions from the compilation unit are in memory, produce all
   clones and update all calls.  We might also do this on demand if we don't
   want to bring all functions to memory prior to compilation, but the
   current WHOPR implementation does that, and it is a bit easier to keep
   everything right in this order.  */
void
cgraph_materialize_all_clones (void)
{
  struct cgraph_node *node;
  bool stabilized = false;

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "Materializing clones\n");
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* We can also do topological order, but number of iterations should be
     bounded by number of IPA passes since single IPA pass is probably not
     going to create clones of clones it created itself.  */
  while (!stabilized)
    {
      stabilized = true;
      for (node = cgraph_nodes; node; node = node->next)
        {
          if (node->clone_of && node->decl != node->clone_of->decl
              && !gimple_has_body_p (node->decl))
            {
              if (gimple_has_body_p (node->clone_of->decl))
                {
                  if (cgraph_dump_file)
                    {
                      fprintf (cgraph_dump_file, "cloning %s to %s\n",
                               cgraph_node_name (node->clone_of),
                               cgraph_node_name (node));
                      if (node->clone.tree_map)
                        {
                          unsigned int i;
                          fprintf (cgraph_dump_file, "   replace map: ");
                          for (i = 0; i < VEC_length (ipa_replace_map_p,
                                                      node->clone.tree_map);
                               i++)
                            {
                              struct ipa_replace_map *replace_info;
                              replace_info = VEC_index (ipa_replace_map_p,
                                                        node->clone.tree_map,
                                                        i);
                              print_generic_expr (cgraph_dump_file,
                                                  replace_info->old_tree, 0);
                              fprintf (cgraph_dump_file, " -> ");
                              print_generic_expr (cgraph_dump_file,
                                                  replace_info->new_tree, 0);
                              fprintf (cgraph_dump_file, "%s%s;",
                                       replace_info->replace_p ? "(replace)":"",
                                       replace_info->ref_p ? "(ref)":"");
                            }
                          fprintf (cgraph_dump_file, "\n");
                        }
                      if (node->clone.args_to_skip)
                        {
                          fprintf (cgraph_dump_file, "   args_to_skip: ");
                          dump_bitmap (cgraph_dump_file,
                                       node->clone.args_to_skip);
                        }
                      if (node->clone.combined_args_to_skip)
                        {
                          fprintf (cgraph_dump_file, "   combined_args_to_skip:");
                          dump_bitmap (cgraph_dump_file,
                                       node->clone.combined_args_to_skip);
                        }
                    }
                  cgraph_materialize_clone (node);
                  stabilized = false;
                }
            }
        }
    }
  for (node = cgraph_nodes; node; node = node->next)
    if (!node->analyzed && node->callees)
      cgraph_node_remove_callees (node);
  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "Materialization Call site updates done.\n");
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif
  cgraph_remove_unreachable_nodes (false, cgraph_dump_file);
}

#include "gt-cgraphunit.h"