Merge from mainline (167278:168000).
[official-gcc/graphite-test-results.git] / gcc / cgraphunit.c
blob013cf638f717089fa42ec99b3d3db0a952bfc607
1 /* Callgraph based interprocedural optimizations.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This module implements main driver of compilation process as well as
23 few basic interprocedural optimizers.
25 The main scope of this file is to act as an interface in between
26 tree based frontends and the backend (and middle end)
28 The front-end is supposed to use following functionality:
30 - cgraph_finalize_function
32 This function is called once front-end has parsed whole body of function
33 and it is certain that neither the function body nor the declaration will change.
35 (There is one exception needed for implementing GCC extern inline
36 function.)
38 - varpool_finalize_variable
40 This function has same behavior as the above but is used for static
41 variables.
43 - cgraph_finalize_compilation_unit
45 This function is called once (source level) compilation unit is finalized
46 and it will no longer change.
48 In the call-graph construction and local function
49 analysis takes place here. Bodies of unreachable functions are released
50 to conserve memory usage.
52 The function can be called multiple times when multiple source level
53 compilation units are combined (such as in C frontend)
55 - cgraph_optimize
57 In this unit-at-a-time compilation the intra procedural analysis takes
58 place here. In particular the static functions whose address is never
59 taken are marked as local. Backend can then use this information to
60 modify calling conventions, do better inlining or similar optimizations.
62 - cgraph_mark_needed_node
63 - varpool_mark_needed_node
65 When function or variable is referenced by some hidden way the call-graph
66 data structure must be updated accordingly by this function.
67 There should be little need to call this function and all the references
68 should be made explicit to cgraph code. At present these functions are
69 used by C++ frontend to explicitly mark the keyed methods.
71 - analyze_expr callback
73 This function is responsible for lowering tree nodes not understood by
74 generic code into understandable ones or alternatively marking
75 callgraph and varpool nodes referenced by them as needed.
77 ??? On the tree-ssa genericizing should take place here and we will avoid
78 need for these hooks (replacing them by genericizing hook)
80 Analyzing of all functions is deferred
81 to cgraph_finalize_compilation_unit and expansion into cgraph_optimize.
83 In cgraph_finalize_compilation_unit the reachable functions are
84 analyzed. During analysis the call-graph edges from reachable
85 functions are constructed and their destinations are marked as
86 reachable. References to functions and variables are discovered too
87 and variables found to be needed output to the assembly file. Via
88 mark_referenced call in assemble_variable functions referenced by
89 static variables are noticed too.
91 The intra-procedural information is produced and its existence
92 indicated by global_info_ready. Once this flag is set it is impossible
93 to change function from !reachable to reachable and thus
94 assemble_variable no longer call mark_referenced.
96 Finally the call-graph is topologically sorted and all reachable functions
97 that has not been completely inlined or are not external are output.
99 ??? It is possible that reference to function or variable is optimized
100 out. We can not deal with this nicely because topological order is not
101 suitable for it. For tree-ssa we may consider another pass doing
102 optimization and re-discovering reachable functions.
104 ??? Reorganize code so variables are output very last and only if they
105 really has been referenced by produced code, so we catch more cases
106 where reference has been optimized out. */
109 #include "config.h"
110 #include "system.h"
111 #include "coretypes.h"
112 #include "tm.h"
113 #include "tree.h"
114 #include "rtl.h"
115 #include "tree-flow.h"
116 #include "tree-inline.h"
117 #include "langhooks.h"
118 #include "pointer-set.h"
119 #include "toplev.h"
120 #include "flags.h"
121 #include "ggc.h"
122 #include "debug.h"
123 #include "target.h"
124 #include "cgraph.h"
125 #include "diagnostic.h"
126 #include "tree-pretty-print.h"
127 #include "gimple-pretty-print.h"
128 #include "timevar.h"
129 #include "params.h"
130 #include "fibheap.h"
131 #include "intl.h"
132 #include "function.h"
133 #include "ipa-prop.h"
134 #include "gimple.h"
135 #include "tree-iterator.h"
136 #include "tree-pass.h"
137 #include "tree-dump.h"
138 #include "output.h"
139 #include "coverage.h"
140 #include "plugin.h"
142 static void cgraph_expand_all_functions (void);
143 static void cgraph_mark_functions_to_output (void);
144 static void cgraph_expand_function (struct cgraph_node *);
145 static void cgraph_output_pending_asms (void);
146 static void cgraph_analyze_function (struct cgraph_node *);
148 FILE *cgraph_dump_file;
150 /* Used for vtable lookup in thunk adjusting. */
151 static GTY (()) tree vtable_entry_type;
153 /* Determine if function DECL is needed. That is, visible to something
154 either outside this translation unit, something magic in the system
155 configury. */
157 bool
158 cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
160 /* If the user told us it is used, then it must be so. */
161 if (node->local.externally_visible)
162 return true;
164 /* ??? If the assembler name is set by hand, it is possible to assemble
165 the name later after finalizing the function and the fact is noticed
166 in assemble_name then. This is arguably a bug. */
167 if (DECL_ASSEMBLER_NAME_SET_P (decl)
168 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
169 return true;
171 /* With -fkeep-inline-functions we are keeping all inline functions except
172 for extern inline ones. */
173 if (flag_keep_inline_functions
174 && DECL_DECLARED_INLINE_P (decl)
175 && !DECL_EXTERNAL (decl)
176 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl)))
177 return true;
179 /* If we decided it was needed before, but at the time we didn't have
180 the body of the function available, then it's still needed. We have
181 to go back and re-check its dependencies now. */
182 if (node->needed)
183 return true;
185 /* Externally visible functions must be output. The exception is
186 COMDAT functions that must be output only when they are needed.
188 When not optimizing, also output the static functions. (see
189 PR24561), but don't do so for always_inline functions, functions
190 declared inline and nested functions. These were optimized out
191 in the original implementation and it is unclear whether we want
192 to change the behavior here. */
193 if (((TREE_PUBLIC (decl)
194 || (!optimize
195 && !node->local.disregard_inline_limits
196 && !DECL_DECLARED_INLINE_P (decl)
197 && !(DECL_CONTEXT (decl)
198 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)))
199 && !flag_whole_program
200 && !flag_lto)
201 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
202 return true;
204 return false;
207 /* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
208 functions into callgraph in a way so they look like ordinary reachable
209 functions inserted into callgraph already at construction time. */
211 bool
212 cgraph_process_new_functions (void)
214 bool output = false;
215 tree fndecl;
216 struct cgraph_node *node;
218 varpool_analyze_pending_decls ();
219 /* Note that this queue may grow as its being processed, as the new
220 functions may generate new ones. */
221 while (cgraph_new_nodes)
223 node = cgraph_new_nodes;
224 fndecl = node->decl;
225 cgraph_new_nodes = cgraph_new_nodes->next_needed;
226 switch (cgraph_state)
228 case CGRAPH_STATE_CONSTRUCTION:
229 /* At construction time we just need to finalize function and move
230 it into reachable functions list. */
232 node->next_needed = NULL;
233 cgraph_finalize_function (fndecl, false);
234 cgraph_mark_reachable_node (node);
235 output = true;
236 break;
238 case CGRAPH_STATE_IPA:
239 case CGRAPH_STATE_IPA_SSA:
240 /* When IPA optimization already started, do all essential
241 transformations that has been already performed on the whole
242 cgraph but not on this function. */
244 gimple_register_cfg_hooks ();
245 if (!node->analyzed)
246 cgraph_analyze_function (node);
247 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
248 current_function_decl = fndecl;
249 compute_inline_parameters (node);
250 if ((cgraph_state == CGRAPH_STATE_IPA_SSA
251 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
252 /* When not optimizing, be sure we run early local passes anyway
253 to expand OMP. */
254 || !optimize)
255 execute_pass_list (pass_early_local_passes.pass.sub);
256 free_dominance_info (CDI_POST_DOMINATORS);
257 free_dominance_info (CDI_DOMINATORS);
258 pop_cfun ();
259 current_function_decl = NULL;
260 break;
262 case CGRAPH_STATE_EXPANSION:
263 /* Functions created during expansion shall be compiled
264 directly. */
265 node->process = 0;
266 cgraph_expand_function (node);
267 break;
269 default:
270 gcc_unreachable ();
271 break;
273 cgraph_call_function_insertion_hooks (node);
274 varpool_analyze_pending_decls ();
276 return output;
279 /* As an GCC extension we allow redefinition of the function. The
280 semantics when both copies of bodies differ is not well defined.
281 We replace the old body with new body so in unit at a time mode
282 we always use new body, while in normal mode we may end up with
283 old body inlined into some functions and new body expanded and
284 inlined in others.
286 ??? It may make more sense to use one body for inlining and other
287 body for expanding the function but this is difficult to do. */
289 static void
290 cgraph_reset_node (struct cgraph_node *node)
292 /* If node->process is set, then we have already begun whole-unit analysis.
293 This is *not* testing for whether we've already emitted the function.
294 That case can be sort-of legitimately seen with real function redefinition
295 errors. I would argue that the front end should never present us with
296 such a case, but don't enforce that for now. */
297 gcc_assert (!node->process);
299 /* Reset our data structures so we can analyze the function again. */
300 memset (&node->local, 0, sizeof (node->local));
301 memset (&node->global, 0, sizeof (node->global));
302 memset (&node->rtl, 0, sizeof (node->rtl));
303 node->analyzed = false;
304 node->local.redefined_extern_inline = true;
305 node->local.finalized = false;
307 cgraph_node_remove_callees (node);
309 /* We may need to re-queue the node for assembling in case
310 we already proceeded it and ignored as not needed or got
311 a re-declaration in IMA mode. */
312 if (node->reachable)
314 struct cgraph_node *n;
316 for (n = cgraph_nodes_queue; n; n = n->next_needed)
317 if (n == node)
318 break;
319 if (!n)
320 node->reachable = 0;
324 static void
325 cgraph_lower_function (struct cgraph_node *node)
327 if (node->lowered)
328 return;
330 if (node->nested)
331 lower_nested_functions (node->decl);
332 gcc_assert (!node->nested);
334 tree_lowering_passes (node->decl);
335 node->lowered = true;
338 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
339 logic in effect. If NESTED is true, then our caller cannot stand to have
340 the garbage collector run at the moment. We would need to either create
341 a new GC context, or just not compile right now. */
343 void
344 cgraph_finalize_function (tree decl, bool nested)
346 struct cgraph_node *node = cgraph_node (decl);
348 if (node->local.finalized)
349 cgraph_reset_node (node);
351 node->pid = cgraph_max_pid ++;
352 notice_global_symbol (decl);
353 node->local.finalized = true;
354 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
355 node->finalized_by_frontend = true;
357 if (cgraph_decide_is_function_needed (node, decl))
358 cgraph_mark_needed_node (node);
360 /* Since we reclaim unreachable nodes at the end of every language
361 level unit, we need to be conservative about possible entry points
362 there. */
363 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
364 || DECL_STATIC_CONSTRUCTOR (decl)
365 || DECL_STATIC_DESTRUCTOR (decl)
366 /* COMDAT virtual functions may be referenced by vtable from
367 other compilatoin unit. Still we want to devirtualize calls
368 to those so we need to analyze them.
369 FIXME: We should introduce may edges for this purpose and update
370 their handling in unreachable function removal and inliner too. */
371 || (DECL_VIRTUAL_P (decl) && (DECL_COMDAT (decl) || DECL_EXTERNAL (decl))))
372 cgraph_mark_reachable_node (node);
374 /* If we've not yet emitted decl, tell the debug info about it. */
375 if (!TREE_ASM_WRITTEN (decl))
376 (*debug_hooks->deferred_inline_function) (decl);
378 /* Possibly warn about unused parameters. */
379 if (warn_unused_parameter)
380 do_warn_unused_parameter (decl);
382 if (!nested)
383 ggc_collect ();
386 /* C99 extern inline keywords allow changing of declaration after function
387 has been finalized. We need to re-decide if we want to mark the function as
388 needed then. */
390 void
391 cgraph_mark_if_needed (tree decl)
393 struct cgraph_node *node = cgraph_node (decl);
394 if (node->local.finalized && cgraph_decide_is_function_needed (node, decl))
395 cgraph_mark_needed_node (node);
398 /* Return TRUE if NODE2 is equivalent to NODE or its clone. */
399 static bool
400 clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
402 while (node != node2 && node2)
403 node2 = node2->clone_of;
404 return node2 != NULL;
407 /* Verify edge E count and frequency. */
409 static bool
410 verify_edge_count_and_frequency (struct cgraph_edge *e)
412 bool error_found = false;
413 if (e->count < 0)
415 error ("caller edge count is negative");
416 error_found = true;
418 if (e->frequency < 0)
420 error ("caller edge frequency is negative");
421 error_found = true;
423 if (e->frequency > CGRAPH_FREQ_MAX)
425 error ("caller edge frequency is too large");
426 error_found = true;
428 if (gimple_has_body_p (e->caller->decl)
429 && !e->caller->global.inlined_to
430 && (e->frequency
431 != compute_call_stmt_bb_frequency (e->caller->decl,
432 gimple_bb (e->call_stmt))))
434 error ("caller edge frequency %i does not match BB freqency %i",
435 e->frequency,
436 compute_call_stmt_bb_frequency (e->caller->decl,
437 gimple_bb (e->call_stmt)));
438 error_found = true;
440 return error_found;
443 /* Verify cgraph nodes of given cgraph node. */
444 DEBUG_FUNCTION void
445 verify_cgraph_node (struct cgraph_node *node)
447 struct cgraph_edge *e;
448 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
449 struct function *saved_cfun = cfun;
450 basic_block this_block;
451 gimple_stmt_iterator gsi;
452 bool error_found = false;
454 if (seen_error ())
455 return;
457 timevar_push (TV_CGRAPH_VERIFY);
458 /* debug_generic_stmt needs correct cfun */
459 set_cfun (this_cfun);
460 for (e = node->callees; e; e = e->next_callee)
461 if (e->aux)
463 error ("aux field set for edge %s->%s",
464 identifier_to_locale (cgraph_node_name (e->caller)),
465 identifier_to_locale (cgraph_node_name (e->callee)));
466 error_found = true;
468 if (node->count < 0)
470 error ("execution count is negative");
471 error_found = true;
473 if (node->global.inlined_to && node->local.externally_visible)
475 error ("externally visible inline clone");
476 error_found = true;
478 if (node->global.inlined_to && node->address_taken)
480 error ("inline clone with address taken");
481 error_found = true;
483 if (node->global.inlined_to && node->needed)
485 error ("inline clone is needed");
486 error_found = true;
488 for (e = node->indirect_calls; e; e = e->next_callee)
490 if (e->aux)
492 error ("aux field set for indirect edge from %s",
493 identifier_to_locale (cgraph_node_name (e->caller)));
494 error_found = true;
496 if (!e->indirect_unknown_callee
497 || !e->indirect_info)
499 error ("An indirect edge from %s is not marked as indirect or has "
500 "associated indirect_info, the corresponding statement is: ",
501 identifier_to_locale (cgraph_node_name (e->caller)));
502 debug_gimple_stmt (e->call_stmt);
503 error_found = true;
506 for (e = node->callers; e; e = e->next_caller)
508 if (verify_edge_count_and_frequency (e))
509 error_found = true;
510 if (!e->inline_failed)
512 if (node->global.inlined_to
513 != (e->caller->global.inlined_to
514 ? e->caller->global.inlined_to : e->caller))
516 error ("inlined_to pointer is wrong");
517 error_found = true;
519 if (node->callers->next_caller)
521 error ("multiple inline callers");
522 error_found = true;
525 else
526 if (node->global.inlined_to)
528 error ("inlined_to pointer set for noninline callers");
529 error_found = true;
532 for (e = node->indirect_calls; e; e = e->next_callee)
533 if (verify_edge_count_and_frequency (e))
534 error_found = true;
535 if (!node->callers && node->global.inlined_to)
537 error ("inlined_to pointer is set but no predecessors found");
538 error_found = true;
540 if (node->global.inlined_to == node)
542 error ("inlined_to pointer refers to itself");
543 error_found = true;
546 if (!cgraph_node (node->decl))
548 error ("node not found in cgraph_hash");
549 error_found = true;
552 if (node->clone_of)
554 struct cgraph_node *n;
555 for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
556 if (n == node)
557 break;
558 if (!n)
560 error ("node has wrong clone_of");
561 error_found = true;
564 if (node->clones)
566 struct cgraph_node *n;
567 for (n = node->clones; n; n = n->next_sibling_clone)
568 if (n->clone_of != node)
569 break;
570 if (n)
572 error ("node has wrong clone list");
573 error_found = true;
576 if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
578 error ("node is in clone list but it is not clone");
579 error_found = true;
581 if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
583 error ("node has wrong prev_clone pointer");
584 error_found = true;
586 if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
588 error ("double linked list of clones corrupted");
589 error_found = true;
591 if (node->same_comdat_group)
593 struct cgraph_node *n = node->same_comdat_group;
595 if (!DECL_ONE_ONLY (node->decl))
597 error ("non-DECL_ONE_ONLY node in a same_comdat_group list");
598 error_found = true;
600 if (n == node)
602 error ("node is alone in a comdat group");
603 error_found = true;
607 if (!n->same_comdat_group)
609 error ("same_comdat_group is not a circular list");
610 error_found = true;
611 break;
613 n = n->same_comdat_group;
615 while (n != node);
618 if (node->analyzed && gimple_has_body_p (node->decl)
619 && !TREE_ASM_WRITTEN (node->decl)
620 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
621 && !flag_wpa)
623 if (this_cfun->cfg)
625 /* The nodes we're interested in are never shared, so walk
626 the tree ignoring duplicates. */
627 struct pointer_set_t *visited_nodes = pointer_set_create ();
628 /* Reach the trees by walking over the CFG, and note the
629 enclosing basic-blocks in the call edges. */
630 FOR_EACH_BB_FN (this_block, this_cfun)
631 for (gsi = gsi_start_bb (this_block);
632 !gsi_end_p (gsi);
633 gsi_next (&gsi))
635 gimple stmt = gsi_stmt (gsi);
636 if (is_gimple_call (stmt))
638 struct cgraph_edge *e = cgraph_edge (node, stmt);
639 tree decl = gimple_call_fndecl (stmt);
640 if (e)
642 if (e->aux)
644 error ("shared call_stmt:");
645 debug_gimple_stmt (stmt);
646 error_found = true;
648 if (!e->indirect_unknown_callee)
650 if (e->callee->same_body_alias)
652 error ("edge points to same body alias:");
653 debug_tree (e->callee->decl);
654 error_found = true;
656 else if (!e->callee->global.inlined_to
657 && decl
658 && cgraph_get_node (decl)
659 && (e->callee->former_clone_of
660 != cgraph_get_node (decl)->decl)
661 && !clone_of_p (cgraph_node (decl),
662 e->callee))
664 error ("edge points to wrong declaration:");
665 debug_tree (e->callee->decl);
666 fprintf (stderr," Instead of:");
667 debug_tree (decl);
668 error_found = true;
671 else if (decl)
673 error ("an indirect edge with unknown callee "
674 "corresponding to a call_stmt with "
675 "a known declaration:");
676 error_found = true;
677 debug_gimple_stmt (e->call_stmt);
679 e->aux = (void *)1;
681 else if (decl)
683 error ("missing callgraph edge for call stmt:");
684 debug_gimple_stmt (stmt);
685 error_found = true;
689 pointer_set_destroy (visited_nodes);
691 else
692 /* No CFG available?! */
693 gcc_unreachable ();
695 for (e = node->callees; e; e = e->next_callee)
697 if (!e->aux)
699 error ("edge %s->%s has no corresponding call_stmt",
700 identifier_to_locale (cgraph_node_name (e->caller)),
701 identifier_to_locale (cgraph_node_name (e->callee)));
702 debug_gimple_stmt (e->call_stmt);
703 error_found = true;
705 e->aux = 0;
707 for (e = node->indirect_calls; e; e = e->next_callee)
709 if (!e->aux)
711 error ("an indirect edge from %s has no corresponding call_stmt",
712 identifier_to_locale (cgraph_node_name (e->caller)));
713 debug_gimple_stmt (e->call_stmt);
714 error_found = true;
716 e->aux = 0;
719 if (error_found)
721 dump_cgraph_node (stderr, node);
722 internal_error ("verify_cgraph_node failed");
724 set_cfun (saved_cfun);
725 timevar_pop (TV_CGRAPH_VERIFY);
728 /* Verify whole cgraph structure. */
729 DEBUG_FUNCTION void
730 verify_cgraph (void)
732 struct cgraph_node *node;
734 if (seen_error ())
735 return;
737 for (node = cgraph_nodes; node; node = node->next)
738 verify_cgraph_node (node);
741 /* Output all asm statements we have stored up to be output. */
743 static void
744 cgraph_output_pending_asms (void)
746 struct cgraph_asm_node *can;
748 if (seen_error ())
749 return;
751 for (can = cgraph_asm_nodes; can; can = can->next)
752 assemble_asm (can->asm_str);
753 cgraph_asm_nodes = NULL;
756 /* Analyze the function scheduled to be output. */
757 static void
758 cgraph_analyze_function (struct cgraph_node *node)
760 tree save = current_function_decl;
761 tree decl = node->decl;
763 current_function_decl = decl;
764 push_cfun (DECL_STRUCT_FUNCTION (decl));
766 assign_assembler_name_if_neeeded (node->decl);
768 /* Make sure to gimplify bodies only once. During analyzing a
769 function we lower it, which will require gimplified nested
770 functions, so we can end up here with an already gimplified
771 body. */
772 if (!gimple_body (decl))
773 gimplify_function_tree (decl);
774 dump_function (TDI_generic, decl);
776 cgraph_lower_function (node);
777 node->analyzed = true;
779 pop_cfun ();
780 current_function_decl = save;
783 /* Look for externally_visible and used attributes and mark cgraph nodes
784 accordingly.
786 We cannot mark the nodes at the point the attributes are processed (in
787 handle_*_attribute) because the copy of the declarations available at that
788 point may not be canonical. For example, in:
790 void f();
791 void f() __attribute__((used));
793 the declaration we see in handle_used_attribute will be the second
794 declaration -- but the front end will subsequently merge that declaration
795 with the original declaration and discard the second declaration.
797 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
799 void f() {}
800 void f() __attribute__((externally_visible));
802 is valid.
804 So, we walk the nodes at the end of the translation unit, applying the
805 attributes at that point. */
807 static void
808 process_function_and_variable_attributes (struct cgraph_node *first,
809 struct varpool_node *first_var)
811 struct cgraph_node *node;
812 struct varpool_node *vnode;
814 for (node = cgraph_nodes; node != first; node = node->next)
816 tree decl = node->decl;
817 if (DECL_PRESERVE_P (decl))
818 cgraph_mark_needed_node (node);
819 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
820 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl))
821 && TREE_PUBLIC (node->decl))
823 if (node->local.finalized)
824 cgraph_mark_needed_node (node);
826 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
828 if (! TREE_PUBLIC (node->decl))
829 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
830 "%<externally_visible%>"
831 " attribute have effect only on public objects");
832 else if (node->local.finalized)
833 cgraph_mark_needed_node (node);
836 for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
838 tree decl = vnode->decl;
839 if (DECL_PRESERVE_P (decl))
841 vnode->force_output = true;
842 if (vnode->finalized)
843 varpool_mark_needed_node (vnode);
845 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
846 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl))
847 && TREE_PUBLIC (vnode->decl))
849 if (vnode->finalized)
850 varpool_mark_needed_node (vnode);
852 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
854 if (! TREE_PUBLIC (vnode->decl))
855 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
856 "%<externally_visible%>"
857 " attribute have effect only on public objects");
858 else if (vnode->finalized)
859 varpool_mark_needed_node (vnode);
864 /* Process CGRAPH_NODES_NEEDED queue, analyze each function (and transitively
865 each reachable functions) and build cgraph.
866 The function can be called multiple times after inserting new nodes
867 into beginning of queue. Just the new part of queue is re-scanned then. */
869 static void
870 cgraph_analyze_functions (void)
872 /* Keep track of already processed nodes when called multiple times for
873 intermodule optimization. */
874 static struct cgraph_node *first_analyzed;
875 struct cgraph_node *first_processed = first_analyzed;
876 static struct varpool_node *first_analyzed_var;
877 struct cgraph_node *node, *next;
879 bitmap_obstack_initialize (NULL);
880 process_function_and_variable_attributes (first_processed,
881 first_analyzed_var);
882 first_processed = cgraph_nodes;
883 first_analyzed_var = varpool_nodes;
884 varpool_analyze_pending_decls ();
885 if (cgraph_dump_file)
887 fprintf (cgraph_dump_file, "Initial entry points:");
888 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
889 if (node->needed)
890 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
891 fprintf (cgraph_dump_file, "\n");
893 cgraph_process_new_functions ();
895 /* Propagate reachability flag and lower representation of all reachable
896 functions. In the future, lowering will introduce new functions and
897 new entry points on the way (by template instantiation and virtual
898 method table generation for instance). */
899 while (cgraph_nodes_queue)
901 struct cgraph_edge *edge;
902 tree decl = cgraph_nodes_queue->decl;
904 node = cgraph_nodes_queue;
905 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
906 node->next_needed = NULL;
908 /* ??? It is possible to create extern inline function and later using
909 weak alias attribute to kill its body. See
910 gcc.c-torture/compile/20011119-1.c */
911 if (!DECL_STRUCT_FUNCTION (decl))
913 cgraph_reset_node (node);
914 continue;
917 if (!node->analyzed)
918 cgraph_analyze_function (node);
920 for (edge = node->callees; edge; edge = edge->next_callee)
921 if (!edge->callee->reachable)
922 cgraph_mark_reachable_node (edge->callee);
924 if (node->same_comdat_group)
926 for (next = node->same_comdat_group;
927 next != node;
928 next = next->same_comdat_group)
929 cgraph_mark_reachable_node (next);
932 /* If decl is a clone of an abstract function, mark that abstract
933 function so that we don't release its body. The DECL_INITIAL() of that
934 abstract function declaration will be later needed to output debug info. */
935 if (DECL_ABSTRACT_ORIGIN (decl))
937 struct cgraph_node *origin_node = cgraph_node (DECL_ABSTRACT_ORIGIN (decl));
938 origin_node->abstract_and_needed = true;
941 /* We finalize local static variables during constructing callgraph
942 edges. Process their attributes too. */
943 process_function_and_variable_attributes (first_processed,
944 first_analyzed_var);
945 first_processed = cgraph_nodes;
946 first_analyzed_var = varpool_nodes;
947 varpool_analyze_pending_decls ();
948 cgraph_process_new_functions ();
951 /* Collect entry points to the unit. */
952 if (cgraph_dump_file)
954 fprintf (cgraph_dump_file, "Unit entry points:");
955 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
956 if (node->needed)
957 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
958 fprintf (cgraph_dump_file, "\n\nInitial ");
959 dump_cgraph (cgraph_dump_file);
960 dump_varpool (cgraph_dump_file);
963 if (cgraph_dump_file)
964 fprintf (cgraph_dump_file, "\nReclaiming functions:");
966 for (node = cgraph_nodes; node != first_analyzed; node = next)
968 tree decl = node->decl;
969 next = node->next;
971 if (node->local.finalized && !gimple_has_body_p (decl))
972 cgraph_reset_node (node);
974 if (!node->reachable && gimple_has_body_p (decl))
976 if (cgraph_dump_file)
977 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
978 cgraph_remove_node (node);
979 continue;
981 else
982 node->next_needed = NULL;
983 gcc_assert (!node->local.finalized || gimple_has_body_p (decl));
984 gcc_assert (node->analyzed == node->local.finalized);
986 if (cgraph_dump_file)
988 fprintf (cgraph_dump_file, "\n\nReclaimed ");
989 dump_cgraph (cgraph_dump_file);
990 dump_varpool (cgraph_dump_file);
992 bitmap_obstack_release (NULL);
993 first_analyzed = cgraph_nodes;
994 ggc_collect ();
998 /* Analyze the whole compilation unit once it is parsed completely. */
1000 void
1001 cgraph_finalize_compilation_unit (void)
1003 timevar_push (TV_CGRAPH);
1005 /* Do not skip analyzing the functions if there were errors, we
1006 miss diagnostics for following functions otherwise. */
1008 /* Emit size functions we didn't inline. */
1009 finalize_size_functions ();
1011 /* Mark alias targets necessary and emit diagnostics. */
1012 finish_aliases_1 ();
1014 if (!quiet_flag)
1016 fprintf (stderr, "\nAnalyzing compilation unit\n");
1017 fflush (stderr);
1020 /* Gimplify and lower all functions, compute reachability and
1021 remove unreachable nodes. */
1022 cgraph_analyze_functions ();
1024 /* Mark alias targets necessary and emit diagnostics. */
1025 finish_aliases_1 ();
1027 /* Gimplify and lower thunks. */
1028 cgraph_analyze_functions ();
1030 /* Finally drive the pass manager. */
1031 cgraph_optimize ();
1033 timevar_pop (TV_CGRAPH);
/* Figure out what functions we want to assemble.

   Walk every cgraph node and set node->process on those that must be
   emitted: analyzed, not an inline clone, either externally reachable
   or called through an edge that was not inlined, not yet written and
   not external.  Members of the same comdat group are marked together
   so the whole group is emitted as a unit.  With ENABLE_CHECKING,
   verify that every function we chose NOT to output had its body
   reclaimed already.  */

static void
cgraph_mark_functions_to_output (void)
{
  struct cgraph_node *node;
#ifdef ENABLE_CHECKING
  bool check_same_comdat_groups = false;

  /* No node may be marked for processing before we start.  */
  for (node = cgraph_nodes; node; node = node->next)
    gcc_assert (!node->process);
#endif

  for (node = cgraph_nodes; node; node = node->next)
    {
      tree decl = node->decl;
      struct cgraph_edge *e;

      /* Only a comdat-group sibling may have marked us already.  */
      gcc_assert (!node->process || node->same_comdat_group);
      if (node->process)
	continue;

      /* Look for at least one caller that did not get inlined.  */
      for (e = node->callers; e; e = e->next_caller)
	if (e->inline_failed)
	  break;

      /* We need to output all local functions that are used and not
	 always inlined, as well as those that are reachable from
	 outside the current compilation unit.  */
      if (node->analyzed
	  && !node->global.inlined_to
	  && (!cgraph_only_called_directly_p (node)
	      || (e && node->reachable))
	  && !TREE_ASM_WRITTEN (decl)
	  && !DECL_EXTERNAL (decl))
	{
	  node->process = 1;
	  /* Emit the rest of the comdat group along with us.  */
	  if (node->same_comdat_group)
	    {
	      struct cgraph_node *next;
	      for (next = node->same_comdat_group;
		   next != node;
		   next = next->same_comdat_group)
		next->process = 1;
	    }
	}
      else if (node->same_comdat_group)
	{
#ifdef ENABLE_CHECKING
	  /* A group member may still get marked later; re-verify at the
	     end instead of diagnosing now.  */
	  check_same_comdat_groups = true;
#endif
	}
      else
	{
	  /* We should've reclaimed all functions that are not needed.  */
#ifdef ENABLE_CHECKING
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in ltrans unit when offline copy is outside partition
		 but inline copies are inside partition, we can end up not
		 removing the body since we no longer have analyzed node
		 pointing to it.  */
	      && !node->in_other_partition
	      && !DECL_EXTERNAL (decl))
	    {
	      dump_cgraph_node (stderr, node);
	      internal_error ("failed to reclaim unneeded function");
	    }
#endif
	  gcc_assert (node->global.inlined_to
		      || !gimple_has_body_p (decl)
		      || node->in_other_partition
		      || DECL_EXTERNAL (decl));
	}
    }
#ifdef ENABLE_CHECKING
  /* Re-check comdat members that never got marked: their bodies must
     have been reclaimed as well.  */
  if (check_same_comdat_groups)
    for (node = cgraph_nodes; node; node = node->next)
      if (node->same_comdat_group && !node->process)
	{
	  tree decl = node->decl;
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in ltrans unit when offline copy is outside partition
		 but inline copies are inside partition, we can end up not
		 removing the body since we no longer have analyzed node
		 pointing to it.  */
	      && !node->in_other_partition
	      && !DECL_EXTERNAL (decl))
	    {
	      dump_cgraph_node (stderr, node);
	      internal_error ("failed to reclaim unneeded function");
	    }
	}
#endif
}
/* DECL is FUNCTION_DECL.  Initialize datastructures so DECL is a function
   in lowered gimple form (CFG built, SSA form enabled, empty body).

   Set current_function_decl and cfun to newly constructed empty function
   body.  Return the single basic block created for the function body,
   already wired between the entry and exit blocks.  */

static basic_block
init_lowered_empty_function (tree decl)
{
  basic_block bb;

  current_function_decl = decl;
  allocate_struct_function (decl, false);
  gimple_register_cfg_hooks ();
  init_empty_tree_cfg ();
  init_tree_ssa (cfun);
  init_ssa_operands ();
  cfun->gimple_df->in_ssa_p = true;
  DECL_INITIAL (decl) = make_node (BLOCK);

  /* Mark the body as "already expanded" placeholder.  */
  DECL_SAVED_TREE (decl) = error_mark_node;
  /* Claim the properties the lowering passes would otherwise establish,
     so the pass manager does not try to lower this function again.  */
  cfun->curr_properties |=
    (PROP_gimple_lcf | PROP_gimple_leh | PROP_cfg | PROP_referenced_vars |
     PROP_ssa);

  /* Create BB for body of the function and connect it properly.  */
  bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
  make_edge (ENTRY_BLOCK_PTR, bb, 0);
  make_edge (bb, EXIT_BLOCK_PTR, 0);

  return bb;
}
/* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
   offset indicated by VIRTUAL_OFFSET, if that is
   non-null.  THIS_ADJUSTING is nonzero for a this adjusting thunk and
   zero for a result adjusting thunk.

   The adjustment statements are emitted after the iterator *BSI; the
   adjusted pointer is returned in a fresh temporary.  For a
   this-adjusting thunk the fixed offset is applied before the virtual
   lookup; for a result-adjusting thunk it is applied after.  */

static tree
thunk_adjust (gimple_stmt_iterator * bsi,
	      tree ptr, bool this_adjusting,
	      HOST_WIDE_INT fixed_offset, tree virtual_offset)
{
  gimple stmt;
  tree ret;

  if (this_adjusting
      && fixed_offset != 0)
    {
      stmt = gimple_build_assign (ptr,
				  fold_build2_loc (input_location,
						   POINTER_PLUS_EXPR,
						   TREE_TYPE (ptr), ptr,
						   size_int (fixed_offset)));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
    }

  /* If there's a virtual offset, look up that value in the vtable and
     adjust the pointer again.  */
  if (virtual_offset)
    {
      tree vtabletmp;
      tree vtabletmp2;
      tree vtabletmp3;
      tree offsettmp;

      /* Lazily build the type "pointer to vtable entry" used below.  */
      if (!vtable_entry_type)
	{
	  tree vfunc_type = make_node (FUNCTION_TYPE);
	  TREE_TYPE (vfunc_type) = integer_type_node;
	  TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
	  layout_type (vfunc_type);

	  vtable_entry_type = build_pointer_type (vfunc_type);
	}

      vtabletmp =
	create_tmp_var (build_pointer_type
			(build_pointer_type (vtable_entry_type)), "vptr");

      /* The vptr is always at offset zero in the object.  */
      stmt = gimple_build_assign (vtabletmp,
				  build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
					  ptr));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Form the vtable address.  */
      vtabletmp2 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp)),
				   "vtableaddr");
      stmt = gimple_build_assign (vtabletmp2,
				  build_simple_mem_ref (vtabletmp));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Find the entry with the vcall offset.  */
      stmt = gimple_build_assign (vtabletmp2,
				  fold_build2_loc (input_location,
						   POINTER_PLUS_EXPR,
						   TREE_TYPE (vtabletmp2),
						   vtabletmp2,
						   fold_convert (sizetype,
								 virtual_offset)));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Get the offset itself.  */
      vtabletmp3 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp2)),
				   "vcalloffset");
      stmt = gimple_build_assign (vtabletmp3,
				  build_simple_mem_ref (vtabletmp2));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Cast to sizetype.  */
      offsettmp = create_tmp_var (sizetype, "offset");
      stmt = gimple_build_assign (offsettmp, fold_convert (sizetype, vtabletmp3));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Adjust the `this' pointer.  */
      ptr = fold_build2_loc (input_location,
			     POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
			     offsettmp);
    }

  if (!this_adjusting
      && fixed_offset != 0)
    /* Adjust the pointer by the constant.  */
    {
      tree ptrtmp;

      if (TREE_CODE (ptr) == VAR_DECL)
        ptrtmp = ptr;
      else
        {
	  /* PTR may be a complex expression; force it into a variable
	     before adding the offset.  */
          ptrtmp = create_tmp_var (TREE_TYPE (ptr), "ptr");
          stmt = gimple_build_assign (ptrtmp, ptr);
	  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
	  mark_symbols_for_renaming (stmt);
	  find_referenced_vars_in (stmt);
	}
      ptr = fold_build2_loc (input_location,
			     POINTER_PLUS_EXPR, TREE_TYPE (ptrtmp), ptrtmp,
			     size_int (fixed_offset));
    }

  /* Emit the statement and gimplify the adjustment expression.  */
  ret = create_tmp_var (TREE_TYPE (ptr), "adjusted_this");
  stmt = gimple_build_assign (ret, ptr);
  mark_symbols_for_renaming (stmt);
  find_referenced_vars_in (stmt);
  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

  return ret;
}
/* Produce assembler for thunk NODE.

   Two strategies: if the target can emit the thunk directly
   (asm_out.can_output_mi_thunk), let it write raw assembly.  Otherwise
   synthesize a GIMPLE body that adjusts the `this' pointer (and, for a
   covariant-return thunk, the result) via thunk_adjust, calls the real
   function, and hand the new function to the cgraph for compilation.  */

static void
assemble_thunk (struct cgraph_node *node)
{
  bool this_adjusting = node->thunk.this_adjusting;
  HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
  HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
  tree virtual_offset = NULL;
  tree alias = node->thunk.alias;
  tree thunk_fndecl = node->decl;
  tree a = DECL_ARGUMENTS (thunk_fndecl);

  current_function_decl = thunk_fndecl;

  if (this_adjusting
      && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
					      virtual_value, alias))
    {
      const char *fnname;
      tree fn_block;

      DECL_RESULT (thunk_fndecl)
	= build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
		      RESULT_DECL, 0, integer_type_node);
      fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));

      /* The back end expects DECL_INITIAL to contain a BLOCK, so we
	 create one.  */
      fn_block = make_node (BLOCK);
      BLOCK_VARS (fn_block) = a;
      DECL_INITIAL (thunk_fndecl) = fn_block;
      init_function_start (thunk_fndecl);
      cfun->is_thunk = 1;
      assemble_start_function (thunk_fndecl, fnname);

      targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
				       fixed_offset, virtual_value, alias);

      assemble_end_function (thunk_fndecl, fnname);
      init_insn_lengths ();
      free_after_compilation (cfun);
      set_cfun (NULL);
      TREE_ASM_WRITTEN (thunk_fndecl) = 1;
    }
  else
    {
      tree restype;
      basic_block bb, then_bb, else_bb, return_bb;
      gimple_stmt_iterator bsi;
      int nargs = 0;
      tree arg;
      int i;
      tree resdecl;
      tree restmp = NULL;
      VEC(tree, heap) *vargs;

      gimple call;
      gimple ret;

      DECL_IGNORED_P (thunk_fndecl) = 1;
      bitmap_obstack_initialize (NULL);

      if (node->thunk.virtual_offset_p)
        virtual_offset = size_int (virtual_value);

      /* Build the return declaration for the function.  */
      restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
      if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
	{
	  resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
	  DECL_ARTIFICIAL (resdecl) = 1;
	  DECL_IGNORED_P (resdecl) = 1;
	  DECL_RESULT (thunk_fndecl) = resdecl;
	}
      else
	resdecl = DECL_RESULT (thunk_fndecl);

      bb = then_bb = else_bb = return_bb
	= init_lowered_empty_function (thunk_fndecl);

      bsi = gsi_start_bb (bb);

      /* Build call to the function being thunked.  */
      if (!VOID_TYPE_P (restype))
	{
	  if (!is_gimple_reg_type (restype))
	    {
	      /* Aggregate return: reuse the RESULT_DECL directly.  */
	      restmp = resdecl;
	      add_local_decl (cfun, restmp);
	      BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
	    }
	  else
            restmp = create_tmp_var_raw (restype, "retval");
	}

      for (arg = a; arg; arg = DECL_CHAIN (arg))
        nargs++;
      vargs = VEC_alloc (tree, heap, nargs);
      /* First argument is `this'; adjust it for a this-adjusting thunk.  */
      if (this_adjusting)
        VEC_quick_push (tree, vargs,
			thunk_adjust (&bsi,
				      a, 1, fixed_offset,
				      virtual_offset));
      else
        VEC_quick_push (tree, vargs, a);
      for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
        VEC_quick_push (tree, vargs, arg);
      call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
      VEC_free (tree, heap, vargs);
      gimple_call_set_cannot_inline (call, true);
      gimple_call_set_from_thunk (call, true);
      if (restmp)
        gimple_call_set_lhs (call, restmp);
      gsi_insert_after (&bsi, call, GSI_NEW_STMT);
      mark_symbols_for_renaming (call);
      find_referenced_vars_in (call);
      update_stmt (call);

      if (restmp && !this_adjusting)
        {
	  tree true_label = NULL_TREE;

	  if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
	    {
	      gimple stmt;
	      /* If the return type is a pointer, we need to
		 protect against NULL.  We know there will be an
		 adjustment, because that's why we're emitting a
		 thunk.  */
	      then_bb = create_basic_block (NULL, (void *) 0, bb);
	      return_bb = create_basic_block (NULL, (void *) 0, then_bb);
	      else_bb = create_basic_block (NULL, (void *) 0, else_bb);
	      remove_edge (single_succ_edge (bb));
	      true_label = gimple_block_label (then_bb);
	      stmt = gimple_build_cond (NE_EXPR, restmp,
					build_zero_cst (TREE_TYPE (restmp)),
					NULL_TREE, NULL_TREE);
	      gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
	      make_edge (bb, then_bb, EDGE_TRUE_VALUE);
	      make_edge (bb, else_bb, EDGE_FALSE_VALUE);
	      make_edge (return_bb, EXIT_BLOCK_PTR, 0);
	      make_edge (then_bb, return_bb, EDGE_FALLTHRU);
	      make_edge (else_bb, return_bb, EDGE_FALLTHRU);
	      bsi = gsi_last_bb (then_bb);
	    }

	  /* Adjust the returned pointer on the non-NULL path.  */
	  restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
				 fixed_offset, virtual_offset);
	  if (true_label)
	    {
	      gimple stmt;
	      /* NULL stays NULL: assign zero on the else path.  */
	      bsi = gsi_last_bb (else_bb);
	      stmt = gimple_build_assign (restmp,
					  build_zero_cst (TREE_TYPE (restmp)));
	      gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
	      bsi = gsi_last_bb (return_bb);
	    }
	}
      else
	/* No result adjustment needed; the call may be a tail call.  */
        gimple_call_set_tail (call, true);

      /* Build return value.  */
      ret = gimple_build_return (restmp);
      gsi_insert_after (&bsi, ret, GSI_NEW_STMT);

      delete_unreachable_blocks ();
      update_ssa (TODO_update_ssa);

      cgraph_remove_same_body_alias (node);
      /* Since we want to emit the thunk, we explicitly mark its name as
	 referenced.  */
      cgraph_add_new_function (thunk_fndecl, true);
      bitmap_obstack_release (NULL);
    }
  current_function_decl = NULL;
}
/* Expand function specified by NODE: emit its same-body aliases and
   thunks, generate RTL for the body, then release the GIMPLE body and
   all outgoing call edges since they now point into freed memory.  */

static void
cgraph_expand_function (struct cgraph_node *node)
{
  tree decl = node->decl;

  /* We ought to not compile any inline clones.  */
  gcc_assert (!node->global.inlined_to);

  announce_function (decl);
  node->process = 0;
  if (node->same_body)
    {
      struct cgraph_node *alias, *next;
      bool saved_alias = node->alias;
      /* Find the last entry of the same_body list ...  */
      for (alias = node->same_body;
	   alias && alias->next; alias = alias->next)
	;
      /* ... and walk aliases in the order they were created; it is
	 possible that thunks refer to the aliases made earlier.  */
      for (; alias; alias = next)
	{
	  next = alias->previous;
	  if (!alias->thunk.thunk_p)
	    assemble_alias (alias->decl,
			    DECL_ASSEMBLER_NAME (alias->thunk.alias));
	  else
	    assemble_thunk (alias);
	}
      node->alias = saved_alias;
      cgraph_process_new_functions ();
    }

  gcc_assert (node->lowered);

  /* Generate RTL for the body of DECL.  */
  tree_rest_of_compilation (decl);

  /* Make sure that BE didn't give up on compiling.  */
  gcc_assert (TREE_ASM_WRITTEN (decl));
  current_function_decl = NULL;
  gcc_assert (!cgraph_preserve_function_body_p (decl));
  cgraph_release_function_body (node);
  /* Eliminate all call edges.  This is important so the GIMPLE_CALL no longer
     points to the dead function body.  */
  cgraph_node_remove_callees (node);

  cgraph_function_flags_ready = true;
}
1522 /* Return true when CALLER_DECL should be inlined into CALLEE_DECL. */
1524 bool
1525 cgraph_inline_p (struct cgraph_edge *e, cgraph_inline_failed_t *reason)
1527 *reason = e->inline_failed;
1528 return !e->inline_failed;
/* Expand all functions that must be output.

   Attempt to topologically sort the nodes so function is output when
   all called functions are already assembled to allow data to be
   propagated across the callgraph.  Use a stack to get smaller distance
   between a function and its callees (later we may choose to use a more
   sophisticated algorithm for function reordering; we will likely want
   to use subsections to make the output functions appear in top-down
   order).  */

static void
cgraph_expand_all_functions (void)
{
  struct cgraph_node *node;
  struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
  int order_pos, new_order_pos = 0;
  int i;

  order_pos = cgraph_postorder (order);
  gcc_assert (order_pos == cgraph_n_nodes);

  /* Garbage collector may remove inline clones we eliminate during
     optimization.  So we must be sure to not reference them.  */
  for (i = 0; i < order_pos; i++)
    if (order[i]->process)
      order[new_order_pos++] = order[i];

  /* Walk in reverse postorder so callees are emitted first.  */
  for (i = new_order_pos - 1; i >= 0; i--)
    {
      node = order[i];
      if (node->process)
	{
	  gcc_assert (node->reachable);
	  node->process = 0;
	  cgraph_expand_function (node);
	}
    }
  cgraph_process_new_functions ();

  free (order);
}
/* This is used to sort the node types by the cgraph order number.  */

enum cgraph_order_sort_kind
{
  ORDER_UNDEFINED = 0,	/* Slot not occupied by any entity.  */
  ORDER_FUNCTION,	/* Slot holds a cgraph (function) node.  */
  ORDER_VAR,		/* Slot holds a varpool (variable) node.  */
  ORDER_ASM		/* Slot holds a top-level asm statement.  */
};

/* One slot of the output-order table built by cgraph_output_in_order;
   KIND selects which union member is valid.  */

struct cgraph_order_sort
{
  enum cgraph_order_sort_kind kind;
  union
  {
    struct cgraph_node *f;
    struct varpool_node *v;
    struct cgraph_asm_node *a;
  } u;
};
/* Output all functions, variables, and asm statements in the order
   according to their order fields, which is the order in which they
   appeared in the file.  This implements -fno-toplevel-reorder.  In
   this mode we may output functions and variables which don't really
   need to be output.  */

static void
cgraph_output_in_order (void)
{
  int max;
  struct cgraph_order_sort *nodes;
  int i;
  struct cgraph_node *pf;
  struct varpool_node *pv;
  struct cgraph_asm_node *pa;

  /* Order numbers are globally unique, so a table indexed by them can
     hold every entity without collisions (asserted below).  */
  max = cgraph_order;
  nodes = XCNEWVEC (struct cgraph_order_sort, max);

  varpool_analyze_pending_decls ();

  /* Slot in functions selected for output.  */
  for (pf = cgraph_nodes; pf; pf = pf->next)
    {
      if (pf->process)
	{
	  i = pf->order;
	  gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
	  nodes[i].kind = ORDER_FUNCTION;
	  nodes[i].u.f = pf;
	}
    }

  /* Slot in queued variables.  */
  for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
    {
      i = pv->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_VAR;
      nodes[i].u.v = pv;
    }

  /* Slot in top-level asm statements.  */
  for (pa = cgraph_asm_nodes; pa; pa = pa->next)
    {
      i = pa->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_ASM;
      nodes[i].u.a = pa;
    }

  /* In toplevel reorder mode we output all statics; mark them as needed.  */
  for (i = 0; i < max; ++i)
    {
      if (nodes[i].kind == ORDER_VAR)
	{
	  varpool_mark_needed_node (nodes[i].u.v);
	}
    }
  varpool_empty_needed_queue ();

  /* Emit everything in original source order.  */
  for (i = 0; i < max; ++i)
    {
      switch (nodes[i].kind)
	{
	case ORDER_FUNCTION:
	  nodes[i].u.f->process = 0;
	  cgraph_expand_function (nodes[i].u.f);
	  break;

	case ORDER_VAR:
	  varpool_assemble_decl (nodes[i].u.v);
	  break;

	case ORDER_ASM:
	  assemble_asm (nodes[i].u.a->asm_str);
	  break;

	case ORDER_UNDEFINED:
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  cgraph_asm_nodes = NULL;
  free (nodes);
}
1684 /* Return true when function body of DECL still needs to be kept around
1685 for later re-use. */
1686 bool
1687 cgraph_preserve_function_body_p (tree decl)
1689 struct cgraph_node *node;
1691 gcc_assert (cgraph_global_info_ready);
1692 /* Look if there is any clone around. */
1693 node = cgraph_node (decl);
1694 if (node->clones)
1695 return true;
1696 return false;
/* Run the interprocedural pass pipeline: small IPA passes, summary
   generation, LTO streaming, and (unless in an ltrans unit) the regular
   IPA passes, bracketed by the plugin IPA callbacks.  */

static void
ipa_passes (void)
{
  set_cfun (NULL);
  current_function_decl = NULL;
  gimple_register_cfg_hooks ();
  bitmap_obstack_initialize (NULL);

  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);

  if (!in_lto_p)
    {
      execute_ipa_pass_list (all_small_ipa_passes);
      /* NOTE(review): this early return skips bitmap_obstack_release and
	 the PLUGIN_ALL_IPA_PASSES_END callback — presumably acceptable on
	 the error path; confirm.  */
      if (seen_error ())
	return;
    }

  /* If pass_all_early_optimizations was not scheduled, the state of
     the cgraph will not be properly updated.  Update it now.  */
  if (cgraph_state < CGRAPH_STATE_IPA_SSA)
    cgraph_state = CGRAPH_STATE_IPA_SSA;

  if (!in_lto_p)
    {
      /* Generate coverage variables and constructors.  */
      coverage_finish ();

      /* Process new functions added.  */
      set_cfun (NULL);
      current_function_decl = NULL;
      cgraph_process_new_functions ();

      execute_ipa_summary_passes
	((struct ipa_opt_pass_d *) all_regular_ipa_passes);
    }

  /* Some targets need to handle LTO assembler output specially.  */
  if (flag_generate_lto)
    targetm.asm_out.lto_start ();

  execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);

  if (!in_lto_p)
    ipa_write_summaries ();

  if (flag_generate_lto)
    targetm.asm_out.lto_end ();

  /* In an ltrans unit the regular IPA passes already ran at WPA time.  */
  if (!flag_ltrans)
    execute_ipa_pass_list (all_regular_ipa_passes);
  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);

  bitmap_obstack_release (NULL);
}
/* Perform simple optimizations based on callgraph.

   Top-level driver of the middle end: runs the IPA passes, removes
   functions that became unreachable, materializes clones, decides what
   must be assembled, and expands everything to assembly in the order
   requested by -f[no-]toplevel-reorder.  */

void
cgraph_optimize (void)
{
  if (seen_error ())
    return;

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* Frontend may output common variables after the unit has been finalized.
     It is safe to deal with them here as they are always zero initialized.  */
  varpool_analyze_pending_decls ();

  timevar_push (TV_CGRAPHOPT);
  if (pre_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption before IPA\n");
      dump_memory_report (false);
    }
  if (!quiet_flag)
    fprintf (stderr, "Performing interprocedural optimizations\n");
  cgraph_state = CGRAPH_STATE_IPA;

  /* Don't run the IPA passes if there was any error or sorry messages.  */
  if (!seen_error ())
    ipa_passes ();

  /* Do nothing else if any IPA pass found errors.  */
  if (seen_error ())
    {
      timevar_pop (TV_CGRAPHOPT);
      return;
    }

  /* This pass remove bodies of extern inline functions we never inlined.
     Do this later so other IPA passes see what is really going on.  */
  cgraph_remove_unreachable_nodes (false, dump_file);
  cgraph_global_info_ready = true;
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Optimized ");
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }
  if (post_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption after IPA\n");
      dump_memory_report (false);
    }
  timevar_pop (TV_CGRAPHOPT);

  /* Output everything.  */
  (*debug_hooks->assembly_start) ();
  if (!quiet_flag)
    fprintf (stderr, "Assembling functions:\n");
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  cgraph_materialize_all_clones ();
  cgraph_mark_functions_to_output ();

  cgraph_state = CGRAPH_STATE_EXPANSION;
  if (!flag_toplevel_reorder)
    /* -fno-toplevel-reorder: emit in original source order.  */
    cgraph_output_in_order ();
  else
    {
      cgraph_output_pending_asms ();

      cgraph_expand_all_functions ();
      varpool_remove_unreferenced_decls ();

      varpool_assemble_pending_decls ();
    }
  cgraph_process_new_functions ();
  cgraph_state = CGRAPH_STATE_FINISHED;

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\nFinal ");
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }
#ifdef ENABLE_CHECKING
  verify_cgraph ();
  /* Double check that all inline clones are gone and that all
     function bodies have been released from memory.  */
  if (!seen_error ())
    {
      struct cgraph_node *node;
      bool error_found = false;

      for (node = cgraph_nodes; node; node = node->next)
	if (node->analyzed
	    && (node->global.inlined_to
		|| gimple_has_body_p (node->decl)))
	  {
	    error_found = true;
	    dump_cgraph_node (stderr, node);
	  }
      if (error_found)
	internal_error ("nodes with unreleased memory found");
    }
#endif
}
1864 void
1865 init_cgraph (void)
1867 if (!cgraph_dump_file)
1868 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
1871 /* The edges representing the callers of the NEW_VERSION node were
1872 fixed by cgraph_function_versioning (), now the call_expr in their
1873 respective tree code should be updated to call the NEW_VERSION. */
1875 static void
1876 update_call_expr (struct cgraph_node *new_version)
1878 struct cgraph_edge *e;
1880 gcc_assert (new_version);
1882 /* Update the call expr on the edges to call the new version. */
1883 for (e = new_version->callers; e; e = e->next_caller)
1885 struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
1886 gimple_call_set_fndecl (e->call_stmt, new_version->decl);
1887 maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
/* Create a new cgraph node which is the new version of
   OLD_VERSION node.  REDIRECT_CALLERS holds the callers
   edges which should be redirected to point to
   NEW_VERSION.  ALL the callees edges of OLD_VERSION
   are cloned to the new version node.  Return the new
   version node.

   If non-NULL BLOCK_TO_COPY determine what basic blocks
   was copied to prevent duplications of calls that are dead
   in the clone.  */

static struct cgraph_node *
cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
				 tree new_decl,
				 VEC(cgraph_edge_p,heap) *redirect_callers,
				 bitmap bbs_to_copy)
{
  struct cgraph_node *new_version;
  struct cgraph_edge *e;
  unsigned i;

  gcc_assert (old_version);

  new_version = cgraph_node (new_decl);

  /* The new version starts as a local, non-virtual copy of the
     original's flags and profile.  */
  new_version->analyzed = true;
  new_version->local = old_version->local;
  new_version->local.externally_visible = false;
  new_version->local.local = true;
  new_version->local.vtable_method = false;
  new_version->global = old_version->global;
  new_version->rtl = old_version->rtl;
  new_version->reachable = true;
  new_version->count = old_version->count;

  /* Clone direct callee edges, skipping calls in blocks the clone
     will not copy.  */
  for (e = old_version->callees; e; e=e->next_callee)
    if (!bbs_to_copy
	|| bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
      cgraph_clone_edge (e, new_version, e->call_stmt,
			 e->lto_stmt_uid, REG_BR_PROB_BASE,
			 CGRAPH_FREQ_BASE,
			 e->loop_nest, true);
  /* Likewise for indirect-call edges.  */
  for (e = old_version->indirect_calls; e; e=e->next_callee)
    if (!bbs_to_copy
	|| bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
      cgraph_clone_edge (e, new_version, e->call_stmt,
			 e->lto_stmt_uid, REG_BR_PROB_BASE,
			 CGRAPH_FREQ_BASE,
			 e->loop_nest, true);
  FOR_EACH_VEC_ELT (cgraph_edge_p, redirect_callers, i, e)
    {
      /* Redirect calls to the old version node to point to its new
	 version.  */
      cgraph_redirect_edge_callee (e, new_version);
    }

  return new_version;
}
/* Perform function versioning.
   Function versioning includes copying of the tree and
   a callgraph update (creating a new cgraph node and updating
   its callees and callers).

   REDIRECT_CALLERS varray includes the edges to be redirected
   to the new version.

   TREE_MAP is a mapping of tree nodes we want to replace with
   new ones (according to results of prior analysis).
   OLD_VERSION_NODE is the node that is versioned.
   It returns the new version's cgraph node.
   If non-NULL ARGS_TO_SKIP determine function parameters to remove
   from new version.
   If non-NULL BLOCK_TO_COPY determine what basic blocks to copy.
   If non_NULL NEW_ENTRY determine new entry BB of the clone.
   Returns NULL when OLD_VERSION_NODE cannot be versioned at all.  */

struct cgraph_node *
cgraph_function_versioning (struct cgraph_node *old_version_node,
			    VEC(cgraph_edge_p,heap) *redirect_callers,
			    VEC (ipa_replace_map_p,gc)* tree_map,
			    bitmap args_to_skip,
			    bitmap bbs_to_copy,
			    basic_block new_entry_block,
			    const char *clone_name)
{
  tree old_decl = old_version_node->decl;
  struct cgraph_node *new_version_node = NULL;
  tree new_decl;

  if (!tree_versionable_function_p (old_decl))
    return NULL;

  /* Make a new FUNCTION_DECL tree node for the
     new version.  */
  if (!args_to_skip)
    new_decl = copy_node (old_decl);
  else
    new_decl = build_function_decl_skip_args (old_decl, args_to_skip);

  /* Generate a new name for the new version.  */
  DECL_NAME (new_decl) = clone_function_name (old_decl, clone_name);
  SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
  SET_DECL_RTL (new_decl, NULL);

  /* Create the new version's call-graph node.
     and update the edges of the new node.  */
  new_version_node =
    cgraph_copy_node_for_versioning (old_version_node, new_decl,
				     redirect_callers, bbs_to_copy);

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
			    bbs_to_copy, new_entry_block);

  /* Update the new version's properties.
     Make The new version visible only within this translation unit.  Make sure
     that is not weak also.
     ??? We cannot use COMDAT linkage because there is no
     ABI support for this.  */
  cgraph_make_decl_local (new_version_node->decl);
  DECL_VIRTUAL_P (new_version_node->decl) = 0;
  new_version_node->local.externally_visible = 0;
  new_version_node->local.local = 1;
  new_version_node->lowered = true;

  /* Update the call_expr on the edges to call the new version node.  */
  update_call_expr (new_version_node);

  cgraph_call_function_insertion_hooks (new_version_node);
  return new_version_node;
}
/* Produce separate function body for inline clones so the offline copy can be
   modified without affecting them.

   Promotes NODE's first clone to own a private copy of the decl and
   body: the remaining sibling clones are re-parented under it, every
   clone's decl is rewritten to the copy, and the body is duplicated
   via tree_function_versioning.  Returns the promoted clone.  */
struct cgraph_node *
save_inline_function_body (struct cgraph_node *node)
{
  struct cgraph_node *first_clone, *n;

  gcc_assert (node == cgraph_node (node->decl));

  cgraph_lower_function (node);

  first_clone = node->clones;

  /* Give the first clone its own decl and register it in the hash so
     lookups by the new decl find it.  */
  first_clone->decl = copy_node (node->decl);
  cgraph_insert_node_to_hashtable (first_clone);
  gcc_assert (first_clone == cgraph_node (first_clone->decl));
  if (first_clone->next_sibling_clone)
    {
      /* Re-parent the remaining siblings: they become clones of
	 FIRST_CLONE, spliced in front of its existing clone list.  */
      for (n = first_clone->next_sibling_clone; n->next_sibling_clone; n = n->next_sibling_clone)
	n->clone_of = first_clone;
      n->clone_of = first_clone;
      n->next_sibling_clone = first_clone->clones;
      if (first_clone->clones)
	first_clone->clones->prev_sibling_clone = n;
      first_clone->clones = first_clone->next_sibling_clone;
      first_clone->next_sibling_clone->prev_sibling_clone = NULL;
      first_clone->next_sibling_clone = NULL;
      gcc_assert (!first_clone->prev_sibling_clone);
    }
  first_clone->clone_of = NULL;
  node->clones = NULL;

  /* Walk the whole clone tree below FIRST_CLONE (preorder, iterative)
     and point every clone at the new decl.  */
  if (first_clone->clones)
    for (n = first_clone->clones; n != first_clone;)
      {
	gcc_assert (n->decl == node->decl);
	n->decl = first_clone->decl;
	if (n->clones)
	  n = n->clones;
	else if (n->next_sibling_clone)
	  n = n->next_sibling_clone;
	else
	  {
	    /* Climb back up until a sibling is available.  */
	    while (n != first_clone && !n->next_sibling_clone)
	      n = n->clone_of;
	    if (n != first_clone)
	      n = n->next_sibling_clone;
	  }
      }

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->decl, first_clone->decl, NULL, true, NULL,
			    NULL, NULL);

  /* The copy is local to this unit and not part of any comdat group.  */
  DECL_EXTERNAL (first_clone->decl) = 0;
  DECL_COMDAT_GROUP (first_clone->decl) = NULL_TREE;
  TREE_PUBLIC (first_clone->decl) = 0;
  DECL_COMDAT (first_clone->decl) = 0;
  VEC_free (ipa_opt_pass, heap,
	    first_clone->ipa_transforms_to_apply);
  first_clone->ipa_transforms_to_apply = NULL;

#ifdef ENABLE_CHECKING
  verify_cgraph_node (first_clone);
#endif
  return first_clone;
}
/* Given virtual clone, turn it into actual clone: copy the body from
   the clone's origin (applying the recorded tree map and skipped
   arguments), unlink NODE from the clone tree, and release the
   origin's body if nothing references it any more.  */
static void
cgraph_materialize_clone (struct cgraph_node *node)
{
  bitmap_obstack_initialize (NULL);
  /* Remember the ultimate origin for debug info purposes.  */
  node->former_clone_of = node->clone_of->decl;
  if (node->clone_of->former_clone_of)
    node->former_clone_of = node->clone_of->former_clone_of;
  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->clone_of->decl, node->decl,
			    node->clone.tree_map, true,
			    node->clone.args_to_skip, NULL, NULL);
  if (cgraph_dump_file)
    {
      dump_function_to_file (node->clone_of->decl, cgraph_dump_file, dump_flags);
      dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
    }

  /* Function is no longer clone.  */
  if (node->next_sibling_clone)
    node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
  if (node->prev_sibling_clone)
    node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
  else
    node->clone_of->clones = node->next_sibling_clone;
  node->next_sibling_clone = NULL;
  node->prev_sibling_clone = NULL;
  /* If the origin is itself dead and has no remaining clones, its body
     and references can be reclaimed now.  */
  if (!node->clone_of->analyzed && !node->clone_of->clones)
    {
      cgraph_release_function_body (node->clone_of);
      cgraph_node_remove_callees (node->clone_of);
      ipa_remove_all_references (&node->clone_of->ref_list);
    }
  node->clone_of = NULL;
  bitmap_obstack_release (NULL);
}
/* If necessary, change the function declaration in the call statement
   associated with E so that it corresponds to the edge callee.  This is
   needed after IPA clone materialization: the GIMPLE call may still name
   the original function while the edge was redirected to a clone (possibly
   one with skipped arguments or a thunk-adjusted `this').  Returns the
   (possibly new) call statement.  */

gimple
cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *e)
{
  tree decl = gimple_call_fndecl (e->call_stmt);
  gimple new_stmt;
  gimple_stmt_iterator gsi;
  /* True once GSI has been initialized to point at E->call_stmt; avoids
     recomputing the iterator when the thunk-delta path already did so.  */
  bool gsi_computed = false;
#ifdef ENABLE_CHECKING
  struct cgraph_node *node;
#endif

  /* Nothing to do for indirect calls, or when the statement already calls
     the edge callee.  */
  if (e->indirect_unknown_callee
      || decl == e->callee->decl
      /* Don't update call from same body alias to the real function.  */
      || (decl && cgraph_get_node (decl) == cgraph_get_node (e->callee->decl)))
    return e->call_stmt;

#ifdef ENABLE_CHECKING
  /* The currently-called function must not itself be a clone with skipped
     arguments — the statement's argument list would not match it.  */
  if (decl)
    {
      node = cgraph_get_node (decl);
      gcc_assert (!node || !node->clone.combined_args_to_skip);
    }
#endif

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "updating call of %s/%i -> %s/%i: ",
	       cgraph_node_name (e->caller), e->caller->uid,
	       cgraph_node_name (e->callee), e->callee->uid);
      print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
      if (e->callee->clone.combined_args_to_skip)
	{
	  fprintf (cgraph_dump_file, " combined args to skip: ");
	  dump_bitmap (cgraph_dump_file,
		       e->callee->clone.combined_args_to_skip);
	}
    }

  /* If the edge carries a nonzero thunk delta, adjust the first (`this')
     argument in place — but only when argument 0 is not among the skipped
     arguments, since adjusting a to-be-dropped argument would be pointless
     (presumably also incorrect — the delta applies to `this', which would
     no longer be passed).  */
  if (e->indirect_info && e->indirect_info->thunk_delta
      && integer_nonzerop (e->indirect_info->thunk_delta)
      && (!e->callee->clone.combined_args_to_skip
	  || !bitmap_bit_p (e->callee->clone.combined_args_to_skip, 0)))
    {
      if (cgraph_dump_file)
	{
	  fprintf (cgraph_dump_file, "          Thunk delta is ");
	  print_generic_expr (cgraph_dump_file,
			      e->indirect_info->thunk_delta, 0);
	  fprintf (cgraph_dump_file, "\n");
	}
      gsi = gsi_for_stmt (e->call_stmt);
      gsi_computed = true;
      gimple_adjust_this_by_delta (&gsi, e->indirect_info->thunk_delta);
      /* Clear the delta so the adjustment is not applied twice.  */
      e->indirect_info->thunk_delta = NULL_TREE;
    }

  if (e->callee->clone.combined_args_to_skip)
    {
      /* The callee drops some parameters: build a fresh call statement
	 without the skipped arguments and swap it in.  */
      int lp_nr;

      new_stmt
	= gimple_call_copy_skip_args (e->call_stmt,
				      e->callee->clone.combined_args_to_skip);
      gimple_call_set_fndecl (new_stmt, e->callee->decl);

      /* The copy shares the virtual operand; make the new statement the
	 defining statement of the VDEF SSA name.  */
      if (gimple_vdef (new_stmt)
	  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
	SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;

      if (!gsi_computed)
	gsi = gsi_for_stmt (e->call_stmt);
      gsi_replace (&gsi, new_stmt, false);
      /* We need to defer cleaning EH info on the new statement to
	 fixup-cfg.  We may not have dominator information at this point
	 and thus would end up with unreachable blocks and have no way
	 to communicate that we need to run CFG cleanup then.  */
      lp_nr = lookup_stmt_eh_lp (e->call_stmt);
      if (lp_nr != 0)
	{
	  /* Transfer the EH landing pad from the old statement to the
	     new one.  */
	  remove_stmt_from_eh_lp (e->call_stmt);
	  add_stmt_to_eh_lp (new_stmt, lp_nr);
	}
    }
  else
    {
      /* Same argument list: just retarget the existing statement.  */
      new_stmt = e->call_stmt;
      gimple_call_set_fndecl (new_stmt, e->callee->decl);
      update_stmt (new_stmt);
    }

  /* Propagate the statement replacement to this edge and the corresponding
     edges in all clones of the caller.  */
  cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt, new_stmt);

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "  updated to:");
      print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
    }
  return new_stmt;
}
2233 /* Once all functions from compilation unit are in memory, produce all clones
2234 and update all calls. We might also do this on demand if we don't want to
2235 bring all functions to memory prior compilation, but current WHOPR
2236 implementation does that and it is is bit easier to keep everything right in
2237 this order. */
2238 void
2239 cgraph_materialize_all_clones (void)
2241 struct cgraph_node *node;
2242 bool stabilized = false;
2244 if (cgraph_dump_file)
2245 fprintf (cgraph_dump_file, "Materializing clones\n");
2246 #ifdef ENABLE_CHECKING
2247 verify_cgraph ();
2248 #endif
2250 /* We can also do topological order, but number of iterations should be
2251 bounded by number of IPA passes since single IPA pass is probably not
2252 going to create clones of clones it created itself. */
2253 while (!stabilized)
2255 stabilized = true;
2256 for (node = cgraph_nodes; node; node = node->next)
2258 if (node->clone_of && node->decl != node->clone_of->decl
2259 && !gimple_has_body_p (node->decl))
2261 if (gimple_has_body_p (node->clone_of->decl))
2263 if (cgraph_dump_file)
2265 fprintf (cgraph_dump_file, "clonning %s to %s\n",
2266 cgraph_node_name (node->clone_of),
2267 cgraph_node_name (node));
2268 if (node->clone.tree_map)
2270 unsigned int i;
2271 fprintf (cgraph_dump_file, " replace map: ");
2272 for (i = 0; i < VEC_length (ipa_replace_map_p,
2273 node->clone.tree_map);
2274 i++)
2276 struct ipa_replace_map *replace_info;
2277 replace_info = VEC_index (ipa_replace_map_p,
2278 node->clone.tree_map,
2280 print_generic_expr (cgraph_dump_file, replace_info->old_tree, 0);
2281 fprintf (cgraph_dump_file, " -> ");
2282 print_generic_expr (cgraph_dump_file, replace_info->new_tree, 0);
2283 fprintf (cgraph_dump_file, "%s%s;",
2284 replace_info->replace_p ? "(replace)":"",
2285 replace_info->ref_p ? "(ref)":"");
2287 fprintf (cgraph_dump_file, "\n");
2289 if (node->clone.args_to_skip)
2291 fprintf (cgraph_dump_file, " args_to_skip: ");
2292 dump_bitmap (cgraph_dump_file, node->clone.args_to_skip);
2294 if (node->clone.args_to_skip)
2296 fprintf (cgraph_dump_file, " combined_args_to_skip:");
2297 dump_bitmap (cgraph_dump_file, node->clone.combined_args_to_skip);
2300 cgraph_materialize_clone (node);
2301 stabilized = false;
2306 for (node = cgraph_nodes; node; node = node->next)
2307 if (!node->analyzed && node->callees)
2308 cgraph_node_remove_callees (node);
2309 if (cgraph_dump_file)
2310 fprintf (cgraph_dump_file, "Materialization Call site updates done.\n");
2311 #ifdef ENABLE_CHECKING
2312 verify_cgraph ();
2313 #endif
2314 cgraph_remove_unreachable_nodes (false, cgraph_dump_file);
2317 #include "gt-cgraphunit.h"