1 /* Callgraph based interprocedural optimizations.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This module implements the main driver of the compilation process as well as
23 a few basic interprocedural optimizers.
25 The main scope of this file is to act as an interface between the
26 tree based frontends and the backend (and middle end).
28 The front-end is supposed to use the following functionality:
30 - cgraph_finalize_function
32 This function is called once the front-end has parsed the whole body of a
33 function and it is certain that neither the body nor the declaration will change.
35 (There is one exception needed for implementing GCC extern inline
36 functions.)
38 - varpool_finalize_variable
40 This function has the same behavior as the above but is used for static
41 variables.
43 - cgraph_finalize_compilation_unit
45 This function is called once the (source level) compilation unit is finalized
46 and it will no longer change.
48 Here the call-graph construction and local function
49 analysis take place. Bodies of unreachable functions are released
50 to conserve memory usage.
52 The function can be called multiple times when multiple source level
53 compilation units are combined (such as in the C frontend).
55 - cgraph_optimize
57 At this point of unit-at-a-time compilation the intraprocedural analysis takes
58 place. In particular, static functions whose address is never
59 taken are marked as local. The backend can then use this information to
60 modify calling conventions, do better inlining, or similar optimizations.
62 - cgraph_mark_needed_node
63 - varpool_mark_needed_node
65 When a function or variable is referenced in some hidden way, the call-graph
66 data structure must be updated accordingly by these functions.
67 There should be little need to call them, as all references
68 should be made explicit to the cgraph code. At present these functions are
69 used by the C++ frontend to explicitly mark the keyed methods.
71 - analyze_expr callback
73 This function is responsible for lowering tree nodes not understood by
74 generic code into understandable ones, or alternatively for marking
75 callgraph and varpool nodes referenced by them as needed.
77 ??? With tree-ssa, genericizing should take place here and we would avoid
78 the need for these hooks (replacing them by a genericizing hook).
80 Analysis of all functions is deferred
81 to cgraph_finalize_compilation_unit and expansion to cgraph_optimize.
83 In cgraph_finalize_compilation_unit the reachable functions are
84 analyzed. During analysis the call-graph edges from reachable
85 functions are constructed and their destinations are marked as
86 reachable. References to functions and variables are discovered too,
87 and variables found to be needed are output to the assembly file. Via
88 the mark_referenced call in assemble_variable, functions referenced by
89 static variables are noticed too.
91 The intra-procedural information is produced and its existence is
92 indicated by global_info_ready. Once this flag is set it is impossible
93 to change a function from !reachable to reachable and thus
94 assemble_variable no longer calls mark_referenced.
96 Finally the call-graph is topologically sorted and all reachable functions
97 that have not been completely inlined and are not external are output.
99 ??? It is possible that a reference to a function or variable is optimized
100 out. We cannot deal with this nicely because the topological order is not
101 suitable for it. For tree-ssa we may consider another pass doing
102 optimization and re-discovering reachable functions.
104 ??? Reorganize the code so variables are output very last and only if they
105 really have been referenced by the produced code, so we catch more cases
106 where the reference has been optimized out. */
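/* For illustration only (a sketch based on the description above, not part
   of the original sources), a typical front-end drives this module roughly
   as follows:

       // for every function whose body has been fully parsed:
       cgraph_finalize_function (fndecl, false);

       // analogously for static variables, via varpool_finalize_variable;

       // once the whole translation unit has been parsed:
       cgraph_finalize_compilation_unit ();

   cgraph_finalize_compilation_unit analyzes the reachable functions and
   finally drives the pass manager through cgraph_optimize, which runs the
   IPA passes and expands the surviving function bodies to assembly.  */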
109 #include "config.h"
110 #include "system.h"
111 #include "coretypes.h"
112 #include "tm.h"
113 #include "tree.h"
114 #include "rtl.h"
115 #include "tree-flow.h"
116 #include "tree-inline.h"
117 #include "langhooks.h"
118 #include "pointer-set.h"
119 #include "toplev.h"
120 #include "flags.h"
121 #include "ggc.h"
122 #include "debug.h"
123 #include "target.h"
124 #include "cgraph.h"
125 #include "diagnostic.h"
126 #include "tree-pretty-print.h"
127 #include "gimple-pretty-print.h"
128 #include "timevar.h"
129 #include "params.h"
130 #include "fibheap.h"
131 #include "intl.h"
132 #include "function.h"
133 #include "ipa-prop.h"
134 #include "gimple.h"
135 #include "tree-iterator.h"
136 #include "tree-pass.h"
137 #include "tree-dump.h"
138 #include "output.h"
139 #include "coverage.h"
140 #include "plugin.h"
142 static void cgraph_expand_all_functions (void);
143 static void cgraph_mark_functions_to_output (void);
144 static void cgraph_expand_function (struct cgraph_node *);
145 static void cgraph_output_pending_asms (void);
146 static void cgraph_analyze_function (struct cgraph_node *);
148 FILE *cgraph_dump_file;
150 /* Used for vtable lookup in thunk adjusting. */
151 static GTY (()) tree vtable_entry_type;
153 /* Determine if function DECL is needed. That is, visible to something
154 either outside this translation unit or something magic in the system
155 configury. */
157 bool
158 cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
160 /* If the user told us it is used, then it must be so. */
161 if (node->local.externally_visible)
162 return true;
164 /* ??? If the assembler name is set by hand, it is possible that the name
165 is only assembled later, after finalizing the function, and the fact is
166 noticed in assemble_name then. This is arguably a bug. */
167 if (DECL_ASSEMBLER_NAME_SET_P (decl)
168 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
169 return true;
171 /* With -fkeep-inline-functions we are keeping all inline functions except
172 for extern inline ones. */
173 if (flag_keep_inline_functions
174 && DECL_DECLARED_INLINE_P (decl)
175 && !DECL_EXTERNAL (decl)
176 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl)))
177 return true;
179 /* If we decided it was needed before, but at the time we didn't have
180 the body of the function available, then it's still needed. We have
181 to go back and re-check its dependencies now. */
182 if (node->needed)
183 return true;
185 /* Externally visible functions must be output. The exception is
186 COMDAT functions that must be output only when they are needed.
188 When not optimizing, also output the static functions (see
189 PR24561), but don't do so for always_inline functions, functions
190 declared inline, and nested functions. These were optimized out
191 in the original implementation and it is unclear whether we want
192 to change the behavior here. */
193 if (((TREE_PUBLIC (decl)
194 || (!optimize
195 && !node->local.disregard_inline_limits
196 && !DECL_DECLARED_INLINE_P (decl)
197 && !(DECL_CONTEXT (decl)
198 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)))
199 && !flag_whole_program
200 && !flag_lto)
201 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
202 return true;
204 return false;
207 /* Process CGRAPH_NEW_FUNCTIONS and perform the actions necessary to add these
208 functions to the callgraph so that they look like ordinary reachable
209 functions inserted into the callgraph already at construction time. */
211 bool
212 cgraph_process_new_functions (void)
214 bool output = false;
215 tree fndecl;
216 struct cgraph_node *node;
218 varpool_analyze_pending_decls ();
219 /* Note that this queue may grow as it is being processed, as the new
220 functions may generate new ones. */
221 while (cgraph_new_nodes)
223 node = cgraph_new_nodes;
224 fndecl = node->decl;
225 cgraph_new_nodes = cgraph_new_nodes->next_needed;
226 switch (cgraph_state)
228 case CGRAPH_STATE_CONSTRUCTION:
229 /* At construction time we just need to finalize the function and move
230 it into the reachable functions list. */
232 node->next_needed = NULL;
233 cgraph_finalize_function (fndecl, false);
234 cgraph_mark_reachable_node (node);
235 output = true;
236 break;
238 case CGRAPH_STATE_IPA:
239 case CGRAPH_STATE_IPA_SSA:
240 /* When IPA optimization has already started, do all essential
241 transformations that have already been performed on the whole
242 cgraph but not on this function. */
244 gimple_register_cfg_hooks ();
245 if (!node->analyzed)
246 cgraph_analyze_function (node);
247 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
248 current_function_decl = fndecl;
249 compute_inline_parameters (node);
250 if ((cgraph_state == CGRAPH_STATE_IPA_SSA
251 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
252 /* When not optimizing, be sure we run early local passes anyway
253 to expand OMP. */
254 || !optimize)
255 execute_pass_list (pass_early_local_passes.pass.sub);
256 free_dominance_info (CDI_POST_DOMINATORS);
257 free_dominance_info (CDI_DOMINATORS);
258 pop_cfun ();
259 current_function_decl = NULL;
260 break;
262 case CGRAPH_STATE_EXPANSION:
263 /* Functions created during expansion shall be compiled
264 directly. */
265 node->process = 0;
266 cgraph_expand_function (node);
267 break;
269 default:
270 gcc_unreachable ();
271 break;
273 cgraph_call_function_insertion_hooks (node);
274 varpool_analyze_pending_decls ();
276 return output;
279 /* As a GCC extension we allow redefinition of the function. The
280 semantics when the two bodies differ are not well defined.
281 We replace the old body with the new body, so in unit-at-a-time mode
282 we always use the new body, while in normal mode we may end up with
283 the old body inlined into some functions and the new body expanded and
284 inlined in others.
286 ??? It may make more sense to use one body for inlining and the other
287 body for expanding the function, but this is difficult to do. */
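/* For illustration only (not from the original sources), such a
   redefinition arises with the GNU extern inline extension:

       extern inline int f (void) { return 1; }   // inline-only body
       int f (void) { return 2; }                  // later redefinition

   cgraph_reset_node below discards everything derived from the first body
   so that the new body is analyzed from scratch.  */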
289 static void
290 cgraph_reset_node (struct cgraph_node *node)
292 /* If node->process is set, then we have already begun whole-unit analysis.
293 This is *not* testing for whether we've already emitted the function.
294 That case can be sort-of legitimately seen with real function redefinition
295 errors. I would argue that the front end should never present us with
296 such a case, but don't enforce that for now. */
297 gcc_assert (!node->process);
299 /* Reset our data structures so we can analyze the function again. */
300 memset (&node->local, 0, sizeof (node->local));
301 memset (&node->global, 0, sizeof (node->global));
302 memset (&node->rtl, 0, sizeof (node->rtl));
303 node->analyzed = false;
304 node->local.redefined_extern_inline = true;
305 node->local.finalized = false;
307 cgraph_node_remove_callees (node);
309 /* We may need to re-queue the node for assembling in case
310 we already processed it and ignored it as not needed, or got
311 a re-declaration in IMA mode. */
312 if (node->reachable)
314 struct cgraph_node *n;
316 for (n = cgraph_nodes_queue; n; n = n->next_needed)
317 if (n == node)
318 break;
319 if (!n)
320 node->reachable = 0;
324 static void
325 cgraph_lower_function (struct cgraph_node *node)
327 if (node->lowered)
328 return;
330 if (node->nested)
331 lower_nested_functions (node->decl);
332 gcc_assert (!node->nested);
334 tree_lowering_passes (node->decl);
335 node->lowered = true;
338 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
339 logic in effect. If NESTED is true, then our caller cannot stand to have
340 the garbage collector run at the moment. We would need to either create
341 a new GC context, or just not compile right now. */
343 void
344 cgraph_finalize_function (tree decl, bool nested)
346 struct cgraph_node *node = cgraph_node (decl);
348 if (node->local.finalized)
349 cgraph_reset_node (node);
351 node->pid = cgraph_max_pid ++;
352 notice_global_symbol (decl);
353 node->local.finalized = true;
354 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
355 node->finalized_by_frontend = true;
357 if (cgraph_decide_is_function_needed (node, decl))
358 cgraph_mark_needed_node (node);
360 /* Since we reclaim unreachable nodes at the end of every language
361 level unit, we need to be conservative about possible entry points
362 there. */
363 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
364 || DECL_STATIC_CONSTRUCTOR (decl)
365 || DECL_STATIC_DESTRUCTOR (decl)
366 /* COMDAT virtual functions may be referenced by a vtable from
367 another compilation unit. Still we want to devirtualize calls
368 to those, so we need to analyze them.
369 FIXME: We should introduce may edges for this purpose and update
370 their handling in unreachable function removal and inliner too. */
371 || (DECL_VIRTUAL_P (decl) && (DECL_COMDAT (decl) || DECL_EXTERNAL (decl))))
372 cgraph_mark_reachable_node (node);
374 /* If we've not yet emitted decl, tell the debug info about it. */
375 if (!TREE_ASM_WRITTEN (decl))
376 (*debug_hooks->deferred_inline_function) (decl);
378 /* Possibly warn about unused parameters. */
379 if (warn_unused_parameter)
380 do_warn_unused_parameter (decl);
382 if (!nested)
383 ggc_collect ();
386 /* The C99 extern inline keyword allows changing a declaration after the function
387 has been finalized. We need to re-decide whether we want to mark the function as
388 needed then. */
390 void
391 cgraph_mark_if_needed (tree decl)
393 struct cgraph_node *node = cgraph_node (decl);
394 if (node->local.finalized && cgraph_decide_is_function_needed (node, decl))
395 cgraph_mark_needed_node (node);
398 /* Return TRUE if NODE2 is NODE itself or a (possibly indirect) clone of NODE. */
399 static bool
400 clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
402 while (node != node2 && node2)
403 node2 = node2->clone_of;
404 return node2 != NULL;
407 /* Verify edge E count and frequency. */
409 static bool
410 verify_edge_count_and_frequency (struct cgraph_edge *e)
412 bool error_found = false;
413 if (e->count < 0)
415 error ("caller edge count is negative");
416 error_found = true;
418 if (e->frequency < 0)
420 error ("caller edge frequency is negative");
421 error_found = true;
423 if (e->frequency > CGRAPH_FREQ_MAX)
425 error ("caller edge frequency is too large");
426 error_found = true;
428 if (gimple_has_body_p (e->caller->decl)
429 && !e->caller->global.inlined_to
430 && (e->frequency
431 != compute_call_stmt_bb_frequency (e->caller->decl,
432 gimple_bb (e->call_stmt))))
434 error ("caller edge frequency %i does not match BB freqency %i",
435 e->frequency,
436 compute_call_stmt_bb_frequency (e->caller->decl,
437 gimple_bb (e->call_stmt)));
438 error_found = true;
440 return error_found;
443 /* Verify the internal consistency of cgraph node NODE and its edges. */
444 DEBUG_FUNCTION void
445 verify_cgraph_node (struct cgraph_node *node)
447 struct cgraph_edge *e;
448 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
449 struct function *saved_cfun = cfun;
450 basic_block this_block;
451 gimple_stmt_iterator gsi;
452 bool error_found = false;
454 if (seen_error ())
455 return;
457 timevar_push (TV_CGRAPH_VERIFY);
458 /* debug_generic_stmt needs correct cfun */
459 set_cfun (this_cfun);
460 for (e = node->callees; e; e = e->next_callee)
461 if (e->aux)
463 error ("aux field set for edge %s->%s",
464 identifier_to_locale (cgraph_node_name (e->caller)),
465 identifier_to_locale (cgraph_node_name (e->callee)));
466 error_found = true;
468 if (node->count < 0)
470 error ("execution count is negative");
471 error_found = true;
473 if (node->global.inlined_to && node->local.externally_visible)
475 error ("externally visible inline clone");
476 error_found = true;
478 if (node->global.inlined_to && node->address_taken)
480 error ("inline clone with address taken");
481 error_found = true;
483 if (node->global.inlined_to && node->needed)
485 error ("inline clone is needed");
486 error_found = true;
488 for (e = node->indirect_calls; e; e = e->next_callee)
490 if (e->aux)
492 error ("aux field set for indirect edge from %s",
493 identifier_to_locale (cgraph_node_name (e->caller)));
494 error_found = true;
496 if (!e->indirect_unknown_callee
497 || !e->indirect_info)
499 error ("An indirect edge from %s is not marked as indirect or has "
500 "associated indirect_info, the corresponding statement is: ",
501 identifier_to_locale (cgraph_node_name (e->caller)));
502 debug_gimple_stmt (e->call_stmt);
503 error_found = true;
506 for (e = node->callers; e; e = e->next_caller)
508 if (verify_edge_count_and_frequency (e))
509 error_found = true;
510 if (!e->inline_failed)
512 if (node->global.inlined_to
513 != (e->caller->global.inlined_to
514 ? e->caller->global.inlined_to : e->caller))
516 error ("inlined_to pointer is wrong");
517 error_found = true;
519 if (node->callers->next_caller)
521 error ("multiple inline callers");
522 error_found = true;
525 else
526 if (node->global.inlined_to)
528 error ("inlined_to pointer set for noninline callers");
529 error_found = true;
532 for (e = node->indirect_calls; e; e = e->next_callee)
533 if (verify_edge_count_and_frequency (e))
534 error_found = true;
535 if (!node->callers && node->global.inlined_to)
537 error ("inlined_to pointer is set but no predecessors found");
538 error_found = true;
540 if (node->global.inlined_to == node)
542 error ("inlined_to pointer refers to itself");
543 error_found = true;
546 if (!cgraph_node (node->decl))
548 error ("node not found in cgraph_hash");
549 error_found = true;
552 if (node->clone_of)
554 struct cgraph_node *n;
555 for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
556 if (n == node)
557 break;
558 if (!n)
560 error ("node has wrong clone_of");
561 error_found = true;
564 if (node->clones)
566 struct cgraph_node *n;
567 for (n = node->clones; n; n = n->next_sibling_clone)
568 if (n->clone_of != node)
569 break;
570 if (n)
572 error ("node has wrong clone list");
573 error_found = true;
576 if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
578 error ("node is in clone list but it is not clone");
579 error_found = true;
581 if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
583 error ("node has wrong prev_clone pointer");
584 error_found = true;
586 if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
588 error ("double linked list of clones corrupted");
589 error_found = true;
591 if (node->same_comdat_group)
593 struct cgraph_node *n = node->same_comdat_group;
595 if (!DECL_ONE_ONLY (node->decl))
597 error ("non-DECL_ONE_ONLY node in a same_comdat_group list");
598 error_found = true;
600 if (n == node)
602 error ("node is alone in a comdat group");
603 error_found = true;
607 if (!n->same_comdat_group)
609 error ("same_comdat_group is not a circular list");
610 error_found = true;
611 break;
613 n = n->same_comdat_group;
615 while (n != node);
618 if (node->analyzed && gimple_has_body_p (node->decl)
619 && !TREE_ASM_WRITTEN (node->decl)
620 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
621 && !flag_wpa)
623 if (this_cfun->cfg)
625 /* The nodes we're interested in are never shared, so walk
626 the tree ignoring duplicates. */
627 struct pointer_set_t *visited_nodes = pointer_set_create ();
628 /* Reach the trees by walking over the CFG, and note the
629 enclosing basic-blocks in the call edges. */
630 FOR_EACH_BB_FN (this_block, this_cfun)
631 for (gsi = gsi_start_bb (this_block);
632 !gsi_end_p (gsi);
633 gsi_next (&gsi))
635 gimple stmt = gsi_stmt (gsi);
636 if (is_gimple_call (stmt))
638 struct cgraph_edge *e = cgraph_edge (node, stmt);
639 tree decl = gimple_call_fndecl (stmt);
640 if (e)
642 if (e->aux)
644 error ("shared call_stmt:");
645 debug_gimple_stmt (stmt);
646 error_found = true;
648 if (!e->indirect_unknown_callee)
650 if (e->callee->same_body_alias)
652 error ("edge points to same body alias:");
653 debug_tree (e->callee->decl);
654 error_found = true;
656 else if (!e->callee->global.inlined_to
657 && decl
658 && cgraph_get_node (decl)
659 && (e->callee->former_clone_of
660 != cgraph_get_node (decl)->decl)
661 && !clone_of_p (cgraph_node (decl),
662 e->callee))
664 error ("edge points to wrong declaration:");
665 debug_tree (e->callee->decl);
666 fprintf (stderr," Instead of:");
667 debug_tree (decl);
668 error_found = true;
671 else if (decl)
673 error ("an indirect edge with unknown callee "
674 "corresponding to a call_stmt with "
675 "a known declaration:");
676 error_found = true;
677 debug_gimple_stmt (e->call_stmt);
679 e->aux = (void *)1;
681 else if (decl)
683 error ("missing callgraph edge for call stmt:");
684 debug_gimple_stmt (stmt);
685 error_found = true;
689 pointer_set_destroy (visited_nodes);
691 else
692 /* No CFG available?! */
693 gcc_unreachable ();
695 for (e = node->callees; e; e = e->next_callee)
697 if (!e->aux)
699 error ("edge %s->%s has no corresponding call_stmt",
700 identifier_to_locale (cgraph_node_name (e->caller)),
701 identifier_to_locale (cgraph_node_name (e->callee)));
702 debug_gimple_stmt (e->call_stmt);
703 error_found = true;
705 e->aux = 0;
707 for (e = node->indirect_calls; e; e = e->next_callee)
709 if (!e->aux)
711 error ("an indirect edge from %s has no corresponding call_stmt",
712 identifier_to_locale (cgraph_node_name (e->caller)));
713 debug_gimple_stmt (e->call_stmt);
714 error_found = true;
716 e->aux = 0;
719 if (error_found)
721 dump_cgraph_node (stderr, node);
722 internal_error ("verify_cgraph_node failed");
724 set_cfun (saved_cfun);
725 timevar_pop (TV_CGRAPH_VERIFY);
728 /* Verify whole cgraph structure. */
729 DEBUG_FUNCTION void
730 verify_cgraph (void)
732 struct cgraph_node *node;
734 if (seen_error ())
735 return;
737 for (node = cgraph_nodes; node; node = node->next)
738 verify_cgraph_node (node);
741 /* Output all asm statements we have stored up to be output. */
743 static void
744 cgraph_output_pending_asms (void)
746 struct cgraph_asm_node *can;
748 if (seen_error ())
749 return;
751 for (can = cgraph_asm_nodes; can; can = can->next)
752 assemble_asm (can->asm_str);
753 cgraph_asm_nodes = NULL;
756 /* Analyze the function scheduled to be output. */
757 static void
758 cgraph_analyze_function (struct cgraph_node *node)
760 tree save = current_function_decl;
761 tree decl = node->decl;
763 current_function_decl = decl;
764 push_cfun (DECL_STRUCT_FUNCTION (decl));
766 assign_assembler_name_if_neeeded (node->decl);
768 /* Make sure to gimplify bodies only once. While analyzing a
769 function we lower it, which will require gimplified nested
770 functions, so we can end up here with an already gimplified
771 body. */
772 if (!gimple_body (decl))
773 gimplify_function_tree (decl);
774 dump_function (TDI_generic, decl);
776 cgraph_lower_function (node);
777 node->analyzed = true;
779 pop_cfun ();
780 current_function_decl = save;
783 /* Look for externally_visible and used attributes and mark cgraph nodes
784 accordingly.
786 We cannot mark the nodes at the point the attributes are processed (in
787 handle_*_attribute) because the copy of the declarations available at that
788 point may not be canonical. For example, in:
790 void f();
791 void f() __attribute__((used));
793 the declaration we see in handle_used_attribute will be the second
794 declaration -- but the front end will subsequently merge that declaration
795 with the original declaration and discard the second declaration.
797 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
799 void f() {}
800 void f() __attribute__((externally_visible));
802 is valid.
804 So, we walk the nodes at the end of the translation unit, applying the
805 attributes at that point. */
807 static void
808 process_function_and_variable_attributes (struct cgraph_node *first,
809 struct varpool_node *first_var)
811 struct cgraph_node *node;
812 struct varpool_node *vnode;
814 for (node = cgraph_nodes; node != first; node = node->next)
816 tree decl = node->decl;
817 if (DECL_PRESERVE_P (decl))
818 cgraph_mark_needed_node (node);
819 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
821 if (! TREE_PUBLIC (node->decl))
822 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
823 "%<externally_visible%>"
824 " attribute have effect only on public objects");
825 else if (node->local.finalized)
826 cgraph_mark_needed_node (node);
829 for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
831 tree decl = vnode->decl;
832 if (DECL_PRESERVE_P (decl))
834 vnode->force_output = true;
835 if (vnode->finalized)
836 varpool_mark_needed_node (vnode);
838 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
840 if (! TREE_PUBLIC (vnode->decl))
841 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
842 "%<externally_visible%>"
843 " attribute have effect only on public objects");
844 else if (vnode->finalized)
845 varpool_mark_needed_node (vnode);
850 /* Process the CGRAPH_NODES_NEEDED queue, analyze each function (and transitively
851 each reachable function) and build the cgraph.
852 The function can be called multiple times after inserting new nodes
853 at the beginning of the queue. Just the new part of the queue is re-scanned then. */
855 static void
856 cgraph_analyze_functions (void)
858 /* Keep track of already processed nodes when called multiple times for
859 intermodule optimization. */
860 static struct cgraph_node *first_analyzed;
861 struct cgraph_node *first_processed = first_analyzed;
862 static struct varpool_node *first_analyzed_var;
863 struct cgraph_node *node, *next;
865 bitmap_obstack_initialize (NULL);
866 process_function_and_variable_attributes (first_processed,
867 first_analyzed_var);
868 first_processed = cgraph_nodes;
869 first_analyzed_var = varpool_nodes;
870 varpool_analyze_pending_decls ();
871 if (cgraph_dump_file)
873 fprintf (cgraph_dump_file, "Initial entry points:");
874 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
875 if (node->needed)
876 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
877 fprintf (cgraph_dump_file, "\n");
879 cgraph_process_new_functions ();
881 /* Propagate reachability flag and lower representation of all reachable
882 functions. In the future, lowering will introduce new functions and
883 new entry points on the way (by template instantiation and virtual
884 method table generation for instance). */
885 while (cgraph_nodes_queue)
887 struct cgraph_edge *edge;
888 tree decl = cgraph_nodes_queue->decl;
890 node = cgraph_nodes_queue;
891 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
892 node->next_needed = NULL;
894 /* ??? It is possible to create an extern inline function and later use the
895 weak alias attribute to kill its body. See
896 gcc.c-torture/compile/20011119-1.c */
897 if (!DECL_STRUCT_FUNCTION (decl))
899 cgraph_reset_node (node);
900 continue;
903 if (!node->analyzed)
904 cgraph_analyze_function (node);
906 for (edge = node->callees; edge; edge = edge->next_callee)
907 if (!edge->callee->reachable)
908 cgraph_mark_reachable_node (edge->callee);
910 if (node->same_comdat_group)
912 for (next = node->same_comdat_group;
913 next != node;
914 next = next->same_comdat_group)
915 cgraph_mark_reachable_node (next);
918 /* If decl is a clone of an abstract function, mark that abstract
919 function so that we don't release its body. The DECL_INITIAL() of that
920 abstract function declaration will later be needed to output debug info. */
921 if (DECL_ABSTRACT_ORIGIN (decl))
923 struct cgraph_node *origin_node = cgraph_node (DECL_ABSTRACT_ORIGIN (decl));
924 origin_node->abstract_and_needed = true;
927 /* We finalize local static variables while constructing callgraph
928 edges. Process their attributes too. */
929 process_function_and_variable_attributes (first_processed,
930 first_analyzed_var);
931 first_processed = cgraph_nodes;
932 first_analyzed_var = varpool_nodes;
933 varpool_analyze_pending_decls ();
934 cgraph_process_new_functions ();
937 /* Collect entry points to the unit. */
938 if (cgraph_dump_file)
940 fprintf (cgraph_dump_file, "Unit entry points:");
941 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
942 if (node->needed)
943 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
944 fprintf (cgraph_dump_file, "\n\nInitial ");
945 dump_cgraph (cgraph_dump_file);
948 if (cgraph_dump_file)
949 fprintf (cgraph_dump_file, "\nReclaiming functions:");
951 for (node = cgraph_nodes; node != first_analyzed; node = next)
953 tree decl = node->decl;
954 next = node->next;
956 if (node->local.finalized && !gimple_has_body_p (decl))
957 cgraph_reset_node (node);
959 if (!node->reachable && gimple_has_body_p (decl))
961 if (cgraph_dump_file)
962 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
963 cgraph_remove_node (node);
964 continue;
966 else
967 node->next_needed = NULL;
968 gcc_assert (!node->local.finalized || gimple_has_body_p (decl));
969 gcc_assert (node->analyzed == node->local.finalized);
971 if (cgraph_dump_file)
973 fprintf (cgraph_dump_file, "\n\nReclaimed ");
974 dump_cgraph (cgraph_dump_file);
976 bitmap_obstack_release (NULL);
977 first_analyzed = cgraph_nodes;
978 ggc_collect ();
982 /* Analyze the whole compilation unit once it is parsed completely. */
984 void
985 cgraph_finalize_compilation_unit (void)
987 timevar_push (TV_CGRAPH);
989 /* Do not skip analyzing the functions if there were errors; we would
990 miss diagnostics for the following functions otherwise. */
992 /* Emit size functions we didn't inline. */
993 finalize_size_functions ();
995 /* Mark alias targets necessary and emit diagnostics. */
996 finish_aliases_1 ();
998 if (!quiet_flag)
1000 fprintf (stderr, "\nAnalyzing compilation unit\n");
1001 fflush (stderr);
1004 /* Gimplify and lower all functions, compute reachability and
1005 remove unreachable nodes. */
1006 cgraph_analyze_functions ();
1008 /* Mark alias targets necessary and emit diagnostics. */
1009 finish_aliases_1 ();
1011 /* Gimplify and lower thunks. */
1012 cgraph_analyze_functions ();
1014 /* Finally drive the pass manager. */
1015 cgraph_optimize ();
1017 timevar_pop (TV_CGRAPH);
1021 /* Figure out what functions we want to assemble. */
1023 static void
1024 cgraph_mark_functions_to_output (void)
1026 struct cgraph_node *node;
1027 #ifdef ENABLE_CHECKING
1028 bool check_same_comdat_groups = false;
1030 for (node = cgraph_nodes; node; node = node->next)
1031 gcc_assert (!node->process);
1032 #endif
1034 for (node = cgraph_nodes; node; node = node->next)
1036 tree decl = node->decl;
1037 struct cgraph_edge *e;
1039 gcc_assert (!node->process || node->same_comdat_group);
1040 if (node->process)
1041 continue;
1043 for (e = node->callers; e; e = e->next_caller)
1044 if (e->inline_failed)
1045 break;
1047 /* We need to output all local functions that are used and not
1048 always inlined, as well as those that are reachable from
1049 outside the current compilation unit. */
1050 if (node->analyzed
1051 && !node->global.inlined_to
1052 && (!cgraph_only_called_directly_p (node)
1053 || (e && node->reachable))
1054 && !TREE_ASM_WRITTEN (decl)
1055 && !DECL_EXTERNAL (decl))
1057 node->process = 1;
1058 if (node->same_comdat_group)
1060 struct cgraph_node *next;
1061 for (next = node->same_comdat_group;
1062 next != node;
1063 next = next->same_comdat_group)
1064 next->process = 1;
1067 else if (node->same_comdat_group)
1069 #ifdef ENABLE_CHECKING
1070 check_same_comdat_groups = true;
1071 #endif
1073 else
1075 /* We should've reclaimed all functions that are not needed. */
1076 #ifdef ENABLE_CHECKING
1077 if (!node->global.inlined_to
1078 && gimple_has_body_p (decl)
1079 /* FIXME: in an ltrans unit, when the offline copy is outside the partition but inline copies
1080 are inside the partition, we can end up not removing the body since we no longer
1081 have an analyzed node pointing to it. */
1082 && !node->in_other_partition
1083 && !DECL_EXTERNAL (decl))
1085 dump_cgraph_node (stderr, node);
1086 internal_error ("failed to reclaim unneeded function");
1088 #endif
1089 gcc_assert (node->global.inlined_to
1090 || !gimple_has_body_p (decl)
1091 || node->in_other_partition
1092 || DECL_EXTERNAL (decl));
1097 #ifdef ENABLE_CHECKING
1098 if (check_same_comdat_groups)
1099 for (node = cgraph_nodes; node; node = node->next)
1100 if (node->same_comdat_group && !node->process)
1102 tree decl = node->decl;
1103 if (!node->global.inlined_to
1104 && gimple_has_body_p (decl)
1105 /* FIXME: in an ltrans unit, when the offline copy is outside the partition but inline copies
1106 are inside the partition, we can end up not removing the body since we no longer
1107 have an analyzed node pointing to it. */
1108 && !node->in_other_partition
1109 && !DECL_EXTERNAL (decl))
1111 dump_cgraph_node (stderr, node);
1112 internal_error ("failed to reclaim unneeded function");
1115 #endif
1118 /* DECL is a FUNCTION_DECL. Initialize data structures so DECL is a function
1119 in lowered GIMPLE form.
1121 Set current_function_decl and cfun to the newly constructed empty function body.
1122 Return the basic block in the function body. */
1124 static basic_block
1125 init_lowered_empty_function (tree decl)
1127 basic_block bb;
1129 current_function_decl = decl;
1130 allocate_struct_function (decl, false);
1131 gimple_register_cfg_hooks ();
1132 init_empty_tree_cfg ();
1133 init_tree_ssa (cfun);
1134 init_ssa_operands ();
1135 cfun->gimple_df->in_ssa_p = true;
1136 DECL_INITIAL (decl) = make_node (BLOCK);
1138 DECL_SAVED_TREE (decl) = error_mark_node;
1139 cfun->curr_properties |=
1140 (PROP_gimple_lcf | PROP_gimple_leh | PROP_cfg | PROP_referenced_vars |
1141 PROP_ssa);
1143 /* Create BB for body of the function and connect it properly. */
1144 bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
1145 make_edge (ENTRY_BLOCK_PTR, bb, 0);
1146 make_edge (bb, EXIT_BLOCK_PTR, 0);
1148 return bb;
1151 /* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1152 offset indicated by VIRTUAL_OFFSET, if that is
1153 non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
1154 zero for a result adjusting thunk. */
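/* Illustrative sketch only (not from the original sources): for a
   this-adjusting thunk the statements built below compute roughly

       ptr += fixed_offset;
       if (virtual_offset)
         ptr += *(ptrdiff_t *) (*(void **) ptr + virtual_offset);

   i.e. the vcall offset is loaded from the vtable the object points to and
   added on top of the constant adjustment.  For a result adjusting thunk
   the constant FIXED_OFFSET is instead applied after the virtual part.  */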
1156 static tree
1157 thunk_adjust (gimple_stmt_iterator * bsi,
1158 tree ptr, bool this_adjusting,
1159 HOST_WIDE_INT fixed_offset, tree virtual_offset)
1161 gimple stmt;
1162 tree ret;
1164 if (this_adjusting
1165 && fixed_offset != 0)
1167 stmt = gimple_build_assign (ptr,
1168 fold_build2_loc (input_location,
1169 POINTER_PLUS_EXPR,
1170 TREE_TYPE (ptr), ptr,
1171 size_int (fixed_offset)));
1172 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1175 /* If there's a virtual offset, look up that value in the vtable and
1176 adjust the pointer again. */
1177 if (virtual_offset)
1179 tree vtabletmp;
1180 tree vtabletmp2;
1181 tree vtabletmp3;
1182 tree offsettmp;
1184 if (!vtable_entry_type)
1186 tree vfunc_type = make_node (FUNCTION_TYPE);
1187 TREE_TYPE (vfunc_type) = integer_type_node;
1188 TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
1189 layout_type (vfunc_type);
1191 vtable_entry_type = build_pointer_type (vfunc_type);
1194 vtabletmp =
1195 create_tmp_var (build_pointer_type
1196 (build_pointer_type (vtable_entry_type)), "vptr");
1198 /* The vptr is always at offset zero in the object. */
1199 stmt = gimple_build_assign (vtabletmp,
1200 build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
1201 ptr));
1202 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1203 mark_symbols_for_renaming (stmt);
1204 find_referenced_vars_in (stmt);
1206 /* Form the vtable address. */
1207 vtabletmp2 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp)),
1208 "vtableaddr");
1209 stmt = gimple_build_assign (vtabletmp2,
1210 build_simple_mem_ref (vtabletmp));
1211 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1212 mark_symbols_for_renaming (stmt);
1213 find_referenced_vars_in (stmt);
1215 /* Find the entry with the vcall offset. */
1216 stmt = gimple_build_assign (vtabletmp2,
1217 fold_build2_loc (input_location,
1218 POINTER_PLUS_EXPR,
1219 TREE_TYPE (vtabletmp2),
1220 vtabletmp2,
1221 fold_convert (sizetype,
1222 virtual_offset)));
1223 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1225 /* Get the offset itself. */
1226 vtabletmp3 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp2)),
1227 "vcalloffset");
1228 stmt = gimple_build_assign (vtabletmp3,
1229 build_simple_mem_ref (vtabletmp2));
1230 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1231 mark_symbols_for_renaming (stmt);
1232 find_referenced_vars_in (stmt);
1234 /* Cast to sizetype. */
1235 offsettmp = create_tmp_var (sizetype, "offset");
1236 stmt = gimple_build_assign (offsettmp, fold_convert (sizetype, vtabletmp3));
1237 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1238 mark_symbols_for_renaming (stmt);
1239 find_referenced_vars_in (stmt);
1241 /* Adjust the `this' pointer. */
1242 ptr = fold_build2_loc (input_location,
1243 POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
1244 offsettmp);
1247 if (!this_adjusting
1248 && fixed_offset != 0)
1249 /* Adjust the pointer by the constant. */
1251 tree ptrtmp;
1253 if (TREE_CODE (ptr) == VAR_DECL)
1254 ptrtmp = ptr;
1255 else
1257 ptrtmp = create_tmp_var (TREE_TYPE (ptr), "ptr");
1258 stmt = gimple_build_assign (ptrtmp, ptr);
1259 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1260 mark_symbols_for_renaming (stmt);
1261 find_referenced_vars_in (stmt);
1263 ptr = fold_build2_loc (input_location,
1264 POINTER_PLUS_EXPR, TREE_TYPE (ptrtmp), ptrtmp,
1265 size_int (fixed_offset));
1268 /* Emit the statement and gimplify the adjustment expression. */
1269 ret = create_tmp_var (TREE_TYPE (ptr), "adjusted_this");
1270 stmt = gimple_build_assign (ret, ptr);
1271 mark_symbols_for_renaming (stmt);
1272 find_referenced_vars_in (stmt);
1273 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1275 return ret;
1278 /* Produce assembler for thunk NODE. */
1280 static void
1281 assemble_thunk (struct cgraph_node *node)
1283 bool this_adjusting = node->thunk.this_adjusting;
1284 HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
1285 HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
1286 tree virtual_offset = NULL;
1287 tree alias = node->thunk.alias;
1288 tree thunk_fndecl = node->decl;
1289 tree a = DECL_ARGUMENTS (thunk_fndecl);
1291 current_function_decl = thunk_fndecl;
1293 if (this_adjusting
1294 && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1295 virtual_value, alias))
1297 const char *fnname;
1298 tree fn_block;
1300 DECL_RESULT (thunk_fndecl)
1301 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1302 RESULT_DECL, 0, integer_type_node);
1303 fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
1305 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1306 create one. */
1307 fn_block = make_node (BLOCK);
1308 BLOCK_VARS (fn_block) = a;
1309 DECL_INITIAL (thunk_fndecl) = fn_block;
1310 init_function_start (thunk_fndecl);
1311 cfun->is_thunk = 1;
1312 assemble_start_function (thunk_fndecl, fnname);
1314 targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1315 fixed_offset, virtual_value, alias);
1317 assemble_end_function (thunk_fndecl, fnname);
1318 init_insn_lengths ();
1319 free_after_compilation (cfun);
1320 set_cfun (NULL);
1321 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1323 else
1325 tree restype;
1326 basic_block bb, then_bb, else_bb, return_bb;
1327 gimple_stmt_iterator bsi;
1328 int nargs = 0;
1329 tree arg;
1330 int i;
1331 tree resdecl;
1332 tree restmp = NULL;
1333 VEC(tree, heap) *vargs;
1335 gimple call;
1336 gimple ret;
1338 DECL_IGNORED_P (thunk_fndecl) = 1;
1339 bitmap_obstack_initialize (NULL);
1341 if (node->thunk.virtual_offset_p)
1342 virtual_offset = size_int (virtual_value);
1344 /* Build the return declaration for the function. */
1345 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1346 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1348 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1349 DECL_ARTIFICIAL (resdecl) = 1;
1350 DECL_IGNORED_P (resdecl) = 1;
1351 DECL_RESULT (thunk_fndecl) = resdecl;
1353 else
1354 resdecl = DECL_RESULT (thunk_fndecl);
1356 bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl);
1358 bsi = gsi_start_bb (bb);
1360 /* Build call to the function being thunked. */
1361 if (!VOID_TYPE_P (restype))
1363 if (!is_gimple_reg_type (restype))
1365 restmp = resdecl;
1366 add_local_decl (cfun, restmp);
1367 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
1369 else
1370 restmp = create_tmp_var_raw (restype, "retval");
1373 for (arg = a; arg; arg = DECL_CHAIN (arg))
1374 nargs++;
1375 vargs = VEC_alloc (tree, heap, nargs);
1376 if (this_adjusting)
1377 VEC_quick_push (tree, vargs,
1378 thunk_adjust (&bsi,
1379 a, 1, fixed_offset,
1380 virtual_offset));
1381 else
1382 VEC_quick_push (tree, vargs, a);
1383 for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
1384 VEC_quick_push (tree, vargs, arg);
1385 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
1386 VEC_free (tree, heap, vargs);
1387 gimple_call_set_cannot_inline (call, true);
1388 gimple_call_set_from_thunk (call, true);
1389 if (restmp)
1390 gimple_call_set_lhs (call, restmp);
1391 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
1392 mark_symbols_for_renaming (call);
1393 find_referenced_vars_in (call);
1394 update_stmt (call);
1396 if (restmp && !this_adjusting)
1398 tree true_label = NULL_TREE;
1400 if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
1402 gimple stmt;
1403 /* If the return type is a pointer, we need to
1404 protect against NULL. We know there will be an
1405 adjustment, because that's why we're emitting a
1406 thunk. */
1407 then_bb = create_basic_block (NULL, (void *) 0, bb);
1408 return_bb = create_basic_block (NULL, (void *) 0, then_bb);
1409 else_bb = create_basic_block (NULL, (void *) 0, else_bb);
1410 remove_edge (single_succ_edge (bb));
1411 true_label = gimple_block_label (then_bb);
1412 stmt = gimple_build_cond (NE_EXPR, restmp,
1413 build_zero_cst (TREE_TYPE (restmp)),
1414 NULL_TREE, NULL_TREE);
1415 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1416 make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1417 make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1418 make_edge (return_bb, EXIT_BLOCK_PTR, 0);
1419 make_edge (then_bb, return_bb, EDGE_FALLTHRU);
1420 make_edge (else_bb, return_bb, EDGE_FALLTHRU);
1421 bsi = gsi_last_bb (then_bb);
1424 restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
1425 fixed_offset, virtual_offset);
1426 if (true_label)
1428 gimple stmt;
1429 bsi = gsi_last_bb (else_bb);
1430 stmt = gimple_build_assign (restmp,
1431 build_zero_cst (TREE_TYPE (restmp)));
1432 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1433 bsi = gsi_last_bb (return_bb);
1436 else
1437 gimple_call_set_tail (call, true);
1439 /* Build return value. */
1440 ret = gimple_build_return (restmp);
1441 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
1443 delete_unreachable_blocks ();
1444 update_ssa (TODO_update_ssa);
1446 cgraph_remove_same_body_alias (node);
1447 /* Since we want to emit the thunk, we explicitly mark its name as
1448 referenced. */
1449 cgraph_add_new_function (thunk_fndecl, true);
1450 bitmap_obstack_release (NULL);
1452 current_function_decl = NULL;
1455 /* Expand function specified by NODE. */
1457 static void
1458 cgraph_expand_function (struct cgraph_node *node)
1460 tree decl = node->decl;
1462 /* We ought to not compile any inline clones. */
1463 gcc_assert (!node->global.inlined_to);
1465 announce_function (decl);
1466 node->process = 0;
1467 if (node->same_body)
1469 struct cgraph_node *alias, *next;
1470 bool saved_alias = node->alias;
1471 for (alias = node->same_body;
1472 alias && alias->next; alias = alias->next)
1474 /* Walk aliases in the order they were created; it is possible that
1475 thunks refer to the aliases made earlier. */
1476 for (; alias; alias = next)
1478 next = alias->previous;
1479 if (!alias->thunk.thunk_p)
1480 assemble_alias (alias->decl,
1481 DECL_ASSEMBLER_NAME (alias->thunk.alias));
1482 else
1483 assemble_thunk (alias);
1485 node->alias = saved_alias;
1486 cgraph_process_new_functions ();
1489 gcc_assert (node->lowered);
1491 /* Generate RTL for the body of DECL. */
1492 tree_rest_of_compilation (decl);
1494 /* Make sure that BE didn't give up on compiling. */
1495 gcc_assert (TREE_ASM_WRITTEN (decl));
1496 current_function_decl = NULL;
1497 gcc_assert (!cgraph_preserve_function_body_p (decl));
1498 cgraph_release_function_body (node);
1499 /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
1500 points to the dead function body. */
1501 cgraph_node_remove_callees (node);
1503 cgraph_function_flags_ready = true;
1506 /* Return true when the call on edge E is going to be inlined; store E's inline_failed reason in *REASON. */
1508 bool
1509 cgraph_inline_p (struct cgraph_edge *e, cgraph_inline_failed_t *reason)
1511 *reason = e->inline_failed;
1512 return !e->inline_failed;
1517 /* Expand all functions that must be output.
1519 Attempt to topologically sort the nodes so a function is output when
1520 all the functions it calls are already assembled, to allow data to be
1521 propagated across the callgraph. Use a stack to get smaller distance
1522 between a function and its callees (later we may choose to use a more
1523 sophisticated algorithm for function reordering; we will likely want
1524 to use subsections to make the output functions appear in top-down
1525 order). */
1527 static void
1528 cgraph_expand_all_functions (void)
1530 struct cgraph_node *node;
1531 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1532 int order_pos, new_order_pos = 0;
1533 int i;
1535 order_pos = cgraph_postorder (order);
1536 gcc_assert (order_pos == cgraph_n_nodes);
1538 /* The garbage collector may remove inline clones that we eliminate during
1539 optimization, so we must be sure not to reference them. */
1540 for (i = 0; i < order_pos; i++)
1541 if (order[i]->process)
1542 order[new_order_pos++] = order[i];
1544 for (i = new_order_pos - 1; i >= 0; i--)
1546 node = order[i];
1547 if (node->process)
1549 gcc_assert (node->reachable);
1550 node->process = 0;
1551 cgraph_expand_function (node);
1554 cgraph_process_new_functions ();
1556 free (order);
1560 /* This is used to sort the node types by the cgraph order number. */
1562 enum cgraph_order_sort_kind
1564 ORDER_UNDEFINED = 0,
1565 ORDER_FUNCTION,
1566 ORDER_VAR,
1567 ORDER_ASM
1570 struct cgraph_order_sort
1572 enum cgraph_order_sort_kind kind;
1573 union
1575 struct cgraph_node *f;
1576 struct varpool_node *v;
1577 struct cgraph_asm_node *a;
1578 } u;
1581 /* Output all functions, variables, and asm statements in the order
1582 according to their order fields, which is the order in which they
1583 appeared in the file. This implements -fno-toplevel-reorder. In
1584 this mode we may output functions and variables which don't really
1585 need to be output. */
1587 static void
1588 cgraph_output_in_order (void)
1590 int max;
1591 struct cgraph_order_sort *nodes;
1592 int i;
1593 struct cgraph_node *pf;
1594 struct varpool_node *pv;
1595 struct cgraph_asm_node *pa;
1597 max = cgraph_order;
1598 nodes = XCNEWVEC (struct cgraph_order_sort, max);
1600 varpool_analyze_pending_decls ();
1602 for (pf = cgraph_nodes; pf; pf = pf->next)
1604 if (pf->process)
1606 i = pf->order;
1607 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1608 nodes[i].kind = ORDER_FUNCTION;
1609 nodes[i].u.f = pf;
1613 for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
1615 i = pv->order;
1616 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1617 nodes[i].kind = ORDER_VAR;
1618 nodes[i].u.v = pv;
1621 for (pa = cgraph_asm_nodes; pa; pa = pa->next)
1623 i = pa->order;
1624 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1625 nodes[i].kind = ORDER_ASM;
1626 nodes[i].u.a = pa;
1629 /* In toplevel reorder mode we output all statics; mark them as needed. */
1630 for (i = 0; i < max; ++i)
1632 if (nodes[i].kind == ORDER_VAR)
1634 varpool_mark_needed_node (nodes[i].u.v);
1637 varpool_empty_needed_queue ();
1639 for (i = 0; i < max; ++i)
1641 switch (nodes[i].kind)
1643 case ORDER_FUNCTION:
1644 nodes[i].u.f->process = 0;
1645 cgraph_expand_function (nodes[i].u.f);
1646 break;
1648 case ORDER_VAR:
1649 varpool_assemble_decl (nodes[i].u.v);
1650 break;
1652 case ORDER_ASM:
1653 assemble_asm (nodes[i].u.a->asm_str);
1654 break;
1656 case ORDER_UNDEFINED:
1657 break;
1659 default:
1660 gcc_unreachable ();
1664 cgraph_asm_nodes = NULL;
1665 free (nodes);
1668 /* Return true when function body of DECL still needs to be kept around
1669 for later re-use. */
1670 bool
1671 cgraph_preserve_function_body_p (tree decl)
1673 struct cgraph_node *node;
1675 gcc_assert (cgraph_global_info_ready);
1676 /* Look if there is any clone around. */
1677 node = cgraph_node (decl);
1678 if (node->clones)
1679 return true;
1680 return false;
1683 static void
1684 ipa_passes (void)
1686 set_cfun (NULL);
1687 current_function_decl = NULL;
1688 gimple_register_cfg_hooks ();
1689 bitmap_obstack_initialize (NULL);
1691 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);
1693 if (!in_lto_p)
1694 execute_ipa_pass_list (all_small_ipa_passes);
1696 /* If pass_all_early_optimizations was not scheduled, the state of
1697 the cgraph will not be properly updated. Update it now. */
1698 if (cgraph_state < CGRAPH_STATE_IPA_SSA)
1699 cgraph_state = CGRAPH_STATE_IPA_SSA;
1701 if (!in_lto_p)
1703 /* Generate coverage variables and constructors. */
1704 coverage_finish ();
1706 /* Process new functions added. */
1707 set_cfun (NULL);
1708 current_function_decl = NULL;
1709 cgraph_process_new_functions ();
1711 execute_ipa_summary_passes
1712 ((struct ipa_opt_pass_d *) all_regular_ipa_passes);
1715 /* Some targets need to handle LTO assembler output specially. */
1716 if (flag_generate_lto)
1717 targetm.asm_out.lto_start ();
1719 execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);
1721 if (!in_lto_p)
1722 ipa_write_summaries ();
1724 if (flag_generate_lto)
1725 targetm.asm_out.lto_end ();
1727 if (!flag_ltrans)
1728 execute_ipa_pass_list (all_regular_ipa_passes);
1729 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
1731 bitmap_obstack_release (NULL);
1735 /* Perform simple optimizations based on callgraph. */
1737 void
1738 cgraph_optimize (void)
1740 if (seen_error ())
1741 return;
1743 #ifdef ENABLE_CHECKING
1744 verify_cgraph ();
1745 #endif
1747 /* Frontend may output common variables after the unit has been finalized.
1748 It is safe to deal with them here as they are always zero initialized. */
1749 varpool_analyze_pending_decls ();
1751 timevar_push (TV_CGRAPHOPT);
1752 if (pre_ipa_mem_report)
1754 fprintf (stderr, "Memory consumption before IPA\n");
1755 dump_memory_report (false);
1757 if (!quiet_flag)
1758 fprintf (stderr, "Performing interprocedural optimizations\n");
1759 cgraph_state = CGRAPH_STATE_IPA;
1761 /* Don't run the IPA passes if there were any error or sorry messages. */
1762 if (!seen_error ())
1763 ipa_passes ();
1765 /* Do nothing else if any IPA pass found errors. */
1766 if (seen_error ())
1768 timevar_pop (TV_CGRAPHOPT);
1769 return;
1772 /* This pass removes bodies of extern inline functions we never inlined.
1773 Do this later so other IPA passes see what is really going on. */
1774 cgraph_remove_unreachable_nodes (false, dump_file);
1775 cgraph_global_info_ready = true;
1776 if (cgraph_dump_file)
1778 fprintf (cgraph_dump_file, "Optimized ");
1779 dump_cgraph (cgraph_dump_file);
1780 dump_varpool (cgraph_dump_file);
1782 if (post_ipa_mem_report)
1784 fprintf (stderr, "Memory consumption after IPA\n");
1785 dump_memory_report (false);
1787 timevar_pop (TV_CGRAPHOPT);
1789 /* Output everything. */
1790 (*debug_hooks->assembly_start) ();
1791 if (!quiet_flag)
1792 fprintf (stderr, "Assembling functions:\n");
1793 #ifdef ENABLE_CHECKING
1794 verify_cgraph ();
1795 #endif
1797 cgraph_materialize_all_clones ();
1798 cgraph_mark_functions_to_output ();
1800 cgraph_state = CGRAPH_STATE_EXPANSION;
1801 if (!flag_toplevel_reorder)
1802 cgraph_output_in_order ();
1803 else
1805 cgraph_output_pending_asms ();
1807 cgraph_expand_all_functions ();
1808 varpool_remove_unreferenced_decls ();
1810 varpool_assemble_pending_decls ();
1812 cgraph_process_new_functions ();
1813 cgraph_state = CGRAPH_STATE_FINISHED;
1815 if (cgraph_dump_file)
1817 fprintf (cgraph_dump_file, "\nFinal ");
1818 dump_cgraph (cgraph_dump_file);
1820 #ifdef ENABLE_CHECKING
1821 verify_cgraph ();
1822 /* Double check that all inline clones are gone and that all
1823 function bodies have been released from memory. */
1824 if (!seen_error ())
1826 struct cgraph_node *node;
1827 bool error_found = false;
1829 for (node = cgraph_nodes; node; node = node->next)
1830 if (node->analyzed
1831 && (node->global.inlined_to
1832 || gimple_has_body_p (node->decl)))
1834 error_found = true;
1835 dump_cgraph_node (stderr, node);
1837 if (error_found)
1838 internal_error ("nodes with unreleased memory found");
1840 #endif
1843 void
1844 init_cgraph (void)
1846 if (!cgraph_dump_file)
1847 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
1850 /* The edges representing the callers of the NEW_VERSION node were
1851 fixed by cgraph_function_versioning (); now the call_expr in their
1852 respective tree code should be updated to call the NEW_VERSION. */
1854 static void
1855 update_call_expr (struct cgraph_node *new_version)
1857 struct cgraph_edge *e;
1859 gcc_assert (new_version);
1861 /* Update the call expr on the edges to call the new version. */
1862 for (e = new_version->callers; e; e = e->next_caller)
1864 struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
1865 gimple_call_set_fndecl (e->call_stmt, new_version->decl);
1866 maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
1871 /* Create a new cgraph node which is the new version of
1872 OLD_VERSION node. REDIRECT_CALLERS holds the caller
1873 edges which should be redirected to point to
1874 NEW_VERSION. ALL the callee edges of OLD_VERSION
1875 are cloned to the new version node. Return the new
1876 version node.
1878 If non-NULL, BBS_TO_COPY determines which basic blocks
1879 are copied, to prevent duplication of calls that are dead
1880 in the clone. */
1882 static struct cgraph_node *
1883 cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
1884 tree new_decl,
1885 VEC(cgraph_edge_p,heap) *redirect_callers,
1886 bitmap bbs_to_copy)
1888 struct cgraph_node *new_version;
1889 struct cgraph_edge *e;
1890 unsigned i;
1892 gcc_assert (old_version);
1894 new_version = cgraph_node (new_decl);
1896 new_version->analyzed = true;
1897 new_version->local = old_version->local;
1898 new_version->local.externally_visible = false;
1899 new_version->local.local = true;
1900 new_version->local.vtable_method = false;
1901 new_version->global = old_version->global;
1902 new_version->rtl = old_version->rtl;
1903 new_version->reachable = true;
1904 new_version->count = old_version->count;
1906 for (e = old_version->callees; e; e=e->next_callee)
1907 if (!bbs_to_copy
1908 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
1909 cgraph_clone_edge (e, new_version, e->call_stmt,
1910 e->lto_stmt_uid, REG_BR_PROB_BASE,
1911 CGRAPH_FREQ_BASE,
1912 e->loop_nest, true);
1913 for (e = old_version->indirect_calls; e; e=e->next_callee)
1914 if (!bbs_to_copy
1915 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
1916 cgraph_clone_edge (e, new_version, e->call_stmt,
1917 e->lto_stmt_uid, REG_BR_PROB_BASE,
1918 CGRAPH_FREQ_BASE,
1919 e->loop_nest, true);
1920 FOR_EACH_VEC_ELT (cgraph_edge_p, redirect_callers, i, e)
1922 /* Redirect calls to the old version node to point to its new
1923 version. */
1924 cgraph_redirect_edge_callee (e, new_version);
1927 return new_version;
1930 /* Perform function versioning.
1931 Function versioning includes copying of the tree and
1932 a callgraph update (creating a new cgraph node and updating
1933 its callees and callers).
1935 REDIRECT_CALLERS varray includes the edges to be redirected
1936 to the new version.
1938 TREE_MAP is a mapping of tree nodes we want to replace with
1939 new ones (according to results of prior analysis).
1940 OLD_VERSION_NODE is the node that is versioned.
1941 It returns the new version's cgraph node.
1942 If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
1943 from the new version.
1944 If non-NULL, BBS_TO_COPY determines which basic blocks to copy.
1945 If non-NULL, NEW_ENTRY_BLOCK determines the new entry BB of the clone. */
1947 struct cgraph_node *
1948 cgraph_function_versioning (struct cgraph_node *old_version_node,
1949 VEC(cgraph_edge_p,heap) *redirect_callers,
1950 VEC (ipa_replace_map_p,gc)* tree_map,
1951 bitmap args_to_skip,
1952 bitmap bbs_to_copy,
1953 basic_block new_entry_block,
1954 const char *clone_name)
1956 tree old_decl = old_version_node->decl;
1957 struct cgraph_node *new_version_node = NULL;
1958 tree new_decl;
1960 if (!tree_versionable_function_p (old_decl))
1961 return NULL;
1963 /* Make a new FUNCTION_DECL tree node for the
1964 new version. */
1965 if (!args_to_skip)
1966 new_decl = copy_node (old_decl);
1967 else
1968 new_decl = build_function_decl_skip_args (old_decl, args_to_skip);
1970 /* Generate a new name for the new version. */
1971 DECL_NAME (new_decl) = clone_function_name (old_decl, clone_name);
1972 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
1973 SET_DECL_RTL (new_decl, NULL);
1975 /* Create the new version's call-graph node
1976 and update the edges of the new node. */
1977 new_version_node =
1978 cgraph_copy_node_for_versioning (old_version_node, new_decl,
1979 redirect_callers, bbs_to_copy);
1981 /* Copy the OLD_VERSION_NODE function tree to the new version. */
1982 tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
1983 bbs_to_copy, new_entry_block);
1985 /* Update the new version's properties.
1986 Make the new version visible only within this translation unit. Make sure
1987 it is not weak as well.
1988 ??? We cannot use COMDAT linkage because there is no
1989 ABI support for this. */
1990 cgraph_make_decl_local (new_version_node->decl);
1991 DECL_VIRTUAL_P (new_version_node->decl) = 0;
1992 new_version_node->local.externally_visible = 0;
1993 new_version_node->local.local = 1;
1994 new_version_node->lowered = true;
1996 /* Update the call_expr on the edges to call the new version node. */
1997 update_call_expr (new_version_node);
1999 cgraph_call_function_insertion_hooks (new_version_node);
2000 return new_version_node;
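/* A minimal usage sketch for cgraph_function_versioning, assuming an IPA
   pass that wants a local clone of NODE with its first parameter dropped
   and a single caller edge E redirected to the clone.  The function and
   variable names are hypothetical; the tree_map, bbs_to_copy and
   new_entry_block arguments are left NULL so the whole body is copied
   without replacements.  */

static struct cgraph_node *
example_clone_without_first_arg (struct cgraph_node *node,
                                 struct cgraph_edge *e)
{
  VEC (cgraph_edge_p, heap) *redirect_callers = NULL;
  bitmap args_to_skip = BITMAP_ALLOC (NULL);
  struct cgraph_node *clone;

  /* Redirect just this one caller to the clone and drop argument 0.  */
  VEC_safe_push (cgraph_edge_p, heap, redirect_callers, e);
  bitmap_set_bit (args_to_skip, 0);

  clone = cgraph_function_versioning (node, redirect_callers, NULL,
                                      args_to_skip, NULL, NULL, "example");

  VEC_free (cgraph_edge_p, heap, redirect_callers);
  BITMAP_FREE (args_to_skip);
  return clone;
}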
2003 /* Produce a separate function body for inline clones so that the offline copy
2004 can be modified without affecting them. */
2005 struct cgraph_node *
2006 save_inline_function_body (struct cgraph_node *node)
2008 struct cgraph_node *first_clone, *n;
2010 gcc_assert (node == cgraph_node (node->decl));
2012 cgraph_lower_function (node);
2014 first_clone = node->clones;
2016 first_clone->decl = copy_node (node->decl);
2017 cgraph_insert_node_to_hashtable (first_clone);
2018 gcc_assert (first_clone == cgraph_node (first_clone->decl));
2019 if (first_clone->next_sibling_clone)
2021 for (n = first_clone->next_sibling_clone; n->next_sibling_clone; n = n->next_sibling_clone)
2022 n->clone_of = first_clone;
2023 n->clone_of = first_clone;
2024 n->next_sibling_clone = first_clone->clones;
2025 if (first_clone->clones)
2026 first_clone->clones->prev_sibling_clone = n;
2027 first_clone->clones = first_clone->next_sibling_clone;
2028 first_clone->next_sibling_clone->prev_sibling_clone = NULL;
2029 first_clone->next_sibling_clone = NULL;
2030 gcc_assert (!first_clone->prev_sibling_clone);
2032 first_clone->clone_of = NULL;
2033 node->clones = NULL;
2035 if (first_clone->clones)
2036 for (n = first_clone->clones; n != first_clone;)
2038 gcc_assert (n->decl == node->decl);
2039 n->decl = first_clone->decl;
2040 if (n->clones)
2041 n = n->clones;
2042 else if (n->next_sibling_clone)
2043 n = n->next_sibling_clone;
2044 else
2046 while (n != first_clone && !n->next_sibling_clone)
2047 n = n->clone_of;
2048 if (n != first_clone)
2049 n = n->next_sibling_clone;
2053 /* Copy the function body of NODE into the new decl of FIRST_CLONE. */
2054 tree_function_versioning (node->decl, first_clone->decl, NULL, true, NULL,
2055 NULL, NULL);
2057 DECL_EXTERNAL (first_clone->decl) = 0;
2058 DECL_COMDAT_GROUP (first_clone->decl) = NULL_TREE;
2059 TREE_PUBLIC (first_clone->decl) = 0;
2060 DECL_COMDAT (first_clone->decl) = 0;
2061 VEC_free (ipa_opt_pass, heap,
2062 first_clone->ipa_transforms_to_apply);
2063 first_clone->ipa_transforms_to_apply = NULL;
2065 #ifdef ENABLE_CHECKING
2066 verify_cgraph_node (first_clone);
2067 #endif
2068 return first_clone;
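/* As a sketch of the clone-tree rewrite performed above, assume NODE came
   in with inline clones C1, C2 and C3, all sharing NODE->decl:

       before:  NODE --clones--> C1 -> C2 -> C3     (clone_of == NODE)

       after:   NODE             (keeps the offline body, no clones)
                C1               (private copy of the body, clone_of == NULL)
                  `--clones--> C2 -> C3             (clone_of == C1,
                                                     decl == C1->decl)

   The returned FIRST_CLONE (C1) carries the saved body that remaining
   inlining can still consume, while NODE's offline body is free to be
   modified further.  */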
2071 /* Given virtual clone, turn it into actual clone. */
2072 static void
2073 cgraph_materialize_clone (struct cgraph_node *node)
2075 bitmap_obstack_initialize (NULL);
2076 node->former_clone_of = node->clone_of->decl;
2077 if (node->clone_of->former_clone_of)
2078 node->former_clone_of = node->clone_of->former_clone_of;
2079 /* Copy the function body of the clone's origin into NODE's decl. */
2080 tree_function_versioning (node->clone_of->decl, node->decl,
2081 node->clone.tree_map, true,
2082 node->clone.args_to_skip, NULL, NULL);
2083 if (cgraph_dump_file)
2085 dump_function_to_file (node->clone_of->decl, cgraph_dump_file, dump_flags);
2086 dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
2089 /* The function is no longer a clone. */
2090 if (node->next_sibling_clone)
2091 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
2092 if (node->prev_sibling_clone)
2093 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
2094 else
2095 node->clone_of->clones = node->next_sibling_clone;
2096 node->next_sibling_clone = NULL;
2097 node->prev_sibling_clone = NULL;
2098 if (!node->clone_of->analyzed && !node->clone_of->clones)
2100 cgraph_release_function_body (node->clone_of);
2101 cgraph_node_remove_callees (node->clone_of);
2102 ipa_remove_all_references (&node->clone_of->ref_list);
2104 node->clone_of = NULL;
2105 bitmap_obstack_release (NULL);
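/* As a sketch of what the clone descriptors consumed above contain: a
   constant-propagation clone of   int f (int n)   specialized for n == 4
   would carry a tree_map with a single ipa_replace_map entry
   (old_tree = the PARM_DECL for n, new_tree = 4, replace_p = true,
   ref_p = false) and, if the now-constant parameter is also dropped from
   the signature, bit 0 set in args_to_skip.  The function f and the value
   4 are illustrative only.  */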
2108 /* If necessary, change the function declaration in the call statement
2109 associated with E so that it corresponds to the edge callee. */
2111 gimple
2112 cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *e)
2114 tree decl = gimple_call_fndecl (e->call_stmt);
2115 gimple new_stmt;
2116 #ifdef ENABLE_CHECKING
2117 struct cgraph_node *node;
2118 #endif
2120 if (e->indirect_unknown_callee
2121 || decl == e->callee->decl
2122 /* Don't update a call from a same-body alias to the real function. */
2123 || (decl && cgraph_get_node (decl) == cgraph_get_node (e->callee->decl)))
2124 return e->call_stmt;
2126 #ifdef ENABLE_CHECKING
2127 if (decl)
2129 node = cgraph_get_node (decl);
2130 gcc_assert (!node || !node->clone.combined_args_to_skip);
2132 #endif
2134 if (cgraph_dump_file)
2136 fprintf (cgraph_dump_file, "updating call of %s/%i -> %s/%i: ",
2137 cgraph_node_name (e->caller), e->caller->uid,
2138 cgraph_node_name (e->callee), e->callee->uid);
2139 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
2140 if (e->callee->clone.combined_args_to_skip)
2142 fprintf (cgraph_dump_file, " combined args to skip: ");
2143 dump_bitmap (cgraph_dump_file,
2144 e->callee->clone.combined_args_to_skip);
2148 if (e->callee->clone.combined_args_to_skip)
2150 gimple_stmt_iterator gsi;
2151 int lp_nr;
2153 new_stmt
2154 = gimple_call_copy_skip_args (e->call_stmt,
2155 e->callee->clone.combined_args_to_skip);
2156 gimple_call_set_fndecl (new_stmt, e->callee->decl);
2158 if (gimple_vdef (new_stmt)
2159 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
2160 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
2162 gsi = gsi_for_stmt (e->call_stmt);
2163 gsi_replace (&gsi, new_stmt, false);
2164 /* We need to defer cleaning EH info on the new statement to
2165 fixup-cfg. We may not have dominator information at this point
2166 and thus would end up with unreachable blocks and have no way
2167 to communicate that we need to run CFG cleanup then. */
2168 lp_nr = lookup_stmt_eh_lp (e->call_stmt);
2169 if (lp_nr != 0)
2171 remove_stmt_from_eh_lp (e->call_stmt);
2172 add_stmt_to_eh_lp (new_stmt, lp_nr);
2175 else
2177 new_stmt = e->call_stmt;
2178 gimple_call_set_fndecl (new_stmt, e->callee->decl);
2179 update_stmt (new_stmt);
2182 cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt, new_stmt);
2184 if (cgraph_dump_file)
2186 fprintf (cgraph_dump_file, " updated to:");
2187 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
2189 return new_stmt;
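/* As a sketch of the args-skipping path above, assume E->callee is a clone
   whose combined_args_to_skip bitmap has bit 1 set (the second argument).
   A call statement

       tmp_5 = foo (a_1, b_2, c_3);

   is then rewritten by gimple_call_copy_skip_args and
   gimple_call_set_fndecl into

       tmp_5 = foo.clone.0 (a_1, c_3);

   where foo.clone.0 stands for whatever assembler name the clone's decl
   received; the names and SSA versions are illustrative only.  */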
2192 /* Once all functions from the compilation unit are in memory, produce all
2193 clones and update all calls. We might also do this on demand if we don't
2194 want to bring all functions into memory prior to compilation, but the
2195 current WHOPR implementation does that and it is a bit easier to keep
2196 everything right in this order. */
2197 void
2198 cgraph_materialize_all_clones (void)
2200 struct cgraph_node *node;
2201 bool stabilized = false;
2203 if (cgraph_dump_file)
2204 fprintf (cgraph_dump_file, "Materializing clones\n");
2205 #ifdef ENABLE_CHECKING
2206 verify_cgraph ();
2207 #endif
2209 /* We could also process nodes in topological order, but the number of
2210 iterations should be bounded by the number of IPA passes, since a single
2211 IPA pass is unlikely to create clones of clones it created itself. */
2212 while (!stabilized)
2214 stabilized = true;
2215 for (node = cgraph_nodes; node; node = node->next)
2217 if (node->clone_of && node->decl != node->clone_of->decl
2218 && !gimple_has_body_p (node->decl))
2220 if (gimple_has_body_p (node->clone_of->decl))
2222 if (cgraph_dump_file)
2224 fprintf (cgraph_dump_file, "cloning %s to %s\n",
2225 cgraph_node_name (node->clone_of),
2226 cgraph_node_name (node));
2227 if (node->clone.tree_map)
2229 unsigned int i;
2230 fprintf (cgraph_dump_file, " replace map: ");
2231 for (i = 0; i < VEC_length (ipa_replace_map_p,
2232 node->clone.tree_map);
2233 i++)
2235 struct ipa_replace_map *replace_info;
2236 replace_info = VEC_index (ipa_replace_map_p,
2237 node->clone.tree_map,
2239 print_generic_expr (cgraph_dump_file, replace_info->old_tree, 0);
2240 fprintf (cgraph_dump_file, " -> ");
2241 print_generic_expr (cgraph_dump_file, replace_info->new_tree, 0);
2242 fprintf (cgraph_dump_file, "%s%s;",
2243 replace_info->replace_p ? "(replace)":"",
2244 replace_info->ref_p ? "(ref)":"");
2246 fprintf (cgraph_dump_file, "\n");
2248 if (node->clone.args_to_skip)
2250 fprintf (cgraph_dump_file, " args_to_skip: ");
2251 dump_bitmap (cgraph_dump_file, node->clone.args_to_skip);
2253 if (node->clone.combined_args_to_skip)
2255 fprintf (cgraph_dump_file, " combined_args_to_skip:");
2256 dump_bitmap (cgraph_dump_file, node->clone.combined_args_to_skip);
2259 cgraph_materialize_clone (node);
2260 stabilized = false;
2265 for (node = cgraph_nodes; node; node = node->next)
2266 if (!node->analyzed && node->callees)
2267 cgraph_node_remove_callees (node);
2268 if (cgraph_dump_file)
2269 fprintf (cgraph_dump_file, "Materialization and call site updates done.\n");
2270 #ifdef ENABLE_CHECKING
2271 verify_cgraph ();
2272 #endif
2273 cgraph_remove_unreachable_nodes (false, cgraph_dump_file);
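/* A minimal post-condition sketch of the fixpoint loop above, using a
   hypothetical helper name: once materialization has stabilized, no node
   with its own decl is left waiting for a body while its clone_of already
   has one.  */

static void
example_check_all_clones_materialized (void)
{
  struct cgraph_node *node;

  for (node = cgraph_nodes; node; node = node->next)
    if (node->clone_of && node->decl != node->clone_of->decl)
      /* Either the clone was materialized (it has a body of its own) or
	 its origin has no body yet either, so the loop had nothing to do.  */
      gcc_assert (gimple_has_body_p (node->decl)
		  || !gimple_has_body_p (node->clone_of->decl));
}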
2276 #include "gt-cgraphunit.h"