This patch suppresses the messages printed when the primary module is not found.
[official-gcc.git] / gcc-4_7 / gcc / cgraphunit.c (blob 157369554dd30335ec4c969fcf7836546936834c)
1 /* Callgraph based interprocedural optimizations.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
3 2011, 2012 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This module implements the main driver of the compilation process as well as
23 a few basic interprocedural optimizers.
25 The main scope of this file is to act as an interface between
26 tree-based frontends and the backend (and middle end).
28 The front-end is supposed to use the following functionality:
30 - cgraph_finalize_function
32 This function is called once the front-end has parsed the whole body of the
33 function and it is certain that neither the function body nor the declaration will change.
35 (There is one exception needed for implementing GCC extern inline
36 function.)
38 - varpool_finalize_variable
40 This function has the same behavior as the above but is used for static
41 variables.
43 - cgraph_finalize_compilation_unit
45 This function is called once the (source level) compilation unit is finalized
46 and it will no longer change.
48 The call-graph construction and local function analysis take
49 place here. Bodies of unreachable functions are released to
50 conserve memory usage.
52 The function can be called multiple times when multiple source level
53 compilation units are combined (such as in the C frontend).
55 - cgraph_optimize
57 In unit-at-a-time compilation the intraprocedural analysis takes
58 place here. In particular, static functions whose address is never
59 taken are marked as local. The backend can then use this information to
60 modify calling conventions, do better inlining or similar optimizations.
62 - cgraph_mark_needed_node
63 - varpool_mark_needed_node
65 When a function or variable is referenced in some hidden way, the call-graph
66 data structure must be updated accordingly by these functions.
67 There should be little need to call them, and all the references
68 should be made explicit to cgraph code. At present these functions are
69 used by the C++ frontend to explicitly mark the keyed methods.
71 - analyze_expr callback
73 This function is responsible for lowering tree nodes not understood by
74 generic code into understandable ones or alternatively marking
75 callgraph and varpool nodes referenced by them as needed.
77 ??? On the tree-ssa path, genericizing should take place here and we would avoid
78 the need for these hooks (replacing them by a genericizing hook).
80 Analysis of all functions is deferred
81 to cgraph_finalize_compilation_unit and expansion to cgraph_optimize.
83 In cgraph_finalize_compilation_unit the reachable functions are
84 analyzed. During analysis the call-graph edges from reachable
85 functions are constructed and their destinations are marked as
86 reachable. References to functions and variables are discovered too,
87 and variables found to be needed are output to the assembly file. Via the
88 mark_referenced call in assemble_variable, functions referenced by
89 static variables are noticed too.
91 The intra-procedural information is produced and its existence is
92 indicated by global_info_ready. Once this flag is set it is impossible
93 to change a function from !reachable to reachable, and thus
94 assemble_variable no longer calls mark_referenced.
96 Finally the call-graph is topologically sorted and all reachable functions
97 that have not been completely inlined and are not external are output.
99 ??? It is possible that a reference to a function or variable is optimized
100 out. We cannot deal with this nicely because the topological order is not
101 suitable for it. For tree-ssa we may consider another pass doing
102 optimization and re-discovering reachable functions.
104 ??? Reorganize the code so variables are output very last and only if they
105 really have been referenced by the produced code, so we catch more cases
106 where the reference has been optimized out. */
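/* Editorial sketch (not part of the original sources): given the entry
   points named in the overview above, a front end drives this module
   roughly as follows.

     for each parsed function F:
       cgraph_finalize_function (F, false);
     for each finalized static variable V:
       varpool_finalize_variable (V);
     cgraph_finalize_compilation_unit ();

   cgraph_finalize_compilation_unit then analyzes the reachable functions
   and hands control to cgraph_optimize, as implemented later in this file.
   The exact spelling of the varpool hook may differ between releases.  */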
109 #include "config.h"
110 #include "system.h"
111 #include "coretypes.h"
112 #include "tm.h"
113 #include "tree.h"
114 #include "rtl.h"
115 #include "tree-flow.h"
116 #include "tree-inline.h"
117 #include "langhooks.h"
118 #include "pointer-set.h"
119 #include "toplev.h"
120 #include "flags.h"
121 #include "ggc.h"
122 #include "debug.h"
123 #include "target.h"
124 #include "cgraph.h"
125 #include "diagnostic.h"
126 #include "tree-pretty-print.h"
127 #include "gimple-pretty-print.h"
128 #include "timevar.h"
129 #include "params.h"
130 #include "fibheap.h"
131 #include "intl.h"
132 #include "function.h"
133 #include "ipa-prop.h"
134 #include "gimple.h"
135 #include "gcov-io.h"
136 #include "tree-iterator.h"
137 #include "tree-pass.h"
138 #include "tree-dump.h"
139 #include "output.h"
140 #include "coverage.h"
141 #include "plugin.h"
142 #include "ipa-inline.h"
143 #include "ipa-utils.h"
144 #include "lto-streamer.h"
145 #include "l-ipo.h"
146 #include "auto-profile.h"
148 static void cgraph_expand_all_functions (void);
149 static void cgraph_mark_functions_to_output (void);
150 static void cgraph_expand_function (struct cgraph_node *);
151 static void cgraph_output_pending_asms (void);
153 FILE *cgraph_dump_file;
155 /* Used for vtable lookup in thunk adjusting. */
156 static GTY (()) tree vtable_entry_type;
158 /* Determine if function DECL is needed. That is, visible to something
159 outside this translation unit or to something magic in the system
160 configury. */
162 bool
163 cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
165 /* If the user told us it is used, then it must be so. */
166 if (node->local.externally_visible)
167 return true;
169 /* ??? If the assembler name is set by hand, it is possible to assemble
170 the name later after finalizing the function and the fact is noticed
171 in assemble_name then. This is arguably a bug. */
172 if (DECL_ASSEMBLER_NAME_SET_P (decl)
173 && (!node->thunk.thunk_p && !node->same_body_alias)
174 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
175 return true;
177 /* With -fkeep-inline-functions we are keeping all inline functions except
178 for extern inline ones. */
179 if (flag_keep_inline_functions
180 && DECL_DECLARED_INLINE_P (decl)
181 && !DECL_EXTERNAL (decl)
182 && !DECL_DISREGARD_INLINE_LIMITS (decl))
183 return true;
185 /* If we decided it was needed before, but at the time we didn't have
186 the body of the function available, then it's still needed. We have
187 to go back and re-check its dependencies now. */
188 if (node->needed)
189 return true;
191 /* Externally visible functions must be output. The exception is
192 COMDAT functions that must be output only when they are needed.
194 When not optimizing, also output the static functions (see
195 PR24561), but don't do so for always_inline functions, functions
196 declared inline, and nested functions. These were optimized out
197 in the original implementation and it is unclear whether we want
198 to change the behavior here. */
199 if (((TREE_PUBLIC (decl)
200 || (!optimize
201 && !node->same_body_alias
202 && !DECL_DISREGARD_INLINE_LIMITS (decl)
203 && !DECL_DECLARED_INLINE_P (decl)
204 && !(DECL_CONTEXT (decl)
205 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)))
206 && !flag_whole_program
207 && !flag_lto)
208 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
209 return true;
211 return false;
214 /* Process CGRAPH_NEW_FUNCTIONS and perform the actions necessary to add these
215 functions into the callgraph in such a way that they look like ordinary reachable
216 functions inserted into the callgraph already at construction time. */
218 bool
219 cgraph_process_new_functions (void)
221 bool output = false;
222 tree fndecl;
223 struct cgraph_node *node;
225 varpool_analyze_pending_decls ();
226 /* Note that this queue may grow as it's being processed, as the new
227 functions may generate new ones. */
228 while (cgraph_new_nodes)
230 node = cgraph_new_nodes;
231 fndecl = node->decl;
232 cgraph_new_nodes = cgraph_new_nodes->next_needed;
233 switch (cgraph_state)
235 case CGRAPH_STATE_CONSTRUCTION:
236 /* At construction time we just need to finalize the function and move
237 it onto the reachable functions list. */
239 node->next_needed = NULL;
240 cgraph_finalize_function (fndecl, false);
241 cgraph_mark_reachable_node (node);
242 output = true;
243 cgraph_call_function_insertion_hooks (node);
244 break;
246 case CGRAPH_STATE_IPA:
247 case CGRAPH_STATE_IPA_SSA:
248 /* When IPA optimization has already started, do all the essential
249 transformations that have already been performed on the whole
250 cgraph but not on this function. */
252 gimple_register_cfg_hooks ();
253 if (!node->analyzed)
254 cgraph_analyze_function (node);
255 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
256 current_function_decl = fndecl;
257 if ((cgraph_state == CGRAPH_STATE_IPA_SSA
258 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
259 /* When not optimizing, be sure we run early local passes anyway
260 to expand OMP. */
261 || !optimize)
262 execute_pass_list (pass_early_local_passes.pass.sub);
263 else
264 compute_inline_parameters (node, true);
265 free_dominance_info (CDI_POST_DOMINATORS);
266 free_dominance_info (CDI_DOMINATORS);
267 pop_cfun ();
268 current_function_decl = NULL;
269 cgraph_call_function_insertion_hooks (node);
270 break;
272 case CGRAPH_STATE_EXPANSION:
273 /* Functions created during expansion shall be compiled
274 directly. */
275 node->process = 0;
276 cgraph_call_function_insertion_hooks (node);
277 cgraph_expand_function (node);
278 break;
280 default:
281 gcc_unreachable ();
282 break;
284 varpool_analyze_pending_decls ();
286 return output;
289 /* As a GCC extension we allow redefinition of the function. The
290 semantics when the two bodies differ are not well defined.
291 We replace the old body with the new body, so in unit-at-a-time mode
292 we always use the new body, while in normal mode we may end up with the
293 old body inlined into some functions and the new body expanded and
294 inlined in others.
296 ??? It may make more sense to use one body for inlining and the other
297 body for expanding the function, but this is difficult to do. */
299 static void
300 cgraph_reset_node (struct cgraph_node *node)
302 /* If node->process is set, then we have already begun whole-unit analysis.
303 This is *not* testing for whether we've already emitted the function.
304 That case can be sort-of legitimately seen with real function redefinition
305 errors. I would argue that the front end should never present us with
306 such a case, but don't enforce that for now. */
307 gcc_assert (!node->process);
309 /* Reset our data structures so we can analyze the function again. */
310 memset (&node->local, 0, sizeof (node->local));
311 memset (&node->global, 0, sizeof (node->global));
312 memset (&node->rtl, 0, sizeof (node->rtl));
313 node->analyzed = false;
314 node->local.finalized = false;
316 cgraph_node_remove_callees (node);
319 static void
320 cgraph_lower_function (struct cgraph_node *node)
322 if (node->lowered)
323 return;
325 if (node->nested)
326 lower_nested_functions (node->decl);
327 gcc_assert (!node->nested);
329 tree_lowering_passes (node->decl);
330 node->lowered = true;
333 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
334 logic in effect. If NESTED is true, then our caller cannot stand to have
335 the garbage collector run at the moment. We would need to either create
336 a new GC context, or just not compile right now. */
338 void
339 cgraph_finalize_function (tree decl, bool nested)
341 struct cgraph_node *node = cgraph_get_create_node (decl);
342 bool reset_needed = node->local.finalized;
344 if (node->local.finalized)
346 cgraph_reset_node (node);
347 node->local.redefined_extern_inline = true;
350 notice_global_symbol (decl);
351 node->local.finalized = true;
352 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
354 if (cgraph_decide_is_function_needed (node, decl))
355 cgraph_mark_needed_node (node);
357 /* Since we reclaim unreachable nodes at the end of every language
358 level unit, we need to be conservative about possible entry points
359 there. */
360 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
361 || DECL_STATIC_CONSTRUCTOR (decl)
362 || DECL_STATIC_DESTRUCTOR (decl)
363 /* COMDAT virtual functions may be referenced by a vtable from
364 another compilation unit. Still we want to devirtualize calls
365 to those, so we need to analyze them.
366 FIXME: We should introduce may edges for this purpose and update
367 their handling in unreachable function removal and the inliner too. */
368 || (DECL_VIRTUAL_P (decl)
369 && optimize && (DECL_COMDAT (decl) || DECL_EXTERNAL (decl))))
370 cgraph_mark_reachable_node (node);
372 /* For multi-module compilation, an inline function may be multiply
373 defined if it is a built-in. In one file, the decl may be marked
374 as needed (e.g., referenced) and analyzed (including inline parameter
375 computation) during function lowering invoked at the end of the file scope.
376 In the following scope, it may not be needed and thus won't be put into
377 the cgraph nodes queue for further analysis. Do it here. */
379 if (reset_needed
380 && L_IPO_IS_AUXILIARY_MODULE
381 && DECL_DECLARED_INLINE_P (node->decl))
382 cgraph_mark_reachable_node (node);
384 /* If we've not yet emitted decl, tell the debug info about it. */
385 if (!TREE_ASM_WRITTEN (decl))
386 (*debug_hooks->deferred_inline_function) (decl);
388 /* Possibly warn about unused parameters. */
389 if (warn_unused_parameter)
390 do_warn_unused_parameter (decl);
392 if (!nested)
393 ggc_collect ();
396 /* The C99 extern inline keywords allow changing a declaration after the function
397 has been finalized. We need to re-decide whether we want to mark the function as
398 needed then. */
400 void
401 cgraph_mark_if_needed (tree decl)
403 struct cgraph_node *node = cgraph_get_node (decl);
404 if (node->local.finalized && cgraph_decide_is_function_needed (node, decl))
405 cgraph_mark_needed_node (node);
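/* Editorial example (not from the original sources), illustrating the
   situation handled by cgraph_mark_if_needed above.  With C99 inline
   semantics a later declaration can force an external definition:

     inline int sq (int x) { return x * x; }   (finalized, not yet needed)
     extern int sq (int x);                    (now an out-of-line copy must
                                                be emitted, so the node
                                                becomes needed)

   This is why the "needed" decision is re-evaluated after finalization.  */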
408 /* Return TRUE if NODE2 is equivalent to NODE or its clone. */
409 static bool
410 clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
412 node = cgraph_function_or_thunk_node (node, NULL);
413 node2 = cgraph_function_or_thunk_node (node2, NULL);
414 while (node != node2 && node2)
415 node2 = node2->clone_of;
416 return node2 != NULL;
419 /* Verify edge E count and frequency. */
421 static bool
422 verify_edge_count_and_frequency (struct cgraph_edge *e)
424 bool error_found = false;
425 if (e->count < 0)
427 error ("caller edge count is negative");
428 error_found = true;
430 if (e->frequency < 0)
432 error ("caller edge frequency is negative");
433 error_found = true;
435 if (e->frequency > CGRAPH_FREQ_MAX)
437 error ("caller edge frequency is too large");
438 error_found = true;
440 if (gimple_has_body_p (e->caller->decl)
441 && !e->caller->global.inlined_to
442 /* FIXME: Inline-analysis sets frequency to 0 when an edge is optimized out.
443 Remove this once edges are actually removed from the function at that time. */
444 && e->call_stmt
445 && (e->frequency
446 || (inline_edge_summary_vec
447 && ((VEC_length(inline_edge_summary_t, inline_edge_summary_vec)
448 <= (unsigned) e->uid)
449 || !inline_edge_summary (e)->predicate)))
450 && (e->frequency
451 != compute_call_stmt_bb_frequency (e->caller->decl,
452 gimple_bb (e->call_stmt)))
453 && !e->caller->clone_of)
455 error ("caller edge frequency %i does not match BB frequency %i",
456 e->frequency,
457 compute_call_stmt_bb_frequency (e->caller->decl,
458 gimple_bb (e->call_stmt)));
459 error_found = true;
461 return error_found;
464 /* Switch to THIS_CFUN if needed and print STMT to stderr. */
465 static void
466 cgraph_debug_gimple_stmt (struct function *this_cfun, gimple stmt)
468 /* debug_gimple_stmt needs correct cfun */
469 if (cfun != this_cfun)
470 set_cfun (this_cfun);
471 debug_gimple_stmt (stmt);
474 /* Verify that call graph edge E corresponds to DECL from the associated
475 statement. Return true if the verification should fail. */
477 static bool
478 verify_edge_corresponds_to_fndecl (struct cgraph_edge *e, tree decl)
480 struct cgraph_node *node;
482 if (!decl || e->callee->global.inlined_to)
483 return false;
484 node = cgraph_get_node (decl);
486 /* We do not know if a node from a different partition is an alias or what it
487 aliases and therefore cannot do the former_clone_of check reliably. */
488 if (!node || node->in_other_partition)
489 return false;
490 node = cgraph_function_or_thunk_node (node, NULL);
492 if ((e->callee->former_clone_of != node->decl
493 && (!node->same_body_alias
494 || e->callee->former_clone_of != node->thunk.alias))
495 && (!L_IPO_COMP_MODE
496 || (e->callee->former_clone_of
497 && cgraph_lipo_get_resolved_node
498 (e->callee->former_clone_of)->decl
499 != cgraph_lipo_get_resolved_node (decl)->decl))
500 /* IPA-CP sometimes redirects an edge to a clone and then back to the former
501 function. This ping-pong has to go, eventually. */
502 && (node != cgraph_function_or_thunk_node (e->callee, NULL))
503 && !clone_of_p (node, e->callee)
504 /* If decl is a same body alias of some other decl, allow e->callee to be
505 a clone of a clone of that other decl too. */
506 && (!node->same_body_alias
507 || !clone_of_p (cgraph_get_node (node->thunk.alias), e->callee))
508 && (!L_IPO_COMP_MODE
509 || !clone_of_p (cgraph_lipo_get_resolved_node (decl),
510 e->callee)))
511 return true;
512 else
513 return false;
516 /* Verify the consistency of the given cgraph node and its edges. */
517 DEBUG_FUNCTION void
518 verify_cgraph_node (struct cgraph_node *node)
520 struct cgraph_edge *e;
521 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
522 basic_block this_block;
523 gimple_stmt_iterator gsi;
524 bool error_found = false;
526 if (seen_error ())
527 return;
529 /* Disable checking for LIPO for now. */
530 if (L_IPO_COMP_MODE)
531 return;
533 timevar_push (TV_CGRAPH_VERIFY);
534 for (e = node->callees; e; e = e->next_callee)
535 if (e->aux)
537 error ("aux field set for edge %s->%s",
538 identifier_to_locale (cgraph_node_name (e->caller)),
539 identifier_to_locale (cgraph_node_name (e->callee)));
540 error_found = true;
542 if (node->count < 0)
544 error ("execution count is negative");
545 error_found = true;
547 if (node->global.inlined_to && node->local.externally_visible)
549 error ("externally visible inline clone");
550 error_found = true;
552 if (node->global.inlined_to && node->address_taken)
554 error ("inline clone with address taken");
555 error_found = true;
557 if (node->global.inlined_to && node->needed)
559 error ("inline clone is needed");
560 error_found = true;
562 for (e = node->indirect_calls; e; e = e->next_callee)
564 if (e->aux)
566 error ("aux field set for indirect edge from %s",
567 identifier_to_locale (cgraph_node_name (e->caller)));
568 error_found = true;
570 if (!e->indirect_unknown_callee
571 || !e->indirect_info)
573 error ("An indirect edge from %s is not marked as indirect or has "
574 "associated indirect_info, the corresponding statement is: ",
575 identifier_to_locale (cgraph_node_name (e->caller)));
576 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
577 error_found = true;
580 for (e = node->callers; e; e = e->next_caller)
582 if (verify_edge_count_and_frequency (e))
583 error_found = true;
584 if (!e->inline_failed)
586 if (node->global.inlined_to
587 != (e->caller->global.inlined_to
588 ? e->caller->global.inlined_to : e->caller))
590 error ("inlined_to pointer is wrong");
591 error_found = true;
593 if (node->callers->next_caller)
595 error ("multiple inline callers");
596 error_found = true;
599 else
600 if (node->global.inlined_to)
602 error ("inlined_to pointer set for noninline callers");
603 error_found = true;
606 for (e = node->indirect_calls; e; e = e->next_callee)
607 if (verify_edge_count_and_frequency (e))
608 error_found = true;
609 if (!node->callers && node->global.inlined_to)
611 error ("inlined_to pointer is set but no predecessors found");
612 error_found = true;
614 if (node->global.inlined_to == node)
616 error ("inlined_to pointer refers to itself");
617 error_found = true;
620 if (!cgraph_get_node (node->decl))
622 error ("node not found in cgraph_hash");
623 error_found = true;
626 if (node->clone_of)
628 struct cgraph_node *n;
629 for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
630 if (n == node)
631 break;
632 if (!n)
634 error ("node has wrong clone_of");
635 error_found = true;
638 if (node->clones)
640 struct cgraph_node *n;
641 for (n = node->clones; n; n = n->next_sibling_clone)
642 if (n->clone_of != node)
643 break;
644 if (n)
646 error ("node has wrong clone list");
647 error_found = true;
650 if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
652 error ("node is in clone list but it is not clone");
653 error_found = true;
655 if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
657 error ("node has wrong prev_clone pointer");
658 error_found = true;
660 if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
662 error ("double linked list of clones corrupted");
663 error_found = true;
665 if (node->same_comdat_group)
667 struct cgraph_node *n = node->same_comdat_group;
669 if (!DECL_ONE_ONLY (node->decl))
671 error ("non-DECL_ONE_ONLY node in a same_comdat_group list");
672 error_found = true;
674 if (n == node)
676 error ("node is alone in a comdat group");
677 error_found = true;
681 if (!n->same_comdat_group)
683 error ("same_comdat_group is not a circular list");
684 error_found = true;
685 break;
687 n = n->same_comdat_group;
689 while (n != node);
692 if (node->analyzed && node->alias)
694 bool ref_found = false;
695 int i;
696 struct ipa_ref *ref;
698 if (node->callees)
700 error ("Alias has call edges");
701 error_found = true;
703 for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
704 if (ref->use != IPA_REF_ALIAS)
706 error ("Alias has non-alias reference");
707 error_found = true;
709 else if (ref_found)
711 error ("Alias has more than one alias reference");
712 error_found = true;
714 else
715 ref_found = true;
716 if (!ref_found)
718 error ("Analyzed alias has no reference");
719 error_found = true;
722 if (node->analyzed && node->thunk.thunk_p)
724 if (!node->callees)
726 error ("No edge out of thunk node");
727 error_found = true;
729 else if (node->callees->next_callee)
731 error ("More than one edge out of thunk node");
732 error_found = true;
734 if (gimple_has_body_p (node->decl))
736 error ("Thunk is not supposed to have body");
737 error_found = true;
740 else if (node->analyzed && gimple_has_body_p (node->decl)
741 && !cgraph_is_auxiliary (node->decl)
742 && !TREE_ASM_WRITTEN (node->decl)
743 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
744 && !flag_wpa)
746 if (this_cfun->cfg)
748 /* The nodes we're interested in are never shared, so walk
749 the tree ignoring duplicates. */
750 struct pointer_set_t *visited_nodes = pointer_set_create ();
751 /* Reach the trees by walking over the CFG, and note the
752 enclosing basic-blocks in the call edges. */
753 FOR_EACH_BB_FN (this_block, this_cfun)
754 for (gsi = gsi_start_bb (this_block);
755 !gsi_end_p (gsi);
756 gsi_next (&gsi))
758 gimple stmt = gsi_stmt (gsi);
759 if (is_gimple_call (stmt))
761 struct cgraph_edge *e = cgraph_edge (node, stmt);
762 tree decl = gimple_call_fndecl (stmt);
763 if (e)
765 if (e->aux)
767 error ("shared call_stmt:");
768 cgraph_debug_gimple_stmt (this_cfun, stmt);
769 error_found = true;
771 if (!e->indirect_unknown_callee)
773 if (verify_edge_corresponds_to_fndecl (e, decl))
775 error ("edge points to wrong declaration:");
776 debug_tree (e->callee->decl);
777 fprintf (stderr," Instead of:");
778 debug_tree (decl);
779 error_found = true;
782 else if (decl)
784 error ("an indirect edge with unknown callee "
785 "corresponding to a call_stmt with "
786 "a known declaration:");
787 error_found = true;
788 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
790 e->aux = (void *)1;
792 else if (decl)
794 error ("missing callgraph edge for call stmt:");
795 cgraph_debug_gimple_stmt (this_cfun, stmt);
796 error_found = true;
800 pointer_set_destroy (visited_nodes);
802 else
803 /* No CFG available?! */
804 gcc_unreachable ();
806 for (e = node->callees; e; e = e->next_callee)
808 if (!e->aux && !L_IPO_COMP_MODE)
810 error ("edge %s->%s has no corresponding call_stmt",
811 identifier_to_locale (cgraph_node_name (e->caller)),
812 identifier_to_locale (cgraph_node_name (e->callee)));
813 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
814 error_found = true;
816 e->aux = 0;
818 for (e = node->indirect_calls; e; e = e->next_callee)
820 if (!e->aux)
822 error ("an indirect edge from %s has no corresponding call_stmt",
823 identifier_to_locale (cgraph_node_name (e->caller)));
824 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
825 error_found = true;
827 e->aux = 0;
830 if (error_found)
832 dump_cgraph_node (stderr, node);
833 internal_error ("verify_cgraph_node failed");
835 timevar_pop (TV_CGRAPH_VERIFY);
838 /* Verify whole cgraph structure. */
839 DEBUG_FUNCTION void
840 verify_cgraph (void)
842 struct cgraph_node *node;
844 if (seen_error ())
845 return;
847 for (node = cgraph_nodes; node; node = node->next)
848 verify_cgraph_node (node);
851 /* Output all asm statements we have stored up to be output. */
853 static void
854 cgraph_output_pending_asms (void)
856 struct cgraph_asm_node *can;
858 if (seen_error ())
859 return;
861 for (can = cgraph_asm_nodes; can; can = can->next)
862 assemble_asm (can->asm_str);
863 cgraph_asm_nodes = NULL;
866 /* Analyze the function scheduled to be output. */
867 void
868 cgraph_analyze_function (struct cgraph_node *node)
870 tree save = current_function_decl;
871 tree decl = node->decl;
873 if (node->alias && node->thunk.alias)
875 struct cgraph_node *tgt = cgraph_get_node (node->thunk.alias);
876 struct cgraph_node *n;
878 for (n = tgt; n && n->alias;
879 n = n->analyzed ? cgraph_alias_aliased_node (n) : NULL)
880 if (n == node)
882 error ("function %q+D part of alias cycle", node->decl);
883 node->alias = false;
884 return;
886 if (!VEC_length (ipa_ref_t, node->ref_list.references))
887 ipa_record_reference (node, NULL, tgt, NULL, IPA_REF_ALIAS, NULL);
888 if (node->same_body_alias)
890 DECL_VIRTUAL_P (node->decl) = DECL_VIRTUAL_P (node->thunk.alias);
891 DECL_DECLARED_INLINE_P (node->decl)
892 = DECL_DECLARED_INLINE_P (node->thunk.alias);
893 DECL_DISREGARD_INLINE_LIMITS (node->decl)
894 = DECL_DISREGARD_INLINE_LIMITS (node->thunk.alias);
897 /* Fix up the visibility nonsense the C++ frontend produces on same body aliases. */
898 if (TREE_PUBLIC (node->decl) && node->same_body_alias)
900 DECL_EXTERNAL (node->decl) = DECL_EXTERNAL (node->thunk.alias);
901 if (DECL_ONE_ONLY (node->thunk.alias))
903 DECL_COMDAT (node->decl) = DECL_COMDAT (node->thunk.alias);
904 DECL_COMDAT_GROUP (node->decl) = DECL_COMDAT_GROUP (node->thunk.alias);
905 if (DECL_ONE_ONLY (node->thunk.alias) && !node->same_comdat_group)
907 struct cgraph_node *tgt = cgraph_get_node (node->thunk.alias);
908 node->same_comdat_group = tgt;
909 if (!tgt->same_comdat_group)
910 tgt->same_comdat_group = node;
911 else
913 struct cgraph_node *n;
914 for (n = tgt->same_comdat_group;
915 n->same_comdat_group != tgt;
916 n = n->same_comdat_group)
918 n->same_comdat_group = node;
923 cgraph_mark_reachable_node (cgraph_alias_aliased_node (node));
924 if (node->address_taken)
925 cgraph_mark_address_taken_node (cgraph_alias_aliased_node (node));
926 if (cgraph_decide_is_function_needed (node, node->decl))
927 cgraph_mark_needed_node (node);
929 else if (node->thunk.thunk_p)
931 cgraph_create_edge (node, cgraph_get_node (node->thunk.alias),
932 NULL, 0, CGRAPH_FREQ_BASE);
934 else if (node->dispatcher_function)
936 /* Generate the dispatcher body of multi-versioned functions. */
937 struct cgraph_function_version_info *dispatcher_version_info
938 = get_cgraph_node_version (node);
939 if (dispatcher_version_info != NULL
940 && (dispatcher_version_info->dispatcher_resolver
941 == NULL_TREE))
943 tree resolver = NULL_TREE;
944 gcc_assert (targetm.generate_version_dispatcher_body);
945 resolver = targetm.generate_version_dispatcher_body (node);
946 gcc_assert (resolver != NULL_TREE);
949 else
951 current_function_decl = decl;
952 push_cfun (DECL_STRUCT_FUNCTION (decl));
954 assign_assembler_name_if_neeeded (node->decl);
956 /* Make sure to gimplify bodies only once. While analyzing a
957 function we lower it, which will require gimplified nested
958 functions, so we can end up here with an already gimplified
959 body. */
960 if (!gimple_body (decl))
961 gimplify_function_tree (decl);
962 dump_function (TDI_generic, decl);
964 cgraph_lower_function (node);
965 pop_cfun ();
967 node->analyzed = true;
969 current_function_decl = save;
972 /* The C++ frontend produces same body aliases all over the place, even before PCH
973 gets streamed out. It relies on us linking the aliases with their functions
974 in order to do the fixups, but ipa-ref is not PCH safe. Consequently we
975 first produce aliases without links, but once the C++ FE is sure it won't stream
976 PCH we build the links via this function. */
978 void
979 cgraph_process_same_body_aliases (void)
981 struct cgraph_node *node;
982 for (node = cgraph_nodes; node; node = node->next)
983 if (node->same_body_alias
984 && !VEC_length (ipa_ref_t, node->ref_list.references))
986 struct cgraph_node *tgt = cgraph_get_node (node->thunk.alias);
987 ipa_record_reference (node, NULL, tgt, NULL, IPA_REF_ALIAS, NULL);
989 same_body_aliases_done = true;
992 /* Process attributes common for vars and functions. */
994 static void
995 process_common_attributes (tree decl)
997 tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
999 if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
1001 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
1002 "%<weakref%> attribute should be accompanied with"
1003 " an %<alias%> attribute");
1004 DECL_WEAK (decl) = 0;
1005 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
1006 DECL_ATTRIBUTES (decl));
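/* Editorial example (not from the original sources): the warning above fires
   for a bare "weakref" that names no target, e.g.

     static void fn (void) __attribute__ ((weakref));

   whereas the usual well-formed spellings either give the target directly,

     static void fn (void) __attribute__ ((weakref ("target_fn")));

   or pair "weakref" with an explicit "alias" attribute.  */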
1010 /* Look for externally_visible and used attributes and mark cgraph nodes
1011 accordingly.
1013 We cannot mark the nodes at the point the attributes are processed (in
1014 handle_*_attribute) because the copy of the declarations available at that
1015 point may not be canonical. For example, in:
1017 void f();
1018 void f() __attribute__((used));
1020 the declaration we see in handle_used_attribute will be the second
1021 declaration -- but the front end will subsequently merge that declaration
1022 with the original declaration and discard the second declaration.
1024 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
1026 void f() {}
1027 void f() __attribute__((externally_visible));
1029 is valid.
1031 So, we walk the nodes at the end of the translation unit, applying the
1032 attributes at that point. */
1034 static void
1035 process_function_and_variable_attributes (struct cgraph_node *first,
1036 struct varpool_node *first_var)
1038 struct cgraph_node *node;
1039 struct varpool_node *vnode;
1041 for (node = cgraph_nodes; node != first; node = node->next)
1043 tree decl = node->decl;
1044 if (DECL_PRESERVE_P (decl))
1045 cgraph_mark_needed_node (node);
1046 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
1047 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl))
1048 && TREE_PUBLIC (node->decl))
1050 if (node->local.finalized)
1051 cgraph_mark_needed_node (node);
1053 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
1055 if (! TREE_PUBLIC (node->decl))
1056 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
1057 "%<externally_visible%>"
1058 " attribute have effect only on public objects");
1059 else if (node->local.finalized)
1060 cgraph_mark_needed_node (node);
1062 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
1063 && (node->local.finalized && !node->alias))
1065 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
1066 "%<weakref%> attribute ignored"
1067 " because function is defined");
1068 DECL_WEAK (decl) = 0;
1069 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
1070 DECL_ATTRIBUTES (decl));
1073 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
1074 && !DECL_DECLARED_INLINE_P (decl)
1075 /* Redefining an extern inline function makes it DECL_UNINLINABLE. */
1076 && !DECL_UNINLINABLE (decl))
1077 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
1078 "always_inline function might not be inlinable");
1080 process_common_attributes (decl);
1082 for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
1084 tree decl = vnode->decl;
1085 if (DECL_PRESERVE_P (decl))
1087 vnode->force_output = true;
1088 if (vnode->finalized)
1089 varpool_mark_needed_node (vnode);
1091 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
1092 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl))
1093 && TREE_PUBLIC (vnode->decl))
1095 if (vnode->finalized)
1096 varpool_mark_needed_node (vnode);
1098 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
1100 if (! TREE_PUBLIC (vnode->decl))
1101 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
1102 "%<externally_visible%>"
1103 " attribute have effect only on public objects");
1104 else if (vnode->finalized)
1105 varpool_mark_needed_node (vnode);
1107 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
1108 && vnode->finalized
1109 && DECL_INITIAL (decl))
1111 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
1112 "%<weakref%> attribute ignored"
1113 " because variable is initialized");
1114 DECL_WEAK (decl) = 0;
1115 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
1116 DECL_ATTRIBUTES (decl));
1118 process_common_attributes (decl);
1122 /* Process the CGRAPH_NODES_NEEDED queue, analyze each function (and transitively
1123 each reachable function) and build the cgraph.
1124 The function can be called multiple times after inserting new nodes
1125 into the beginning of the queue. Only the new part of the queue is re-scanned then. */
1127 static void
1128 cgraph_analyze_functions (void)
1130 /* Keep track of already processed nodes when called multiple times for
1131 intermodule optimization. */
1132 static struct cgraph_node *first_analyzed;
1133 struct cgraph_node *first_processed = first_analyzed;
1134 static struct varpool_node *first_analyzed_var;
1135 struct cgraph_node *node, *next;
1137 bitmap_obstack_initialize (NULL);
1138 process_function_and_variable_attributes (first_processed,
1139 first_analyzed_var);
1140 first_processed = cgraph_nodes;
1141 first_analyzed_var = varpool_nodes;
1142 varpool_analyze_pending_decls ();
1143 if (cgraph_dump_file)
1145 fprintf (cgraph_dump_file, "Initial entry points:");
1146 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1147 if (node->needed)
1148 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1149 fprintf (cgraph_dump_file, "\n");
1151 cgraph_process_new_functions ();
1153 /* Propagate reachability flag and lower representation of all reachable
1154 functions. In the future, lowering will introduce new functions and
1155 new entry points on the way (by template instantiation and virtual
1156 method table generation for instance). */
1157 while (cgraph_nodes_queue)
1159 struct cgraph_edge *edge;
1160 tree decl = cgraph_nodes_queue->decl;
1162 node = cgraph_nodes_queue;
1163 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
1164 node->next_needed = NULL;
1166 /* ??? It is possible to create an extern inline function and later use the
1167 weak alias attribute to kill its body. See
1168 gcc.c-torture/compile/20011119-1.c */
1169 if (!DECL_STRUCT_FUNCTION (decl)
1170 && (!node->alias || !node->thunk.alias)
1171 && !node->thunk.thunk_p
1172 && !node->dispatcher_function)
1174 cgraph_reset_node (node);
1175 node->local.redefined_extern_inline = true;
1176 continue;
1179 if (!node->analyzed)
1180 cgraph_analyze_function (node);
1182 for (edge = node->callees; edge; edge = edge->next_callee)
1183 if (!edge->callee->reachable)
1184 cgraph_mark_reachable_node (edge->callee);
1185 for (edge = node->callers; edge; edge = edge->next_caller)
1186 if (!edge->caller->reachable && edge->caller->thunk.thunk_p)
1187 cgraph_mark_reachable_node (edge->caller);
1189 if (node->same_comdat_group)
1191 for (next = node->same_comdat_group;
1192 next != node;
1193 next = next->same_comdat_group)
1194 cgraph_mark_reachable_node (next);
1197 /* If decl is a clone of an abstract function, mark that abstract
1198 function so that we don't release its body. The DECL_INITIAL() of that
1199 abstract function declaration will be later needed to output debug
1200 info. */
1201 if (DECL_ABSTRACT_ORIGIN (decl))
1203 struct cgraph_node *origin_node;
1204 origin_node = cgraph_get_node (DECL_ABSTRACT_ORIGIN (decl));
1205 origin_node->abstract_and_needed = true;
1208 /* We finalize local static variables while constructing the callgraph
1209 edges. Process their attributes too. */
1210 process_function_and_variable_attributes (first_processed,
1211 first_analyzed_var);
1212 first_processed = cgraph_nodes;
1213 first_analyzed_var = varpool_nodes;
1214 varpool_analyze_pending_decls ();
1215 cgraph_process_new_functions ();
1218 /* Collect entry points to the unit. */
1219 if (cgraph_dump_file)
1221 fprintf (cgraph_dump_file, "Unit entry points:");
1222 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1223 if (node->needed)
1224 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1225 fprintf (cgraph_dump_file, "\n\nInitial ");
1226 dump_cgraph (cgraph_dump_file);
1227 dump_varpool (cgraph_dump_file);
1230 if (cgraph_dump_file)
1231 fprintf (cgraph_dump_file, "\nReclaiming functions:");
1233 for (node = cgraph_nodes; node != first_analyzed; node = next)
1235 tree decl = node->decl;
1236 next = node->next;
1238 if (node->local.finalized && !gimple_has_body_p (decl)
1239 && (!node->alias || !node->thunk.alias)
1240 && !node->thunk.thunk_p)
1241 cgraph_reset_node (node);
1243 if (!node->reachable
1244 && (gimple_has_body_p (decl) || node->thunk.thunk_p
1245 || (node->alias && node->thunk.alias)))
1247 if (cgraph_dump_file)
1248 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1249 cgraph_remove_node (node);
1250 continue;
1252 else
1253 node->next_needed = NULL;
1254 gcc_assert (!node->local.finalized || node->thunk.thunk_p
1255 || node->alias
1256 || gimple_has_body_p (decl));
1257 gcc_assert (node->analyzed == node->local.finalized);
1259 if (cgraph_dump_file)
1261 fprintf (cgraph_dump_file, "\n\nReclaimed ");
1262 dump_cgraph (cgraph_dump_file);
1263 dump_varpool (cgraph_dump_file);
1265 bitmap_obstack_release (NULL);
1266 first_analyzed = cgraph_nodes;
1267 ggc_collect ();
1270 /* Translate the ugly representation of aliases as alias pairs into a nice
1271 representation in the callgraph. We don't handle all cases yet,
1272 unfortunately. */
1274 static void
1275 handle_alias_pairs (void)
1277 alias_pair *p;
1278 unsigned i;
1279 struct cgraph_node *target_node;
1280 struct cgraph_node *src_node;
1281 struct varpool_node *target_vnode;
1283 for (i = 0; VEC_iterate (alias_pair, alias_pairs, i, p);)
1285 if (TREE_CODE (p->decl) == FUNCTION_DECL
1286 && (target_node = cgraph_node_for_asm (p->target)) != NULL)
1288 src_node = cgraph_get_node (p->decl);
1289 if (src_node && src_node->local.finalized)
1290 cgraph_reset_node (src_node);
1291 /* Normally the EXTERNAL flag is used to mark external inlines;
1292 however, for aliases it seems to be allowed to use it without
1293 any meaning. See gcc.dg/attr-alias-3.c.
1294 For weakref, however, we insist on the EXTERNAL flag being set.
1295 See gcc.dg/attr-alias-5.c */
1296 if (DECL_EXTERNAL (p->decl))
1297 DECL_EXTERNAL (p->decl) = lookup_attribute ("weakref",
1298 DECL_ATTRIBUTES (p->decl)) != NULL;
1299 cgraph_create_function_alias (p->decl, target_node->decl);
1300 VEC_unordered_remove (alias_pair, alias_pairs, i);
1302 else if (TREE_CODE (p->decl) == VAR_DECL
1303 && (target_vnode = varpool_node_for_asm (p->target)) != NULL)
1305 /* Normally the EXTERNAL flag is used to mark external inlines;
1306 however, for aliases it seems to be allowed to use it without
1307 any meaning. See gcc.dg/attr-alias-3.c.
1308 For weakref, however, we insist on the EXTERNAL flag being set.
1309 See gcc.dg/attr-alias-5.c */
1310 if (DECL_EXTERNAL (p->decl))
1311 DECL_EXTERNAL (p->decl) = lookup_attribute ("weakref",
1312 DECL_ATTRIBUTES (p->decl)) != NULL;
1313 varpool_create_variable_alias (p->decl, target_vnode->decl);
1314 VEC_unordered_remove (alias_pair, alias_pairs, i);
1316 /* Weakrefs with a target not defined in the current unit are easy to handle; they
1317 behave just like external variables except that we need to note the alias flag
1318 so we can later output the weakref pseudo-op into the asm file. */
1319 else if (lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL
1320 && (TREE_CODE (p->decl) == FUNCTION_DECL
1321 ? (varpool_node_for_asm (p->target) == NULL)
1322 : (cgraph_node_for_asm (p->target) == NULL)))
1324 if (TREE_CODE (p->decl) == FUNCTION_DECL)
1325 cgraph_get_create_node (p->decl)->alias = true;
1326 else
1327 varpool_get_node (p->decl)->alias = true;
1328 DECL_EXTERNAL (p->decl) = 1;
1329 VEC_unordered_remove (alias_pair, alias_pairs, i);
1331 else
1333 if (dump_file)
1334 fprintf (dump_file, "Unhandled alias %s->%s\n",
1335 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (p->decl)),
1336 IDENTIFIER_POINTER (p->target));
1338 i++;
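/* Editorial example (not from the original sources): an alias pair as walked
   above typically comes from source such as

     int foo (void) { return 0; }
     int bar (void) __attribute__ ((alias ("foo")));

   where p->decl is bar's FUNCTION_DECL and p->target is the identifier
   naming "foo".  */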
1344 static bool backend_entered_p = false;
1345 extern bool is_backend_entered_p (void);
1347 /* Returns true if FE parsing is completely
1348 done (including pending decl processing) and the backend
1349 has taken over control. */
1351 bool
1352 is_backend_entered_p (void)
1354 return backend_entered_p;
1357 /* Analyze the whole compilation unit once it is parsed completely. */
1359 void
1360 cgraph_finalize_compilation_unit (void)
1362 timevar_push (TV_CGRAPH);
1364 backend_entered_p = true;
1366 /* Before compilation, auto profile will process the profile to build the
1367 hash tables for later optimizations. We delay this function call here
1368 because all the parsing should be done so that we will have the bfd
1369 name mapping ready. */
1370 if (flag_auto_profile)
1371 process_auto_profile ();
1373 /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE. */
1374 if (flag_lto)
1375 lto_streamer_hooks_init ();
1377 /* If we're here there's no current function anymore. Some frontends
1378 are lazy in clearing these. */
1379 current_function_decl = NULL;
1380 set_cfun (NULL);
1382 /* Do not skip analyzing the functions if there were errors; we would
1383 miss diagnostics for the following functions otherwise. */
1385 /* Emit size functions we didn't inline. */
1386 finalize_size_functions ();
1388 /* Mark alias targets necessary and emit diagnostics. */
1389 finish_aliases_1 ();
1390 handle_alias_pairs ();
1392 if (!quiet_flag)
1394 fprintf (stderr, "\nAnalyzing compilation unit\n");
1395 fflush (stderr);
1398 if (flag_dump_passes)
1399 dump_passes ();
1401 /* Gimplify and lower all functions, compute reachability and
1402 remove unreachable nodes. */
1403 cgraph_analyze_functions ();
1405 /* Mark alias targets necessary and emit diagnostics. */
1406 finish_aliases_1 ();
1407 handle_alias_pairs ();
1409 /* Gimplify and lower thunks. */
1410 cgraph_analyze_functions ();
1412 /* Finally drive the pass manager. */
1413 cgraph_optimize ();
1415 timevar_pop (TV_CGRAPH);
1418 /* Hash function for symbol (function) resolution. */
1420 static hashval_t
1421 hash_node_by_assembler_name (const void *p)
1423 const struct cgraph_node *n = (const struct cgraph_node *) p;
1424 return (hashval_t) decl_assembler_name_hash (DECL_ASSEMBLER_NAME (n->decl));
1427 /* Equality function for cgraph_node table. */
1429 static int
1430 eq_node_assembler_name (const void *p1, const void *p2)
1432 const struct cgraph_node *n1 = (const struct cgraph_node *) p1;
1433 const_tree name = (const_tree)p2;
1434 return (decl_assembler_name_equal (n1->decl, name));
1437 /* In l-ipo mode compilation (lightweight IPO), multiple bodies may
1438 be available for the same inline-declared function. cgraph linking
1439 does not really merge them, in order to keep the context (module info)
1440 of each body. After inlining, the linkage of the function may require
1441 them to be output (even if it is defined in an auxiliary module). This
1442 in turn may result in duplicate emission. */
1444 static GTY((param_is (struct cgraph_node))) htab_t output_node_hash = NULL;
1446 /* Add NODE, which is being expanded (output), to the hashtable. */
1448 static struct cgraph_node *
1449 cgraph_add_output_node (struct cgraph_node *node)
1451 void **aslot;
1452 tree name;
1454 if (!L_IPO_COMP_MODE)
1455 return node;
1457 /* Never common non-public names, except for compiler-generated
1458 static functions. (They are not promoted
1459 to globals either.) */
1460 if (!TREE_PUBLIC (node->decl)
1461 && !(DECL_ARTIFICIAL (node->decl)
1462 && DECL_ASSEMBLER_NAME_SET_P (node->decl)))
1463 return node;
1465 if (!output_node_hash)
1466 output_node_hash =
1467 htab_create_ggc (10, hash_node_by_assembler_name,
1468 eq_node_assembler_name, NULL);
1470 name = DECL_ASSEMBLER_NAME (node->decl);
1472 aslot = htab_find_slot_with_hash (output_node_hash, name,
1473 decl_assembler_name_hash (name),
1474 INSERT);
1475 if (*aslot == NULL)
1477 *aslot = node;
1478 return node;
1480 else
1481 return (struct cgraph_node *)(*aslot);
1484 #if ENABLE_CHECKING
1485 /* Return the cgraph_node if the function symbol for NODE is
1486 expanded in the output. Returns NULL otherwise. */
1488 static struct cgraph_node *
1489 cgraph_find_output_node (struct cgraph_node *node)
1491 void **aslot;
1492 tree name;
1494 if (!L_IPO_COMP_MODE)
1495 return node;
1497 /* We do not track non-public functions. */
1498 if (!TREE_PUBLIC (node->decl))
1499 return NULL;
1501 /* Never added. */
1502 if (!output_node_hash)
1503 return NULL;
1505 name = DECL_ASSEMBLER_NAME (node->decl);
1507 aslot = htab_find_slot_with_hash (output_node_hash, name,
1508 decl_assembler_name_hash (name),
1509 NO_INSERT);
1510 if (!aslot)
1511 return NULL;
1513 return (struct cgraph_node *)(*aslot);
1515 #endif
1518 #if ENABLE_CHECKING
1519 /* A function used in validation. Return true if NODE was
1520 not expanded and its body was not reclaimed. */
1522 static bool
1523 cgraph_node_expansion_skipped (struct cgraph_node *node)
1525 struct cgraph_node *output_node;
1527 if (!L_IPO_COMP_MODE)
1528 return false;
1530 output_node = cgraph_find_output_node (node);
1532 if (output_node == node)
1533 return false;
1535 if (output_node)
1536 return true;
1538 /* No output, no duplicate being output, and the node is not
1539 inlined (and reclaimed) either -- check if the caller node
1540 is output/expanded or not. */
1541 if (node->global.inlined_to)
1542 return cgraph_node_expansion_skipped (node->global.inlined_to);
1544 /* External functions not marked for output. */
1545 return true;
1547 #endif
1549 /* Figure out what functions we want to assemble. */
1551 static void
1552 cgraph_mark_functions_to_output (void)
1554 struct cgraph_node *node;
1555 #ifdef ENABLE_CHECKING
1556 bool check_same_comdat_groups = false;
1558 for (node = cgraph_nodes; node; node = node->next)
1559 gcc_assert (!node->process);
1560 #endif
1562 for (node = cgraph_nodes; node; node = node->next)
1564 tree decl = node->decl;
1565 struct cgraph_edge *e;
1567 gcc_assert (!node->process || node->same_comdat_group);
1568 if (node->process)
1569 continue;
1571 for (e = node->callers; e; e = e->next_caller)
1572 if (e->inline_failed)
1573 break;
1575 /* We need to output all local functions that are used and not
1576 always inlined, as well as those that are reachable from
1577 outside the current compilation unit. */
1578 if (node->analyzed
1579 && !node->thunk.thunk_p
1580 && !node->alias
1581 && !node->global.inlined_to
1582 && (!cgraph_only_called_directly_p (node)
1583 || ((e || ipa_ref_has_aliases_p (&node->ref_list))
1584 && node->reachable))
1585 && !TREE_ASM_WRITTEN (decl)
1586 && !(DECL_EXTERNAL (decl) || cgraph_is_aux_decl_external (node)))
1588 if (cgraph_add_output_node (node) == node)
1590 node->process = 1;
1591 if (node->same_comdat_group)
1593 struct cgraph_node *next;
1594 for (next = node->same_comdat_group;
1595 next != node;
1596 next = next->same_comdat_group)
1597 if (!next->thunk.thunk_p && !next->alias
1598 && cgraph_add_output_node (next) == next)
1599 next->process = 1;
1603 else if (node->same_comdat_group)
1605 #ifdef ENABLE_CHECKING
1606 check_same_comdat_groups = true;
1607 #endif
1609 else
1611 /* We should've reclaimed all functions that are not needed. */
1612 #ifdef ENABLE_CHECKING
1613 if (!node->global.inlined_to
1614 && gimple_has_body_p (decl)
1615 /* FIXME: in an ltrans unit when the offline copy is outside a partition but inline copies
1616 are inside a partition, we can end up not removing the body since we no longer
1617 have an analyzed node pointing to it. */
1618 && !node->in_other_partition
1619 && !node->alias
1620 && !cgraph_is_auxiliary (node->decl)
1621 && !DECL_EXTERNAL (decl))
1623 dump_cgraph_node (stderr, node);
1624 internal_error ("failed to reclaim unneeded function");
1626 #endif
1627 gcc_assert (node->global.inlined_to
1628 || !gimple_has_body_p (decl)
1629 || node->in_other_partition
1630 || DECL_EXTERNAL (decl)
1631 || cgraph_is_auxiliary (node->decl));
1636 #ifdef ENABLE_CHECKING
1637 if (check_same_comdat_groups && !L_IPO_COMP_MODE)
1638 for (node = cgraph_nodes; node; node = node->next)
1639 if (node->same_comdat_group && !node->process)
1641 tree decl = node->decl;
1642 if (!node->global.inlined_to
1643 && gimple_has_body_p (decl)
1644 /* FIXME: in an ltrans unit when the offline copy is outside a
1645 partition but inline copies are inside a partition, we can
1646 end up not removing the body since we no longer have an
1647 analyzed node pointing to it. */
1648 && !node->in_other_partition
1649 && !(DECL_EXTERNAL (decl) || cgraph_is_aux_decl_external (node))
1650 && !L_IPO_COMP_MODE)
1652 dump_cgraph_node (stderr, node);
1653 internal_error ("failed to reclaim unneeded function in same "
1654 "comdat group");
1657 #endif
1660 /* DECL is a FUNCTION_DECL. Initialize data structures so DECL is a function
1661 in lowered gimple form. IN_SSA is true if the gimple is in SSA form.
1663 Set current_function_decl and cfun to the newly constructed empty function body.
1664 Return the basic block in the function body. */
1666 basic_block
1667 init_lowered_empty_function (tree decl, bool in_ssa)
1669 basic_block bb;
1671 current_function_decl = decl;
1672 allocate_struct_function (decl, false);
1673 gimple_register_cfg_hooks ();
1674 init_empty_tree_cfg ();
1676 if (in_ssa)
1678 init_tree_ssa (cfun);
1679 init_ssa_operands ();
1680 cfun->gimple_df->in_ssa_p = true;
1683 DECL_INITIAL (decl) = make_node (BLOCK);
1685 DECL_SAVED_TREE (decl) = error_mark_node;
1686 cfun->curr_properties |=
1687 (PROP_gimple_lcf | PROP_gimple_leh | PROP_cfg | PROP_referenced_vars |
1688 PROP_ssa | PROP_gimple_any);
1690 /* Create BB for body of the function and connect it properly. */
1691 bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
1692 make_edge (ENTRY_BLOCK_PTR, bb, 0);
1693 make_edge (bb, EXIT_BLOCK_PTR, 0);
1695 return bb;
1698 /* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1699 offset indicated by VIRTUAL_OFFSET, if that is
1700 non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
1701 zero for a result adjusting thunk. */
1703 static tree
1704 thunk_adjust (gimple_stmt_iterator * bsi,
1705 tree ptr, bool this_adjusting,
1706 HOST_WIDE_INT fixed_offset, tree virtual_offset)
1708 gimple stmt;
1709 tree ret;
1711 if (this_adjusting
1712 && fixed_offset != 0)
1714 stmt = gimple_build_assign
1715 (ptr, fold_build_pointer_plus_hwi_loc (input_location,
1716 ptr,
1717 fixed_offset));
1718 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1721 /* If there's a virtual offset, look up that value in the vtable and
1722 adjust the pointer again. */
1723 if (virtual_offset)
1725 tree vtabletmp;
1726 tree vtabletmp2;
1727 tree vtabletmp3;
1729 if (!vtable_entry_type)
1731 tree vfunc_type = make_node (FUNCTION_TYPE);
1732 TREE_TYPE (vfunc_type) = integer_type_node;
1733 TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
1734 layout_type (vfunc_type);
1736 vtable_entry_type = build_pointer_type (vfunc_type);
1739 vtabletmp =
1740 create_tmp_var (build_pointer_type
1741 (build_pointer_type (vtable_entry_type)), "vptr");
1743 /* The vptr is always at offset zero in the object. */
1744 stmt = gimple_build_assign (vtabletmp,
1745 build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
1746 ptr));
1747 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1748 mark_symbols_for_renaming (stmt);
1749 find_referenced_vars_in (stmt);
1751 /* Form the vtable address. */
1752 vtabletmp2 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp)),
1753 "vtableaddr");
1754 stmt = gimple_build_assign (vtabletmp2,
1755 build_simple_mem_ref (vtabletmp));
1756 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1757 mark_symbols_for_renaming (stmt);
1758 find_referenced_vars_in (stmt);
1760 /* Find the entry with the vcall offset. */
1761 stmt = gimple_build_assign (vtabletmp2,
1762 fold_build_pointer_plus_loc (input_location,
1763 vtabletmp2,
1764 virtual_offset));
1765 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1767 /* Get the offset itself. */
1768 vtabletmp3 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp2)),
1769 "vcalloffset");
1770 stmt = gimple_build_assign (vtabletmp3,
1771 build_simple_mem_ref (vtabletmp2));
1772 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1773 mark_symbols_for_renaming (stmt);
1774 find_referenced_vars_in (stmt);
1776 /* Adjust the `this' pointer. */
1777 ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
1778 ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
1779 GSI_CONTINUE_LINKING);
1782 if (!this_adjusting
1783 && fixed_offset != 0)
1784 /* Adjust the pointer by the constant. */
1786 tree ptrtmp;
1788 if (TREE_CODE (ptr) == VAR_DECL)
1789 ptrtmp = ptr;
1790 else
1792 ptrtmp = create_tmp_var (TREE_TYPE (ptr), "ptr");
1793 stmt = gimple_build_assign (ptrtmp, ptr);
1794 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1795 mark_symbols_for_renaming (stmt);
1796 find_referenced_vars_in (stmt);
1798 ptr = fold_build_pointer_plus_hwi_loc (input_location,
1799 ptrtmp, fixed_offset);
1802 /* Emit the statement and gimplify the adjustment expression. */
1803 ret = create_tmp_var (TREE_TYPE (ptr), "adjusted_this");
1804 stmt = gimple_build_assign (ret, ptr);
1805 mark_symbols_for_renaming (stmt);
1806 find_referenced_vars_in (stmt);
1807 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1809 return ret;
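/* Editorial note (not from the original sources): this-adjusting thunks of
   the kind built by thunk_adjust typically come from C++ multiple
   inheritance, e.g.

     struct A { virtual void f (); };
     struct B { virtual void g (); };
     struct C : A, B { void g (); };

   Calling C::g through a B* requires a thunk that first shifts "this" by
   the (negative) offset of the B subobject; when virtual bases are involved
   a further vcall offset is loaded from the vtable, which is what the
   virtual_offset path above implements.  */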
1812 /* Produce assembler for thunk NODE. */
1814 static void
1815 assemble_thunk (struct cgraph_node *node)
1817 bool this_adjusting = node->thunk.this_adjusting;
1818 HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
1819 HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
1820 tree virtual_offset = NULL;
1821 tree alias = node->thunk.alias;
1822 tree thunk_fndecl = node->decl;
1823 tree a = DECL_ARGUMENTS (thunk_fndecl);
1825 current_function_decl = thunk_fndecl;
1827 /* Ensure thunks are emitted in their correct sections. */
1828 resolve_unique_section (thunk_fndecl, 0, flag_function_sections);
1830 if (this_adjusting
1831 && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1832 virtual_value, alias))
1834 const char *fnname;
1835 tree fn_block;
1836 tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1838 DECL_RESULT (thunk_fndecl)
1839 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1840 RESULT_DECL, 0, restype);
1841 fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
1843 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1844 create one. */
1845 fn_block = make_node (BLOCK);
1846 BLOCK_VARS (fn_block) = a;
1847 DECL_INITIAL (thunk_fndecl) = fn_block;
1848 init_function_start (thunk_fndecl);
1849 cfun->is_thunk = 1;
1850 assemble_start_function (thunk_fndecl, fnname);
1851 (*debug_hooks->source_line) (DECL_SOURCE_LINE (thunk_fndecl),
1852 DECL_SOURCE_FILE (thunk_fndecl),
1853 /* discriminator */ 0,
1854 /* is_stmt */ 1);
1856 targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1857 fixed_offset, virtual_value, alias);
1859 assemble_end_function (thunk_fndecl, fnname);
1860 init_insn_lengths ();
1861 free_after_compilation (cfun);
1862 set_cfun (NULL);
1863 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1864 node->thunk.thunk_p = false;
1865 node->analyzed = false;
1867 else
1869 tree restype;
1870 basic_block bb, then_bb, else_bb, return_bb;
1871 gimple_stmt_iterator bsi;
1872 int nargs = 0;
1873 tree arg;
1874 int i;
1875 tree resdecl;
1876 tree restmp = NULL;
1877 VEC(tree, heap) *vargs;
1879 gimple call;
1880 gimple ret;
1882 DECL_IGNORED_P (thunk_fndecl) = 1;
1883 bitmap_obstack_initialize (NULL);
1885 if (node->thunk.virtual_offset_p)
1886 virtual_offset = size_int (virtual_value);
1888 /* Build the return declaration for the function. */
1889 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1890 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1892 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1893 DECL_ARTIFICIAL (resdecl) = 1;
1894 DECL_IGNORED_P (resdecl) = 1;
1895 DECL_RESULT (thunk_fndecl) = resdecl;
1897 else
1898 resdecl = DECL_RESULT (thunk_fndecl);
1900 bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl, true);
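/* The thunk body starts as a single empty lowered basic block; THEN_BB,
   ELSE_BB and RETURN_BB are only split out further down, when a returned
   pointer has to be NULL-checked and adjusted.  */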
1902 bsi = gsi_start_bb (bb);
1904 /* Build call to the function being thunked. */
1905 if (!VOID_TYPE_P (restype))
1907 if (!is_gimple_reg_type (restype))
1909 restmp = resdecl;
1910 add_local_decl (cfun, restmp);
1911 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
1913 else
1914 restmp = create_tmp_var_raw (restype, "retval");
1917 for (arg = a; arg; arg = DECL_CHAIN (arg))
1918 nargs++;
1919 vargs = VEC_alloc (tree, heap, nargs);
1920 if (this_adjusting)
1921 VEC_quick_push (tree, vargs,
1922 thunk_adjust (&bsi,
1923 a, 1, fixed_offset,
1924 virtual_offset));
1925 else
1926 VEC_quick_push (tree, vargs, a);
1927 for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
1928 VEC_quick_push (tree, vargs, arg);
1929 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
1930 VEC_free (tree, heap, vargs);
1931 gimple_call_set_from_thunk (call, true);
1932 if (restmp)
1933 gimple_call_set_lhs (call, restmp);
1934 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
1935 mark_symbols_for_renaming (call);
1936 find_referenced_vars_in (call);
1937 update_stmt (call);
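/* For a this-adjusting thunk the adjustment already happened on the
   argument, so the call can become a tail call below; for a
   return-value-adjusting thunk the result may still need adjusting
   (and a NULL check when it is a pointer).  */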
1939 if (restmp && !this_adjusting)
1941 tree true_label = NULL_TREE;
1943 if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
1945 gimple stmt;
1946 /* If the return type is a pointer, we need to
1947 protect against NULL. We know there will be an
1948 adjustment, because that's why we're emitting a
1949 thunk. */
1950 then_bb = create_basic_block (NULL, (void *) 0, bb);
1951 return_bb = create_basic_block (NULL, (void *) 0, then_bb);
1952 else_bb = create_basic_block (NULL, (void *) 0, else_bb);
1953 remove_edge (single_succ_edge (bb));
1954 true_label = gimple_block_label (then_bb);
1955 stmt = gimple_build_cond (NE_EXPR, restmp,
1956 build_zero_cst (TREE_TYPE (restmp)),
1957 NULL_TREE, NULL_TREE);
1958 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1959 make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1960 make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1961 make_edge (return_bb, EXIT_BLOCK_PTR, 0);
1962 make_edge (then_bb, return_bb, EDGE_FALLTHRU);
1963 make_edge (else_bb, return_bb, EDGE_FALLTHRU);
1964 bsi = gsi_last_bb (then_bb);
1967 restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
1968 fixed_offset, virtual_offset);
1969 if (true_label)
1971 gimple stmt;
1972 bsi = gsi_last_bb (else_bb);
1973 stmt = gimple_build_assign (restmp,
1974 build_zero_cst (TREE_TYPE (restmp)));
1975 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1976 bsi = gsi_last_bb (return_bb);
1979 else
1980 gimple_call_set_tail (call, true);
1982 /* Build return value. */
1983 ret = gimple_build_return (restmp);
1984 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
1986 delete_unreachable_blocks ();
1987 update_ssa (TODO_update_ssa);
1989 /* The thunk now has a real GIMPLE body; clear the thunk flag and
1990 register it as a new function so it is compiled via the normal path. */
1991 node->thunk.thunk_p = false;
1992 cgraph_node_remove_callees (node);
1993 cgraph_add_new_function (thunk_fndecl, true);
1994 bitmap_obstack_release (NULL);
1996 current_function_decl = NULL;
2001 /* Assemble thunks and aliases associated with NODE. */
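/* Thunks appear in the callgraph as artificial callers of NODE, so they
   are found by walking NODE's caller edges; aliases are found through the
   IPA reference list.  Both are processed recursively so that thunks of
   thunks and aliases of aliases are emitted as well.  */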
2003 static void
2004 assemble_thunks_and_aliases (struct cgraph_node *node)
2006 struct cgraph_edge *e;
2007 int i;
2008 struct ipa_ref *ref;
2010 for (e = node->callers; e;)
2011 if (e->caller->thunk.thunk_p)
2013 struct cgraph_node *thunk = e->caller;
2015 e = e->next_caller;
2016 assemble_thunks_and_aliases (thunk);
2017 assemble_thunk (thunk);
2019 else
2020 e = e->next_caller;
2021 for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref); i++)
2022 if (ref->use == IPA_REF_ALIAS)
2024 struct cgraph_node *alias = ipa_ref_refering_node (ref);
2025 bool saved_written = TREE_ASM_WRITTEN (alias->thunk.alias);
2027 /* Force assemble_alias to really output the alias this time instead
2028 of deferring it to the alias pairs list. */
2029 TREE_ASM_WRITTEN (alias->thunk.alias) = 1;
2030 assemble_alias (alias->decl,
2031 DECL_ASSEMBLER_NAME (alias->thunk.alias));
2032 assemble_thunks_and_aliases (alias);
2033 TREE_ASM_WRITTEN (alias->thunk.alias) = saved_written;
2037 /* Expand function specified by NODE. */
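/* This compiles the body of NODE to assembly via
   tree_rest_of_compilation, emits the thunks and aliases associated with
   NODE right after it, and then releases the GIMPLE body and call edges
   since they are no longer needed.  */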
2039 static void
2040 cgraph_expand_function (struct cgraph_node *node)
2042 tree decl = node->decl;
2044 /* We ought to not compile any inline clones. */
2045 gcc_assert (!node->global.inlined_to);
2047 announce_function (decl);
2048 node->process = 0;
2049 gcc_assert (node->lowered);
2051 /* Generate RTL for the body of DECL. */
2052 tree_rest_of_compilation (decl);
2054 /* Make sure that BE didn't give up on compiling. */
2055 gcc_assert (TREE_ASM_WRITTEN (decl));
2056 current_function_decl = NULL;
2057 gcc_assert (!cgraph_preserve_function_body_p (node));
2059 /* It would make a lot more sense to output thunks before the function body to get
2060 more forward and fewer backward jumps. However, this would require solving a
2061 problem with comdats; see PR48668. Also, aliases must come after the function
2062 itself to keep one-pass assemblers, like the one on AIX, happy; see PR 50689.
2063 FIXME: Perhaps thunks should be moved before the function iff they are not in
2064 comdat groups. */
2065 assemble_thunks_and_aliases (node);
2066 cgraph_release_function_body (node);
2067 /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
2068 points to the dead function body. */
2069 cgraph_node_remove_callees (node);
2071 cgraph_function_flags_ready = true;
2074 /* Return true when the call along edge E is considered inlinable; store the failure reason in *REASON. */
2076 bool
2077 cgraph_inline_p (struct cgraph_edge *e, cgraph_inline_failed_t *reason)
2079 *reason = e->inline_failed;
2080 return !e->inline_failed;
2085 /* Expand all functions that must be output.
2087 Attempt to topologically sort the nodes so that a function is output when
2088 all the functions it calls are already assembled, allowing data to be
2089 propagated across the callgraph. Use a stack to get a smaller distance
2090 between a function and its callees (later we may choose to use a more
2091 sophisticated algorithm for function reordering; we will likely want
2092 to use subsections to make the output functions appear in top-down
2093 order). */
2095 static void
2096 cgraph_expand_all_functions (void)
2098 struct cgraph_node *node;
2099 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
2100 int order_pos, new_order_pos = 0;
2101 int i;
2103 order_pos = ipa_reverse_postorder (order);
2104 gcc_assert (order_pos == cgraph_n_nodes);
2106 /* The garbage collector may remove inline clones that we eliminate during
2107 optimization, so we must be sure not to reference them. */
2108 for (i = 0; i < order_pos; i++)
2109 if (order[i]->process)
2110 order[new_order_pos++] = order[i];
2112 for (i = new_order_pos - 1; i >= 0; i--)
2114 node = order[i];
2115 if (node->process)
2117 gcc_assert (node->reachable);
2118 node->process = 0;
2119 cgraph_expand_function (node);
2122 cgraph_process_new_functions ();
2124 free (order);
2128 /* This is used to sort the node types by the cgraph order number. */
2130 enum cgraph_order_sort_kind
2132 ORDER_UNDEFINED = 0,
2133 ORDER_FUNCTION,
2134 ORDER_VAR,
2135 ORDER_ASM
2138 struct cgraph_order_sort
2140 enum cgraph_order_sort_kind kind;
2141 union
2143 struct cgraph_node *f;
2144 struct varpool_node *v;
2145 struct cgraph_asm_node *a;
2146 } u;
2149 /* Output all functions, variables, and asm statements in the order
2150 according to their order fields, which is the order in which they
2151 appeared in the file. This implements -fno-toplevel-reorder. In
2152 this mode we may output functions and variables which don't really
2153 need to be output. */
2155 static void
2156 cgraph_output_in_order (void)
2158 int max;
2159 struct cgraph_order_sort *nodes;
2160 int i;
2161 struct cgraph_node *pf;
2162 struct varpool_node *pv;
2163 struct cgraph_asm_node *pa;
2165 max = cgraph_order;
2166 nodes = XCNEWVEC (struct cgraph_order_sort, max);
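/* NODES is indexed by the cgraph `order' number assigned when each
   function, variable or toplevel asm was first seen, so walking it by
   index below reproduces the original source order.  */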
2168 varpool_analyze_pending_decls ();
2169 varpool_remove_duplicate_weak_decls ();
2171 for (pf = cgraph_nodes; pf; pf = pf->next)
2173 if (pf->process && !pf->thunk.thunk_p && !pf->alias)
2175 i = pf->order;
2176 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
2177 nodes[i].kind = ORDER_FUNCTION;
2178 nodes[i].u.f = pf;
2182 for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
2184 i = pv->order;
2185 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
2186 nodes[i].kind = ORDER_VAR;
2187 nodes[i].u.v = pv;
2190 for (pa = cgraph_asm_nodes; pa; pa = pa->next)
2192 i = pa->order;
2193 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
2194 nodes[i].kind = ORDER_ASM;
2195 nodes[i].u.a = pa;
2198 /* In no-toplevel-reorder mode we output all statics; mark them as needed. */
2199 for (i = 0; i < max; ++i)
2201 if (nodes[i].kind == ORDER_VAR)
2203 varpool_mark_needed_node (nodes[i].u.v);
2206 varpool_empty_needed_queue ();
2208 for (i = 0; i < max; ++i)
2209 if (nodes[i].kind == ORDER_VAR)
2210 varpool_finalize_named_section_flags (nodes[i].u.v);
2212 for (i = 0; i < max; ++i)
2214 switch (nodes[i].kind)
2216 case ORDER_FUNCTION:
2217 nodes[i].u.f->process = 0;
2218 cgraph_expand_function (nodes[i].u.f);
2219 break;
2221 case ORDER_VAR:
2222 varpool_assemble_decl (nodes[i].u.v);
2223 break;
2225 case ORDER_ASM:
2226 assemble_asm (nodes[i].u.a->asm_str);
2227 break;
2229 case ORDER_UNDEFINED:
2230 break;
2232 default:
2233 gcc_unreachable ();
2237 cgraph_asm_nodes = NULL;
2238 free (nodes);
2241 /* Return true when the function body of NODE still needs to be kept around
2242 for later re-use. */
2243 bool
2244 cgraph_preserve_function_body_p (struct cgraph_node *node)
2246 gcc_assert (cgraph_global_info_ready);
2247 gcc_assert (!node->alias && !node->thunk.thunk_p);
2249 /* Look if there is any clone around. */
2250 if (node->clones)
2251 return true;
2252 return false;
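/* Run the interprocedural pass pipeline: the small early IPA passes
   (unless we are reading LTO input), summary generation for the regular
   IPA and LTO streaming passes, LTO summary writing, and finally the
   regular IPA passes themselves, which are skipped here when we are only
   streaming a slim LTO object or running an LTRANS stage.  */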
2255 static void
2256 ipa_passes (void)
2258 set_cfun (NULL);
2259 current_function_decl = NULL;
2260 gimple_register_cfg_hooks ();
2261 bitmap_obstack_initialize (NULL);
2263 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);
2265 if (!in_lto_p)
2267 execute_ipa_pass_list (all_small_ipa_passes);
2268 if (seen_error ())
2269 return;
2272 /* We never run removal of unreachable nodes after early passes. This is
2273 because TODO is run before the subpasses. It is important to remove
2274 the unreachable functions to save work at the IPA level and to get the LTO
2275 symbol tables right. */
2276 cgraph_remove_unreachable_nodes (true, cgraph_dump_file);
2278 /* If pass_all_early_optimizations was not scheduled, the state of
2279 the cgraph will not be properly updated. Update it now. */
2280 if (cgraph_state < CGRAPH_STATE_IPA_SSA)
2281 cgraph_state = CGRAPH_STATE_IPA_SSA;
2283 if (!in_lto_p)
2285 /* Generate coverage variables and constructors.
2286 In LIPO mode, delay this until direct call profiling
2287 is done. */
2288 if (!flag_dyn_ipa)
2289 coverage_finish ();
2291 /* Process new functions added. */
2292 set_cfun (NULL);
2293 current_function_decl = NULL;
2294 cgraph_process_new_functions ();
2296 execute_ipa_summary_passes
2297 ((struct ipa_opt_pass_d *) all_regular_ipa_passes);
2300 /* Some targets need to handle LTO assembler output specially. */
2301 if (flag_generate_lto)
2302 targetm.asm_out.lto_start ();
2304 execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);
2306 if (!in_lto_p)
2307 ipa_write_summaries ();
2309 if (flag_generate_lto)
2310 targetm.asm_out.lto_end ();
2312 if (!flag_ltrans && (in_lto_p || !flag_lto || flag_fat_lto_objects))
2313 execute_ipa_pass_list (all_regular_ipa_passes);
2314 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
2316 bitmap_obstack_release (NULL);
2320 /* Return the identifier naming the target of DECL's "alias" attribute. */
2322 static tree
2323 get_alias_symbol (tree decl)
2325 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
2326 return get_identifier (TREE_STRING_POINTER
2327 (TREE_VALUE (TREE_VALUE (alias))));
2331 /* Weakrefs may be associated with external decls and thus not output
2332 at expansion time. Emit all necessary aliases. */
2334 static void
2335 output_weakrefs (void)
2337 struct cgraph_node *node;
2338 struct varpool_node *vnode;
2339 for (node = cgraph_nodes; node; node = node->next)
2340 if (node->alias && DECL_EXTERNAL (node->decl)
2341 && !TREE_ASM_WRITTEN (node->decl)
2342 && lookup_attribute ("weakref", DECL_ATTRIBUTES (node->decl)))
2343 assemble_alias (node->decl,
2344 node->thunk.alias ? DECL_ASSEMBLER_NAME (node->thunk.alias)
2345 : get_alias_symbol (node->decl));
2346 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
2347 if (vnode->alias && DECL_EXTERNAL (vnode->decl)
2348 && !TREE_ASM_WRITTEN (vnode->decl)
2349 && lookup_attribute ("weakref", DECL_ATTRIBUTES (vnode->decl)))
2350 assemble_alias (vnode->decl,
2351 vnode->alias_of ? DECL_ASSEMBLER_NAME (vnode->alias_of)
2352 : get_alias_symbol (vnode->decl));
2356 /* Perform simple optimizations based on callgraph. */
2358 void
2359 cgraph_optimize (void)
2361 if (seen_error ())
2362 return;
2364 #ifdef ENABLE_CHECKING
2365 verify_cgraph ();
2366 #endif
2368 /* The front end may output common variables after the unit has been finalized.
2369 It is safe to deal with them here as they are always zero-initialized. */
2370 varpool_analyze_pending_decls ();
2372 timevar_push (TV_CGRAPHOPT);
2373 if (pre_ipa_mem_report)
2375 fprintf (stderr, "Memory consumption before IPA\n");
2376 dump_memory_report (false);
2378 if (!quiet_flag)
2379 fprintf (stderr, "Performing interprocedural optimizations\n");
2380 cgraph_state = CGRAPH_STATE_IPA;
2382 if (L_IPO_COMP_MODE)
2384 cgraph_init_gid_map ();
2385 cgraph_add_fake_indirect_call_edges ();
2388 /* Don't run the IPA passes if there were any errors or sorry messages. */
2389 if (!seen_error ())
2390 ipa_passes ();
2392 /* Do nothing else if any IPA pass found errors or if we are just streaming LTO. */
2393 if (seen_error ()
2394 || (!in_lto_p && flag_lto && !flag_fat_lto_objects))
2396 timevar_pop (TV_CGRAPHOPT);
2397 return;
2400 /* This pass removes bodies of extern inline functions we never inlined.
2401 Do this later so other IPA passes see what is really going on. */
2402 cgraph_remove_unreachable_nodes (false, dump_file);
2403 cgraph_global_info_ready = true;
2404 if (cgraph_dump_file)
2406 fprintf (cgraph_dump_file, "Optimized ");
2407 dump_cgraph (cgraph_dump_file);
2408 dump_varpool (cgraph_dump_file);
2410 if (post_ipa_mem_report)
2412 fprintf (stderr, "Memory consumption after IPA\n");
2413 dump_memory_report (false);
2415 timevar_pop (TV_CGRAPHOPT);
2417 /* Output everything. */
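/* From here on we are in the output phase: materialize clones, run the
   late IPA passes, decide which functions must be output, and then either
   emit everything in source order (-fno-toplevel-reorder) or expand
   functions in roughly topological order.  */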
2418 (*debug_hooks->assembly_start) ();
2419 if (!quiet_flag)
2420 fprintf (stderr, "Assembling functions:\n");
2421 #ifdef ENABLE_CHECKING
2422 verify_cgraph ();
2423 #endif
2425 cgraph_materialize_all_clones ();
2426 bitmap_obstack_initialize (NULL);
2427 execute_ipa_pass_list (all_late_ipa_passes);
2428 cgraph_remove_unreachable_nodes (true, dump_file);
2429 #ifdef ENABLE_CHECKING
2430 verify_cgraph ();
2431 #endif
2432 bitmap_obstack_release (NULL);
2433 cgraph_mark_functions_to_output ();
2434 output_weakrefs ();
2436 cgraph_state = CGRAPH_STATE_EXPANSION;
2437 if (!flag_toplevel_reorder)
2438 cgraph_output_in_order ();
2439 else
2441 cgraph_output_pending_asms ();
2443 cgraph_expand_all_functions ();
2444 varpool_remove_unreferenced_decls ();
2445 varpool_remove_duplicate_weak_decls ();
2447 varpool_assemble_pending_decls ();
2450 cgraph_process_new_functions ();
2451 cgraph_state = CGRAPH_STATE_FINISHED;
2453 if (cgraph_dump_file)
2455 fprintf (cgraph_dump_file, "\nFinal ");
2456 dump_cgraph (cgraph_dump_file);
2457 dump_varpool (cgraph_dump_file);
2459 #ifdef ENABLE_CHECKING
2460 verify_cgraph ();
2461 /* Double check that all inline clones are gone and that all
2462 function bodies have been released from memory.
2463 As an exception, allow inline clones in the callgraph if
2464 they are auxiliary functions. This is because we don't
2465 expand any of the auxiliary functions, which may result
2466 in inline clones of some auxiliary functions being left
2467 in the callgraph. */
2468 if (!seen_error ())
2470 struct cgraph_node *node;
2471 bool error_found = false;
2473 for (node = cgraph_nodes; node; node = node->next)
2474 if (node->analyzed
2475 && ((node->global.inlined_to && !cgraph_is_auxiliary (node->decl))
2476 || gimple_has_body_p (node->decl))
2477 && !cgraph_node_expansion_skipped (node))
2479 error_found = true;
2480 dump_cgraph_node (stderr, node);
2482 if (error_found)
2483 internal_error ("nodes with unreleased memory found");
2485 #endif
2488 void
2489 init_cgraph (void)
2491 if (!cgraph_dump_file)
2492 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
2495 /* The edges representing the callers of the NEW_VERSION node were
2496 fixed by cgraph_function_versioning (); now the call_expr in their
2497 respective call statements should be updated to call the NEW_VERSION. */
2499 static void
2500 update_call_expr (struct cgraph_node *new_version)
2502 struct cgraph_edge *e;
2504 gcc_assert (new_version);
2506 /* Update the call expr on the edges to call the new version. */
2507 for (e = new_version->callers; e; e = e->next_caller)
2509 struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
2510 gimple_call_set_fndecl (e->call_stmt, new_version->decl);
2511 maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
2516 /* Create a new cgraph node which is the new version of
2517 OLD_VERSION node. REDIRECT_CALLERS holds the callers
2518 edges which should be redirected to point to
2519 NEW_VERSION. ALL the callees edges of OLD_VERSION
2520 are cloned to the new version node. Return the new
2521 version node.
2523 If non-NULL, BBS_TO_COPY determines which basic blocks
2524 are copied, to prevent duplicating calls that are dead
2525 in the clone. */
2527 struct cgraph_node *
2528 cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
2529 tree new_decl,
2530 VEC(cgraph_edge_p,heap) *redirect_callers,
2531 bitmap bbs_to_copy)
2533 struct cgraph_node *new_version;
2534 struct cgraph_edge *e;
2535 unsigned i;
2537 gcc_assert (old_version);
2539 new_version = cgraph_create_node (new_decl);
2541 new_version->analyzed = old_version->analyzed;
2542 new_version->local = old_version->local;
2543 new_version->local.externally_visible = false;
2544 new_version->local.local = true;
2545 new_version->global = old_version->global;
2546 new_version->rtl = old_version->rtl;
2547 new_version->reachable = true;
2548 new_version->count = old_version->count;
2549 new_version->max_bb_count = old_version->max_bb_count;
2550 new_version->is_versioned_clone = true;
2552 for (e = old_version->callees; e; e=e->next_callee)
2553 if (!bbs_to_copy
2554 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
2555 cgraph_clone_edge (e, new_version, e->call_stmt,
2556 e->lto_stmt_uid, REG_BR_PROB_BASE,
2557 CGRAPH_FREQ_BASE,
2558 true);
2559 for (e = old_version->indirect_calls; e; e=e->next_callee)
2560 if (!bbs_to_copy
2561 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
2562 cgraph_clone_edge (e, new_version, e->call_stmt,
2563 e->lto_stmt_uid, REG_BR_PROB_BASE,
2564 CGRAPH_FREQ_BASE,
2565 true);
2566 FOR_EACH_VEC_ELT (cgraph_edge_p, redirect_callers, i, e)
2568 /* Redirect calls to the old version node to point to its new
2569 version. */
2570 cgraph_redirect_edge_callee (e, new_version);
2573 cgraph_call_node_duplication_hooks (old_version, new_version);
2575 return new_version;
2578 /* Perform function versioning.
2579 Function versioning includes copying of the tree and
2580 a callgraph update (creating a new cgraph node and updating
2581 its callees and callers).
2583 The REDIRECT_CALLERS vector includes the edges to be redirected
2584 to the new version.
2586 TREE_MAP is a mapping of tree nodes we want to replace with
2587 new ones (according to results of prior analysis).
2588 OLD_VERSION_NODE is the node that is versioned.
2590 If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
2591 from the new version.
2592 If SKIP_RETURN is true, the new version will return void.
2593 If non-NULL, BBS_TO_COPY determines which basic blocks to copy.
2594 If non-NULL, NEW_ENTRY_BLOCK determines the new entry BB of the clone.
2596 Return the new version's cgraph node. */
2598 struct cgraph_node *
2599 cgraph_function_versioning (struct cgraph_node *old_version_node,
2600 VEC(cgraph_edge_p,heap) *redirect_callers,
2601 VEC (ipa_replace_map_p,gc)* tree_map,
2602 bitmap args_to_skip,
2603 bool skip_return,
2604 bitmap bbs_to_copy,
2605 basic_block new_entry_block,
2606 const char *clone_name)
2608 tree old_decl = old_version_node->decl;
2609 struct cgraph_node *new_version_node = NULL;
2610 tree new_decl;
2612 if (!tree_versionable_function_p (old_decl))
2613 return NULL;
2615 gcc_assert (old_version_node->local.can_change_signature || !args_to_skip);
2617 /* Make a new FUNCTION_DECL tree node for the new version. */
2618 if (!args_to_skip && !skip_return)
2619 new_decl = copy_node (old_decl);
2620 else
2621 new_decl
2622 = build_function_decl_skip_args (old_decl, args_to_skip, skip_return);
2624 /* Generate a new name for the new version. */
2625 DECL_NAME (new_decl) = clone_function_name (old_decl, clone_name);
2626 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
2627 SET_DECL_RTL (new_decl, NULL);
2629 /* When the old decl was a con-/destructor make sure the clone isn't. */
2630 DECL_STATIC_CONSTRUCTOR(new_decl) = 0;
2631 DECL_STATIC_DESTRUCTOR(new_decl) = 0;
2633 /* Create the new version's call-graph node
2634 and update the edges of the new node. */
2635 new_version_node =
2636 cgraph_copy_node_for_versioning (old_version_node, new_decl,
2637 redirect_callers, bbs_to_copy);
2639 /* Copy the OLD_VERSION_NODE function tree to the new version. */
2640 tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
2641 skip_return, bbs_to_copy, new_entry_block);
2643 /* Update the new version's properties.
2644 Make the new version visible only within this translation unit. Make sure
2645 that it is not weak either.
2646 ??? We cannot use COMDAT linkage because there is no
2647 ABI support for this. */
2648 cgraph_make_decl_local (new_version_node->decl);
2649 DECL_VIRTUAL_P (new_version_node->decl) = 0;
2650 new_version_node->local.externally_visible = 0;
2651 new_version_node->local.local = 1;
2652 new_version_node->lowered = true;
2654 /* Update the call_expr on the edges to call the new version node. */
2655 update_call_expr (new_version_node);
2657 cgraph_call_function_insertion_hooks (new_version_node);
2658 return new_version_node;
2661 /* Given a virtual clone, turn it into an actual clone. */
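/* Materialization copies the body of the clone's origin into the clone's
   own decl, applying the recorded tree_map replacements and dropping
   args_to_skip, and then detaches the node from the clone tree so it is
   no longer treated as a virtual clone.  */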
2662 static void
2663 cgraph_materialize_clone (struct cgraph_node *node)
2665 bitmap_obstack_initialize (NULL);
2666 node->former_clone_of = node->clone_of->decl;
2667 if (node->clone_of->former_clone_of)
2668 node->former_clone_of = node->clone_of->former_clone_of;
2669 /* Copy the OLD_VERSION_NODE function tree to the new version. */
2670 tree_function_versioning (node->clone_of->decl, node->decl,
2671 node->clone.tree_map, true,
2672 node->clone.args_to_skip, false,
2673 NULL, NULL);
2674 if (cgraph_dump_file)
2676 dump_function_to_file (node->clone_of->decl, cgraph_dump_file, dump_flags);
2677 dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
2680 /* Function is no longer clone. */
2681 if (node->next_sibling_clone)
2682 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
2683 if (node->prev_sibling_clone)
2684 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
2685 else
2686 node->clone_of->clones = node->next_sibling_clone;
2687 node->next_sibling_clone = NULL;
2688 node->prev_sibling_clone = NULL;
2689 if (!node->clone_of->analyzed && !node->clone_of->clones)
2691 cgraph_release_function_body (node->clone_of);
2692 cgraph_node_remove_callees (node->clone_of);
2693 ipa_remove_all_references (&node->clone_of->ref_list);
2695 node->clone_of = NULL;
2696 bitmap_obstack_release (NULL);
2699 /* Return the root node of clone tree. */
2701 static inline struct cgraph_node *
2702 get_clone_orig_node (struct cgraph_node *node)
2704 while (node->clone_of
2705 && node->decl == node->clone_of->decl)
2706 node = node->clone_of;
2707 return node;
2710 /* If necessary, change the function declaration in the call statement
2711 associated with E so that it corresponds to the edge callee. */
2713 gimple
2714 cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *e)
2716 tree decl = gimple_call_fndecl (e->call_stmt);
2717 gimple new_stmt;
2718 gimple_stmt_iterator gsi;
2719 #ifdef ENABLE_CHECKING
2720 struct cgraph_node *node;
2721 #endif
2723 if (e->indirect_unknown_callee
2724 || decl == e->callee->decl)
2725 return e->call_stmt;
2727 #ifdef ENABLE_CHECKING
2728 if (decl)
2730 node = cgraph_get_node (decl);
2731 gcc_assert (!node || !node->clone.combined_args_to_skip);
2733 #endif
2735 if (cgraph_dump_file)
2737 fprintf (cgraph_dump_file, "updating call of %s/%i -> %s/%i: ",
2738 xstrdup (cgraph_node_name (e->caller)), e->caller->uid,
2739 xstrdup (cgraph_node_name (e->callee)), e->callee->uid);
2740 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
2741 if (e->callee->clone.combined_args_to_skip)
2743 fprintf (cgraph_dump_file, " combined args to skip: ");
2744 dump_bitmap (cgraph_dump_file,
2745 e->callee->clone.combined_args_to_skip);
2749 if (e->callee->clone.combined_args_to_skip)
2751 int lp_nr;
2753 new_stmt
2754 = gimple_call_copy_skip_args (e->call_stmt,
2755 e->callee->clone.combined_args_to_skip);
2756 gimple_call_set_fndecl (new_stmt, e->callee->decl);
2758 if (gimple_vdef (new_stmt)
2759 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
2760 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
2762 gsi = gsi_for_stmt (e->call_stmt);
2763 gsi_replace (&gsi, new_stmt, false);
2764 /* We need to defer cleaning EH info on the new statement to
2765 fixup-cfg. We may not have dominator information at this point
2766 and thus would end up with unreachable blocks and have no way
2767 to communicate that we need to run CFG cleanup then. */
2768 lp_nr = lookup_stmt_eh_lp (e->call_stmt);
2769 if (lp_nr != 0)
2771 remove_stmt_from_eh_lp (e->call_stmt);
2772 add_stmt_to_eh_lp (new_stmt, lp_nr);
2775 else
2777 new_stmt = e->call_stmt;
2778 gimple_call_set_fndecl (new_stmt, e->callee->decl);
2779 update_stmt (new_stmt);
2782 cgraph_set_call_stmt_including_clones (get_clone_orig_node (e->caller),
2783 e->call_stmt, new_stmt);
2785 if (cgraph_dump_file)
2787 fprintf (cgraph_dump_file, " updated to:");
2788 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
2790 return new_stmt;
2793 /* Once all functions from the compilation unit are in memory, produce all clones
2794 and update all calls. We might also do this on demand if we don't want to
2795 bring all functions into memory prior to compilation, but the current WHOPR
2796 implementation does that, and it is a bit easier to keep everything right in
2797 this order. */
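/* Note that materializing one clone may make the body of another clone's
   origin available, so the loop below repeats until no further clone can
   be materialized (the STABILIZED flag).  */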
2798 void
2799 cgraph_materialize_all_clones (void)
2801 struct cgraph_node *node;
2802 bool stabilized = false;
2804 if (cgraph_dump_file)
2805 fprintf (cgraph_dump_file, "Materializing clones\n");
2806 #ifdef ENABLE_CHECKING
2807 verify_cgraph ();
2808 #endif
2810 /* We could also use topological order, but the number of iterations should be
2811 bounded by the number of IPA passes, since a single IPA pass is probably not
2812 going to create clones of clones it created itself. */
2813 while (!stabilized)
2815 stabilized = true;
2816 for (node = cgraph_nodes; node; node = node->next)
2818 if (node->clone_of && node->decl != node->clone_of->decl
2819 && !gimple_has_body_p (node->decl))
2821 if (gimple_has_body_p (node->clone_of->decl))
2823 if (cgraph_dump_file)
2825 fprintf (cgraph_dump_file, "cloning %s to %s\n",
2826 xstrdup (cgraph_node_name (node->clone_of)),
2827 xstrdup (cgraph_node_name (node)));
2828 if (node->clone.tree_map)
2830 unsigned int i;
2831 fprintf (cgraph_dump_file, " replace map: ");
2832 for (i = 0; i < VEC_length (ipa_replace_map_p,
2833 node->clone.tree_map);
2834 i++)
2836 struct ipa_replace_map *replace_info;
2837 replace_info = VEC_index (ipa_replace_map_p,
2838 node->clone.tree_map,
2840 print_generic_expr (cgraph_dump_file,
2841 replace_info->old_tree, 0);
2842 fprintf (cgraph_dump_file, " -> ");
2843 print_generic_expr (cgraph_dump_file,
2844 replace_info->new_tree, 0);
2845 fprintf (cgraph_dump_file, "%s%s;",
2846 replace_info->replace_p ? "(replace)":"",
2847 replace_info->ref_p ? "(ref)":"");
2849 fprintf (cgraph_dump_file, "\n");
2851 if (node->clone.args_to_skip)
2853 fprintf (cgraph_dump_file, " args_to_skip: ");
2854 dump_bitmap (cgraph_dump_file,
2855 node->clone.args_to_skip);
2857 if (node->clone.args_to_skip)
2859 fprintf (cgraph_dump_file,
2860 " combined_args_to_skip:");
2861 dump_bitmap (cgraph_dump_file,
2862 node->clone.combined_args_to_skip);
2865 cgraph_materialize_clone (node);
2866 stabilized = false;
2871 for (node = cgraph_nodes; node; node = node->next)
2872 if (!node->analyzed && node->callees)
2873 cgraph_node_remove_callees (node);
2874 if (cgraph_dump_file)
2875 fprintf (cgraph_dump_file, "Materialization and call site updates done.\n");
2876 #ifdef ENABLE_CHECKING
2877 verify_cgraph ();
2878 #endif
2879 cgraph_remove_unreachable_nodes (false, cgraph_dump_file);
2882 #include "gt-cgraphunit.h"