gcc/cgraphunit.c
1 /* Callgraph based interprocedural optimizations.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This module implements the main driver of the compilation process as well as
23 a few basic interprocedural optimizers.
25 The main purpose of this file is to act as an interface between the
26 tree-based front ends and the back end (and middle end).
28 The front end is supposed to use the following functionality:
30 - cgraph_finalize_function
32 This function is called once the front end has parsed the whole body of a function
33 and it is certain that neither the function body nor the declaration will change.
35 (There is one exception needed for implementing GCC extern inline
36 functions.)
38 - varpool_finalize_variable
40 This function has the same behavior as the above but is used for static
41 variables.
43 - cgraph_finalize_compilation_unit
45 This function is called once the (source level) compilation unit is finalized
46 and it will no longer change.
48 The call-graph construction and local function
49 analysis take place here. Bodies of unreachable functions are released
50 to conserve memory usage.
52 The function can be called multiple times when multiple source level
53 compilation units are combined (such as in the C front end).
55 - cgraph_optimize
57 In this unit-at-a-time compilation the intraprocedural analysis takes
58 place here. In particular, static functions whose address is never
59 taken are marked as local. The back end can then use this information to
60 modify calling conventions, do better inlining, or similar optimizations.
62 - cgraph_mark_needed_node
63 - varpool_mark_needed_node
65 When a function or variable is referenced in some hidden way, the call-graph
66 data structure must be updated accordingly by these functions.
67 There should be little need to call them, and all references
68 should be made explicit to the cgraph code. At present these functions are
69 used by the C++ front end to explicitly mark keyed methods.
71 - analyze_expr callback
73 This function is responsible for lowering tree nodes not understood by
74 generic code into understandable ones, or alternatively marking the
75 callgraph and varpool nodes referenced by the expression as needed.
77 ??? On tree-ssa, genericizing should take place here and we would avoid the
78 need for these hooks (replacing them by a genericizing hook).
80 Analysis of all functions is deferred
81 to cgraph_finalize_compilation_unit and expansion to cgraph_optimize.
83 In cgraph_finalize_compilation_unit the reachable functions are
84 analyzed. During analysis the call-graph edges from reachable
85 functions are constructed and their destinations are marked as
86 reachable. References to functions and variables are discovered too,
87 and variables found to be needed are output to the assembly file. Via the
88 mark_referenced call in assemble_variable, functions referenced by
89 static variables are noticed too.
91 The intra-procedural information is produced and its existence is
92 indicated by global_info_ready. Once this flag is set it is impossible
93 to change a function from !reachable to reachable, and thus
94 assemble_variable no longer calls mark_referenced.
96 Finally the call-graph is topologically sorted and all reachable functions
97 that have not been completely inlined or are not external are output.
99 ??? It is possible that a reference to a function or variable is optimized
100 out. We cannot deal with this nicely because the topological order is not
101 suitable for it. For tree-ssa we may consider another pass doing
102 optimization and re-discovering reachable functions.
104 ??? Reorganize the code so variables are output very last and only if they
105 really have been referenced by the produced code, so we catch more cases
106 where the reference has been optimized out. */
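/* A minimal sketch of the calling sequence described above, as a
   hypothetical front end might drive it.  parse_next_function and
   parse_next_variable are made-up placeholders for the front end's own
   parser, not real GCC entry points; the cgraph/varpool calls are the
   ones named in the comment above.  Note that in this file
   cgraph_finalize_compilation_unit itself ends by calling
   cgraph_optimize, so the front end need not call it separately.  */
#if 0
static void
hypothetical_frontend_compile_unit (void)
{
  tree decl;

  /* Hand each function body over as soon as it is fully parsed.  */
  while ((decl = parse_next_function ()) != NULL_TREE)
    cgraph_finalize_function (decl, /*nested=*/false);

  /* Static variables go through the analogous varpool entry point.  */
  while ((decl = parse_next_variable ()) != NULL_TREE)
    varpool_finalize_variable (decl);

  /* Once the whole unit is parsed, analyze it; this also drives the
     pass manager via cgraph_optimize.  */
  cgraph_finalize_compilation_unit ();
}
#endif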
109 #include "config.h"
110 #include "system.h"
111 #include "coretypes.h"
112 #include "tm.h"
113 #include "tree.h"
114 #include "rtl.h"
115 #include "tree-flow.h"
116 #include "tree-inline.h"
117 #include "langhooks.h"
118 #include "pointer-set.h"
119 #include "toplev.h"
120 #include "flags.h"
121 #include "ggc.h"
122 #include "debug.h"
123 #include "target.h"
124 #include "cgraph.h"
125 #include "diagnostic.h"
126 #include "timevar.h"
127 #include "params.h"
128 #include "fibheap.h"
129 #include "intl.h"
130 #include "function.h"
131 #include "ipa-prop.h"
132 #include "gimple.h"
133 #include "tree-iterator.h"
134 #include "tree-pass.h"
135 #include "tree-dump.h"
136 #include "output.h"
137 #include "coverage.h"
138 #include "plugin.h"
140 static void cgraph_expand_all_functions (void);
141 static void cgraph_mark_functions_to_output (void);
142 static void cgraph_expand_function (struct cgraph_node *);
143 static void cgraph_output_pending_asms (void);
144 static void cgraph_analyze_function (struct cgraph_node *);
146 static FILE *cgraph_dump_file;
148 /* A vector of FUNCTION_DECLs declared as static constructors. */
149 static GTY (()) VEC(tree, gc) *static_ctors;
150 /* A vector of FUNCTION_DECLs declared as static destructors. */
151 static GTY (()) VEC(tree, gc) *static_dtors;
153 /* Used for vtable lookup in thunk adjusting. */
154 static GTY (()) tree vtable_entry_type;
156 /* When the target does not have ctors and dtors, we call all constructors
157 and destructors through a special initialization/destruction function
158 recognized by collect2.
160 When we are going to build this function, collect all constructors and
161 destructors and turn them into normal functions. */
163 static void
164 record_cdtor_fn (tree fndecl)
166 struct cgraph_node *node;
167 if (targetm.have_ctors_dtors
168 || (!DECL_STATIC_CONSTRUCTOR (fndecl)
169 && !DECL_STATIC_DESTRUCTOR (fndecl)))
170 return;
172 if (DECL_STATIC_CONSTRUCTOR (fndecl))
174 VEC_safe_push (tree, gc, static_ctors, fndecl);
175 DECL_STATIC_CONSTRUCTOR (fndecl) = 0;
177 if (DECL_STATIC_DESTRUCTOR (fndecl))
179 VEC_safe_push (tree, gc, static_dtors, fndecl);
180 DECL_STATIC_DESTRUCTOR (fndecl) = 0;
182 node = cgraph_node (fndecl);
183 node->local.disregard_inline_limits = 1;
184 cgraph_mark_reachable_node (node);
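/* Purely illustrative example of what reaches record_cdtor_fn on a
   target without native ctor/dtor support (not part of this file):
   both functions below carry DECL_STATIC_CONSTRUCTOR or
   DECL_STATIC_DESTRUCTOR, so they are pushed onto
   static_ctors/static_dtors, the flags are cleared, and they are later
   called from the collect2-visible wrappers built by
   cgraph_build_cdtor_fns.  */
#if 0
static void __attribute__ ((constructor (200)))
setup_table (void)
{
  /* Runs before main; initialization priority 200.  */
}

static void __attribute__ ((destructor))
teardown_table (void)
{
  /* Runs at program exit; default priority.  */
}
#endif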
187 /* Define global constructor/destructor functions for the CDTORS, of
188 which there are LEN. The CDTORS are sorted by initialization
189 priority. If CTOR_P is true, these are constructors; otherwise,
190 they are destructors. */
192 static void
193 build_cdtor (bool ctor_p, tree *cdtors, size_t len)
195 size_t i;
197 i = 0;
198 while (i < len)
200 tree body;
201 tree fn;
202 priority_type priority;
204 priority = 0;
205 body = NULL_TREE;
206 /* Find the next batch of constructors/destructors with the same
207 initialization priority. */
210 priority_type p;
211 fn = cdtors[i];
212 p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
213 if (!body)
214 priority = p;
215 else if (p != priority)
216 break;
217 append_to_statement_list (build_function_call_expr (UNKNOWN_LOCATION,
218 fn, 0),
219 &body);
220 ++i;
222 while (i < len);
223 gcc_assert (body != NULL_TREE);
224 /* Generate a function to call all the functions of like
225 priority. */
226 cgraph_build_static_cdtor (ctor_p ? 'I' : 'D', body, priority);
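/* Worked example (illustrative only): if CDTORS contains three
   constructors already sorted by priority, say a (101), b (101) and
   c (200), the loop above emits one 'I' function of priority 101 whose
   body calls a () and b (), and a second 'I' function of priority 200
   that calls c ().  */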
230 /* Comparison function for qsort. P1 and P2 are actually of type
231 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
232 used to determine the sort order. */
234 static int
235 compare_ctor (const void *p1, const void *p2)
237 tree f1;
238 tree f2;
239 int priority1;
240 int priority2;
242 f1 = *(const tree *)p1;
243 f2 = *(const tree *)p2;
244 priority1 = DECL_INIT_PRIORITY (f1);
245 priority2 = DECL_INIT_PRIORITY (f2);
247 if (priority1 < priority2)
248 return -1;
249 else if (priority1 > priority2)
250 return 1;
251 else
252 /* Ensure a stable sort. */
253 return (const tree *)p1 - (const tree *)p2;
256 /* Comparison function for qsort. P1 and P2 are actually of type
257 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
258 used to determine the sort order. */
260 static int
261 compare_dtor (const void *p1, const void *p2)
263 tree f1;
264 tree f2;
265 int priority1;
266 int priority2;
268 f1 = *(const tree *)p1;
269 f2 = *(const tree *)p2;
270 priority1 = DECL_FINI_PRIORITY (f1);
271 priority2 = DECL_FINI_PRIORITY (f2);
273 if (priority1 < priority2)
274 return -1;
275 else if (priority1 > priority2)
276 return 1;
277 else
278 /* Ensure a stable sort. */
279 return (const tree *)p1 - (const tree *)p2;
282 /* Generate functions to call static constructors and destructors
283 for targets that do not support .ctors/.dtors sections. These
284 functions have magic names which are detected by collect2. */
286 static void
287 cgraph_build_cdtor_fns (void)
289 if (!VEC_empty (tree, static_ctors))
291 gcc_assert (!targetm.have_ctors_dtors);
292 qsort (VEC_address (tree, static_ctors),
293 VEC_length (tree, static_ctors),
294 sizeof (tree),
295 compare_ctor);
296 build_cdtor (/*ctor_p=*/true,
297 VEC_address (tree, static_ctors),
298 VEC_length (tree, static_ctors));
299 VEC_truncate (tree, static_ctors, 0);
302 if (!VEC_empty (tree, static_dtors))
304 gcc_assert (!targetm.have_ctors_dtors);
305 qsort (VEC_address (tree, static_dtors),
306 VEC_length (tree, static_dtors),
307 sizeof (tree),
308 compare_dtor);
309 build_cdtor (/*ctor_p=*/false,
310 VEC_address (tree, static_dtors),
311 VEC_length (tree, static_dtors));
312 VEC_truncate (tree, static_dtors, 0);
316 /* Determine if function DECL is needed. That is, visible to something
317 either outside this translation unit or something magic in the system
318 configury. */
320 bool
321 cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
323 /* If the user told us it is used, then it must be so. */
324 if (node->local.externally_visible)
325 return true;
327 /* ??? If the assembler name is set by hand, it is possible to assemble
328 the name later after finalizing the function and the fact is noticed
329 in assemble_name then. This is arguably a bug. */
330 if (DECL_ASSEMBLER_NAME_SET_P (decl)
331 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
332 return true;
334 /* With -fkeep-inline-functions we are keeping all inline functions except
335 for extern inline ones. */
336 if (flag_keep_inline_functions
337 && DECL_DECLARED_INLINE_P (decl)
338 && !DECL_EXTERNAL (decl)
339 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl)))
340 return true;
342 /* If we decided it was needed before, but at the time we didn't have
343 the body of the function available, then it's still needed. We have
344 to go back and re-check its dependencies now. */
345 if (node->needed)
346 return true;
348 /* Externally visible functions must be output. The exception is
349 COMDAT functions that must be output only when they are needed.
351 When not optimizing, also output the static functions (see
352 PR24561), but don't do so for always_inline functions, functions
353 declared inline, and nested functions. These were optimized out
354 in the original implementation and it is unclear whether we want
355 to change the behavior here. */
356 if (((TREE_PUBLIC (decl)
357 || (!optimize && !node->local.disregard_inline_limits
358 && !DECL_DECLARED_INLINE_P (decl)
359 && !node->origin))
360 && !flag_whole_program
361 && !flag_lto
362 && !flag_whopr)
363 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
364 return true;
366 /* Constructors and destructors are reachable from the runtime by
367 some mechanism. */
368 if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
369 return true;
371 return false;
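/* Illustration of the rules above (example declarations only, not part
   of this file; ignores -fwhole-program/LTO, which can change the
   answer):

     void exported (void) { ... }          -- TREE_PUBLIC, neither COMDAT
                                              nor DECL_EXTERNAL, so needed;
     static void helper (void) { ... }     -- when optimizing, needed only
                                              if something reachable uses
                                              it; at -O0 it is output
                                              anyway unless declared inline
                                              or nested;
     __attribute__ ((constructor))
     static void run_early (void) { ... }  -- DECL_STATIC_CONSTRUCTOR, so
                                              always considered needed.  */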
374 /* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
375 functions into the callgraph so that they look like ordinary reachable
376 functions inserted into the callgraph already at construction time. */
378 bool
379 cgraph_process_new_functions (void)
381 bool output = false;
382 tree fndecl;
383 struct cgraph_node *node;
385 /* Note that this queue may grow as it's being processed, as the new
386 functions may generate new ones. */
387 while (cgraph_new_nodes)
389 node = cgraph_new_nodes;
390 fndecl = node->decl;
391 cgraph_new_nodes = cgraph_new_nodes->next_needed;
392 switch (cgraph_state)
394 case CGRAPH_STATE_CONSTRUCTION:
395 /* At construction time we just need to finalize function and move
396 it into reachable functions list. */
398 node->next_needed = NULL;
399 cgraph_finalize_function (fndecl, false);
400 cgraph_mark_reachable_node (node);
401 output = true;
402 break;
404 case CGRAPH_STATE_IPA:
405 case CGRAPH_STATE_IPA_SSA:
406 /* When IPA optimization has already started, do all essential
407 transformations that have already been performed on the whole
408 cgraph but not on this function. */
410 gimple_register_cfg_hooks ();
411 if (!node->analyzed)
412 cgraph_analyze_function (node);
413 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
414 current_function_decl = fndecl;
415 compute_inline_parameters (node);
416 if ((cgraph_state == CGRAPH_STATE_IPA_SSA
417 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
418 /* When not optimizing, be sure we run early local passes anyway
419 to expand OMP. */
420 || !optimize)
421 execute_pass_list (pass_early_local_passes.pass.sub);
422 free_dominance_info (CDI_POST_DOMINATORS);
423 free_dominance_info (CDI_DOMINATORS);
424 pop_cfun ();
425 current_function_decl = NULL;
426 break;
428 case CGRAPH_STATE_EXPANSION:
429 /* Functions created during expansion shall be compiled
430 directly. */
431 node->process = 0;
432 cgraph_expand_function (node);
433 break;
435 default:
436 gcc_unreachable ();
437 break;
439 cgraph_call_function_insertion_hooks (node);
441 return output;
444 /* As a GCC extension we allow redefinition of the function. The
445 semantics when the two bodies differ are not well defined.
446 We replace the old body with the new body, so in unit-at-a-time mode
447 we always use the new body, while in normal mode we may end up with the
448 old body inlined into some functions and the new body expanded and
449 inlined in others.
451 ??? It may make more sense to use one body for inlining and the other
452 body for expanding the function, but this is difficult to do. */
454 static void
455 cgraph_reset_node (struct cgraph_node *node)
457 /* If node->process is set, then we have already begun whole-unit analysis.
458 This is *not* testing for whether we've already emitted the function.
459 That case can be sort-of legitimately seen with real function redefinition
460 errors. I would argue that the front end should never present us with
461 such a case, but don't enforce that for now. */
462 gcc_assert (!node->process);
464 /* Reset our data structures so we can analyze the function again. */
465 memset (&node->local, 0, sizeof (node->local));
466 memset (&node->global, 0, sizeof (node->global));
467 memset (&node->rtl, 0, sizeof (node->rtl));
468 node->analyzed = false;
469 node->local.redefined_extern_inline = true;
470 node->local.finalized = false;
472 cgraph_node_remove_callees (node);
474 /* We may need to re-queue the node for assembling in case
475 we already processed it and ignored it as not needed, or got
476 a re-declaration in IMA mode. */
477 if (node->reachable)
479 struct cgraph_node *n;
481 for (n = cgraph_nodes_queue; n; n = n->next_needed)
482 if (n == node)
483 break;
484 if (!n)
485 node->reachable = 0;
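/* Illustrative source for the redefinition case cgraph_reset_node
   handles (GNU "extern inline" semantics; not part of this file):

       extern inline int f (void) { return 1; }   // provisional inline body
       int f (void) { return 2; }                 // real body; the cgraph node
                                                  // for f is reset and
                                                  // redefined_extern_inline set

   Everything derived from the first body (callees, local/global/rtl
   info) is thrown away so that f can be analyzed again from scratch.  */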
489 static void
490 cgraph_lower_function (struct cgraph_node *node)
492 if (node->lowered)
493 return;
495 if (node->nested)
496 lower_nested_functions (node->decl);
497 gcc_assert (!node->nested);
499 tree_lowering_passes (node->decl);
500 node->lowered = true;
503 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
504 logic in effect. If NESTED is true, then our caller cannot stand to have
505 the garbage collector run at the moment. We would need to either create
506 a new GC context, or just not compile right now. */
508 void
509 cgraph_finalize_function (tree decl, bool nested)
511 struct cgraph_node *node = cgraph_node (decl);
513 if (node->local.finalized)
514 cgraph_reset_node (node);
516 node->pid = cgraph_max_pid ++;
517 notice_global_symbol (decl);
518 node->local.finalized = true;
519 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
520 node->finalized_by_frontend = true;
521 record_cdtor_fn (node->decl);
523 if (cgraph_decide_is_function_needed (node, decl))
524 cgraph_mark_needed_node (node);
526 /* Since we reclaim unreachable nodes at the end of every language
527 level unit, we need to be conservative about possible entry points
528 there. */
529 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)))
530 cgraph_mark_reachable_node (node);
532 /* If we've not yet emitted decl, tell the debug info about it. */
533 if (!TREE_ASM_WRITTEN (decl))
534 (*debug_hooks->deferred_inline_function) (decl);
536 /* Possibly warn about unused parameters. */
537 if (warn_unused_parameter)
538 do_warn_unused_parameter (decl);
540 if (!nested)
541 ggc_collect ();
544 /* The C99 extern inline keyword allows changing a declaration after the function
545 has been finalized. We need to re-decide whether we want to mark the function as
546 needed then. */
548 void
549 cgraph_mark_if_needed (tree decl)
551 struct cgraph_node *node = cgraph_node (decl);
552 if (node->local.finalized && cgraph_decide_is_function_needed (node, decl))
553 cgraph_mark_needed_node (node);
556 /* Return TRUE if NODE2 is equivalent to NODE or its clone. */
557 static bool
558 clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
560 while (node != node2 && node2)
561 node2 = node2->clone_of;
562 return node2 != NULL;
565 /* Verify consistency of cgraph node NODE. */
566 void
567 verify_cgraph_node (struct cgraph_node *node)
569 struct cgraph_edge *e;
570 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
571 struct function *saved_cfun = cfun;
572 basic_block this_block;
573 gimple_stmt_iterator gsi;
574 bool error_found = false;
576 if (errorcount || sorrycount)
577 return;
579 timevar_push (TV_CGRAPH_VERIFY);
580 /* debug_generic_stmt needs correct cfun */
581 set_cfun (this_cfun);
582 for (e = node->callees; e; e = e->next_callee)
583 if (e->aux)
585 error ("aux field set for edge %s->%s",
586 identifier_to_locale (cgraph_node_name (e->caller)),
587 identifier_to_locale (cgraph_node_name (e->callee)));
588 error_found = true;
590 if (node->count < 0)
592 error ("Execution count is negative");
593 error_found = true;
595 if (node->global.inlined_to && node->local.externally_visible)
597 error ("Externally visible inline clone");
598 error_found = true;
600 if (node->global.inlined_to && node->address_taken)
602 error ("Inline clone with address taken");
603 error_found = true;
605 if (node->global.inlined_to && node->needed)
607 error ("Inline clone is needed");
608 error_found = true;
610 for (e = node->callers; e; e = e->next_caller)
612 if (e->count < 0)
614 error ("caller edge count is negative");
615 error_found = true;
617 if (e->frequency < 0)
619 error ("caller edge frequency is negative");
620 error_found = true;
622 if (e->frequency > CGRAPH_FREQ_MAX)
624 error ("caller edge frequency is too large");
625 error_found = true;
627 if (gimple_has_body_p (e->caller->decl)
628 && !e->caller->global.inlined_to
629 && (e->frequency
630 != compute_call_stmt_bb_frequency (e->caller->decl,
631 gimple_bb (e->call_stmt))))
633 error ("caller edge frequency %i does not match BB freqency %i",
634 e->frequency,
635 compute_call_stmt_bb_frequency (e->caller->decl,
636 gimple_bb (e->call_stmt)));
637 error_found = true;
639 if (!e->inline_failed)
641 if (node->global.inlined_to
642 != (e->caller->global.inlined_to
643 ? e->caller->global.inlined_to : e->caller))
645 error ("inlined_to pointer is wrong");
646 error_found = true;
648 if (node->callers->next_caller)
650 error ("multiple inline callers");
651 error_found = true;
654 else
655 if (node->global.inlined_to)
657 error ("inlined_to pointer set for noninline callers");
658 error_found = true;
661 if (!node->callers && node->global.inlined_to)
663 error ("inlined_to pointer is set but no predecessors found");
664 error_found = true;
666 if (node->global.inlined_to == node)
668 error ("inlined_to pointer refers to itself");
669 error_found = true;
672 if (!cgraph_node (node->decl))
674 error ("node not found in cgraph_hash");
675 error_found = true;
678 if (node->clone_of)
680 struct cgraph_node *n;
681 for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
682 if (n == node)
683 break;
684 if (!n)
686 error ("node has wrong clone_of");
687 error_found = true;
690 if (node->clones)
692 struct cgraph_node *n;
693 for (n = node->clones; n; n = n->next_sibling_clone)
694 if (n->clone_of != node)
695 break;
696 if (n)
698 error ("node has wrong clone list");
699 error_found = true;
702 if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
704 error ("node is in clone list but it is not clone");
705 error_found = true;
707 if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
709 error ("node has wrong prev_clone pointer");
710 error_found = true;
712 if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
714 error ("double linked list of clones corrupted");
715 error_found = true;
718 if (node->analyzed && gimple_has_body_p (node->decl)
719 && !TREE_ASM_WRITTEN (node->decl)
720 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
721 && !flag_wpa)
723 if (this_cfun->cfg)
725 /* The nodes we're interested in are never shared, so walk
726 the tree ignoring duplicates. */
727 struct pointer_set_t *visited_nodes = pointer_set_create ();
728 /* Reach the trees by walking over the CFG, and note the
729 enclosing basic-blocks in the call edges. */
730 FOR_EACH_BB_FN (this_block, this_cfun)
731 for (gsi = gsi_start_bb (this_block);
732 !gsi_end_p (gsi);
733 gsi_next (&gsi))
735 gimple stmt = gsi_stmt (gsi);
736 tree decl;
737 if (is_gimple_call (stmt) && (decl = gimple_call_fndecl (stmt)))
739 struct cgraph_edge *e = cgraph_edge (node, stmt);
740 if (e)
742 if (e->aux)
744 error ("shared call_stmt:");
745 debug_gimple_stmt (stmt);
746 error_found = true;
748 if (e->callee->same_body_alias)
750 error ("edge points to same body alias:");
751 debug_tree (e->callee->decl);
752 error_found = true;
754 else if (!clone_of_p (cgraph_node (decl), e->callee)
755 && !e->callee->global.inlined_to)
757 error ("edge points to wrong declaration:");
758 debug_tree (e->callee->decl);
759 fprintf (stderr," Instead of:");
760 debug_tree (decl);
761 error_found = true;
763 e->aux = (void *)1;
765 else
767 error ("missing callgraph edge for call stmt:");
768 debug_gimple_stmt (stmt);
769 error_found = true;
773 pointer_set_destroy (visited_nodes);
775 else
776 /* No CFG available?! */
777 gcc_unreachable ();
779 for (e = node->callees; e; e = e->next_callee)
781 if (!e->aux && !e->indirect_call)
783 error ("edge %s->%s has no corresponding call_stmt",
784 identifier_to_locale (cgraph_node_name (e->caller)),
785 identifier_to_locale (cgraph_node_name (e->callee)));
786 debug_gimple_stmt (e->call_stmt);
787 error_found = true;
789 e->aux = 0;
792 if (error_found)
794 dump_cgraph_node (stderr, node);
795 internal_error ("verify_cgraph_node failed");
797 set_cfun (saved_cfun);
798 timevar_pop (TV_CGRAPH_VERIFY);
801 /* Verify whole cgraph structure. */
802 void
803 verify_cgraph (void)
805 struct cgraph_node *node;
807 if (sorrycount || errorcount)
808 return;
810 for (node = cgraph_nodes; node; node = node->next)
811 verify_cgraph_node (node);
814 /* Output all asm statements we have stored up to be output. */
816 static void
817 cgraph_output_pending_asms (void)
819 struct cgraph_asm_node *can;
821 if (errorcount || sorrycount)
822 return;
824 for (can = cgraph_asm_nodes; can; can = can->next)
825 assemble_asm (can->asm_str);
826 cgraph_asm_nodes = NULL;
829 /* Analyze the function scheduled to be output. */
830 static void
831 cgraph_analyze_function (struct cgraph_node *node)
833 tree save = current_function_decl;
834 tree decl = node->decl;
836 current_function_decl = decl;
837 push_cfun (DECL_STRUCT_FUNCTION (decl));
839 assign_assembler_name_if_neeeded (node->decl);
841 /* Make sure to gimplify bodies only once. During analyzing a
842 function we lower it, which will require gimplified nested
843 functions, so we can end up here with an already gimplified
844 body. */
845 if (!gimple_body (decl))
846 gimplify_function_tree (decl);
847 dump_function (TDI_generic, decl);
849 cgraph_lower_function (node);
850 node->analyzed = true;
852 pop_cfun ();
853 current_function_decl = save;
856 /* Look for externally_visible and used attributes and mark cgraph nodes
857 accordingly.
859 We cannot mark the nodes at the point the attributes are processed (in
860 handle_*_attribute) because the copy of the declarations available at that
861 point may not be canonical. For example, in:
863 void f();
864 void f() __attribute__((used));
866 the declaration we see in handle_used_attribute will be the second
867 declaration -- but the front end will subsequently merge that declaration
868 with the original declaration and discard the second declaration.
870 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
872 void f() {}
873 void f() __attribute__((externally_visible));
875 is valid.
877 So, we walk the nodes at the end of the translation unit, applying the
878 attributes at that point. */
880 static void
881 process_function_and_variable_attributes (struct cgraph_node *first,
882 struct varpool_node *first_var)
884 struct cgraph_node *node;
885 struct varpool_node *vnode;
887 for (node = cgraph_nodes; node != first; node = node->next)
889 tree decl = node->decl;
890 if (DECL_PRESERVE_P (decl))
892 mark_decl_referenced (decl);
893 if (node->local.finalized)
894 cgraph_mark_needed_node (node);
896 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
898 if (! TREE_PUBLIC (node->decl))
899 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
900 "%<externally_visible%>"
901 " attribute have effect only on public objects");
902 else if (node->local.finalized)
903 cgraph_mark_needed_node (node);
906 for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
908 tree decl = vnode->decl;
909 if (DECL_PRESERVE_P (decl))
911 mark_decl_referenced (decl);
912 vnode->force_output = true;
913 if (vnode->finalized)
914 varpool_mark_needed_node (vnode);
916 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
918 if (! TREE_PUBLIC (vnode->decl))
919 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
920 "%<externally_visible%>"
921 " attribute have effect only on public objects");
922 else if (vnode->finalized)
923 varpool_mark_needed_node (vnode);
928 /* Process CGRAPH_NODES_NEEDED queue, analyze each function (and transitively
929 each reachable function) and build the cgraph.
930 The function can be called multiple times after inserting new nodes
931 into the beginning of the queue. Just the new part of the queue is re-scanned then. */
933 static void
934 cgraph_analyze_functions (void)
936 /* Keep track of already processed nodes when called multiple times for
937 intermodule optimization. */
938 static struct cgraph_node *first_analyzed;
939 struct cgraph_node *first_processed = first_analyzed;
940 static struct varpool_node *first_analyzed_var;
941 struct cgraph_node *node, *next;
943 process_function_and_variable_attributes (first_processed,
944 first_analyzed_var);
945 first_processed = cgraph_nodes;
946 first_analyzed_var = varpool_nodes;
947 varpool_analyze_pending_decls ();
948 if (cgraph_dump_file)
950 fprintf (cgraph_dump_file, "Initial entry points:");
951 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
952 if (node->needed)
953 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
954 fprintf (cgraph_dump_file, "\n");
956 cgraph_process_new_functions ();
958 /* Propagate reachability flag and lower representation of all reachable
959 functions. In the future, lowering will introduce new functions and
960 new entry points on the way (by template instantiation and virtual
961 method table generation for instance). */
962 while (cgraph_nodes_queue)
964 struct cgraph_edge *edge;
965 tree decl = cgraph_nodes_queue->decl;
967 node = cgraph_nodes_queue;
968 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
969 node->next_needed = NULL;
971 /* ??? It is possible to create an extern inline function and later use the
972 weak alias attribute to kill its body. See
973 gcc.c-torture/compile/20011119-1.c */
974 if (!DECL_STRUCT_FUNCTION (decl))
976 cgraph_reset_node (node);
977 continue;
980 if (!node->analyzed)
981 cgraph_analyze_function (node);
983 for (edge = node->callees; edge; edge = edge->next_callee)
984 if (!edge->callee->reachable)
985 cgraph_mark_reachable_node (edge->callee);
987 if (node->same_comdat_group)
989 for (next = node->same_comdat_group;
990 next != node;
991 next = next->same_comdat_group)
992 cgraph_mark_reachable_node (next);
995 /* If decl is a clone of an abstract function, mark that abstract
996 function so that we don't release its body. The DECL_INITIAL() of that
997 abstract function declaration will later be needed to output debug info. */
998 if (DECL_ABSTRACT_ORIGIN (decl))
1000 struct cgraph_node *origin_node = cgraph_node (DECL_ABSTRACT_ORIGIN (decl));
1001 origin_node->abstract_and_needed = true;
1004 /* We finalize local static variables while constructing the callgraph
1005 edges. Process their attributes too. */
1006 process_function_and_variable_attributes (first_processed,
1007 first_analyzed_var);
1008 first_processed = cgraph_nodes;
1009 first_analyzed_var = varpool_nodes;
1010 varpool_analyze_pending_decls ();
1011 cgraph_process_new_functions ();
1014 /* Collect entry points to the unit. */
1015 if (cgraph_dump_file)
1017 fprintf (cgraph_dump_file, "Unit entry points:");
1018 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1019 if (node->needed)
1020 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1021 fprintf (cgraph_dump_file, "\n\nInitial ");
1022 dump_cgraph (cgraph_dump_file);
1025 if (cgraph_dump_file)
1026 fprintf (cgraph_dump_file, "\nReclaiming functions:");
1028 for (node = cgraph_nodes; node != first_analyzed; node = next)
1030 tree decl = node->decl;
1031 next = node->next;
1033 if (node->local.finalized && !gimple_has_body_p (decl))
1034 cgraph_reset_node (node);
1036 if (!node->reachable && gimple_has_body_p (decl))
1038 if (cgraph_dump_file)
1039 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1040 cgraph_remove_node (node);
1041 continue;
1043 else
1044 node->next_needed = NULL;
1045 gcc_assert (!node->local.finalized || gimple_has_body_p (decl));
1046 gcc_assert (node->analyzed == node->local.finalized);
1048 if (cgraph_dump_file)
1050 fprintf (cgraph_dump_file, "\n\nReclaimed ");
1051 dump_cgraph (cgraph_dump_file);
1053 first_analyzed = cgraph_nodes;
1054 ggc_collect ();
1058 /* Analyze the whole compilation unit once it is parsed completely. */
1060 void
1061 cgraph_finalize_compilation_unit (void)
1063 timevar_push (TV_CGRAPH);
1065 /* Do not skip analyzing the functions if there were errors; we would
1066 miss diagnostics for the following functions otherwise. */
1068 /* Emit size functions we didn't inline. */
1069 finalize_size_functions ();
1071 /* Call functions declared with the "constructor" or "destructor"
1072 attribute. */
1073 cgraph_build_cdtor_fns ();
1075 /* Mark alias targets necessary and emit diagnostics. */
1076 finish_aliases_1 ();
1078 if (!quiet_flag)
1080 fprintf (stderr, "\nAnalyzing compilation unit\n");
1081 fflush (stderr);
1084 /* Gimplify and lower all functions, compute reachability and
1085 remove unreachable nodes. */
1086 cgraph_analyze_functions ();
1088 /* Mark alias targets necessary and emit diagnostics. */
1089 finish_aliases_1 ();
1091 /* Gimplify and lower thunks. */
1092 cgraph_analyze_functions ();
1094 /* Finally drive the pass manager. */
1095 cgraph_optimize ();
1097 timevar_pop (TV_CGRAPH);
1101 /* Figure out what functions we want to assemble. */
1103 static void
1104 cgraph_mark_functions_to_output (void)
1106 struct cgraph_node *node;
1107 #ifdef ENABLE_CHECKING
1108 bool check_same_comdat_groups = false;
1110 for (node = cgraph_nodes; node; node = node->next)
1111 gcc_assert (!node->process);
1112 #endif
1114 for (node = cgraph_nodes; node; node = node->next)
1116 tree decl = node->decl;
1117 struct cgraph_edge *e;
1119 gcc_assert (!node->process || node->same_comdat_group);
1120 if (node->process)
1121 continue;
1123 for (e = node->callers; e; e = e->next_caller)
1124 if (e->inline_failed)
1125 break;
1127 /* We need to output all local functions that are used and not
1128 always inlined, as well as those that are reachable from
1129 outside the current compilation unit. */
1130 if (node->analyzed
1131 && !node->global.inlined_to
1132 && (node->needed
1133 || (e && node->reachable))
1134 && !TREE_ASM_WRITTEN (decl)
1135 && !DECL_EXTERNAL (decl))
1137 node->process = 1;
1138 if (node->same_comdat_group)
1140 struct cgraph_node *next;
1141 for (next = node->same_comdat_group;
1142 next != node;
1143 next = next->same_comdat_group)
1144 next->process = 1;
1147 else if (node->same_comdat_group)
1149 #ifdef ENABLE_CHECKING
1150 check_same_comdat_groups = true;
1151 #endif
1153 else
1155 /* We should've reclaimed all functions that are not needed. */
1156 #ifdef ENABLE_CHECKING
1157 if (!node->global.inlined_to
1158 && gimple_has_body_p (decl)
1159 && !DECL_EXTERNAL (decl))
1161 dump_cgraph_node (stderr, node);
1162 internal_error ("failed to reclaim unneeded function");
1164 #endif
1165 gcc_assert (node->global.inlined_to
1166 || !gimple_has_body_p (decl)
1167 || DECL_EXTERNAL (decl));
1172 #ifdef ENABLE_CHECKING
1173 if (check_same_comdat_groups)
1174 for (node = cgraph_nodes; node; node = node->next)
1175 if (node->same_comdat_group && !node->process)
1177 tree decl = node->decl;
1178 if (!node->global.inlined_to
1179 && gimple_has_body_p (decl)
1180 && !DECL_EXTERNAL (decl))
1182 dump_cgraph_node (stderr, node);
1183 internal_error ("failed to reclaim unneeded function");
1186 #endif
1189 /* DECL is a FUNCTION_DECL. Initialize data structures so DECL is a function
1190 in lowered GIMPLE form.
1192 Set current_function_decl and cfun to the newly constructed empty function body.
1193 Return the basic block in the function body. */
1195 static basic_block
1196 init_lowered_empty_function (tree decl)
1198 basic_block bb;
1200 current_function_decl = decl;
1201 allocate_struct_function (decl, false);
1202 gimple_register_cfg_hooks ();
1203 init_empty_tree_cfg ();
1204 init_tree_ssa (cfun);
1205 init_ssa_operands ();
1206 cfun->gimple_df->in_ssa_p = true;
1207 DECL_INITIAL (decl) = make_node (BLOCK);
1209 DECL_SAVED_TREE (decl) = error_mark_node;
1210 cfun->curr_properties |=
1211 (PROP_gimple_lcf | PROP_gimple_leh | PROP_cfg | PROP_referenced_vars |
1212 PROP_ssa);
1214 /* Create BB for body of the function and connect it properly. */
1215 bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
1216 make_edge (ENTRY_BLOCK_PTR, bb, 0);
1217 make_edge (bb, EXIT_BLOCK_PTR, 0);
1219 return bb;
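/* The function above leaves the new body as the trivial CFG

       ENTRY_BLOCK -> bb -> EXIT_BLOCK

   already registered with the GIMPLE CFG hooks and marked as being in
   SSA form, so callers (such as assemble_thunk below) can simply
   insert statements into BB.  Descriptive note only.  */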
1222 /* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1223 offset indicated by VIRTUAL_OFFSET, if that is
1224 non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
1225 zero for a result adjusting thunk. */
1227 static tree
1228 thunk_adjust (gimple_stmt_iterator * bsi,
1229 tree ptr, bool this_adjusting,
1230 HOST_WIDE_INT fixed_offset, tree virtual_offset)
1232 gimple stmt;
1233 tree ret;
1235 if (this_adjusting
1236 && fixed_offset != 0)
1238 stmt = gimple_build_assign (ptr,
1239 fold_build2_loc (input_location,
1240 POINTER_PLUS_EXPR,
1241 TREE_TYPE (ptr), ptr,
1242 size_int (fixed_offset)));
1243 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1246 /* If there's a virtual offset, look up that value in the vtable and
1247 adjust the pointer again. */
1248 if (virtual_offset)
1250 tree vtabletmp;
1251 tree vtabletmp2;
1252 tree vtabletmp3;
1253 tree offsettmp;
1255 if (!vtable_entry_type)
1257 tree vfunc_type = make_node (FUNCTION_TYPE);
1258 TREE_TYPE (vfunc_type) = integer_type_node;
1259 TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
1260 layout_type (vfunc_type);
1262 vtable_entry_type = build_pointer_type (vfunc_type);
1265 vtabletmp =
1266 create_tmp_var (build_pointer_type
1267 (build_pointer_type (vtable_entry_type)), "vptr");
1269 /* The vptr is always at offset zero in the object. */
1270 stmt = gimple_build_assign (vtabletmp,
1271 build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
1272 ptr));
1273 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1274 mark_symbols_for_renaming (stmt);
1275 find_referenced_vars_in (stmt);
1277 /* Form the vtable address. */
1278 vtabletmp2 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp)),
1279 "vtableaddr");
1280 stmt = gimple_build_assign (vtabletmp2,
1281 build1 (INDIRECT_REF,
1282 TREE_TYPE (vtabletmp2), vtabletmp));
1283 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1284 mark_symbols_for_renaming (stmt);
1285 find_referenced_vars_in (stmt);
1287 /* Find the entry with the vcall offset. */
1288 stmt = gimple_build_assign (vtabletmp2,
1289 fold_build2_loc (input_location,
1290 POINTER_PLUS_EXPR,
1291 TREE_TYPE (vtabletmp2),
1292 vtabletmp2,
1293 fold_convert (sizetype,
1294 virtual_offset)));
1295 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1297 /* Get the offset itself. */
1298 vtabletmp3 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp2)),
1299 "vcalloffset");
1300 stmt = gimple_build_assign (vtabletmp3,
1301 build1 (INDIRECT_REF,
1302 TREE_TYPE (vtabletmp3),
1303 vtabletmp2));
1304 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1305 mark_symbols_for_renaming (stmt);
1306 find_referenced_vars_in (stmt);
1308 /* Cast to sizetype. */
1309 offsettmp = create_tmp_var (sizetype, "offset");
1310 stmt = gimple_build_assign (offsettmp, fold_convert (sizetype, vtabletmp3));
1311 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1312 mark_symbols_for_renaming (stmt);
1313 find_referenced_vars_in (stmt);
1315 /* Adjust the `this' pointer. */
1316 ptr = fold_build2_loc (input_location,
1317 POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
1318 offsettmp);
1321 if (!this_adjusting
1322 && fixed_offset != 0)
1323 /* Adjust the pointer by the constant. */
1325 tree ptrtmp;
1327 if (TREE_CODE (ptr) == VAR_DECL)
1328 ptrtmp = ptr;
1329 else
1331 ptrtmp = create_tmp_var (TREE_TYPE (ptr), "ptr");
1332 stmt = gimple_build_assign (ptrtmp, ptr);
1333 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1334 mark_symbols_for_renaming (stmt);
1335 find_referenced_vars_in (stmt);
1337 ptr = fold_build2_loc (input_location,
1338 POINTER_PLUS_EXPR, TREE_TYPE (ptrtmp), ptrtmp,
1339 size_int (fixed_offset));
1342 /* Emit the statement and gimplify the adjustment expression. */
1343 ret = create_tmp_var (TREE_TYPE (ptr), "adjusted_this");
1344 stmt = gimple_build_assign (ret, ptr);
1345 mark_symbols_for_renaming (stmt);
1346 find_referenced_vars_in (stmt);
1347 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1349 return ret;
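/* Roughly, for a this-adjusting thunk with both offsets the GIMPLE
   built above computes (types simplified; illustration only, not the
   exact trees):

       ptr = ptr + fixed_offset;
       if (virtual_offset)
         ptr = ptr + *(ptrdiff_t *) (*(char **) ptr + virtual_offset);

   i.e. the vptr is loaded from offset zero of the object and the vcall
   slot at VIRTUAL_OFFSET inside the vtable supplies the extra
   displacement.  For a result-adjusting thunk the fixed offset is
   instead added after the virtual adjustment.  */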
1352 /* Produce assembler for thunk NODE. */
1354 static void
1355 assemble_thunk (struct cgraph_node *node)
1357 bool this_adjusting = node->thunk.this_adjusting;
1358 HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
1359 HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
1360 tree virtual_offset = NULL;
1361 tree alias = node->thunk.alias;
1362 tree thunk_fndecl = node->decl;
1363 tree a = DECL_ARGUMENTS (thunk_fndecl);
1365 current_function_decl = thunk_fndecl;
1367 if (this_adjusting
1368 && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1369 virtual_value, alias))
1371 const char *fnname;
1372 tree fn_block;
1374 DECL_RESULT (thunk_fndecl)
1375 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1376 RESULT_DECL, 0, integer_type_node);
1377 fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
1379 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1380 create one. */
1381 fn_block = make_node (BLOCK);
1382 BLOCK_VARS (fn_block) = a;
1383 DECL_INITIAL (thunk_fndecl) = fn_block;
1384 init_function_start (thunk_fndecl);
1385 cfun->is_thunk = 1;
1386 assemble_start_function (thunk_fndecl, fnname);
1388 targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1389 fixed_offset, virtual_value, alias);
1391 assemble_end_function (thunk_fndecl, fnname);
1392 init_insn_lengths ();
1393 free_after_compilation (cfun);
1394 set_cfun (NULL);
1395 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1397 else
1399 tree restype;
1400 basic_block bb, then_bb, else_bb, return_bb;
1401 gimple_stmt_iterator bsi;
1402 int nargs = 0;
1403 tree arg;
1404 int i;
1405 tree resdecl;
1406 tree restmp = NULL;
1407 VEC(tree, heap) *vargs;
1409 gimple call;
1410 gimple ret;
1412 DECL_IGNORED_P (thunk_fndecl) = 1;
1413 bitmap_obstack_initialize (NULL);
1415 if (node->thunk.virtual_offset_p)
1416 virtual_offset = size_int (virtual_value);
1418 /* Build the return declaration for the function. */
1419 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1420 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1422 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1423 DECL_ARTIFICIAL (resdecl) = 1;
1424 DECL_IGNORED_P (resdecl) = 1;
1425 DECL_RESULT (thunk_fndecl) = resdecl;
1427 else
1428 resdecl = DECL_RESULT (thunk_fndecl);
1430 bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl);
1432 bsi = gsi_start_bb (bb);
1434 /* Build call to the function being thunked. */
1435 if (!VOID_TYPE_P (restype))
1437 if (!is_gimple_reg_type (restype))
1439 restmp = resdecl;
1440 cfun->local_decls = tree_cons (NULL_TREE, restmp, cfun->local_decls);
1441 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
1443 else
1444 restmp = create_tmp_var_raw (restype, "retval");
1447 for (arg = a; arg; arg = TREE_CHAIN (arg))
1448 nargs++;
1449 vargs = VEC_alloc (tree, heap, nargs);
1450 if (this_adjusting)
1451 VEC_quick_push (tree, vargs,
1452 thunk_adjust (&bsi,
1453 a, 1, fixed_offset,
1454 virtual_offset));
1455 else
1456 VEC_quick_push (tree, vargs, a);
1457 for (i = 1, arg = TREE_CHAIN (a); i < nargs; i++, arg = TREE_CHAIN (arg))
1458 VEC_quick_push (tree, vargs, arg);
1459 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
1460 VEC_free (tree, heap, vargs);
1461 gimple_call_set_cannot_inline (call, true);
1462 gimple_call_set_from_thunk (call, true);
1463 if (restmp)
1464 gimple_call_set_lhs (call, restmp);
1465 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
1466 mark_symbols_for_renaming (call);
1467 find_referenced_vars_in (call);
1468 update_stmt (call);
1470 if (restmp && !this_adjusting)
1472 tree true_label = NULL_TREE;
1474 if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
1476 gimple stmt;
1477 /* If the return type is a pointer, we need to
1478 protect against NULL. We know there will be an
1479 adjustment, because that's why we're emitting a
1480 thunk. */
1481 then_bb = create_basic_block (NULL, (void *) 0, bb);
1482 return_bb = create_basic_block (NULL, (void *) 0, then_bb);
1483 else_bb = create_basic_block (NULL, (void *) 0, else_bb);
1484 remove_edge (single_succ_edge (bb));
1485 true_label = gimple_block_label (then_bb);
1486 stmt = gimple_build_cond (NE_EXPR, restmp,
1487 fold_convert (TREE_TYPE (restmp),
1488 integer_zero_node),
1489 NULL_TREE, NULL_TREE);
1490 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1491 make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1492 make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1493 make_edge (return_bb, EXIT_BLOCK_PTR, 0);
1494 make_edge (then_bb, return_bb, EDGE_FALLTHRU);
1495 make_edge (else_bb, return_bb, EDGE_FALLTHRU);
1496 bsi = gsi_last_bb (then_bb);
1499 restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
1500 fixed_offset, virtual_offset);
1501 if (true_label)
1503 gimple stmt;
1504 bsi = gsi_last_bb (else_bb);
1505 stmt = gimple_build_assign (restmp, fold_convert (TREE_TYPE (restmp),
1506 integer_zero_node));
1507 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1508 bsi = gsi_last_bb (return_bb);
1511 else
1512 gimple_call_set_tail (call, true);
1514 /* Build return value. */
1515 ret = gimple_build_return (restmp);
1516 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
1518 delete_unreachable_blocks ();
1519 update_ssa (TODO_update_ssa);
1521 cgraph_remove_same_body_alias (node);
1522 /* Since we want to emit the thunk, we explicitly mark its name as
1523 referenced. */
1524 mark_decl_referenced (thunk_fndecl);
1525 cgraph_add_new_function (thunk_fndecl, true);
1526 bitmap_obstack_release (NULL);
1528 current_function_decl = NULL;
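/* Typical source that makes the front end request such a thunk
   (C++ shown purely as motivation; this file never sees it):

       struct A { virtual void f (); };
       struct B { virtual void g (); };
       struct C : A, B { void g (); };

   Calling g through a B* that points into a C object needs a
   this-adjusting thunk that shifts `this' from the B subobject back to
   the enclosing C and then transfers control to the real C::g, which
   is what the code above emits, either via the target's
   output_mi_thunk hook or as explicit GIMPLE.  */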
1531 /* Expand function specified by NODE. */
1533 static void
1534 cgraph_expand_function (struct cgraph_node *node)
1536 tree decl = node->decl;
1538 /* We ought to not compile any inline clones. */
1539 gcc_assert (!node->global.inlined_to);
1541 announce_function (decl);
1542 node->process = 0;
1544 gcc_assert (node->lowered);
1546 /* Generate RTL for the body of DECL. */
1547 tree_rest_of_compilation (decl);
1549 /* Make sure that BE didn't give up on compiling. */
1550 gcc_assert (TREE_ASM_WRITTEN (decl));
1551 current_function_decl = NULL;
1552 if (node->same_body)
1554 struct cgraph_node *alias, *next;
1555 bool saved_alias = node->alias;
1556 for (alias = node->same_body;
1557 alias && alias->next; alias = alias->next)
1559 /* Walk aliases in the order they were created; it is possible that
1560 thunks refer to the aliases made earlier. */
1561 for (; alias; alias = next)
1563 next = alias->previous;
1564 if (!alias->thunk.thunk_p)
1565 assemble_alias (alias->decl,
1566 DECL_ASSEMBLER_NAME (alias->thunk.alias));
1567 else
1568 assemble_thunk (alias);
1570 node->alias = saved_alias;
1572 gcc_assert (!cgraph_preserve_function_body_p (decl));
1573 cgraph_release_function_body (node);
1574 /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
1575 points to the dead function body. */
1576 cgraph_node_remove_callees (node);
1578 cgraph_function_flags_ready = true;
1581 /* Return true when the call on edge E is going to be inlined; set *REASON to E's current inline_failed reason. */
1583 bool
1584 cgraph_inline_p (struct cgraph_edge *e, cgraph_inline_failed_t *reason)
1586 *reason = e->inline_failed;
1587 return !e->inline_failed;
1592 /* Expand all functions that must be output.
1594 Attempt to topologically sort the nodes so a function is output when
1595 all the functions it calls are already assembled, to allow data to be
1596 propagated across the callgraph. Use a stack to get a smaller distance
1597 between a function and its callees (later we may choose to use a more
1598 sophisticated algorithm for function reordering; we will likely want
1599 to use subsections to make the output functions appear in top-down
1600 order). */
1602 static void
1603 cgraph_expand_all_functions (void)
1605 struct cgraph_node *node;
1606 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1607 int order_pos, new_order_pos = 0;
1608 int i;
1610 order_pos = cgraph_postorder (order);
1611 gcc_assert (order_pos == cgraph_n_nodes);
1613 /* The garbage collector may remove inline clones we eliminate during
1614 optimization, so we must be sure not to reference them. */
1615 for (i = 0; i < order_pos; i++)
1616 if (order[i]->process)
1617 order[new_order_pos++] = order[i];
1619 for (i = new_order_pos - 1; i >= 0; i--)
1621 node = order[i];
1622 if (node->process)
1624 gcc_assert (node->reachable);
1625 node->process = 0;
1626 cgraph_expand_function (node);
1629 cgraph_process_new_functions ();
1631 free (order);
1635 /* This is used to sort the node types by the cgraph order number. */
1637 enum cgraph_order_sort_kind
1639 ORDER_UNDEFINED = 0,
1640 ORDER_FUNCTION,
1641 ORDER_VAR,
1642 ORDER_ASM
1645 struct cgraph_order_sort
1647 enum cgraph_order_sort_kind kind;
1648 union
1650 struct cgraph_node *f;
1651 struct varpool_node *v;
1652 struct cgraph_asm_node *a;
1653 } u;
1656 /* Output all functions, variables, and asm statements in the order
1657 according to their order fields, which is the order in which they
1658 appeared in the file. This implements -fno-toplevel-reorder. In
1659 this mode we may output functions and variables which don't really
1660 need to be output. */
1662 static void
1663 cgraph_output_in_order (void)
1665 int max;
1666 size_t size;
1667 struct cgraph_order_sort *nodes;
1668 int i;
1669 struct cgraph_node *pf;
1670 struct varpool_node *pv;
1671 struct cgraph_asm_node *pa;
1673 max = cgraph_order;
1674 size = max * sizeof (struct cgraph_order_sort);
1675 nodes = (struct cgraph_order_sort *) alloca (size);
1676 memset (nodes, 0, size);
1678 varpool_analyze_pending_decls ();
1680 for (pf = cgraph_nodes; pf; pf = pf->next)
1682 if (pf->process)
1684 i = pf->order;
1685 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1686 nodes[i].kind = ORDER_FUNCTION;
1687 nodes[i].u.f = pf;
1691 for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
1693 i = pv->order;
1694 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1695 nodes[i].kind = ORDER_VAR;
1696 nodes[i].u.v = pv;
1699 for (pa = cgraph_asm_nodes; pa; pa = pa->next)
1701 i = pa->order;
1702 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1703 nodes[i].kind = ORDER_ASM;
1704 nodes[i].u.a = pa;
1707 /* In toplevel reorder mode we output all statics; mark them as needed. */
1708 for (i = 0; i < max; ++i)
1710 if (nodes[i].kind == ORDER_VAR)
1712 varpool_mark_needed_node (nodes[i].u.v);
1715 varpool_empty_needed_queue ();
1717 for (i = 0; i < max; ++i)
1719 switch (nodes[i].kind)
1721 case ORDER_FUNCTION:
1722 nodes[i].u.f->process = 0;
1723 cgraph_expand_function (nodes[i].u.f);
1724 break;
1726 case ORDER_VAR:
1727 varpool_assemble_decl (nodes[i].u.v);
1728 break;
1730 case ORDER_ASM:
1731 assemble_asm (nodes[i].u.a->asm_str);
1732 break;
1734 case ORDER_UNDEFINED:
1735 break;
1737 default:
1738 gcc_unreachable ();
1742 cgraph_asm_nodes = NULL;
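/* Illustration of the ordering preserved above: for a unit such as

       int counter;
       asm ("# boundary marker");
       static int bump (void) { return ++counter; }

   the order fields yield ORDER_VAR, ORDER_ASM, ORDER_FUNCTION in that
   sequence, and the switch above assembles them in exactly the order
   they appeared in the source file.  Example source only.  */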
1745 /* Return true when function body of DECL still needs to be kept around
1746 for later re-use. */
1747 bool
1748 cgraph_preserve_function_body_p (tree decl)
1750 struct cgraph_node *node;
1752 gcc_assert (cgraph_global_info_ready);
1753 /* Look if there is any clone around. */
1754 node = cgraph_node (decl);
1755 if (node->clones)
1756 return true;
1757 return false;
1760 static void
1761 ipa_passes (void)
1763 set_cfun (NULL);
1764 current_function_decl = NULL;
1765 gimple_register_cfg_hooks ();
1766 bitmap_obstack_initialize (NULL);
1768 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);
1770 if (!in_lto_p)
1771 execute_ipa_pass_list (all_small_ipa_passes);
1773 /* If pass_all_early_optimizations was not scheduled, the state of
1774 the cgraph will not be properly updated. Update it now. */
1775 if (cgraph_state < CGRAPH_STATE_IPA_SSA)
1776 cgraph_state = CGRAPH_STATE_IPA_SSA;
1778 if (!in_lto_p)
1780 /* Generate coverage variables and constructors. */
1781 coverage_finish ();
1783 /* Process new functions added. */
1784 set_cfun (NULL);
1785 current_function_decl = NULL;
1786 cgraph_process_new_functions ();
1788 execute_ipa_summary_passes
1789 ((struct ipa_opt_pass_d *) all_regular_ipa_passes);
1791 execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);
1793 if (!in_lto_p)
1794 ipa_write_summaries ();
1796 if (!flag_ltrans)
1797 execute_ipa_pass_list (all_regular_ipa_passes);
1798 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
1800 bitmap_obstack_release (NULL);
1804 /* Perform simple optimizations based on callgraph. */
1806 void
1807 cgraph_optimize (void)
1809 if (errorcount || sorrycount)
1810 return;
1812 #ifdef ENABLE_CHECKING
1813 verify_cgraph ();
1814 #endif
1816 /* The front end may output common variables after the unit has been finalized.
1817 It is safe to deal with them here as they are always zero-initialized. */
1818 varpool_analyze_pending_decls ();
1820 timevar_push (TV_CGRAPHOPT);
1821 if (pre_ipa_mem_report)
1823 fprintf (stderr, "Memory consumption before IPA\n");
1824 dump_memory_report (false);
1826 if (!quiet_flag)
1827 fprintf (stderr, "Performing interprocedural optimizations\n");
1828 cgraph_state = CGRAPH_STATE_IPA;
1830 /* Don't run the IPA passes if there were any error or sorry messages. */
1831 if (errorcount == 0 && sorrycount == 0)
1832 ipa_passes ();
1834 /* Do nothing else if any IPA pass found errors. */
1835 if (errorcount || sorrycount)
1837 timevar_pop (TV_CGRAPHOPT);
1838 return;
1841 /* This pass removes bodies of extern inline functions we never inlined.
1842 Do this later so other IPA passes see what is really going on. */
1843 cgraph_remove_unreachable_nodes (false, dump_file);
1844 cgraph_global_info_ready = true;
1845 if (cgraph_dump_file)
1847 fprintf (cgraph_dump_file, "Optimized ");
1848 dump_cgraph (cgraph_dump_file);
1849 dump_varpool (cgraph_dump_file);
1851 if (post_ipa_mem_report)
1853 fprintf (stderr, "Memory consumption after IPA\n");
1854 dump_memory_report (false);
1856 timevar_pop (TV_CGRAPHOPT);
1858 /* Output everything. */
1859 (*debug_hooks->assembly_start) ();
1860 if (!quiet_flag)
1861 fprintf (stderr, "Assembling functions:\n");
1862 #ifdef ENABLE_CHECKING
1863 verify_cgraph ();
1864 #endif
1866 cgraph_materialize_all_clones ();
1867 cgraph_mark_functions_to_output ();
1869 cgraph_state = CGRAPH_STATE_EXPANSION;
1870 if (!flag_toplevel_reorder)
1871 cgraph_output_in_order ();
1872 else
1874 cgraph_output_pending_asms ();
1876 cgraph_expand_all_functions ();
1877 varpool_remove_unreferenced_decls ();
1879 varpool_assemble_pending_decls ();
1881 cgraph_process_new_functions ();
1882 cgraph_state = CGRAPH_STATE_FINISHED;
1884 if (cgraph_dump_file)
1886 fprintf (cgraph_dump_file, "\nFinal ");
1887 dump_cgraph (cgraph_dump_file);
1889 #ifdef ENABLE_CHECKING
1890 verify_cgraph ();
1891 /* Double check that all inline clones are gone and that all
1892 function bodies have been released from memory. */
1893 if (!(sorrycount || errorcount))
1895 struct cgraph_node *node;
1896 bool error_found = false;
1898 for (node = cgraph_nodes; node; node = node->next)
1899 if (node->analyzed
1900 && (node->global.inlined_to
1901 || gimple_has_body_p (node->decl)))
1903 error_found = true;
1904 dump_cgraph_node (stderr, node);
1906 if (error_found)
1907 internal_error ("nodes with unreleased memory found");
1909 #endif

/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor) or 'D' (for a destructor).  BODY
   is a STATEMENT_LIST containing GENERIC statements.  PRIORITY is the
   initialization priority for this constructor or destructor.  */

void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  static int counter = 0;
  char which_buf[16];
  tree decl, name, resdecl;

  /* The priority is encoded in the constructor or destructor name.
     collect2 will sort the names and arrange that they are called at
     program startup.  */
  sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
  name = get_file_function_name (which_buf);

  decl = build_decl (input_location, FUNCTION_DECL, name,
                     build_function_type (void_type_node, void_list_node));
  current_function_decl = decl;

  resdecl = build_decl (input_location,
                        RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;
  DECL_CONTEXT (resdecl) = decl;

  allocate_struct_function (decl, false);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  TREE_PUBLIC (decl) = ! targetm.have_ctors_dtors;
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      decl_init_priority_insert (decl, priority);
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      decl_fini_priority_insert (decl, priority);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  cgraph_add_new_function (decl, false);
  cgraph_mark_needed_node (cgraph_node (decl));
  set_cfun (NULL);
}
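
/* Usage sketch (illustrative only; FN_DECL and the chosen priority are
   hypothetical, not taken from this file): a front end that needs FN_DECL
   called automatically at program startup can wrap a call to it in a
   GENERIC statement list and hand that to the helper above:

     tree body = alloc_stmt_list ();
     append_to_statement_list (build_call_expr (fn_decl, 0), &body);
     cgraph_build_static_cdtor ('I', body, DEFAULT_INIT_PRIORITY);

   Passing 'D' instead registers the body as a static destructor to be run
   at program exit.  */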

void
init_cgraph (void)
{
  cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
}

/* The edges representing the callers of the NEW_VERSION node were
   fixed by cgraph_function_versioning (); now the call_expr in their
   respective tree code should be updated to call the NEW_VERSION.  */

static void
update_call_expr (struct cgraph_node *new_version)
{
  struct cgraph_edge *e;

  gcc_assert (new_version);

  /* Update the call expr on the edges to call the new version.  */
  for (e = new_version->callers; e; e = e->next_caller)
    {
      struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
      gimple_call_set_fndecl (e->call_stmt, new_version->decl);
      maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
    }
}
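
/* Callers redirected by cgraph_copy_node_for_versioning below end up on
   NEW_VERSION->callers, so the loop above also rewrites their call
   statements when cgraph_function_versioning invokes this helper.  */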

/* Create a new cgraph node which is the new version of
   OLD_VERSION node.  REDIRECT_CALLERS holds the caller
   edges which should be redirected to point to
   NEW_VERSION.  ALL the callee edges of OLD_VERSION
   are cloned to the new version node.  Return the new
   version node.  */

static struct cgraph_node *
cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
                                 tree new_decl,
                                 VEC(cgraph_edge_p,heap) *redirect_callers)
{
  struct cgraph_node *new_version;
  struct cgraph_edge *e, *new_e;
  struct cgraph_edge *next_callee;
  unsigned i;

  gcc_assert (old_version);

  new_version = cgraph_node (new_decl);

  new_version->analyzed = true;
  new_version->local = old_version->local;
  new_version->global = old_version->global;
  new_version->rtl = old_version->rtl;
  new_version->reachable = true;
  new_version->count = old_version->count;

  /* Clone the old node callees.  Recursive calls are
     also cloned.  */
  for (e = old_version->callees; e; e = e->next_callee)
    {
      new_e = cgraph_clone_edge (e, new_version, e->call_stmt,
                                 e->lto_stmt_uid, 0, e->frequency,
                                 e->loop_nest, true);
      new_e->count = e->count;
    }
  /* Fix recursive calls.
     If OLD_VERSION has a recursive call after the
     previous edge cloning, the new version will have an edge
     pointing to the old version, which is wrong;
     redirect it to point to the new version.  */
  for (e = new_version->callees; e; e = next_callee)
    {
      next_callee = e->next_callee;
      if (e->callee == old_version)
        cgraph_redirect_edge_callee (e, new_version);

      if (!next_callee)
        break;
    }
  for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
    {
      /* Redirect calls to the old version node to point to its new
         version.  */
      cgraph_redirect_edge_callee (e, new_version);
    }

  return new_version;
}

/* Perform function versioning.
   Function versioning includes copying of the tree and
   a callgraph update (creating a new cgraph node and updating
   its callees and callers).

   REDIRECT_CALLERS varray includes the edges to be redirected
   to the new version.

   TREE_MAP is a mapping of tree nodes we want to replace with
   new ones (according to results of prior analysis).
   OLD_VERSION_NODE is the node that is versioned.
   It returns the new version's cgraph node.
   ARGS_TO_SKIP lists arguments to be omitted from the new version.  */

struct cgraph_node *
cgraph_function_versioning (struct cgraph_node *old_version_node,
                            VEC(cgraph_edge_p,heap) *redirect_callers,
                            VEC (ipa_replace_map_p,gc)* tree_map,
                            bitmap args_to_skip)
{
  tree old_decl = old_version_node->decl;
  struct cgraph_node *new_version_node = NULL;
  tree new_decl;

  if (!tree_versionable_function_p (old_decl))
    return NULL;

  /* Make a new FUNCTION_DECL tree node for the
     new version.  */
  if (!args_to_skip)
    new_decl = copy_node (old_decl);
  else
    new_decl = build_function_decl_skip_args (old_decl, args_to_skip);

  /* Create the new version's call-graph node
     and update the edges of the new node.  */
  new_version_node =
    cgraph_copy_node_for_versioning (old_version_node, new_decl,
                                     redirect_callers);

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip);

  /* Update the new version's properties.
     Make the new version visible only within this translation unit.  Make
     sure it is not weak either.
     ??? We cannot use COMDAT linkage because there is no
     ABI support for this.  */
  cgraph_make_decl_local (new_version_node->decl);
  DECL_VIRTUAL_P (new_version_node->decl) = 0;
  new_version_node->local.externally_visible = 0;
  new_version_node->local.local = 1;
  new_version_node->lowered = true;

  /* Update the call_expr on the edges to call the new version node.  */
  update_call_expr (new_version_node);

  cgraph_call_function_insertion_hooks (new_version_node);
  return new_version_node;
}
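
/* Usage sketch (illustrative only; NODE, PARM and VALUE are hypothetical
   names, not part of this file): an IPA pass that wants a private version of
   NODE in which parameter PARM is replaced by the constant VALUE could build
   a replace map and redirect all existing callers to the new version:

     VEC (cgraph_edge_p, heap) *callers = NULL;
     VEC (ipa_replace_map_p, gc) *tree_map = NULL;
     struct ipa_replace_map *map = GGC_NEW (struct ipa_replace_map);
     struct cgraph_edge *e;
     struct cgraph_node *clone;

     map->old_tree = parm;
     map->new_tree = value;
     map->replace_p = true;
     map->ref_p = false;
     VEC_safe_push (ipa_replace_map_p, gc, tree_map, map);
     for (e = node->callers; e; e = e->next_caller)
       VEC_safe_push (cgraph_edge_p, heap, callers, e);
     clone = cgraph_function_versioning (node, callers, tree_map, NULL);
     VEC_free (cgraph_edge_p, heap, callers);

   Passing a non-NULL ARGS_TO_SKIP bitmap instead (or in addition) drops the
   corresponding parameters from the new version's declaration.  */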

/* Produce separate function body for inline clones so the offline copy can be
   modified without affecting them.  */

struct cgraph_node *
save_inline_function_body (struct cgraph_node *node)
{
  struct cgraph_node *first_clone, *n;

  gcc_assert (node == cgraph_node (node->decl));

  cgraph_lower_function (node);

  first_clone = node->clones;

  first_clone->decl = copy_node (node->decl);
  cgraph_insert_node_to_hashtable (first_clone);
  gcc_assert (first_clone == cgraph_node (first_clone->decl));
  if (first_clone->next_sibling_clone)
    {
      for (n = first_clone->next_sibling_clone; n->next_sibling_clone;
           n = n->next_sibling_clone)
        n->clone_of = first_clone;
      n->clone_of = first_clone;
      n->next_sibling_clone = first_clone->clones;
      if (first_clone->clones)
        first_clone->clones->prev_sibling_clone = n;
      first_clone->clones = first_clone->next_sibling_clone;
      first_clone->next_sibling_clone->prev_sibling_clone = NULL;
      first_clone->next_sibling_clone = NULL;
      gcc_assert (!first_clone->prev_sibling_clone);
    }
  first_clone->clone_of = NULL;
  node->clones = NULL;

  if (first_clone->clones)
    for (n = first_clone->clones; n != first_clone;)
      {
        gcc_assert (n->decl == node->decl);
        n->decl = first_clone->decl;
        if (n->clones)
          n = n->clones;
        else if (n->next_sibling_clone)
          n = n->next_sibling_clone;
        else
          {
            while (n != first_clone && !n->next_sibling_clone)
              n = n->clone_of;
            if (n != first_clone)
              n = n->next_sibling_clone;
          }
      }

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->decl, first_clone->decl, NULL, true, NULL);

  DECL_EXTERNAL (first_clone->decl) = 0;
  DECL_COMDAT_GROUP (first_clone->decl) = NULL_TREE;
  TREE_PUBLIC (first_clone->decl) = 0;
  DECL_COMDAT (first_clone->decl) = 0;
  VEC_free (ipa_opt_pass, heap,
            first_clone->ipa_transforms_to_apply);
  first_clone->ipa_transforms_to_apply = NULL;

#ifdef ENABLE_CHECKING
  verify_cgraph_node (first_clone);
#endif
  return first_clone;
}
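
/* After save_inline_function_body the first clone owns a private copy of the
   body and becomes the clone_of parent of the remaining inline clones, so
   later passes may rewrite or expand the offline copy in NODE without
   corrupting the bodies still referenced by those clones.  */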

/* Given virtual clone, turn it into actual clone.  */

static void
cgraph_materialize_clone (struct cgraph_node *node)
{
  bitmap_obstack_initialize (NULL);
  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->clone_of->decl, node->decl,
                            node->clone.tree_map, true,
                            node->clone.args_to_skip);
  if (cgraph_dump_file)
    {
      dump_function_to_file (node->clone_of->decl, cgraph_dump_file, dump_flags);
      dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
    }

  /* Function is no longer a clone.  */
  if (node->next_sibling_clone)
    node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
  if (node->prev_sibling_clone)
    node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
  else
    node->clone_of->clones = node->next_sibling_clone;
  node->next_sibling_clone = NULL;
  node->prev_sibling_clone = NULL;
  if (!node->clone_of->analyzed && !node->clone_of->clones)
    cgraph_remove_node (node->clone_of);
  node->clone_of = NULL;
  bitmap_obstack_release (NULL);
}
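
/* A "virtual" clone above is a cgraph node that still shares the gimple body
   of its clone_of origin; materializing it copies that body through
   tree_function_versioning, applying the recorded tree_map replacements and
   dropping the args_to_skip parameters, after which the node is unlinked
   from the clone tree.  */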

/* Once all functions from compilation unit are in memory, produce all clones
   and update all calls.
   We might also do this on demand if we don't want to bring all functions to
   memory prior to compilation, but the current WHOPR implementation does
   bring them in, and it is a bit easier to keep everything right in this
   order.  */

void
cgraph_materialize_all_clones (void)
{
  struct cgraph_node *node;
  bool stabilized = false;

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "Materializing clones\n");
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* We could also use topological order, but the number of iterations should
     be bounded by the number of IPA passes, since a single IPA pass is
     probably not going to create clones of clones it created itself.  */
  while (!stabilized)
    {
      stabilized = true;
      for (node = cgraph_nodes; node; node = node->next)
        {
          if (node->clone_of && node->decl != node->clone_of->decl
              && !gimple_has_body_p (node->decl))
            {
              if (gimple_has_body_p (node->clone_of->decl))
                {
                  if (cgraph_dump_file)
                    {
                      fprintf (cgraph_dump_file, "cloning %s to %s\n",
                               cgraph_node_name (node->clone_of),
                               cgraph_node_name (node));
                      if (node->clone.tree_map)
                        {
                          unsigned int i;
                          fprintf (cgraph_dump_file, "   replace map: ");
                          for (i = 0; i < VEC_length (ipa_replace_map_p,
                                                      node->clone.tree_map);
                               i++)
                            {
                              struct ipa_replace_map *replace_info;
                              replace_info = VEC_index (ipa_replace_map_p,
                                                        node->clone.tree_map,
                                                        i);
                              print_generic_expr (cgraph_dump_file, replace_info->old_tree, 0);
                              fprintf (cgraph_dump_file, " -> ");
                              print_generic_expr (cgraph_dump_file, replace_info->new_tree, 0);
                              fprintf (cgraph_dump_file, "%s%s;",
                                       replace_info->replace_p ? "(replace)":"",
                                       replace_info->ref_p ? "(ref)":"");
                            }
                          fprintf (cgraph_dump_file, "\n");
                        }
                      if (node->clone.args_to_skip)
                        {
                          fprintf (cgraph_dump_file, "   args_to_skip: ");
                          dump_bitmap (cgraph_dump_file, node->clone.args_to_skip);
                        }
                      if (node->clone.combined_args_to_skip)
                        {
                          fprintf (cgraph_dump_file, "   combined_args_to_skip:");
                          dump_bitmap (cgraph_dump_file, node->clone.combined_args_to_skip);
                        }
                    }
                  cgraph_materialize_clone (node);
                }
              else
                stabilized = false;
            }
        }
    }
  for (node = cgraph_nodes; node; node = node->next)
    if (!node->analyzed && node->callees)
      cgraph_node_remove_callees (node);
  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "Updating call sites\n");
  for (node = cgraph_nodes; node; node = node->next)
    if (node->analyzed && gimple_has_body_p (node->decl)
        && (!node->clone_of || node->clone_of->decl != node->decl))
      {
        struct cgraph_edge *e;

        current_function_decl = node->decl;
        push_cfun (DECL_STRUCT_FUNCTION (node->decl));
        for (e = node->callees; e; e = e->next_callee)
          {
            tree decl = gimple_call_fndecl (e->call_stmt);
            /* When a function gets inlined, indirect inlining might have
               invented a new edge for an originally indirect stmt.  Since we
               are not preserving clones in the original form, we must not
               update here, because other inline clones don't need to contain
               a call to the same target.  The inliner will do the
               substitution for us later.  */
            if (decl && decl != e->callee->decl)
              {
                gimple new_stmt;
                gimple_stmt_iterator gsi;

                if (cgraph_get_node (decl) == cgraph_get_node (e->callee->decl))
                  /* Don't update a call from a same-body alias to the real
                     function.  */
                  continue;

                if (cgraph_dump_file)
                  {
                    fprintf (cgraph_dump_file, "updating call of %s in %s:",
                             cgraph_node_name (e->callee),
                             cgraph_node_name (node));
                    print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
                  }

                if (e->callee->clone.combined_args_to_skip)
                  new_stmt = gimple_call_copy_skip_args (e->call_stmt,
                                                         e->callee->clone.combined_args_to_skip);
                else
                  new_stmt = e->call_stmt;
                if (gimple_vdef (new_stmt)
                    && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
                  SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
                gimple_call_set_fndecl (new_stmt, e->callee->decl);

                gsi = gsi_for_stmt (e->call_stmt);
                gsi_replace (&gsi, new_stmt, true);

                /* Update EH information too, just in case.  */
                maybe_clean_or_replace_eh_stmt (e->call_stmt, new_stmt);

                cgraph_set_call_stmt_including_clones (node, e->call_stmt, new_stmt);

                if (cgraph_dump_file)
                  {
                    fprintf (cgraph_dump_file, "  updated to:");
                    print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
                  }
              }
          }
        pop_cfun ();
        current_function_decl = NULL;
#ifdef ENABLE_CHECKING
        verify_cgraph_node (node);
#endif
      }
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif
  cgraph_remove_unreachable_nodes (false, cgraph_dump_file);
}
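
/* gt-cgraphunit.h is generated by gengtype and provides the garbage
   collector tables for any GTY-marked roots declared in this file.  */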
#include "gt-cgraphunit.h"