Merged r158704 through r158906 into branch.
[official-gcc.git] / gcc / cgraphunit.c
blobc1729bccf206625b6801e489e992806466ea6016
1 /* Callgraph based interprocedural optimizations.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This module implements main driver of compilation process as well as
23 few basic interprocedural optimizers.
25 The main scope of this file is to act as an interface in between
26 tree based frontends and the backend (and middle end)
28 The front-end is supposed to use following functionality:
30 - cgraph_finalize_function
32 This function is called once front-end has parsed whole body of function
33 and it is certain that neither the function body nor the declaration will change.
35 (There is one exception needed for implementing GCC extern inline
36 function.)
38 - varpool_finalize_variable
40 This function has same behavior as the above but is used for static
41 variables.
43 - cgraph_finalize_compilation_unit
45 This function is called once (source level) compilation unit is finalized
46 and it will no longer change.
48 In it, the call-graph construction and local function
49 analysis takes place here. Bodies of unreachable functions are released
50 to conserve memory usage.
52 The function can be called multiple times when multiple source level
53 compilation units are combined (such as in C frontend)
55 - cgraph_optimize
57 In this unit-at-a-time compilation the intra procedural analysis takes
58 place here. In particular the static functions whose address is never
59 taken are marked as local. Backend can then use this information to
60 modify calling conventions, do better inlining or similar optimizations.
62 - cgraph_mark_needed_node
63 - varpool_mark_needed_node
65 When function or variable is referenced by some hidden way the call-graph
66 data structure must be updated accordingly by this function.
67 There should be little need to call this function and all the references
68 should be made explicit to cgraph code. At present these functions are
69 used by C++ frontend to explicitly mark the keyed methods.
71 - analyze_expr callback
73 This function is responsible for lowering tree nodes not understood by
74 generic code into understandable ones or alternatively marking
75 callgraph and varpool nodes referenced by them as needed.
77 ??? On the tree-ssa genericizing should take place here and we will avoid
78 need for these hooks (replacing them by genericizing hook)
80 Analyzing of all functions is deferred
81 to cgraph_finalize_compilation_unit and expansion into cgraph_optimize.
83 In cgraph_finalize_compilation_unit the reachable functions are
84 analyzed. During analysis the call-graph edges from reachable
85 functions are constructed and their destinations are marked as
86 reachable. References to functions and variables are discovered too
87 and variables found to be needed output to the assembly file. Via
88 mark_referenced call in assemble_variable functions referenced by
89 static variables are noticed too.
91 The intra-procedural information is produced and its existence
92 indicated by global_info_ready. Once this flag is set it is impossible
93 to change function from !reachable to reachable and thus
94 assemble_variable no longer call mark_referenced.
96 Finally the call-graph is topologically sorted and all reachable functions
97 that have not been completely inlined or are not external are output.
99 ??? It is possible that reference to function or variable is optimized
100 out. We can not deal with this nicely because topological order is not
101 suitable for it. For tree-ssa we may consider another pass doing
102 optimization and re-discovering reachable functions.
104 ??? Reorganize code so variables are output very last and only if they
105 really have been referenced by produced code, so we catch more cases
106 where reference has been optimized out. */
109 #include "config.h"
110 #include "system.h"
111 #include "coretypes.h"
112 #include "tm.h"
113 #include "tree.h"
114 #include "rtl.h"
115 #include "tree-flow.h"
116 #include "tree-inline.h"
117 #include "langhooks.h"
118 #include "pointer-set.h"
119 #include "toplev.h"
120 #include "flags.h"
121 #include "ggc.h"
122 #include "debug.h"
123 #include "target.h"
124 #include "cgraph.h"
125 #include "diagnostic.h"
126 #include "timevar.h"
127 #include "params.h"
128 #include "fibheap.h"
129 #include "intl.h"
130 #include "function.h"
131 #include "ipa-prop.h"
132 #include "gimple.h"
133 #include "tree-iterator.h"
134 #include "tree-pass.h"
135 #include "tree-dump.h"
136 #include "output.h"
137 #include "coverage.h"
138 #include "plugin.h"
140 static void cgraph_expand_all_functions (void);
141 static void cgraph_mark_functions_to_output (void);
142 static void cgraph_expand_function (struct cgraph_node *);
143 static void cgraph_output_pending_asms (void);
144 static void cgraph_analyze_function (struct cgraph_node *);
146 static FILE *cgraph_dump_file;
148 /* A vector of FUNCTION_DECLs declared as static constructors. */
149 static GTY (()) VEC(tree, gc) *static_ctors;
150 /* A vector of FUNCTION_DECLs declared as static destructors. */
151 static GTY (()) VEC(tree, gc) *static_dtors;
153 /* Used for vtable lookup in thunk adjusting. */
154 static GTY (()) tree vtable_entry_type;
156 /* When target does not have ctors and dtors, we call all constructor
157 and destructor by special initialization/destruction function
158 recognized by collect2.
160 When we are going to build this function, collect all constructors and
161 destructors and turn them into normal functions. */
163 static void
164 record_cdtor_fn (tree fndecl)
166 struct cgraph_node *node;
167 if (targetm.have_ctors_dtors
168 || (!DECL_STATIC_CONSTRUCTOR (fndecl)
169 && !DECL_STATIC_DESTRUCTOR (fndecl)))
170 return;
172 if (DECL_STATIC_CONSTRUCTOR (fndecl))
174 VEC_safe_push (tree, gc, static_ctors, fndecl);
175 DECL_STATIC_CONSTRUCTOR (fndecl) = 0;
177 if (DECL_STATIC_DESTRUCTOR (fndecl))
179 VEC_safe_push (tree, gc, static_dtors, fndecl);
180 DECL_STATIC_DESTRUCTOR (fndecl) = 0;
182 node = cgraph_node (fndecl);
183 node->local.disregard_inline_limits = 1;
184 cgraph_mark_reachable_node (node);
187 /* Define global constructors/destructor functions for the CDTORS, of
188 which they are LEN. The CDTORS are sorted by initialization
189 priority. If CTOR_P is true, these are constructors; otherwise,
190 they are destructors. */
192 static void
193 build_cdtor (bool ctor_p, tree *cdtors, size_t len)
195 size_t i;
197 i = 0;
198 while (i < len)
200 tree body;
201 tree fn;
202 priority_type priority;
204 priority = 0;
205 body = NULL_TREE;
206 /* Find the next batch of constructors/destructors with the same
207 initialization priority. */
210 priority_type p;
211 fn = cdtors[i];
212 p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
213 if (!body)
214 priority = p;
215 else if (p != priority)
216 break;
217 append_to_statement_list (build_function_call_expr (UNKNOWN_LOCATION,
218 fn, 0),
219 &body);
220 ++i;
222 while (i < len);
223 gcc_assert (body != NULL_TREE);
224 /* Generate a function to call all the function of like
225 priority. */
226 cgraph_build_static_cdtor (ctor_p ? 'I' : 'D', body, priority);
230 /* Comparison function for qsort. P1 and P2 are actually of type
231 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
232 used to determine the sort order. */
234 static int
235 compare_ctor (const void *p1, const void *p2)
237 tree f1;
238 tree f2;
239 int priority1;
240 int priority2;
242 f1 = *(const tree *)p1;
243 f2 = *(const tree *)p2;
244 priority1 = DECL_INIT_PRIORITY (f1);
245 priority2 = DECL_INIT_PRIORITY (f2);
247 if (priority1 < priority2)
248 return -1;
249 else if (priority1 > priority2)
250 return 1;
251 else
252 /* Ensure a stable sort. */
253 return (const tree *)p1 - (const tree *)p2;
256 /* Comparison function for qsort. P1 and P2 are actually of type
257 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
258 used to determine the sort order. */
260 static int
261 compare_dtor (const void *p1, const void *p2)
263 tree f1;
264 tree f2;
265 int priority1;
266 int priority2;
268 f1 = *(const tree *)p1;
269 f2 = *(const tree *)p2;
270 priority1 = DECL_FINI_PRIORITY (f1);
271 priority2 = DECL_FINI_PRIORITY (f2);
273 if (priority1 < priority2)
274 return -1;
275 else if (priority1 > priority2)
276 return 1;
277 else
278 /* Ensure a stable sort. */
279 return (const tree *)p1 - (const tree *)p2;
282 /* Generate functions to call static constructors and destructors
283 for targets that do not support .ctors/.dtors sections. These
284 functions have magic names which are detected by collect2. */
286 static void
287 cgraph_build_cdtor_fns (void)
289 if (!VEC_empty (tree, static_ctors))
291 gcc_assert (!targetm.have_ctors_dtors);
292 qsort (VEC_address (tree, static_ctors),
293 VEC_length (tree, static_ctors),
294 sizeof (tree),
295 compare_ctor);
296 build_cdtor (/*ctor_p=*/true,
297 VEC_address (tree, static_ctors),
298 VEC_length (tree, static_ctors));
299 VEC_truncate (tree, static_ctors, 0);
302 if (!VEC_empty (tree, static_dtors))
304 gcc_assert (!targetm.have_ctors_dtors);
305 qsort (VEC_address (tree, static_dtors),
306 VEC_length (tree, static_dtors),
307 sizeof (tree),
308 compare_dtor);
309 build_cdtor (/*ctor_p=*/false,
310 VEC_address (tree, static_dtors),
311 VEC_length (tree, static_dtors));
312 VEC_truncate (tree, static_dtors, 0);
316 /* Determine if function DECL is needed. That is, visible to something
317 either outside this translation unit, something magic in the system
318 configury. */
320 bool
321 cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
323 /* If the user told us it is used, then it must be so. */
324 if (node->local.externally_visible)
325 return true;
327 /* ??? If the assembler name is set by hand, it is possible to assemble
328 the name later after finalizing the function and the fact is noticed
329 in assemble_name then. This is arguably a bug. */
330 if (DECL_ASSEMBLER_NAME_SET_P (decl)
331 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
332 return true;
334 /* With -fkeep-inline-functions we are keeping all inline functions except
335 for extern inline ones. */
336 if (flag_keep_inline_functions
337 && DECL_DECLARED_INLINE_P (decl)
338 && !DECL_EXTERNAL (decl)
339 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl)))
340 return true;
342 /* If we decided it was needed before, but at the time we didn't have
343 the body of the function available, then it's still needed. We have
344 to go back and re-check its dependencies now. */
345 if (node->needed)
346 return true;
348 /* Externally visible functions must be output. The exception is
349 COMDAT functions that must be output only when they are needed.
351 When not optimizing, also output the static functions. (see
352 PR24561), but don't do so for always_inline functions, functions
353 declared inline and nested functions. These was optimized out
354 in the original implementation and it is unclear whether we want
355 to change the behavior here. */
356 if (((TREE_PUBLIC (decl)
357 || (!optimize && !node->local.disregard_inline_limits
358 && !DECL_DECLARED_INLINE_P (decl)
359 && !node->origin))
360 && !flag_whole_program
361 && !flag_lto
362 && !flag_whopr)
363 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
364 return true;
366 /* Constructors and destructors are reachable from the runtime by
367 some mechanism. */
368 if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
369 return true;
371 return false;
374 /* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
375 functions into callgraph in a way so they look like ordinary reachable
376 functions inserted into callgraph already at construction time. */
378 bool
379 cgraph_process_new_functions (void)
381 bool output = false;
382 tree fndecl;
383 struct cgraph_node *node;
385 varpool_analyze_pending_decls ();
386 /* Note that this queue may grow as its being processed, as the new
387 functions may generate new ones. */
388 while (cgraph_new_nodes)
390 node = cgraph_new_nodes;
391 fndecl = node->decl;
392 cgraph_new_nodes = cgraph_new_nodes->next_needed;
393 switch (cgraph_state)
395 case CGRAPH_STATE_CONSTRUCTION:
396 /* At construction time we just need to finalize function and move
397 it into reachable functions list. */
399 node->next_needed = NULL;
400 cgraph_finalize_function (fndecl, false);
401 cgraph_mark_reachable_node (node);
402 output = true;
403 break;
405 case CGRAPH_STATE_IPA:
406 case CGRAPH_STATE_IPA_SSA:
407 /* When IPA optimization already started, do all essential
408 transformations that has been already performed on the whole
409 cgraph but not on this function. */
411 gimple_register_cfg_hooks ();
412 if (!node->analyzed)
413 cgraph_analyze_function (node);
414 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
415 current_function_decl = fndecl;
416 compute_inline_parameters (node);
417 if ((cgraph_state == CGRAPH_STATE_IPA_SSA
418 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
419 /* When not optimizing, be sure we run early local passes anyway
420 to expand OMP. */
421 || !optimize)
422 execute_pass_list (pass_early_local_passes.pass.sub);
423 free_dominance_info (CDI_POST_DOMINATORS);
424 free_dominance_info (CDI_DOMINATORS);
425 pop_cfun ();
426 current_function_decl = NULL;
427 break;
429 case CGRAPH_STATE_EXPANSION:
430 /* Functions created during expansion shall be compiled
431 directly. */
432 node->process = 0;
433 cgraph_expand_function (node);
434 break;
436 default:
437 gcc_unreachable ();
438 break;
440 cgraph_call_function_insertion_hooks (node);
441 varpool_analyze_pending_decls ();
443 return output;
446 /* As an GCC extension we allow redefinition of the function. The
447 semantics when both copies of bodies differ is not well defined.
448 We replace the old body with new body so in unit at a time mode
449 we always use new body, while in normal mode we may end up with
450 old body inlined into some functions and new body expanded and
451 inlined in others.
453 ??? It may make more sense to use one body for inlining and other
454 body for expanding the function but this is difficult to do. */
456 static void
457 cgraph_reset_node (struct cgraph_node *node)
459 /* If node->process is set, then we have already begun whole-unit analysis.
460 This is *not* testing for whether we've already emitted the function.
461 That case can be sort-of legitimately seen with real function redefinition
462 errors. I would argue that the front end should never present us with
463 such a case, but don't enforce that for now. */
464 gcc_assert (!node->process);
466 /* Reset our data structures so we can analyze the function again. */
467 memset (&node->local, 0, sizeof (node->local));
468 memset (&node->global, 0, sizeof (node->global));
469 memset (&node->rtl, 0, sizeof (node->rtl));
470 node->analyzed = false;
471 node->local.redefined_extern_inline = true;
472 node->local.finalized = false;
474 cgraph_node_remove_callees (node);
476 /* We may need to re-queue the node for assembling in case
477 we already proceeded it and ignored as not needed or got
478 a re-declaration in IMA mode. */
479 if (node->reachable)
481 struct cgraph_node *n;
483 for (n = cgraph_nodes_queue; n; n = n->next_needed)
484 if (n == node)
485 break;
486 if (!n)
487 node->reachable = 0;
491 static void
492 cgraph_lower_function (struct cgraph_node *node)
494 if (node->lowered)
495 return;
497 if (node->nested)
498 lower_nested_functions (node->decl);
499 gcc_assert (!node->nested);
501 tree_lowering_passes (node->decl);
502 node->lowered = true;
505 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
506 logic in effect. If NESTED is true, then our caller cannot stand to have
507 the garbage collector run at the moment. We would need to either create
508 a new GC context, or just not compile right now. */
510 void
511 cgraph_finalize_function (tree decl, bool nested)
513 struct cgraph_node *node = cgraph_node (decl);
515 if (node->local.finalized)
516 cgraph_reset_node (node);
518 node->pid = cgraph_max_pid ++;
519 notice_global_symbol (decl);
520 node->local.finalized = true;
521 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
522 node->finalized_by_frontend = true;
523 record_cdtor_fn (node->decl);
525 if (cgraph_decide_is_function_needed (node, decl))
526 cgraph_mark_needed_node (node);
528 /* Since we reclaim unreachable nodes at the end of every language
529 level unit, we need to be conservative about possible entry points
530 there. */
531 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)))
532 cgraph_mark_reachable_node (node);
534 /* If we've not yet emitted decl, tell the debug info about it. */
535 if (!TREE_ASM_WRITTEN (decl))
536 (*debug_hooks->deferred_inline_function) (decl);
538 /* Parameters in IFUNC function should never be used. */
539 if (DECL_IS_IFUNC (decl))
541 tree parm;
543 for (parm = DECL_ARGUMENTS (decl);
544 parm; parm = TREE_CHAIN (parm))
546 if (TREE_USED (parm)
547 && TREE_CODE (parm) == PARM_DECL
548 && DECL_NAME (parm))
549 error ("parameter %q+D used in indirect function %q+F",
550 parm, decl);
554 /* Possibly warn about unused parameters. */
555 else if (warn_unused_parameter)
556 do_warn_unused_parameter (decl);
558 if (!nested)
559 ggc_collect ();
562 /* C99 extern inline keywords allow changing of declaration after function
563 has been finalized. We need to re-decide if we want to mark the function as
564 needed then. */
566 void
567 cgraph_mark_if_needed (tree decl)
569 struct cgraph_node *node = cgraph_node (decl);
570 if (node->local.finalized && cgraph_decide_is_function_needed (node, decl))
571 cgraph_mark_needed_node (node);
574 /* Return TRUE if NODE2 is equivalent to NODE or its clone. */
575 static bool
576 clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
578 while (node != node2 && node2)
579 node2 = node2->clone_of;
580 return node2 != NULL;
583 /* Verify cgraph nodes of given cgraph node. */
584 void
585 verify_cgraph_node (struct cgraph_node *node)
587 struct cgraph_edge *e;
588 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
589 struct function *saved_cfun = cfun;
590 basic_block this_block;
591 gimple_stmt_iterator gsi;
592 bool error_found = false;
594 if (errorcount || sorrycount)
595 return;
597 timevar_push (TV_CGRAPH_VERIFY);
598 /* debug_generic_stmt needs correct cfun */
599 set_cfun (this_cfun);
600 for (e = node->callees; e; e = e->next_callee)
601 if (e->aux)
603 error ("aux field set for edge %s->%s",
604 identifier_to_locale (cgraph_node_name (e->caller)),
605 identifier_to_locale (cgraph_node_name (e->callee)));
606 error_found = true;
608 if (node->count < 0)
610 error ("Execution count is negative");
611 error_found = true;
613 if (node->global.inlined_to && node->local.externally_visible)
615 error ("Externally visible inline clone");
616 error_found = true;
618 if (node->global.inlined_to && node->address_taken)
620 error ("Inline clone with address taken");
621 error_found = true;
623 if (node->global.inlined_to && node->needed)
625 error ("Inline clone is needed");
626 error_found = true;
628 for (e = node->indirect_calls; e; e = e->next_callee)
630 if (e->aux)
632 error ("aux field set for indirect edge from %s",
633 identifier_to_locale (cgraph_node_name (e->caller)));
634 error_found = true;
636 if (!e->indirect_unknown_callee
637 || !e->indirect_info)
639 error ("An indirect edge from %s is not marked as indirect or has "
640 "associated indirect_info, the corresponding statement is: ",
641 identifier_to_locale (cgraph_node_name (e->caller)));
642 debug_gimple_stmt (e->call_stmt);
643 error_found = true;
646 for (e = node->callers; e; e = e->next_caller)
648 if (e->count < 0)
650 error ("caller edge count is negative");
651 error_found = true;
653 if (e->frequency < 0)
655 error ("caller edge frequency is negative");
656 error_found = true;
658 if (e->frequency > CGRAPH_FREQ_MAX)
660 error ("caller edge frequency is too large");
661 error_found = true;
663 if (gimple_has_body_p (e->caller->decl)
664 && !e->caller->global.inlined_to
665 && (e->frequency
666 != compute_call_stmt_bb_frequency (e->caller->decl,
667 gimple_bb (e->call_stmt))))
669 error ("caller edge frequency %i does not match BB freqency %i",
670 e->frequency,
671 compute_call_stmt_bb_frequency (e->caller->decl,
672 gimple_bb (e->call_stmt)));
673 error_found = true;
675 if (!e->inline_failed)
677 if (node->global.inlined_to
678 != (e->caller->global.inlined_to
679 ? e->caller->global.inlined_to : e->caller))
681 error ("inlined_to pointer is wrong");
682 error_found = true;
684 if (node->callers->next_caller)
686 error ("multiple inline callers");
687 error_found = true;
690 else
691 if (node->global.inlined_to)
693 error ("inlined_to pointer set for noninline callers");
694 error_found = true;
697 if (!node->callers && node->global.inlined_to)
699 error ("inlined_to pointer is set but no predecessors found");
700 error_found = true;
702 if (node->global.inlined_to == node)
704 error ("inlined_to pointer refers to itself");
705 error_found = true;
708 if (!cgraph_node (node->decl))
710 error ("node not found in cgraph_hash");
711 error_found = true;
714 if (node->clone_of)
716 struct cgraph_node *n;
717 for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
718 if (n == node)
719 break;
720 if (!n)
722 error ("node has wrong clone_of");
723 error_found = true;
726 if (node->clones)
728 struct cgraph_node *n;
729 for (n = node->clones; n; n = n->next_sibling_clone)
730 if (n->clone_of != node)
731 break;
732 if (n)
734 error ("node has wrong clone list");
735 error_found = true;
738 if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
740 error ("node is in clone list but it is not clone");
741 error_found = true;
743 if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
745 error ("node has wrong prev_clone pointer");
746 error_found = true;
748 if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
750 error ("double linked list of clones corrupted");
751 error_found = true;
753 if (node->same_comdat_group)
755 struct cgraph_node *n = node->same_comdat_group;
757 if (!DECL_ONE_ONLY (node->decl))
759 error ("non-DECL_ONE_ONLY node in a same_comdat_group list");
760 error_found = true;
762 if (n == node)
764 error ("node is alone in a comdat group");
765 error_found = true;
769 if (!n->same_comdat_group)
771 error ("same_comdat_group is not a circular list");
772 error_found = true;
773 break;
775 n = n->same_comdat_group;
777 while (n != node);
780 if (node->analyzed && gimple_has_body_p (node->decl)
781 && !TREE_ASM_WRITTEN (node->decl)
782 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
783 && !flag_wpa)
785 if (this_cfun->cfg)
787 /* The nodes we're interested in are never shared, so walk
788 the tree ignoring duplicates. */
789 struct pointer_set_t *visited_nodes = pointer_set_create ();
790 /* Reach the trees by walking over the CFG, and note the
791 enclosing basic-blocks in the call edges. */
792 FOR_EACH_BB_FN (this_block, this_cfun)
793 for (gsi = gsi_start_bb (this_block);
794 !gsi_end_p (gsi);
795 gsi_next (&gsi))
797 gimple stmt = gsi_stmt (gsi);
798 if (is_gimple_call (stmt))
800 struct cgraph_edge *e = cgraph_edge (node, stmt);
801 tree decl = gimple_call_fndecl (stmt);
802 if (e)
804 if (e->aux)
806 error ("shared call_stmt:");
807 debug_gimple_stmt (stmt);
808 error_found = true;
810 if (!e->indirect_unknown_callee)
812 if (e->callee->same_body_alias)
814 error ("edge points to same body alias:");
815 debug_tree (e->callee->decl);
816 error_found = true;
818 else if (!node->global.inlined_to
819 && !e->callee->global.inlined_to
820 && decl
821 && !clone_of_p (cgraph_node (decl),
822 e->callee))
824 error ("edge points to wrong declaration:");
825 debug_tree (e->callee->decl);
826 fprintf (stderr," Instead of:");
827 debug_tree (decl);
828 error_found = true;
831 else if (decl)
833 error ("an indirect edge with unknown callee "
834 "corresponding to a call_stmt with "
835 "a known declaration:");
836 error_found = true;
837 debug_gimple_stmt (e->call_stmt);
839 e->aux = (void *)1;
841 else if (decl)
843 error ("missing callgraph edge for call stmt:");
844 debug_gimple_stmt (stmt);
845 error_found = true;
849 pointer_set_destroy (visited_nodes);
851 else
852 /* No CFG available?! */
853 gcc_unreachable ();
855 for (e = node->callees; e; e = e->next_callee)
857 if (!e->aux)
859 error ("edge %s->%s has no corresponding call_stmt",
860 identifier_to_locale (cgraph_node_name (e->caller)),
861 identifier_to_locale (cgraph_node_name (e->callee)));
862 debug_gimple_stmt (e->call_stmt);
863 error_found = true;
865 e->aux = 0;
867 for (e = node->indirect_calls; e; e = e->next_callee)
869 if (!e->aux)
871 error ("an indirect edge from %s has no corresponding call_stmt",
872 identifier_to_locale (cgraph_node_name (e->caller)));
873 debug_gimple_stmt (e->call_stmt);
874 error_found = true;
876 e->aux = 0;
879 if (error_found)
881 dump_cgraph_node (stderr, node);
882 internal_error ("verify_cgraph_node failed");
884 set_cfun (saved_cfun);
885 timevar_pop (TV_CGRAPH_VERIFY);
888 /* Verify whole cgraph structure. */
889 void
890 verify_cgraph (void)
892 struct cgraph_node *node;
894 if (sorrycount || errorcount)
895 return;
897 for (node = cgraph_nodes; node; node = node->next)
898 verify_cgraph_node (node);
901 /* Output all asm statements we have stored up to be output. */
903 static void
904 cgraph_output_pending_asms (void)
906 struct cgraph_asm_node *can;
908 if (errorcount || sorrycount)
909 return;
911 for (can = cgraph_asm_nodes; can; can = can->next)
912 assemble_asm (can->asm_str);
913 cgraph_asm_nodes = NULL;
916 /* Analyze the function scheduled to be output. */
917 static void
918 cgraph_analyze_function (struct cgraph_node *node)
920 tree save = current_function_decl;
921 tree decl = node->decl;
923 current_function_decl = decl;
924 push_cfun (DECL_STRUCT_FUNCTION (decl));
926 assign_assembler_name_if_neeeded (node->decl);
928 /* Make sure to gimplify bodies only once. During analyzing a
929 function we lower it, which will require gimplified nested
930 functions, so we can end up here with an already gimplified
931 body. */
932 if (!gimple_body (decl))
933 gimplify_function_tree (decl);
934 dump_function (TDI_generic, decl);
936 cgraph_lower_function (node);
937 node->analyzed = true;
939 pop_cfun ();
940 current_function_decl = save;
943 /* Look for externally_visible and used attributes and mark cgraph nodes
944 accordingly.
946 We cannot mark the nodes at the point the attributes are processed (in
947 handle_*_attribute) because the copy of the declarations available at that
948 point may not be canonical. For example, in:
950 void f();
951 void f() __attribute__((used));
953 the declaration we see in handle_used_attribute will be the second
954 declaration -- but the front end will subsequently merge that declaration
955 with the original declaration and discard the second declaration.
957 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
959 void f() {}
960 void f() __attribute__((externally_visible));
962 is valid.
964 So, we walk the nodes at the end of the translation unit, applying the
965 attributes at that point. */
967 static void
968 process_function_and_variable_attributes (struct cgraph_node *first,
969 struct varpool_node *first_var)
971 struct cgraph_node *node;
972 struct varpool_node *vnode;
974 for (node = cgraph_nodes; node != first; node = node->next)
976 tree decl = node->decl;
977 if (DECL_PRESERVE_P (decl))
979 mark_decl_referenced (decl);
980 if (node->local.finalized)
981 cgraph_mark_needed_node (node);
983 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
985 if (! TREE_PUBLIC (node->decl))
986 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
987 "%<externally_visible%>"
988 " attribute have effect only on public objects");
989 else if (node->local.finalized)
990 cgraph_mark_needed_node (node);
993 for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
995 tree decl = vnode->decl;
996 if (DECL_PRESERVE_P (decl))
998 mark_decl_referenced (decl);
999 vnode->force_output = true;
1000 if (vnode->finalized)
1001 varpool_mark_needed_node (vnode);
1003 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
1005 if (! TREE_PUBLIC (vnode->decl))
1006 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
1007 "%<externally_visible%>"
1008 " attribute have effect only on public objects");
1009 else if (vnode->finalized)
1010 varpool_mark_needed_node (vnode);
1015 /* Process CGRAPH_NODES_NEEDED queue, analyze each function (and transitively
1016 each reachable functions) and build cgraph.
1017 The function can be called multiple times after inserting new nodes
1018 into beginning of queue. Just the new part of queue is re-scanned then. */
1020 static void
1021 cgraph_analyze_functions (void)
1023 /* Keep track of already processed nodes when called multiple times for
1024 intermodule optimization. */
1025 static struct cgraph_node *first_analyzed;
1026 struct cgraph_node *first_processed = first_analyzed;
1027 static struct varpool_node *first_analyzed_var;
1028 struct cgraph_node *node, *next;
1030 process_function_and_variable_attributes (first_processed,
1031 first_analyzed_var);
1032 first_processed = cgraph_nodes;
1033 first_analyzed_var = varpool_nodes;
1034 varpool_analyze_pending_decls ();
1035 if (cgraph_dump_file)
1037 fprintf (cgraph_dump_file, "Initial entry points:");
1038 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1039 if (node->needed)
1040 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1041 fprintf (cgraph_dump_file, "\n");
1043 cgraph_process_new_functions ();
1045 /* Propagate reachability flag and lower representation of all reachable
1046 functions. In the future, lowering will introduce new functions and
1047 new entry points on the way (by template instantiation and virtual
1048 method table generation for instance). */
1049 while (cgraph_nodes_queue)
1051 struct cgraph_edge *edge;
1052 tree decl = cgraph_nodes_queue->decl;
1054 node = cgraph_nodes_queue;
1055 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
1056 node->next_needed = NULL;
1058 /* ??? It is possible to create extern inline function and later using
1059 weak alias attribute to kill its body. See
1060 gcc.c-torture/compile/20011119-1.c */
1061 if (!DECL_STRUCT_FUNCTION (decl))
1063 cgraph_reset_node (node);
1064 continue;
1067 if (!node->analyzed)
1068 cgraph_analyze_function (node);
1070 for (edge = node->callees; edge; edge = edge->next_callee)
1071 if (!edge->callee->reachable)
1072 cgraph_mark_reachable_node (edge->callee);
1074 if (node->same_comdat_group)
1076 for (next = node->same_comdat_group;
1077 next != node;
1078 next = next->same_comdat_group)
1079 cgraph_mark_reachable_node (next);
1082 /* If decl is a clone of an abstract function, mark that abstract
1083 function so that we don't release its body. The DECL_INITIAL() of that
1084 abstract function declaration will be later needed to output debug info. */
1085 if (DECL_ABSTRACT_ORIGIN (decl))
1087 struct cgraph_node *origin_node = cgraph_node (DECL_ABSTRACT_ORIGIN (decl));
1088 origin_node->abstract_and_needed = true;
1091 /* We finalize local static variables during constructing callgraph
1092 edges. Process their attributes too. */
1093 process_function_and_variable_attributes (first_processed,
1094 first_analyzed_var);
1095 first_processed = cgraph_nodes;
1096 first_analyzed_var = varpool_nodes;
1097 varpool_analyze_pending_decls ();
1098 cgraph_process_new_functions ();
1101 /* Collect entry points to the unit. */
1102 if (cgraph_dump_file)
1104 fprintf (cgraph_dump_file, "Unit entry points:");
1105 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1106 if (node->needed)
1107 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1108 fprintf (cgraph_dump_file, "\n\nInitial ");
1109 dump_cgraph (cgraph_dump_file);
1112 if (cgraph_dump_file)
1113 fprintf (cgraph_dump_file, "\nReclaiming functions:");
1115 for (node = cgraph_nodes; node != first_analyzed; node = next)
1117 tree decl = node->decl;
1118 next = node->next;
1120 if (node->local.finalized && !gimple_has_body_p (decl))
1121 cgraph_reset_node (node);
1123 if (!node->reachable && gimple_has_body_p (decl))
1125 if (cgraph_dump_file)
1126 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1127 cgraph_remove_node (node);
1128 continue;
1130 else
1131 node->next_needed = NULL;
1132 gcc_assert (!node->local.finalized || gimple_has_body_p (decl));
1133 gcc_assert (node->analyzed == node->local.finalized);
1135 if (cgraph_dump_file)
1137 fprintf (cgraph_dump_file, "\n\nReclaimed ");
1138 dump_cgraph (cgraph_dump_file);
1140 first_analyzed = cgraph_nodes;
1141 ggc_collect ();
/* Analyze the whole compilation unit once it is parsed completely.

   Front-end entry point: emits deferred size functions and cdtors,
   lowers and analyzes all finalized functions (twice, so thunks
   created by the first round get lowered too), then hands control to
   cgraph_optimize which drives the rest of compilation.  */

void
cgraph_finalize_compilation_unit (void)
  timevar_push (TV_CGRAPH);

  /* Do not skip analyzing the functions if there were errors, we
     miss diagnostics for following functions otherwise.  */

  /* Emit size functions we didn't inline.  */
  finalize_size_functions ();

  /* Call functions declared with the "constructor" or "destructor"
     attribute.  */
  cgraph_build_cdtor_fns ();

  /* Mark alias targets necessary and emit diagnostics.  */
  finish_aliases_1 ();

  if (!quiet_flag)
    fprintf (stderr, "\nAnalyzing compilation unit\n");
    fflush (stderr);

  /* Gimplify and lower all functions, compute reachability and
     remove unreachable nodes.  */
  cgraph_analyze_functions ();

  /* Mark alias targets necessary and emit diagnostics.  */
  finish_aliases_1 ();

  /* Gimplify and lower thunks.  */
  cgraph_analyze_functions ();

  /* Finally drive the pass manager.  */
  cgraph_optimize ();

  timevar_pop (TV_CGRAPH);
/* Figure out what functions we want to assemble.

   Walk all cgraph nodes and set NODE->process on every function that
   must get an out-of-line copy in the object file: analyzed bodies
   that are needed, reachable through a not-inlined call edge, or
   reachable from another LTO partition.  All members of a comdat
   group are marked together so the group is emitted as a unit.  */

static void
cgraph_mark_functions_to_output (void)
  struct cgraph_node *node;
#ifdef ENABLE_CHECKING
  bool check_same_comdat_groups = false;

  for (node = cgraph_nodes; node; node = node->next)
    gcc_assert (!node->process);
#endif

  for (node = cgraph_nodes; node; node = node->next)
    tree decl = node->decl;
    struct cgraph_edge *e;

    /* PROCESS can only be set already via the comdat group marking
       below, never independently.  */
    gcc_assert (!node->process || node->same_comdat_group);
    if (node->process)
      continue;

    /* Look for at least one caller that did not inline this node;
       E is non-NULL afterwards iff such a call remains.  */
    for (e = node->callers; e; e = e->next_caller)
      if (e->inline_failed)
	break;

    /* We need to output all local functions that are used and not
       always inlined, as well as those that are reachable from
       outside the current compilation unit.  */
    if (node->analyzed
	&& !node->global.inlined_to
	&& (node->needed || node->reachable_from_other_partition
	    || (e && node->reachable))
	&& !TREE_ASM_WRITTEN (decl)
	&& !DECL_EXTERNAL (decl))
      node->process = 1;
      /* Mark the whole comdat group for output together.  */
      if (node->same_comdat_group)
	struct cgraph_node *next;
	for (next = node->same_comdat_group;
	     next != node;
	     next = next->same_comdat_group)
	  next->process = 1;
    else if (node->same_comdat_group)
#ifdef ENABLE_CHECKING
      /* Group members not selected here must end up marked via some
	 other member; verified after the loop.  */
      check_same_comdat_groups = true;
#endif
    else
      /* We should've reclaimed all functions that are not needed.  */
#ifdef ENABLE_CHECKING
      if (!node->global.inlined_to
	  && gimple_has_body_p (decl)
	  /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
	     are inside partition, we can end up not removing the body since we no longer
	     have analyzed node pointing to it.  */
	  && !node->in_other_partition
	  && !DECL_EXTERNAL (decl))
	dump_cgraph_node (stderr, node);
	internal_error ("failed to reclaim unneeded function");
#endif
      gcc_assert (node->global.inlined_to
		  || !gimple_has_body_p (decl)
		  || node->in_other_partition
		  || DECL_EXTERNAL (decl));

#ifdef ENABLE_CHECKING
  /* Every member of a comdat group that had any member marked must
     itself have been marked; anything else is a reclamation bug.  */
  if (check_same_comdat_groups)
    for (node = cgraph_nodes; node; node = node->next)
      if (node->same_comdat_group && !node->process)
	tree decl = node->decl;
	if (!node->global.inlined_to
	    && gimple_has_body_p (decl)
	    /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
	       are inside partition, we can end up not removing the body since we no longer
	       have analyzed node pointing to it.  */
	    && !node->in_other_partition
	    && !DECL_EXTERNAL (decl))
	  dump_cgraph_node (stderr, node);
	  internal_error ("failed to reclaim unneeded function");
#endif
/* DECL is FUNCTION_DECL.  Initialize datastructures so DECL is a function
   in lowered gimple form, already claiming SSA form and holding an
   empty CFG with a single body block.

   Set current_function_decl and cfun to newly constructed empty function body.
   return basic block in the function body.  */

static basic_block
init_lowered_empty_function (tree decl)
  basic_block bb;

  current_function_decl = decl;
  allocate_struct_function (decl, false);
  gimple_register_cfg_hooks ();
  init_empty_tree_cfg ();
  init_tree_ssa (cfun);
  init_ssa_operands ();
  /* Claim SSA form up front so later edits can go through update_ssa.  */
  cfun->gimple_df->in_ssa_p = true;
  DECL_INITIAL (decl) = make_node (BLOCK);

  DECL_SAVED_TREE (decl) = error_mark_node;
  /* Advertise the properties the lowering passes would normally have
     established, so the pass manager does not re-run them.  */
  cfun->curr_properties |=
    (PROP_gimple_lcf | PROP_gimple_leh | PROP_cfg | PROP_referenced_vars |
     PROP_ssa);

  /* Create BB for body of the function and connect it properly.  */
  bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
  make_edge (ENTRY_BLOCK_PTR, bb, 0);
  make_edge (bb, EXIT_BLOCK_PTR, 0);

  return bb;
/* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
   offset indicated by VIRTUAL_OFFSET, if that is
   non-null.  THIS_ADJUSTING is nonzero for a this adjusting thunk and
   zero for a result adjusting thunk.

   Emits the adjustment statements after *BSI and returns a fresh
   temporary holding the adjusted pointer.  For a this-adjusting thunk
   the fixed offset is applied before the virtual lookup; for a
   result-adjusting thunk it is applied after.  */

static tree
thunk_adjust (gimple_stmt_iterator * bsi,
	      tree ptr, bool this_adjusting,
	      HOST_WIDE_INT fixed_offset, tree virtual_offset)
  gimple stmt;
  tree ret;

  /* this-adjusting: apply the constant part first, in place.  */
  if (this_adjusting
      && fixed_offset != 0)
    stmt = gimple_build_assign (ptr,
				fold_build2_loc (input_location,
						 POINTER_PLUS_EXPR,
						 TREE_TYPE (ptr), ptr,
						 size_int (fixed_offset)));
    gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

  /* If there's a virtual offset, look up that value in the vtable and
     adjust the pointer again.  */
  if (virtual_offset)
    tree vtabletmp;
    tree vtabletmp2;
    tree vtabletmp3;
    tree offsettmp;

    /* Lazily build the pointer-to-vtable-entry type shared by all
       thunks.  */
    if (!vtable_entry_type)
      tree vfunc_type = make_node (FUNCTION_TYPE);
      TREE_TYPE (vfunc_type) = integer_type_node;
      TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
      layout_type (vfunc_type);

      vtable_entry_type = build_pointer_type (vfunc_type);

    vtabletmp =
      create_tmp_var (build_pointer_type
		      (build_pointer_type (vtable_entry_type)), "vptr");

    /* The vptr is always at offset zero in the object.  */
    stmt = gimple_build_assign (vtabletmp,
				build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
					ptr));
    gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
    mark_symbols_for_renaming (stmt);
    find_referenced_vars_in (stmt);

    /* Form the vtable address.  */
    vtabletmp2 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp)),
				 "vtableaddr");
    stmt = gimple_build_assign (vtabletmp2,
				build1 (INDIRECT_REF,
					TREE_TYPE (vtabletmp2), vtabletmp));
    gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
    mark_symbols_for_renaming (stmt);
    find_referenced_vars_in (stmt);

    /* Find the entry with the vcall offset.  */
    stmt = gimple_build_assign (vtabletmp2,
				fold_build2_loc (input_location,
						 POINTER_PLUS_EXPR,
						 TREE_TYPE (vtabletmp2),
						 vtabletmp2,
						 fold_convert (sizetype,
							       virtual_offset)));
    gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

    /* Get the offset itself.  */
    vtabletmp3 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp2)),
				 "vcalloffset");
    stmt = gimple_build_assign (vtabletmp3,
				build1 (INDIRECT_REF,
					TREE_TYPE (vtabletmp3),
					vtabletmp2));
    gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
    mark_symbols_for_renaming (stmt);
    find_referenced_vars_in (stmt);

    /* Cast to sizetype.  */
    offsettmp = create_tmp_var (sizetype, "offset");
    stmt = gimple_build_assign (offsettmp, fold_convert (sizetype, vtabletmp3));
    gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
    mark_symbols_for_renaming (stmt);
    find_referenced_vars_in (stmt);

    /* Adjust the `this' pointer.  */
    ptr = fold_build2_loc (input_location,
			   POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
			   offsettmp);

  /* Result-adjusting: apply the constant part after the virtual
     lookup, through a temporary if PTR is not a plain variable.  */
  if (!this_adjusting
      && fixed_offset != 0)
    /* Adjust the pointer by the constant.  */
    tree ptrtmp;

    if (TREE_CODE (ptr) == VAR_DECL)
      ptrtmp = ptr;
    else
      ptrtmp = create_tmp_var (TREE_TYPE (ptr), "ptr");
      stmt = gimple_build_assign (ptrtmp, ptr);
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);
    ptr = fold_build2_loc (input_location,
			   POINTER_PLUS_EXPR, TREE_TYPE (ptrtmp), ptrtmp,
			   size_int (fixed_offset));

  /* Emit the statement and gimplify the adjustment expression.  */
  ret = create_tmp_var (TREE_TYPE (ptr), "adjusted_this");
  stmt = gimple_build_assign (ret, ptr);
  mark_symbols_for_renaming (stmt);
  find_referenced_vars_in (stmt);
  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

  return ret;
/* Produce assembler for thunk NODE.

   Two strategies: if the target can emit this-adjusting thunks
   directly (can_output_mi_thunk), emit raw assembly via the target
   hook.  Otherwise synthesize a real GIMPLE body that adjusts `this'
   (and, for covariant-return thunks, the returned pointer) and calls
   the thunked-to function NODE->thunk.alias.  */

static void
assemble_thunk (struct cgraph_node *node)
  bool this_adjusting = node->thunk.this_adjusting;
  HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
  HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
  tree virtual_offset = NULL;
  tree alias = node->thunk.alias;
  tree thunk_fndecl = node->decl;
  tree a = DECL_ARGUMENTS (thunk_fndecl);

  current_function_decl = thunk_fndecl;

  /* Fast path: target emits the thunk as assembly directly.  */
  if (this_adjusting
      && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
					      virtual_value, alias))
    const char *fnname;
    tree fn_block;

    DECL_RESULT (thunk_fndecl)
      = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
		    RESULT_DECL, 0, integer_type_node);
    fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));

    /* The back end expects DECL_INITIAL to contain a BLOCK, so we
       create one.  */
    fn_block = make_node (BLOCK);
    BLOCK_VARS (fn_block) = a;
    DECL_INITIAL (thunk_fndecl) = fn_block;
    init_function_start (thunk_fndecl);
    cfun->is_thunk = 1;
    assemble_start_function (thunk_fndecl, fnname);

    targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
				     fixed_offset, virtual_value, alias);

    assemble_end_function (thunk_fndecl, fnname);
    init_insn_lengths ();
    free_after_compilation (cfun);
    set_cfun (NULL);
    TREE_ASM_WRITTEN (thunk_fndecl) = 1;
  else
    /* Slow path: build a GIMPLE body and queue it for normal
       compilation via cgraph_add_new_function.  */
    tree restype;
    basic_block bb, then_bb, else_bb, return_bb;
    gimple_stmt_iterator bsi;
    int nargs = 0;
    tree arg;
    int i;
    tree resdecl;
    tree restmp = NULL;
    VEC(tree, heap) *vargs;

    gimple call;
    gimple ret;

    DECL_IGNORED_P (thunk_fndecl) = 1;
    bitmap_obstack_initialize (NULL);

    if (node->thunk.virtual_offset_p)
      virtual_offset = size_int (virtual_value);

    /* Build the return declaration for the function.  */
    restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
    if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
      resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
      DECL_ARTIFICIAL (resdecl) = 1;
      DECL_IGNORED_P (resdecl) = 1;
      DECL_RESULT (thunk_fndecl) = resdecl;
    else
      resdecl = DECL_RESULT (thunk_fndecl);

    bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl);

    bsi = gsi_start_bb (bb);

    /* Build call to the function being thunked.  */
    if (!VOID_TYPE_P (restype))
      /* Aggregate results go through the RESULT_DECL itself; register
	 results through a fresh temporary.  */
      if (!is_gimple_reg_type (restype))
	restmp = resdecl;
	cfun->local_decls = tree_cons (NULL_TREE, restmp, cfun->local_decls);
	BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
      else
	restmp = create_tmp_var_raw (restype, "retval");

    for (arg = a; arg; arg = TREE_CHAIN (arg))
      nargs++;
    vargs = VEC_alloc (tree, heap, nargs);
    /* The first argument (`this') is adjusted before the call when
       this-adjusting; the remaining arguments pass through.  */
    if (this_adjusting)
      VEC_quick_push (tree, vargs,
		      thunk_adjust (&bsi,
				    a, 1, fixed_offset,
				    virtual_offset));
    else
      VEC_quick_push (tree, vargs, a);
    for (i = 1, arg = TREE_CHAIN (a); i < nargs; i++, arg = TREE_CHAIN (arg))
      VEC_quick_push (tree, vargs, arg);
    call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
    VEC_free (tree, heap, vargs);
    gimple_call_set_cannot_inline (call, true);
    gimple_call_set_from_thunk (call, true);
    if (restmp)
      gimple_call_set_lhs (call, restmp);
    gsi_insert_after (&bsi, call, GSI_NEW_STMT);
    mark_symbols_for_renaming (call);
    find_referenced_vars_in (call);
    update_stmt (call);

    if (restmp && !this_adjusting)
      /* Result-adjusting (covariant return) thunk: adjust the value
	 the callee returned before returning it ourselves.  */
      tree true_label = NULL_TREE;

      if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
	gimple stmt;
	/* If the return type is a pointer, we need to
	   protect against NULL.  We know there will be an
	   adjustment, because that's why we're emitting a
	   thunk.  */
	then_bb = create_basic_block (NULL, (void *) 0, bb);
	return_bb = create_basic_block (NULL, (void *) 0, then_bb);
	else_bb = create_basic_block (NULL, (void *) 0, else_bb);
	remove_edge (single_succ_edge (bb));
	true_label = gimple_block_label (then_bb);
	stmt = gimple_build_cond (NE_EXPR, restmp,
				  fold_convert (TREE_TYPE (restmp),
						integer_zero_node),
				  NULL_TREE, NULL_TREE);
	gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
	make_edge (bb, then_bb, EDGE_TRUE_VALUE);
	make_edge (bb, else_bb, EDGE_FALSE_VALUE);
	make_edge (return_bb, EXIT_BLOCK_PTR, 0);
	make_edge (then_bb, return_bb, EDGE_FALLTHRU);
	make_edge (else_bb, return_bb, EDGE_FALLTHRU);
	bsi = gsi_last_bb (then_bb);

      restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
			     fixed_offset, virtual_offset);
      if (true_label)
	gimple stmt;
	/* NULL stays NULL: the else arm returns a zero pointer.  */
	bsi = gsi_last_bb (else_bb);
	stmt = gimple_build_assign (restmp, fold_convert (TREE_TYPE (restmp),
							  integer_zero_node));
	gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
	bsi = gsi_last_bb (return_bb);
    else
      /* this-adjusting thunks can tail-call the target.  */
      gimple_call_set_tail (call, true);

    /* Build return value.  */
    ret = gimple_build_return (restmp);
    gsi_insert_after (&bsi, ret, GSI_NEW_STMT);

    delete_unreachable_blocks ();
    update_ssa (TODO_update_ssa);

    /* The thunk now has a real body; stop treating it as a same-body
       alias of its target.  */
    cgraph_remove_same_body_alias (node);
    /* Since we want to emit the thunk, we explicitly mark its name as
       referenced.  */
    mark_decl_referenced (thunk_fndecl);
    cgraph_add_new_function (thunk_fndecl, true);
    bitmap_obstack_release (NULL);

  current_function_decl = NULL;
/* Expand function specified by NODE.

   Drives the per-function back-end pipeline via
   tree_rest_of_compilation, then emits any same-body aliases and
   thunks attached to NODE, releases the GIMPLE body and removes the
   now-dangling call edges.  */

static void
cgraph_expand_function (struct cgraph_node *node)
  tree decl = node->decl;

  /* We ought to not compile any inline clones.  */
  gcc_assert (!node->global.inlined_to);

  announce_function (decl);
  node->process = 0;

  gcc_assert (node->lowered);

  /* Generate RTL for the body of DECL.  */
  tree_rest_of_compilation (decl);

  /* Make sure that BE didn't give up on compiling.  */
  gcc_assert (TREE_ASM_WRITTEN (decl));
  current_function_decl = NULL;
  if (node->same_body)
    struct cgraph_node *alias, *next;
    bool saved_alias = node->alias;
    /* Advance ALIAS to the tail of the same-body list first ...  */
    for (alias = node->same_body;
	 alias && alias->next; alias = alias->next)
    /* Walk aliases in the order they were created; it is possible that
       thunks reffers to the aliases made earlier.  */
    for (; alias; alias = next)
      next = alias->previous;
      if (!alias->thunk.thunk_p)
	assemble_alias (alias->decl,
			DECL_ASSEMBLER_NAME (alias->thunk.alias));
      else
	assemble_thunk (alias);
    node->alias = saved_alias;
  gcc_assert (!cgraph_preserve_function_body_p (decl));
  cgraph_release_function_body (node);
  /* Eliminate all call edges.  This is important so the GIMPLE_CALL no longer
     points to the dead function body.  */
  cgraph_node_remove_callees (node);

  cgraph_function_flags_ready = true;
/* Return true when the call represented by edge E has been inlined
   (i.e. E->callee's body was substituted into E->caller).  Store the
   reason inlining did not happen into *REASON; it is only meaningful
   when the result is false.  */

bool
cgraph_inline_p (struct cgraph_edge *e, cgraph_inline_failed_t *reason)
  *reason = e->inline_failed;
  return !e->inline_failed;
/* Expand all functions that must be output.

   Attempt to topologically sort the nodes so function is output when
   all called functions are already assembled to allow data to be
   propagated across the callgraph.  Use a stack to get smaller distance
   between a function and its callees (later we may choose to use a more
   sophisticated algorithm for function reordering; we will likely want
   to use subsections to make the output functions appear in top-down
   order).  */

static void
cgraph_expand_all_functions (void)
  struct cgraph_node *node;
  struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
  int order_pos, new_order_pos = 0;
  int i;

  order_pos = cgraph_postorder (order);
  gcc_assert (order_pos == cgraph_n_nodes);

  /* Garbage collector may remove inline clones we eliminate during
     optimization.  So we must be sure to not reference them.  */
  for (i = 0; i < order_pos; i++)
    if (order[i]->process)
      order[new_order_pos++] = order[i];

  /* Walk in reverse postorder so callees are expanded before their
     callers.  */
  for (i = new_order_pos - 1; i >= 0; i--)
    node = order[i];
    if (node->process)
      gcc_assert (node->reachable)
      node->process = 0;
      cgraph_expand_function (node);
  /* Expansion can create new functions (e.g. thunks); pick them up.  */
  cgraph_process_new_functions ();

  free (order);
/* This is used to sort the node types by the cgraph order number.  */

enum cgraph_order_sort_kind
  ORDER_UNDEFINED = 0,		/* Slot unused by any node.  */
  ORDER_FUNCTION,		/* Slot holds a function (cgraph) node.  */
  ORDER_VAR,			/* Slot holds a variable (varpool) node.  */
  ORDER_ASM			/* Slot holds a toplevel asm statement.  */

/* One slot of the output-order table: a tagged union whose active
   member is selected by KIND.  */
struct cgraph_order_sort
  enum cgraph_order_sort_kind kind;
  union
    struct cgraph_node *f;	/* Active when kind == ORDER_FUNCTION.  */
    struct varpool_node *v;	/* Active when kind == ORDER_VAR.  */
    struct cgraph_asm_node *a;	/* Active when kind == ORDER_ASM.  */
  } u;
/* Output all functions, variables, and asm statements in the order
   according to their order fields, which is the order in which they
   appeared in the file.  This implements -fno-toplevel-reorder.  In
   this mode we may output functions and variables which don't really
   need to be output.

   The table NODES is indexed by the ->order number each entity was
   assigned at creation; each slot is filled with at most one node.  */

static void
cgraph_output_in_order (void)
  int max;
  struct cgraph_order_sort *nodes;
  int i;
  struct cgraph_node *pf;
  struct varpool_node *pv;
  struct cgraph_asm_node *pa;

  max = cgraph_order;
  nodes = XCNEWVEC (struct cgraph_order_sort, max);

  varpool_analyze_pending_decls ();

  /* Record functions selected for output into their order slots.  */
  for (pf = cgraph_nodes; pf; pf = pf->next)
    if (pf->process)
      i = pf->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_FUNCTION;
      nodes[i].u.f = pf;

  /* Record queued variables.  */
  for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
    i = pv->order;
    gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
    nodes[i].kind = ORDER_VAR;
    nodes[i].u.v = pv;

  /* Record toplevel asm statements.  */
  for (pa = cgraph_asm_nodes; pa; pa = pa->next)
    i = pa->order;
    gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
    nodes[i].kind = ORDER_ASM;
    nodes[i].u.a = pa;

  /* In toplevel reorder mode we output all statics; mark them as needed.  */
  for (i = 0; i < max; ++i)
    if (nodes[i].kind == ORDER_VAR)
      varpool_mark_needed_node (nodes[i].u.v);
  varpool_empty_needed_queue ();

  /* Emit everything in original declaration order.  */
  for (i = 0; i < max; ++i)
    switch (nodes[i].kind)
      case ORDER_FUNCTION:
	nodes[i].u.f->process = 0;
	cgraph_expand_function (nodes[i].u.f);
	break;

      case ORDER_VAR:
	varpool_assemble_decl (nodes[i].u.v);
	break;

      case ORDER_ASM:
	assemble_asm (nodes[i].u.a->asm_str);
	break;

      case ORDER_UNDEFINED:
	break;

      default:
	gcc_unreachable ();

  cgraph_asm_nodes = NULL;
  free (nodes);
1839 /* Return true when function body of DECL still needs to be kept around
1840 for later re-use. */
1841 bool
1842 cgraph_preserve_function_body_p (tree decl)
1844 struct cgraph_node *node;
1846 gcc_assert (cgraph_global_info_ready);
1847 /* Look if there is any clone around. */
1848 node = cgraph_node (decl);
1849 if (node->clones)
1850 return true;
1851 return false;
/* Run the interprocedural (IPA) pass queues: the small IPA passes,
   summary generation for the regular IPA and LTO-streaming passes,
   optional summary writing, and finally the regular IPA passes.
   Steps guarded by !in_lto_p are skipped when reading LTO bytecode
   (they already ran in the compile step).  */

static void
ipa_passes (void)
  set_cfun (NULL);
  current_function_decl = NULL;
  gimple_register_cfg_hooks ();
  bitmap_obstack_initialize (NULL);

  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);

  if (!in_lto_p)
    execute_ipa_pass_list (all_small_ipa_passes);

  /* If pass_all_early_optimizations was not scheduled, the state of
     the cgraph will not be properly updated.  Update it now.  */
  if (cgraph_state < CGRAPH_STATE_IPA_SSA)
    cgraph_state = CGRAPH_STATE_IPA_SSA;

  if (!in_lto_p)
    /* Generate coverage variables and constructors.  */
    coverage_finish ();

    /* Process new functions added.  */
    set_cfun (NULL);
    current_function_decl = NULL;
    cgraph_process_new_functions ();

    execute_ipa_summary_passes
      ((struct ipa_opt_pass_d *) all_regular_ipa_passes);
  execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);

  if (!in_lto_p)
    ipa_write_summaries ();

  if (!flag_ltrans)
    execute_ipa_pass_list (all_regular_ipa_passes);
  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);

  bitmap_obstack_release (NULL);
/* Perform simple optimizations based on callgraph.

   Top-level IPA driver: runs the IPA passes, removes unreachable
   nodes, then assembles every function/variable/asm selected for
   output (honoring -fno-toplevel-reorder) and finally verifies that
   all function bodies were released.  */

void
cgraph_optimize (void)
  if (errorcount || sorrycount)
    return;

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* Frontend may output common variables after the unit has been finalized.
     It is safe to deal with them here as they are always zero initialized.  */
  varpool_analyze_pending_decls ();

  timevar_push (TV_CGRAPHOPT);
  if (pre_ipa_mem_report)
    fprintf (stderr, "Memory consumption before IPA\n");
    dump_memory_report (false);
  if (!quiet_flag)
    fprintf (stderr, "Performing interprocedural optimizations\n");
  cgraph_state = CGRAPH_STATE_IPA;

  /* Don't run the IPA passes if there was any error or sorry messages.  */
  if (errorcount == 0 && sorrycount == 0)
    ipa_passes ();

  /* Do nothing else if any IPA pass found errors.  */
  if (errorcount || sorrycount)
    timevar_pop (TV_CGRAPHOPT);
    return;

  /* This pass remove bodies of extern inline functions we never inlined.
     Do this later so other IPA passes see what is really going on.  */
  cgraph_remove_unreachable_nodes (false, dump_file);
  cgraph_global_info_ready = true;
  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "Optimized ");
    dump_cgraph (cgraph_dump_file);
    dump_varpool (cgraph_dump_file);
  if (post_ipa_mem_report)
    fprintf (stderr, "Memory consumption after IPA\n");
    dump_memory_report (false);
  timevar_pop (TV_CGRAPHOPT);

  /* Output everything.  */
  (*debug_hooks->assembly_start) ();
  if (!quiet_flag)
    fprintf (stderr, "Assembling functions:\n");
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  cgraph_materialize_all_clones ();
  cgraph_mark_functions_to_output ();

  cgraph_state = CGRAPH_STATE_EXPANSION;
  if (!flag_toplevel_reorder)
    cgraph_output_in_order ();
  else
    cgraph_output_pending_asms ();

    cgraph_expand_all_functions ();
    varpool_remove_unreferenced_decls ();

    varpool_assemble_pending_decls ();
  cgraph_process_new_functions ();
  cgraph_state = CGRAPH_STATE_FINISHED;

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "\nFinal ");
    dump_cgraph (cgraph_dump_file);
#ifdef ENABLE_CHECKING
  verify_cgraph ();
  /* Double check that all inline clones are gone and that all
     function bodies have been released from memory.  */
  if (!(sorrycount || errorcount))
    struct cgraph_node *node;
    bool error_found = false;

    for (node = cgraph_nodes; node; node = node->next)
      if (node->analyzed
	  && (node->global.inlined_to
	      || gimple_has_body_p (node->decl)))
	error_found = true;
	dump_cgraph_node (stderr, node);
    if (error_found)
      internal_error ("nodes with unreleased memory found");
#endif
2007 /* Generate and emit a static constructor or destructor. WHICH must
2008 be one of 'I' (for a constructor) or 'D' (for a destructor). BODY
2009 is a STATEMENT_LIST containing GENERIC statements. PRIORITY is the
2010 initialization priority for this constructor or destructor. */
2012 void
2013 cgraph_build_static_cdtor (char which, tree body, int priority)
2015 static int counter = 0;
2016 char which_buf[16];
2017 tree decl, name, resdecl;
2019 /* The priority is encoded in the constructor or destructor name.
2020 collect2 will sort the names and arrange that they are called at
2021 program startup. */
2022 sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
2023 name = get_file_function_name (which_buf);
2025 decl = build_decl (input_location, FUNCTION_DECL, name,
2026 build_function_type (void_type_node, void_list_node));
2027 current_function_decl = decl;
2029 resdecl = build_decl (input_location,
2030 RESULT_DECL, NULL_TREE, void_type_node);
2031 DECL_ARTIFICIAL (resdecl) = 1;
2032 DECL_RESULT (decl) = resdecl;
2033 DECL_CONTEXT (resdecl) = decl;
2035 allocate_struct_function (decl, false);
2037 TREE_STATIC (decl) = 1;
2038 TREE_USED (decl) = 1;
2039 DECL_ARTIFICIAL (decl) = 1;
2040 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
2041 DECL_SAVED_TREE (decl) = body;
2042 if (!targetm.have_ctors_dtors)
2044 TREE_PUBLIC (decl) = 1;
2045 DECL_PRESERVE_P (decl) = 1;
2047 DECL_UNINLINABLE (decl) = 1;
2049 DECL_INITIAL (decl) = make_node (BLOCK);
2050 TREE_USED (DECL_INITIAL (decl)) = 1;
2052 DECL_SOURCE_LOCATION (decl) = input_location;
2053 cfun->function_end_locus = input_location;
2055 switch (which)
2057 case 'I':
2058 DECL_STATIC_CONSTRUCTOR (decl) = 1;
2059 decl_init_priority_insert (decl, priority);
2060 break;
2061 case 'D':
2062 DECL_STATIC_DESTRUCTOR (decl) = 1;
2063 decl_fini_priority_insert (decl, priority);
2064 break;
2065 default:
2066 gcc_unreachable ();
2069 gimplify_function_tree (decl);
2071 cgraph_add_new_function (decl, false);
2072 cgraph_mark_needed_node (cgraph_node (decl));
2073 set_cfun (NULL);
/* Initialize the callgraph machinery: open the cgraph dump file if
   dumping was requested.  */
void
init_cgraph (void)
  cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
2082 /* The edges representing the callers of the NEW_VERSION node were
2083 fixed by cgraph_function_versioning (), now the call_expr in their
2084 respective tree code should be updated to call the NEW_VERSION. */
2086 static void
2087 update_call_expr (struct cgraph_node *new_version)
2089 struct cgraph_edge *e;
2091 gcc_assert (new_version);
2093 /* Update the call expr on the edges to call the new version. */
2094 for (e = new_version->callers; e; e = e->next_caller)
2096 struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
2097 gimple_call_set_fndecl (e->call_stmt, new_version->decl);
2098 maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
2103 /* Create a new cgraph node which is the new version of
2104 OLD_VERSION node. REDIRECT_CALLERS holds the callers
2105 edges which should be redirected to point to
2106 NEW_VERSION. ALL the callees edges of OLD_VERSION
2107 are cloned to the new version node. Return the new
2108 version node. */
2110 static struct cgraph_node *
2111 cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
2112 tree new_decl,
2113 VEC(cgraph_edge_p,heap) *redirect_callers)
2115 struct cgraph_node *new_version;
2116 struct cgraph_edge *e;
2117 struct cgraph_edge *next_callee;
2118 unsigned i;
2120 gcc_assert (old_version);
2122 new_version = cgraph_node (new_decl);
2124 new_version->analyzed = true;
2125 new_version->local = old_version->local;
2126 new_version->global = old_version->global;
2127 new_version->rtl = new_version->rtl;
2128 new_version->reachable = true;
2129 new_version->count = old_version->count;
2131 /* Clone the old node callees. Recursive calls are
2132 also cloned. */
2133 for (e = old_version->callees;e; e=e->next_callee)
2135 cgraph_clone_edge (e, new_version, e->call_stmt,
2136 e->lto_stmt_uid, REG_BR_PROB_BASE,
2137 CGRAPH_FREQ_BASE,
2138 e->loop_nest, true);
2140 /* Fix recursive calls.
2141 If OLD_VERSION has a recursive call after the
2142 previous edge cloning, the new version will have an edge
2143 pointing to the old version, which is wrong;
2144 Redirect it to point to the new version. */
2145 for (e = new_version->callees ; e; e = next_callee)
2147 next_callee = e->next_callee;
2148 if (e->callee == old_version)
2149 cgraph_redirect_edge_callee (e, new_version);
2151 if (!next_callee)
2152 break;
2154 for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
2156 /* Redirect calls to the old version node to point to its new
2157 version. */
2158 cgraph_redirect_edge_callee (e, new_version);
2161 return new_version;
/* Perform function versioning.
   Function versioning includes copying of the tree and
   a callgraph update (creating a new cgraph node and updating
   its callees and callers).

   REDIRECT_CALLERS varray includes the edges to be redirected
   to the new version.

   TREE_MAP is a mapping of tree nodes we want to replace with
   new ones (according to results of prior analysis).
   OLD_VERSION_NODE is the node that is versioned.
   It returns the new version's cgraph node.
   ARGS_TO_SKIP lists arguments to be omitted from functions
   (presumably a bitmap of parameter indices to drop in the clone;
   TODO confirm against build_function_decl_skip_args).  */

struct cgraph_node *
cgraph_function_versioning (struct cgraph_node *old_version_node,
			    VEC(cgraph_edge_p,heap) *redirect_callers,
			    VEC (ipa_replace_map_p,gc)* tree_map,
			    bitmap args_to_skip)
  tree old_decl = old_version_node->decl;
  struct cgraph_node *new_version_node = NULL;
  tree new_decl;

  /* Bail out for functions that cannot be duplicated at all.  */
  if (!tree_versionable_function_p (old_decl))
    return NULL;

  /* Make a new FUNCTION_DECL tree node for the
     new version.  */
  if (!args_to_skip)
    new_decl = copy_node (old_decl);
  else
    new_decl = build_function_decl_skip_args (old_decl, args_to_skip);

  /* Create the new version's call-graph node.
     and update the edges of the new node.  */
  new_version_node =
    cgraph_copy_node_for_versioning (old_version_node, new_decl,
				     redirect_callers);

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip);

  /* Update the new version's properties.
     Make The new version visible only within this translation unit.  Make sure
     that is not weak also.
     ??? We cannot use COMDAT linkage because there is no
     ABI support for this.  */
  cgraph_make_decl_local (new_version_node->decl);
  DECL_VIRTUAL_P (new_version_node->decl) = 0;
  new_version_node->local.externally_visible = 0;
  new_version_node->local.local = 1;
  new_version_node->lowered = true;

  /* Update the call_expr on the edges to call the new version node.  */
  update_call_expr (new_version_node);

  /* Let IPA hooks (e.g. summaries) know about the new function.  */
  cgraph_call_function_insertion_hooks (new_version_node);
  return new_version_node;
2226 /* Produce separate function body for inline clones so the offline copy can be
2227 modified without affecting them. */
2228 struct cgraph_node *
2229 save_inline_function_body (struct cgraph_node *node)
2231 struct cgraph_node *first_clone, *n;
2233 gcc_assert (node == cgraph_node (node->decl));
2235 cgraph_lower_function (node);
2237 first_clone = node->clones;
2239 first_clone->decl = copy_node (node->decl);
2240 cgraph_insert_node_to_hashtable (first_clone);
2241 gcc_assert (first_clone == cgraph_node (first_clone->decl));
2242 if (first_clone->next_sibling_clone)
2244 for (n = first_clone->next_sibling_clone; n->next_sibling_clone; n = n->next_sibling_clone)
2245 n->clone_of = first_clone;
2246 n->clone_of = first_clone;
2247 n->next_sibling_clone = first_clone->clones;
2248 if (first_clone->clones)
2249 first_clone->clones->prev_sibling_clone = n;
2250 first_clone->clones = first_clone->next_sibling_clone;
2251 first_clone->next_sibling_clone->prev_sibling_clone = NULL;
2252 first_clone->next_sibling_clone = NULL;
2253 gcc_assert (!first_clone->prev_sibling_clone);
2255 first_clone->clone_of = NULL;
2256 node->clones = NULL;
2258 if (first_clone->clones)
2259 for (n = first_clone->clones; n != first_clone;)
2261 gcc_assert (n->decl == node->decl);
2262 n->decl = first_clone->decl;
2263 if (n->clones)
2264 n = n->clones;
2265 else if (n->next_sibling_clone)
2266 n = n->next_sibling_clone;
2267 else
2269 while (n != first_clone && !n->next_sibling_clone)
2270 n = n->clone_of;
2271 if (n != first_clone)
2272 n = n->next_sibling_clone;
2276 /* Copy the OLD_VERSION_NODE function tree to the new version. */
2277 tree_function_versioning (node->decl, first_clone->decl, NULL, true, NULL);
2279 DECL_EXTERNAL (first_clone->decl) = 0;
2280 DECL_COMDAT_GROUP (first_clone->decl) = NULL_TREE;
2281 TREE_PUBLIC (first_clone->decl) = 0;
2282 DECL_COMDAT (first_clone->decl) = 0;
2283 VEC_free (ipa_opt_pass, heap,
2284 first_clone->ipa_transforms_to_apply);
2285 first_clone->ipa_transforms_to_apply = NULL;
2287 #ifdef ENABLE_CHECKING
2288 verify_cgraph_node (first_clone);
2289 #endif
2290 return first_clone;
2293 /* Given virtual clone, turn it into actual clone. */
2294 static void
2295 cgraph_materialize_clone (struct cgraph_node *node)
2297 bitmap_obstack_initialize (NULL);
2298 /* Copy the OLD_VERSION_NODE function tree to the new version. */
2299 tree_function_versioning (node->clone_of->decl, node->decl,
2300 node->clone.tree_map, true,
2301 node->clone.args_to_skip);
2302 if (cgraph_dump_file)
2304 dump_function_to_file (node->clone_of->decl, cgraph_dump_file, dump_flags);
2305 dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
2308 /* Function is no longer clone. */
2309 if (node->next_sibling_clone)
2310 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
2311 if (node->prev_sibling_clone)
2312 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
2313 else
2314 node->clone_of->clones = node->next_sibling_clone;
2315 node->next_sibling_clone = NULL;
2316 node->prev_sibling_clone = NULL;
2317 if (!node->clone_of->analyzed && !node->clone_of->clones)
2318 cgraph_remove_node (node->clone_of);
2319 node->clone_of = NULL;
2320 bitmap_obstack_release (NULL);
2323 /* If necessary, change the function declaration in the call statement
2324 associated with E so that it corresponds to the edge callee. */
2326 gimple
2327 cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *e)
2329 tree decl = gimple_call_fndecl (e->call_stmt);
2330 gimple new_stmt;
2331 gimple_stmt_iterator gsi;
2333 if (!decl || decl == e->callee->decl
2334 /* Don't update call from same body alias to the real function. */
2335 || cgraph_get_node (decl) == cgraph_get_node (e->callee->decl))
2336 return e->call_stmt;
2338 if (cgraph_dump_file)
2340 fprintf (cgraph_dump_file, "updating call of %s/%i -> %s/%i: ",
2341 cgraph_node_name (e->caller), e->caller->uid,
2342 cgraph_node_name (e->callee), e->callee->uid);
2343 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
2346 if (e->callee->clone.combined_args_to_skip)
2347 new_stmt = gimple_call_copy_skip_args (e->call_stmt,
2348 e->callee->clone.combined_args_to_skip);
2349 else
2350 new_stmt = e->call_stmt;
2351 if (gimple_vdef (new_stmt)
2352 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
2353 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
2354 gimple_call_set_fndecl (new_stmt, e->callee->decl);
2356 gsi = gsi_for_stmt (e->call_stmt);
2357 gsi_replace (&gsi, new_stmt, true);
2358 update_stmt (new_stmt);
2360 /* Update EH information too, just in case. */
2361 maybe_clean_or_replace_eh_stmt (e->call_stmt, new_stmt);
2363 cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt, new_stmt);
2365 if (cgraph_dump_file)
2367 fprintf (cgraph_dump_file, " updated to:");
2368 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
2370 return new_stmt;
2373 /* Once all functions from compilation unit are in memory, produce all clones
2374 and update all calls. We might also do this on demand if we don't want to
2375 bring all functions to memory prior compilation, but current WHOPR
2376 implementation does that and it is is bit easier to keep everything right in
2377 this order. */
2378 void
2379 cgraph_materialize_all_clones (void)
2381 struct cgraph_node *node;
2382 bool stabilized = false;
2384 if (cgraph_dump_file)
2385 fprintf (cgraph_dump_file, "Materializing clones\n");
2386 #ifdef ENABLE_CHECKING
2387 verify_cgraph ();
2388 #endif
2390 /* We can also do topological order, but number of iterations should be
2391 bounded by number of IPA passes since single IPA pass is probably not
2392 going to create clones of clones it created itself. */
2393 while (!stabilized)
2395 stabilized = true;
2396 for (node = cgraph_nodes; node; node = node->next)
2398 if (node->clone_of && node->decl != node->clone_of->decl
2399 && !gimple_has_body_p (node->decl))
2401 if (gimple_has_body_p (node->clone_of->decl))
2403 if (cgraph_dump_file)
2405 fprintf (cgraph_dump_file, "clonning %s to %s\n",
2406 cgraph_node_name (node->clone_of),
2407 cgraph_node_name (node));
2408 if (node->clone.tree_map)
2410 unsigned int i;
2411 fprintf (cgraph_dump_file, " replace map: ");
2412 for (i = 0; i < VEC_length (ipa_replace_map_p,
2413 node->clone.tree_map);
2414 i++)
2416 struct ipa_replace_map *replace_info;
2417 replace_info = VEC_index (ipa_replace_map_p,
2418 node->clone.tree_map,
2420 print_generic_expr (cgraph_dump_file, replace_info->old_tree, 0);
2421 fprintf (cgraph_dump_file, " -> ");
2422 print_generic_expr (cgraph_dump_file, replace_info->new_tree, 0);
2423 fprintf (cgraph_dump_file, "%s%s;",
2424 replace_info->replace_p ? "(replace)":"",
2425 replace_info->ref_p ? "(ref)":"");
2427 fprintf (cgraph_dump_file, "\n");
2429 if (node->clone.args_to_skip)
2431 fprintf (cgraph_dump_file, " args_to_skip: ");
2432 dump_bitmap (cgraph_dump_file, node->clone.args_to_skip);
2434 if (node->clone.args_to_skip)
2436 fprintf (cgraph_dump_file, " combined_args_to_skip:");
2437 dump_bitmap (cgraph_dump_file, node->clone.combined_args_to_skip);
2440 cgraph_materialize_clone (node);
2442 else
2443 stabilized = false;
2447 for (node = cgraph_nodes; node; node = node->next)
2448 if (!node->analyzed && node->callees)
2449 cgraph_node_remove_callees (node);
2450 if (cgraph_dump_file)
2451 fprintf (cgraph_dump_file, "Updating call sites\n");
2452 for (node = cgraph_nodes; node; node = node->next)
2453 if (node->analyzed && !node->clone_of
2454 && gimple_has_body_p (node->decl))
2456 struct cgraph_edge *e;
2458 current_function_decl = node->decl;
2459 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
2460 for (e = node->callees; e; e = e->next_callee)
2461 cgraph_redirect_edge_call_stmt_to_callee (e);
2462 gcc_assert (!need_ssa_update_p (cfun));
2463 pop_cfun ();
2464 current_function_decl = NULL;
2465 #ifdef ENABLE_CHECKING
2466 verify_cgraph_node (node);
2467 #endif
2469 if (cgraph_dump_file)
2470 fprintf (cgraph_dump_file, "Materialization Call site updates done.\n");
2471 /* All changes to parameters have been performed. In order not to
2472 incorrectly repeat them, we simply dispose of the bitmaps that drive the
2473 changes. */
2474 for (node = cgraph_nodes; node; node = node->next)
2475 node->clone.combined_args_to_skip = NULL;
2476 #ifdef ENABLE_CHECKING
2477 verify_cgraph ();
2478 #endif
2479 cgraph_remove_unreachable_nodes (false, cgraph_dump_file);
2482 #include "gt-cgraphunit.h"