gcc/cgraphunit.c
1 /* Callgraph based interprocedural optimizations.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
/* This module implements the main driver of the compilation process as well
   as a few basic interprocedural optimizers.

   The main scope of this file is to act as an interface between the
   tree-based front ends and the backend (and middle end).

   The front end is supposed to use the following functionality (an
   illustrative calling sequence is sketched in a separate comment after
   this one):
30 - cgraph_finalize_function
      This function is called once the front end has parsed the whole body
      of the function and it is certain that neither the function body nor
      the declaration will change.

      (There is one exception needed for implementing GCC extern inline
      functions.)
38 - varpool_finalize_variable
      This function has the same behavior as the above but is used for
      static variables.
43 - cgraph_finalize_compilation_unit
      This function is called once the (source level) compilation unit is
      finalized and it will no longer change.

      The call-graph construction and local function analysis take place
      here.  Bodies of unreachable functions are released to conserve
      memory usage.

      The function can be called multiple times when multiple source level
      compilation units are combined (such as in the C front end).
55 - cgraph_optimize
      In unit-at-a-time compilation the intraprocedural analysis takes
      place here.  In particular, static functions whose address is never
      taken are marked as local.  The backend can then use this information
      to modify calling conventions, do better inlining or similar
      optimizations.
62 - cgraph_mark_needed_node
63 - varpool_mark_needed_node
      When a function or variable is referenced in some hidden way, the
      call-graph data structure must be updated accordingly by this
      function.  There should be little need to call this function and all
      the references should be made explicit to the cgraph code.  At
      present these functions are used by the C++ front end to explicitly
      mark the keyed methods.
71 - analyze_expr callback
      This function is responsible for lowering tree nodes not understood
      by generic code into understandable ones, or alternatively for
      marking the callgraph and varpool nodes referenced by the expression
      as needed.

      ??? On tree-ssa, genericizing should take place here and we would
      avoid the need for these hooks (replacing them by a genericizing
      hook).
   Analysis of all functions is deferred to cgraph_finalize_compilation_unit
   and expansion to cgraph_optimize.

   In cgraph_finalize_compilation_unit the reachable functions are analyzed.
   During analysis the call-graph edges from reachable functions are
   constructed and their destinations are marked as reachable.  References
   to functions and variables are discovered too, and variables found to be
   needed are output to the assembly file.  Via the mark_referenced call in
   assemble_variable, functions referenced by static variables are noticed
   too.

   The intraprocedural information is produced and its existence is
   indicated by global_info_ready.  Once this flag is set it is impossible
   to change a function from !reachable to reachable and thus
   assemble_variable no longer calls mark_referenced.

   Finally the call-graph is topologically sorted and all reachable
   functions that have not been completely inlined and are not external are
   output.

   ??? It is possible that a reference to a function or variable is
   optimized out.  We cannot deal with this nicely because the topological
   order is not suitable for it.  For tree-ssa we may consider another pass
   doing optimization and re-discovering reachable functions.

   ??? Reorganize the code so variables are output very last and only if
   they really have been referenced by the produced code, so we catch more
   cases where the reference has been optimized out.  */
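/* As an illustration only: a hypothetical front-end driver, in which
   my_frontend_parse and parse_next_function are made-up names rather than
   real GCC functions, would use the interface described above roughly like
   this:

     static void
     my_frontend_parse (void)
     {
       tree fndecl;

       while ((fndecl = parse_next_function ()) != NULL_TREE)
         cgraph_finalize_function (fndecl, false);

       cgraph_finalize_compilation_unit ();
     }

   Each cgraph_finalize_function call declares that the body and
   declaration of FNDECL will no longer change; the final
   cgraph_finalize_compilation_unit call builds the callgraph, analyzes the
   reachable functions and drives the pass manager, which in turn calls
   cgraph_optimize.  */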
109 #include "config.h"
110 #include "system.h"
111 #include "coretypes.h"
112 #include "tm.h"
113 #include "tree.h"
114 #include "rtl.h"
115 #include "tree-flow.h"
116 #include "tree-inline.h"
117 #include "langhooks.h"
118 #include "pointer-set.h"
119 #include "toplev.h"
120 #include "flags.h"
121 #include "ggc.h"
122 #include "debug.h"
123 #include "target.h"
124 #include "cgraph.h"
125 #include "diagnostic.h"
126 #include "tree-pretty-print.h"
127 #include "gimple-pretty-print.h"
128 #include "timevar.h"
129 #include "params.h"
130 #include "fibheap.h"
131 #include "intl.h"
132 #include "function.h"
133 #include "ipa-prop.h"
134 #include "gimple.h"
135 #include "tree-iterator.h"
136 #include "tree-pass.h"
137 #include "tree-dump.h"
138 #include "output.h"
139 #include "coverage.h"
140 #include "plugin.h"
142 static void cgraph_expand_all_functions (void);
143 static void cgraph_mark_functions_to_output (void);
144 static void cgraph_expand_function (struct cgraph_node *);
145 static void cgraph_output_pending_asms (void);
146 static void cgraph_analyze_function (struct cgraph_node *);
148 FILE *cgraph_dump_file;
150 /* A vector of FUNCTION_DECLs declared as static constructors. */
151 static GTY (()) VEC(tree, gc) *static_ctors;
152 /* A vector of FUNCTION_DECLs declared as static destructors. */
153 static GTY (()) VEC(tree, gc) *static_dtors;
155 /* Used for vtable lookup in thunk adjusting. */
156 static GTY (()) tree vtable_entry_type;
/* When the target does not have ctors and dtors, we call all constructors
   and destructors from a special initialization/destruction function
   recognized by collect2.

   When we are going to build this function, collect all constructors and
   destructors and turn them into normal functions.  */
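/* For instance (an illustrative declaration, not taken from any testcase):

     static void setup (void) __attribute__ ((constructor));

   marks SETUP with DECL_STATIC_CONSTRUCTOR.  On a target without native
   ctor/dtor support, record_cdtor_fn below queues it in STATIC_CTORS and
   clears the flag; cgraph_build_cdtor_fns later wraps all queued
   constructors of equal priority into a single function emitted by
   cgraph_build_static_cdtor with WHICH == 'I', whose name collect2
   recognizes.  */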
165 static void
166 record_cdtor_fn (tree fndecl)
168 struct cgraph_node *node;
169 if (targetm.have_ctors_dtors
170 || (!DECL_STATIC_CONSTRUCTOR (fndecl)
171 && !DECL_STATIC_DESTRUCTOR (fndecl)))
172 return;
174 if (DECL_STATIC_CONSTRUCTOR (fndecl))
176 VEC_safe_push (tree, gc, static_ctors, fndecl);
177 DECL_STATIC_CONSTRUCTOR (fndecl) = 0;
179 if (DECL_STATIC_DESTRUCTOR (fndecl))
181 VEC_safe_push (tree, gc, static_dtors, fndecl);
182 DECL_STATIC_DESTRUCTOR (fndecl) = 0;
184 node = cgraph_node (fndecl);
185 node->local.disregard_inline_limits = 1;
186 cgraph_mark_reachable_node (node);
/* Define global constructor/destructor functions for the CDTORS, of which
   there are LEN.  The CDTORS are sorted by initialization priority.  If
   CTOR_P is true, these are constructors; otherwise, they are
   destructors.  */
194 static void
195 build_cdtor (bool ctor_p, tree *cdtors, size_t len)
197 size_t i;
199 i = 0;
200 while (i < len)
202 tree body;
203 tree fn;
204 priority_type priority;
206 priority = 0;
207 body = NULL_TREE;
208 /* Find the next batch of constructors/destructors with the same
209 initialization priority. */
212 priority_type p;
213 tree call;
214 fn = cdtors[i];
215 p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
216 if (!body)
217 priority = p;
218 else if (p != priority)
219 break;
220 call = build_call_expr (fn, 0);
221 append_to_statement_list (call, &body);
222 ++i;
224 while (i < len);
225 gcc_assert (body != NULL_TREE);
      /* Generate a function to call all the functions of like
         priority.  */
228 cgraph_build_static_cdtor (ctor_p ? 'I' : 'D', body, priority);
232 /* Comparison function for qsort. P1 and P2 are actually of type
233 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
234 used to determine the sort order. */
236 static int
237 compare_ctor (const void *p1, const void *p2)
239 tree f1;
240 tree f2;
241 int priority1;
242 int priority2;
244 f1 = *(const tree *)p1;
245 f2 = *(const tree *)p2;
246 priority1 = DECL_INIT_PRIORITY (f1);
247 priority2 = DECL_INIT_PRIORITY (f2);
249 if (priority1 < priority2)
250 return -1;
251 else if (priority1 > priority2)
252 return 1;
253 else
254 /* Ensure a stable sort. */
255 return (const tree *)p1 - (const tree *)p2;
258 /* Comparison function for qsort. P1 and P2 are actually of type
259 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
260 used to determine the sort order. */
262 static int
263 compare_dtor (const void *p1, const void *p2)
265 tree f1;
266 tree f2;
267 int priority1;
268 int priority2;
270 f1 = *(const tree *)p1;
271 f2 = *(const tree *)p2;
272 priority1 = DECL_FINI_PRIORITY (f1);
273 priority2 = DECL_FINI_PRIORITY (f2);
275 if (priority1 < priority2)
276 return -1;
277 else if (priority1 > priority2)
278 return 1;
279 else
280 /* Ensure a stable sort. */
281 return (const tree *)p1 - (const tree *)p2;
284 /* Generate functions to call static constructors and destructors
285 for targets that do not support .ctors/.dtors sections. These
286 functions have magic names which are detected by collect2. */
288 static void
289 cgraph_build_cdtor_fns (void)
291 if (!VEC_empty (tree, static_ctors))
293 gcc_assert (!targetm.have_ctors_dtors);
294 qsort (VEC_address (tree, static_ctors),
295 VEC_length (tree, static_ctors),
296 sizeof (tree),
297 compare_ctor);
298 build_cdtor (/*ctor_p=*/true,
299 VEC_address (tree, static_ctors),
300 VEC_length (tree, static_ctors));
301 VEC_truncate (tree, static_ctors, 0);
304 if (!VEC_empty (tree, static_dtors))
306 gcc_assert (!targetm.have_ctors_dtors);
307 qsort (VEC_address (tree, static_dtors),
308 VEC_length (tree, static_dtors),
309 sizeof (tree),
310 compare_dtor);
311 build_cdtor (/*ctor_p=*/false,
312 VEC_address (tree, static_dtors),
313 VEC_length (tree, static_dtors));
314 VEC_truncate (tree, static_dtors, 0);
/* Determine if function DECL is needed.  That is, visible to something
   either outside this translation unit or to something magic in the system
   configury.  */
322 bool
323 cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
325 /* If the user told us it is used, then it must be so. */
326 if (node->local.externally_visible)
327 return true;
329 /* ??? If the assembler name is set by hand, it is possible to assemble
330 the name later after finalizing the function and the fact is noticed
331 in assemble_name then. This is arguably a bug. */
332 if (DECL_ASSEMBLER_NAME_SET_P (decl)
333 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
334 return true;
336 /* With -fkeep-inline-functions we are keeping all inline functions except
337 for extern inline ones. */
338 if (flag_keep_inline_functions
339 && DECL_DECLARED_INLINE_P (decl)
340 && !DECL_EXTERNAL (decl)
341 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl)))
342 return true;
344 /* If we decided it was needed before, but at the time we didn't have
345 the body of the function available, then it's still needed. We have
346 to go back and re-check its dependencies now. */
347 if (node->needed)
348 return true;
  /* Externally visible functions must be output.  The exception is COMDAT
     functions that must be output only when they are needed.

     When not optimizing, also output static functions (see PR24561), but
     don't do so for always_inline functions, functions declared inline and
     nested functions.  These were optimized out in the original
     implementation and it is unclear whether we want to change the
     behavior here.  */
358 if (((TREE_PUBLIC (decl)
359 || (!optimize
360 && !node->local.disregard_inline_limits
361 && !DECL_DECLARED_INLINE_P (decl)
362 && !(DECL_CONTEXT (decl)
363 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)))
364 && !flag_whole_program
365 && !flag_lto
366 && !flag_whopr)
367 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
368 return true;
370 /* Constructors and destructors are reachable from the runtime by
371 some mechanism. */
372 if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
373 return true;
375 return false;
/* Process CGRAPH_NEW_FUNCTIONS and perform the actions necessary to add
   these functions to the callgraph in a way that makes them look like
   ordinary reachable functions inserted into the callgraph already at
   construction time.  */
382 bool
383 cgraph_process_new_functions (void)
385 bool output = false;
386 tree fndecl;
387 struct cgraph_node *node;
389 varpool_analyze_pending_decls ();
  /* Note that this queue may grow as it is being processed, as the new
     functions may generate new ones.  */
392 while (cgraph_new_nodes)
394 node = cgraph_new_nodes;
395 fndecl = node->decl;
396 cgraph_new_nodes = cgraph_new_nodes->next_needed;
397 switch (cgraph_state)
399 case CGRAPH_STATE_CONSTRUCTION:
400 /* At construction time we just need to finalize function and move
401 it into reachable functions list. */
403 node->next_needed = NULL;
404 cgraph_finalize_function (fndecl, false);
405 cgraph_mark_reachable_node (node);
406 output = true;
407 break;
409 case CGRAPH_STATE_IPA:
410 case CGRAPH_STATE_IPA_SSA:
          /* When IPA optimization has already started, do all essential
             transformations that have already been performed on the whole
             cgraph but not on this function.  */
415 gimple_register_cfg_hooks ();
416 if (!node->analyzed)
417 cgraph_analyze_function (node);
418 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
419 current_function_decl = fndecl;
420 compute_inline_parameters (node);
421 if ((cgraph_state == CGRAPH_STATE_IPA_SSA
422 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
423 /* When not optimizing, be sure we run early local passes anyway
424 to expand OMP. */
425 || !optimize)
426 execute_pass_list (pass_early_local_passes.pass.sub);
427 free_dominance_info (CDI_POST_DOMINATORS);
428 free_dominance_info (CDI_DOMINATORS);
429 pop_cfun ();
430 current_function_decl = NULL;
431 break;
433 case CGRAPH_STATE_EXPANSION:
434 /* Functions created during expansion shall be compiled
435 directly. */
436 node->process = 0;
437 cgraph_expand_function (node);
438 break;
440 default:
441 gcc_unreachable ();
442 break;
444 cgraph_call_function_insertion_hooks (node);
445 varpool_analyze_pending_decls ();
447 return output;
/* As a GCC extension we allow redefinition of the function.  The semantics
   when both copies of the body differ is not well defined.  We replace the
   old body with the new body so that in unit-at-a-time mode we always use
   the new body, while in normal mode we may end up with the old body
   inlined into some functions and the new body expanded and inlined in
   others.

   ??? It may make more sense to use one body for inlining and the other
   body for expanding the function, but this is difficult to do.  */
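/* A minimal sketch of the situation this refers to (hypothetical code, not
   from any testcase), using the GNU "extern inline" extension:

     extern inline int twice (int x) { return x + x; }
     ...
     int twice (int x) { return x * 2; }

   When the second definition is finalized, cgraph_finalize_function calls
   cgraph_reset_node below, which throws away the analysis of the first
   body so that the new body is used from then on.  */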
460 static void
461 cgraph_reset_node (struct cgraph_node *node)
463 /* If node->process is set, then we have already begun whole-unit analysis.
464 This is *not* testing for whether we've already emitted the function.
465 That case can be sort-of legitimately seen with real function redefinition
466 errors. I would argue that the front end should never present us with
467 such a case, but don't enforce that for now. */
468 gcc_assert (!node->process);
470 /* Reset our data structures so we can analyze the function again. */
471 memset (&node->local, 0, sizeof (node->local));
472 memset (&node->global, 0, sizeof (node->global));
473 memset (&node->rtl, 0, sizeof (node->rtl));
474 node->analyzed = false;
475 node->local.redefined_extern_inline = true;
476 node->local.finalized = false;
478 cgraph_node_remove_callees (node);
  /* We may need to re-queue the node for assembling in case we already
     processed it and ignored it as not needed, or got a re-declaration in
     IMA mode.  */
483 if (node->reachable)
485 struct cgraph_node *n;
487 for (n = cgraph_nodes_queue; n; n = n->next_needed)
488 if (n == node)
489 break;
490 if (!n)
491 node->reachable = 0;
495 static void
496 cgraph_lower_function (struct cgraph_node *node)
498 if (node->lowered)
499 return;
501 if (node->nested)
502 lower_nested_functions (node->decl);
503 gcc_assert (!node->nested);
505 tree_lowering_passes (node->decl);
506 node->lowered = true;
509 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
510 logic in effect. If NESTED is true, then our caller cannot stand to have
511 the garbage collector run at the moment. We would need to either create
512 a new GC context, or just not compile right now. */
514 void
515 cgraph_finalize_function (tree decl, bool nested)
517 struct cgraph_node *node = cgraph_node (decl);
519 if (node->local.finalized)
520 cgraph_reset_node (node);
522 node->pid = cgraph_max_pid ++;
523 notice_global_symbol (decl);
524 node->local.finalized = true;
525 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
526 node->finalized_by_frontend = true;
527 record_cdtor_fn (node->decl);
529 if (cgraph_decide_is_function_needed (node, decl))
530 cgraph_mark_needed_node (node);
532 /* Since we reclaim unreachable nodes at the end of every language
533 level unit, we need to be conservative about possible entry points
534 there. */
535 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)))
536 cgraph_mark_reachable_node (node);
538 /* If we've not yet emitted decl, tell the debug info about it. */
539 if (!TREE_ASM_WRITTEN (decl))
540 (*debug_hooks->deferred_inline_function) (decl);
542 /* Possibly warn about unused parameters. */
543 if (warn_unused_parameter)
544 do_warn_unused_parameter (decl);
546 if (!nested)
547 ggc_collect ();
/* The C99 extern inline keyword allows changing a declaration after the
   function has been finalized.  We then need to re-decide whether we want
   to mark the function as needed.  */
554 void
555 cgraph_mark_if_needed (tree decl)
557 struct cgraph_node *node = cgraph_node (decl);
558 if (node->local.finalized && cgraph_decide_is_function_needed (node, decl))
559 cgraph_mark_needed_node (node);
562 #ifdef ENABLE_CHECKING
563 /* Return TRUE if NODE2 is equivalent to NODE or its clone. */
564 static bool
565 clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
567 while (node != node2 && node2)
568 node2 = node2->clone_of;
569 return node2 != NULL;
571 #endif
/* Verify the internal consistency of the given cgraph node.  */
574 DEBUG_FUNCTION void
575 verify_cgraph_node (struct cgraph_node *node)
577 struct cgraph_edge *e;
578 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
579 struct function *saved_cfun = cfun;
580 basic_block this_block;
581 gimple_stmt_iterator gsi;
582 bool error_found = false;
584 if (seen_error ())
585 return;
587 timevar_push (TV_CGRAPH_VERIFY);
588 /* debug_generic_stmt needs correct cfun */
589 set_cfun (this_cfun);
590 for (e = node->callees; e; e = e->next_callee)
591 if (e->aux)
593 error ("aux field set for edge %s->%s",
594 identifier_to_locale (cgraph_node_name (e->caller)),
595 identifier_to_locale (cgraph_node_name (e->callee)));
596 error_found = true;
598 if (node->count < 0)
600 error ("Execution count is negative");
601 error_found = true;
603 if (node->global.inlined_to && node->local.externally_visible)
605 error ("Externally visible inline clone");
606 error_found = true;
608 if (node->global.inlined_to && node->address_taken)
610 error ("Inline clone with address taken");
611 error_found = true;
613 if (node->global.inlined_to && node->needed)
615 error ("Inline clone is needed");
616 error_found = true;
618 for (e = node->indirect_calls; e; e = e->next_callee)
620 if (e->aux)
622 error ("aux field set for indirect edge from %s",
623 identifier_to_locale (cgraph_node_name (e->caller)));
624 error_found = true;
626 if (!e->indirect_unknown_callee
627 || !e->indirect_info)
629 error ("An indirect edge from %s is not marked as indirect or has "
630 "associated indirect_info, the corresponding statement is: ",
631 identifier_to_locale (cgraph_node_name (e->caller)));
632 debug_gimple_stmt (e->call_stmt);
633 error_found = true;
636 for (e = node->callers; e; e = e->next_caller)
638 if (e->count < 0)
640 error ("caller edge count is negative");
641 error_found = true;
643 if (e->frequency < 0)
645 error ("caller edge frequency is negative");
646 error_found = true;
648 if (e->frequency > CGRAPH_FREQ_MAX)
650 error ("caller edge frequency is too large");
651 error_found = true;
653 if (gimple_has_body_p (e->caller->decl)
654 && !e->caller->global.inlined_to
655 && (e->frequency
656 != compute_call_stmt_bb_frequency (e->caller->decl,
657 gimple_bb (e->call_stmt))))
659 error ("caller edge frequency %i does not match BB freqency %i",
660 e->frequency,
661 compute_call_stmt_bb_frequency (e->caller->decl,
662 gimple_bb (e->call_stmt)));
663 error_found = true;
665 if (!e->inline_failed)
667 if (node->global.inlined_to
668 != (e->caller->global.inlined_to
669 ? e->caller->global.inlined_to : e->caller))
671 error ("inlined_to pointer is wrong");
672 error_found = true;
674 if (node->callers->next_caller)
676 error ("multiple inline callers");
677 error_found = true;
680 else
681 if (node->global.inlined_to)
683 error ("inlined_to pointer set for noninline callers");
684 error_found = true;
687 if (!node->callers && node->global.inlined_to)
689 error ("inlined_to pointer is set but no predecessors found");
690 error_found = true;
692 if (node->global.inlined_to == node)
694 error ("inlined_to pointer refers to itself");
695 error_found = true;
698 if (!cgraph_node (node->decl))
700 error ("node not found in cgraph_hash");
701 error_found = true;
704 if (node->clone_of)
706 struct cgraph_node *n;
707 for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
708 if (n == node)
709 break;
710 if (!n)
712 error ("node has wrong clone_of");
713 error_found = true;
716 if (node->clones)
718 struct cgraph_node *n;
719 for (n = node->clones; n; n = n->next_sibling_clone)
720 if (n->clone_of != node)
721 break;
722 if (n)
724 error ("node has wrong clone list");
725 error_found = true;
728 if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
730 error ("node is in clone list but it is not clone");
731 error_found = true;
733 if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
735 error ("node has wrong prev_clone pointer");
736 error_found = true;
738 if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
740 error ("double linked list of clones corrupted");
741 error_found = true;
743 if (node->same_comdat_group)
745 struct cgraph_node *n = node->same_comdat_group;
747 if (!DECL_ONE_ONLY (node->decl))
749 error ("non-DECL_ONE_ONLY node in a same_comdat_group list");
750 error_found = true;
752 if (n == node)
754 error ("node is alone in a comdat group");
755 error_found = true;
759 if (!n->same_comdat_group)
761 error ("same_comdat_group is not a circular list");
762 error_found = true;
763 break;
765 n = n->same_comdat_group;
767 while (n != node);
770 if (node->analyzed && gimple_has_body_p (node->decl)
771 && !TREE_ASM_WRITTEN (node->decl)
772 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
773 && !flag_wpa)
775 if (this_cfun->cfg)
777 /* The nodes we're interested in are never shared, so walk
778 the tree ignoring duplicates. */
779 struct pointer_set_t *visited_nodes = pointer_set_create ();
780 /* Reach the trees by walking over the CFG, and note the
781 enclosing basic-blocks in the call edges. */
782 FOR_EACH_BB_FN (this_block, this_cfun)
783 for (gsi = gsi_start_bb (this_block);
784 !gsi_end_p (gsi);
785 gsi_next (&gsi))
787 gimple stmt = gsi_stmt (gsi);
788 if (is_gimple_call (stmt))
790 struct cgraph_edge *e = cgraph_edge (node, stmt);
791 tree decl = gimple_call_fndecl (stmt);
792 if (e)
794 if (e->aux)
796 error ("shared call_stmt:");
797 debug_gimple_stmt (stmt);
798 error_found = true;
800 if (!e->indirect_unknown_callee)
802 if (e->callee->same_body_alias)
804 error ("edge points to same body alias:");
805 debug_tree (e->callee->decl);
806 error_found = true;
808 #ifdef ENABLE_CHECKING
809 else if (!e->callee->global.inlined_to
810 && decl
811 && cgraph_get_node (decl)
812 && (e->callee->former_clone_of
813 != cgraph_get_node (decl)->decl)
814 && !clone_of_p (cgraph_node (decl),
815 e->callee))
817 error ("edge points to wrong declaration:");
818 debug_tree (e->callee->decl);
819 fprintf (stderr," Instead of:");
820 debug_tree (decl);
821 error_found = true;
823 #endif
825 else if (decl)
827 error ("an indirect edge with unknown callee "
828 "corresponding to a call_stmt with "
829 "a known declaration:");
830 error_found = true;
831 debug_gimple_stmt (e->call_stmt);
833 e->aux = (void *)1;
835 else if (decl)
837 error ("missing callgraph edge for call stmt:");
838 debug_gimple_stmt (stmt);
839 error_found = true;
843 pointer_set_destroy (visited_nodes);
845 else
846 /* No CFG available?! */
847 gcc_unreachable ();
849 for (e = node->callees; e; e = e->next_callee)
851 if (!e->aux)
853 error ("edge %s->%s has no corresponding call_stmt",
854 identifier_to_locale (cgraph_node_name (e->caller)),
855 identifier_to_locale (cgraph_node_name (e->callee)));
856 debug_gimple_stmt (e->call_stmt);
857 error_found = true;
859 e->aux = 0;
861 for (e = node->indirect_calls; e; e = e->next_callee)
863 if (!e->aux)
865 error ("an indirect edge from %s has no corresponding call_stmt",
866 identifier_to_locale (cgraph_node_name (e->caller)));
867 debug_gimple_stmt (e->call_stmt);
868 error_found = true;
870 e->aux = 0;
873 if (error_found)
875 dump_cgraph_node (stderr, node);
876 internal_error ("verify_cgraph_node failed");
878 set_cfun (saved_cfun);
879 timevar_pop (TV_CGRAPH_VERIFY);
882 /* Verify whole cgraph structure. */
883 DEBUG_FUNCTION void
884 verify_cgraph (void)
886 struct cgraph_node *node;
888 if (seen_error ())
889 return;
891 for (node = cgraph_nodes; node; node = node->next)
892 verify_cgraph_node (node);
895 /* Output all asm statements we have stored up to be output. */
897 static void
898 cgraph_output_pending_asms (void)
900 struct cgraph_asm_node *can;
902 if (seen_error ())
903 return;
905 for (can = cgraph_asm_nodes; can; can = can->next)
906 assemble_asm (can->asm_str);
907 cgraph_asm_nodes = NULL;
910 /* Analyze the function scheduled to be output. */
911 static void
912 cgraph_analyze_function (struct cgraph_node *node)
914 tree save = current_function_decl;
915 tree decl = node->decl;
917 current_function_decl = decl;
918 push_cfun (DECL_STRUCT_FUNCTION (decl));
920 assign_assembler_name_if_neeeded (node->decl);
922 /* Make sure to gimplify bodies only once. During analyzing a
923 function we lower it, which will require gimplified nested
924 functions, so we can end up here with an already gimplified
925 body. */
926 if (!gimple_body (decl))
927 gimplify_function_tree (decl);
928 dump_function (TDI_generic, decl);
930 cgraph_lower_function (node);
931 node->analyzed = true;
933 pop_cfun ();
934 current_function_decl = save;
937 /* Look for externally_visible and used attributes and mark cgraph nodes
938 accordingly.
940 We cannot mark the nodes at the point the attributes are processed (in
941 handle_*_attribute) because the copy of the declarations available at that
942 point may not be canonical. For example, in:
944 void f();
945 void f() __attribute__((used));
947 the declaration we see in handle_used_attribute will be the second
948 declaration -- but the front end will subsequently merge that declaration
949 with the original declaration and discard the second declaration.
951 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
953 void f() {}
954 void f() __attribute__((externally_visible));
956 is valid.
958 So, we walk the nodes at the end of the translation unit, applying the
959 attributes at that point. */
961 static void
962 process_function_and_variable_attributes (struct cgraph_node *first,
963 struct varpool_node *first_var)
965 struct cgraph_node *node;
966 struct varpool_node *vnode;
968 for (node = cgraph_nodes; node != first; node = node->next)
970 tree decl = node->decl;
971 if (DECL_PRESERVE_P (decl))
972 cgraph_mark_needed_node (node);
973 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
975 if (! TREE_PUBLIC (node->decl))
976 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
977 "%<externally_visible%>"
978 " attribute have effect only on public objects");
979 else if (node->local.finalized)
980 cgraph_mark_needed_node (node);
983 for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
985 tree decl = vnode->decl;
986 if (DECL_PRESERVE_P (decl))
988 vnode->force_output = true;
989 if (vnode->finalized)
990 varpool_mark_needed_node (vnode);
992 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
994 if (! TREE_PUBLIC (vnode->decl))
995 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
996 "%<externally_visible%>"
997 " attribute have effect only on public objects");
998 else if (vnode->finalized)
999 varpool_mark_needed_node (vnode);
/* Process the CGRAPH_NODES_NEEDED queue, analyze each function (and
   transitively each reachable function) and build the cgraph.

   The function can be called multiple times after inserting new nodes into
   the beginning of the queue.  Just the new part of the queue is re-scanned
   then.  */
1009 static void
1010 cgraph_analyze_functions (void)
1012 /* Keep track of already processed nodes when called multiple times for
1013 intermodule optimization. */
1014 static struct cgraph_node *first_analyzed;
1015 struct cgraph_node *first_processed = first_analyzed;
1016 static struct varpool_node *first_analyzed_var;
1017 struct cgraph_node *node, *next;
1019 process_function_and_variable_attributes (first_processed,
1020 first_analyzed_var);
1021 first_processed = cgraph_nodes;
1022 first_analyzed_var = varpool_nodes;
1023 varpool_analyze_pending_decls ();
1024 if (cgraph_dump_file)
1026 fprintf (cgraph_dump_file, "Initial entry points:");
1027 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1028 if (node->needed)
1029 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1030 fprintf (cgraph_dump_file, "\n");
1032 cgraph_process_new_functions ();
1034 /* Propagate reachability flag and lower representation of all reachable
1035 functions. In the future, lowering will introduce new functions and
1036 new entry points on the way (by template instantiation and virtual
1037 method table generation for instance). */
1038 while (cgraph_nodes_queue)
1040 struct cgraph_edge *edge;
1041 tree decl = cgraph_nodes_queue->decl;
1043 node = cgraph_nodes_queue;
1044 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
1045 node->next_needed = NULL;
      /* ??? It is possible to create an extern inline function and later
         use the weak alias attribute to kill its body.  See
         gcc.c-torture/compile/20011119-1.c  */
1050 if (!DECL_STRUCT_FUNCTION (decl))
1052 cgraph_reset_node (node);
1053 continue;
1056 if (!node->analyzed)
1057 cgraph_analyze_function (node);
1059 for (edge = node->callees; edge; edge = edge->next_callee)
1060 if (!edge->callee->reachable)
1061 cgraph_mark_reachable_node (edge->callee);
1063 if (node->same_comdat_group)
1065 for (next = node->same_comdat_group;
1066 next != node;
1067 next = next->same_comdat_group)
1068 cgraph_mark_reachable_node (next);
      /* If decl is a clone of an abstract function, mark that abstract
         function so that we don't release its body.  The DECL_INITIAL() of
         that abstract function declaration will later be needed to output
         debug info.  */
1074 if (DECL_ABSTRACT_ORIGIN (decl))
1076 struct cgraph_node *origin_node = cgraph_node (DECL_ABSTRACT_ORIGIN (decl));
1077 origin_node->abstract_and_needed = true;
  /* We finalize local static variables while constructing the callgraph
     edges.  Process their attributes too.  */
1082 process_function_and_variable_attributes (first_processed,
1083 first_analyzed_var);
1084 first_processed = cgraph_nodes;
1085 first_analyzed_var = varpool_nodes;
1086 varpool_analyze_pending_decls ();
1087 cgraph_process_new_functions ();
1090 /* Collect entry points to the unit. */
1091 if (cgraph_dump_file)
1093 fprintf (cgraph_dump_file, "Unit entry points:");
1094 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1095 if (node->needed)
1096 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1097 fprintf (cgraph_dump_file, "\n\nInitial ");
1098 dump_cgraph (cgraph_dump_file);
1101 if (cgraph_dump_file)
1102 fprintf (cgraph_dump_file, "\nReclaiming functions:");
1104 for (node = cgraph_nodes; node != first_analyzed; node = next)
1106 tree decl = node->decl;
1107 next = node->next;
1109 if (node->local.finalized && !gimple_has_body_p (decl))
1110 cgraph_reset_node (node);
1112 if (!node->reachable && gimple_has_body_p (decl))
1114 if (cgraph_dump_file)
1115 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1116 cgraph_remove_node (node);
1117 continue;
1119 else
1120 node->next_needed = NULL;
1121 gcc_assert (!node->local.finalized || gimple_has_body_p (decl));
1122 gcc_assert (node->analyzed == node->local.finalized);
1124 if (cgraph_dump_file)
1126 fprintf (cgraph_dump_file, "\n\nReclaimed ");
1127 dump_cgraph (cgraph_dump_file);
1129 first_analyzed = cgraph_nodes;
1130 ggc_collect ();
1134 /* Analyze the whole compilation unit once it is parsed completely. */
1136 void
1137 cgraph_finalize_compilation_unit (void)
1139 timevar_push (TV_CGRAPH);
  /* Do not skip analyzing the functions if there were errors; we would
     miss diagnostics for the following functions otherwise.  */
1144 /* Emit size functions we didn't inline. */
1145 finalize_size_functions ();
1147 /* Call functions declared with the "constructor" or "destructor"
1148 attribute. */
1149 cgraph_build_cdtor_fns ();
1151 /* Mark alias targets necessary and emit diagnostics. */
1152 finish_aliases_1 ();
1154 if (!quiet_flag)
1156 fprintf (stderr, "\nAnalyzing compilation unit\n");
1157 fflush (stderr);
1160 /* Gimplify and lower all functions, compute reachability and
1161 remove unreachable nodes. */
1162 cgraph_analyze_functions ();
1164 /* Mark alias targets necessary and emit diagnostics. */
1165 finish_aliases_1 ();
1167 /* Gimplify and lower thunks. */
1168 cgraph_analyze_functions ();
1170 /* Finally drive the pass manager. */
1171 cgraph_optimize ();
1173 timevar_pop (TV_CGRAPH);
1177 /* Figure out what functions we want to assemble. */
1179 static void
1180 cgraph_mark_functions_to_output (void)
1182 struct cgraph_node *node;
1183 #ifdef ENABLE_CHECKING
1184 bool check_same_comdat_groups = false;
1186 for (node = cgraph_nodes; node; node = node->next)
1187 gcc_assert (!node->process);
1188 #endif
1190 for (node = cgraph_nodes; node; node = node->next)
1192 tree decl = node->decl;
1193 struct cgraph_edge *e;
1195 gcc_assert (!node->process || node->same_comdat_group);
1196 if (node->process)
1197 continue;
1199 for (e = node->callers; e; e = e->next_caller)
1200 if (e->inline_failed)
1201 break;
1203 /* We need to output all local functions that are used and not
1204 always inlined, as well as those that are reachable from
1205 outside the current compilation unit. */
1206 if (node->analyzed
1207 && !node->global.inlined_to
1208 && (node->needed || node->reachable_from_other_partition
1209 || node->address_taken
1210 || (e && node->reachable))
1211 && !TREE_ASM_WRITTEN (decl)
1212 && !DECL_EXTERNAL (decl))
1214 node->process = 1;
1215 if (node->same_comdat_group)
1217 struct cgraph_node *next;
1218 for (next = node->same_comdat_group;
1219 next != node;
1220 next = next->same_comdat_group)
1221 next->process = 1;
1224 else if (node->same_comdat_group)
1226 #ifdef ENABLE_CHECKING
1227 check_same_comdat_groups = true;
1228 #endif
1230 else
1232 /* We should've reclaimed all functions that are not needed. */
1233 #ifdef ENABLE_CHECKING
1234 if (!node->global.inlined_to
1235 && gimple_has_body_p (decl)
              /* FIXME: in an ltrans unit, when the offline copy is outside
                 the partition but inline copies are inside the partition,
                 we can end up not removing the body since we no longer
                 have an analyzed node pointing to it.  */
1239 && !node->in_other_partition
1240 && !DECL_EXTERNAL (decl))
1242 dump_cgraph_node (stderr, node);
1243 internal_error ("failed to reclaim unneeded function");
1245 #endif
1246 gcc_assert (node->global.inlined_to
1247 || !gimple_has_body_p (decl)
1248 || node->in_other_partition
1249 || DECL_EXTERNAL (decl));
1254 #ifdef ENABLE_CHECKING
1255 if (check_same_comdat_groups)
1256 for (node = cgraph_nodes; node; node = node->next)
1257 if (node->same_comdat_group && !node->process)
1259 tree decl = node->decl;
1260 if (!node->global.inlined_to
1261 && gimple_has_body_p (decl)
          /* FIXME: in an ltrans unit, when the offline copy is outside
             the partition but inline copies are inside the partition,
             we can end up not removing the body since we no longer
             have an analyzed node pointing to it.  */
1265 && !node->in_other_partition
1266 && !DECL_EXTERNAL (decl))
1268 dump_cgraph_node (stderr, node);
1269 internal_error ("failed to reclaim unneeded function");
1272 #endif
/* DECL is a FUNCTION_DECL.  Initialize data structures so DECL is a
   function in lowered gimple form.

   Set current_function_decl and cfun to the newly constructed empty
   function body.  Return the basic block in the function body.  */
1281 static basic_block
1282 init_lowered_empty_function (tree decl)
1284 basic_block bb;
1286 current_function_decl = decl;
1287 allocate_struct_function (decl, false);
1288 gimple_register_cfg_hooks ();
1289 init_empty_tree_cfg ();
1290 init_tree_ssa (cfun);
1291 init_ssa_operands ();
1292 cfun->gimple_df->in_ssa_p = true;
1293 DECL_INITIAL (decl) = make_node (BLOCK);
1295 DECL_SAVED_TREE (decl) = error_mark_node;
1296 cfun->curr_properties |=
1297 (PROP_gimple_lcf | PROP_gimple_leh | PROP_cfg | PROP_referenced_vars |
1298 PROP_ssa);
1300 /* Create BB for body of the function and connect it properly. */
1301 bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
1302 make_edge (ENTRY_BLOCK_PTR, bb, 0);
1303 make_edge (bb, EXIT_BLOCK_PTR, 0);
1305 return bb;
/* Adjust PTR by the constant FIXED_OFFSET, and by the vtable offset
   indicated by VIRTUAL_OFFSET, if that is non-null.  THIS_ADJUSTING is
   nonzero for a this-adjusting thunk and zero for a result-adjusting
   thunk.  */
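/* As a rough sketch only (plain C pseudocode, not the GIMPLE actually built
   below; the ptrdiff_t cast is an approximation of the real vtable entry
   type), the adjustment performed by thunk_adjust corresponds to:

     char *p = (char *) ptr;

     if (this_adjusting)
       p += fixed_offset;
     if (virtual_offset)
       p += *(ptrdiff_t *) (*(char **) p + (size_t) virtual_offset);
     if (!this_adjusting)
       p += fixed_offset;

   i.e. the vptr stored at offset zero of the object is used to fetch the
   vcall offset located VIRTUAL_OFFSET bytes into the vtable, and that
   offset is then added to the pointer.  */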
1313 static tree
1314 thunk_adjust (gimple_stmt_iterator * bsi,
1315 tree ptr, bool this_adjusting,
1316 HOST_WIDE_INT fixed_offset, tree virtual_offset)
1318 gimple stmt;
1319 tree ret;
1321 if (this_adjusting
1322 && fixed_offset != 0)
1324 stmt = gimple_build_assign (ptr,
1325 fold_build2_loc (input_location,
1326 POINTER_PLUS_EXPR,
1327 TREE_TYPE (ptr), ptr,
1328 size_int (fixed_offset)));
1329 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1332 /* If there's a virtual offset, look up that value in the vtable and
1333 adjust the pointer again. */
1334 if (virtual_offset)
1336 tree vtabletmp;
1337 tree vtabletmp2;
1338 tree vtabletmp3;
1339 tree offsettmp;
1341 if (!vtable_entry_type)
1343 tree vfunc_type = make_node (FUNCTION_TYPE);
1344 TREE_TYPE (vfunc_type) = integer_type_node;
1345 TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
1346 layout_type (vfunc_type);
1348 vtable_entry_type = build_pointer_type (vfunc_type);
1351 vtabletmp =
1352 create_tmp_var (build_pointer_type
1353 (build_pointer_type (vtable_entry_type)), "vptr");
1355 /* The vptr is always at offset zero in the object. */
1356 stmt = gimple_build_assign (vtabletmp,
1357 build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
1358 ptr));
1359 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1360 mark_symbols_for_renaming (stmt);
1361 find_referenced_vars_in (stmt);
1363 /* Form the vtable address. */
1364 vtabletmp2 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp)),
1365 "vtableaddr");
1366 stmt = gimple_build_assign (vtabletmp2,
1367 build_simple_mem_ref (vtabletmp));
1368 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1369 mark_symbols_for_renaming (stmt);
1370 find_referenced_vars_in (stmt);
1372 /* Find the entry with the vcall offset. */
1373 stmt = gimple_build_assign (vtabletmp2,
1374 fold_build2_loc (input_location,
1375 POINTER_PLUS_EXPR,
1376 TREE_TYPE (vtabletmp2),
1377 vtabletmp2,
1378 fold_convert (sizetype,
1379 virtual_offset)));
1380 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1382 /* Get the offset itself. */
1383 vtabletmp3 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp2)),
1384 "vcalloffset");
1385 stmt = gimple_build_assign (vtabletmp3,
1386 build_simple_mem_ref (vtabletmp2));
1387 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1388 mark_symbols_for_renaming (stmt);
1389 find_referenced_vars_in (stmt);
1391 /* Cast to sizetype. */
1392 offsettmp = create_tmp_var (sizetype, "offset");
1393 stmt = gimple_build_assign (offsettmp, fold_convert (sizetype, vtabletmp3));
1394 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1395 mark_symbols_for_renaming (stmt);
1396 find_referenced_vars_in (stmt);
1398 /* Adjust the `this' pointer. */
1399 ptr = fold_build2_loc (input_location,
1400 POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
1401 offsettmp);
1404 if (!this_adjusting
1405 && fixed_offset != 0)
1406 /* Adjust the pointer by the constant. */
1408 tree ptrtmp;
1410 if (TREE_CODE (ptr) == VAR_DECL)
1411 ptrtmp = ptr;
1412 else
1414 ptrtmp = create_tmp_var (TREE_TYPE (ptr), "ptr");
1415 stmt = gimple_build_assign (ptrtmp, ptr);
1416 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1417 mark_symbols_for_renaming (stmt);
1418 find_referenced_vars_in (stmt);
1420 ptr = fold_build2_loc (input_location,
1421 POINTER_PLUS_EXPR, TREE_TYPE (ptrtmp), ptrtmp,
1422 size_int (fixed_offset));
1425 /* Emit the statement and gimplify the adjustment expression. */
1426 ret = create_tmp_var (TREE_TYPE (ptr), "adjusted_this");
1427 stmt = gimple_build_assign (ret, ptr);
1428 mark_symbols_for_renaming (stmt);
1429 find_referenced_vars_in (stmt);
1430 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1432 return ret;
1435 /* Produce assembler for thunk NODE. */
1437 static void
1438 assemble_thunk (struct cgraph_node *node)
1440 bool this_adjusting = node->thunk.this_adjusting;
1441 HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
1442 HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
1443 tree virtual_offset = NULL;
1444 tree alias = node->thunk.alias;
1445 tree thunk_fndecl = node->decl;
1446 tree a = DECL_ARGUMENTS (thunk_fndecl);
1448 current_function_decl = thunk_fndecl;
1450 if (this_adjusting
1451 && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1452 virtual_value, alias))
1454 const char *fnname;
1455 tree fn_block;
1457 DECL_RESULT (thunk_fndecl)
1458 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1459 RESULT_DECL, 0, integer_type_node);
1460 fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
1462 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1463 create one. */
1464 fn_block = make_node (BLOCK);
1465 BLOCK_VARS (fn_block) = a;
1466 DECL_INITIAL (thunk_fndecl) = fn_block;
1467 init_function_start (thunk_fndecl);
1468 cfun->is_thunk = 1;
1469 assemble_start_function (thunk_fndecl, fnname);
1471 targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1472 fixed_offset, virtual_value, alias);
1474 assemble_end_function (thunk_fndecl, fnname);
1475 init_insn_lengths ();
1476 free_after_compilation (cfun);
1477 set_cfun (NULL);
1478 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1480 else
1482 tree restype;
1483 basic_block bb, then_bb, else_bb, return_bb;
1484 gimple_stmt_iterator bsi;
1485 int nargs = 0;
1486 tree arg;
1487 int i;
1488 tree resdecl;
1489 tree restmp = NULL;
1490 VEC(tree, heap) *vargs;
1492 gimple call;
1493 gimple ret;
1495 DECL_IGNORED_P (thunk_fndecl) = 1;
1496 bitmap_obstack_initialize (NULL);
1498 if (node->thunk.virtual_offset_p)
1499 virtual_offset = size_int (virtual_value);
1501 /* Build the return declaration for the function. */
1502 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1503 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1505 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1506 DECL_ARTIFICIAL (resdecl) = 1;
1507 DECL_IGNORED_P (resdecl) = 1;
1508 DECL_RESULT (thunk_fndecl) = resdecl;
1510 else
1511 resdecl = DECL_RESULT (thunk_fndecl);
1513 bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl);
1515 bsi = gsi_start_bb (bb);
1517 /* Build call to the function being thunked. */
1518 if (!VOID_TYPE_P (restype))
1520 if (!is_gimple_reg_type (restype))
1522 restmp = resdecl;
1523 add_local_decl (cfun, restmp);
1524 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
1526 else
1527 restmp = create_tmp_var_raw (restype, "retval");
1530 for (arg = a; arg; arg = DECL_CHAIN (arg))
1531 nargs++;
1532 vargs = VEC_alloc (tree, heap, nargs);
1533 if (this_adjusting)
1534 VEC_quick_push (tree, vargs,
1535 thunk_adjust (&bsi,
1536 a, 1, fixed_offset,
1537 virtual_offset));
1538 else
1539 VEC_quick_push (tree, vargs, a);
1540 for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
1541 VEC_quick_push (tree, vargs, arg);
1542 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
1543 VEC_free (tree, heap, vargs);
1544 gimple_call_set_cannot_inline (call, true);
1545 gimple_call_set_from_thunk (call, true);
1546 if (restmp)
1547 gimple_call_set_lhs (call, restmp);
1548 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
1549 mark_symbols_for_renaming (call);
1550 find_referenced_vars_in (call);
1551 update_stmt (call);
1553 if (restmp && !this_adjusting)
1555 tree true_label = NULL_TREE;
1557 if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
1559 gimple stmt;
1560 /* If the return type is a pointer, we need to
1561 protect against NULL. We know there will be an
1562 adjustment, because that's why we're emitting a
1563 thunk. */
1564 then_bb = create_basic_block (NULL, (void *) 0, bb);
1565 return_bb = create_basic_block (NULL, (void *) 0, then_bb);
1566 else_bb = create_basic_block (NULL, (void *) 0, else_bb);
1567 remove_edge (single_succ_edge (bb));
1568 true_label = gimple_block_label (then_bb);
1569 stmt = gimple_build_cond (NE_EXPR, restmp,
1570 fold_convert (TREE_TYPE (restmp),
1571 integer_zero_node),
1572 NULL_TREE, NULL_TREE);
1573 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1574 make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1575 make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1576 make_edge (return_bb, EXIT_BLOCK_PTR, 0);
1577 make_edge (then_bb, return_bb, EDGE_FALLTHRU);
1578 make_edge (else_bb, return_bb, EDGE_FALLTHRU);
1579 bsi = gsi_last_bb (then_bb);
1582 restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
1583 fixed_offset, virtual_offset);
1584 if (true_label)
1586 gimple stmt;
1587 bsi = gsi_last_bb (else_bb);
1588 stmt = gimple_build_assign (restmp, fold_convert (TREE_TYPE (restmp),
1589 integer_zero_node));
1590 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1591 bsi = gsi_last_bb (return_bb);
1594 else
1595 gimple_call_set_tail (call, true);
1597 /* Build return value. */
1598 ret = gimple_build_return (restmp);
1599 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
1601 delete_unreachable_blocks ();
1602 update_ssa (TODO_update_ssa);
1604 cgraph_remove_same_body_alias (node);
1605 /* Since we want to emit the thunk, we explicitly mark its name as
1606 referenced. */
1607 cgraph_add_new_function (thunk_fndecl, true);
1608 bitmap_obstack_release (NULL);
1610 current_function_decl = NULL;
1613 /* Expand function specified by NODE. */
1615 static void
1616 cgraph_expand_function (struct cgraph_node *node)
1618 tree decl = node->decl;
1620 /* We ought to not compile any inline clones. */
1621 gcc_assert (!node->global.inlined_to);
1623 announce_function (decl);
1624 node->process = 0;
1626 gcc_assert (node->lowered);
1628 /* Generate RTL for the body of DECL. */
1629 tree_rest_of_compilation (decl);
1631 /* Make sure that BE didn't give up on compiling. */
1632 gcc_assert (TREE_ASM_WRITTEN (decl));
1633 current_function_decl = NULL;
1634 if (node->same_body)
1636 struct cgraph_node *alias, *next;
1637 bool saved_alias = node->alias;
1638 for (alias = node->same_body;
1639 alias && alias->next; alias = alias->next)
      /* Walk aliases in the order they were created; it is possible that
         thunks refer to the aliases made earlier.  */
1643 for (; alias; alias = next)
1645 next = alias->previous;
1646 if (!alias->thunk.thunk_p)
1647 assemble_alias (alias->decl,
1648 DECL_ASSEMBLER_NAME (alias->thunk.alias));
1649 else
1650 assemble_thunk (alias);
1652 node->alias = saved_alias;
1654 gcc_assert (!cgraph_preserve_function_body_p (decl));
1655 cgraph_release_function_body (node);
1656 /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
1657 points to the dead function body. */
1658 cgraph_node_remove_callees (node);
1660 cgraph_function_flags_ready = true;
/* Return true when the call on edge E is going to be inlined into its
   caller; *REASON is set to the recorded inline-failure code.  */
1665 bool
1666 cgraph_inline_p (struct cgraph_edge *e, cgraph_inline_failed_t *reason)
1668 *reason = e->inline_failed;
1669 return !e->inline_failed;
/* Expand all functions that must be output.

   Attempt to topologically sort the nodes so that a function is output
   when all its callees have already been assembled, allowing data to be
   propagated across the callgraph.  Use a stack to get a smaller distance
   between a function and its callees (later we may choose to use a more
   sophisticated algorithm for function reordering; we will likely want to
   use subsections to make the output functions appear in top-down
   order).  */
1684 static void
1685 cgraph_expand_all_functions (void)
1687 struct cgraph_node *node;
1688 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1689 int order_pos, new_order_pos = 0;
1690 int i;
1692 order_pos = cgraph_postorder (order);
1693 gcc_assert (order_pos == cgraph_n_nodes);
1695 /* Garbage collector may remove inline clones we eliminate during
1696 optimization. So we must be sure to not reference them. */
1697 for (i = 0; i < order_pos; i++)
1698 if (order[i]->process)
1699 order[new_order_pos++] = order[i];
1701 for (i = new_order_pos - 1; i >= 0; i--)
1703 node = order[i];
1704 if (node->process)
1706 gcc_assert (node->reachable);
1707 node->process = 0;
1708 cgraph_expand_function (node);
1711 cgraph_process_new_functions ();
1713 free (order);
1717 /* This is used to sort the node types by the cgraph order number. */
1719 enum cgraph_order_sort_kind
1721 ORDER_UNDEFINED = 0,
1722 ORDER_FUNCTION,
1723 ORDER_VAR,
1724 ORDER_ASM
1727 struct cgraph_order_sort
1729 enum cgraph_order_sort_kind kind;
1730 union
1732 struct cgraph_node *f;
1733 struct varpool_node *v;
1734 struct cgraph_asm_node *a;
1735 } u;
1738 /* Output all functions, variables, and asm statements in the order
1739 according to their order fields, which is the order in which they
1740 appeared in the file. This implements -fno-toplevel-reorder. In
1741 this mode we may output functions and variables which don't really
1742 need to be output. */
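/* For example (hypothetical input, shown only to illustrate why the order
   matters under -fno-toplevel-reorder):

     int a = 1;
     asm (".section .rodata.keys");
     int b = 2;

   Here the toplevel asm has to stay between A and B, so this function
   assembles functions, variables and asm statements strictly by their
   ORDER fields instead of letting them be reordered freely.  */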
1744 static void
1745 cgraph_output_in_order (void)
1747 int max;
1748 struct cgraph_order_sort *nodes;
1749 int i;
1750 struct cgraph_node *pf;
1751 struct varpool_node *pv;
1752 struct cgraph_asm_node *pa;
1754 max = cgraph_order;
1755 nodes = XCNEWVEC (struct cgraph_order_sort, max);
1757 varpool_analyze_pending_decls ();
1759 for (pf = cgraph_nodes; pf; pf = pf->next)
1761 if (pf->process)
1763 i = pf->order;
1764 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1765 nodes[i].kind = ORDER_FUNCTION;
1766 nodes[i].u.f = pf;
1770 for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
1772 i = pv->order;
1773 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1774 nodes[i].kind = ORDER_VAR;
1775 nodes[i].u.v = pv;
1778 for (pa = cgraph_asm_nodes; pa; pa = pa->next)
1780 i = pa->order;
1781 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1782 nodes[i].kind = ORDER_ASM;
1783 nodes[i].u.a = pa;
  /* In no-toplevel-reorder mode we output all statics; mark them as
     needed.  */
1787 for (i = 0; i < max; ++i)
1789 if (nodes[i].kind == ORDER_VAR)
1791 varpool_mark_needed_node (nodes[i].u.v);
1794 varpool_empty_needed_queue ();
1796 for (i = 0; i < max; ++i)
1798 switch (nodes[i].kind)
1800 case ORDER_FUNCTION:
1801 nodes[i].u.f->process = 0;
1802 cgraph_expand_function (nodes[i].u.f);
1803 break;
1805 case ORDER_VAR:
1806 varpool_assemble_decl (nodes[i].u.v);
1807 break;
1809 case ORDER_ASM:
1810 assemble_asm (nodes[i].u.a->asm_str);
1811 break;
1813 case ORDER_UNDEFINED:
1814 break;
1816 default:
1817 gcc_unreachable ();
1821 cgraph_asm_nodes = NULL;
1822 free (nodes);
1825 /* Return true when function body of DECL still needs to be kept around
1826 for later re-use. */
1827 bool
1828 cgraph_preserve_function_body_p (tree decl)
1830 struct cgraph_node *node;
1832 gcc_assert (cgraph_global_info_ready);
1833 /* Look if there is any clone around. */
1834 node = cgraph_node (decl);
1835 if (node->clones)
1836 return true;
1837 return false;
1840 static void
1841 ipa_passes (void)
1843 set_cfun (NULL);
1844 current_function_decl = NULL;
1845 gimple_register_cfg_hooks ();
1846 bitmap_obstack_initialize (NULL);
1848 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);
1850 if (!in_lto_p)
1851 execute_ipa_pass_list (all_small_ipa_passes);
1853 /* If pass_all_early_optimizations was not scheduled, the state of
1854 the cgraph will not be properly updated. Update it now. */
1855 if (cgraph_state < CGRAPH_STATE_IPA_SSA)
1856 cgraph_state = CGRAPH_STATE_IPA_SSA;
1858 if (!in_lto_p)
1860 /* Generate coverage variables and constructors. */
1861 coverage_finish ();
1863 /* Process new functions added. */
1864 set_cfun (NULL);
1865 current_function_decl = NULL;
1866 cgraph_process_new_functions ();
1868 execute_ipa_summary_passes
1869 ((struct ipa_opt_pass_d *) all_regular_ipa_passes);
1872 /* Some targets need to handle LTO assembler output specially. */
1873 if (flag_generate_lto)
1874 targetm.asm_out.lto_start ();
1876 execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);
1878 if (!in_lto_p)
1879 ipa_write_summaries ();
1881 if (flag_generate_lto)
1882 targetm.asm_out.lto_end ();
1884 if (!flag_ltrans)
1885 execute_ipa_pass_list (all_regular_ipa_passes);
1886 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
1888 bitmap_obstack_release (NULL);
1892 /* Perform simple optimizations based on callgraph. */
1894 void
1895 cgraph_optimize (void)
1897 if (seen_error ())
1898 return;
1900 #ifdef ENABLE_CHECKING
1901 verify_cgraph ();
1902 #endif
1904 /* Frontend may output common variables after the unit has been finalized.
1905 It is safe to deal with them here as they are always zero initialized. */
1906 varpool_analyze_pending_decls ();
1908 timevar_push (TV_CGRAPHOPT);
1909 if (pre_ipa_mem_report)
1911 fprintf (stderr, "Memory consumption before IPA\n");
1912 dump_memory_report (false);
1914 if (!quiet_flag)
1915 fprintf (stderr, "Performing interprocedural optimizations\n");
1916 cgraph_state = CGRAPH_STATE_IPA;
  /* Don't run the IPA passes if there were any errors or sorry messages.  */
1919 if (!seen_error ())
1920 ipa_passes ();
1922 /* Do nothing else if any IPA pass found errors. */
1923 if (seen_error ())
1925 timevar_pop (TV_CGRAPHOPT);
1926 return;
  /* This pass removes the bodies of extern inline functions we never
     inlined.  Do this later so other IPA passes see what is really going
     on.  */
1931 cgraph_remove_unreachable_nodes (false, dump_file);
1932 cgraph_global_info_ready = true;
1933 if (cgraph_dump_file)
1935 fprintf (cgraph_dump_file, "Optimized ");
1936 dump_cgraph (cgraph_dump_file);
1937 dump_varpool (cgraph_dump_file);
1939 if (post_ipa_mem_report)
1941 fprintf (stderr, "Memory consumption after IPA\n");
1942 dump_memory_report (false);
1944 timevar_pop (TV_CGRAPHOPT);
1946 /* Output everything. */
1947 (*debug_hooks->assembly_start) ();
1948 if (!quiet_flag)
1949 fprintf (stderr, "Assembling functions:\n");
1950 #ifdef ENABLE_CHECKING
1951 verify_cgraph ();
1952 #endif
1954 cgraph_materialize_all_clones ();
1955 cgraph_mark_functions_to_output ();
1957 cgraph_state = CGRAPH_STATE_EXPANSION;
1958 if (!flag_toplevel_reorder)
1959 cgraph_output_in_order ();
1960 else
1962 cgraph_output_pending_asms ();
1964 cgraph_expand_all_functions ();
1965 varpool_remove_unreferenced_decls ();
1967 varpool_assemble_pending_decls ();
1969 cgraph_process_new_functions ();
1970 cgraph_state = CGRAPH_STATE_FINISHED;
1972 if (cgraph_dump_file)
1974 fprintf (cgraph_dump_file, "\nFinal ");
1975 dump_cgraph (cgraph_dump_file);
1977 #ifdef ENABLE_CHECKING
1978 verify_cgraph ();
1979 /* Double check that all inline clones are gone and that all
1980 function bodies have been released from memory. */
1981 if (!seen_error ())
1983 struct cgraph_node *node;
1984 bool error_found = false;
1986 for (node = cgraph_nodes; node; node = node->next)
1987 if (node->analyzed
1988 && (node->global.inlined_to
1989 || gimple_has_body_p (node->decl)))
1991 error_found = true;
1992 dump_cgraph_node (stderr, node);
1994 if (error_found)
1995 internal_error ("nodes with unreleased memory found");
1997 #endif

/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor) or 'D' (for a destructor).  BODY
   is a STATEMENT_LIST containing GENERIC statements.  PRIORITY is the
   initialization priority for this constructor or destructor.  */
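
/* For illustration only (not a call site in this file): a front end that
   wants a helper run at program startup could build a GENERIC body and hand
   it to this function, roughly as follows, where INIT_FNDECL is a
   hypothetical FUNCTION_DECL for that helper:

     tree body = NULL_TREE;
     append_to_statement_list (build_call_expr (init_fndecl, 0), &body);
     cgraph_build_static_cdtor ('I', body, DEFAULT_INIT_PRIORITY);

   append_to_statement_list and build_call_expr are the usual GENERIC
   building blocks; the sketch only shows the expected shape of BODY,
   WHICH and PRIORITY.  */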

void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  static int counter = 0;
  char which_buf[16];
  tree decl, name, resdecl;

  /* The priority is encoded in the constructor or destructor name.
     collect2 will sort the names and arrange that they are called at
     program startup.  */
  sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
  name = get_file_function_name (which_buf);
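
  /* Worked example (illustrative): the first constructor emitted at the
     default priority (65535) gets the buffer "I_65535_0" from the format
     string above; get_file_function_name then folds that string into a
     file-specific symbol, so collect2 can recover the priority by sorting
     the generated names.  */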

  decl = build_decl (input_location, FUNCTION_DECL, name,
                     build_function_type_list (void_type_node, NULL_TREE));
  current_function_decl = decl;

  resdecl = build_decl (input_location,
                        RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;
  DECL_CONTEXT (resdecl) = decl;

  allocate_struct_function (decl, false);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  if (!targetm.have_ctors_dtors)
    {
      TREE_PUBLIC (decl) = 1;
      DECL_PRESERVE_P (decl) = 1;
    }
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      decl_init_priority_insert (decl, priority);
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      decl_fini_priority_insert (decl, priority);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  cgraph_add_new_function (decl, false);
  cgraph_mark_needed_node (cgraph_node (decl));

  set_cfun (NULL);
  current_function_decl = NULL;
}

void
init_cgraph (void)
{
  if (!cgraph_dump_file)
    cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
}

/* The edges representing the callers of the NEW_VERSION node were
   fixed up by cgraph_function_versioning (); now the call expressions in
   their respective call statements should be updated to call NEW_VERSION.  */

static void
update_call_expr (struct cgraph_node *new_version)
{
  struct cgraph_edge *e;

  gcc_assert (new_version);

  /* Update the call expr on the edges to call the new version.  */
  for (e = new_version->callers; e; e = e->next_caller)
    {
      struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
      gimple_call_set_fndecl (e->call_stmt, new_version->decl);
      maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
    }
}

/* Create a new cgraph node which is the new version of the
   OLD_VERSION node.  REDIRECT_CALLERS holds the caller edges
   which should be redirected to point to NEW_VERSION.  All the
   callee edges of OLD_VERSION are cloned to the new version node.
   Return the new version node.

   If non-NULL, BBS_TO_COPY determines which basic blocks are copied,
   to prevent duplication of calls that are dead in the clone.  */

static struct cgraph_node *
cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
                                 tree new_decl,
                                 VEC(cgraph_edge_p,heap) *redirect_callers,
                                 bitmap bbs_to_copy)
{
  struct cgraph_node *new_version;
  struct cgraph_edge *e;
  unsigned i;

  gcc_assert (old_version);

  new_version = cgraph_node (new_decl);

  new_version->analyzed = true;
  new_version->local = old_version->local;
  new_version->local.externally_visible = false;
  new_version->local.local = true;
  new_version->local.vtable_method = false;
  new_version->global = old_version->global;
  new_version->rtl = old_version->rtl;
  new_version->reachable = true;
  new_version->count = old_version->count;

  for (e = old_version->callees; e; e = e->next_callee)
    if (!bbs_to_copy
        || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
      cgraph_clone_edge (e, new_version, e->call_stmt,
                         e->lto_stmt_uid, REG_BR_PROB_BASE,
                         CGRAPH_FREQ_BASE,
                         e->loop_nest, true);
  for (e = old_version->indirect_calls; e; e = e->next_callee)
    if (!bbs_to_copy
        || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
      cgraph_clone_edge (e, new_version, e->call_stmt,
                         e->lto_stmt_uid, REG_BR_PROB_BASE,
                         CGRAPH_FREQ_BASE,
                         e->loop_nest, true);
  for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
    {
      /* Redirect calls to the old version node to point to its new
         version.  */
      cgraph_redirect_edge_callee (e, new_version);
    }

  return new_version;
}

/* Perform function versioning.
   Function versioning includes copying of the tree and
   a callgraph update (creating a new cgraph node and updating
   its callees and callers).

   REDIRECT_CALLERS holds the edges to be redirected to the new version.

   TREE_MAP is a mapping of tree nodes we want to replace with
   new ones (according to results of prior analysis).
   OLD_VERSION_NODE is the node that is versioned.

   Returns the new version's cgraph node.
   If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
   from the new version.
   If non-NULL, BBS_TO_COPY determines which basic blocks to copy.
   If non-NULL, NEW_ENTRY_BLOCK determines the new entry BB of the clone.  */
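
/* A minimal usage sketch (hedged; not a call site in this file): a pass that
   wants a local copy of NODE with its first parameter dropped, with a single
   caller edge E redirected to that copy, could do roughly the following;
   the clone suffix "my_clone" is made up for the example:

     VEC(cgraph_edge_p,heap) *redirect = NULL;
     bitmap args_to_skip = BITMAP_ALLOC (NULL);
     struct cgraph_node *copy;

     VEC_safe_push (cgraph_edge_p, heap, redirect, e);
     bitmap_set_bit (args_to_skip, 0);
     copy = cgraph_function_versioning (node, redirect, NULL, args_to_skip,
                                        NULL, NULL, "my_clone");

   Callers whose edges are not in REDIRECT keep calling the original NODE;
   only the listed edges are rewired to the copy.  */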

struct cgraph_node *
cgraph_function_versioning (struct cgraph_node *old_version_node,
                            VEC(cgraph_edge_p,heap) *redirect_callers,
                            VEC (ipa_replace_map_p,gc)* tree_map,
                            bitmap args_to_skip,
                            bitmap bbs_to_copy,
                            basic_block new_entry_block,
                            const char *clone_name)
{
  tree old_decl = old_version_node->decl;
  struct cgraph_node *new_version_node = NULL;
  tree new_decl;

  if (!tree_versionable_function_p (old_decl))
    return NULL;

  /* Make a new FUNCTION_DECL tree node for the new version.  */
  if (!args_to_skip)
    new_decl = copy_node (old_decl);
  else
    new_decl = build_function_decl_skip_args (old_decl, args_to_skip);

  /* Generate a new name for the new version.  */
  DECL_NAME (new_decl) = clone_function_name (old_decl, clone_name);
  SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
  SET_DECL_RTL (new_decl, NULL);

  /* Create the new version's call-graph node
     and update the edges of the new node.  */
  new_version_node =
    cgraph_copy_node_for_versioning (old_version_node, new_decl,
                                     redirect_callers, bbs_to_copy);

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
                            bbs_to_copy, new_entry_block);

  /* Update the new version's properties.
     Make the new version visible only within this translation unit.  Make
     sure that it is not weak either.
     ??? We cannot use COMDAT linkage because there is no
     ABI support for this.  */
  cgraph_make_decl_local (new_version_node->decl);
  DECL_VIRTUAL_P (new_version_node->decl) = 0;
  new_version_node->local.externally_visible = 0;
  new_version_node->local.local = 1;
  new_version_node->lowered = true;

  /* Update the call_expr on the edges to call the new version node.  */
  update_call_expr (new_version_node);

  cgraph_call_function_insertion_hooks (new_version_node);
  return new_version_node;
}

/* Produce a separate function body for inline clones so the offline copy can
   be modified without affecting them.  */
struct cgraph_node *
save_inline_function_body (struct cgraph_node *node)
{
  struct cgraph_node *first_clone, *n;

  gcc_assert (node == cgraph_node (node->decl));

  cgraph_lower_function (node);

  first_clone = node->clones;

  first_clone->decl = copy_node (node->decl);
  cgraph_insert_node_to_hashtable (first_clone);
  gcc_assert (first_clone == cgraph_node (first_clone->decl));
  if (first_clone->next_sibling_clone)
    {
      for (n = first_clone->next_sibling_clone; n->next_sibling_clone;
           n = n->next_sibling_clone)
        n->clone_of = first_clone;
      n->clone_of = first_clone;
      n->next_sibling_clone = first_clone->clones;
      if (first_clone->clones)
        first_clone->clones->prev_sibling_clone = n;
      first_clone->clones = first_clone->next_sibling_clone;
      first_clone->next_sibling_clone->prev_sibling_clone = NULL;
      first_clone->next_sibling_clone = NULL;
      gcc_assert (!first_clone->prev_sibling_clone);
    }
  first_clone->clone_of = NULL;
  node->clones = NULL;

  if (first_clone->clones)
    for (n = first_clone->clones; n != first_clone;)
      {
        gcc_assert (n->decl == node->decl);
        n->decl = first_clone->decl;
        if (n->clones)
          n = n->clones;
        else if (n->next_sibling_clone)
          n = n->next_sibling_clone;
        else
          {
            while (n != first_clone && !n->next_sibling_clone)
              n = n->clone_of;
            if (n != first_clone)
              n = n->next_sibling_clone;
          }
      }

  /* Copy the body of NODE to FIRST_CLONE's new declaration.  */
  tree_function_versioning (node->decl, first_clone->decl, NULL, true, NULL,
                            NULL, NULL);

  DECL_EXTERNAL (first_clone->decl) = 0;
  DECL_COMDAT_GROUP (first_clone->decl) = NULL_TREE;
  TREE_PUBLIC (first_clone->decl) = 0;
  DECL_COMDAT (first_clone->decl) = 0;
  VEC_free (ipa_opt_pass, heap,
            first_clone->ipa_transforms_to_apply);
  first_clone->ipa_transforms_to_apply = NULL;

#ifdef ENABLE_CHECKING
  verify_cgraph_node (first_clone);
#endif
  return first_clone;
}

/* Given a virtual clone, turn it into an actual clone.  */
static void
cgraph_materialize_clone (struct cgraph_node *node)
{
  bitmap_obstack_initialize (NULL);
#ifdef ENABLE_CHECKING
  node->former_clone_of = node->clone_of->decl;
  if (node->clone_of->former_clone_of)
    node->former_clone_of = node->clone_of->former_clone_of;
#endif
  /* Copy the body of the clone's origin to this node's declaration,
     applying the recorded tree map and skipped arguments.  */
  tree_function_versioning (node->clone_of->decl, node->decl,
                            node->clone.tree_map, true,
                            node->clone.args_to_skip, NULL, NULL);
  if (cgraph_dump_file)
    {
      dump_function_to_file (node->clone_of->decl, cgraph_dump_file,
                             dump_flags);
      dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
    }

  /* The function is no longer a clone.  */
  if (node->next_sibling_clone)
    node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
  if (node->prev_sibling_clone)
    node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
  else
    node->clone_of->clones = node->next_sibling_clone;
  node->next_sibling_clone = NULL;
  node->prev_sibling_clone = NULL;
  if (!node->clone_of->analyzed && !node->clone_of->clones)
    {
      cgraph_release_function_body (node->clone_of);
      cgraph_node_remove_callees (node->clone_of);
      ipa_remove_all_references (&node->clone_of->ref_list);
    }
  node->clone_of = NULL;
  bitmap_obstack_release (NULL);
}

/* If necessary, change the function declaration in the call statement
   associated with E so that it corresponds to the edge callee.  */
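
/* Illustration (assumed example, not taken from this file): if the edge
   callee is a clone whose combined_args_to_skip bitmap has bit 1 set, a
   recorded call such as

     foo (a, b, c);

   is rewritten below via gimple_call_copy_skip_args into a call to the
   clone's declaration with the skipped argument dropped, conceptually

     foo.clone (a, c);

   where "foo.clone" stands for whatever name the clone's FUNCTION_DECL
   actually carries.  */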

gimple
cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *e)
{
  tree decl = gimple_call_fndecl (e->call_stmt);
  gimple new_stmt;
#ifdef ENABLE_CHECKING
  struct cgraph_node *node;
#endif

  if (!decl || decl == e->callee->decl
      /* Don't update call from same body alias to the real function.  */
      || cgraph_get_node (decl) == cgraph_get_node (e->callee->decl))
    return e->call_stmt;

#ifdef ENABLE_CHECKING
  node = cgraph_get_node (decl);
  gcc_assert (!node || !node->clone.combined_args_to_skip);
#endif

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "updating call of %s/%i -> %s/%i: ",
               cgraph_node_name (e->caller), e->caller->uid,
               cgraph_node_name (e->callee), e->callee->uid);
      print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
      if (e->callee->clone.combined_args_to_skip)
        {
          fprintf (cgraph_dump_file, " combined args to skip: ");
          dump_bitmap (cgraph_dump_file,
                       e->callee->clone.combined_args_to_skip);
        }
    }

  if (e->callee->clone.combined_args_to_skip)
    {
      gimple_stmt_iterator gsi;

      new_stmt
        = gimple_call_copy_skip_args (e->call_stmt,
                                      e->callee->clone.combined_args_to_skip);

      if (gimple_vdef (new_stmt)
          && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
        SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;

      gsi = gsi_for_stmt (e->call_stmt);
      gsi_replace (&gsi, new_stmt, true);
    }
  else
    new_stmt = e->call_stmt;

  gimple_call_set_fndecl (new_stmt, e->callee->decl);
  update_stmt (new_stmt);

  cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt, new_stmt);

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "  updated to:");
      print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
    }
  return new_stmt;
}

/* Once all functions from the compilation unit are in memory, produce all
   clones and update all calls.  We might also do this on demand if we don't
   want to bring all functions to memory prior to compilation, but the
   current WHOPR implementation does bring them in, and it is a bit easier
   to keep everything right in this order.  */
void
cgraph_materialize_all_clones (void)
{
  struct cgraph_node *node;
  bool stabilized = false;

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "Materializing clones\n");
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* We can also do topological order, but the number of iterations should be
     bounded by the number of IPA passes, since a single IPA pass is unlikely
     to create clones of clones it created itself.  */
  while (!stabilized)
    {
      stabilized = true;
      for (node = cgraph_nodes; node; node = node->next)
        {
          if (node->clone_of && node->decl != node->clone_of->decl
              && !gimple_has_body_p (node->decl))
            {
              if (gimple_has_body_p (node->clone_of->decl))
                {
                  if (cgraph_dump_file)
                    {
                      fprintf (cgraph_dump_file, "cloning %s to %s\n",
                               cgraph_node_name (node->clone_of),
                               cgraph_node_name (node));
                      if (node->clone.tree_map)
                        {
                          unsigned int i;
                          fprintf (cgraph_dump_file, "   replace map: ");
                          for (i = 0; i < VEC_length (ipa_replace_map_p,
                                                      node->clone.tree_map);
                               i++)
                            {
                              struct ipa_replace_map *replace_info;
                              replace_info = VEC_index (ipa_replace_map_p,
                                                        node->clone.tree_map,
                                                        i);
                              print_generic_expr (cgraph_dump_file,
                                                  replace_info->old_tree, 0);
                              fprintf (cgraph_dump_file, " -> ");
                              print_generic_expr (cgraph_dump_file,
                                                  replace_info->new_tree, 0);
                              fprintf (cgraph_dump_file, "%s%s;",
                                       replace_info->replace_p
                                       ? "(replace)" : "",
                                       replace_info->ref_p ? "(ref)" : "");
                            }
                          fprintf (cgraph_dump_file, "\n");
                        }
                      if (node->clone.args_to_skip)
                        {
                          fprintf (cgraph_dump_file, "   args_to_skip: ");
                          dump_bitmap (cgraph_dump_file,
                                       node->clone.args_to_skip);
                        }
                      if (node->clone.combined_args_to_skip)
                        {
                          fprintf (cgraph_dump_file,
                                   "   combined_args_to_skip: ");
                          dump_bitmap (cgraph_dump_file,
                                       node->clone.combined_args_to_skip);
                        }
                    }
                  cgraph_materialize_clone (node);
                  stabilized = false;
                }
            }
        }
    }
  for (node = cgraph_nodes; node; node = node->next)
    if (!node->analyzed && node->callees)
      cgraph_node_remove_callees (node);
  if (cgraph_dump_file)
    fprintf (cgraph_dump_file,
             "Materialization and call site updates done.\n");
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif
  cgraph_remove_unreachable_nodes (false, cgraph_dump_file);
}

#include "gt-cgraphunit.h"