gcc/cgraphunit.c
1 /* Callgraph based interprocedural optimizations.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
3 2011 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This module implements the main driver of the compilation process as well as
23 a few basic interprocedural optimizers.
25 The main scope of this file is to act as an interface between the
26 tree-based front ends and the back end (and middle end).
28 The front end is supposed to use the following functionality:
30 - cgraph_finalize_function
32 This function is called once the front end has parsed the whole body of a
33 function and it is certain that neither the function body nor the declaration will change.
35 (There is one exception needed for implementing GCC extern inline
36 function.)
38 - varpool_finalize_variable
40 This function has the same behavior as the above but is used for static
41 variables.
43 - cgraph_finalize_compilation_unit
45 This function is called once the (source level) compilation unit is finalized
46 and it will no longer change.
48 Here the call-graph construction and local function analysis take
49 place. Bodies of unreachable functions are released to
50 conserve memory usage.
52 The function can be called multiple times when multiple source level
53 compilation units are combined (such as in the C front end).
55 - cgraph_optimize
57 In this unit-at-a-time compilation the intraprocedural analysis takes
58 place here. In particular, static functions whose address is never
59 taken are marked as local. The back end can then use this information to
60 modify calling conventions, do better inlining or similar optimizations.
62 - cgraph_mark_needed_node
63 - varpool_mark_needed_node
65 When a function or variable is referenced in some hidden way, the call-graph
66 data structure must be updated accordingly by this function.
67 There should be little need to call this function, and all the references
68 should be made explicit to the cgraph code. At present these functions are
69 used by the C++ front end to explicitly mark the keyed methods.
71 - analyze_expr callback
73 This function is responsible for lowering tree nodes not understood by
74 generic code into understandable ones or alternatively marking
75 callgraph and varpool nodes referenced by them as needed.
77 ??? On tree-ssa, genericizing should take place here and we would avoid the
78 need for these hooks (replacing them by a genericizing hook).
80 Analysis of all functions is deferred
81 to cgraph_finalize_compilation_unit and expansion to cgraph_optimize.
83 In cgraph_finalize_compilation_unit the reachable functions are
84 analyzed. During analysis the call-graph edges from reachable
85 functions are constructed and their destinations are marked as
86 reachable. References to functions and variables are discovered too
87 and variables found to be needed are output to the assembly file. Via the
88 mark_referenced call in assemble_variable, functions referenced by
89 static variables are noticed too.
91 The intra-procedural information is produced and its existence
92 indicated by global_info_ready. Once this flag is set it is impossible
93 to change a function from !reachable to reachable and thus
94 assemble_variable no longer calls mark_referenced.
96 Finally the call-graph is topologically sorted and all reachable functions
97 that have not been completely inlined or are not external are output.
99 ??? It is possible that a reference to a function or variable is optimized
100 out. We cannot deal with this nicely because the topological order is not
101 suitable for it. For tree-ssa we may consider another pass doing
102 optimization and re-discovering reachable functions.
104 ??? Reorganize code so variables are output very last and only if they
105 really have been referenced by the produced code, so we catch more cases
106 where the reference has been optimized out. */
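/* Editor's sketch, not part of the original file: roughly how a front end is
   expected to drive the interface described above.  The parsing loop and the
   parse_next_function helper are hypothetical; the cgraph entry points are
   the ones documented in this comment.

     void
     frontend_compile_unit (void)
     {
       tree fndecl;

       /* Hand each function over once its body and declaration are final.  */
       while ((fndecl = parse_next_function ()) != NULL_TREE)
         cgraph_finalize_function (fndecl, false);

       /* All source-level input has been seen: build the callgraph, analyze
          reachable functions and release unreachable bodies.  */
       cgraph_finalize_compilation_unit ();
     }

   In this revision cgraph_finalize_compilation_unit itself ends by calling
   cgraph_optimize, which runs the IPA passes and expands the functions that
   must be output.  */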
109 #include "config.h"
110 #include "system.h"
111 #include "coretypes.h"
112 #include "tm.h"
113 #include "tree.h"
114 #include "rtl.h"
115 #include "tree-flow.h"
116 #include "tree-inline.h"
117 #include "langhooks.h"
118 #include "pointer-set.h"
119 #include "toplev.h"
120 #include "flags.h"
121 #include "ggc.h"
122 #include "debug.h"
123 #include "target.h"
124 #include "cgraph.h"
125 #include "diagnostic.h"
126 #include "tree-pretty-print.h"
127 #include "gimple-pretty-print.h"
128 #include "timevar.h"
129 #include "params.h"
130 #include "fibheap.h"
131 #include "intl.h"
132 #include "function.h"
133 #include "ipa-prop.h"
134 #include "gimple.h"
135 #include "tree-iterator.h"
136 #include "tree-pass.h"
137 #include "tree-dump.h"
138 #include "output.h"
139 #include "coverage.h"
140 #include "plugin.h"
141 #include "ipa-inline.h"
142 #include "ipa-utils.h"
144 static void cgraph_expand_all_functions (void);
145 static void cgraph_mark_functions_to_output (void);
146 static void cgraph_expand_function (struct cgraph_node *);
147 static void cgraph_output_pending_asms (void);
149 FILE *cgraph_dump_file;
151 /* Used for vtable lookup in thunk adjusting. */
152 static GTY (()) tree vtable_entry_type;
154 /* Determine if function DECL is needed. That is, visible to something
155 either outside this translation unit or something magic in the system
156 configury. */
158 bool
159 cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
161 /* If the user told us it is used, then it must be so. */
162 if (node->local.externally_visible)
163 return true;
165 /* ??? If the assembler name is set by hand, it is possible to assemble
166 the name later after finalizing the function and the fact is noticed
167 in assemble_name then. This is arguably a bug. */
168 if (DECL_ASSEMBLER_NAME_SET_P (decl)
169 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
170 return true;
172 /* With -fkeep-inline-functions we are keeping all inline functions except
173 for extern inline ones. */
174 if (flag_keep_inline_functions
175 && DECL_DECLARED_INLINE_P (decl)
176 && !DECL_EXTERNAL (decl)
177 && !DECL_DISREGARD_INLINE_LIMITS (decl))
178 return true;
180 /* If we decided it was needed before, but at the time we didn't have
181 the body of the function available, then it's still needed. We have
182 to go back and re-check its dependencies now. */
183 if (node->needed)
184 return true;
186 /* Externally visible functions must be output. The exception is
187 COMDAT functions that must be output only when they are needed.
189 When not optimizing, also output static functions (see
190 PR24561), but don't do so for always_inline functions, functions
191 declared inline and nested functions. These were optimized out
192 in the original implementation and it is unclear whether we want
193 to change the behavior here. */
194 if (((TREE_PUBLIC (decl)
195 || (!optimize
196 && !DECL_DISREGARD_INLINE_LIMITS (decl)
197 && !DECL_DECLARED_INLINE_P (decl)
198 && !(DECL_CONTEXT (decl)
199 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)))
200 && !flag_whole_program
201 && !flag_lto)
202 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
203 return true;
205 return false;
208 /* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
209 functions into the callgraph so that they look like ordinary reachable
210 functions inserted into the callgraph already at construction time. */
212 bool
213 cgraph_process_new_functions (void)
215 bool output = false;
216 tree fndecl;
217 struct cgraph_node *node;
219 varpool_analyze_pending_decls ();
220 /* Note that this queue may grow as it is being processed, as the new
221 functions may generate new ones. */
222 while (cgraph_new_nodes)
224 node = cgraph_new_nodes;
225 fndecl = node->decl;
226 cgraph_new_nodes = cgraph_new_nodes->next_needed;
227 switch (cgraph_state)
229 case CGRAPH_STATE_CONSTRUCTION:
230 /* At construction time we just need to finalize the function and move
231 it into the reachable functions list. */
233 node->next_needed = NULL;
234 cgraph_finalize_function (fndecl, false);
235 cgraph_mark_reachable_node (node);
236 output = true;
237 cgraph_call_function_insertion_hooks (node);
238 break;
240 case CGRAPH_STATE_IPA:
241 case CGRAPH_STATE_IPA_SSA:
242 /* When IPA optimization has already started, do all essential
243 transformations that have already been performed on the whole
244 cgraph but not on this function. */
246 gimple_register_cfg_hooks ();
247 if (!node->analyzed)
248 cgraph_analyze_function (node);
249 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
250 current_function_decl = fndecl;
251 if ((cgraph_state == CGRAPH_STATE_IPA_SSA
252 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
253 /* When not optimizing, be sure we run early local passes anyway
254 to expand OMP. */
255 || !optimize)
256 execute_pass_list (pass_early_local_passes.pass.sub);
257 else
258 compute_inline_parameters (node, true);
259 free_dominance_info (CDI_POST_DOMINATORS);
260 free_dominance_info (CDI_DOMINATORS);
261 pop_cfun ();
262 current_function_decl = NULL;
263 cgraph_call_function_insertion_hooks (node);
264 break;
266 case CGRAPH_STATE_EXPANSION:
267 /* Functions created during expansion shall be compiled
268 directly. */
269 node->process = 0;
270 cgraph_call_function_insertion_hooks (node);
271 cgraph_expand_function (node);
272 break;
274 default:
275 gcc_unreachable ();
276 break;
278 varpool_analyze_pending_decls ();
280 return output;
283 /* As a GCC extension we allow redefinition of the function. The
284 semantics when the two bodies differ are not well defined.
285 We replace the old body with the new body, so in unit-at-a-time mode
286 we always use the new body, while in normal mode we may end up with the
287 old body inlined into some functions and the new body expanded and
288 inlined in others.
290 ??? It may make more sense to use one body for inlining and the other
291 body for expanding the function but this is difficult to do. */
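/* Editor's sketch, illustrative only (not from the original source): the GNU
   "extern inline" redefinition case described above, as it appears in user
   code compiled in gnu89 mode:

     extern inline int get_answer (void) { return 42; }  // old, inline-only body
     int use_early (void) { return get_answer (); }      // may inline the old body

     int get_answer (void) { return 43; }                 // redefinition: new body
     int use_late (void) { return get_answer (); }        // uses the new body

   cgraph_reset_node below discards everything derived from the old body
   (local/global/rtl summaries and the outgoing call edges) so analysis can
   restart from the new body.  */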
293 static void
294 cgraph_reset_node (struct cgraph_node *node)
296 /* If node->process is set, then we have already begun whole-unit analysis.
297 This is *not* testing for whether we've already emitted the function.
298 That case can be sort-of legitimately seen with real function redefinition
299 errors. I would argue that the front end should never present us with
300 such a case, but don't enforce that for now. */
301 gcc_assert (!node->process);
303 /* Reset our data structures so we can analyze the function again. */
304 memset (&node->local, 0, sizeof (node->local));
305 memset (&node->global, 0, sizeof (node->global));
306 memset (&node->rtl, 0, sizeof (node->rtl));
307 node->analyzed = false;
308 node->local.redefined_extern_inline = true;
309 node->local.finalized = false;
311 cgraph_node_remove_callees (node);
313 /* We may need to re-queue the node for assembling in case
314 we already processed it and ignored it as not needed or got
315 a re-declaration in IMA mode. */
316 if (node->reachable)
318 struct cgraph_node *n;
320 for (n = cgraph_nodes_queue; n; n = n->next_needed)
321 if (n == node)
322 break;
323 if (!n)
324 node->reachable = 0;
328 static void
329 cgraph_lower_function (struct cgraph_node *node)
331 if (node->lowered)
332 return;
334 if (node->nested)
335 lower_nested_functions (node->decl);
336 gcc_assert (!node->nested);
338 tree_lowering_passes (node->decl);
339 node->lowered = true;
342 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
343 logic in effect. If NESTED is true, then our caller cannot stand to have
344 the garbage collector run at the moment. We would need to either create
345 a new GC context, or just not compile right now. */
347 void
348 cgraph_finalize_function (tree decl, bool nested)
350 struct cgraph_node *node = cgraph_get_create_node (decl);
352 if (node->local.finalized)
353 cgraph_reset_node (node);
355 notice_global_symbol (decl);
356 node->local.finalized = true;
357 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
359 if (cgraph_decide_is_function_needed (node, decl))
360 cgraph_mark_needed_node (node);
362 /* Since we reclaim unreachable nodes at the end of every language
363 level unit, we need to be conservative about possible entry points
364 there. */
365 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
366 || DECL_STATIC_CONSTRUCTOR (decl)
367 || DECL_STATIC_DESTRUCTOR (decl)
368 /* COMDAT virtual functions may be referenced by a vtable from
369 another compilation unit. Still we want to devirtualize calls
370 to those, so we need to analyze them.
371 FIXME: We should introduce "may" edges for this purpose and update
372 their handling in unreachable function removal and the inliner too. */
373 || (DECL_VIRTUAL_P (decl)
374 && optimize && (DECL_COMDAT (decl) || DECL_EXTERNAL (decl))))
375 cgraph_mark_reachable_node (node);
377 /* If we've not yet emitted decl, tell the debug info about it. */
378 if (!TREE_ASM_WRITTEN (decl))
379 (*debug_hooks->deferred_inline_function) (decl);
381 /* Possibly warn about unused parameters. */
382 if (warn_unused_parameter)
383 do_warn_unused_parameter (decl);
385 if (!nested)
386 ggc_collect ();
389 /* The C99 extern inline keywords allow changing of a declaration after the
390 function has been finalized. We need to re-decide whether we want to mark the function as
391 needed then. */
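/* Editor's sketch, illustrative only (not from the original source): the C99
   inline situation this comment refers to, in user code compiled with
   -std=c99:

     inline int f (void) { return 0; }  // inline definition only; f not yet needed
     extern inline int f (void);        // later declaration: this TU must emit f

   After the second declaration the already-finalized node has to be
   re-evaluated, which is what cgraph_mark_if_needed below does.  */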
393 void
394 cgraph_mark_if_needed (tree decl)
396 struct cgraph_node *node = cgraph_get_node (decl);
397 if (node->local.finalized && cgraph_decide_is_function_needed (node, decl))
398 cgraph_mark_needed_node (node);
401 /* Return TRUE if NODE2 is equivalent to NODE or its clone. */
402 static bool
403 clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
405 while (node != node2 && node2)
406 node2 = node2->clone_of;
407 return node2 != NULL;
410 /* Verify edge E count and frequency. */
412 static bool
413 verify_edge_count_and_frequency (struct cgraph_edge *e)
415 bool error_found = false;
416 if (e->count < 0)
418 error ("caller edge count is negative");
419 error_found = true;
421 if (e->frequency < 0)
423 error ("caller edge frequency is negative");
424 error_found = true;
426 if (e->frequency > CGRAPH_FREQ_MAX)
428 error ("caller edge frequency is too large");
429 error_found = true;
431 if (gimple_has_body_p (e->caller->decl)
432 && !e->caller->global.inlined_to
433 /* FIXME: Inline-analysis sets frequency to 0 when edge is optimized out.
434 Remove this once edges are actually removed from the function at that time. */
435 && (e->frequency
436 || (inline_edge_summary_vec
437 && !inline_edge_summary (e)->predicate))
438 && (e->frequency
439 != compute_call_stmt_bb_frequency (e->caller->decl,
440 gimple_bb (e->call_stmt))))
442 error ("caller edge frequency %i does not match BB frequency %i",
443 e->frequency,
444 compute_call_stmt_bb_frequency (e->caller->decl,
445 gimple_bb (e->call_stmt)));
446 error_found = true;
448 return error_found;
451 /* Switch to THIS_CFUN if needed and print STMT to stderr. */
452 static void
453 cgraph_debug_gimple_stmt (struct function *this_cfun, gimple stmt)
455 /* debug_gimple_stmt needs correct cfun */
456 if (cfun != this_cfun)
457 set_cfun (this_cfun);
458 debug_gimple_stmt (stmt);
461 /* Verify the consistency of cgraph node NODE. */
462 DEBUG_FUNCTION void
463 verify_cgraph_node (struct cgraph_node *node)
465 struct cgraph_edge *e;
466 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
467 basic_block this_block;
468 gimple_stmt_iterator gsi;
469 bool error_found = false;
471 if (seen_error ())
472 return;
474 timevar_push (TV_CGRAPH_VERIFY);
475 for (e = node->callees; e; e = e->next_callee)
476 if (e->aux)
478 error ("aux field set for edge %s->%s",
479 identifier_to_locale (cgraph_node_name (e->caller)),
480 identifier_to_locale (cgraph_node_name (e->callee)));
481 error_found = true;
483 if (node->count < 0)
485 error ("execution count is negative");
486 error_found = true;
488 if (node->global.inlined_to && node->local.externally_visible)
490 error ("externally visible inline clone");
491 error_found = true;
493 if (node->global.inlined_to && node->address_taken)
495 error ("inline clone with address taken");
496 error_found = true;
498 if (node->global.inlined_to && node->needed)
500 error ("inline clone is needed");
501 error_found = true;
503 for (e = node->indirect_calls; e; e = e->next_callee)
505 if (e->aux)
507 error ("aux field set for indirect edge from %s",
508 identifier_to_locale (cgraph_node_name (e->caller)));
509 error_found = true;
511 if (!e->indirect_unknown_callee
512 || !e->indirect_info)
514 error ("An indirect edge from %s is not marked as indirect or has "
515 "associated indirect_info, the corresponding statement is: ",
516 identifier_to_locale (cgraph_node_name (e->caller)));
517 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
518 error_found = true;
521 for (e = node->callers; e; e = e->next_caller)
523 if (verify_edge_count_and_frequency (e))
524 error_found = true;
525 if (!e->inline_failed)
527 if (node->global.inlined_to
528 != (e->caller->global.inlined_to
529 ? e->caller->global.inlined_to : e->caller))
531 error ("inlined_to pointer is wrong");
532 error_found = true;
534 if (node->callers->next_caller)
536 error ("multiple inline callers");
537 error_found = true;
540 else
541 if (node->global.inlined_to)
543 error ("inlined_to pointer set for noninline callers");
544 error_found = true;
547 for (e = node->indirect_calls; e; e = e->next_callee)
548 if (verify_edge_count_and_frequency (e))
549 error_found = true;
550 if (!node->callers && node->global.inlined_to)
552 error ("inlined_to pointer is set but no predecessors found");
553 error_found = true;
555 if (node->global.inlined_to == node)
557 error ("inlined_to pointer refers to itself");
558 error_found = true;
561 if (!cgraph_get_node (node->decl))
563 error ("node not found in cgraph_hash");
564 error_found = true;
567 if (node->clone_of)
569 struct cgraph_node *n;
570 for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
571 if (n == node)
572 break;
573 if (!n)
575 error ("node has wrong clone_of");
576 error_found = true;
579 if (node->clones)
581 struct cgraph_node *n;
582 for (n = node->clones; n; n = n->next_sibling_clone)
583 if (n->clone_of != node)
584 break;
585 if (n)
587 error ("node has wrong clone list");
588 error_found = true;
591 if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
593 error ("node is in clone list but it is not clone");
594 error_found = true;
596 if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
598 error ("node has wrong prev_clone pointer");
599 error_found = true;
601 if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
603 error ("double linked list of clones corrupted");
604 error_found = true;
606 if (node->same_comdat_group)
608 struct cgraph_node *n = node->same_comdat_group;
610 if (!DECL_ONE_ONLY (node->decl))
612 error ("non-DECL_ONE_ONLY node in a same_comdat_group list");
613 error_found = true;
615 if (n == node)
617 error ("node is alone in a comdat group");
618 error_found = true;
622 if (!n->same_comdat_group)
624 error ("same_comdat_group is not a circular list");
625 error_found = true;
626 break;
628 n = n->same_comdat_group;
630 while (n != node);
633 if (node->analyzed && node->thunk.thunk_p)
635 if (!node->callees)
637 error ("No edge out of thunk node");
638 error_found = true;
640 else if (node->callees->next_callee)
642 error ("More than one edge out of thunk node");
643 error_found = true;
645 if (gimple_has_body_p (node->decl))
647 error ("Thunk is not supposed to have body");
648 error_found = true;
651 else if (node->analyzed && gimple_has_body_p (node->decl)
652 && !TREE_ASM_WRITTEN (node->decl)
653 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
654 && !flag_wpa)
656 if (this_cfun->cfg)
658 /* The nodes we're interested in are never shared, so walk
659 the tree ignoring duplicates. */
660 struct pointer_set_t *visited_nodes = pointer_set_create ();
661 /* Reach the trees by walking over the CFG, and note the
662 enclosing basic-blocks in the call edges. */
663 FOR_EACH_BB_FN (this_block, this_cfun)
664 for (gsi = gsi_start_bb (this_block);
665 !gsi_end_p (gsi);
666 gsi_next (&gsi))
668 gimple stmt = gsi_stmt (gsi);
669 if (is_gimple_call (stmt))
671 struct cgraph_edge *e = cgraph_edge (node, stmt);
672 tree decl = gimple_call_fndecl (stmt);
673 if (e)
675 if (e->aux)
677 error ("shared call_stmt:");
678 cgraph_debug_gimple_stmt (this_cfun, stmt);
679 error_found = true;
681 if (!e->indirect_unknown_callee)
683 if (e->callee->same_body_alias)
685 error ("edge points to same body alias:");
686 debug_tree (e->callee->decl);
687 error_found = true;
689 else if (!e->callee->global.inlined_to
690 && decl
691 && cgraph_get_node (decl)
692 && (e->callee->former_clone_of
693 != cgraph_get_node (decl)->decl)
694 && !clone_of_p (cgraph_get_node (decl),
695 e->callee))
697 error ("edge points to wrong declaration:");
698 debug_tree (e->callee->decl);
699 fprintf (stderr," Instead of:");
700 debug_tree (decl);
701 error_found = true;
704 else if (decl)
706 error ("an indirect edge with unknown callee "
707 "corresponding to a call_stmt with "
708 "a known declaration:");
709 error_found = true;
710 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
712 e->aux = (void *)1;
714 else if (decl)
716 error ("missing callgraph edge for call stmt:");
717 cgraph_debug_gimple_stmt (this_cfun, stmt);
718 error_found = true;
722 pointer_set_destroy (visited_nodes);
724 else
725 /* No CFG available?! */
726 gcc_unreachable ();
728 for (e = node->callees; e; e = e->next_callee)
730 if (!e->aux)
732 error ("edge %s->%s has no corresponding call_stmt",
733 identifier_to_locale (cgraph_node_name (e->caller)),
734 identifier_to_locale (cgraph_node_name (e->callee)));
735 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
736 error_found = true;
738 e->aux = 0;
740 for (e = node->indirect_calls; e; e = e->next_callee)
742 if (!e->aux)
744 error ("an indirect edge from %s has no corresponding call_stmt",
745 identifier_to_locale (cgraph_node_name (e->caller)));
746 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
747 error_found = true;
749 e->aux = 0;
752 if (error_found)
754 dump_cgraph_node (stderr, node);
755 internal_error ("verify_cgraph_node failed");
757 timevar_pop (TV_CGRAPH_VERIFY);
760 /* Verify whole cgraph structure. */
761 DEBUG_FUNCTION void
762 verify_cgraph (void)
764 struct cgraph_node *node;
766 if (seen_error ())
767 return;
769 for (node = cgraph_nodes; node; node = node->next)
770 verify_cgraph_node (node);
773 /* Output all asm statements we have stored up to be output. */
775 static void
776 cgraph_output_pending_asms (void)
778 struct cgraph_asm_node *can;
780 if (seen_error ())
781 return;
783 for (can = cgraph_asm_nodes; can; can = can->next)
784 assemble_asm (can->asm_str);
785 cgraph_asm_nodes = NULL;
788 /* Analyze the function scheduled to be output. */
789 void
790 cgraph_analyze_function (struct cgraph_node *node)
792 tree save = current_function_decl;
793 tree decl = node->decl;
795 if (node->thunk.thunk_p)
797 cgraph_create_edge (node, cgraph_get_node (node->thunk.alias),
798 NULL, 0, CGRAPH_FREQ_BASE);
800 else
802 current_function_decl = decl;
803 push_cfun (DECL_STRUCT_FUNCTION (decl));
805 assign_assembler_name_if_neeeded (node->decl);
807 /* Make sure to gimplify bodies only once. While analyzing a
808 function we lower it, which will require gimplified nested
809 functions, so we can end up here with an already gimplified
810 body. */
811 if (!gimple_body (decl))
812 gimplify_function_tree (decl);
813 dump_function (TDI_generic, decl);
815 cgraph_lower_function (node);
816 pop_cfun ();
818 node->analyzed = true;
820 current_function_decl = save;
823 /* Process attributes common for vars and functions. */
825 static void
826 process_common_attributes (tree decl)
828 tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
830 if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
832 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
833 "%<weakref%> attribute should be accompanied with"
834 " an %<alias%> attribute");
835 DECL_WEAK (decl) = 0;
836 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
837 DECL_ATTRIBUTES (decl));
841 /* Look for externally_visible and used attributes and mark cgraph nodes
842 accordingly.
844 We cannot mark the nodes at the point the attributes are processed (in
845 handle_*_attribute) because the copy of the declarations available at that
846 point may not be canonical. For example, in:
848 void f();
849 void f() __attribute__((used));
851 the declaration we see in handle_used_attribute will be the second
852 declaration -- but the front end will subsequently merge that declaration
853 with the original declaration and discard the second declaration.
855 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
857 void f() {}
858 void f() __attribute__((externally_visible));
860 is valid.
862 So, we walk the nodes at the end of the translation unit, applying the
863 attributes at that point. */
865 static void
866 process_function_and_variable_attributes (struct cgraph_node *first,
867 struct varpool_node *first_var)
869 struct cgraph_node *node;
870 struct varpool_node *vnode;
872 for (node = cgraph_nodes; node != first; node = node->next)
874 tree decl = node->decl;
875 if (DECL_PRESERVE_P (decl))
876 cgraph_mark_needed_node (node);
877 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
878 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl))
879 && TREE_PUBLIC (node->decl))
881 if (node->local.finalized)
882 cgraph_mark_needed_node (node);
884 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
886 if (! TREE_PUBLIC (node->decl))
887 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
888 "%<externally_visible%>"
889 " attribute have effect only on public objects");
890 else if (node->local.finalized)
891 cgraph_mark_needed_node (node);
893 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
894 && node->local.finalized)
896 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
897 "%<weakref%> attribute ignored"
898 " because function is defined");
899 DECL_WEAK (decl) = 0;
900 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
901 DECL_ATTRIBUTES (decl));
903 process_common_attributes (decl);
905 for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
907 tree decl = vnode->decl;
908 if (DECL_PRESERVE_P (decl))
910 vnode->force_output = true;
911 if (vnode->finalized)
912 varpool_mark_needed_node (vnode);
914 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
915 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl))
916 && TREE_PUBLIC (vnode->decl))
918 if (vnode->finalized)
919 varpool_mark_needed_node (vnode);
921 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
923 if (! TREE_PUBLIC (vnode->decl))
924 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
925 "%<externally_visible%>"
926 " attribute have effect only on public objects");
927 else if (vnode->finalized)
928 varpool_mark_needed_node (vnode);
930 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
931 && vnode->finalized
932 && DECL_INITIAL (decl))
934 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
935 "%<weakref%> attribute ignored"
936 " because variable is initialized");
937 DECL_WEAK (decl) = 0;
938 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
939 DECL_ATTRIBUTES (decl));
941 process_common_attributes (decl);
945 /* Process the CGRAPH_NODES_NEEDED queue, analyze each function (and transitively
946 each reachable function) and build the cgraph.
947 The function can be called multiple times after inserting new nodes
948 into the beginning of the queue. Just the new part of the queue is re-scanned then. */
950 static void
951 cgraph_analyze_functions (void)
953 /* Keep track of already processed nodes when called multiple times for
954 intermodule optimization. */
955 static struct cgraph_node *first_analyzed;
956 struct cgraph_node *first_processed = first_analyzed;
957 static struct varpool_node *first_analyzed_var;
958 struct cgraph_node *node, *next;
960 bitmap_obstack_initialize (NULL);
961 process_function_and_variable_attributes (first_processed,
962 first_analyzed_var);
963 first_processed = cgraph_nodes;
964 first_analyzed_var = varpool_nodes;
965 varpool_analyze_pending_decls ();
966 if (cgraph_dump_file)
968 fprintf (cgraph_dump_file, "Initial entry points:");
969 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
970 if (node->needed)
971 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
972 fprintf (cgraph_dump_file, "\n");
974 cgraph_process_new_functions ();
976 /* Propagate reachability flag and lower representation of all reachable
977 functions. In the future, lowering will introduce new functions and
978 new entry points on the way (by template instantiation and virtual
979 method table generation for instance). */
980 while (cgraph_nodes_queue)
982 struct cgraph_edge *edge;
983 tree decl = cgraph_nodes_queue->decl;
985 node = cgraph_nodes_queue;
986 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
987 node->next_needed = NULL;
989 /* ??? It is possible to create an extern inline function and later use the
990 weak alias attribute to kill its body. See
991 gcc.c-torture/compile/20011119-1.c */
992 if (!DECL_STRUCT_FUNCTION (decl)
993 && !node->thunk.thunk_p)
995 cgraph_reset_node (node);
996 continue;
999 if (!node->analyzed)
1000 cgraph_analyze_function (node);
1002 for (edge = node->callees; edge; edge = edge->next_callee)
1003 if (!edge->callee->reachable)
1004 cgraph_mark_reachable_node (edge->callee);
1005 for (edge = node->callers; edge; edge = edge->next_caller)
1006 if (!edge->caller->reachable && edge->caller->thunk.thunk_p)
1007 cgraph_mark_reachable_node (edge->caller);
1009 if (node->same_comdat_group)
1011 for (next = node->same_comdat_group;
1012 next != node;
1013 next = next->same_comdat_group)
1014 cgraph_mark_reachable_node (next);
1017 /* If decl is a clone of an abstract function, mark that abstract
1018 function so that we don't release its body. The DECL_INITIAL() of that
1019 abstract function declaration will later be needed to output debug
1020 info. */
1021 if (DECL_ABSTRACT_ORIGIN (decl))
1023 struct cgraph_node *origin_node;
1024 origin_node = cgraph_get_node (DECL_ABSTRACT_ORIGIN (decl));
1025 origin_node->abstract_and_needed = true;
1028 /* We finalize local static variables while constructing callgraph
1029 edges. Process their attributes too. */
1030 process_function_and_variable_attributes (first_processed,
1031 first_analyzed_var);
1032 first_processed = cgraph_nodes;
1033 first_analyzed_var = varpool_nodes;
1034 varpool_analyze_pending_decls ();
1035 cgraph_process_new_functions ();
1038 /* Collect entry points to the unit. */
1039 if (cgraph_dump_file)
1041 fprintf (cgraph_dump_file, "Unit entry points:");
1042 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1043 if (node->needed)
1044 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1045 fprintf (cgraph_dump_file, "\n\nInitial ");
1046 dump_cgraph (cgraph_dump_file);
1047 dump_varpool (cgraph_dump_file);
1050 if (cgraph_dump_file)
1051 fprintf (cgraph_dump_file, "\nReclaiming functions:");
1053 for (node = cgraph_nodes; node != first_analyzed; node = next)
1055 tree decl = node->decl;
1056 next = node->next;
1058 if (node->local.finalized && !gimple_has_body_p (decl)
1059 && !node->thunk.thunk_p)
1060 cgraph_reset_node (node);
1062 if (!node->reachable
1063 && (gimple_has_body_p (decl) || node->thunk.thunk_p))
1065 if (cgraph_dump_file)
1066 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1067 cgraph_remove_node (node);
1068 continue;
1070 else
1071 node->next_needed = NULL;
1072 gcc_assert (!node->local.finalized || node->thunk.thunk_p
1073 || gimple_has_body_p (decl));
1074 gcc_assert (node->analyzed == node->local.finalized);
1076 if (cgraph_dump_file)
1078 fprintf (cgraph_dump_file, "\n\nReclaimed ");
1079 dump_cgraph (cgraph_dump_file);
1080 dump_varpool (cgraph_dump_file);
1082 bitmap_obstack_release (NULL);
1083 first_analyzed = cgraph_nodes;
1084 ggc_collect ();
1088 /* Analyze the whole compilation unit once it is parsed completely. */
1090 void
1091 cgraph_finalize_compilation_unit (void)
1093 timevar_push (TV_CGRAPH);
1095 /* If we're here there's no current function anymore. Some frontends
1096 are lazy in clearing these. */
1097 current_function_decl = NULL;
1098 set_cfun (NULL);
1100 /* Do not skip analyzing the functions if there were errors; we would
1101 miss diagnostics for the following functions otherwise. */
1103 /* Emit size functions we didn't inline. */
1104 finalize_size_functions ();
1106 /* Mark alias targets necessary and emit diagnostics. */
1107 finish_aliases_1 ();
1109 if (!quiet_flag)
1111 fprintf (stderr, "\nAnalyzing compilation unit\n");
1112 fflush (stderr);
1115 /* Gimplify and lower all functions, compute reachability and
1116 remove unreachable nodes. */
1117 cgraph_analyze_functions ();
1119 /* Mark alias targets necessary and emit diagnostics. */
1120 finish_aliases_1 ();
1122 /* Gimplify and lower thunks. */
1123 cgraph_analyze_functions ();
1125 /* Finally drive the pass manager. */
1126 cgraph_optimize ();
1128 timevar_pop (TV_CGRAPH);
1132 /* Figure out what functions we want to assemble. */
1134 static void
1135 cgraph_mark_functions_to_output (void)
1137 struct cgraph_node *node;
1138 #ifdef ENABLE_CHECKING
1139 bool check_same_comdat_groups = false;
1141 for (node = cgraph_nodes; node; node = node->next)
1142 gcc_assert (!node->process);
1143 #endif
1145 for (node = cgraph_nodes; node; node = node->next)
1147 tree decl = node->decl;
1148 struct cgraph_edge *e;
1150 gcc_assert (!node->process || node->same_comdat_group);
1151 if (node->process)
1152 continue;
1154 for (e = node->callers; e; e = e->next_caller)
1155 if (e->inline_failed)
1156 break;
1158 /* We need to output all local functions that are used and not
1159 always inlined, as well as those that are reachable from
1160 outside the current compilation unit. */
1161 if (node->analyzed
1162 && !node->thunk.thunk_p
1163 && !node->global.inlined_to
1164 && (!cgraph_only_called_directly_p (node)
1165 || (e && node->reachable))
1166 && !TREE_ASM_WRITTEN (decl)
1167 && !DECL_EXTERNAL (decl))
1169 node->process = 1;
1170 if (node->same_comdat_group)
1172 struct cgraph_node *next;
1173 for (next = node->same_comdat_group;
1174 next != node;
1175 next = next->same_comdat_group)
1176 if (!next->thunk.thunk_p)
1177 next->process = 1;
1180 else if (node->same_comdat_group)
1182 #ifdef ENABLE_CHECKING
1183 check_same_comdat_groups = true;
1184 #endif
1186 else
1188 /* We should've reclaimed all functions that are not needed. */
1189 #ifdef ENABLE_CHECKING
1190 if (!node->global.inlined_to
1191 && gimple_has_body_p (decl)
1192 /* FIXME: in an ltrans unit when the offline copy is outside the partition but inline copies
1193 are inside the partition, we can end up not removing the body since we no longer
1194 have an analyzed node pointing to it. */
1195 && !node->in_other_partition
1196 && !DECL_EXTERNAL (decl))
1198 dump_cgraph_node (stderr, node);
1199 internal_error ("failed to reclaim unneeded function");
1201 #endif
1202 gcc_assert (node->global.inlined_to
1203 || !gimple_has_body_p (decl)
1204 || node->in_other_partition
1205 || DECL_EXTERNAL (decl));
1210 #ifdef ENABLE_CHECKING
1211 if (check_same_comdat_groups)
1212 for (node = cgraph_nodes; node; node = node->next)
1213 if (node->same_comdat_group && !node->process)
1215 tree decl = node->decl;
1216 if (!node->global.inlined_to
1217 && gimple_has_body_p (decl)
1218 /* FIXME: in an ltrans unit when the offline copy is outside the partition but inline copies
1219 are inside the partition, we can end up not removing the body since we no longer
1220 have an analyzed node pointing to it. */
1221 && !node->in_other_partition
1222 && !DECL_EXTERNAL (decl))
1224 dump_cgraph_node (stderr, node);
1225 internal_error ("failed to reclaim unneeded function");
1228 #endif
1231 /* DECL is a FUNCTION_DECL. Initialize datastructures so DECL is a function
1232 in lowered GIMPLE form.
1234 Set current_function_decl and cfun to the newly constructed empty function body.
1235 Return the basic block in the function body. */
1237 static basic_block
1238 init_lowered_empty_function (tree decl)
1240 basic_block bb;
1242 current_function_decl = decl;
1243 allocate_struct_function (decl, false);
1244 gimple_register_cfg_hooks ();
1245 init_empty_tree_cfg ();
1246 init_tree_ssa (cfun);
1247 init_ssa_operands ();
1248 cfun->gimple_df->in_ssa_p = true;
1249 DECL_INITIAL (decl) = make_node (BLOCK);
1251 DECL_SAVED_TREE (decl) = error_mark_node;
1252 cfun->curr_properties |=
1253 (PROP_gimple_lcf | PROP_gimple_leh | PROP_cfg | PROP_referenced_vars |
1254 PROP_ssa);
1256 /* Create BB for body of the function and connect it properly. */
1257 bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
1258 make_edge (ENTRY_BLOCK_PTR, bb, 0);
1259 make_edge (bb, EXIT_BLOCK_PTR, 0);
1261 return bb;
1264 /* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1265 offset indicated by VIRTUAL_OFFSET, if that is
1266 non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
1267 zero for a result adjusting thunk. */
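/* Editor's sketch, not part of the original file: the statement sequence
   thunk_adjust emits for a this-adjusting thunk that has both a fixed and a
   virtual offset, written as pseudo-GIMPLE.  The temporary names match the
   ones created below (vptr, vtableaddr, vcalloffset, offset, adjusted_this):

     ptr         = ptr + FIXED_OFFSET;          // constant this-adjustment
     vptr        = (vtable_entry_type **) ptr;  // the vptr is at offset zero
     vtableaddr  = *vptr;                       // load the vtable pointer
     vtableaddr  = vtableaddr + VIRTUAL_OFFSET; // address of the vcall-offset slot
     vcalloffset = *vtableaddr;                 // load the vcall offset
     offset      = (sizetype) vcalloffset;
     ptr         = ptr + offset;                // final adjusted pointer
     adjusted_this = ptr;                       // returned to the caller  */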
1269 static tree
1270 thunk_adjust (gimple_stmt_iterator * bsi,
1271 tree ptr, bool this_adjusting,
1272 HOST_WIDE_INT fixed_offset, tree virtual_offset)
1274 gimple stmt;
1275 tree ret;
1277 if (this_adjusting
1278 && fixed_offset != 0)
1280 stmt = gimple_build_assign (ptr,
1281 fold_build2_loc (input_location,
1282 POINTER_PLUS_EXPR,
1283 TREE_TYPE (ptr), ptr,
1284 size_int (fixed_offset)));
1285 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1288 /* If there's a virtual offset, look up that value in the vtable and
1289 adjust the pointer again. */
1290 if (virtual_offset)
1292 tree vtabletmp;
1293 tree vtabletmp2;
1294 tree vtabletmp3;
1295 tree offsettmp;
1297 if (!vtable_entry_type)
1299 tree vfunc_type = make_node (FUNCTION_TYPE);
1300 TREE_TYPE (vfunc_type) = integer_type_node;
1301 TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
1302 layout_type (vfunc_type);
1304 vtable_entry_type = build_pointer_type (vfunc_type);
1307 vtabletmp =
1308 create_tmp_var (build_pointer_type
1309 (build_pointer_type (vtable_entry_type)), "vptr");
1311 /* The vptr is always at offset zero in the object. */
1312 stmt = gimple_build_assign (vtabletmp,
1313 build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
1314 ptr));
1315 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1316 mark_symbols_for_renaming (stmt);
1317 find_referenced_vars_in (stmt);
1319 /* Form the vtable address. */
1320 vtabletmp2 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp)),
1321 "vtableaddr");
1322 stmt = gimple_build_assign (vtabletmp2,
1323 build_simple_mem_ref (vtabletmp));
1324 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1325 mark_symbols_for_renaming (stmt);
1326 find_referenced_vars_in (stmt);
1328 /* Find the entry with the vcall offset. */
1329 stmt = gimple_build_assign (vtabletmp2,
1330 fold_build2_loc (input_location,
1331 POINTER_PLUS_EXPR,
1332 TREE_TYPE (vtabletmp2),
1333 vtabletmp2,
1334 fold_convert (sizetype,
1335 virtual_offset)));
1336 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1338 /* Get the offset itself. */
1339 vtabletmp3 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp2)),
1340 "vcalloffset");
1341 stmt = gimple_build_assign (vtabletmp3,
1342 build_simple_mem_ref (vtabletmp2));
1343 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1344 mark_symbols_for_renaming (stmt);
1345 find_referenced_vars_in (stmt);
1347 /* Cast to sizetype. */
1348 offsettmp = create_tmp_var (sizetype, "offset");
1349 stmt = gimple_build_assign (offsettmp, fold_convert (sizetype, vtabletmp3));
1350 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1351 mark_symbols_for_renaming (stmt);
1352 find_referenced_vars_in (stmt);
1354 /* Adjust the `this' pointer. */
1355 ptr = fold_build2_loc (input_location,
1356 POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
1357 offsettmp);
1360 if (!this_adjusting
1361 && fixed_offset != 0)
1362 /* Adjust the pointer by the constant. */
1364 tree ptrtmp;
1366 if (TREE_CODE (ptr) == VAR_DECL)
1367 ptrtmp = ptr;
1368 else
1370 ptrtmp = create_tmp_var (TREE_TYPE (ptr), "ptr");
1371 stmt = gimple_build_assign (ptrtmp, ptr);
1372 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1373 mark_symbols_for_renaming (stmt);
1374 find_referenced_vars_in (stmt);
1376 ptr = fold_build2_loc (input_location,
1377 POINTER_PLUS_EXPR, TREE_TYPE (ptrtmp), ptrtmp,
1378 size_int (fixed_offset));
1381 /* Emit the statement and gimplify the adjustment expression. */
1382 ret = create_tmp_var (TREE_TYPE (ptr), "adjusted_this");
1383 stmt = gimple_build_assign (ret, ptr);
1384 mark_symbols_for_renaming (stmt);
1385 find_referenced_vars_in (stmt);
1386 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1388 return ret;
1391 /* Produce assembler for thunk NODE. */
1393 static void
1394 assemble_thunk (struct cgraph_node *node)
1396 bool this_adjusting = node->thunk.this_adjusting;
1397 HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
1398 HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
1399 tree virtual_offset = NULL;
1400 tree alias = node->thunk.alias;
1401 tree thunk_fndecl = node->decl;
1402 tree a = DECL_ARGUMENTS (thunk_fndecl);
1404 current_function_decl = thunk_fndecl;
1406 /* Ensure thunks are emitted in their correct sections. */
1407 resolve_unique_section (thunk_fndecl, 0, flag_function_sections);
1409 if (this_adjusting
1410 && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1411 virtual_value, alias))
1413 const char *fnname;
1414 tree fn_block;
1416 DECL_RESULT (thunk_fndecl)
1417 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1418 RESULT_DECL, 0, integer_type_node);
1419 fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
1421 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1422 create one. */
1423 fn_block = make_node (BLOCK);
1424 BLOCK_VARS (fn_block) = a;
1425 DECL_INITIAL (thunk_fndecl) = fn_block;
1426 init_function_start (thunk_fndecl);
1427 cfun->is_thunk = 1;
1428 assemble_start_function (thunk_fndecl, fnname);
1430 targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1431 fixed_offset, virtual_value, alias);
1433 assemble_end_function (thunk_fndecl, fnname);
1434 init_insn_lengths ();
1435 free_after_compilation (cfun);
1436 set_cfun (NULL);
1437 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1438 node->thunk.thunk_p = false;
1439 node->analyzed = false;
1441 else
1443 tree restype;
1444 basic_block bb, then_bb, else_bb, return_bb;
1445 gimple_stmt_iterator bsi;
1446 int nargs = 0;
1447 tree arg;
1448 int i;
1449 tree resdecl;
1450 tree restmp = NULL;
1451 VEC(tree, heap) *vargs;
1453 gimple call;
1454 gimple ret;
1456 DECL_IGNORED_P (thunk_fndecl) = 1;
1457 bitmap_obstack_initialize (NULL);
1459 if (node->thunk.virtual_offset_p)
1460 virtual_offset = size_int (virtual_value);
1462 /* Build the return declaration for the function. */
1463 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1464 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1466 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1467 DECL_ARTIFICIAL (resdecl) = 1;
1468 DECL_IGNORED_P (resdecl) = 1;
1469 DECL_RESULT (thunk_fndecl) = resdecl;
1471 else
1472 resdecl = DECL_RESULT (thunk_fndecl);
1474 bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl);
1476 bsi = gsi_start_bb (bb);
1478 /* Build call to the function being thunked. */
1479 if (!VOID_TYPE_P (restype))
1481 if (!is_gimple_reg_type (restype))
1483 restmp = resdecl;
1484 add_local_decl (cfun, restmp);
1485 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
1487 else
1488 restmp = create_tmp_var_raw (restype, "retval");
1491 for (arg = a; arg; arg = DECL_CHAIN (arg))
1492 nargs++;
1493 vargs = VEC_alloc (tree, heap, nargs);
1494 if (this_adjusting)
1495 VEC_quick_push (tree, vargs,
1496 thunk_adjust (&bsi,
1497 a, 1, fixed_offset,
1498 virtual_offset));
1499 else
1500 VEC_quick_push (tree, vargs, a);
1501 for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
1502 VEC_quick_push (tree, vargs, arg);
1503 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
1504 VEC_free (tree, heap, vargs);
1505 gimple_call_set_cannot_inline (call, true);
1506 gimple_call_set_from_thunk (call, true);
1507 if (restmp)
1508 gimple_call_set_lhs (call, restmp);
1509 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
1510 mark_symbols_for_renaming (call);
1511 find_referenced_vars_in (call);
1512 update_stmt (call);
1514 if (restmp && !this_adjusting)
1516 tree true_label = NULL_TREE;
1518 if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
1520 gimple stmt;
1521 /* If the return type is a pointer, we need to
1522 protect against NULL. We know there will be an
1523 adjustment, because that's why we're emitting a
1524 thunk. */
1525 then_bb = create_basic_block (NULL, (void *) 0, bb);
1526 return_bb = create_basic_block (NULL, (void *) 0, then_bb);
1527 else_bb = create_basic_block (NULL, (void *) 0, else_bb);
1528 remove_edge (single_succ_edge (bb));
1529 true_label = gimple_block_label (then_bb);
1530 stmt = gimple_build_cond (NE_EXPR, restmp,
1531 build_zero_cst (TREE_TYPE (restmp)),
1532 NULL_TREE, NULL_TREE);
1533 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1534 make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1535 make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1536 make_edge (return_bb, EXIT_BLOCK_PTR, 0);
1537 make_edge (then_bb, return_bb, EDGE_FALLTHRU);
1538 make_edge (else_bb, return_bb, EDGE_FALLTHRU);
1539 bsi = gsi_last_bb (then_bb);
1542 restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
1543 fixed_offset, virtual_offset);
1544 if (true_label)
1546 gimple stmt;
1547 bsi = gsi_last_bb (else_bb);
1548 stmt = gimple_build_assign (restmp,
1549 build_zero_cst (TREE_TYPE (restmp)));
1550 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1551 bsi = gsi_last_bb (return_bb);
1554 else
1555 gimple_call_set_tail (call, true);
1557 /* Build return value. */
1558 ret = gimple_build_return (restmp);
1559 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
1561 delete_unreachable_blocks ();
1562 update_ssa (TODO_update_ssa);
1564 /* Since we want to emit the thunk, we explicitly mark its name as
1565 referenced. */
1566 node->thunk.thunk_p = false;
1567 cgraph_node_remove_callees (node);
1568 cgraph_add_new_function (thunk_fndecl, true);
1569 bitmap_obstack_release (NULL);
1571 current_function_decl = NULL;
1575 /* Assemble thunks associated with NODE. */
1577 static void
1578 assemble_thunks (struct cgraph_node *node)
1580 struct cgraph_edge *e;
1581 for (e = node->callers; e;)
1582 if (e->caller->thunk.thunk_p)
1584 struct cgraph_node *thunk = e->caller;
1586 e = e->next_caller;
1587 assemble_thunks (thunk);
1588 assemble_thunk (thunk);
1590 else
1591 e = e->next_caller;
1594 /* Expand function specified by NODE. */
1596 static void
1597 cgraph_expand_function (struct cgraph_node *node)
1599 tree decl = node->decl;
1601 /* We ought to not compile any inline clones. */
1602 gcc_assert (!node->global.inlined_to);
1604 announce_function (decl);
1605 node->process = 0;
1606 if (node->same_body)
1608 struct cgraph_node *alias, *next;
1609 bool saved_alias = node->alias;
1610 for (alias = node->same_body;
1611 alias && alias->next; alias = alias->next)
1613 /* Walk aliases in the order they were created; it is possible that
1614 thunks refer to the aliases made earlier. */
1615 for (; alias; alias = next)
1617 next = alias->previous;
1618 if (!alias->thunk.thunk_p)
1619 assemble_alias (alias->decl,
1620 DECL_ASSEMBLER_NAME (alias->thunk.alias));
1622 node->alias = saved_alias;
1623 cgraph_process_new_functions ();
1626 assemble_thunks (node);
1627 gcc_assert (node->lowered);
1629 /* Generate RTL for the body of DECL. */
1630 tree_rest_of_compilation (decl);
1632 /* Make sure that the back end didn't give up on compiling. */
1633 gcc_assert (TREE_ASM_WRITTEN (decl));
1634 current_function_decl = NULL;
1635 gcc_assert (!cgraph_preserve_function_body_p (node));
1636 cgraph_release_function_body (node);
1637 /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
1638 points to the dead function body. */
1639 cgraph_node_remove_callees (node);
1641 cgraph_function_flags_ready = true;
1644 /* Return true when CALLER_DECL should be inlined into CALLEE_DECL. */
1646 bool
1647 cgraph_inline_p (struct cgraph_edge *e, cgraph_inline_failed_t *reason)
1649 *reason = e->inline_failed;
1650 return !e->inline_failed;
1655 /* Expand all functions that must be output.
1657 Attempt to topologically sort the nodes so a function is output when
1658 all called functions are already assembled to allow data to be
1659 propagated across the callgraph. Use a stack to get smaller distance
1660 between a function and its callees (later we may choose to use a more
1661 sophisticated algorithm for function reordering; we will likely want
1662 to use subsections to make the output functions appear in top-down
1663 order). */
1665 static void
1666 cgraph_expand_all_functions (void)
1668 struct cgraph_node *node;
1669 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1670 int order_pos, new_order_pos = 0;
1671 int i;
1673 order_pos = ipa_reverse_postorder (order);
1674 gcc_assert (order_pos == cgraph_n_nodes);
1676 /* Garbage collector may remove inline clones we eliminate during
1677 optimization. So we must be sure to not reference them. */
1678 for (i = 0; i < order_pos; i++)
1679 if (order[i]->process)
1680 order[new_order_pos++] = order[i];
1682 for (i = new_order_pos - 1; i >= 0; i--)
1684 node = order[i];
1685 if (node->process)
1687 gcc_assert (node->reachable);
1688 node->process = 0;
1689 cgraph_expand_function (node);
1692 cgraph_process_new_functions ();
1694 free (order);
1698 /* This is used to sort the node types by the cgraph order number. */
1700 enum cgraph_order_sort_kind
1702 ORDER_UNDEFINED = 0,
1703 ORDER_FUNCTION,
1704 ORDER_VAR,
1705 ORDER_ASM
1708 struct cgraph_order_sort
1710 enum cgraph_order_sort_kind kind;
1711 union
1713 struct cgraph_node *f;
1714 struct varpool_node *v;
1715 struct cgraph_asm_node *a;
1716 } u;
1719 /* Output all functions, variables, and asm statements in the order
1720 according to their order fields, which is the order in which they
1721 appeared in the file. This implements -fno-toplevel-reorder. In
1722 this mode we may output functions and variables which don't really
1723 need to be output. */
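/* Editor's sketch, illustrative only (not from the original source): with
   -fno-toplevel-reorder the translation unit below is emitted in source
   order -- the toplevel asm, then the variable, then the function -- and the
   static variable is assembled even if nothing ever references it:

     asm ("# toplevel asm, stays between the two definitions");
     static int build_tag = 123;   /* unreferenced, but still output */
     void api_entry (void) { }

   This corresponds to the ORDER_ASM, ORDER_VAR and ORDER_FUNCTION entries
   queued below by their original order fields.  */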
1725 static void
1726 cgraph_output_in_order (void)
1728 int max;
1729 struct cgraph_order_sort *nodes;
1730 int i;
1731 struct cgraph_node *pf;
1732 struct varpool_node *pv;
1733 struct cgraph_asm_node *pa;
1735 max = cgraph_order;
1736 nodes = XCNEWVEC (struct cgraph_order_sort, max);
1738 varpool_analyze_pending_decls ();
1740 for (pf = cgraph_nodes; pf; pf = pf->next)
1742 if (pf->process && !pf->thunk.thunk_p)
1744 i = pf->order;
1745 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1746 nodes[i].kind = ORDER_FUNCTION;
1747 nodes[i].u.f = pf;
1751 for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
1753 i = pv->order;
1754 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1755 nodes[i].kind = ORDER_VAR;
1756 nodes[i].u.v = pv;
1759 for (pa = cgraph_asm_nodes; pa; pa = pa->next)
1761 i = pa->order;
1762 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1763 nodes[i].kind = ORDER_ASM;
1764 nodes[i].u.a = pa;
1767 /* In no-toplevel-reorder mode we output all statics; mark them as needed. */
1768 for (i = 0; i < max; ++i)
1770 if (nodes[i].kind == ORDER_VAR)
1772 varpool_mark_needed_node (nodes[i].u.v);
1775 varpool_empty_needed_queue ();
1777 for (i = 0; i < max; ++i)
1778 if (nodes[i].kind == ORDER_VAR)
1779 varpool_finalize_named_section_flags (nodes[i].u.v);
1781 for (i = 0; i < max; ++i)
1783 switch (nodes[i].kind)
1785 case ORDER_FUNCTION:
1786 nodes[i].u.f->process = 0;
1787 cgraph_expand_function (nodes[i].u.f);
1788 break;
1790 case ORDER_VAR:
1791 varpool_assemble_decl (nodes[i].u.v);
1792 break;
1794 case ORDER_ASM:
1795 assemble_asm (nodes[i].u.a->asm_str);
1796 break;
1798 case ORDER_UNDEFINED:
1799 break;
1801 default:
1802 gcc_unreachable ();
1806 cgraph_asm_nodes = NULL;
1807 free (nodes);
1810 /* Return true when the function body of NODE still needs to be kept around
1811 for later re-use. */
1812 bool
1813 cgraph_preserve_function_body_p (struct cgraph_node *node)
1815 gcc_assert (cgraph_global_info_ready);
1816 gcc_assert (!node->same_body_alias);
1818 /* Look if there is any clone around. */
1819 if (node->clones)
1820 return true;
1821 return false;
1824 static void
1825 ipa_passes (void)
1827 set_cfun (NULL);
1828 current_function_decl = NULL;
1829 gimple_register_cfg_hooks ();
1830 bitmap_obstack_initialize (NULL);
1832 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);
1834 if (!in_lto_p)
1836 execute_ipa_pass_list (all_small_ipa_passes);
1837 if (seen_error ())
1838 return;
1841 /* If pass_all_early_optimizations was not scheduled, the state of
1842 the cgraph will not be properly updated. Update it now. */
1843 if (cgraph_state < CGRAPH_STATE_IPA_SSA)
1844 cgraph_state = CGRAPH_STATE_IPA_SSA;
1846 if (!in_lto_p)
1848 /* Generate coverage variables and constructors. */
1849 coverage_finish ();
1851 /* Process new functions added. */
1852 set_cfun (NULL);
1853 current_function_decl = NULL;
1854 cgraph_process_new_functions ();
1856 execute_ipa_summary_passes
1857 ((struct ipa_opt_pass_d *) all_regular_ipa_passes);
1860 /* Some targets need to handle LTO assembler output specially. */
1861 if (flag_generate_lto)
1862 targetm.asm_out.lto_start ();
1864 execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);
1866 if (!in_lto_p)
1867 ipa_write_summaries ();
1869 if (flag_generate_lto)
1870 targetm.asm_out.lto_end ();
1872 if (!flag_ltrans)
1873 execute_ipa_pass_list (all_regular_ipa_passes);
1874 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
1876 bitmap_obstack_release (NULL);
1880 /* Perform simple optimizations based on callgraph. */
1882 void
1883 cgraph_optimize (void)
1885 if (seen_error ())
1886 return;
1888 #ifdef ENABLE_CHECKING
1889 verify_cgraph ();
1890 #endif
1892 /* The front end may output common variables after the unit has been finalized.
1893 It is safe to deal with them here as they are always zero initialized. */
1894 varpool_analyze_pending_decls ();
1896 timevar_push (TV_CGRAPHOPT);
1897 if (pre_ipa_mem_report)
1899 fprintf (stderr, "Memory consumption before IPA\n");
1900 dump_memory_report (false);
1902 if (!quiet_flag)
1903 fprintf (stderr, "Performing interprocedural optimizations\n");
1904 cgraph_state = CGRAPH_STATE_IPA;
1906 /* Don't run the IPA passes if there were any error or sorry messages. */
1907 if (!seen_error ())
1908 ipa_passes ();
1910 /* Do nothing else if any IPA pass found errors. */
1911 if (seen_error ())
1913 timevar_pop (TV_CGRAPHOPT);
1914 return;
1917 /* This pass removes bodies of extern inline functions we never inlined.
1918 Do this later so other IPA passes see what is really going on. */
1919 cgraph_remove_unreachable_nodes (false, dump_file);
1920 cgraph_global_info_ready = true;
1921 if (cgraph_dump_file)
1923 fprintf (cgraph_dump_file, "Optimized ");
1924 dump_cgraph (cgraph_dump_file);
1925 dump_varpool (cgraph_dump_file);
1927 if (post_ipa_mem_report)
1929 fprintf (stderr, "Memory consumption after IPA\n");
1930 dump_memory_report (false);
1932 timevar_pop (TV_CGRAPHOPT);
1934 /* Output everything. */
1935 (*debug_hooks->assembly_start) ();
1936 if (!quiet_flag)
1937 fprintf (stderr, "Assembling functions:\n");
1938 #ifdef ENABLE_CHECKING
1939 verify_cgraph ();
1940 #endif
1942 cgraph_materialize_all_clones ();
1943 cgraph_mark_functions_to_output ();
1945 cgraph_state = CGRAPH_STATE_EXPANSION;
1946 if (!flag_toplevel_reorder)
1947 cgraph_output_in_order ();
1948 else
1950 cgraph_output_pending_asms ();
1952 cgraph_expand_all_functions ();
1953 varpool_remove_unreferenced_decls ();
1955 varpool_assemble_pending_decls ();
1957 cgraph_process_new_functions ();
1958 cgraph_state = CGRAPH_STATE_FINISHED;
1960 if (cgraph_dump_file)
1962 fprintf (cgraph_dump_file, "\nFinal ");
1963 dump_cgraph (cgraph_dump_file);
1964 dump_varpool (cgraph_dump_file);
1966 #ifdef ENABLE_CHECKING
1967 verify_cgraph ();
1968 /* Double check that all inline clones are gone and that all
1969 function bodies have been released from memory. */
1970 if (!seen_error ())
1971 {
1972 struct cgraph_node *node;
1973 bool error_found = false;
1975 for (node = cgraph_nodes; node; node = node->next)
1976 if (node->analyzed
1977 && (node->global.inlined_to
1978 || gimple_has_body_p (node->decl)))
1979 {
1980 error_found = true;
1981 dump_cgraph_node (stderr, node);
1982 }
1983 if (error_found)
1984 internal_error ("nodes with unreleased memory found");
1985 }
1986 #endif
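/* Editorial aside (illustrative only): the "extern inline" bodies whose
   removal is mentioned above follow the GNU89 semantics, under which the
   body is merely an inlining candidate and no out-of-line copy is emitted
   from this translation unit:

     extern inline int twice (int x) { return 2 * x; }

   with the real, out-of-line definition of twice () provided elsewhere.
   If no call was actually inlined, the retained body is dead and is
   released by cgraph_remove_unreachable_nodes above.  */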
1989 void
1990 init_cgraph (void)
1992 if (!cgraph_dump_file)
1993 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
1996 /* The edges representing the callers of the NEW_VERSION node were
1997 fixed by cgraph_function_versioning (); now the call statements in
1998 their respective bodies should be updated to call NEW_VERSION.  */
2000 static void
2001 update_call_expr (struct cgraph_node *new_version)
2003 struct cgraph_edge *e;
2005 gcc_assert (new_version);
2007 /* Update the call expr on the edges to call the new version. */
2008 for (e = new_version->callers; e; e = e->next_caller)
2010 struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
2011 gimple_call_set_fndecl (e->call_stmt, new_version->decl);
2012 maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
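/* Editorial sketch of the effect of update_call_expr (the clone name is
   made up for the example): each caller edge of NEW_VERSION has its
   GIMPLE call retargeted, e.g.

     foo (a_1, b_2);      becomes      foo.v1 (a_1, b_2);

   after which maybe_clean_eh_stmt_fn may drop the statement's EH
   landing-pad association when the new callee can no longer throw.  */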
2017 /* Create a new cgraph node which is the new version of
2018 the OLD_VERSION node. REDIRECT_CALLERS holds the caller
2019 edges which should be redirected to point to
2020 NEW_VERSION. All the callee edges of OLD_VERSION
2021 are cloned to the new version node. Return the new
2022 version node.
2024 If non-NULL, BBS_TO_COPY determines which basic blocks
2025 are copied, to prevent duplicating calls that are dead
2026 in the clone. */
2028 static struct cgraph_node *
2029 cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
2030 tree new_decl,
2031 VEC(cgraph_edge_p,heap) *redirect_callers,
2032 bitmap bbs_to_copy)
2034 struct cgraph_node *new_version;
2035 struct cgraph_edge *e;
2036 unsigned i;
2038 gcc_assert (old_version);
2040 new_version = cgraph_create_node (new_decl);
2042 new_version->analyzed = true;
2043 new_version->local = old_version->local;
2044 new_version->local.externally_visible = false;
2045 new_version->local.local = true;
2046 new_version->global = old_version->global;
2047 new_version->rtl = old_version->rtl;
2048 new_version->reachable = true;
2049 new_version->count = old_version->count;
2051 for (e = old_version->callees; e; e=e->next_callee)
2052 if (!bbs_to_copy
2053 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
2054 cgraph_clone_edge (e, new_version, e->call_stmt,
2055 e->lto_stmt_uid, REG_BR_PROB_BASE,
2056 CGRAPH_FREQ_BASE,
2057 true);
2058 for (e = old_version->indirect_calls; e; e=e->next_callee)
2059 if (!bbs_to_copy
2060 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
2061 cgraph_clone_edge (e, new_version, e->call_stmt,
2062 e->lto_stmt_uid, REG_BR_PROB_BASE,
2063 CGRAPH_FREQ_BASE,
2064 true);
2065 FOR_EACH_VEC_ELT (cgraph_edge_p, redirect_callers, i, e)
2067 /* Redirect calls to the old version node to point to its new
2068 version. */
2069 cgraph_redirect_edge_callee (e, new_version);
2072 return new_version;
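/* Editorial sketch (block numbers illustrative): the BBS_TO_COPY filter
   above only clones call edges whose statements live in copied blocks.
   With BBS_TO_COPY = { 2 } and an OLD_VERSION body of

     <bb 2>:  foo ();
     <bb 3>:  bar ();

   only the edge for foo () is cloned to NEW_VERSION; the bar () call is
   dead in the clone and gets no edge.  */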
2075 /* Perform function versioning.
2076 Function versioning includes copying of the tree and
2077 a callgraph update (creating a new cgraph node and updating
2078 its callees and callers).
2080 The REDIRECT_CALLERS vector contains the edges to be redirected
2081 to the new version.
2083 TREE_MAP is a mapping of tree nodes we want to replace with
2084 new ones (according to results of prior analysis).
2085 OLD_VERSION_NODE is the node that is versioned.
2086 The function returns the new version's cgraph node.
2087 If non-NULL, ARGS_TO_SKIP determines the function parameters to remove
2088 from the new version.
2089 If non-NULL, BBS_TO_COPY determines which basic blocks to copy.
2090 If non-NULL, NEW_ENTRY_BLOCK determines the new entry basic block of the clone. */
2092 struct cgraph_node *
2093 cgraph_function_versioning (struct cgraph_node *old_version_node,
2094 VEC(cgraph_edge_p,heap) *redirect_callers,
2095 VEC (ipa_replace_map_p,gc)* tree_map,
2096 bitmap args_to_skip,
2097 bitmap bbs_to_copy,
2098 basic_block new_entry_block,
2099 const char *clone_name)
2101 tree old_decl = old_version_node->decl;
2102 struct cgraph_node *new_version_node = NULL;
2103 tree new_decl;
2105 if (!tree_versionable_function_p (old_decl))
2106 return NULL;
2108 gcc_assert (old_version_node->local.can_change_signature || !args_to_skip);
2110 /* Make a new FUNCTION_DECL tree node for the
2111 new version. */
2112 if (!args_to_skip)
2113 new_decl = copy_node (old_decl);
2114 else
2115 new_decl = build_function_decl_skip_args (old_decl, args_to_skip);
2117 /* Generate a new name for the new version. */
2118 DECL_NAME (new_decl) = clone_function_name (old_decl, clone_name);
2119 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
2120 SET_DECL_RTL (new_decl, NULL);
2122 /* Create the new version's call-graph node
2123 and update the edges of the new node. */
2124 new_version_node =
2125 cgraph_copy_node_for_versioning (old_version_node, new_decl,
2126 redirect_callers, bbs_to_copy);
2128 /* Copy the OLD_VERSION_NODE function tree to the new version. */
2129 tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
2130 bbs_to_copy, new_entry_block);
2132 /* Update the new version's properties.
2133 Make the new version visible only within this translation unit. Make sure
2134 that it is not weak either.
2135 ??? We cannot use COMDAT linkage because there is no
2136 ABI support for this. */
2137 cgraph_make_decl_local (new_version_node->decl);
2138 DECL_VIRTUAL_P (new_version_node->decl) = 0;
2139 new_version_node->local.externally_visible = 0;
2140 new_version_node->local.local = 1;
2141 new_version_node->lowered = true;
2143 /* Update the call_expr on the edges to call the new version node. */
2144 update_call_expr (new_version_node);
2146 cgraph_call_function_insertion_hooks (new_version_node);
2147 return new_version_node;
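/* Editorial sketch of the overall effect (names and clone suffix are
   illustrative): versioning

     int sum3 (int a, int unused, int b) { return a + b; }

   with ARGS_TO_SKIP = { 1 } and an empty TREE_MAP yields a local,
   non-weak clone roughly equivalent to

     static int sum3.clone.0 (int a, int b) { return a + b; }

   and every edge listed in REDIRECT_CALLERS now calls the two-parameter
   clone instead of sum3.  */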
2150 /* Given a virtual clone, turn it into an actual clone. */
2151 static void
2152 cgraph_materialize_clone (struct cgraph_node *node)
2154 bitmap_obstack_initialize (NULL);
2155 node->former_clone_of = node->clone_of->decl;
2156 if (node->clone_of->former_clone_of)
2157 node->former_clone_of = node->clone_of->former_clone_of;
2158 /* Copy the function body of the node this is a clone of, applying the recorded tree_map and args_to_skip. */
2159 tree_function_versioning (node->clone_of->decl, node->decl,
2160 node->clone.tree_map, true,
2161 node->clone.args_to_skip, NULL, NULL);
2162 if (cgraph_dump_file)
2164 dump_function_to_file (node->clone_of->decl, cgraph_dump_file, dump_flags);
2165 dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
2168 /* The function is no longer a clone. */
2169 if (node->next_sibling_clone)
2170 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
2171 if (node->prev_sibling_clone)
2172 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
2173 else
2174 node->clone_of->clones = node->next_sibling_clone;
2175 node->next_sibling_clone = NULL;
2176 node->prev_sibling_clone = NULL;
2177 if (!node->clone_of->analyzed && !node->clone_of->clones)
2179 cgraph_release_function_body (node->clone_of);
2180 cgraph_node_remove_callees (node->clone_of);
2181 ipa_remove_all_references (&node->clone_of->ref_list);
2183 node->clone_of = NULL;
2184 bitmap_obstack_release (NULL);
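/* Editorial note (illustrative): before this point a virtual clone has no
   body of its own, only a clone_of pointer plus the recorded tree_map and
   args_to_skip. Materialization copies the body while applying them; for
   example a clone of

     int f (int n, int debug) { if (debug) n = n + n; return n; }

   recorded with tree_map { debug -> 0 } receives a body in which the test
   is trivially false and can be folded away by later passes.  */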
2187 /* If necessary, change the function declaration in the call statement
2188 associated with E so that it corresponds to the edge callee. */
2190 gimple
2191 cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *e)
2193 tree decl = gimple_call_fndecl (e->call_stmt);
2194 gimple new_stmt;
2195 gimple_stmt_iterator gsi;
2196 bool gsi_computed = false;
2197 #ifdef ENABLE_CHECKING
2198 struct cgraph_node *node;
2199 #endif
2201 if (e->indirect_unknown_callee
2202 || decl == e->callee->decl
2203 /* Don't update a call from a same-body alias to the real function. */
2204 || (decl && cgraph_get_node (decl) == cgraph_get_node (e->callee->decl)))
2205 return e->call_stmt;
2207 #ifdef ENABLE_CHECKING
2208 if (decl)
2210 node = cgraph_get_node (decl);
2211 gcc_assert (!node || !node->clone.combined_args_to_skip);
2213 #endif
2215 if (cgraph_dump_file)
2217 fprintf (cgraph_dump_file, "updating call of %s/%i -> %s/%i: ",
2218 cgraph_node_name (e->caller), e->caller->uid,
2219 cgraph_node_name (e->callee), e->callee->uid);
2220 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
2221 if (e->callee->clone.combined_args_to_skip)
2223 fprintf (cgraph_dump_file, " combined args to skip: ");
2224 dump_bitmap (cgraph_dump_file,
2225 e->callee->clone.combined_args_to_skip);
2229 if (e->indirect_info
2230 && e->indirect_info->thunk_delta != 0
2231 && (!e->callee->clone.combined_args_to_skip
2232 || !bitmap_bit_p (e->callee->clone.combined_args_to_skip, 0)))
2233 {
2234 if (cgraph_dump_file)
2235 fprintf (cgraph_dump_file, " Thunk delta is "
2236 HOST_WIDE_INT_PRINT_DEC "\n", e->indirect_info->thunk_delta);
2237 gsi = gsi_for_stmt (e->call_stmt);
2238 gsi_computed = true;
2239 gimple_adjust_this_by_delta (&gsi,
2240 build_int_cst (sizetype,
2241 e->indirect_info->thunk_delta));
2242 e->indirect_info->thunk_delta = 0;
2243 }
2245 if (e->callee->clone.combined_args_to_skip)
2247 int lp_nr;
2249 new_stmt
2250 = gimple_call_copy_skip_args (e->call_stmt,
2251 e->callee->clone.combined_args_to_skip);
2252 gimple_call_set_fndecl (new_stmt, e->callee->decl);
2254 if (gimple_vdef (new_stmt)
2255 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
2256 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
2258 if (!gsi_computed)
2259 gsi = gsi_for_stmt (e->call_stmt);
2260 gsi_replace (&gsi, new_stmt, false);
2261 /* We need to defer cleaning EH info on the new statement to
2262 fixup-cfg. We may not have dominator information at this point
2263 and thus would end up with unreachable blocks and have no way
2264 to communicate that we need to run CFG cleanup then. */
2265 lp_nr = lookup_stmt_eh_lp (e->call_stmt);
2266 if (lp_nr != 0)
2268 remove_stmt_from_eh_lp (e->call_stmt);
2269 add_stmt_to_eh_lp (new_stmt, lp_nr);
2272 else
2274 new_stmt = e->call_stmt;
2275 gimple_call_set_fndecl (new_stmt, e->callee->decl);
2276 update_stmt (new_stmt);
2279 cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt, new_stmt);
2281 if (cgraph_dump_file)
2283 fprintf (cgraph_dump_file, " updated to:");
2284 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
2286 return new_stmt;
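/* Editorial sketch of the call-statement rewrite above (callee name and
   SSA names are illustrative): for an edge whose callee has
   combined_args_to_skip = { 1 }, the statement

     tmp_5 = bar (x_1, 0, y_2);

   is rebuilt by gimple_call_copy_skip_args as

     tmp_5 = bar.clone.1 (x_1, y_2);

   and, when a pending thunk_delta is non-zero, gimple_adjust_this_by_delta
   first offsets the `this' argument by that constant, mirroring what the
   C++ thunk it replaces would have done.  */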
2289 /* Once all functions from the compilation unit are in memory, produce all clones
2290 and update all calls. We might also do this on demand if we don't want to
2291 bring all functions into memory prior to compilation, but the current WHOPR
2292 implementation does that, and it is a bit easier to keep everything right in
2293 this order. */
2294 void
2295 cgraph_materialize_all_clones (void)
2297 struct cgraph_node *node;
2298 bool stabilized = false;
2300 if (cgraph_dump_file)
2301 fprintf (cgraph_dump_file, "Materializing clones\n");
2302 #ifdef ENABLE_CHECKING
2303 verify_cgraph ();
2304 #endif
2306 /* We could also use topological order, but the number of iterations should be
2307 bounded by the number of IPA passes, since a single IPA pass is probably not
2308 going to create clones of the clones it created itself. */
2309 while (!stabilized)
2311 stabilized = true;
2312 for (node = cgraph_nodes; node; node = node->next)
2314 if (node->clone_of && node->decl != node->clone_of->decl
2315 && !gimple_has_body_p (node->decl))
2317 if (gimple_has_body_p (node->clone_of->decl))
2319 if (cgraph_dump_file)
2321 fprintf (cgraph_dump_file, "cloning %s to %s\n",
2322 cgraph_node_name (node->clone_of),
2323 cgraph_node_name (node));
2324 if (node->clone.tree_map)
2326 unsigned int i;
2327 fprintf (cgraph_dump_file, " replace map: ");
2328 for (i = 0; i < VEC_length (ipa_replace_map_p,
2329 node->clone.tree_map);
2330 i++)
2332 struct ipa_replace_map *replace_info;
2333 replace_info = VEC_index (ipa_replace_map_p,
2334 node->clone.tree_map,
2336 print_generic_expr (cgraph_dump_file, replace_info->old_tree, 0);
2337 fprintf (cgraph_dump_file, " -> ");
2338 print_generic_expr (cgraph_dump_file, replace_info->new_tree, 0);
2339 fprintf (cgraph_dump_file, "%s%s;",
2340 replace_info->replace_p ? "(replace)":"",
2341 replace_info->ref_p ? "(ref)":"");
2343 fprintf (cgraph_dump_file, "\n");
2345 if (node->clone.args_to_skip)
2347 fprintf (cgraph_dump_file, " args_to_skip: ");
2348 dump_bitmap (cgraph_dump_file, node->clone.args_to_skip);
2350 if (node->clone.args_to_skip)
2352 fprintf (cgraph_dump_file, " combined_args_to_skip:");
2353 dump_bitmap (cgraph_dump_file, node->clone.combined_args_to_skip);
2356 cgraph_materialize_clone (node);
2357 stabilized = false;
2362 for (node = cgraph_nodes; node; node = node->next)
2363 if (!node->analyzed && node->callees)
2364 cgraph_node_remove_callees (node);
2365 if (cgraph_dump_file)
2366 fprintf (cgraph_dump_file, "Materialization and call site updates done.\n");
2367 #ifdef ENABLE_CHECKING
2368 verify_cgraph ();
2369 #endif
2370 cgraph_remove_unreachable_nodes (false, cgraph_dump_file);
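/* Editorial note on the fixed-point loop above (the chain is illustrative):
   with clones

     A  (has a gimple body)
     B  = virtual clone of A
     C  = virtual clone of B

   a single sweep may visit C before B has been given a body, in which case
   C is only picked up on a later sweep; the walk therefore repeats until a
   sweep materializes nothing.  */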
2373 #include "gt-cgraphunit.h"