gcc/cgraphunit.c
1 /* Callgraph based interprocedural optimizations.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
3 2011 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This module implements the main driver of the compilation process as well as
23 a few basic interprocedural optimizers.
25 The main scope of this file is to act as an interface between the
26 tree based frontends and the backend (and middle end).
28 The front-end is supposed to use the following functionality:
30 - cgraph_finalize_function
32 This function is called once the front-end has parsed the whole body of a function
33 and it is certain that neither the function body nor the declaration will change.
35 (There is one exception needed for implementing GCC extern inline
36 function.)
38 - varpool_finalize_variable
40 This function has the same behavior as the above but is used for static
41 variables.
43 - cgraph_finalize_compilation_unit
45 This function is called once the (source level) compilation unit is finalized
46 and it will no longer change.
48 The call-graph construction and local function analysis take
49 place here. Bodies of unreachable functions are released to
50 conserve memory usage.
52 The function can be called multiple times when multiple source level
53 compilation units are combined (such as in the C frontend).
55 - cgraph_optimize
57 In unit-at-a-time compilation the intraprocedural analysis takes
58 place here. In particular the static functions whose address is never
59 taken are marked as local. The backend can then use this information to
60 modify calling conventions, do better inlining or similar optimizations.
62 - cgraph_mark_needed_node
63 - varpool_mark_needed_node
65 When a function or variable is referenced in some hidden way, the call-graph
66 data structure must be updated accordingly by these functions.
67 There should be little need to call them, and all the references
68 should be made explicit to the cgraph code. At present these functions are
69 used by the C++ frontend to explicitly mark the keyed methods.
71 - analyze_expr callback
73 This function is responsible for lowering tree nodes not understood by
74 generic code into understandable ones or alternatively marking
75 callgraph and varpool nodes referenced by them as needed.
77 ??? On the tree-ssa genericizing should take place here and we would avoid the
78 need for these hooks (replacing them by a genericizing hook).
80 Analysis of all functions is deferred
81 to cgraph_finalize_compilation_unit and expansion to cgraph_optimize.
83 In cgraph_finalize_compilation_unit the reachable functions are
84 analyzed. During analysis the call-graph edges from reachable
85 functions are constructed and their destinations are marked as
86 reachable. References to functions and variables are discovered too
87 and variables found to be needed are output to the assembly file. Via the
88 mark_referenced call in assemble_variable, functions referenced by
89 static variables are noticed too.
91 The intra-procedural information is produced and its existence
92 indicated by global_info_ready. Once this flag is set it is impossible
93 to change a function from !reachable to reachable and thus
94 assemble_variable no longer calls mark_referenced.
96 Finally the call-graph is topologically sorted and all reachable functions
97 that have not been completely inlined or are not external are output.
99 ??? It is possible that a reference to a function or variable is optimized
100 out. We cannot deal with this nicely because the topological order is not
101 suitable for it. For tree-ssa we may consider another pass doing
102 optimization and re-discovering reachable functions.
104 ??? Reorganize code so variables are output very last and only if they
105 really have been referenced by the produced code, so we catch more cases
106 where reference has been optimized out. */
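/* A minimal sketch of the front-end driver sequence described above
   (hypothetical caller code, not part of this file; FNDECL stands for a
   parsed function declaration and error handling is omitted):

       // after the whole body of a function has been parsed:
       cgraph_finalize_function (fndecl, false);

       // static variables go through the varpool counterpart named
       // above (varpool_finalize_variable) in the same way;

       // once the whole translation unit has been parsed:
       cgraph_finalize_compilation_unit ();  // analysis, then cgraph_optimize

   Functions or variables referenced in some hidden way must additionally
   be marked via cgraph_mark_needed_node / varpool_mark_needed_node.  */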
109 #include "config.h"
110 #include "system.h"
111 #include "coretypes.h"
112 #include "tm.h"
113 #include "tree.h"
114 #include "rtl.h"
115 #include "tree-flow.h"
116 #include "tree-inline.h"
117 #include "langhooks.h"
118 #include "pointer-set.h"
119 #include "toplev.h"
120 #include "flags.h"
121 #include "ggc.h"
122 #include "debug.h"
123 #include "target.h"
124 #include "cgraph.h"
125 #include "diagnostic.h"
126 #include "tree-pretty-print.h"
127 #include "gimple-pretty-print.h"
128 #include "timevar.h"
129 #include "params.h"
130 #include "fibheap.h"
131 #include "intl.h"
132 #include "function.h"
133 #include "ipa-prop.h"
134 #include "gimple.h"
135 #include "tree-iterator.h"
136 #include "tree-pass.h"
137 #include "tree-dump.h"
138 #include "output.h"
139 #include "coverage.h"
140 #include "plugin.h"
141 #include "ipa-inline.h"
142 #include "ipa-utils.h"
143 #include "lto-streamer.h"
145 static void cgraph_expand_all_functions (void);
146 static void cgraph_mark_functions_to_output (void);
147 static void cgraph_expand_function (struct cgraph_node *);
148 static void cgraph_output_pending_asms (void);
150 FILE *cgraph_dump_file;
152 /* Used for vtable lookup in thunk adjusting. */
153 static GTY (()) tree vtable_entry_type;
155 /* Determine if function DECL is needed. That is, visible to something
156 either outside this translation unit or to something magic in the system
157 configury. */
159 bool
160 cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
162 /* If the user told us it is used, then it must be so. */
163 if (node->local.externally_visible)
164 return true;
166 /* ??? If the assembler name is set by hand, it is possible that the name
167 is assembled later, after finalizing the function, and the fact is only
168 noticed in assemble_name then. This is arguably a bug. */
169 if (DECL_ASSEMBLER_NAME_SET_P (decl)
170 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
171 return true;
173 /* With -fkeep-inline-functions we are keeping all inline functions except
174 for extern inline ones. */
175 if (flag_keep_inline_functions
176 && DECL_DECLARED_INLINE_P (decl)
177 && !DECL_EXTERNAL (decl)
178 && !DECL_DISREGARD_INLINE_LIMITS (decl))
179 return true;
181 /* If we decided it was needed before, but at the time we didn't have
182 the body of the function available, then it's still needed. We have
183 to go back and re-check its dependencies now. */
184 if (node->needed)
185 return true;
187 /* Externally visible functions must be output. The exception is
188 COMDAT functions that must be output only when they are needed.
190 When not optimizing, also output the static functions (see
191 PR24561), but don't do so for always_inline functions, functions
192 declared inline, and nested functions. These were optimized out
193 in the original implementation and it is unclear whether we want
194 to change the behavior here. */
195 if (((TREE_PUBLIC (decl)
196 || (!optimize
197 && !DECL_DISREGARD_INLINE_LIMITS (decl)
198 && !DECL_DECLARED_INLINE_P (decl)
199 && !(DECL_CONTEXT (decl)
200 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)))
201 && !flag_whole_program
202 && !flag_lto)
203 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
204 return true;
206 return false;
209 /* Process CGRAPH_NEW_FUNCTIONS and perform the actions necessary to add these
210 functions to the callgraph so they look like ordinary reachable
211 functions inserted into the callgraph already at construction time. */
213 bool
214 cgraph_process_new_functions (void)
216 bool output = false;
217 tree fndecl;
218 struct cgraph_node *node;
220 varpool_analyze_pending_decls ();
221 /* Note that this queue may grow as it is being processed, as the new
222 functions may generate new ones. */
223 while (cgraph_new_nodes)
225 node = cgraph_new_nodes;
226 fndecl = node->decl;
227 cgraph_new_nodes = cgraph_new_nodes->next_needed;
228 switch (cgraph_state)
230 case CGRAPH_STATE_CONSTRUCTION:
231 /* At construction time we just need to finalize the function and move
232 it into the reachable functions list. */
234 node->next_needed = NULL;
235 cgraph_finalize_function (fndecl, false);
236 cgraph_mark_reachable_node (node);
237 output = true;
238 cgraph_call_function_insertion_hooks (node);
239 break;
241 case CGRAPH_STATE_IPA:
242 case CGRAPH_STATE_IPA_SSA:
243 /* When IPA optimization has already started, do all essential
244 transformations that have already been performed on the whole
245 cgraph but not on this function. */
247 gimple_register_cfg_hooks ();
248 if (!node->analyzed)
249 cgraph_analyze_function (node);
250 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
251 current_function_decl = fndecl;
252 if ((cgraph_state == CGRAPH_STATE_IPA_SSA
253 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
254 /* When not optimizing, be sure we run early local passes anyway
255 to expand OMP. */
256 || !optimize)
257 execute_pass_list (pass_early_local_passes.pass.sub);
258 else
259 compute_inline_parameters (node, true);
260 free_dominance_info (CDI_POST_DOMINATORS);
261 free_dominance_info (CDI_DOMINATORS);
262 pop_cfun ();
263 current_function_decl = NULL;
264 cgraph_call_function_insertion_hooks (node);
265 break;
267 case CGRAPH_STATE_EXPANSION:
268 /* Functions created during expansion shall be compiled
269 directly. */
270 node->process = 0;
271 cgraph_call_function_insertion_hooks (node);
272 cgraph_expand_function (node);
273 break;
275 default:
276 gcc_unreachable ();
277 break;
279 varpool_analyze_pending_decls ();
281 return output;
284 /* As a GCC extension we allow redefinition of a function. The
285 semantics when the two bodies differ are not well defined.
286 We replace the old body with the new body, so in unit-at-a-time mode
287 we always use the new body, while in normal mode we may end up with the
288 old body inlined into some functions and the new body expanded and
289 inlined in others.
291 ??? It may make more sense to use one body for inlining and the other
292 body for expanding the function, but this is difficult to do. */
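/* A concrete instance of the extension described above (sketch only; the
   function F is hypothetical):

       extern inline int f (void) { return 1; }   // gnu89 inline-only body
       int f (void) { return 2; }                  // later redefinition

   The second body replaces the first, and cgraph_reset_node below discards
   everything derived from the first body so the node can be analyzed
   again.  */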
294 static void
295 cgraph_reset_node (struct cgraph_node *node)
297 /* If node->process is set, then we have already begun whole-unit analysis.
298 This is *not* testing for whether we've already emitted the function.
299 That case can be sort-of legitimately seen with real function redefinition
300 errors. I would argue that the front end should never present us with
301 such a case, but don't enforce that for now. */
302 gcc_assert (!node->process);
304 /* Reset our data structures so we can analyze the function again. */
305 memset (&node->local, 0, sizeof (node->local));
306 memset (&node->global, 0, sizeof (node->global));
307 memset (&node->rtl, 0, sizeof (node->rtl));
308 node->analyzed = false;
309 node->local.finalized = false;
311 cgraph_node_remove_callees (node);
314 static void
315 cgraph_lower_function (struct cgraph_node *node)
317 if (node->lowered)
318 return;
320 if (node->nested)
321 lower_nested_functions (node->decl);
322 gcc_assert (!node->nested);
324 tree_lowering_passes (node->decl);
325 node->lowered = true;
328 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
329 logic in effect. If NESTED is true, then our caller cannot stand to have
330 the garbage collector run at the moment. We would need to either create
331 a new GC context, or just not compile right now. */
333 void
334 cgraph_finalize_function (tree decl, bool nested)
336 struct cgraph_node *node = cgraph_get_create_node (decl);
338 if (node->local.finalized)
340 cgraph_reset_node (node);
341 node->local.redefined_extern_inline = true;
344 notice_global_symbol (decl);
345 node->local.finalized = true;
346 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
348 if (cgraph_decide_is_function_needed (node, decl))
349 cgraph_mark_needed_node (node);
351 /* Since we reclaim unreachable nodes at the end of every language
352 level unit, we need to be conservative about possible entry points
353 there. */
354 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
355 || DECL_STATIC_CONSTRUCTOR (decl)
356 || DECL_STATIC_DESTRUCTOR (decl)
357 /* COMDAT virtual functions may be referenced by a vtable from
358 another compilation unit. Still we want to devirtualize calls
359 to those, so we need to analyze them.
360 FIXME: We should introduce may edges for this purpose and update
361 their handling in unreachable function removal and inliner too. */
362 || (DECL_VIRTUAL_P (decl)
363 && optimize && (DECL_COMDAT (decl) || DECL_EXTERNAL (decl))))
364 cgraph_mark_reachable_node (node);
366 /* If we've not yet emitted decl, tell the debug info about it. */
367 if (!TREE_ASM_WRITTEN (decl))
368 (*debug_hooks->deferred_inline_function) (decl);
370 /* Possibly warn about unused parameters. */
371 if (warn_unused_parameter)
372 do_warn_unused_parameter (decl);
374 if (!nested)
375 ggc_collect ();
378 /* The C99 extern inline keywords allow changing a declaration after the function
379 has been finalized. We then need to re-decide whether we want to mark the
380 function as needed. */
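/* For instance (C99 semantics; sketch only, F is a hypothetical function):

       inline int f (void) { return 0; }   // inline definition; no external
                                            // definition is required yet
       extern int f (void);                 // this later declaration requires
                                            // an external definition of f

   The second declaration arrives after cgraph_finalize_function has already
   run, so the "needed" decision has to be redone here.  */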
382 void
383 cgraph_mark_if_needed (tree decl)
385 struct cgraph_node *node = cgraph_get_node (decl);
386 if (node->local.finalized && cgraph_decide_is_function_needed (node, decl))
387 cgraph_mark_needed_node (node);
390 /* Return TRUE if NODE2 is equivalent to NODE or its clone. */
391 static bool
392 clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
394 while (node != node2 && node2)
395 node2 = node2->clone_of;
396 return node2 != NULL;
399 /* Verify edge E count and frequency. */
401 static bool
402 verify_edge_count_and_frequency (struct cgraph_edge *e)
404 bool error_found = false;
405 if (e->count < 0)
407 error ("caller edge count is negative");
408 error_found = true;
410 if (e->frequency < 0)
412 error ("caller edge frequency is negative");
413 error_found = true;
415 if (e->frequency > CGRAPH_FREQ_MAX)
417 error ("caller edge frequency is too large");
418 error_found = true;
420 if (gimple_has_body_p (e->caller->decl)
421 && !e->caller->global.inlined_to
422 /* FIXME: Inline-analysis sets frequency to 0 when edge is optimized out.
423 Remove this once edges are actually removed from the function at that time. */
424 && (e->frequency
425 || (inline_edge_summary_vec
426 && !inline_edge_summary (e)->predicate))
427 && (e->frequency
428 != compute_call_stmt_bb_frequency (e->caller->decl,
429 gimple_bb (e->call_stmt))))
431 error ("caller edge frequency %i does not match BB frequency %i",
432 e->frequency,
433 compute_call_stmt_bb_frequency (e->caller->decl,
434 gimple_bb (e->call_stmt)));
435 error_found = true;
437 return error_found;
440 /* Switch to THIS_CFUN if needed and print STMT to stderr. */
441 static void
442 cgraph_debug_gimple_stmt (struct function *this_cfun, gimple stmt)
444 /* debug_gimple_stmt needs correct cfun */
445 if (cfun != this_cfun)
446 set_cfun (this_cfun);
447 debug_gimple_stmt (stmt);
450 /* Verify the consistency of the given cgraph node. */
451 DEBUG_FUNCTION void
452 verify_cgraph_node (struct cgraph_node *node)
454 struct cgraph_edge *e;
455 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
456 basic_block this_block;
457 gimple_stmt_iterator gsi;
458 bool error_found = false;
460 if (seen_error ())
461 return;
463 timevar_push (TV_CGRAPH_VERIFY);
464 for (e = node->callees; e; e = e->next_callee)
465 if (e->aux)
467 error ("aux field set for edge %s->%s",
468 identifier_to_locale (cgraph_node_name (e->caller)),
469 identifier_to_locale (cgraph_node_name (e->callee)));
470 error_found = true;
472 if (node->count < 0)
474 error ("execution count is negative");
475 error_found = true;
477 if (node->global.inlined_to && node->local.externally_visible)
479 error ("externally visible inline clone");
480 error_found = true;
482 if (node->global.inlined_to && node->address_taken)
484 error ("inline clone with address taken");
485 error_found = true;
487 if (node->global.inlined_to && node->needed)
489 error ("inline clone is needed");
490 error_found = true;
492 for (e = node->indirect_calls; e; e = e->next_callee)
494 if (e->aux)
496 error ("aux field set for indirect edge from %s",
497 identifier_to_locale (cgraph_node_name (e->caller)));
498 error_found = true;
500 if (!e->indirect_unknown_callee
501 || !e->indirect_info)
503 error ("An indirect edge from %s is not marked as indirect or has "
504 "associated indirect_info, the corresponding statement is: ",
505 identifier_to_locale (cgraph_node_name (e->caller)));
506 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
507 error_found = true;
510 for (e = node->callers; e; e = e->next_caller)
512 if (verify_edge_count_and_frequency (e))
513 error_found = true;
514 if (!e->inline_failed)
516 if (node->global.inlined_to
517 != (e->caller->global.inlined_to
518 ? e->caller->global.inlined_to : e->caller))
520 error ("inlined_to pointer is wrong");
521 error_found = true;
523 if (node->callers->next_caller)
525 error ("multiple inline callers");
526 error_found = true;
529 else
530 if (node->global.inlined_to)
532 error ("inlined_to pointer set for noninline callers");
533 error_found = true;
536 for (e = node->indirect_calls; e; e = e->next_callee)
537 if (verify_edge_count_and_frequency (e))
538 error_found = true;
539 if (!node->callers && node->global.inlined_to)
541 error ("inlined_to pointer is set but no predecessors found");
542 error_found = true;
544 if (node->global.inlined_to == node)
546 error ("inlined_to pointer refers to itself");
547 error_found = true;
550 if (!cgraph_get_node (node->decl))
552 error ("node not found in cgraph_hash");
553 error_found = true;
556 if (node->clone_of)
558 struct cgraph_node *n;
559 for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
560 if (n == node)
561 break;
562 if (!n)
564 error ("node has wrong clone_of");
565 error_found = true;
568 if (node->clones)
570 struct cgraph_node *n;
571 for (n = node->clones; n; n = n->next_sibling_clone)
572 if (n->clone_of != node)
573 break;
574 if (n)
576 error ("node has wrong clone list");
577 error_found = true;
580 if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
582 error ("node is in clone list but it is not clone");
583 error_found = true;
585 if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
587 error ("node has wrong prev_clone pointer");
588 error_found = true;
590 if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
592 error ("double linked list of clones corrupted");
593 error_found = true;
595 if (node->same_comdat_group)
597 struct cgraph_node *n = node->same_comdat_group;
599 if (!DECL_ONE_ONLY (node->decl))
601 error ("non-DECL_ONE_ONLY node in a same_comdat_group list");
602 error_found = true;
604 if (n == node)
606 error ("node is alone in a comdat group");
607 error_found = true;
611 if (!n->same_comdat_group)
613 error ("same_comdat_group is not a circular list");
614 error_found = true;
615 break;
617 n = n->same_comdat_group;
619 while (n != node);
622 if (node->analyzed && node->thunk.thunk_p)
624 if (!node->callees)
626 error ("No edge out of thunk node");
627 error_found = true;
629 else if (node->callees->next_callee)
631 error ("More than one edge out of thunk node");
632 error_found = true;
634 if (gimple_has_body_p (node->decl))
636 error ("Thunk is not supposed to have body");
637 error_found = true;
640 else if (node->analyzed && gimple_has_body_p (node->decl)
641 && !TREE_ASM_WRITTEN (node->decl)
642 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
643 && !flag_wpa)
645 if (this_cfun->cfg)
647 /* The nodes we're interested in are never shared, so walk
648 the tree ignoring duplicates. */
649 struct pointer_set_t *visited_nodes = pointer_set_create ();
650 /* Reach the trees by walking over the CFG, and note the
651 enclosing basic-blocks in the call edges. */
652 FOR_EACH_BB_FN (this_block, this_cfun)
653 for (gsi = gsi_start_bb (this_block);
654 !gsi_end_p (gsi);
655 gsi_next (&gsi))
657 gimple stmt = gsi_stmt (gsi);
658 if (is_gimple_call (stmt))
660 struct cgraph_edge *e = cgraph_edge (node, stmt);
661 tree decl = gimple_call_fndecl (stmt);
662 if (e)
664 if (e->aux)
666 error ("shared call_stmt:");
667 cgraph_debug_gimple_stmt (this_cfun, stmt);
668 error_found = true;
670 if (!e->indirect_unknown_callee)
672 if (e->callee->same_body_alias)
674 error ("edge points to same body alias:");
675 debug_tree (e->callee->decl);
676 error_found = true;
678 else if (!e->callee->global.inlined_to
679 && decl
680 && cgraph_get_node (decl)
681 && (e->callee->former_clone_of
682 != cgraph_get_node (decl)->decl)
683 && !clone_of_p (cgraph_get_node (decl),
684 e->callee))
686 error ("edge points to wrong declaration:");
687 debug_tree (e->callee->decl);
688 fprintf (stderr," Instead of:");
689 debug_tree (decl);
690 error_found = true;
693 else if (decl)
695 error ("an indirect edge with unknown callee "
696 "corresponding to a call_stmt with "
697 "a known declaration:");
698 error_found = true;
699 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
701 e->aux = (void *)1;
703 else if (decl)
705 error ("missing callgraph edge for call stmt:");
706 cgraph_debug_gimple_stmt (this_cfun, stmt);
707 error_found = true;
711 pointer_set_destroy (visited_nodes);
713 else
714 /* No CFG available?! */
715 gcc_unreachable ();
717 for (e = node->callees; e; e = e->next_callee)
719 if (!e->aux)
721 error ("edge %s->%s has no corresponding call_stmt",
722 identifier_to_locale (cgraph_node_name (e->caller)),
723 identifier_to_locale (cgraph_node_name (e->callee)));
724 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
725 error_found = true;
727 e->aux = 0;
729 for (e = node->indirect_calls; e; e = e->next_callee)
731 if (!e->aux)
733 error ("an indirect edge from %s has no corresponding call_stmt",
734 identifier_to_locale (cgraph_node_name (e->caller)));
735 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
736 error_found = true;
738 e->aux = 0;
741 if (error_found)
743 dump_cgraph_node (stderr, node);
744 internal_error ("verify_cgraph_node failed");
746 timevar_pop (TV_CGRAPH_VERIFY);
749 /* Verify whole cgraph structure. */
750 DEBUG_FUNCTION void
751 verify_cgraph (void)
753 struct cgraph_node *node;
755 if (seen_error ())
756 return;
758 for (node = cgraph_nodes; node; node = node->next)
759 verify_cgraph_node (node);
762 /* Output all asm statements we have stored up to be output. */
764 static void
765 cgraph_output_pending_asms (void)
767 struct cgraph_asm_node *can;
769 if (seen_error ())
770 return;
772 for (can = cgraph_asm_nodes; can; can = can->next)
773 assemble_asm (can->asm_str);
774 cgraph_asm_nodes = NULL;
777 /* Analyze the function scheduled to be output. */
778 void
779 cgraph_analyze_function (struct cgraph_node *node)
781 tree save = current_function_decl;
782 tree decl = node->decl;
784 if (node->thunk.thunk_p)
786 cgraph_create_edge (node, cgraph_get_node (node->thunk.alias),
787 NULL, 0, CGRAPH_FREQ_BASE);
789 else
791 current_function_decl = decl;
792 push_cfun (DECL_STRUCT_FUNCTION (decl));
794 assign_assembler_name_if_neeeded (node->decl);
796 /* Make sure to gimplify bodies only once. While analyzing a
797 function we lower it, which will require gimplified nested
798 functions, so we can end up here with an already gimplified
799 body. */
800 if (!gimple_body (decl))
801 gimplify_function_tree (decl);
802 dump_function (TDI_generic, decl);
804 cgraph_lower_function (node);
805 pop_cfun ();
807 node->analyzed = true;
809 current_function_decl = save;
812 /* Process attributes common to vars and functions. */
814 static void
815 process_common_attributes (tree decl)
817 tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
819 if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
821 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
822 "%<weakref%> attribute should be accompanied with"
823 " an %<alias%> attribute");
824 DECL_WEAK (decl) = 0;
825 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
826 DECL_ATTRIBUTES (decl));
830 /* Look for externally_visible and used attributes and mark cgraph nodes
831 accordingly.
833 We cannot mark the nodes at the point the attributes are processed (in
834 handle_*_attribute) because the copy of the declarations available at that
835 point may not be canonical. For example, in:
837 void f();
838 void f() __attribute__((used));
840 the declaration we see in handle_used_attribute will be the second
841 declaration -- but the front end will subsequently merge that declaration
842 with the original declaration and discard the second declaration.
844 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
846 void f() {}
847 void f() __attribute__((externally_visible));
849 is valid.
851 So, we walk the nodes at the end of the translation unit, applying the
852 attributes at that point. */
854 static void
855 process_function_and_variable_attributes (struct cgraph_node *first,
856 struct varpool_node *first_var)
858 struct cgraph_node *node;
859 struct varpool_node *vnode;
861 for (node = cgraph_nodes; node != first; node = node->next)
863 tree decl = node->decl;
864 if (DECL_PRESERVE_P (decl))
865 cgraph_mark_needed_node (node);
866 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
867 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl))
868 && TREE_PUBLIC (node->decl))
870 if (node->local.finalized)
871 cgraph_mark_needed_node (node);
873 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
875 if (! TREE_PUBLIC (node->decl))
876 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
877 "%<externally_visible%>"
878 " attribute have effect only on public objects");
879 else if (node->local.finalized)
880 cgraph_mark_needed_node (node);
882 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
883 && node->local.finalized)
885 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
886 "%<weakref%> attribute ignored"
887 " because function is defined");
888 DECL_WEAK (decl) = 0;
889 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
890 DECL_ATTRIBUTES (decl));
892 process_common_attributes (decl);
894 for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
896 tree decl = vnode->decl;
897 if (DECL_PRESERVE_P (decl))
899 vnode->force_output = true;
900 if (vnode->finalized)
901 varpool_mark_needed_node (vnode);
903 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
904 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl))
905 && TREE_PUBLIC (vnode->decl))
907 if (vnode->finalized)
908 varpool_mark_needed_node (vnode);
910 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
912 if (! TREE_PUBLIC (vnode->decl))
913 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
914 "%<externally_visible%>"
915 " attribute have effect only on public objects");
916 else if (vnode->finalized)
917 varpool_mark_needed_node (vnode);
919 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
920 && vnode->finalized
921 && DECL_INITIAL (decl))
923 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
924 "%<weakref%> attribute ignored"
925 " because variable is initialized");
926 DECL_WEAK (decl) = 0;
927 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
928 DECL_ATTRIBUTES (decl));
930 process_common_attributes (decl);
934 /* Process the CGRAPH_NODES_NEEDED queue, analyze each function (and transitively
935 each reachable function) and build the cgraph.
936 The function can be called multiple times after inserting new nodes
937 into the beginning of the queue; just the new part of the queue is re-scanned then. */
939 static void
940 cgraph_analyze_functions (void)
942 /* Keep track of already processed nodes when called multiple times for
943 intermodule optimization. */
944 static struct cgraph_node *first_analyzed;
945 struct cgraph_node *first_processed = first_analyzed;
946 static struct varpool_node *first_analyzed_var;
947 struct cgraph_node *node, *next;
949 bitmap_obstack_initialize (NULL);
950 process_function_and_variable_attributes (first_processed,
951 first_analyzed_var);
952 first_processed = cgraph_nodes;
953 first_analyzed_var = varpool_nodes;
954 varpool_analyze_pending_decls ();
955 if (cgraph_dump_file)
957 fprintf (cgraph_dump_file, "Initial entry points:");
958 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
959 if (node->needed)
960 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
961 fprintf (cgraph_dump_file, "\n");
963 cgraph_process_new_functions ();
965 /* Propagate reachability flag and lower representation of all reachable
966 functions. In the future, lowering will introduce new functions and
967 new entry points on the way (by template instantiation and virtual
968 method table generation for instance). */
969 while (cgraph_nodes_queue)
971 struct cgraph_edge *edge;
972 tree decl = cgraph_nodes_queue->decl;
974 node = cgraph_nodes_queue;
975 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
976 node->next_needed = NULL;
978 /* ??? It is possible to create an extern inline function and later use the
979 weak alias attribute to kill its body. See
980 gcc.c-torture/compile/20011119-1.c */
981 if (!DECL_STRUCT_FUNCTION (decl)
982 && !node->thunk.thunk_p)
984 cgraph_reset_node (node);
985 node->local.redefined_extern_inline = true;
986 continue;
989 if (!node->analyzed)
990 cgraph_analyze_function (node);
992 for (edge = node->callees; edge; edge = edge->next_callee)
993 if (!edge->callee->reachable)
994 cgraph_mark_reachable_node (edge->callee);
995 for (edge = node->callers; edge; edge = edge->next_caller)
996 if (!edge->caller->reachable && edge->caller->thunk.thunk_p)
997 cgraph_mark_reachable_node (edge->caller);
999 if (node->same_comdat_group)
1001 for (next = node->same_comdat_group;
1002 next != node;
1003 next = next->same_comdat_group)
1004 cgraph_mark_reachable_node (next);
1007 /* If decl is a clone of an abstract function, mark that abstract
1008 function so that we don't release its body. The DECL_INITIAL() of that
1009 abstract function declaration will later be needed to output debug
1010 info. */
1011 if (DECL_ABSTRACT_ORIGIN (decl))
1013 struct cgraph_node *origin_node;
1014 origin_node = cgraph_get_node (DECL_ABSTRACT_ORIGIN (decl));
1015 origin_node->abstract_and_needed = true;
1018 /* We finalize local static variables while constructing callgraph
1019 edges. Process their attributes too. */
1020 process_function_and_variable_attributes (first_processed,
1021 first_analyzed_var);
1022 first_processed = cgraph_nodes;
1023 first_analyzed_var = varpool_nodes;
1024 varpool_analyze_pending_decls ();
1025 cgraph_process_new_functions ();
1028 /* Collect entry points to the unit. */
1029 if (cgraph_dump_file)
1031 fprintf (cgraph_dump_file, "Unit entry points:");
1032 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1033 if (node->needed)
1034 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1035 fprintf (cgraph_dump_file, "\n\nInitial ");
1036 dump_cgraph (cgraph_dump_file);
1037 dump_varpool (cgraph_dump_file);
1040 if (cgraph_dump_file)
1041 fprintf (cgraph_dump_file, "\nReclaiming functions:");
1043 for (node = cgraph_nodes; node != first_analyzed; node = next)
1045 tree decl = node->decl;
1046 next = node->next;
1048 if (node->local.finalized && !gimple_has_body_p (decl)
1049 && !node->thunk.thunk_p)
1050 cgraph_reset_node (node);
1052 if (!node->reachable
1053 && (gimple_has_body_p (decl) || node->thunk.thunk_p))
1055 if (cgraph_dump_file)
1056 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1057 cgraph_remove_node (node);
1058 continue;
1060 else
1061 node->next_needed = NULL;
1062 gcc_assert (!node->local.finalized || node->thunk.thunk_p
1063 || gimple_has_body_p (decl));
1064 gcc_assert (node->analyzed == node->local.finalized);
1066 if (cgraph_dump_file)
1068 fprintf (cgraph_dump_file, "\n\nReclaimed ");
1069 dump_cgraph (cgraph_dump_file);
1070 dump_varpool (cgraph_dump_file);
1072 bitmap_obstack_release (NULL);
1073 first_analyzed = cgraph_nodes;
1074 ggc_collect ();
1078 /* Analyze the whole compilation unit once it is parsed completely. */
1080 void
1081 cgraph_finalize_compilation_unit (void)
1083 timevar_push (TV_CGRAPH);
1085 /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE. */
1086 if (flag_lto)
1087 lto_streamer_hooks_init ();
1089 /* If we're here there's no current function anymore. Some frontends
1090 are lazy in clearing these. */
1091 current_function_decl = NULL;
1092 set_cfun (NULL);
1094 /* Do not skip analyzing the functions if there were errors; we would
1095 miss diagnostics for the following functions otherwise. */
1097 /* Emit size functions we didn't inline. */
1098 finalize_size_functions ();
1100 /* Mark alias targets necessary and emit diagnostics. */
1101 finish_aliases_1 ();
1103 if (!quiet_flag)
1105 fprintf (stderr, "\nAnalyzing compilation unit\n");
1106 fflush (stderr);
1109 /* Gimplify and lower all functions, compute reachability and
1110 remove unreachable nodes. */
1111 cgraph_analyze_functions ();
1113 /* Mark alias targets necessary and emit diagnostics. */
1114 finish_aliases_1 ();
1116 /* Gimplify and lower thunks. */
1117 cgraph_analyze_functions ();
1119 /* Finally drive the pass manager. */
1120 cgraph_optimize ();
1122 timevar_pop (TV_CGRAPH);
1126 /* Figure out what functions we want to assemble. */
1128 static void
1129 cgraph_mark_functions_to_output (void)
1131 struct cgraph_node *node;
1132 #ifdef ENABLE_CHECKING
1133 bool check_same_comdat_groups = false;
1135 for (node = cgraph_nodes; node; node = node->next)
1136 gcc_assert (!node->process);
1137 #endif
1139 for (node = cgraph_nodes; node; node = node->next)
1141 tree decl = node->decl;
1142 struct cgraph_edge *e;
1144 gcc_assert (!node->process || node->same_comdat_group);
1145 if (node->process)
1146 continue;
1148 for (e = node->callers; e; e = e->next_caller)
1149 if (e->inline_failed)
1150 break;
1152 /* We need to output all local functions that are used and not
1153 always inlined, as well as those that are reachable from
1154 outside the current compilation unit. */
1155 if (node->analyzed
1156 && !node->thunk.thunk_p
1157 && !node->global.inlined_to
1158 && (!cgraph_only_called_directly_p (node)
1159 || (e && node->reachable))
1160 && !TREE_ASM_WRITTEN (decl)
1161 && !DECL_EXTERNAL (decl))
1163 node->process = 1;
1164 if (node->same_comdat_group)
1166 struct cgraph_node *next;
1167 for (next = node->same_comdat_group;
1168 next != node;
1169 next = next->same_comdat_group)
1170 if (!next->thunk.thunk_p)
1171 next->process = 1;
1174 else if (node->same_comdat_group)
1176 #ifdef ENABLE_CHECKING
1177 check_same_comdat_groups = true;
1178 #endif
1180 else
1182 /* We should've reclaimed all functions that are not needed. */
1183 #ifdef ENABLE_CHECKING
1184 if (!node->global.inlined_to
1185 && gimple_has_body_p (decl)
1186 /* FIXME: in an ltrans unit, when the offline copy is outside the partition but inline copies
1187 are inside the partition, we can end up not removing the body since we no longer
1188 have an analyzed node pointing to it. */
1189 && !node->in_other_partition
1190 && !DECL_EXTERNAL (decl))
1192 dump_cgraph_node (stderr, node);
1193 internal_error ("failed to reclaim unneeded function");
1195 #endif
1196 gcc_assert (node->global.inlined_to
1197 || !gimple_has_body_p (decl)
1198 || node->in_other_partition
1199 || DECL_EXTERNAL (decl));
1204 #ifdef ENABLE_CHECKING
1205 if (check_same_comdat_groups)
1206 for (node = cgraph_nodes; node; node = node->next)
1207 if (node->same_comdat_group && !node->process)
1209 tree decl = node->decl;
1210 if (!node->global.inlined_to
1211 && gimple_has_body_p (decl)
1212 /* FIXME: in an ltrans unit, when the offline copy is outside the partition but inline copies
1213 are inside the partition, we can end up not removing the body since we no longer
1214 have an analyzed node pointing to it. */
1215 && !node->in_other_partition
1216 && !DECL_EXTERNAL (decl))
1218 dump_cgraph_node (stderr, node);
1219 internal_error ("failed to reclaim unneeded function");
1222 #endif
1225 /* DECL is a FUNCTION_DECL. Initialize data structures so DECL is a function
1226 in lowered gimple form.
1228 Set current_function_decl and cfun to the newly constructed empty function body.
1229 Return the basic block in the function body. */
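/* The control flow graph produced below is simply
   ENTRY_BLOCK -> BB -> EXIT_BLOCK (see the two make_edge calls); BB is
   returned so the caller can insert statements into it.  */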
1231 static basic_block
1232 init_lowered_empty_function (tree decl)
1234 basic_block bb;
1236 current_function_decl = decl;
1237 allocate_struct_function (decl, false);
1238 gimple_register_cfg_hooks ();
1239 init_empty_tree_cfg ();
1240 init_tree_ssa (cfun);
1241 init_ssa_operands ();
1242 cfun->gimple_df->in_ssa_p = true;
1243 DECL_INITIAL (decl) = make_node (BLOCK);
1245 DECL_SAVED_TREE (decl) = error_mark_node;
1246 cfun->curr_properties |=
1247 (PROP_gimple_lcf | PROP_gimple_leh | PROP_cfg | PROP_referenced_vars |
1248 PROP_ssa);
1250 /* Create BB for body of the function and connect it properly. */
1251 bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
1252 make_edge (ENTRY_BLOCK_PTR, bb, 0);
1253 make_edge (bb, EXIT_BLOCK_PTR, 0);
1255 return bb;
1258 /* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1259 offset indicated by VIRTUAL_OFFSET, if that is
1260 non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
1261 zero for a result adjusting thunk. */
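/* Roughly (a sketch of the adjustment built below, not literal emitted
   code; for a this-adjusting thunk the constant part is applied first and
   the vtable lookup second, for a result-adjusting thunk the constant part
   comes last):

       ptr += fixed_offset;                               // constant part
       if (virtual_offset)
         ptr += *(ptrdiff_t *) (*(char **) ptr + voff);   // vcall offset
                                                           // read from vtable

   where VOFF stands for the byte offset given by VIRTUAL_OFFSET.  */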
1263 static tree
1264 thunk_adjust (gimple_stmt_iterator * bsi,
1265 tree ptr, bool this_adjusting,
1266 HOST_WIDE_INT fixed_offset, tree virtual_offset)
1268 gimple stmt;
1269 tree ret;
1271 if (this_adjusting
1272 && fixed_offset != 0)
1274 stmt = gimple_build_assign (ptr,
1275 fold_build2_loc (input_location,
1276 POINTER_PLUS_EXPR,
1277 TREE_TYPE (ptr), ptr,
1278 size_int (fixed_offset)));
1279 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1282 /* If there's a virtual offset, look up that value in the vtable and
1283 adjust the pointer again. */
1284 if (virtual_offset)
1286 tree vtabletmp;
1287 tree vtabletmp2;
1288 tree vtabletmp3;
1289 tree offsettmp;
1291 if (!vtable_entry_type)
1293 tree vfunc_type = make_node (FUNCTION_TYPE);
1294 TREE_TYPE (vfunc_type) = integer_type_node;
1295 TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
1296 layout_type (vfunc_type);
1298 vtable_entry_type = build_pointer_type (vfunc_type);
1301 vtabletmp =
1302 create_tmp_var (build_pointer_type
1303 (build_pointer_type (vtable_entry_type)), "vptr");
1305 /* The vptr is always at offset zero in the object. */
1306 stmt = gimple_build_assign (vtabletmp,
1307 build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
1308 ptr));
1309 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1310 mark_symbols_for_renaming (stmt);
1311 find_referenced_vars_in (stmt);
1313 /* Form the vtable address. */
1314 vtabletmp2 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp)),
1315 "vtableaddr");
1316 stmt = gimple_build_assign (vtabletmp2,
1317 build_simple_mem_ref (vtabletmp));
1318 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1319 mark_symbols_for_renaming (stmt);
1320 find_referenced_vars_in (stmt);
1322 /* Find the entry with the vcall offset. */
1323 stmt = gimple_build_assign (vtabletmp2,
1324 fold_build2_loc (input_location,
1325 POINTER_PLUS_EXPR,
1326 TREE_TYPE (vtabletmp2),
1327 vtabletmp2,
1328 fold_convert (sizetype,
1329 virtual_offset)));
1330 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1332 /* Get the offset itself. */
1333 vtabletmp3 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp2)),
1334 "vcalloffset");
1335 stmt = gimple_build_assign (vtabletmp3,
1336 build_simple_mem_ref (vtabletmp2));
1337 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1338 mark_symbols_for_renaming (stmt);
1339 find_referenced_vars_in (stmt);
1341 /* Cast to sizetype. */
1342 offsettmp = create_tmp_var (sizetype, "offset");
1343 stmt = gimple_build_assign (offsettmp, fold_convert (sizetype, vtabletmp3));
1344 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1345 mark_symbols_for_renaming (stmt);
1346 find_referenced_vars_in (stmt);
1348 /* Adjust the `this' pointer. */
1349 ptr = fold_build2_loc (input_location,
1350 POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
1351 offsettmp);
1354 if (!this_adjusting
1355 && fixed_offset != 0)
1356 /* Adjust the pointer by the constant. */
1358 tree ptrtmp;
1360 if (TREE_CODE (ptr) == VAR_DECL)
1361 ptrtmp = ptr;
1362 else
1364 ptrtmp = create_tmp_var (TREE_TYPE (ptr), "ptr");
1365 stmt = gimple_build_assign (ptrtmp, ptr);
1366 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1367 mark_symbols_for_renaming (stmt);
1368 find_referenced_vars_in (stmt);
1370 ptr = fold_build2_loc (input_location,
1371 POINTER_PLUS_EXPR, TREE_TYPE (ptrtmp), ptrtmp,
1372 size_int (fixed_offset));
1375 /* Emit the statement and gimplify the adjustment expression. */
1376 ret = create_tmp_var (TREE_TYPE (ptr), "adjusted_this");
1377 stmt = gimple_build_assign (ret, ptr);
1378 mark_symbols_for_renaming (stmt);
1379 find_referenced_vars_in (stmt);
1380 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1382 return ret;
1385 /* Produce assembler for thunk NODE. */
1387 static void
1388 assemble_thunk (struct cgraph_node *node)
1390 bool this_adjusting = node->thunk.this_adjusting;
1391 HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
1392 HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
1393 tree virtual_offset = NULL;
1394 tree alias = node->thunk.alias;
1395 tree thunk_fndecl = node->decl;
1396 tree a = DECL_ARGUMENTS (thunk_fndecl);
1398 current_function_decl = thunk_fndecl;
1400 /* Ensure thunks are emitted in their correct sections. */
1401 resolve_unique_section (thunk_fndecl, 0, flag_function_sections);
1403 if (this_adjusting
1404 && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1405 virtual_value, alias))
1407 const char *fnname;
1408 tree fn_block;
1410 DECL_RESULT (thunk_fndecl)
1411 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1412 RESULT_DECL, 0, integer_type_node);
1413 fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
1415 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1416 create one. */
1417 fn_block = make_node (BLOCK);
1418 BLOCK_VARS (fn_block) = a;
1419 DECL_INITIAL (thunk_fndecl) = fn_block;
1420 init_function_start (thunk_fndecl);
1421 cfun->is_thunk = 1;
1422 assemble_start_function (thunk_fndecl, fnname);
1424 targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1425 fixed_offset, virtual_value, alias);
1427 assemble_end_function (thunk_fndecl, fnname);
1428 init_insn_lengths ();
1429 free_after_compilation (cfun);
1430 set_cfun (NULL);
1431 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1432 node->thunk.thunk_p = false;
1433 node->analyzed = false;
1435 else
1437 tree restype;
1438 basic_block bb, then_bb, else_bb, return_bb;
1439 gimple_stmt_iterator bsi;
1440 int nargs = 0;
1441 tree arg;
1442 int i;
1443 tree resdecl;
1444 tree restmp = NULL;
1445 VEC(tree, heap) *vargs;
1447 gimple call;
1448 gimple ret;
1450 DECL_IGNORED_P (thunk_fndecl) = 1;
1451 bitmap_obstack_initialize (NULL);
1453 if (node->thunk.virtual_offset_p)
1454 virtual_offset = size_int (virtual_value);
1456 /* Build the return declaration for the function. */
1457 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1458 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1460 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1461 DECL_ARTIFICIAL (resdecl) = 1;
1462 DECL_IGNORED_P (resdecl) = 1;
1463 DECL_RESULT (thunk_fndecl) = resdecl;
1465 else
1466 resdecl = DECL_RESULT (thunk_fndecl);
1468 bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl);
1470 bsi = gsi_start_bb (bb);
1472 /* Build call to the function being thunked. */
1473 if (!VOID_TYPE_P (restype))
1475 if (!is_gimple_reg_type (restype))
1477 restmp = resdecl;
1478 add_local_decl (cfun, restmp);
1479 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
1481 else
1482 restmp = create_tmp_var_raw (restype, "retval");
1485 for (arg = a; arg; arg = DECL_CHAIN (arg))
1486 nargs++;
1487 vargs = VEC_alloc (tree, heap, nargs);
1488 if (this_adjusting)
1489 VEC_quick_push (tree, vargs,
1490 thunk_adjust (&bsi,
1491 a, 1, fixed_offset,
1492 virtual_offset));
1493 else
1494 VEC_quick_push (tree, vargs, a);
1495 for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
1496 VEC_quick_push (tree, vargs, arg);
1497 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
1498 VEC_free (tree, heap, vargs);
1499 gimple_call_set_cannot_inline (call, true);
1500 gimple_call_set_from_thunk (call, true);
1501 if (restmp)
1502 gimple_call_set_lhs (call, restmp);
1503 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
1504 mark_symbols_for_renaming (call);
1505 find_referenced_vars_in (call);
1506 update_stmt (call);
1508 if (restmp && !this_adjusting)
1510 tree true_label = NULL_TREE;
1512 if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
1514 gimple stmt;
1515 /* If the return type is a pointer, we need to
1516 protect against NULL. We know there will be an
1517 adjustment, because that's why we're emitting a
1518 thunk. */
1519 then_bb = create_basic_block (NULL, (void *) 0, bb);
1520 return_bb = create_basic_block (NULL, (void *) 0, then_bb);
1521 else_bb = create_basic_block (NULL, (void *) 0, else_bb);
1522 remove_edge (single_succ_edge (bb));
1523 true_label = gimple_block_label (then_bb);
1524 stmt = gimple_build_cond (NE_EXPR, restmp,
1525 build_zero_cst (TREE_TYPE (restmp)),
1526 NULL_TREE, NULL_TREE);
1527 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1528 make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1529 make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1530 make_edge (return_bb, EXIT_BLOCK_PTR, 0);
1531 make_edge (then_bb, return_bb, EDGE_FALLTHRU);
1532 make_edge (else_bb, return_bb, EDGE_FALLTHRU);
1533 bsi = gsi_last_bb (then_bb);
1536 restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
1537 fixed_offset, virtual_offset);
1538 if (true_label)
1540 gimple stmt;
1541 bsi = gsi_last_bb (else_bb);
1542 stmt = gimple_build_assign (restmp,
1543 build_zero_cst (TREE_TYPE (restmp)));
1544 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1545 bsi = gsi_last_bb (return_bb);
1548 else
1549 gimple_call_set_tail (call, true);
1551 /* Build return value. */
1552 ret = gimple_build_return (restmp);
1553 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
1555 delete_unreachable_blocks ();
1556 update_ssa (TODO_update_ssa);
1558 /* Since we want to emit the thunk, we explicitly mark its name as
1559 referenced. */
1560 node->thunk.thunk_p = false;
1561 cgraph_node_remove_callees (node);
1562 cgraph_add_new_function (thunk_fndecl, true);
1563 bitmap_obstack_release (NULL);
1565 current_function_decl = NULL;
1569 /* Assemble thunks associated with NODE. */
1571 static void
1572 assemble_thunks (struct cgraph_node *node)
1574 struct cgraph_edge *e;
1575 for (e = node->callers; e;)
1576 if (e->caller->thunk.thunk_p)
1578 struct cgraph_node *thunk = e->caller;
1580 e = e->next_caller;
1581 assemble_thunks (thunk);
1582 assemble_thunk (thunk);
1584 else
1585 e = e->next_caller;
1588 /* Expand function specified by NODE. */
1590 static void
1591 cgraph_expand_function (struct cgraph_node *node)
1593 tree decl = node->decl;
1595 /* We ought to not compile any inline clones. */
1596 gcc_assert (!node->global.inlined_to);
1598 announce_function (decl);
1599 node->process = 0;
1600 if (node->same_body)
1602 struct cgraph_node *alias, *next;
1603 bool saved_alias = node->alias;
1604 for (alias = node->same_body;
1605 alias && alias->next; alias = alias->next)
1607 /* Walk aliases in the order they were created; it is possible that
1608 thunks refer to the aliases made earlier. */
1609 for (; alias; alias = next)
1611 next = alias->previous;
1612 if (!alias->thunk.thunk_p)
1613 assemble_alias (alias->decl,
1614 DECL_ASSEMBLER_NAME (alias->thunk.alias));
1616 node->alias = saved_alias;
1617 cgraph_process_new_functions ();
1620 assemble_thunks (node);
1621 gcc_assert (node->lowered);
1623 /* Generate RTL for the body of DECL. */
1624 tree_rest_of_compilation (decl);
1626 /* Make sure that the back end didn't give up on compiling. */
1627 gcc_assert (TREE_ASM_WRITTEN (decl));
1628 current_function_decl = NULL;
1629 gcc_assert (!cgraph_preserve_function_body_p (node));
1630 cgraph_release_function_body (node);
1631 /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
1632 points to the dead function body. */
1633 cgraph_node_remove_callees (node);
1635 cgraph_function_flags_ready = true;
1638 /* Set *REASON to edge E's inline_failed code and return true if the call along E is inlined (i.e. inline_failed is not set). */
1640 bool
1641 cgraph_inline_p (struct cgraph_edge *e, cgraph_inline_failed_t *reason)
1643 *reason = e->inline_failed;
1644 return !e->inline_failed;
1649 /* Expand all functions that must be output.
1651 Attempt to topologically sort the nodes so a function is output when
1652 all the functions it calls are already assembled, to allow data to be
1653 propagated across the callgraph. Use a stack to get a smaller distance
1654 between a function and its callees (later we may choose to use a more
1655 sophisticated algorithm for function reordering; we will likely want
1656 to use subsections to make the output functions appear in top-down
1657 order). */
1659 static void
1660 cgraph_expand_all_functions (void)
1662 struct cgraph_node *node;
1663 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1664 int order_pos, new_order_pos = 0;
1665 int i;
1667 order_pos = ipa_reverse_postorder (order);
1668 gcc_assert (order_pos == cgraph_n_nodes);
1670 /* The garbage collector may remove inline clones we eliminate during
1671 optimization, so we must be sure not to reference them. */
1672 for (i = 0; i < order_pos; i++)
1673 if (order[i]->process)
1674 order[new_order_pos++] = order[i];
1676 for (i = new_order_pos - 1; i >= 0; i--)
1678 node = order[i];
1679 if (node->process)
1681 gcc_assert (node->reachable);
1682 node->process = 0;
1683 cgraph_expand_function (node);
1686 cgraph_process_new_functions ();
1688 free (order);
1692 /* This is used to sort the node types by the cgraph order number. */
1694 enum cgraph_order_sort_kind
1696 ORDER_UNDEFINED = 0,
1697 ORDER_FUNCTION,
1698 ORDER_VAR,
1699 ORDER_ASM
1702 struct cgraph_order_sort
1704 enum cgraph_order_sort_kind kind;
1705 union
1707 struct cgraph_node *f;
1708 struct varpool_node *v;
1709 struct cgraph_asm_node *a;
1710 } u;
1713 /* Output all functions, variables, and asm statements in the order
1714 according to their order fields, which is the order in which they
1715 appeared in the file. This implements -fno-toplevel-reorder. In
1716 this mode we may output functions and variables which don't really
1717 need to be output. */
1719 static void
1720 cgraph_output_in_order (void)
1722 int max;
1723 struct cgraph_order_sort *nodes;
1724 int i;
1725 struct cgraph_node *pf;
1726 struct varpool_node *pv;
1727 struct cgraph_asm_node *pa;
1729 max = cgraph_order;
1730 nodes = XCNEWVEC (struct cgraph_order_sort, max);
1732 varpool_analyze_pending_decls ();
1734 for (pf = cgraph_nodes; pf; pf = pf->next)
1736 if (pf->process && !pf->thunk.thunk_p)
1738 i = pf->order;
1739 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1740 nodes[i].kind = ORDER_FUNCTION;
1741 nodes[i].u.f = pf;
1745 for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
1747 i = pv->order;
1748 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1749 nodes[i].kind = ORDER_VAR;
1750 nodes[i].u.v = pv;
1753 for (pa = cgraph_asm_nodes; pa; pa = pa->next)
1755 i = pa->order;
1756 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1757 nodes[i].kind = ORDER_ASM;
1758 nodes[i].u.a = pa;
1761 /* In toplevel reorder mode we output all statics; mark them as needed. */
1762 for (i = 0; i < max; ++i)
1764 if (nodes[i].kind == ORDER_VAR)
1766 varpool_mark_needed_node (nodes[i].u.v);
1769 varpool_empty_needed_queue ();
1771 for (i = 0; i < max; ++i)
1772 if (nodes[i].kind == ORDER_VAR)
1773 varpool_finalize_named_section_flags (nodes[i].u.v);
1775 for (i = 0; i < max; ++i)
1777 switch (nodes[i].kind)
1779 case ORDER_FUNCTION:
1780 nodes[i].u.f->process = 0;
1781 cgraph_expand_function (nodes[i].u.f);
1782 break;
1784 case ORDER_VAR:
1785 varpool_assemble_decl (nodes[i].u.v);
1786 break;
1788 case ORDER_ASM:
1789 assemble_asm (nodes[i].u.a->asm_str);
1790 break;
1792 case ORDER_UNDEFINED:
1793 break;
1795 default:
1796 gcc_unreachable ();
1800 cgraph_asm_nodes = NULL;
1801 free (nodes);
1804 /* Return true when the function body of NODE still needs to be kept around
1805 for later re-use. */
1806 bool
1807 cgraph_preserve_function_body_p (struct cgraph_node *node)
1809 gcc_assert (cgraph_global_info_ready);
1810 gcc_assert (!node->same_body_alias);
1812 /* Look if there is any clone around. */
1813 if (node->clones)
1814 return true;
1815 return false;
1818 static void
1819 ipa_passes (void)
1821 set_cfun (NULL);
1822 current_function_decl = NULL;
1823 gimple_register_cfg_hooks ();
1824 bitmap_obstack_initialize (NULL);
1826 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);
1828 if (!in_lto_p)
1830 execute_ipa_pass_list (all_small_ipa_passes);
1831 if (seen_error ())
1832 return;
1835 /* If pass_all_early_optimizations was not scheduled, the state of
1836 the cgraph will not be properly updated. Update it now. */
1837 if (cgraph_state < CGRAPH_STATE_IPA_SSA)
1838 cgraph_state = CGRAPH_STATE_IPA_SSA;
1840 if (!in_lto_p)
1842 /* Generate coverage variables and constructors. */
1843 coverage_finish ();
1845 /* Process new functions added. */
1846 set_cfun (NULL);
1847 current_function_decl = NULL;
1848 cgraph_process_new_functions ();
1850 execute_ipa_summary_passes
1851 ((struct ipa_opt_pass_d *) all_regular_ipa_passes);
1854 /* Some targets need to handle LTO assembler output specially. */
1855 if (flag_generate_lto)
1856 targetm.asm_out.lto_start ();
1858 execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);
1860 if (!in_lto_p)
1861 ipa_write_summaries ();
1863 if (flag_generate_lto)
1864 targetm.asm_out.lto_end ();
1866 if (!flag_ltrans)
1867 execute_ipa_pass_list (all_regular_ipa_passes);
1868 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
1870 bitmap_obstack_release (NULL);
1874 /* Perform simple optimizations based on callgraph. */
1876 void
1877 cgraph_optimize (void)
1879 if (seen_error ())
1880 return;
1882 #ifdef ENABLE_CHECKING
1883 verify_cgraph ();
1884 #endif
1886 /* Frontend may output common variables after the unit has been finalized.
1887 It is safe to deal with them here as they are always zero initialized. */
1888 varpool_analyze_pending_decls ();
1890 timevar_push (TV_CGRAPHOPT);
1891 if (pre_ipa_mem_report)
1893 fprintf (stderr, "Memory consumption before IPA\n");
1894 dump_memory_report (false);
1896 if (!quiet_flag)
1897 fprintf (stderr, "Performing interprocedural optimizations\n");
1898 cgraph_state = CGRAPH_STATE_IPA;
1900 /* Don't run the IPA passes if there were any errors or sorry messages. */
1901 if (!seen_error ())
1902 ipa_passes ();
1904 /* Do nothing else if any IPA pass found errors. */
1905 if (seen_error ())
1907 timevar_pop (TV_CGRAPHOPT);
1908 return;
1911 /* This pass removes the bodies of extern inline functions we never inlined.
1912 Do this later so other IPA passes see what is really going on. */
1913 cgraph_remove_unreachable_nodes (false, dump_file);
1914 cgraph_global_info_ready = true;
1915 if (cgraph_dump_file)
1917 fprintf (cgraph_dump_file, "Optimized ");
1918 dump_cgraph (cgraph_dump_file);
1919 dump_varpool (cgraph_dump_file);
1921 if (post_ipa_mem_report)
1923 fprintf (stderr, "Memory consumption after IPA\n");
1924 dump_memory_report (false);
1926 timevar_pop (TV_CGRAPHOPT);
1928 /* Output everything. */
1929 (*debug_hooks->assembly_start) ();
1930 if (!quiet_flag)
1931 fprintf (stderr, "Assembling functions:\n");
1932 #ifdef ENABLE_CHECKING
1933 verify_cgraph ();
1934 #endif
1936 cgraph_materialize_all_clones ();
1937 cgraph_mark_functions_to_output ();
1939 cgraph_state = CGRAPH_STATE_EXPANSION;
1940 if (!flag_toplevel_reorder)
1941 cgraph_output_in_order ();
1942 else
1944 cgraph_output_pending_asms ();
1946 cgraph_expand_all_functions ();
1947 varpool_remove_unreferenced_decls ();
1949 varpool_assemble_pending_decls ();
1951 cgraph_process_new_functions ();
1952 cgraph_state = CGRAPH_STATE_FINISHED;
1954 if (cgraph_dump_file)
1956 fprintf (cgraph_dump_file, "\nFinal ");
1957 dump_cgraph (cgraph_dump_file);
1958 dump_varpool (cgraph_dump_file);
1960 #ifdef ENABLE_CHECKING
1961 verify_cgraph ();
1962 /* Double check that all inline clones are gone and that all
1963 function bodies have been released from memory. */
1964 if (!seen_error ())
1966 struct cgraph_node *node;
1967 bool error_found = false;
1969 for (node = cgraph_nodes; node; node = node->next)
1970 if (node->analyzed
1971 && (node->global.inlined_to
1972 || gimple_has_body_p (node->decl)))
1974 error_found = true;
1975 dump_cgraph_node (stderr, node);
1977 if (error_found)
1978 internal_error ("nodes with unreleased memory found");
1980 #endif
1983 void
1984 init_cgraph (void)
1986 if (!cgraph_dump_file)
1987 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
1990 /* The edges representing the callers of the NEW_VERSION node were
1991 fixed by cgraph_function_versioning (); now the call_expr in their
1992 respective call statements should be updated to call NEW_VERSION.  */
1994 static void
1995 update_call_expr (struct cgraph_node *new_version)
1997 struct cgraph_edge *e;
1999 gcc_assert (new_version);
2001 /* Update the call expr on the edges to call the new version. */
2002 for (e = new_version->callers; e; e = e->next_caller)
2004 struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
2005 gimple_call_set_fndecl (e->call_stmt, new_version->decl);
2006 maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
2011 /* Create a new cgraph node which is the new version of
2012 the OLD_VERSION node.  REDIRECT_CALLERS holds the caller
2013 edges which should be redirected to point to
2014 NEW_VERSION.  ALL the callee edges of OLD_VERSION
2015 are cloned to the new version node.  Return the new
2016 version node.
2018 If non-NULL, BBS_TO_COPY determines which basic blocks
2019 are copied, to prevent duplication of calls that are dead
2020 in the clone.  */
2022 static struct cgraph_node *
2023 cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
2024 tree new_decl,
2025 VEC(cgraph_edge_p,heap) *redirect_callers,
2026 bitmap bbs_to_copy)
2028 struct cgraph_node *new_version;
2029 struct cgraph_edge *e;
2030 unsigned i;
2032 gcc_assert (old_version);
2034 new_version = cgraph_create_node (new_decl);
2036 new_version->analyzed = true;
2037 new_version->local = old_version->local;
2038 new_version->local.externally_visible = false;
2039 new_version->local.local = true;
2040 new_version->global = old_version->global;
2041 new_version->rtl = old_version->rtl;
2042 new_version->reachable = true;
2043 new_version->count = old_version->count;
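/* Clone the outgoing direct and indirect call edges of the old version,
   skipping calls whose basic block is not in BBS_TO_COPY and therefore
   will not exist in the clone.  */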
2045 for (e = old_version->callees; e; e=e->next_callee)
2046 if (!bbs_to_copy
2047 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
2048 cgraph_clone_edge (e, new_version, e->call_stmt,
2049 e->lto_stmt_uid, REG_BR_PROB_BASE,
2050 CGRAPH_FREQ_BASE,
2051 true);
2052 for (e = old_version->indirect_calls; e; e=e->next_callee)
2053 if (!bbs_to_copy
2054 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
2055 cgraph_clone_edge (e, new_version, e->call_stmt,
2056 e->lto_stmt_uid, REG_BR_PROB_BASE,
2057 CGRAPH_FREQ_BASE,
2058 true);
2059 FOR_EACH_VEC_ELT (cgraph_edge_p, redirect_callers, i, e)
2061 /* Redirect calls to the old version node to point to its new
2062 version. */
2063 cgraph_redirect_edge_callee (e, new_version);
2066 return new_version;
2069 /* Perform function versioning.
2070 Function versioning includes copying of the tree and
2071 a callgraph update (creating a new cgraph node and updating
2072 its callees and callers).
2074 The REDIRECT_CALLERS vector holds the edges to be redirected
2075 to the new version.
2077 TREE_MAP is a mapping of tree nodes we want to replace with
2078 new ones (according to results of prior analysis).
2079 OLD_VERSION_NODE is the node that is versioned.
2080 Return the new version's cgraph node.
2081 If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
2082 from the new version.
2083 If non-NULL, BBS_TO_COPY determines which basic blocks to copy.
2084 If non-NULL, NEW_ENTRY_BLOCK determines the new entry BB of the clone.  */
2086 struct cgraph_node *
2087 cgraph_function_versioning (struct cgraph_node *old_version_node,
2088 VEC(cgraph_edge_p,heap) *redirect_callers,
2089 VEC (ipa_replace_map_p,gc)* tree_map,
2090 bitmap args_to_skip,
2091 bitmap bbs_to_copy,
2092 basic_block new_entry_block,
2093 const char *clone_name)
2095 tree old_decl = old_version_node->decl;
2096 struct cgraph_node *new_version_node = NULL;
2097 tree new_decl;
2099 if (!tree_versionable_function_p (old_decl))
2100 return NULL;
2102 gcc_assert (old_version_node->local.can_change_signature || !args_to_skip);
2104 /* Make a new FUNCTION_DECL tree node for the
2105 new version. */
2106 if (!args_to_skip)
2107 new_decl = copy_node (old_decl);
2108 else
2109 new_decl = build_function_decl_skip_args (old_decl, args_to_skip);
2111 /* Generate a new name for the new version. */
2112 DECL_NAME (new_decl) = clone_function_name (old_decl, clone_name);
2113 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
2114 SET_DECL_RTL (new_decl, NULL);
2116 /* Create the new version's call-graph node
2117 and update the edges of the new node.  */
2118 new_version_node =
2119 cgraph_copy_node_for_versioning (old_version_node, new_decl,
2120 redirect_callers, bbs_to_copy);
2122 /* Copy the OLD_VERSION_NODE function tree to the new version. */
2123 tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
2124 bbs_to_copy, new_entry_block);
2126 /* Update the new version's properties.
2127 Make the new version visible only within this translation unit.  Make sure
2128 that it is not weak either.
2129 ??? We cannot use COMDAT linkage because there is no
2130 ABI support for this.  */
2131 cgraph_make_decl_local (new_version_node->decl);
2132 DECL_VIRTUAL_P (new_version_node->decl) = 0;
2133 new_version_node->local.externally_visible = 0;
2134 new_version_node->local.local = 1;
2135 new_version_node->lowered = true;
2137 /* Update the call_expr on the edges to call the new version node. */
2138 update_call_expr (new_version_node);
2140 cgraph_call_function_insertion_hooks (new_version_node);
2141 return new_version_node;
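/* A hypothetical usage sketch: an IPA pass that wants a local specialized
   copy of NODE might call the machinery above roughly like this; the helper
   name and the "constprop" clone suffix are placeholders, not existing code.

     static struct cgraph_node *
     make_local_copy (struct cgraph_node *node,
                      VEC (cgraph_edge_p, heap) *redirect_callers)
     {
       return cgraph_function_versioning (node, redirect_callers,
                                          NULL, NULL, NULL, NULL, "constprop");
     }
*/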
2144 /* Given a virtual clone, turn it into an actual clone.  */
2145 static void
2146 cgraph_materialize_clone (struct cgraph_node *node)
2148 bitmap_obstack_initialize (NULL);
2149 node->former_clone_of = node->clone_of->decl;
2150 if (node->clone_of->former_clone_of)
2151 node->former_clone_of = node->clone_of->former_clone_of;
2152 /* Copy the function tree of the node we are a clone of into our own decl.  */
2153 tree_function_versioning (node->clone_of->decl, node->decl,
2154 node->clone.tree_map, true,
2155 node->clone.args_to_skip, NULL, NULL);
2156 if (cgraph_dump_file)
2158 dump_function_to_file (node->clone_of->decl, cgraph_dump_file, dump_flags);
2159 dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
2162 /* The function is no longer a clone.  */
2163 if (node->next_sibling_clone)
2164 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
2165 if (node->prev_sibling_clone)
2166 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
2167 else
2168 node->clone_of->clones = node->next_sibling_clone;
2169 node->next_sibling_clone = NULL;
2170 node->prev_sibling_clone = NULL;
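/* If nothing refers to the original body any more (the node we were cloned
   from was never analyzed on its own and has no remaining clones), release
   its body and outgoing references to save memory.  */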
2171 if (!node->clone_of->analyzed && !node->clone_of->clones)
2173 cgraph_release_function_body (node->clone_of);
2174 cgraph_node_remove_callees (node->clone_of);
2175 ipa_remove_all_references (&node->clone_of->ref_list);
2177 node->clone_of = NULL;
2178 bitmap_obstack_release (NULL);
2181 /* If necessary, change the function declaration in the call statement
2182 associated with E so that it corresponds to the edge callee. */
2184 gimple
2185 cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *e)
2187 tree decl = gimple_call_fndecl (e->call_stmt);
2188 gimple new_stmt;
2189 gimple_stmt_iterator gsi;
2190 bool gsi_computed = false;
2191 #ifdef ENABLE_CHECKING
2192 struct cgraph_node *node;
2193 #endif
2195 if (e->indirect_unknown_callee
2196 || decl == e->callee->decl
2197 /* Don't update a call from a same-body alias to the real function.  */
2198 || (decl && cgraph_get_node (decl) == cgraph_get_node (e->callee->decl)))
2199 return e->call_stmt;
2201 #ifdef ENABLE_CHECKING
2202 if (decl)
2204 node = cgraph_get_node (decl);
2205 gcc_assert (!node || !node->clone.combined_args_to_skip);
2207 #endif
2209 if (cgraph_dump_file)
2211 fprintf (cgraph_dump_file, "updating call of %s/%i -> %s/%i: ",
2212 cgraph_node_name (e->caller), e->caller->uid,
2213 cgraph_node_name (e->callee), e->callee->uid);
2214 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
2215 if (e->callee->clone.combined_args_to_skip)
2217 fprintf (cgraph_dump_file, " combined args to skip: ");
2218 dump_bitmap (cgraph_dump_file,
2219 e->callee->clone.combined_args_to_skip);
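/* If a 'this'-pointer adjustment (thunk delta) was recorded for this call,
   fold it into the call statement now, unless the clone skips its first
   argument anyway.  */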
2223 if (e->indirect_info
2224 && e->indirect_info->thunk_delta != 0
2225 && (!e->callee->clone.combined_args_to_skip
2226 || !bitmap_bit_p (e->callee->clone.combined_args_to_skip, 0)))
2228 if (cgraph_dump_file)
2229 fprintf (cgraph_dump_file, " Thunk delta is "
2230 HOST_WIDE_INT_PRINT_DEC "\n", e->indirect_info->thunk_delta);
2231 gsi = gsi_for_stmt (e->call_stmt);
2232 gsi_computed = true;
2233 gimple_adjust_this_by_delta (&gsi,
2234 build_int_cst (sizetype,
2235 e->indirect_info->thunk_delta));
2236 e->indirect_info->thunk_delta = 0;
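/* If the callee clone dropped some of its formal parameters, build a new
   call statement without the corresponding arguments and replace the
   original one; otherwise just redirect the existing statement to the
   new decl.  */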
2239 if (e->callee->clone.combined_args_to_skip)
2241 int lp_nr;
2243 new_stmt
2244 = gimple_call_copy_skip_args (e->call_stmt,
2245 e->callee->clone.combined_args_to_skip);
2246 gimple_call_set_fndecl (new_stmt, e->callee->decl);
2248 if (gimple_vdef (new_stmt)
2249 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
2250 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
2252 if (!gsi_computed)
2253 gsi = gsi_for_stmt (e->call_stmt);
2254 gsi_replace (&gsi, new_stmt, false);
2255 /* We need to defer cleaning EH info on the new statement to
2256 fixup-cfg. We may not have dominator information at this point
2257 and thus would end up with unreachable blocks and have no way
2258 to communicate that we need to run CFG cleanup then. */
2259 lp_nr = lookup_stmt_eh_lp (e->call_stmt);
2260 if (lp_nr != 0)
2262 remove_stmt_from_eh_lp (e->call_stmt);
2263 add_stmt_to_eh_lp (new_stmt, lp_nr);
2266 else
2268 new_stmt = e->call_stmt;
2269 gimple_call_set_fndecl (new_stmt, e->callee->decl);
2270 update_stmt (new_stmt);
2273 cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt, new_stmt);
2275 if (cgraph_dump_file)
2277 fprintf (cgraph_dump_file, " updated to:");
2278 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
2280 return new_stmt;
2283 /* Once all functions from the compilation unit are in memory, produce all
2284 clones and update all calls.  We might also do this on demand if we don't
2285 want to bring all functions to memory prior to compilation, but the current
2286 WHOPR implementation does that and it is a bit easier to keep everything
2287 right in this order.  */
2288 void
2289 cgraph_materialize_all_clones (void)
2291 struct cgraph_node *node;
2292 bool stabilized = false;
2294 if (cgraph_dump_file)
2295 fprintf (cgraph_dump_file, "Materializing clones\n");
2296 #ifdef ENABLE_CHECKING
2297 verify_cgraph ();
2298 #endif
2300 /* We could also use topological order, but the number of iterations should
2301 be bounded by the number of IPA passes, since a single IPA pass is probably
2302 not going to create clones of clones it created itself.  */
2303 while (!stabilized)
2305 stabilized = true;
2306 for (node = cgraph_nodes; node; node = node->next)
2308 if (node->clone_of && node->decl != node->clone_of->decl
2309 && !gimple_has_body_p (node->decl))
2311 if (gimple_has_body_p (node->clone_of->decl))
2313 if (cgraph_dump_file)
2315 fprintf (cgraph_dump_file, "cloning %s to %s\n",
2316 cgraph_node_name (node->clone_of),
2317 cgraph_node_name (node));
2318 if (node->clone.tree_map)
2320 unsigned int i;
2321 fprintf (cgraph_dump_file, " replace map: ");
2322 for (i = 0; i < VEC_length (ipa_replace_map_p,
2323 node->clone.tree_map);
2324 i++)
2326 struct ipa_replace_map *replace_info;
2327 replace_info = VEC_index (ipa_replace_map_p,
2328 node->clone.tree_map,
2330 print_generic_expr (cgraph_dump_file, replace_info->old_tree, 0);
2331 fprintf (cgraph_dump_file, " -> ");
2332 print_generic_expr (cgraph_dump_file, replace_info->new_tree, 0);
2333 fprintf (cgraph_dump_file, "%s%s;",
2334 replace_info->replace_p ? "(replace)":"",
2335 replace_info->ref_p ? "(ref)":"");
2337 fprintf (cgraph_dump_file, "\n");
2339 if (node->clone.args_to_skip)
2341 fprintf (cgraph_dump_file, " args_to_skip: ");
2342 dump_bitmap (cgraph_dump_file, node->clone.args_to_skip);
2344 if (node->clone.args_to_skip)
2346 fprintf (cgraph_dump_file, " combined_args_to_skip:");
2347 dump_bitmap (cgraph_dump_file, node->clone.combined_args_to_skip);
2350 cgraph_materialize_clone (node);
2351 stabilized = false;
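/* Now that all clone bodies exist, drop stale call edges hanging off nodes
   whose bodies were never analyzed.  */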
2356 for (node = cgraph_nodes; node; node = node->next)
2357 if (!node->analyzed && node->callees)
2358 cgraph_node_remove_callees (node);
2359 if (cgraph_dump_file)
2360 fprintf (cgraph_dump_file, "Materialization Call site updates done.\n");
2361 #ifdef ENABLE_CHECKING
2362 verify_cgraph ();
2363 #endif
2364 cgraph_remove_unreachable_nodes (false, cgraph_dump_file);
2367 #include "gt-cgraphunit.h"