1 /* Callgraph based interprocedural optimizations.
2 Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
22 /* This module implements main driver of compilation process as well as
23 few basic interprocedural optimizers.
25 The main scope of this file is to act as an interface in between
26 tree based frontends and the backend (and middle end)
28 The front-end is supposed to use following functionality:
30 - cgraph_finalize_function
32 This function is called once front-end has parsed whole body of function
33 and it is certain that neither the function body nor the declaration will change.
35 (There is one exception needed for implementing GCC extern inline
38 - varpool_finalize_variable
40 This function has same behavior as the above but is used for static
43 - cgraph_finalize_compilation_unit
45 This function is called once (source level) compilation unit is finalized
46 and it will no longer change.
48 In the unit-at-a-time the call-graph construction and local function
49 analysis takes place here. Bodies of unreachable functions are released
50 to conserve memory usage.
52 The function can be called multiple times when multiple source level
53 compilation units are combined (such as in C frontend)
57 In this unit-at-a-time compilation the intra procedural analysis takes
58 place here. In particular the static functions whose address is never
59 taken are marked as local. Backend can then use this information to
60 modify calling conventions, do better inlining or similar optimizations.
62 - cgraph_mark_needed_node
63 - varpool_mark_needed_node
65 When function or variable is referenced by some hidden way the call-graph
66 data structure must be updated accordingly by this function.
67 There should be little need to call this function and all the references
68 should be made explicit to cgraph code. At present these functions are
69 used by C++ frontend to explicitly mark the keyed methods.
71 - analyze_expr callback
73 This function is responsible for lowering tree nodes not understood by
74 generic code into understandable ones or alternatively marking
75 callgraph and varpool nodes referenced by the as needed.
77 ??? On the tree-ssa genericizing should take place here and we will avoid
78 need for these hooks (replacing them by genericizing hook)
80 - expand_function callback
82 This function is used to expand function and pass it into RTL back-end.
83 Front-end should not make any assumptions about when this function can be
84 called. In particular cgraph_assemble_pending_functions,
85 varpool_assemble_pending_variables, cgraph_finalize_function,
86 varpool_finalize_function, cgraph_optimize can cause arbitrarily
87 previously finalized functions to be expanded.
89 We implement two compilation modes.
91 - unit-at-a-time: In this mode analyzing of all functions is deferred
92 to cgraph_finalize_compilation_unit and expansion into cgraph_optimize.
94 In cgraph_finalize_compilation_unit the reachable functions are
95 analyzed. During analysis the call-graph edges from reachable
96 functions are constructed and their destinations are marked as
97 reachable. References to functions and variables are discovered too
98 and variables found to be needed output to the assembly file. Via
99 mark_referenced call in assemble_variable functions referenced by
100 static variables are noticed too.
102 The intra-procedural information is produced and its existence
103 indicated by global_info_ready. Once this flag is set it is impossible
104 to change function from !reachable to reachable and thus
105 assemble_variable no longer call mark_referenced.
107 Finally the call-graph is topologically sorted and all reachable functions
108 that has not been completely inlined or are not external are output.
110 ??? It is possible that reference to function or variable is optimized
111 out. We can not deal with this nicely because topological order is not
112 suitable for it. For tree-ssa we may consider another pass doing
113 optimization and re-discovering reachable functions.
115 ??? Reorganize code so variables are output very last and only if they
116 really have been referenced by produced code, so we catch more cases
117 where reference has been optimized out.
121 All functions and variables are output as early as possible to conserve
122 memory consumption. This may or may not result in less memory used but
123 it is still needed for some legacy code that rely on particular ordering
124 of things output from the compiler.
126 Varpool data structures are not used and variables are output directly.
128 Functions are output early using call of
129 cgraph_assemble_pending_function from cgraph_finalize_function. The
130 decision on whether function is needed is made more conservative so
131 uninlininable static functions are needed too. During the call-graph
132 construction the edge destinations are not marked as reachable and it
133 is completely relied upon assemble_variable to mark them. */
138 #include "coretypes.h"
142 #include "tree-flow.h"
143 #include "tree-inline.h"
144 #include "langhooks.h"
145 #include "pointer-set.h"
152 #include "diagnostic.h"
156 #include "c-common.h"
158 #include "function.h"
159 #include "ipa-prop.h"
160 #include "tree-gimple.h"
161 #include "tree-pass.h"
164 static void cgraph_expand_all_functions (void);
165 static void cgraph_mark_functions_to_output (void);
166 static void cgraph_expand_function (struct cgraph_node
*);
167 static tree
record_reference (tree
*, int *, void *);
168 static void cgraph_output_pending_asms (void);
169 static void cgraph_increase_alignment (void);
170 static void initialize_inline_failed (struct cgraph_node
*);
172 /* Records tree nodes seen in record_reference. Simply using
173 walk_tree_without_duplicates doesn't guarantee each node is visited
174 once because it gets a new htab upon each recursive call from
175 record_reference itself. */
176 static struct pointer_set_t
*visited_nodes
;
178 static FILE *cgraph_dump_file
;
180 /* Determine if function DECL is needed. That is, visible to something
181 either outside this translation unit, something magic in the system
182 configury, or (if not doing unit-at-a-time) to something we havn't
186 decide_is_function_needed (struct cgraph_node
*node
, tree decl
)
189 if (MAIN_NAME_P (DECL_NAME (decl
))
190 && TREE_PUBLIC (decl
))
192 node
->local
.externally_visible
= true;
196 /* If the user told us it is used, then it must be so. */
197 if (node
->local
.externally_visible
)
200 if (!flag_unit_at_a_time
&& lookup_attribute ("used", DECL_ATTRIBUTES (decl
)))
203 /* ??? If the assembler name is set by hand, it is possible to assemble
204 the name later after finalizing the function and the fact is noticed
205 in assemble_name then. This is arguably a bug. */
206 if (DECL_ASSEMBLER_NAME_SET_P (decl
)
207 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl
)))
210 /* If we decided it was needed before, but at the time we didn't have
211 the body of the function available, then it's still needed. We have
212 to go back and re-check its dependencies now. */
216 /* Externally visible functions must be output. The exception is
217 COMDAT functions that must be output only when they are needed.
219 When not optimizing, also output the static functions. (see
220 PR24561), but don't do so for always_inline functions, functions
221 declared inline and nested functions. These was optimized out
222 in the original implementation and it is unclear whether we want
223 to change the behavior here. */
224 if (((TREE_PUBLIC (decl
)
225 || (!optimize
&& !node
->local
.disregard_inline_limits
226 && !DECL_DECLARED_INLINE_P (decl
)
228 && !flag_whole_program
)
229 && !DECL_COMDAT (decl
) && !DECL_EXTERNAL (decl
))
232 /* Constructors and destructors are reachable from the runtime by
234 if (DECL_STATIC_CONSTRUCTOR (decl
) || DECL_STATIC_DESTRUCTOR (decl
))
237 if (flag_unit_at_a_time
)
240 /* If not doing unit at a time, then we'll only defer this function
241 if its marked for inlining. Otherwise we want to emit it now. */
243 /* "extern inline" functions are never output locally. */
244 if (DECL_EXTERNAL (decl
))
246 /* Nested functions of extern inline function shall not be emit unless
247 we inlined the origin. */
248 for (origin
= decl_function_context (decl
); origin
;
249 origin
= decl_function_context (origin
))
250 if (DECL_EXTERNAL (origin
))
252 /* We want to emit COMDAT functions only when absolutely necessary. */
253 if (DECL_COMDAT (decl
))
255 if (!DECL_INLINE (decl
)
256 || (!node
->local
.disregard_inline_limits
257 /* When declared inline, defer even the uninlinable functions.
258 This allows them to be eliminated when unused. */
259 && !DECL_DECLARED_INLINE_P (decl
)
260 && (!node
->local
.inlinable
|| !cgraph_default_inline_p (node
, NULL
))))
266 /* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
267 functions into callgraph in a way so they look like ordinary reachable
268 functions inserted into callgraph already at construction time. */
271 cgraph_process_new_functions (void)
275 struct cgraph_node
*node
;
277 /* Note that this queue may grow as its being processed, as the new
278 functions may generate new ones. */
279 while (cgraph_new_nodes
)
281 node
= cgraph_new_nodes
;
283 cgraph_new_nodes
= cgraph_new_nodes
->next_needed
;
284 switch (cgraph_state
)
286 case CGRAPH_STATE_CONSTRUCTION
:
287 /* At construction time we just need to finalize function and move
288 it into reachable functions list. */
290 node
->next_needed
= NULL
;
291 cgraph_finalize_function (fndecl
, false);
292 cgraph_mark_reachable_node (node
);
296 case CGRAPH_STATE_IPA
:
297 case CGRAPH_STATE_IPA_SSA
:
298 /* When IPA optimization already started, do all essential
299 transformations that has been already performed on the whole
300 cgraph but not on this function. */
302 tree_register_cfg_hooks ();
304 cgraph_analyze_function (node
);
305 push_cfun (DECL_STRUCT_FUNCTION (fndecl
));
306 current_function_decl
= fndecl
;
307 node
->local
.inlinable
= tree_inlinable_function_p (fndecl
);
308 node
->local
.self_insns
= estimate_num_insns (fndecl
);
309 node
->local
.disregard_inline_limits
310 = lang_hooks
.tree_inlining
.disregard_inline_limits (fndecl
);
311 /* Inlining characteristics are maintained by the
312 cgraph_mark_inline. */
313 node
->global
.insns
= node
->local
.self_insns
;
314 initialize_inline_failed (node
);
315 if (flag_really_no_inline
&& !node
->local
.disregard_inline_limits
)
316 node
->local
.inlinable
= 0;
317 if ((cgraph_state
== CGRAPH_STATE_IPA_SSA
318 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl
)))
319 /* When not optimizing, be sure we run early local passes anyway
322 execute_pass_list (pass_early_local_passes
.sub
);
323 free_dominance_info (CDI_POST_DOMINATORS
);
324 free_dominance_info (CDI_DOMINATORS
);
326 current_function_decl
= NULL
;
329 case CGRAPH_STATE_EXPANSION
:
330 /* Functions created during expansion shall be compiled
333 cgraph_expand_function (node
);
344 /* When not doing unit-at-a-time, output all functions enqueued.
345 Return true when such a functions were found. */
348 cgraph_assemble_pending_functions (void)
352 if (flag_unit_at_a_time
)
355 cgraph_output_pending_asms ();
357 while (cgraph_nodes_queue
)
359 struct cgraph_node
*n
= cgraph_nodes_queue
;
361 cgraph_nodes_queue
= cgraph_nodes_queue
->next_needed
;
362 n
->next_needed
= NULL
;
363 if (!n
->global
.inlined_to
365 && !DECL_EXTERNAL (n
->decl
))
367 cgraph_expand_function (n
);
370 output
|= cgraph_process_new_functions ();
377 /* As an GCC extension we allow redefinition of the function. The
378 semantics when both copies of bodies differ is not well defined.
379 We replace the old body with new body so in unit at a time mode
380 we always use new body, while in normal mode we may end up with
381 old body inlined into some functions and new body expanded and
384 ??? It may make more sense to use one body for inlining and other
385 body for expanding the function but this is difficult to do. */
388 cgraph_reset_node (struct cgraph_node
*node
)
390 /* If node->output is set, then this is a unit-at-a-time compilation
391 and we have already begun whole-unit analysis. This is *not*
392 testing for whether we've already emitted the function. That
393 case can be sort-of legitimately seen with real function
394 redefinition errors. I would argue that the front end should
395 never present us with such a case, but don't enforce that for now. */
396 gcc_assert (!node
->output
);
398 /* Reset our data structures so we can analyze the function again. */
399 memset (&node
->local
, 0, sizeof (node
->local
));
400 memset (&node
->global
, 0, sizeof (node
->global
));
401 memset (&node
->rtl
, 0, sizeof (node
->rtl
));
402 node
->analyzed
= false;
403 node
->local
.redefined_extern_inline
= true;
404 node
->local
.finalized
= false;
406 if (!flag_unit_at_a_time
)
408 struct cgraph_node
*n
, *next
;
410 for (n
= cgraph_nodes
; n
; n
= next
)
413 if (n
->global
.inlined_to
== node
)
414 cgraph_remove_node (n
);
418 cgraph_node_remove_callees (node
);
420 /* We may need to re-queue the node for assembling in case
421 we already proceeded it and ignored as not needed. */
422 if (node
->reachable
&& !flag_unit_at_a_time
)
424 struct cgraph_node
*n
;
426 for (n
= cgraph_nodes_queue
; n
; n
= n
->next_needed
)
435 cgraph_lower_function (struct cgraph_node
*node
)
439 tree_lowering_passes (node
->decl
);
440 node
->lowered
= true;
443 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
444 logic in effect. If NESTED is true, then our caller cannot stand to have
445 the garbage collector run at the moment. We would need to either create
446 a new GC context, or just not compile right now. */
449 cgraph_finalize_function (tree decl
, bool nested
)
451 struct cgraph_node
*node
= cgraph_node (decl
);
453 if (node
->local
.finalized
)
454 cgraph_reset_node (node
);
456 notice_global_symbol (decl
);
458 node
->local
.finalized
= true;
459 node
->lowered
= DECL_STRUCT_FUNCTION (decl
)->cfg
!= NULL
;
461 lower_nested_functions (decl
);
462 gcc_assert (!node
->nested
);
464 /* If not unit at a time, then we need to create the call graph
465 now, so that called functions can be queued and emitted now. */
466 if (!flag_unit_at_a_time
)
468 cgraph_analyze_function (node
);
469 cgraph_decide_inlining_incrementally (node
, false);
472 if (decide_is_function_needed (node
, decl
))
473 cgraph_mark_needed_node (node
);
475 /* Since we reclaim unreachable nodes at the end of every language
476 level unit, we need to be conservative about possible entry points
478 if ((TREE_PUBLIC (decl
) && !DECL_COMDAT (decl
) && !DECL_EXTERNAL (decl
)))
479 cgraph_mark_reachable_node (node
);
481 /* If not unit at a time, go ahead and emit everything we've found
482 to be reachable at this time. */
485 if (!cgraph_assemble_pending_functions ())
489 /* If we've not yet emitted decl, tell the debug info about it. */
490 if (!TREE_ASM_WRITTEN (decl
))
491 (*debug_hooks
->deferred_inline_function
) (decl
);
493 /* Possibly warn about unused parameters. */
494 if (warn_unused_parameter
)
495 do_warn_unused_parameter (decl
);
498 /* Walk tree and record all calls. Called via walk_tree. */
500 record_reference (tree
*tp
, int *walk_subtrees
, void *data
)
504 switch (TREE_CODE (t
))
507 /* ??? Really, we should mark this decl as *potentially* referenced
508 by this function and re-examine whether the decl is actually used
509 after rtl has been generated. */
510 if (TREE_STATIC (t
) || DECL_EXTERNAL (t
))
512 varpool_mark_needed_node (varpool_node (t
));
513 if (lang_hooks
.callgraph
.analyze_expr
)
514 return lang_hooks
.callgraph
.analyze_expr (tp
, walk_subtrees
,
521 if (flag_unit_at_a_time
)
523 /* Record dereferences to the functions. This makes the
524 functions reachable unconditionally. */
525 tree decl
= TREE_OPERAND (*tp
, 0);
526 if (TREE_CODE (decl
) == FUNCTION_DECL
)
527 cgraph_mark_needed_node (cgraph_node (decl
));
532 /* Save some cycles by not walking types and declaration as we
533 won't find anything useful there anyway. */
534 if (IS_TYPE_OR_DECL_P (*tp
))
540 if ((unsigned int) TREE_CODE (t
) >= LAST_AND_UNUSED_TREE_CODE
)
541 return lang_hooks
.callgraph
.analyze_expr (tp
, walk_subtrees
, data
);
548 /* Create cgraph edges for function calls inside BODY from NODE. */
551 cgraph_create_edges (struct cgraph_node
*node
, tree body
)
555 struct function
*this_cfun
= DECL_STRUCT_FUNCTION (body
);
556 block_stmt_iterator bsi
;
558 visited_nodes
= pointer_set_create ();
560 /* Reach the trees by walking over the CFG, and note the
561 enclosing basic-blocks in the call edges. */
562 FOR_EACH_BB_FN (bb
, this_cfun
)
563 for (bsi
= bsi_start (bb
); !bsi_end_p (bsi
); bsi_next (&bsi
))
565 tree stmt
= bsi_stmt (bsi
);
566 tree call
= get_call_expr_in (stmt
);
569 if (call
&& (decl
= get_callee_fndecl (call
)))
571 cgraph_create_edge (node
, cgraph_node (decl
), stmt
,
574 walk_tree (&TREE_OPERAND (call
, 1),
575 record_reference
, node
, visited_nodes
);
576 if (TREE_CODE (stmt
) == GIMPLE_MODIFY_STMT
)
577 walk_tree (&GIMPLE_STMT_OPERAND (stmt
, 0),
578 record_reference
, node
, visited_nodes
);
581 walk_tree (bsi_stmt_ptr (bsi
), record_reference
, node
, visited_nodes
);
584 /* Look for initializers of constant variables and private statics. */
585 for (step
= DECL_STRUCT_FUNCTION (body
)->unexpanded_var_list
;
587 step
= TREE_CHAIN (step
))
589 tree decl
= TREE_VALUE (step
);
590 if (TREE_CODE (decl
) == VAR_DECL
591 && (TREE_STATIC (decl
) && !DECL_EXTERNAL (decl
))
592 && flag_unit_at_a_time
)
593 varpool_finalize_decl (decl
);
594 else if (TREE_CODE (decl
) == VAR_DECL
&& DECL_INITIAL (decl
))
595 walk_tree (&DECL_INITIAL (decl
), record_reference
, node
, visited_nodes
);
598 pointer_set_destroy (visited_nodes
);
599 visited_nodes
= NULL
;
603 record_references_in_initializer (tree decl
)
605 visited_nodes
= pointer_set_create ();
606 walk_tree (&DECL_INITIAL (decl
), record_reference
, NULL
, visited_nodes
);
607 pointer_set_destroy (visited_nodes
);
608 visited_nodes
= NULL
;
612 /* Give initial reasons why inlining would fail. Those gets
613 either NULLified or usually overwritten by more precise reason
616 initialize_inline_failed (struct cgraph_node
*node
)
618 struct cgraph_edge
*e
;
620 for (e
= node
->callers
; e
; e
= e
->next_caller
)
622 gcc_assert (!e
->callee
->global
.inlined_to
);
623 gcc_assert (e
->inline_failed
);
624 if (node
->local
.redefined_extern_inline
)
625 e
->inline_failed
= N_("redefined extern inline functions are not "
626 "considered for inlining");
627 else if (!node
->local
.inlinable
)
628 e
->inline_failed
= N_("function not inlinable");
630 e
->inline_failed
= N_("function not considered for inlining");
634 /* Rebuild call edges from current function after a passes not aware
635 of cgraph updating. */
637 rebuild_cgraph_edges (void)
640 struct cgraph_node
*node
= cgraph_node (current_function_decl
);
641 block_stmt_iterator bsi
;
643 cgraph_node_remove_callees (node
);
645 node
->count
= ENTRY_BLOCK_PTR
->count
;
648 for (bsi
= bsi_start (bb
); !bsi_end_p (bsi
); bsi_next (&bsi
))
650 tree stmt
= bsi_stmt (bsi
);
651 tree call
= get_call_expr_in (stmt
);
654 if (call
&& (decl
= get_callee_fndecl (call
)))
655 cgraph_create_edge (node
, cgraph_node (decl
), stmt
,
659 initialize_inline_failed (node
);
660 gcc_assert (!node
->global
.inlined_to
);
664 struct tree_opt_pass pass_rebuild_cgraph_edges
=
668 rebuild_cgraph_edges
, /* execute */
671 0, /* static_pass_number */
673 PROP_cfg
, /* properties_required */
674 0, /* properties_provided */
675 0, /* properties_destroyed */
676 0, /* todo_flags_start */
677 0, /* todo_flags_finish */
681 /* Verify cgraph nodes of given cgraph node. */
683 verify_cgraph_node (struct cgraph_node
*node
)
685 struct cgraph_edge
*e
;
686 struct cgraph_node
*main_clone
;
687 struct function
*this_cfun
= DECL_STRUCT_FUNCTION (node
->decl
);
688 basic_block this_block
;
689 block_stmt_iterator bsi
;
690 bool error_found
= false;
692 if (errorcount
|| sorrycount
)
695 timevar_push (TV_CGRAPH_VERIFY
);
696 for (e
= node
->callees
; e
; e
= e
->next_callee
)
699 error ("aux field set for edge %s->%s",
700 cgraph_node_name (e
->caller
), cgraph_node_name (e
->callee
));
705 error ("Execution count is negative");
708 for (e
= node
->callers
; e
; e
= e
->next_caller
)
712 error ("caller edge count is negative");
715 if (!e
->inline_failed
)
717 if (node
->global
.inlined_to
718 != (e
->caller
->global
.inlined_to
719 ? e
->caller
->global
.inlined_to
: e
->caller
))
721 error ("inlined_to pointer is wrong");
724 if (node
->callers
->next_caller
)
726 error ("multiple inline callers");
731 if (node
->global
.inlined_to
)
733 error ("inlined_to pointer set for noninline callers");
737 if (!node
->callers
&& node
->global
.inlined_to
)
739 error ("inlined_to pointer is set but no predecessors found");
742 if (node
->global
.inlined_to
== node
)
744 error ("inlined_to pointer refers to itself");
748 for (main_clone
= cgraph_node (node
->decl
); main_clone
;
749 main_clone
= main_clone
->next_clone
)
750 if (main_clone
== node
)
752 if (!cgraph_node (node
->decl
))
754 error ("node not found in cgraph_hash");
759 && DECL_SAVED_TREE (node
->decl
) && !TREE_ASM_WRITTEN (node
->decl
)
760 && (!DECL_EXTERNAL (node
->decl
) || node
->global
.inlined_to
))
764 /* The nodes we're interested in are never shared, so walk
765 the tree ignoring duplicates. */
766 visited_nodes
= pointer_set_create ();
767 /* Reach the trees by walking over the CFG, and note the
768 enclosing basic-blocks in the call edges. */
769 FOR_EACH_BB_FN (this_block
, this_cfun
)
770 for (bsi
= bsi_start (this_block
); !bsi_end_p (bsi
); bsi_next (&bsi
))
772 tree stmt
= bsi_stmt (bsi
);
773 tree call
= get_call_expr_in (stmt
);
775 if (call
&& (decl
= get_callee_fndecl (call
)))
777 struct cgraph_edge
*e
= cgraph_edge (node
, stmt
);
782 error ("shared call_stmt:");
783 debug_generic_stmt (stmt
);
786 if (e
->callee
->decl
!= cgraph_node (decl
)->decl
789 error ("edge points to wrong declaration:");
790 debug_tree (e
->callee
->decl
);
791 fprintf (stderr
," Instead of:");
798 error ("missing callgraph edge for call stmt:");
799 debug_generic_stmt (stmt
);
804 pointer_set_destroy (visited_nodes
);
805 visited_nodes
= NULL
;
808 /* No CFG available?! */
811 for (e
= node
->callees
; e
; e
= e
->next_callee
)
815 error ("edge %s->%s has no corresponding call_stmt",
816 cgraph_node_name (e
->caller
),
817 cgraph_node_name (e
->callee
));
818 debug_generic_stmt (e
->call_stmt
);
826 dump_cgraph_node (stderr
, node
);
827 internal_error ("verify_cgraph_node failed");
829 timevar_pop (TV_CGRAPH_VERIFY
);
832 /* Verify whole cgraph structure. */
836 struct cgraph_node
*node
;
838 if (sorrycount
|| errorcount
)
841 for (node
= cgraph_nodes
; node
; node
= node
->next
)
842 verify_cgraph_node (node
);
845 /* Output all asm statements we have stored up to be output. */
848 cgraph_output_pending_asms (void)
850 struct cgraph_asm_node
*can
;
852 if (errorcount
|| sorrycount
)
855 for (can
= cgraph_asm_nodes
; can
; can
= can
->next
)
856 assemble_asm (can
->asm_str
);
857 cgraph_asm_nodes
= NULL
;
860 /* Analyze the function scheduled to be output. */
862 cgraph_analyze_function (struct cgraph_node
*node
)
864 tree decl
= node
->decl
;
866 current_function_decl
= decl
;
867 push_cfun (DECL_STRUCT_FUNCTION (decl
));
868 cgraph_lower_function (node
);
870 /* First kill forward declaration so reverse inlining works properly. */
871 cgraph_create_edges (node
, decl
);
873 node
->local
.estimated_self_stack_size
= estimated_stack_frame_size ();
874 node
->global
.estimated_stack_size
= node
->local
.estimated_self_stack_size
;
875 node
->global
.stack_frame_offset
= 0;
876 node
->local
.inlinable
= tree_inlinable_function_p (decl
);
877 if (!flag_unit_at_a_time
)
878 node
->local
.self_insns
= estimate_num_insns (decl
);
879 if (node
->local
.inlinable
)
880 node
->local
.disregard_inline_limits
881 = lang_hooks
.tree_inlining
.disregard_inline_limits (decl
);
882 initialize_inline_failed (node
);
883 if (flag_really_no_inline
&& !node
->local
.disregard_inline_limits
)
884 node
->local
.inlinable
= 0;
885 /* Inlining characteristics are maintained by the cgraph_mark_inline. */
886 node
->global
.insns
= node
->local
.self_insns
;
887 if (!flag_unit_at_a_time
)
889 bitmap_obstack_initialize (NULL
);
890 tree_register_cfg_hooks ();
891 execute_pass_list (pass_early_local_passes
.sub
);
892 free_dominance_info (CDI_POST_DOMINATORS
);
893 free_dominance_info (CDI_DOMINATORS
);
894 bitmap_obstack_release (NULL
);
897 node
->analyzed
= true;
899 current_function_decl
= NULL
;
902 /* Look for externally_visible and used attributes and mark cgraph nodes
905 We cannot mark the nodes at the point the attributes are processed (in
906 handle_*_attribute) because the copy of the declarations available at that
907 point may not be canonical. For example, in:
910 void f() __attribute__((used));
912 the declaration we see in handle_used_attribute will be the second
913 declaration -- but the front end will subsequently merge that declaration
914 with the original declaration and discard the second declaration.
916 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
919 void f() __attribute__((externally_visible));
923 So, we walk the nodes at the end of the translation unit, applying the
924 attributes at that point. */
927 process_function_and_variable_attributes (struct cgraph_node
*first
,
928 struct varpool_node
*first_var
)
930 struct cgraph_node
*node
;
931 struct varpool_node
*vnode
;
933 for (node
= cgraph_nodes
; node
!= first
; node
= node
->next
)
935 tree decl
= node
->decl
;
936 if (lookup_attribute ("used", DECL_ATTRIBUTES (decl
)))
938 mark_decl_referenced (decl
);
939 if (node
->local
.finalized
)
940 cgraph_mark_needed_node (node
);
942 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl
)))
944 if (! TREE_PUBLIC (node
->decl
))
945 warning (OPT_Wattributes
,
946 "%J%<externally_visible%> attribute have effect only on public objects",
950 if (node
->local
.finalized
)
951 cgraph_mark_needed_node (node
);
952 node
->local
.externally_visible
= true;
956 for (vnode
= varpool_nodes
; vnode
!= first_var
; vnode
= vnode
->next
)
958 tree decl
= vnode
->decl
;
959 if (lookup_attribute ("used", DECL_ATTRIBUTES (decl
)))
961 mark_decl_referenced (decl
);
962 if (vnode
->finalized
)
963 varpool_mark_needed_node (vnode
);
965 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl
)))
967 if (! TREE_PUBLIC (vnode
->decl
))
968 warning (OPT_Wattributes
,
969 "%J%<externally_visible%> attribute have effect only on public objects",
973 if (vnode
->finalized
)
974 varpool_mark_needed_node (vnode
);
975 vnode
->externally_visible
= true;
981 /* Analyze the whole compilation unit once it is parsed completely. */
984 cgraph_finalize_compilation_unit (void)
986 struct cgraph_node
*node
, *next
;
987 /* Keep track of already processed nodes when called multiple times for
988 intermodule optimization. */
989 static struct cgraph_node
*first_analyzed
;
990 struct cgraph_node
*first_processed
= first_analyzed
;
991 static struct varpool_node
*first_analyzed_var
;
993 if (errorcount
|| sorrycount
)
998 if (!flag_unit_at_a_time
)
1000 cgraph_output_pending_asms ();
1001 cgraph_assemble_pending_functions ();
1002 varpool_output_debug_info ();
1008 fprintf (stderr
, "\nAnalyzing compilation unit\n");
1012 timevar_push (TV_CGRAPH
);
1013 process_function_and_variable_attributes (first_processed
,
1014 first_analyzed_var
);
1015 first_processed
= cgraph_nodes
;
1016 first_analyzed_var
= varpool_nodes
;
1017 varpool_analyze_pending_decls ();
1018 if (cgraph_dump_file
)
1020 fprintf (cgraph_dump_file
, "Initial entry points:");
1021 for (node
= cgraph_nodes
; node
!= first_analyzed
; node
= node
->next
)
1022 if (node
->needed
&& DECL_SAVED_TREE (node
->decl
))
1023 fprintf (cgraph_dump_file
, " %s", cgraph_node_name (node
));
1024 fprintf (cgraph_dump_file
, "\n");
1027 /* Propagate reachability flag and lower representation of all reachable
1028 functions. In the future, lowering will introduce new functions and
1029 new entry points on the way (by template instantiation and virtual
1030 method table generation for instance). */
1031 while (cgraph_nodes_queue
)
1033 struct cgraph_edge
*edge
;
1034 tree decl
= cgraph_nodes_queue
->decl
;
1036 node
= cgraph_nodes_queue
;
1037 cgraph_nodes_queue
= cgraph_nodes_queue
->next_needed
;
1038 node
->next_needed
= NULL
;
1040 /* ??? It is possible to create extern inline function and later using
1041 weak alias attribute to kill its body. See
1042 gcc.c-torture/compile/20011119-1.c */
1043 if (!DECL_SAVED_TREE (decl
))
1045 cgraph_reset_node (node
);
1049 gcc_assert (!node
->analyzed
&& node
->reachable
);
1050 gcc_assert (DECL_SAVED_TREE (decl
));
1052 cgraph_analyze_function (node
);
1054 for (edge
= node
->callees
; edge
; edge
= edge
->next_callee
)
1055 if (!edge
->callee
->reachable
)
1056 cgraph_mark_reachable_node (edge
->callee
);
1058 /* We finalize local static variables during constructing callgraph
1059 edges. Process their attributes too. */
1060 process_function_and_variable_attributes (first_processed
,
1061 first_analyzed_var
);
1062 first_processed
= cgraph_nodes
;
1063 first_analyzed_var
= varpool_nodes
;
1064 varpool_analyze_pending_decls ();
1067 /* Collect entry points to the unit. */
1068 if (cgraph_dump_file
)
1070 fprintf (cgraph_dump_file
, "Unit entry points:");
1071 for (node
= cgraph_nodes
; node
!= first_analyzed
; node
= node
->next
)
1072 if (node
->needed
&& DECL_SAVED_TREE (node
->decl
))
1073 fprintf (cgraph_dump_file
, " %s", cgraph_node_name (node
));
1074 fprintf (cgraph_dump_file
, "\n\nInitial ");
1075 dump_cgraph (cgraph_dump_file
);
1078 if (cgraph_dump_file
)
1079 fprintf (cgraph_dump_file
, "\nReclaiming functions:");
1081 for (node
= cgraph_nodes
; node
!= first_analyzed
; node
= next
)
1083 tree decl
= node
->decl
;
1086 if (node
->local
.finalized
&& !DECL_SAVED_TREE (decl
))
1087 cgraph_reset_node (node
);
1089 if (!node
->reachable
&& DECL_SAVED_TREE (decl
))
1091 if (cgraph_dump_file
)
1092 fprintf (cgraph_dump_file
, " %s", cgraph_node_name (node
));
1093 cgraph_remove_node (node
);
1097 node
->next_needed
= NULL
;
1098 gcc_assert (!node
->local
.finalized
|| DECL_SAVED_TREE (decl
));
1099 gcc_assert (node
->analyzed
== node
->local
.finalized
);
1101 if (cgraph_dump_file
)
1103 fprintf (cgraph_dump_file
, "\n\nReclaimed ");
1104 dump_cgraph (cgraph_dump_file
);
1106 first_analyzed
= cgraph_nodes
;
1108 timevar_pop (TV_CGRAPH
);
1110 /* Figure out what functions we want to assemble. */
1113 cgraph_mark_functions_to_output (void)
1115 struct cgraph_node
*node
;
1117 for (node
= cgraph_nodes
; node
; node
= node
->next
)
1119 tree decl
= node
->decl
;
1120 struct cgraph_edge
*e
;
1122 gcc_assert (!node
->output
);
1124 for (e
= node
->callers
; e
; e
= e
->next_caller
)
1125 if (e
->inline_failed
)
1128 /* We need to output all local functions that are used and not
1129 always inlined, as well as those that are reachable from
1130 outside the current compilation unit. */
1131 if (DECL_SAVED_TREE (decl
)
1132 && !node
->global
.inlined_to
1134 || (e
&& node
->reachable
))
1135 && !TREE_ASM_WRITTEN (decl
)
1136 && !DECL_EXTERNAL (decl
))
1140 /* We should've reclaimed all functions that are not needed. */
1141 #ifdef ENABLE_CHECKING
1142 if (!node
->global
.inlined_to
&& DECL_SAVED_TREE (decl
)
1143 && !DECL_EXTERNAL (decl
))
1145 dump_cgraph_node (stderr
, node
);
1146 internal_error ("failed to reclaim unneeded function");
1149 gcc_assert (node
->global
.inlined_to
|| !DECL_SAVED_TREE (decl
)
1150 || DECL_EXTERNAL (decl
));
1157 /* Expand function specified by NODE. */
1160 cgraph_expand_function (struct cgraph_node
*node
)
1162 tree decl
= node
->decl
;
1164 /* We ought to not compile any inline clones. */
1165 gcc_assert (!node
->global
.inlined_to
);
1167 if (flag_unit_at_a_time
)
1168 announce_function (decl
);
1170 cgraph_lower_function (node
);
1172 /* Generate RTL for the body of DECL. */
1173 lang_hooks
.callgraph
.expand_function (decl
);
1175 /* Make sure that BE didn't give up on compiling. */
1176 /* ??? Can happen with nested function of extern inline. */
1177 gcc_assert (TREE_ASM_WRITTEN (node
->decl
));
1179 current_function_decl
= NULL
;
1180 if (!cgraph_preserve_function_body_p (node
->decl
))
1182 DECL_SAVED_TREE (node
->decl
) = NULL
;
1183 DECL_STRUCT_FUNCTION (node
->decl
) = NULL
;
1184 DECL_INITIAL (node
->decl
) = error_mark_node
;
1185 /* Eliminate all call edges. This is important so the call_expr no longer
1186 points to the dead function body. */
1187 cgraph_node_remove_callees (node
);
1190 cgraph_function_flags_ready
= true;
1193 /* Return true when CALLER_DECL should be inlined into CALLEE_DECL. */
1196 cgraph_inline_p (struct cgraph_edge
*e
, const char **reason
)
1198 *reason
= e
->inline_failed
;
1199 return !e
->inline_failed
;
1204 /* Expand all functions that must be output.
1206 Attempt to topologically sort the nodes so function is output when
1207 all called functions are already assembled to allow data to be
1208 propagated across the callgraph. Use a stack to get smaller distance
1209 between a function and its callees (later we may choose to use a more
1210 sophisticated algorithm for function reordering; we will likely want
1211 to use subsections to make the output functions appear in top-down
1215 cgraph_expand_all_functions (void)
1217 struct cgraph_node
*node
;
1218 struct cgraph_node
**order
= XCNEWVEC (struct cgraph_node
*, cgraph_n_nodes
);
1219 int order_pos
= 0, new_order_pos
= 0;
1222 order_pos
= cgraph_postorder (order
);
1223 gcc_assert (order_pos
== cgraph_n_nodes
);
1225 /* Garbage collector may remove inline clones we eliminate during
1226 optimization. So we must be sure to not reference them. */
1227 for (i
= 0; i
< order_pos
; i
++)
1228 if (order
[i
]->output
)
1229 order
[new_order_pos
++] = order
[i
];
1231 for (i
= new_order_pos
- 1; i
>= 0; i
--)
1236 gcc_assert (node
->reachable
);
1238 cgraph_expand_function (node
);
1241 cgraph_process_new_functions ();
/* This is used to sort the node types by the cgraph order number.
   One entry exists per toplevel entity (function, variable, or asm
   statement); KIND discriminates which member of U is active.  */

struct cgraph_order_sort
{
  enum { ORDER_UNDEFINED = 0, ORDER_FUNCTION, ORDER_VAR, ORDER_ASM } kind;
  union
  {
    struct cgraph_node *f;	/* Valid when kind == ORDER_FUNCTION.  */
    struct varpool_node *v;	/* Valid when kind == ORDER_VAR.  */
    struct cgraph_asm_node *a;	/* Valid when kind == ORDER_ASM.  */
  } u;
};
1260 /* Output all functions, variables, and asm statements in the order
1261 according to their order fields, which is the order in which they
1262 appeared in the file. This implements -fno-toplevel-reorder. In
1263 this mode we may output functions and variables which don't really
1264 need to be output. */
1267 cgraph_output_in_order (void)
1271 struct cgraph_order_sort
*nodes
;
1273 struct cgraph_node
*pf
;
1274 struct varpool_node
*pv
;
1275 struct cgraph_asm_node
*pa
;
1278 size
= max
* sizeof (struct cgraph_order_sort
);
1279 nodes
= (struct cgraph_order_sort
*) alloca (size
);
1280 memset (nodes
, 0, size
);
1282 varpool_analyze_pending_decls ();
1284 for (pf
= cgraph_nodes
; pf
; pf
= pf
->next
)
1289 gcc_assert (nodes
[i
].kind
== ORDER_UNDEFINED
);
1290 nodes
[i
].kind
= ORDER_FUNCTION
;
1295 for (pv
= varpool_nodes_queue
; pv
; pv
= pv
->next_needed
)
1298 gcc_assert (nodes
[i
].kind
== ORDER_UNDEFINED
);
1299 nodes
[i
].kind
= ORDER_VAR
;
1303 for (pa
= cgraph_asm_nodes
; pa
; pa
= pa
->next
)
1306 gcc_assert (nodes
[i
].kind
== ORDER_UNDEFINED
);
1307 nodes
[i
].kind
= ORDER_ASM
;
1311 for (i
= 0; i
< max
; ++i
)
1313 switch (nodes
[i
].kind
)
1315 case ORDER_FUNCTION
:
1316 nodes
[i
].u
.f
->output
= 0;
1317 cgraph_expand_function (nodes
[i
].u
.f
);
1321 varpool_assemble_decl (nodes
[i
].u
.v
);
1325 assemble_asm (nodes
[i
].u
.a
->asm_str
);
1328 case ORDER_UNDEFINED
:
1336 cgraph_asm_nodes
= NULL
;
1339 /* Mark visibility of all functions.
1341 A local function is one whose calls can occur only in the current
1342 compilation unit and all its calls are explicit, so we can change
1343 its calling convention. We simply mark all static functions whose
1344 address is not taken as local.
1346 We also change the TREE_PUBLIC flag of all declarations that are public
1347 in language point of view but we want to overwrite this default
1348 via visibilities for the backend point of view. */
1351 cgraph_function_and_variable_visibility (void)
1353 struct cgraph_node
*node
;
1354 struct varpool_node
*vnode
;
1356 for (node
= cgraph_nodes
; node
; node
= node
->next
)
1359 && (DECL_COMDAT (node
->decl
)
1360 || (!flag_whole_program
1361 && TREE_PUBLIC (node
->decl
) && !DECL_EXTERNAL (node
->decl
))))
1362 node
->local
.externally_visible
= true;
1363 if (!node
->local
.externally_visible
&& node
->analyzed
1364 && !DECL_EXTERNAL (node
->decl
))
1366 gcc_assert (flag_whole_program
|| !TREE_PUBLIC (node
->decl
));
1367 TREE_PUBLIC (node
->decl
) = 0;
1369 node
->local
.local
= (!node
->needed
1371 && !DECL_EXTERNAL (node
->decl
)
1372 && !node
->local
.externally_visible
);
1374 for (vnode
= varpool_nodes_queue
; vnode
; vnode
= vnode
->next_needed
)
1377 && !flag_whole_program
1378 && (DECL_COMDAT (vnode
->decl
) || TREE_PUBLIC (vnode
->decl
)))
1379 vnode
->externally_visible
= 1;
1380 if (!vnode
->externally_visible
)
1382 gcc_assert (flag_whole_program
|| !TREE_PUBLIC (vnode
->decl
));
1383 TREE_PUBLIC (vnode
->decl
) = 0;
1385 gcc_assert (TREE_STATIC (vnode
->decl
));
1388 /* Because we have to be conservative on the boundaries of source
1389 level units, it is possible that we marked some functions in
1390 reachable just because they might be used later via external
1391 linkage, but after making them local they are really unreachable
1393 cgraph_remove_unreachable_nodes (true, cgraph_dump_file
);
1395 if (cgraph_dump_file
)
1397 fprintf (cgraph_dump_file
, "\nMarking local functions:");
1398 for (node
= cgraph_nodes
; node
; node
= node
->next
)
1399 if (node
->local
.local
)
1400 fprintf (cgraph_dump_file
, " %s", cgraph_node_name (node
));
1401 fprintf (cgraph_dump_file
, "\n\n");
1402 fprintf (cgraph_dump_file
, "\nMarking externally visible functions:");
1403 for (node
= cgraph_nodes
; node
; node
= node
->next
)
1404 if (node
->local
.externally_visible
)
1405 fprintf (cgraph_dump_file
, " %s", cgraph_node_name (node
));
1406 fprintf (cgraph_dump_file
, "\n\n");
1408 cgraph_function_flags_ready
= true;
1411 /* Return true when function body of DECL still needs to be kept around
1412 for later re-use. */
1414 cgraph_preserve_function_body_p (tree decl
)
1416 struct cgraph_node
*node
;
1417 if (!cgraph_global_info_ready
)
1418 return (flag_really_no_inline
1419 ? lang_hooks
.tree_inlining
.disregard_inline_limits (decl
)
1420 : DECL_INLINE (decl
));
1421 /* Look if there is any clone around. */
1422 for (node
= cgraph_node (decl
); node
; node
= node
->next_clone
)
1423 if (node
->global
.inlined_to
)
1432 current_function_decl
= NULL
;
1433 tree_register_cfg_hooks ();
1434 bitmap_obstack_initialize (NULL
);
1435 execute_ipa_pass_list (all_ipa_passes
);
1436 bitmap_obstack_release (NULL
);
1439 /* Perform simple optimizations based on callgraph. */
1442 cgraph_optimize (void)
1444 if (errorcount
|| sorrycount
)
1447 #ifdef ENABLE_CHECKING
1450 if (!flag_unit_at_a_time
)
1452 cgraph_assemble_pending_functions ();
1453 cgraph_process_new_functions ();
1454 cgraph_state
= CGRAPH_STATE_FINISHED
;
1455 cgraph_output_pending_asms ();
1456 varpool_assemble_pending_decls ();
1457 varpool_output_debug_info ();
1461 /* Frontend may output common variables after the unit has been finalized.
1462 It is safe to deal with them here as they are always zero initialized. */
1463 varpool_analyze_pending_decls ();
1464 cgraph_process_new_functions ();
1466 timevar_push (TV_CGRAPHOPT
);
1467 if (pre_ipa_mem_report
)
1469 fprintf (stderr
, "Memory consumption before IPA\n");
1470 dump_memory_report (false);
1473 fprintf (stderr
, "Performing interprocedural optimizations\n");
1475 cgraph_function_and_variable_visibility ();
1476 if (cgraph_dump_file
)
1478 fprintf (cgraph_dump_file
, "Marked ");
1479 dump_cgraph (cgraph_dump_file
);
1481 cgraph_state
= CGRAPH_STATE_IPA
;
1483 /* Don't run the IPA passes if there was any error or sorry messages. */
1484 if (errorcount
== 0 && sorrycount
== 0)
1487 /* This pass remove bodies of extern inline functions we never inlined.
1488 Do this later so other IPA passes see what is really going on. */
1489 cgraph_remove_unreachable_nodes (false, dump_file
);
1490 cgraph_increase_alignment ();
1491 cgraph_global_info_ready
= true;
1492 if (cgraph_dump_file
)
1494 fprintf (cgraph_dump_file
, "Optimized ");
1495 dump_cgraph (cgraph_dump_file
);
1496 dump_varpool (cgraph_dump_file
);
1498 if (post_ipa_mem_report
)
1500 fprintf (stderr
, "Memory consumption after IPA\n");
1501 dump_memory_report (false);
1503 timevar_pop (TV_CGRAPHOPT
);
1505 /* Output everything. */
1507 fprintf (stderr
, "Assembling functions:\n");
1508 #ifdef ENABLE_CHECKING
1512 cgraph_mark_functions_to_output ();
1514 cgraph_state
= CGRAPH_STATE_EXPANSION
;
1515 if (!flag_toplevel_reorder
)
1516 cgraph_output_in_order ();
1519 cgraph_output_pending_asms ();
1521 cgraph_expand_all_functions ();
1522 varpool_remove_unreferenced_decls ();
1524 varpool_assemble_pending_decls ();
1525 varpool_output_debug_info ();
1527 cgraph_process_new_functions ();
1528 cgraph_state
= CGRAPH_STATE_FINISHED
;
1530 if (cgraph_dump_file
)
1532 fprintf (cgraph_dump_file
, "\nFinal ");
1533 dump_cgraph (cgraph_dump_file
);
1535 #ifdef ENABLE_CHECKING
1537 /* Double check that all inline clones are gone and that all
1538 function bodies have been released from memory. */
1539 if (flag_unit_at_a_time
1540 && !(sorrycount
|| errorcount
))
1542 struct cgraph_node
*node
;
1543 bool error_found
= false;
1545 for (node
= cgraph_nodes
; node
; node
= node
->next
)
1547 && (node
->global
.inlined_to
1548 || DECL_SAVED_TREE (node
->decl
)))
1551 dump_cgraph_node (stderr
, node
);
1554 internal_error ("nodes with no released memory found");
1559 /* Increase alignment of global arrays to improve vectorization potential.
1561 - Consider also structs that have an array field.
1562 - Use ipa analysis to prune arrays that can't be vectorized?
1563 This should involve global alignment analysis and in the future also
1567 cgraph_increase_alignment (void)
1569 if (flag_section_anchors
&& flag_tree_vectorize
)
1571 struct varpool_node
*vnode
;
1573 /* Increase the alignment of all global arrays for vectorization. */
1574 for (vnode
= varpool_nodes_queue
;
1576 vnode
= vnode
->next_needed
)
1578 tree vectype
, decl
= vnode
->decl
;
1579 unsigned int alignment
;
1581 if (TREE_CODE (TREE_TYPE (decl
)) != ARRAY_TYPE
)
1583 vectype
= get_vectype_for_scalar_type (TREE_TYPE (TREE_TYPE (decl
)));
1586 alignment
= TYPE_ALIGN (vectype
);
1587 if (DECL_ALIGN (decl
) >= alignment
)
1590 if (vect_can_force_dr_alignment_p (decl
, alignment
))
1592 DECL_ALIGN (decl
) = TYPE_ALIGN (vectype
);
1593 DECL_USER_ALIGN (decl
) = 1;
1594 if (cgraph_dump_file
)
1596 fprintf (cgraph_dump_file
, "Increasing alignment of decl: ");
1597 print_generic_expr (cgraph_dump_file
, decl
, TDF_SLIM
);
1604 /* Generate and emit a static constructor or destructor. WHICH must be
1605 one of 'I' or 'D'. BODY should be a STATEMENT_LIST containing
1606 GENERIC statements. */
1609 cgraph_build_static_cdtor (char which
, tree body
, int priority
)
1611 static int counter
= 0;
1613 tree decl
, name
, resdecl
;
1615 sprintf (which_buf
, "%c_%d", which
, counter
++);
1616 name
= get_file_function_name (which_buf
);
1618 decl
= build_decl (FUNCTION_DECL
, name
,
1619 build_function_type (void_type_node
, void_list_node
));
1620 current_function_decl
= decl
;
1622 resdecl
= build_decl (RESULT_DECL
, NULL_TREE
, void_type_node
);
1623 DECL_ARTIFICIAL (resdecl
) = 1;
1624 DECL_IGNORED_P (resdecl
) = 1;
1625 DECL_RESULT (decl
) = resdecl
;
1627 allocate_struct_function (decl
);
1629 TREE_STATIC (decl
) = 1;
1630 TREE_USED (decl
) = 1;
1631 DECL_ARTIFICIAL (decl
) = 1;
1632 DECL_IGNORED_P (decl
) = 1;
1633 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl
) = 1;
1634 DECL_SAVED_TREE (decl
) = body
;
1635 TREE_PUBLIC (decl
) = ! targetm
.have_ctors_dtors
;
1636 DECL_UNINLINABLE (decl
) = 1;
1638 DECL_INITIAL (decl
) = make_node (BLOCK
);
1639 TREE_USED (DECL_INITIAL (decl
)) = 1;
1641 DECL_SOURCE_LOCATION (decl
) = input_location
;
1642 cfun
->function_end_locus
= input_location
;
1647 DECL_STATIC_CONSTRUCTOR (decl
) = 1;
1650 DECL_STATIC_DESTRUCTOR (decl
) = 1;
1656 gimplify_function_tree (decl
);
1658 cgraph_add_new_function (decl
, false);
1659 cgraph_mark_needed_node (cgraph_node (decl
));
1661 if (targetm
.have_ctors_dtors
)
1663 void (*fn
) (rtx
, int);
1666 fn
= targetm
.asm_out
.constructor
;
1668 fn
= targetm
.asm_out
.destructor
;
1669 fn (XEXP (DECL_RTL (decl
), 0), priority
);
1676 cgraph_dump_file
= dump_begin (TDI_cgraph
, NULL
);
1679 /* The edges representing the callers of the NEW_VERSION node were
1680 fixed by cgraph_function_versioning (), now the call_expr in their
1681 respective tree code should be updated to call the NEW_VERSION. */
1684 update_call_expr (struct cgraph_node
*new_version
)
1686 struct cgraph_edge
*e
;
1688 gcc_assert (new_version
);
1689 for (e
= new_version
->callers
; e
; e
= e
->next_caller
)
1690 /* Update the call expr on the edges
1691 to call the new version. */
1692 TREE_OPERAND (TREE_OPERAND (get_call_expr_in (e
->call_stmt
), 0), 0) = new_version
->decl
;
1696 /* Create a new cgraph node which is the new version of
1697 OLD_VERSION node. REDIRECT_CALLERS holds the callers
1698 edges which should be redirected to point to
1699 NEW_VERSION. ALL the callees edges of OLD_VERSION
1700 are cloned to the new version node. Return the new
1703 static struct cgraph_node
*
1704 cgraph_copy_node_for_versioning (struct cgraph_node
*old_version
,
1706 VEC(cgraph_edge_p
,heap
) *redirect_callers
)
1708 struct cgraph_node
*new_version
;
1709 struct cgraph_edge
*e
, *new_e
;
1710 struct cgraph_edge
*next_callee
;
1713 gcc_assert (old_version
);
1715 new_version
= cgraph_node (new_decl
);
1717 new_version
->analyzed
= true;
1718 new_version
->local
= old_version
->local
;
1719 new_version
->global
= old_version
->global
;
1720 new_version
->rtl
= new_version
->rtl
;
1721 new_version
->reachable
= true;
1722 new_version
->count
= old_version
->count
;
1724 /* Clone the old node callees. Recursive calls are
1726 for (e
= old_version
->callees
;e
; e
=e
->next_callee
)
1728 new_e
= cgraph_clone_edge (e
, new_version
, e
->call_stmt
, 0, e
->loop_nest
, true);
1729 new_e
->count
= e
->count
;
1731 /* Fix recursive calls.
1732 If OLD_VERSION has a recursive call after the
1733 previous edge cloning, the new version will have an edge
1734 pointing to the old version, which is wrong;
1735 Redirect it to point to the new version. */
1736 for (e
= new_version
->callees
; e
; e
= next_callee
)
1738 next_callee
= e
->next_callee
;
1739 if (e
->callee
== old_version
)
1740 cgraph_redirect_edge_callee (e
, new_version
);
1745 for (i
= 0; VEC_iterate (cgraph_edge_p
, redirect_callers
, i
, e
); i
++)
1747 /* Redirect calls to the old version node to point to its new
1749 cgraph_redirect_edge_callee (e
, new_version
);
1755 /* Perform function versioning.
1756 Function versioning includes copying of the tree and
1757 a callgraph update (creating a new cgraph node and updating
1758 its callees and callers).
1760 REDIRECT_CALLERS varray includes the edges to be redirected
1763 TREE_MAP is a mapping of tree nodes we want to replace with
1764 new ones (according to results of prior analysis).
1765 OLD_VERSION_NODE is the node that is versioned.
1766 It returns the new version's cgraph node. */
1768 struct cgraph_node
*
1769 cgraph_function_versioning (struct cgraph_node
*old_version_node
,
1770 VEC(cgraph_edge_p
,heap
) *redirect_callers
,
1771 varray_type tree_map
)
1773 tree old_decl
= old_version_node
->decl
;
1774 struct cgraph_node
*new_version_node
= NULL
;
1777 if (!tree_versionable_function_p (old_decl
))
1780 /* Make a new FUNCTION_DECL tree node for the
1782 new_decl
= copy_node (old_decl
);
1784 /* Create the new version's call-graph node.
1785 and update the edges of the new node. */
1787 cgraph_copy_node_for_versioning (old_version_node
, new_decl
,
1790 /* Copy the OLD_VERSION_NODE function tree to the new version. */
1791 tree_function_versioning (old_decl
, new_decl
, tree_map
, false);
1792 /* Update the call_expr on the edges to call the new version node. */
1793 update_call_expr (new_version_node
);
1795 /* Update the new version's properties.
1796 Make The new version visible only within this translation unit.
1797 ??? We cannot use COMDAT linkage because there is no
1798 ABI support for this. */
1799 DECL_EXTERNAL (new_version_node
->decl
) = 0;
1800 DECL_ONE_ONLY (new_version_node
->decl
) = 0;
1801 TREE_PUBLIC (new_version_node
->decl
) = 0;
1802 DECL_COMDAT (new_version_node
->decl
) = 0;
1803 new_version_node
->local
.externally_visible
= 0;
1804 new_version_node
->local
.local
= 1;
1805 new_version_node
->lowered
= true;
1806 return new_version_node
;
1809 /* Produce separate function body for inline clones so the offline copy can be
1810 modified without affecting them. */
1811 struct cgraph_node
*
1812 save_inline_function_body (struct cgraph_node
*node
)
1814 struct cgraph_node
*first_clone
;
1816 gcc_assert (node
== cgraph_node (node
->decl
));
1818 cgraph_lower_function (node
);
1820 /* In non-unit-at-a-time we construct full fledged clone we never output to
1821 assembly file. This clone is pointed out by inline_decl of original function
1822 and inlining infrastructure knows how to deal with this. */
1823 if (!flag_unit_at_a_time
)
1825 struct cgraph_edge
*e
;
1827 first_clone
= cgraph_clone_node (node
, node
->count
, 0, false);
1828 first_clone
->needed
= 0;
1829 first_clone
->reachable
= 1;
1830 /* Recursively clone all bodies. */
1831 for (e
= first_clone
->callees
; e
; e
= e
->next_callee
)
1832 if (!e
->inline_failed
)
1833 cgraph_clone_inlined_nodes (e
, true, false);
1836 first_clone
= node
->next_clone
;
1838 first_clone
->decl
= copy_node (node
->decl
);
1839 node
->next_clone
= NULL
;
1840 if (!flag_unit_at_a_time
)
1841 node
->inline_decl
= first_clone
->decl
;
1842 first_clone
->prev_clone
= NULL
;
1843 cgraph_insert_node_to_hashtable (first_clone
);
1844 gcc_assert (first_clone
== cgraph_node (first_clone
->decl
));
1846 /* Copy the OLD_VERSION_NODE function tree to the new version. */
1847 tree_function_versioning (node
->decl
, first_clone
->decl
, NULL
, true);
1849 DECL_EXTERNAL (first_clone
->decl
) = 0;
1850 DECL_ONE_ONLY (first_clone
->decl
) = 0;
1851 TREE_PUBLIC (first_clone
->decl
) = 0;
1852 DECL_COMDAT (first_clone
->decl
) = 0;
1854 for (node
= first_clone
->next_clone
; node
; node
= node
->next_clone
)
1855 node
->decl
= first_clone
->decl
;
1856 #ifdef ENABLE_CHECKING
1857 verify_cgraph_node (first_clone
);