1 /* Callgraph based interprocedural optimizations.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
3 2011 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This module implements main driver of compilation process as well as
23 few basic interprocedural optimizers.
25 The main scope of this file is to act as an interface in between
26 tree based frontends and the backend (and middle end)
28 The front-end is supposed to use following functionality:
30 - cgraph_finalize_function
32 This function is called once front-end has parsed whole body of function
33 and it is certain that the function body nor the declaration will change.
35 (There is one exception needed for implementing GCC extern inline
38 - varpool_finalize_variable
40 This function has same behavior as the above but is used for static
43 - cgraph_finalize_compilation_unit
45 This function is called once (source level) compilation unit is finalized
46 and it will no longer change.
48 In the call-graph construction and local function
49 analysis takes place here. Bodies of unreachable functions are released
50 to conserve memory usage.
52 The function can be called multiple times when multiple source level
53 compilation units are combined (such as in C frontend)
57 In this unit-at-a-time compilation the intra procedural analysis takes
58 place here. In particular the static functions whose address is never
59 taken are marked as local. Backend can then use this information to
60 modify calling conventions, do better inlining or similar optimizations.
62 - cgraph_mark_needed_node
63 - varpool_mark_needed_node
65 When function or variable is referenced by some hidden way the call-graph
66 data structure must be updated accordingly by this function.
67 There should be little need to call this function and all the references
68 should be made explicit to cgraph code. At present these functions are
69 used by C++ frontend to explicitly mark the keyed methods.
71 - analyze_expr callback
73 This function is responsible for lowering tree nodes not understood by
74 generic code into understandable ones or alternatively marking
75 callgraph and varpool nodes referenced by the as needed.
77 ??? On the tree-ssa genericizing should take place here and we will avoid
78 need for these hooks (replacing them by genericizing hook)
80 Analyzing of all functions is deferred
81 to cgraph_finalize_compilation_unit and expansion into cgraph_optimize.
83 In cgraph_finalize_compilation_unit the reachable functions are
84 analyzed. During analysis the call-graph edges from reachable
85 functions are constructed and their destinations are marked as
86 reachable. References to functions and variables are discovered too
87 and variables found to be needed output to the assembly file. Via
88 mark_referenced call in assemble_variable functions referenced by
89 static variables are noticed too.
91 The intra-procedural information is produced and its existence
92 indicated by global_info_ready. Once this flag is set it is impossible
93 to change function from !reachable to reachable and thus
94 assemble_variable no longer call mark_referenced.
96 Finally the call-graph is topologically sorted and all reachable functions
97 that has not been completely inlined or are not external are output.
99 ??? It is possible that reference to function or variable is optimized
100 out. We can not deal with this nicely because topological order is not
101 suitable for it. For tree-ssa we may consider another pass doing
102 optimization and re-discovering reachable functions.
104 ??? Reorganize code so variables are output very last and only if they
105 really has been referenced by produced code, so we catch more cases
106 where reference has been optimized out. */
111 #include "coretypes.h"
115 #include "tree-flow.h"
116 #include "tree-inline.h"
117 #include "langhooks.h"
118 #include "pointer-set.h"
125 #include "diagnostic.h"
126 #include "tree-pretty-print.h"
127 #include "gimple-pretty-print.h"
132 #include "function.h"
133 #include "ipa-prop.h"
135 #include "tree-iterator.h"
136 #include "tree-pass.h"
137 #include "tree-dump.h"
139 #include "coverage.h"
142 static void cgraph_expand_all_functions (void);
143 static void cgraph_mark_functions_to_output (void);
144 static void cgraph_expand_function (struct cgraph_node
*);
145 static void cgraph_output_pending_asms (void);
146 static void cgraph_analyze_function (struct cgraph_node
*);
148 FILE *cgraph_dump_file
;
150 /* Used for vtable lookup in thunk adjusting. */
151 static GTY (()) tree vtable_entry_type
;
153 /* Determine if function DECL is needed. That is, visible to something
154 either outside this translation unit, something magic in the system
158 cgraph_decide_is_function_needed (struct cgraph_node
*node
, tree decl
)
160 /* If the user told us it is used, then it must be so. */
161 if (node
->local
.externally_visible
)
164 /* ??? If the assembler name is set by hand, it is possible to assemble
165 the name later after finalizing the function and the fact is noticed
166 in assemble_name then. This is arguably a bug. */
167 if (DECL_ASSEMBLER_NAME_SET_P (decl
)
168 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl
)))
171 /* With -fkeep-inline-functions we are keeping all inline functions except
172 for extern inline ones. */
173 if (flag_keep_inline_functions
174 && DECL_DECLARED_INLINE_P (decl
)
175 && !DECL_EXTERNAL (decl
)
176 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl
)))
179 /* If we decided it was needed before, but at the time we didn't have
180 the body of the function available, then it's still needed. We have
181 to go back and re-check its dependencies now. */
185 /* Externally visible functions must be output. The exception is
186 COMDAT functions that must be output only when they are needed.
188 When not optimizing, also output the static functions. (see
189 PR24561), but don't do so for always_inline functions, functions
190 declared inline and nested functions. These were optimized out
191 in the original implementation and it is unclear whether we want
192 to change the behavior here. */
193 if (((TREE_PUBLIC (decl
)
195 && !node
->local
.disregard_inline_limits
196 && !DECL_DECLARED_INLINE_P (decl
)
197 && !(DECL_CONTEXT (decl
)
198 && TREE_CODE (DECL_CONTEXT (decl
)) == FUNCTION_DECL
)))
199 && !flag_whole_program
201 && !DECL_COMDAT (decl
) && !DECL_EXTERNAL (decl
))
207 /* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
208 functions into callgraph in a way so they look like ordinary reachable
209 functions inserted into callgraph already at construction time. */
212 cgraph_process_new_functions (void)
216 struct cgraph_node
*node
;
218 varpool_analyze_pending_decls ();
219 /* Note that this queue may grow as its being processed, as the new
220 functions may generate new ones. */
221 while (cgraph_new_nodes
)
223 node
= cgraph_new_nodes
;
225 cgraph_new_nodes
= cgraph_new_nodes
->next_needed
;
226 switch (cgraph_state
)
228 case CGRAPH_STATE_CONSTRUCTION
:
229 /* At construction time we just need to finalize function and move
230 it into reachable functions list. */
232 node
->next_needed
= NULL
;
233 cgraph_finalize_function (fndecl
, false);
234 cgraph_mark_reachable_node (node
);
238 case CGRAPH_STATE_IPA
:
239 case CGRAPH_STATE_IPA_SSA
:
240 /* When IPA optimization already started, do all essential
241 transformations that has been already performed on the whole
242 cgraph but not on this function. */
244 gimple_register_cfg_hooks ();
246 cgraph_analyze_function (node
);
247 push_cfun (DECL_STRUCT_FUNCTION (fndecl
));
248 current_function_decl
= fndecl
;
249 compute_inline_parameters (node
);
250 if ((cgraph_state
== CGRAPH_STATE_IPA_SSA
251 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl
)))
252 /* When not optimizing, be sure we run early local passes anyway
255 execute_pass_list (pass_early_local_passes
.pass
.sub
);
256 free_dominance_info (CDI_POST_DOMINATORS
);
257 free_dominance_info (CDI_DOMINATORS
);
259 current_function_decl
= NULL
;
262 case CGRAPH_STATE_EXPANSION
:
263 /* Functions created during expansion shall be compiled
266 cgraph_expand_function (node
);
273 cgraph_call_function_insertion_hooks (node
);
274 varpool_analyze_pending_decls ();
279 /* As an GCC extension we allow redefinition of the function. The
280 semantics when both copies of bodies differ is not well defined.
281 We replace the old body with new body so in unit at a time mode
282 we always use new body, while in normal mode we may end up with
283 old body inlined into some functions and new body expanded and
286 ??? It may make more sense to use one body for inlining and other
287 body for expanding the function but this is difficult to do. */
290 cgraph_reset_node (struct cgraph_node
*node
)
292 /* If node->process is set, then we have already begun whole-unit analysis.
293 This is *not* testing for whether we've already emitted the function.
294 That case can be sort-of legitimately seen with real function redefinition
295 errors. I would argue that the front end should never present us with
296 such a case, but don't enforce that for now. */
297 gcc_assert (!node
->process
);
299 /* Reset our data structures so we can analyze the function again. */
300 memset (&node
->local
, 0, sizeof (node
->local
));
301 memset (&node
->global
, 0, sizeof (node
->global
));
302 memset (&node
->rtl
, 0, sizeof (node
->rtl
));
303 node
->analyzed
= false;
304 node
->local
.redefined_extern_inline
= true;
305 node
->local
.finalized
= false;
307 cgraph_node_remove_callees (node
);
309 /* We may need to re-queue the node for assembling in case
310 we already proceeded it and ignored as not needed or got
311 a re-declaration in IMA mode. */
314 struct cgraph_node
*n
;
316 for (n
= cgraph_nodes_queue
; n
; n
= n
->next_needed
)
325 cgraph_lower_function (struct cgraph_node
*node
)
331 lower_nested_functions (node
->decl
);
332 gcc_assert (!node
->nested
);
334 tree_lowering_passes (node
->decl
);
335 node
->lowered
= true;
338 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
339 logic in effect. If NESTED is true, then our caller cannot stand to have
340 the garbage collector run at the moment. We would need to either create
341 a new GC context, or just not compile right now. */
344 cgraph_finalize_function (tree decl
, bool nested
)
346 struct cgraph_node
*node
= cgraph_node (decl
);
348 if (node
->local
.finalized
)
349 cgraph_reset_node (node
);
351 node
->pid
= cgraph_max_pid
++;
352 notice_global_symbol (decl
);
353 node
->local
.finalized
= true;
354 node
->lowered
= DECL_STRUCT_FUNCTION (decl
)->cfg
!= NULL
;
355 node
->finalized_by_frontend
= true;
357 if (cgraph_decide_is_function_needed (node
, decl
))
358 cgraph_mark_needed_node (node
);
360 /* Since we reclaim unreachable nodes at the end of every language
361 level unit, we need to be conservative about possible entry points
363 if ((TREE_PUBLIC (decl
) && !DECL_COMDAT (decl
) && !DECL_EXTERNAL (decl
))
364 || DECL_STATIC_CONSTRUCTOR (decl
)
365 || DECL_STATIC_DESTRUCTOR (decl
)
366 /* COMDAT virtual functions may be referenced by vtable from
367 other compilatoin unit. Still we want to devirtualize calls
368 to those so we need to analyze them.
369 FIXME: We should introduce may edges for this purpose and update
370 their handling in unreachable function removal and inliner too. */
371 || (DECL_VIRTUAL_P (decl
) && (DECL_COMDAT (decl
) || DECL_EXTERNAL (decl
))))
372 cgraph_mark_reachable_node (node
);
374 /* If we've not yet emitted decl, tell the debug info about it. */
375 if (!TREE_ASM_WRITTEN (decl
))
376 (*debug_hooks
->deferred_inline_function
) (decl
);
378 /* Possibly warn about unused parameters. */
379 if (warn_unused_parameter
)
380 do_warn_unused_parameter (decl
);
386 /* C99 extern inline keywords allow changing of declaration after function
387 has been finalized. We need to re-decide if we want to mark the function as
391 cgraph_mark_if_needed (tree decl
)
393 struct cgraph_node
*node
= cgraph_node (decl
);
394 if (node
->local
.finalized
&& cgraph_decide_is_function_needed (node
, decl
))
395 cgraph_mark_needed_node (node
);
398 /* Return TRUE if NODE2 is equivalent to NODE or its clone. */
400 clone_of_p (struct cgraph_node
*node
, struct cgraph_node
*node2
)
402 while (node
!= node2
&& node2
)
403 node2
= node2
->clone_of
;
404 return node2
!= NULL
;
407 /* Verify edge E count and frequency. */
410 verify_edge_count_and_frequency (struct cgraph_edge
*e
)
412 bool error_found
= false;
415 error ("caller edge count is negative");
418 if (e
->frequency
< 0)
420 error ("caller edge frequency is negative");
423 if (e
->frequency
> CGRAPH_FREQ_MAX
)
425 error ("caller edge frequency is too large");
428 if (gimple_has_body_p (e
->caller
->decl
)
429 && !e
->caller
->global
.inlined_to
431 != compute_call_stmt_bb_frequency (e
->caller
->decl
,
432 gimple_bb (e
->call_stmt
))))
434 error ("caller edge frequency %i does not match BB freqency %i",
436 compute_call_stmt_bb_frequency (e
->caller
->decl
,
437 gimple_bb (e
->call_stmt
)));
443 /* Switch to THIS_CFUN if needed and print STMT to stderr. */
445 cgraph_debug_gimple_stmt (struct function
*this_cfun
, gimple stmt
)
447 /* debug_gimple_stmt needs correct cfun */
448 if (cfun
!= this_cfun
)
449 set_cfun (this_cfun
);
450 debug_gimple_stmt (stmt
);
453 /* Verify cgraph nodes of given cgraph node. */
455 verify_cgraph_node (struct cgraph_node
*node
)
457 struct cgraph_edge
*e
;
458 struct function
*this_cfun
= DECL_STRUCT_FUNCTION (node
->decl
);
459 basic_block this_block
;
460 gimple_stmt_iterator gsi
;
461 bool error_found
= false;
466 timevar_push (TV_CGRAPH_VERIFY
);
467 for (e
= node
->callees
; e
; e
= e
->next_callee
)
470 error ("aux field set for edge %s->%s",
471 identifier_to_locale (cgraph_node_name (e
->caller
)),
472 identifier_to_locale (cgraph_node_name (e
->callee
)));
477 error ("execution count is negative");
480 if (node
->global
.inlined_to
&& node
->local
.externally_visible
)
482 error ("externally visible inline clone");
485 if (node
->global
.inlined_to
&& node
->address_taken
)
487 error ("inline clone with address taken");
490 if (node
->global
.inlined_to
&& node
->needed
)
492 error ("inline clone is needed");
495 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
499 error ("aux field set for indirect edge from %s",
500 identifier_to_locale (cgraph_node_name (e
->caller
)));
503 if (!e
->indirect_unknown_callee
504 || !e
->indirect_info
)
506 error ("An indirect edge from %s is not marked as indirect or has "
507 "associated indirect_info, the corresponding statement is: ",
508 identifier_to_locale (cgraph_node_name (e
->caller
)));
509 cgraph_debug_gimple_stmt (this_cfun
, e
->call_stmt
);
513 for (e
= node
->callers
; e
; e
= e
->next_caller
)
515 if (verify_edge_count_and_frequency (e
))
517 if (!e
->inline_failed
)
519 if (node
->global
.inlined_to
520 != (e
->caller
->global
.inlined_to
521 ? e
->caller
->global
.inlined_to
: e
->caller
))
523 error ("inlined_to pointer is wrong");
526 if (node
->callers
->next_caller
)
528 error ("multiple inline callers");
533 if (node
->global
.inlined_to
)
535 error ("inlined_to pointer set for noninline callers");
539 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
540 if (verify_edge_count_and_frequency (e
))
542 if (!node
->callers
&& node
->global
.inlined_to
)
544 error ("inlined_to pointer is set but no predecessors found");
547 if (node
->global
.inlined_to
== node
)
549 error ("inlined_to pointer refers to itself");
553 if (!cgraph_node (node
->decl
))
555 error ("node not found in cgraph_hash");
561 struct cgraph_node
*n
;
562 for (n
= node
->clone_of
->clones
; n
; n
= n
->next_sibling_clone
)
567 error ("node has wrong clone_of");
573 struct cgraph_node
*n
;
574 for (n
= node
->clones
; n
; n
= n
->next_sibling_clone
)
575 if (n
->clone_of
!= node
)
579 error ("node has wrong clone list");
583 if ((node
->prev_sibling_clone
|| node
->next_sibling_clone
) && !node
->clone_of
)
585 error ("node is in clone list but it is not clone");
588 if (!node
->prev_sibling_clone
&& node
->clone_of
&& node
->clone_of
->clones
!= node
)
590 error ("node has wrong prev_clone pointer");
593 if (node
->prev_sibling_clone
&& node
->prev_sibling_clone
->next_sibling_clone
!= node
)
595 error ("double linked list of clones corrupted");
598 if (node
->same_comdat_group
)
600 struct cgraph_node
*n
= node
->same_comdat_group
;
602 if (!DECL_ONE_ONLY (node
->decl
))
604 error ("non-DECL_ONE_ONLY node in a same_comdat_group list");
609 error ("node is alone in a comdat group");
614 if (!n
->same_comdat_group
)
616 error ("same_comdat_group is not a circular list");
620 n
= n
->same_comdat_group
;
625 if (node
->analyzed
&& gimple_has_body_p (node
->decl
)
626 && !TREE_ASM_WRITTEN (node
->decl
)
627 && (!DECL_EXTERNAL (node
->decl
) || node
->global
.inlined_to
)
632 /* The nodes we're interested in are never shared, so walk
633 the tree ignoring duplicates. */
634 struct pointer_set_t
*visited_nodes
= pointer_set_create ();
635 /* Reach the trees by walking over the CFG, and note the
636 enclosing basic-blocks in the call edges. */
637 FOR_EACH_BB_FN (this_block
, this_cfun
)
638 for (gsi
= gsi_start_bb (this_block
);
642 gimple stmt
= gsi_stmt (gsi
);
643 if (is_gimple_call (stmt
))
645 struct cgraph_edge
*e
= cgraph_edge (node
, stmt
);
646 tree decl
= gimple_call_fndecl (stmt
);
651 error ("shared call_stmt:");
652 cgraph_debug_gimple_stmt (this_cfun
, stmt
);
655 if (!e
->indirect_unknown_callee
)
657 struct cgraph_node
*n
;
659 if (e
->callee
->same_body_alias
)
661 error ("edge points to same body alias:");
662 debug_tree (e
->callee
->decl
);
665 else if (!e
->callee
->global
.inlined_to
667 && cgraph_get_node (decl
)
668 && (e
->callee
->former_clone_of
669 != cgraph_get_node (decl
)->decl
)
670 && !clone_of_p (cgraph_node (decl
),
673 error ("edge points to wrong declaration:");
674 debug_tree (e
->callee
->decl
);
675 fprintf (stderr
," Instead of:");
680 && (n
= cgraph_get_node_or_alias (decl
))
681 && (n
->same_body_alias
682 && n
->thunk
.thunk_p
))
684 error ("a call to thunk improperly represented "
685 "in the call graph:");
686 cgraph_debug_gimple_stmt (this_cfun
, stmt
);
692 error ("an indirect edge with unknown callee "
693 "corresponding to a call_stmt with "
694 "a known declaration:");
696 cgraph_debug_gimple_stmt (this_cfun
, e
->call_stmt
);
702 error ("missing callgraph edge for call stmt:");
703 cgraph_debug_gimple_stmt (this_cfun
, stmt
);
708 pointer_set_destroy (visited_nodes
);
711 /* No CFG available?! */
714 for (e
= node
->callees
; e
; e
= e
->next_callee
)
718 error ("edge %s->%s has no corresponding call_stmt",
719 identifier_to_locale (cgraph_node_name (e
->caller
)),
720 identifier_to_locale (cgraph_node_name (e
->callee
)));
721 cgraph_debug_gimple_stmt (this_cfun
, e
->call_stmt
);
726 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
730 error ("an indirect edge from %s has no corresponding call_stmt",
731 identifier_to_locale (cgraph_node_name (e
->caller
)));
732 cgraph_debug_gimple_stmt (this_cfun
, e
->call_stmt
);
740 dump_cgraph_node (stderr
, node
);
741 internal_error ("verify_cgraph_node failed");
743 timevar_pop (TV_CGRAPH_VERIFY
);
746 /* Verify whole cgraph structure. */
750 struct cgraph_node
*node
;
755 for (node
= cgraph_nodes
; node
; node
= node
->next
)
756 verify_cgraph_node (node
);
759 /* Output all asm statements we have stored up to be output. */
762 cgraph_output_pending_asms (void)
764 struct cgraph_asm_node
*can
;
769 for (can
= cgraph_asm_nodes
; can
; can
= can
->next
)
770 assemble_asm (can
->asm_str
);
771 cgraph_asm_nodes
= NULL
;
774 /* Analyze the function scheduled to be output. */
776 cgraph_analyze_function (struct cgraph_node
*node
)
778 tree save
= current_function_decl
;
779 tree decl
= node
->decl
;
781 current_function_decl
= decl
;
782 push_cfun (DECL_STRUCT_FUNCTION (decl
));
784 assign_assembler_name_if_neeeded (node
->decl
);
786 /* Make sure to gimplify bodies only once. During analyzing a
787 function we lower it, which will require gimplified nested
788 functions, so we can end up here with an already gimplified
790 if (!gimple_body (decl
))
791 gimplify_function_tree (decl
);
792 dump_function (TDI_generic
, decl
);
794 cgraph_lower_function (node
);
795 node
->analyzed
= true;
798 current_function_decl
= save
;
801 /* Process attributes common for vars and functions. */
804 process_common_attributes (tree decl
)
806 tree weakref
= lookup_attribute ("weakref", DECL_ATTRIBUTES (decl
));
808 if (weakref
&& !lookup_attribute ("alias", DECL_ATTRIBUTES (decl
)))
810 warning_at (DECL_SOURCE_LOCATION (decl
), OPT_Wattributes
,
811 "%<weakref%> attribute should be accompanied with"
812 " an %<alias%> attribute");
813 DECL_WEAK (decl
) = 0;
814 DECL_ATTRIBUTES (decl
) = remove_attribute ("weakref",
815 DECL_ATTRIBUTES (decl
));
819 /* Look for externally_visible and used attributes and mark cgraph nodes
822 We cannot mark the nodes at the point the attributes are processed (in
823 handle_*_attribute) because the copy of the declarations available at that
824 point may not be canonical. For example, in:
827 void f() __attribute__((used));
829 the declaration we see in handle_used_attribute will be the second
830 declaration -- but the front end will subsequently merge that declaration
831 with the original declaration and discard the second declaration.
833 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
836 void f() __attribute__((externally_visible));
840 So, we walk the nodes at the end of the translation unit, applying the
841 attributes at that point. */
844 process_function_and_variable_attributes (struct cgraph_node
*first
,
845 struct varpool_node
*first_var
)
847 struct cgraph_node
*node
;
848 struct varpool_node
*vnode
;
850 for (node
= cgraph_nodes
; node
!= first
; node
= node
->next
)
852 tree decl
= node
->decl
;
853 if (DECL_PRESERVE_P (decl
))
854 cgraph_mark_needed_node (node
);
855 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
856 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl
))
857 && TREE_PUBLIC (node
->decl
))
859 if (node
->local
.finalized
)
860 cgraph_mark_needed_node (node
);
862 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl
)))
864 if (! TREE_PUBLIC (node
->decl
))
865 warning_at (DECL_SOURCE_LOCATION (node
->decl
), OPT_Wattributes
,
866 "%<externally_visible%>"
867 " attribute have effect only on public objects");
868 else if (node
->local
.finalized
)
869 cgraph_mark_needed_node (node
);
871 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl
))
872 && node
->local
.finalized
)
874 warning_at (DECL_SOURCE_LOCATION (node
->decl
), OPT_Wattributes
,
875 "%<weakref%> attribute ignored"
876 " because function is defined");
877 DECL_WEAK (decl
) = 0;
878 DECL_ATTRIBUTES (decl
) = remove_attribute ("weakref",
879 DECL_ATTRIBUTES (decl
));
881 process_common_attributes (decl
);
883 for (vnode
= varpool_nodes
; vnode
!= first_var
; vnode
= vnode
->next
)
885 tree decl
= vnode
->decl
;
886 if (DECL_PRESERVE_P (decl
))
888 vnode
->force_output
= true;
889 if (vnode
->finalized
)
890 varpool_mark_needed_node (vnode
);
892 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
893 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl
))
894 && TREE_PUBLIC (vnode
->decl
))
896 if (vnode
->finalized
)
897 varpool_mark_needed_node (vnode
);
899 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl
)))
901 if (! TREE_PUBLIC (vnode
->decl
))
902 warning_at (DECL_SOURCE_LOCATION (vnode
->decl
), OPT_Wattributes
,
903 "%<externally_visible%>"
904 " attribute have effect only on public objects");
905 else if (vnode
->finalized
)
906 varpool_mark_needed_node (vnode
);
908 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl
))
910 && DECL_INITIAL (decl
))
912 warning_at (DECL_SOURCE_LOCATION (vnode
->decl
), OPT_Wattributes
,
913 "%<weakref%> attribute ignored"
914 " because variable is initialized");
915 DECL_WEAK (decl
) = 0;
916 DECL_ATTRIBUTES (decl
) = remove_attribute ("weakref",
917 DECL_ATTRIBUTES (decl
));
919 process_common_attributes (decl
);
923 /* Process CGRAPH_NODES_NEEDED queue, analyze each function (and transitively
924 each reachable functions) and build cgraph.
925 The function can be called multiple times after inserting new nodes
926 into beginning of queue. Just the new part of queue is re-scanned then. */
929 cgraph_analyze_functions (void)
931 /* Keep track of already processed nodes when called multiple times for
932 intermodule optimization. */
933 static struct cgraph_node
*first_analyzed
;
934 struct cgraph_node
*first_processed
= first_analyzed
;
935 static struct varpool_node
*first_analyzed_var
;
936 struct cgraph_node
*node
, *next
;
938 bitmap_obstack_initialize (NULL
);
939 process_function_and_variable_attributes (first_processed
,
941 first_processed
= cgraph_nodes
;
942 first_analyzed_var
= varpool_nodes
;
943 varpool_analyze_pending_decls ();
944 if (cgraph_dump_file
)
946 fprintf (cgraph_dump_file
, "Initial entry points:");
947 for (node
= cgraph_nodes
; node
!= first_analyzed
; node
= node
->next
)
949 fprintf (cgraph_dump_file
, " %s", cgraph_node_name (node
));
950 fprintf (cgraph_dump_file
, "\n");
952 cgraph_process_new_functions ();
954 /* Propagate reachability flag and lower representation of all reachable
955 functions. In the future, lowering will introduce new functions and
956 new entry points on the way (by template instantiation and virtual
957 method table generation for instance). */
958 while (cgraph_nodes_queue
)
960 struct cgraph_edge
*edge
;
961 tree decl
= cgraph_nodes_queue
->decl
;
963 node
= cgraph_nodes_queue
;
964 cgraph_nodes_queue
= cgraph_nodes_queue
->next_needed
;
965 node
->next_needed
= NULL
;
967 /* ??? It is possible to create extern inline function and later using
968 weak alias attribute to kill its body. See
969 gcc.c-torture/compile/20011119-1.c */
970 if (!DECL_STRUCT_FUNCTION (decl
))
972 cgraph_reset_node (node
);
977 cgraph_analyze_function (node
);
979 for (edge
= node
->callees
; edge
; edge
= edge
->next_callee
)
980 if (!edge
->callee
->reachable
)
981 cgraph_mark_reachable_node (edge
->callee
);
983 if (node
->same_comdat_group
)
985 for (next
= node
->same_comdat_group
;
987 next
= next
->same_comdat_group
)
988 cgraph_mark_reachable_node (next
);
991 /* If decl is a clone of an abstract function, mark that abstract
992 function so that we don't release its body. The DECL_INITIAL() of that
993 abstract function declaration will be later needed to output debug info. */
994 if (DECL_ABSTRACT_ORIGIN (decl
))
996 struct cgraph_node
*origin_node
= cgraph_node (DECL_ABSTRACT_ORIGIN (decl
));
997 origin_node
->abstract_and_needed
= true;
1000 /* We finalize local static variables during constructing callgraph
1001 edges. Process their attributes too. */
1002 process_function_and_variable_attributes (first_processed
,
1003 first_analyzed_var
);
1004 first_processed
= cgraph_nodes
;
1005 first_analyzed_var
= varpool_nodes
;
1006 varpool_analyze_pending_decls ();
1007 cgraph_process_new_functions ();
1010 /* Collect entry points to the unit. */
1011 if (cgraph_dump_file
)
1013 fprintf (cgraph_dump_file
, "Unit entry points:");
1014 for (node
= cgraph_nodes
; node
!= first_analyzed
; node
= node
->next
)
1016 fprintf (cgraph_dump_file
, " %s", cgraph_node_name (node
));
1017 fprintf (cgraph_dump_file
, "\n\nInitial ");
1018 dump_cgraph (cgraph_dump_file
);
1019 dump_varpool (cgraph_dump_file
);
1022 if (cgraph_dump_file
)
1023 fprintf (cgraph_dump_file
, "\nReclaiming functions:");
1025 for (node
= cgraph_nodes
; node
!= first_analyzed
; node
= next
)
1027 tree decl
= node
->decl
;
1030 if (node
->local
.finalized
&& !gimple_has_body_p (decl
))
1031 cgraph_reset_node (node
);
1033 if (!node
->reachable
&& gimple_has_body_p (decl
))
1035 if (cgraph_dump_file
)
1036 fprintf (cgraph_dump_file
, " %s", cgraph_node_name (node
));
1037 cgraph_remove_node (node
);
1041 node
->next_needed
= NULL
;
1042 gcc_assert (!node
->local
.finalized
|| gimple_has_body_p (decl
));
1043 gcc_assert (node
->analyzed
== node
->local
.finalized
);
1045 if (cgraph_dump_file
)
1047 fprintf (cgraph_dump_file
, "\n\nReclaimed ");
1048 dump_cgraph (cgraph_dump_file
);
1049 dump_varpool (cgraph_dump_file
);
1051 bitmap_obstack_release (NULL
);
1052 first_analyzed
= cgraph_nodes
;
1057 /* Analyze the whole compilation unit once it is parsed completely. */
1060 cgraph_finalize_compilation_unit (void)
1062 timevar_push (TV_CGRAPH
);
1064 /* Do not skip analyzing the functions if there were errors, we
1065 miss diagnostics for following functions otherwise. */
1067 /* Emit size functions we didn't inline. */
1068 finalize_size_functions ();
1070 /* Mark alias targets necessary and emit diagnostics. */
1071 finish_aliases_1 ();
1075 fprintf (stderr
, "\nAnalyzing compilation unit\n");
1079 /* Gimplify and lower all functions, compute reachability and
1080 remove unreachable nodes. */
1081 cgraph_analyze_functions ();
1083 /* Mark alias targets necessary and emit diagnostics. */
1084 finish_aliases_1 ();
1086 /* Gimplify and lower thunks. */
1087 cgraph_analyze_functions ();
1089 /* Finally drive the pass manager. */
1092 timevar_pop (TV_CGRAPH
);
1096 /* Figure out what functions we want to assemble. */
1099 cgraph_mark_functions_to_output (void)
1101 struct cgraph_node
*node
;
1102 #ifdef ENABLE_CHECKING
1103 bool check_same_comdat_groups
= false;
1105 for (node
= cgraph_nodes
; node
; node
= node
->next
)
1106 gcc_assert (!node
->process
);
1109 for (node
= cgraph_nodes
; node
; node
= node
->next
)
1111 tree decl
= node
->decl
;
1112 struct cgraph_edge
*e
;
1114 gcc_assert (!node
->process
|| node
->same_comdat_group
);
1118 for (e
= node
->callers
; e
; e
= e
->next_caller
)
1119 if (e
->inline_failed
)
1122 /* We need to output all local functions that are used and not
1123 always inlined, as well as those that are reachable from
1124 outside the current compilation unit. */
1126 && !node
->global
.inlined_to
1127 && (!cgraph_only_called_directly_p (node
)
1128 || (e
&& node
->reachable
))
1129 && !TREE_ASM_WRITTEN (decl
)
1130 && !DECL_EXTERNAL (decl
))
1133 if (node
->same_comdat_group
)
1135 struct cgraph_node
*next
;
1136 for (next
= node
->same_comdat_group
;
1138 next
= next
->same_comdat_group
)
1142 else if (node
->same_comdat_group
)
1144 #ifdef ENABLE_CHECKING
1145 check_same_comdat_groups
= true;
1150 /* We should've reclaimed all functions that are not needed. */
1151 #ifdef ENABLE_CHECKING
1152 if (!node
->global
.inlined_to
1153 && gimple_has_body_p (decl
)
1154 /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
1155 are inside partition, we can end up not removing the body since we no longer
1156 have analyzed node pointing to it. */
1157 && !node
->in_other_partition
1158 && !DECL_EXTERNAL (decl
))
1160 dump_cgraph_node (stderr
, node
);
1161 internal_error ("failed to reclaim unneeded function");
1164 gcc_assert (node
->global
.inlined_to
1165 || !gimple_has_body_p (decl
)
1166 || node
->in_other_partition
1167 || DECL_EXTERNAL (decl
));
1172 #ifdef ENABLE_CHECKING
1173 if (check_same_comdat_groups
)
1174 for (node
= cgraph_nodes
; node
; node
= node
->next
)
1175 if (node
->same_comdat_group
&& !node
->process
)
1177 tree decl
= node
->decl
;
1178 if (!node
->global
.inlined_to
1179 && gimple_has_body_p (decl
)
1180 /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
1181 are inside partition, we can end up not removing the body since we no longer
1182 have analyzed node pointing to it. */
1183 && !node
->in_other_partition
1184 && !DECL_EXTERNAL (decl
))
1186 dump_cgraph_node (stderr
, node
);
1187 internal_error ("failed to reclaim unneeded function");
1193 /* DECL is FUNCTION_DECL. Initialize datastructures so DECL is a function
1194 in lowered gimple form.
1196 Set current_function_decl and cfun to newly constructed empty function body.
1197 return basic block in the function body. */
1200 init_lowered_empty_function (tree decl
)
1204 current_function_decl
= decl
;
1205 allocate_struct_function (decl
, false);
1206 gimple_register_cfg_hooks ();
1207 init_empty_tree_cfg ();
1208 init_tree_ssa (cfun
);
1209 init_ssa_operands ();
1210 cfun
->gimple_df
->in_ssa_p
= true;
1211 DECL_INITIAL (decl
) = make_node (BLOCK
);
1213 DECL_SAVED_TREE (decl
) = error_mark_node
;
1214 cfun
->curr_properties
|=
1215 (PROP_gimple_lcf
| PROP_gimple_leh
| PROP_cfg
| PROP_referenced_vars
|
1218 /* Create BB for body of the function and connect it properly. */
1219 bb
= create_basic_block (NULL
, (void *) 0, ENTRY_BLOCK_PTR
);
1220 make_edge (ENTRY_BLOCK_PTR
, bb
, 0);
1221 make_edge (bb
, EXIT_BLOCK_PTR
, 0);
1226 /* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1227 offset indicated by VIRTUAL_OFFSET, if that is
1228 non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
1229 zero for a result adjusting thunk. */
1232 thunk_adjust (gimple_stmt_iterator
* bsi
,
1233 tree ptr
, bool this_adjusting
,
1234 HOST_WIDE_INT fixed_offset
, tree virtual_offset
)
1240 && fixed_offset
!= 0)
1242 stmt
= gimple_build_assign (ptr
,
1243 fold_build2_loc (input_location
,
1245 TREE_TYPE (ptr
), ptr
,
1246 size_int (fixed_offset
)));
1247 gsi_insert_after (bsi
, stmt
, GSI_NEW_STMT
);
1250 /* If there's a virtual offset, look up that value in the vtable and
1251 adjust the pointer again. */
1259 if (!vtable_entry_type
)
1261 tree vfunc_type
= make_node (FUNCTION_TYPE
);
1262 TREE_TYPE (vfunc_type
) = integer_type_node
;
1263 TYPE_ARG_TYPES (vfunc_type
) = NULL_TREE
;
1264 layout_type (vfunc_type
);
1266 vtable_entry_type
= build_pointer_type (vfunc_type
);
1270 create_tmp_var (build_pointer_type
1271 (build_pointer_type (vtable_entry_type
)), "vptr");
1273 /* The vptr is always at offset zero in the object. */
1274 stmt
= gimple_build_assign (vtabletmp
,
1275 build1 (NOP_EXPR
, TREE_TYPE (vtabletmp
),
1277 gsi_insert_after (bsi
, stmt
, GSI_NEW_STMT
);
1278 mark_symbols_for_renaming (stmt
);
1279 find_referenced_vars_in (stmt
);
1281 /* Form the vtable address. */
1282 vtabletmp2
= create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp
)),
1284 stmt
= gimple_build_assign (vtabletmp2
,
1285 build_simple_mem_ref (vtabletmp
));
1286 gsi_insert_after (bsi
, stmt
, GSI_NEW_STMT
);
1287 mark_symbols_for_renaming (stmt
);
1288 find_referenced_vars_in (stmt
);
1290 /* Find the entry with the vcall offset. */
1291 stmt
= gimple_build_assign (vtabletmp2
,
1292 fold_build2_loc (input_location
,
1294 TREE_TYPE (vtabletmp2
),
1296 fold_convert (sizetype
,
1298 gsi_insert_after (bsi
, stmt
, GSI_NEW_STMT
);
1300 /* Get the offset itself. */
1301 vtabletmp3
= create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp2
)),
1303 stmt
= gimple_build_assign (vtabletmp3
,
1304 build_simple_mem_ref (vtabletmp2
));
1305 gsi_insert_after (bsi
, stmt
, GSI_NEW_STMT
);
1306 mark_symbols_for_renaming (stmt
);
1307 find_referenced_vars_in (stmt
);
1309 /* Cast to sizetype. */
1310 offsettmp
= create_tmp_var (sizetype
, "offset");
1311 stmt
= gimple_build_assign (offsettmp
, fold_convert (sizetype
, vtabletmp3
));
1312 gsi_insert_after (bsi
, stmt
, GSI_NEW_STMT
);
1313 mark_symbols_for_renaming (stmt
);
1314 find_referenced_vars_in (stmt
);
1316 /* Adjust the `this' pointer. */
1317 ptr
= fold_build2_loc (input_location
,
1318 POINTER_PLUS_EXPR
, TREE_TYPE (ptr
), ptr
,
1323 && fixed_offset
!= 0)
1324 /* Adjust the pointer by the constant. */
1328 if (TREE_CODE (ptr
) == VAR_DECL
)
1332 ptrtmp
= create_tmp_var (TREE_TYPE (ptr
), "ptr");
1333 stmt
= gimple_build_assign (ptrtmp
, ptr
);
1334 gsi_insert_after (bsi
, stmt
, GSI_NEW_STMT
);
1335 mark_symbols_for_renaming (stmt
);
1336 find_referenced_vars_in (stmt
);
1338 ptr
= fold_build2_loc (input_location
,
1339 POINTER_PLUS_EXPR
, TREE_TYPE (ptrtmp
), ptrtmp
,
1340 size_int (fixed_offset
));
1343 /* Emit the statement and gimplify the adjustment expression. */
1344 ret
= create_tmp_var (TREE_TYPE (ptr
), "adjusted_this");
1345 stmt
= gimple_build_assign (ret
, ptr
);
1346 mark_symbols_for_renaming (stmt
);
1347 find_referenced_vars_in (stmt
);
1348 gsi_insert_after (bsi
, stmt
, GSI_NEW_STMT
);
1353 /* Produce assembler for thunk NODE. */
1356 assemble_thunk (struct cgraph_node
*node
)
1358 bool this_adjusting
= node
->thunk
.this_adjusting
;
1359 HOST_WIDE_INT fixed_offset
= node
->thunk
.fixed_offset
;
1360 HOST_WIDE_INT virtual_value
= node
->thunk
.virtual_value
;
1361 tree virtual_offset
= NULL
;
1362 tree alias
= node
->thunk
.alias
;
1363 tree thunk_fndecl
= node
->decl
;
1364 tree a
= DECL_ARGUMENTS (thunk_fndecl
);
1366 current_function_decl
= thunk_fndecl
;
1368 /* Ensure thunks are emitted in their correct sections. */
1369 resolve_unique_section (thunk_fndecl
, 0, flag_function_sections
);
1372 && targetm
.asm_out
.can_output_mi_thunk (thunk_fndecl
, fixed_offset
,
1373 virtual_value
, alias
))
1378 DECL_RESULT (thunk_fndecl
)
1379 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl
),
1380 RESULT_DECL
, 0, integer_type_node
);
1381 fnname
= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl
));
1383 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1385 fn_block
= make_node (BLOCK
);
1386 BLOCK_VARS (fn_block
) = a
;
1387 DECL_INITIAL (thunk_fndecl
) = fn_block
;
1388 init_function_start (thunk_fndecl
);
1390 assemble_start_function (thunk_fndecl
, fnname
);
1392 targetm
.asm_out
.output_mi_thunk (asm_out_file
, thunk_fndecl
,
1393 fixed_offset
, virtual_value
, alias
);
1395 assemble_end_function (thunk_fndecl
, fnname
);
1396 init_insn_lengths ();
1397 free_after_compilation (cfun
);
1399 TREE_ASM_WRITTEN (thunk_fndecl
) = 1;
1404 basic_block bb
, then_bb
, else_bb
, return_bb
;
1405 gimple_stmt_iterator bsi
;
1411 VEC(tree
, heap
) *vargs
;
1416 DECL_IGNORED_P (thunk_fndecl
) = 1;
1417 bitmap_obstack_initialize (NULL
);
1419 if (node
->thunk
.virtual_offset_p
)
1420 virtual_offset
= size_int (virtual_value
);
1422 /* Build the return declaration for the function. */
1423 restype
= TREE_TYPE (TREE_TYPE (thunk_fndecl
));
1424 if (DECL_RESULT (thunk_fndecl
) == NULL_TREE
)
1426 resdecl
= build_decl (input_location
, RESULT_DECL
, 0, restype
);
1427 DECL_ARTIFICIAL (resdecl
) = 1;
1428 DECL_IGNORED_P (resdecl
) = 1;
1429 DECL_RESULT (thunk_fndecl
) = resdecl
;
1432 resdecl
= DECL_RESULT (thunk_fndecl
);
1434 bb
= then_bb
= else_bb
= return_bb
= init_lowered_empty_function (thunk_fndecl
);
1436 bsi
= gsi_start_bb (bb
);
1438 /* Build call to the function being thunked. */
1439 if (!VOID_TYPE_P (restype
))
1441 if (!is_gimple_reg_type (restype
))
1444 add_local_decl (cfun
, restmp
);
1445 BLOCK_VARS (DECL_INITIAL (current_function_decl
)) = restmp
;
1448 restmp
= create_tmp_var_raw (restype
, "retval");
1451 for (arg
= a
; arg
; arg
= DECL_CHAIN (arg
))
1453 vargs
= VEC_alloc (tree
, heap
, nargs
);
1455 VEC_quick_push (tree
, vargs
,
1460 VEC_quick_push (tree
, vargs
, a
);
1461 for (i
= 1, arg
= DECL_CHAIN (a
); i
< nargs
; i
++, arg
= DECL_CHAIN (arg
))
1462 VEC_quick_push (tree
, vargs
, arg
);
1463 call
= gimple_build_call_vec (build_fold_addr_expr_loc (0, alias
), vargs
);
1464 VEC_free (tree
, heap
, vargs
);
1465 gimple_call_set_cannot_inline (call
, true);
1466 gimple_call_set_from_thunk (call
, true);
1468 gimple_call_set_lhs (call
, restmp
);
1469 gsi_insert_after (&bsi
, call
, GSI_NEW_STMT
);
1470 mark_symbols_for_renaming (call
);
1471 find_referenced_vars_in (call
);
1474 if (restmp
&& !this_adjusting
)
1476 tree true_label
= NULL_TREE
;
1478 if (TREE_CODE (TREE_TYPE (restmp
)) == POINTER_TYPE
)
1481 /* If the return type is a pointer, we need to
1482 protect against NULL. We know there will be an
1483 adjustment, because that's why we're emitting a
1485 then_bb
= create_basic_block (NULL
, (void *) 0, bb
);
1486 return_bb
= create_basic_block (NULL
, (void *) 0, then_bb
);
1487 else_bb
= create_basic_block (NULL
, (void *) 0, else_bb
);
1488 remove_edge (single_succ_edge (bb
));
1489 true_label
= gimple_block_label (then_bb
);
1490 stmt
= gimple_build_cond (NE_EXPR
, restmp
,
1491 build_zero_cst (TREE_TYPE (restmp
)),
1492 NULL_TREE
, NULL_TREE
);
1493 gsi_insert_after (&bsi
, stmt
, GSI_NEW_STMT
);
1494 make_edge (bb
, then_bb
, EDGE_TRUE_VALUE
);
1495 make_edge (bb
, else_bb
, EDGE_FALSE_VALUE
);
1496 make_edge (return_bb
, EXIT_BLOCK_PTR
, 0);
1497 make_edge (then_bb
, return_bb
, EDGE_FALLTHRU
);
1498 make_edge (else_bb
, return_bb
, EDGE_FALLTHRU
);
1499 bsi
= gsi_last_bb (then_bb
);
1502 restmp
= thunk_adjust (&bsi
, restmp
, /*this_adjusting=*/0,
1503 fixed_offset
, virtual_offset
);
1507 bsi
= gsi_last_bb (else_bb
);
1508 stmt
= gimple_build_assign (restmp
,
1509 build_zero_cst (TREE_TYPE (restmp
)));
1510 gsi_insert_after (&bsi
, stmt
, GSI_NEW_STMT
);
1511 bsi
= gsi_last_bb (return_bb
);
1515 gimple_call_set_tail (call
, true);
1517 /* Build return value. */
1518 ret
= gimple_build_return (restmp
);
1519 gsi_insert_after (&bsi
, ret
, GSI_NEW_STMT
);
1521 delete_unreachable_blocks ();
1522 update_ssa (TODO_update_ssa
);
1524 cgraph_remove_same_body_alias (node
);
1525 /* Since we want to emit the thunk, we explicitly mark its name as
1527 cgraph_add_new_function (thunk_fndecl
, true);
1528 bitmap_obstack_release (NULL
);
1530 current_function_decl
= NULL
;
1533 /* Expand function specified by NODE. */
1536 cgraph_expand_function (struct cgraph_node
*node
)
1538 tree decl
= node
->decl
;
1540 /* We ought to not compile any inline clones. */
1541 gcc_assert (!node
->global
.inlined_to
);
1543 announce_function (decl
);
1545 if (node
->same_body
)
1547 struct cgraph_node
*alias
, *next
;
1548 bool saved_alias
= node
->alias
;
1549 for (alias
= node
->same_body
;
1550 alias
&& alias
->next
; alias
= alias
->next
)
1552 /* Walk aliases in the order they were created; it is possible that
1553 thunks reffers to the aliases made earlier. */
1554 for (; alias
; alias
= next
)
1556 next
= alias
->previous
;
1557 if (!alias
->thunk
.thunk_p
)
1558 assemble_alias (alias
->decl
,
1559 DECL_ASSEMBLER_NAME (alias
->thunk
.alias
));
1561 assemble_thunk (alias
);
1563 node
->alias
= saved_alias
;
1564 cgraph_process_new_functions ();
1567 gcc_assert (node
->lowered
);
1569 /* Generate RTL for the body of DECL. */
1570 tree_rest_of_compilation (decl
);
1572 /* Make sure that BE didn't give up on compiling. */
1573 gcc_assert (TREE_ASM_WRITTEN (decl
));
1574 current_function_decl
= NULL
;
1575 gcc_assert (!cgraph_preserve_function_body_p (decl
));
1576 cgraph_release_function_body (node
);
1577 /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
1578 points to the dead function body. */
1579 cgraph_node_remove_callees (node
);
1581 cgraph_function_flags_ready
= true;
1584 /* Return true when CALLER_DECL should be inlined into CALLEE_DECL. */
1587 cgraph_inline_p (struct cgraph_edge
*e
, cgraph_inline_failed_t
*reason
)
1589 *reason
= e
->inline_failed
;
1590 return !e
->inline_failed
;
1595 /* Expand all functions that must be output.
1597 Attempt to topologically sort the nodes so function is output when
1598 all called functions are already assembled to allow data to be
1599 propagated across the callgraph. Use a stack to get smaller distance
1600 between a function and its callees (later we may choose to use a more
1601 sophisticated algorithm for function reordering; we will likely want
1602 to use subsections to make the output functions appear in top-down
1606 cgraph_expand_all_functions (void)
1608 struct cgraph_node
*node
;
1609 struct cgraph_node
**order
= XCNEWVEC (struct cgraph_node
*, cgraph_n_nodes
);
1610 int order_pos
, new_order_pos
= 0;
1613 order_pos
= cgraph_postorder (order
);
1614 gcc_assert (order_pos
== cgraph_n_nodes
);
1616 /* Garbage collector may remove inline clones we eliminate during
1617 optimization. So we must be sure to not reference them. */
1618 for (i
= 0; i
< order_pos
; i
++)
1619 if (order
[i
]->process
)
1620 order
[new_order_pos
++] = order
[i
];
1622 for (i
= new_order_pos
- 1; i
>= 0; i
--)
1627 gcc_assert (node
->reachable
);
1629 cgraph_expand_function (node
);
1632 cgraph_process_new_functions ();
/* This is used to sort the node types by the cgraph order number.  */

enum cgraph_order_sort_kind
{
  ORDER_UNDEFINED = 0,
  ORDER_FUNCTION,
  ORDER_VAR,
  ORDER_ASM
};

/* One slot per source-order position; KIND selects the active union
   member (function, variable, or toplevel asm).  */
struct cgraph_order_sort
{
  enum cgraph_order_sort_kind kind;
  union
  {
    struct cgraph_node *f;
    struct varpool_node *v;
    struct cgraph_asm_node *a;
  } u;
};
1659 /* Output all functions, variables, and asm statements in the order
1660 according to their order fields, which is the order in which they
1661 appeared in the file. This implements -fno-toplevel-reorder. In
1662 this mode we may output functions and variables which don't really
1663 need to be output. */
1666 cgraph_output_in_order (void)
1669 struct cgraph_order_sort
*nodes
;
1671 struct cgraph_node
*pf
;
1672 struct varpool_node
*pv
;
1673 struct cgraph_asm_node
*pa
;
1676 nodes
= XCNEWVEC (struct cgraph_order_sort
, max
);
1678 varpool_analyze_pending_decls ();
1680 for (pf
= cgraph_nodes
; pf
; pf
= pf
->next
)
1685 gcc_assert (nodes
[i
].kind
== ORDER_UNDEFINED
);
1686 nodes
[i
].kind
= ORDER_FUNCTION
;
1691 for (pv
= varpool_nodes_queue
; pv
; pv
= pv
->next_needed
)
1694 gcc_assert (nodes
[i
].kind
== ORDER_UNDEFINED
);
1695 nodes
[i
].kind
= ORDER_VAR
;
1699 for (pa
= cgraph_asm_nodes
; pa
; pa
= pa
->next
)
1702 gcc_assert (nodes
[i
].kind
== ORDER_UNDEFINED
);
1703 nodes
[i
].kind
= ORDER_ASM
;
1707 /* In toplevel reorder mode we output all statics; mark them as needed. */
1708 for (i
= 0; i
< max
; ++i
)
1710 if (nodes
[i
].kind
== ORDER_VAR
)
1712 varpool_mark_needed_node (nodes
[i
].u
.v
);
1715 varpool_empty_needed_queue ();
1717 for (i
= 0; i
< max
; ++i
)
1718 if (nodes
[i
].kind
== ORDER_VAR
)
1719 varpool_finalize_named_section_flags (nodes
[i
].u
.v
);
1721 for (i
= 0; i
< max
; ++i
)
1723 switch (nodes
[i
].kind
)
1725 case ORDER_FUNCTION
:
1726 nodes
[i
].u
.f
->process
= 0;
1727 cgraph_expand_function (nodes
[i
].u
.f
);
1731 varpool_assemble_decl (nodes
[i
].u
.v
);
1735 assemble_asm (nodes
[i
].u
.a
->asm_str
);
1738 case ORDER_UNDEFINED
:
1746 cgraph_asm_nodes
= NULL
;
1750 /* Return true when function body of DECL still needs to be kept around
1751 for later re-use. */
1753 cgraph_preserve_function_body_p (tree decl
)
1755 struct cgraph_node
*node
;
1757 gcc_assert (cgraph_global_info_ready
);
1758 /* Look if there is any clone around. */
1759 node
= cgraph_node (decl
);
1769 current_function_decl
= NULL
;
1770 gimple_register_cfg_hooks ();
1771 bitmap_obstack_initialize (NULL
);
1773 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START
, NULL
);
1777 execute_ipa_pass_list (all_small_ipa_passes
);
1782 /* If pass_all_early_optimizations was not scheduled, the state of
1783 the cgraph will not be properly updated. Update it now. */
1784 if (cgraph_state
< CGRAPH_STATE_IPA_SSA
)
1785 cgraph_state
= CGRAPH_STATE_IPA_SSA
;
1789 /* Generate coverage variables and constructors. */
1792 /* Process new functions added. */
1794 current_function_decl
= NULL
;
1795 cgraph_process_new_functions ();
1797 execute_ipa_summary_passes
1798 ((struct ipa_opt_pass_d
*) all_regular_ipa_passes
);
1801 /* Some targets need to handle LTO assembler output specially. */
1802 if (flag_generate_lto
)
1803 targetm
.asm_out
.lto_start ();
1805 execute_ipa_summary_passes ((struct ipa_opt_pass_d
*) all_lto_gen_passes
);
1808 ipa_write_summaries ();
1810 if (flag_generate_lto
)
1811 targetm
.asm_out
.lto_end ();
1814 execute_ipa_pass_list (all_regular_ipa_passes
);
1815 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END
, NULL
);
1817 bitmap_obstack_release (NULL
);
1821 /* Perform simple optimizations based on callgraph. */
1824 cgraph_optimize (void)
1829 #ifdef ENABLE_CHECKING
1833 /* Frontend may output common variables after the unit has been finalized.
1834 It is safe to deal with them here as they are always zero initialized. */
1835 varpool_analyze_pending_decls ();
1837 timevar_push (TV_CGRAPHOPT
);
1838 if (pre_ipa_mem_report
)
1840 fprintf (stderr
, "Memory consumption before IPA\n");
1841 dump_memory_report (false);
1844 fprintf (stderr
, "Performing interprocedural optimizations\n");
1845 cgraph_state
= CGRAPH_STATE_IPA
;
1847 /* Don't run the IPA passes if there was any error or sorry messages. */
1851 /* Do nothing else if any IPA pass found errors. */
1854 timevar_pop (TV_CGRAPHOPT
);
1858 /* This pass remove bodies of extern inline functions we never inlined.
1859 Do this later so other IPA passes see what is really going on. */
1860 cgraph_remove_unreachable_nodes (false, dump_file
);
1861 cgraph_global_info_ready
= true;
1862 if (cgraph_dump_file
)
1864 fprintf (cgraph_dump_file
, "Optimized ");
1865 dump_cgraph (cgraph_dump_file
);
1866 dump_varpool (cgraph_dump_file
);
1868 if (post_ipa_mem_report
)
1870 fprintf (stderr
, "Memory consumption after IPA\n");
1871 dump_memory_report (false);
1873 timevar_pop (TV_CGRAPHOPT
);
1875 /* Output everything. */
1876 (*debug_hooks
->assembly_start
) ();
1878 fprintf (stderr
, "Assembling functions:\n");
1879 #ifdef ENABLE_CHECKING
1883 cgraph_materialize_all_clones ();
1884 cgraph_mark_functions_to_output ();
1886 cgraph_state
= CGRAPH_STATE_EXPANSION
;
1887 if (!flag_toplevel_reorder
)
1888 cgraph_output_in_order ();
1891 cgraph_output_pending_asms ();
1893 cgraph_expand_all_functions ();
1894 varpool_remove_unreferenced_decls ();
1896 varpool_assemble_pending_decls ();
1898 cgraph_process_new_functions ();
1899 cgraph_state
= CGRAPH_STATE_FINISHED
;
1901 if (cgraph_dump_file
)
1903 fprintf (cgraph_dump_file
, "\nFinal ");
1904 dump_cgraph (cgraph_dump_file
);
1905 dump_varpool (cgraph_dump_file
);
1907 #ifdef ENABLE_CHECKING
1909 /* Double check that all inline clones are gone and that all
1910 function bodies have been released from memory. */
1913 struct cgraph_node
*node
;
1914 bool error_found
= false;
1916 for (node
= cgraph_nodes
; node
; node
= node
->next
)
1918 && (node
->global
.inlined_to
1919 || gimple_has_body_p (node
->decl
)))
1922 dump_cgraph_node (stderr
, node
);
1925 internal_error ("nodes with unreleased memory found");
1933 if (!cgraph_dump_file
)
1934 cgraph_dump_file
= dump_begin (TDI_cgraph
, NULL
);
1937 /* The edges representing the callers of the NEW_VERSION node were
1938 fixed by cgraph_function_versioning (), now the call_expr in their
1939 respective tree code should be updated to call the NEW_VERSION. */
1942 update_call_expr (struct cgraph_node
*new_version
)
1944 struct cgraph_edge
*e
;
1946 gcc_assert (new_version
);
1948 /* Update the call expr on the edges to call the new version. */
1949 for (e
= new_version
->callers
; e
; e
= e
->next_caller
)
1951 struct function
*inner_function
= DECL_STRUCT_FUNCTION (e
->caller
->decl
);
1952 gimple_call_set_fndecl (e
->call_stmt
, new_version
->decl
);
1953 maybe_clean_eh_stmt_fn (inner_function
, e
->call_stmt
);
1958 /* Create a new cgraph node which is the new version of
1959 OLD_VERSION node. REDIRECT_CALLERS holds the callers
1960 edges which should be redirected to point to
1961 NEW_VERSION. ALL the callees edges of OLD_VERSION
1962 are cloned to the new version node. Return the new
1965 If non-NULL BLOCK_TO_COPY determine what basic blocks
1966 was copied to prevent duplications of calls that are dead
1969 static struct cgraph_node
*
1970 cgraph_copy_node_for_versioning (struct cgraph_node
*old_version
,
1972 VEC(cgraph_edge_p
,heap
) *redirect_callers
,
1975 struct cgraph_node
*new_version
;
1976 struct cgraph_edge
*e
;
1979 gcc_assert (old_version
);
1981 new_version
= cgraph_node (new_decl
);
1983 new_version
->analyzed
= true;
1984 new_version
->local
= old_version
->local
;
1985 new_version
->local
.externally_visible
= false;
1986 new_version
->local
.local
= true;
1987 new_version
->local
.vtable_method
= false;
1988 new_version
->global
= old_version
->global
;
1989 new_version
->rtl
= old_version
->rtl
;
1990 new_version
->reachable
= true;
1991 new_version
->count
= old_version
->count
;
1993 for (e
= old_version
->callees
; e
; e
=e
->next_callee
)
1995 || bitmap_bit_p (bbs_to_copy
, gimple_bb (e
->call_stmt
)->index
))
1996 cgraph_clone_edge (e
, new_version
, e
->call_stmt
,
1997 e
->lto_stmt_uid
, REG_BR_PROB_BASE
,
1999 e
->loop_nest
, true);
2000 for (e
= old_version
->indirect_calls
; e
; e
=e
->next_callee
)
2002 || bitmap_bit_p (bbs_to_copy
, gimple_bb (e
->call_stmt
)->index
))
2003 cgraph_clone_edge (e
, new_version
, e
->call_stmt
,
2004 e
->lto_stmt_uid
, REG_BR_PROB_BASE
,
2006 e
->loop_nest
, true);
2007 FOR_EACH_VEC_ELT (cgraph_edge_p
, redirect_callers
, i
, e
)
2009 /* Redirect calls to the old version node to point to its new
2011 cgraph_redirect_edge_callee (e
, new_version
);
2017 /* Perform function versioning.
2018 Function versioning includes copying of the tree and
2019 a callgraph update (creating a new cgraph node and updating
2020 its callees and callers).
2022 REDIRECT_CALLERS varray includes the edges to be redirected
2025 TREE_MAP is a mapping of tree nodes we want to replace with
2026 new ones (according to results of prior analysis).
2027 OLD_VERSION_NODE is the node that is versioned.
2028 It returns the new version's cgraph node.
2029 If non-NULL ARGS_TO_SKIP determine function parameters to remove
2031 If non-NULL BLOCK_TO_COPY determine what basic blocks to copy.
2032 If non_NULL NEW_ENTRY determine new entry BB of the clone. */
2034 struct cgraph_node
*
2035 cgraph_function_versioning (struct cgraph_node
*old_version_node
,
2036 VEC(cgraph_edge_p
,heap
) *redirect_callers
,
2037 VEC (ipa_replace_map_p
,gc
)* tree_map
,
2038 bitmap args_to_skip
,
2040 basic_block new_entry_block
,
2041 const char *clone_name
)
2043 tree old_decl
= old_version_node
->decl
;
2044 struct cgraph_node
*new_version_node
= NULL
;
2047 if (!tree_versionable_function_p (old_decl
))
2050 gcc_assert (old_version_node
->local
.can_change_signature
|| !args_to_skip
);
2052 /* Make a new FUNCTION_DECL tree node for the
2055 new_decl
= copy_node (old_decl
);
2057 new_decl
= build_function_decl_skip_args (old_decl
, args_to_skip
);
2059 /* Generate a new name for the new version. */
2060 DECL_NAME (new_decl
) = clone_function_name (old_decl
, clone_name
);
2061 SET_DECL_ASSEMBLER_NAME (new_decl
, DECL_NAME (new_decl
));
2062 SET_DECL_RTL (new_decl
, NULL
);
2064 /* Create the new version's call-graph node.
2065 and update the edges of the new node. */
2067 cgraph_copy_node_for_versioning (old_version_node
, new_decl
,
2068 redirect_callers
, bbs_to_copy
);
2070 /* Copy the OLD_VERSION_NODE function tree to the new version. */
2071 tree_function_versioning (old_decl
, new_decl
, tree_map
, false, args_to_skip
,
2072 bbs_to_copy
, new_entry_block
);
2074 /* Update the new version's properties.
2075 Make The new version visible only within this translation unit. Make sure
2076 that is not weak also.
2077 ??? We cannot use COMDAT linkage because there is no
2078 ABI support for this. */
2079 cgraph_make_decl_local (new_version_node
->decl
);
2080 DECL_VIRTUAL_P (new_version_node
->decl
) = 0;
2081 new_version_node
->local
.externally_visible
= 0;
2082 new_version_node
->local
.local
= 1;
2083 new_version_node
->lowered
= true;
2085 /* Update the call_expr on the edges to call the new version node. */
2086 update_call_expr (new_version_node
);
2088 cgraph_call_function_insertion_hooks (new_version_node
);
2089 return new_version_node
;
2092 /* Produce separate function body for inline clones so the offline copy can be
2093 modified without affecting them. */
2094 struct cgraph_node
*
2095 save_inline_function_body (struct cgraph_node
*node
)
2097 struct cgraph_node
*first_clone
, *n
;
2099 gcc_assert (node
== cgraph_node (node
->decl
));
2101 cgraph_lower_function (node
);
2103 first_clone
= node
->clones
;
2105 first_clone
->decl
= copy_node (node
->decl
);
2106 cgraph_insert_node_to_hashtable (first_clone
);
2107 gcc_assert (first_clone
== cgraph_node (first_clone
->decl
));
2108 if (first_clone
->next_sibling_clone
)
2110 for (n
= first_clone
->next_sibling_clone
; n
->next_sibling_clone
; n
= n
->next_sibling_clone
)
2111 n
->clone_of
= first_clone
;
2112 n
->clone_of
= first_clone
;
2113 n
->next_sibling_clone
= first_clone
->clones
;
2114 if (first_clone
->clones
)
2115 first_clone
->clones
->prev_sibling_clone
= n
;
2116 first_clone
->clones
= first_clone
->next_sibling_clone
;
2117 first_clone
->next_sibling_clone
->prev_sibling_clone
= NULL
;
2118 first_clone
->next_sibling_clone
= NULL
;
2119 gcc_assert (!first_clone
->prev_sibling_clone
);
2121 first_clone
->clone_of
= NULL
;
2122 node
->clones
= NULL
;
2124 if (first_clone
->clones
)
2125 for (n
= first_clone
->clones
; n
!= first_clone
;)
2127 gcc_assert (n
->decl
== node
->decl
);
2128 n
->decl
= first_clone
->decl
;
2131 else if (n
->next_sibling_clone
)
2132 n
= n
->next_sibling_clone
;
2135 while (n
!= first_clone
&& !n
->next_sibling_clone
)
2137 if (n
!= first_clone
)
2138 n
= n
->next_sibling_clone
;
2142 /* Copy the OLD_VERSION_NODE function tree to the new version. */
2143 tree_function_versioning (node
->decl
, first_clone
->decl
, NULL
, true, NULL
,
2146 DECL_EXTERNAL (first_clone
->decl
) = 0;
2147 DECL_COMDAT_GROUP (first_clone
->decl
) = NULL_TREE
;
2148 TREE_PUBLIC (first_clone
->decl
) = 0;
2149 DECL_COMDAT (first_clone
->decl
) = 0;
2150 VEC_free (ipa_opt_pass
, heap
,
2151 first_clone
->ipa_transforms_to_apply
);
2152 first_clone
->ipa_transforms_to_apply
= NULL
;
2154 #ifdef ENABLE_CHECKING
2155 verify_cgraph_node (first_clone
);
2160 /* Given virtual clone, turn it into actual clone. */
2162 cgraph_materialize_clone (struct cgraph_node
*node
)
2164 bitmap_obstack_initialize (NULL
);
2165 node
->former_clone_of
= node
->clone_of
->decl
;
2166 if (node
->clone_of
->former_clone_of
)
2167 node
->former_clone_of
= node
->clone_of
->former_clone_of
;
2168 /* Copy the OLD_VERSION_NODE function tree to the new version. */
2169 tree_function_versioning (node
->clone_of
->decl
, node
->decl
,
2170 node
->clone
.tree_map
, true,
2171 node
->clone
.args_to_skip
, NULL
, NULL
);
2172 if (cgraph_dump_file
)
2174 dump_function_to_file (node
->clone_of
->decl
, cgraph_dump_file
, dump_flags
);
2175 dump_function_to_file (node
->decl
, cgraph_dump_file
, dump_flags
);
2178 /* Function is no longer clone. */
2179 if (node
->next_sibling_clone
)
2180 node
->next_sibling_clone
->prev_sibling_clone
= node
->prev_sibling_clone
;
2181 if (node
->prev_sibling_clone
)
2182 node
->prev_sibling_clone
->next_sibling_clone
= node
->next_sibling_clone
;
2184 node
->clone_of
->clones
= node
->next_sibling_clone
;
2185 node
->next_sibling_clone
= NULL
;
2186 node
->prev_sibling_clone
= NULL
;
2187 if (!node
->clone_of
->analyzed
&& !node
->clone_of
->clones
)
2189 cgraph_release_function_body (node
->clone_of
);
2190 cgraph_node_remove_callees (node
->clone_of
);
2191 ipa_remove_all_references (&node
->clone_of
->ref_list
);
2193 node
->clone_of
= NULL
;
2194 bitmap_obstack_release (NULL
);
2197 /* If necessary, change the function declaration in the call statement
2198 associated with E so that it corresponds to the edge callee. */
2201 cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge
*e
)
2203 tree decl
= gimple_call_fndecl (e
->call_stmt
);
2205 gimple_stmt_iterator gsi
;
2206 bool gsi_computed
= false;
2207 #ifdef ENABLE_CHECKING
2208 struct cgraph_node
*node
;
2211 if (e
->indirect_unknown_callee
2212 || decl
== e
->callee
->decl
2213 /* Don't update call from same body alias to the real function. */
2214 || (decl
&& cgraph_get_node (decl
) == cgraph_get_node (e
->callee
->decl
)))
2215 return e
->call_stmt
;
2217 #ifdef ENABLE_CHECKING
2220 node
= cgraph_get_node (decl
);
2221 gcc_assert (!node
|| !node
->clone
.combined_args_to_skip
);
2225 if (cgraph_dump_file
)
2227 fprintf (cgraph_dump_file
, "updating call of %s/%i -> %s/%i: ",
2228 cgraph_node_name (e
->caller
), e
->caller
->uid
,
2229 cgraph_node_name (e
->callee
), e
->callee
->uid
);
2230 print_gimple_stmt (cgraph_dump_file
, e
->call_stmt
, 0, dump_flags
);
2231 if (e
->callee
->clone
.combined_args_to_skip
)
2233 fprintf (cgraph_dump_file
, " combined args to skip: ");
2234 dump_bitmap (cgraph_dump_file
,
2235 e
->callee
->clone
.combined_args_to_skip
);
2239 if (e
->indirect_info
&&
2240 e
->indirect_info
->thunk_delta
!= 0
2241 && (!e
->callee
->clone
.combined_args_to_skip
2242 || !bitmap_bit_p (e
->callee
->clone
.combined_args_to_skip
, 0)))
2244 if (cgraph_dump_file
)
2245 fprintf (cgraph_dump_file
, " Thunk delta is "
2246 HOST_WIDE_INT_PRINT_DEC
"\n", e
->indirect_info
->thunk_delta
);
2247 gsi
= gsi_for_stmt (e
->call_stmt
);
2248 gsi_computed
= true;
2249 gimple_adjust_this_by_delta (&gsi
,
2250 build_int_cst (sizetype
,
2251 e
->indirect_info
->thunk_delta
));
2252 e
->indirect_info
->thunk_delta
= 0;
2255 if (e
->callee
->clone
.combined_args_to_skip
)
2260 = gimple_call_copy_skip_args (e
->call_stmt
,
2261 e
->callee
->clone
.combined_args_to_skip
);
2262 gimple_call_set_fndecl (new_stmt
, e
->callee
->decl
);
2264 if (gimple_vdef (new_stmt
)
2265 && TREE_CODE (gimple_vdef (new_stmt
)) == SSA_NAME
)
2266 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt
)) = new_stmt
;
2269 gsi
= gsi_for_stmt (e
->call_stmt
);
2270 gsi_replace (&gsi
, new_stmt
, false);
2271 /* We need to defer cleaning EH info on the new statement to
2272 fixup-cfg. We may not have dominator information at this point
2273 and thus would end up with unreachable blocks and have no way
2274 to communicate that we need to run CFG cleanup then. */
2275 lp_nr
= lookup_stmt_eh_lp (e
->call_stmt
);
2278 remove_stmt_from_eh_lp (e
->call_stmt
);
2279 add_stmt_to_eh_lp (new_stmt
, lp_nr
);
2284 new_stmt
= e
->call_stmt
;
2285 gimple_call_set_fndecl (new_stmt
, e
->callee
->decl
);
2286 update_stmt (new_stmt
);
2289 cgraph_set_call_stmt_including_clones (e
->caller
, e
->call_stmt
, new_stmt
);
2291 if (cgraph_dump_file
)
2293 fprintf (cgraph_dump_file
, " updated to:");
2294 print_gimple_stmt (cgraph_dump_file
, e
->call_stmt
, 0, dump_flags
);
2299 /* Once all functions from compilation unit are in memory, produce all clones
2300 and update all calls. We might also do this on demand if we don't want to
2301 bring all functions to memory prior compilation, but current WHOPR
2302 implementation does that and it is is bit easier to keep everything right in
2305 cgraph_materialize_all_clones (void)
2307 struct cgraph_node
*node
;
2308 bool stabilized
= false;
2310 if (cgraph_dump_file
)
2311 fprintf (cgraph_dump_file
, "Materializing clones\n");
2312 #ifdef ENABLE_CHECKING
2316 /* We can also do topological order, but number of iterations should be
2317 bounded by number of IPA passes since single IPA pass is probably not
2318 going to create clones of clones it created itself. */
2322 for (node
= cgraph_nodes
; node
; node
= node
->next
)
2324 if (node
->clone_of
&& node
->decl
!= node
->clone_of
->decl
2325 && !gimple_has_body_p (node
->decl
))
2327 if (gimple_has_body_p (node
->clone_of
->decl
))
2329 if (cgraph_dump_file
)
2331 fprintf (cgraph_dump_file
, "clonning %s to %s\n",
2332 cgraph_node_name (node
->clone_of
),
2333 cgraph_node_name (node
));
2334 if (node
->clone
.tree_map
)
2337 fprintf (cgraph_dump_file
, " replace map: ");
2338 for (i
= 0; i
< VEC_length (ipa_replace_map_p
,
2339 node
->clone
.tree_map
);
2342 struct ipa_replace_map
*replace_info
;
2343 replace_info
= VEC_index (ipa_replace_map_p
,
2344 node
->clone
.tree_map
,
2346 print_generic_expr (cgraph_dump_file
, replace_info
->old_tree
, 0);
2347 fprintf (cgraph_dump_file
, " -> ");
2348 print_generic_expr (cgraph_dump_file
, replace_info
->new_tree
, 0);
2349 fprintf (cgraph_dump_file
, "%s%s;",
2350 replace_info
->replace_p
? "(replace)":"",
2351 replace_info
->ref_p
? "(ref)":"");
2353 fprintf (cgraph_dump_file
, "\n");
2355 if (node
->clone
.args_to_skip
)
2357 fprintf (cgraph_dump_file
, " args_to_skip: ");
2358 dump_bitmap (cgraph_dump_file
, node
->clone
.args_to_skip
);
2360 if (node
->clone
.args_to_skip
)
2362 fprintf (cgraph_dump_file
, " combined_args_to_skip:");
2363 dump_bitmap (cgraph_dump_file
, node
->clone
.combined_args_to_skip
);
2366 cgraph_materialize_clone (node
);
2372 for (node
= cgraph_nodes
; node
; node
= node
->next
)
2373 if (!node
->analyzed
&& node
->callees
)
2374 cgraph_node_remove_callees (node
);
2375 if (cgraph_dump_file
)
2376 fprintf (cgraph_dump_file
, "Materialization Call site updates done.\n");
2377 #ifdef ENABLE_CHECKING
2380 cgraph_remove_unreachable_nodes (false, cgraph_dump_file
);
2383 #include "gt-cgraphunit.h"