1 /* Driver of optimization process
2 Copyright (C) 2003-2015 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* This module implements main driver of compilation process.
23 The main scope of this file is to act as an interface in between
24 tree based frontends and the backend.
26 The front-end is supposed to use the following functionality:
30 This function is called once front-end has parsed whole body of function
31 and it is certain that neither the function body nor the declaration will change.
33 (There is one exception needed for implementing GCC extern inline
36 - varpool_finalize_decl
38 This function has same behavior as the above but is used for static
43 Insert new toplevel ASM statement
45 - finalize_compilation_unit
47 This function is called once (source level) compilation unit is finalized
48 and it will no longer change.
50 The symbol table is constructed starting from the trivially needed
51 symbols finalized by the frontend. Functions are lowered into
52 GIMPLE representation and callgraph/reference lists are constructed.
53 Those are used to discover other necessary functions and variables.
55 At the end the bodies of unreachable functions are removed.
57 The function can be called multiple times when multiple source level
58 compilation units are combined.
62 This passes control to the back-end. Optimizations are performed and
63 final assembler is generated. This is done in the following way. Note
64 that with link time optimization the process is split into three
65 stages (compile time, linktime analysis and parallel linktime as
70 1) Inter-procedural optimization.
73 This part is further split into:
75 a) early optimizations. These are local passes executed in
76 the topological order on the callgraph.
78 The purpose of early optimizations is to optimize away simple
79 things that may otherwise confuse IP analysis. Very simple
80 propagation across the callgraph is done i.e. to discover
81 functions without side effects and simple inlining is performed.
83 b) early small interprocedural passes.
85 Those are interprocedural passes executed only at compilation
86 time. These include, for example, transactional memory lowering,
87 unreachable code removal and other simple transformations.
89 c) IP analysis stage. All interprocedural passes do their
92 Interprocedural passes differ from small interprocedural
93 passes by their ability to operate across whole program
94 at linktime. Their analysis stage is performed early to
95 both reduce linking times and linktime memory usage by
96 not having to represent whole program in memory.
98 d) LTO streaming. When doing LTO, everything important gets
99 streamed into the object file.
101 Compile time and or linktime analysis stage (WPA):
103 At linktime units gets streamed back and symbol table is
104 merged. Function bodies are not streamed in and not
106 e) IP propagation stage. All IP passes execute their
107 IP propagation. This is done based on the earlier analysis
108 without having function bodies at hand.
109 f) Ltrans streaming. When doing WHOPR LTO, the program
110 is partitioned and streamed into multiple object files.
112 Compile time and/or parallel linktime stage (ltrans)
114 Each of the object files is streamed back and compiled
115 separately. Now the function bodies become available
118 2) Virtual clone materialization
119 (cgraph_materialize_clone)
121 IP passes can produce copies of existing functions (such
122 as versioned clones or inline clones) without actually
123 manipulating their bodies by creating virtual clones in
124 the callgraph. At this time the virtual clones are
125 turned into real functions
128 All IP passes transform function bodies based on earlier
129 decision of the IP propagation.
131 4) late small IP passes
133 Simple IP passes working within single program partition.
136 (expand_all_functions)
138 At this stage functions that need to be output into
139 assembler are identified and compiled in topological order
140 6) Output of variables and aliases
141 Now it is known which variable references were not optimized
142 out and thus all variables are output to the file.
144 Note that with -fno-toplevel-reorder passes 5 and 6
145 are combined together in cgraph_output_in_order.
147 Finally there are functions to manipulate the callgraph from
149 - cgraph_add_new_function is used to add backend produced
150 functions introduced after the unit is finalized.
151 The functions are enqueued for later processing and inserted
152 into callgraph with cgraph_process_new_functions.
154 - cgraph_function_versioning
156 produces a copy of function into new one (a version)
157 and apply simple transformations
162 #include "coretypes.h"
164 #include "cfghooks.h"
169 #include "fold-const.h"
171 #include "stor-layout.h"
172 #include "stringpool.h"
173 #include "gimple-ssa.h"
175 #include "cfgcleanup.h"
176 #include "internal-fn.h"
177 #include "gimple-fold.h"
178 #include "gimplify.h"
179 #include "gimple-iterator.h"
180 #include "gimplify-me.h"
181 #include "tree-cfg.h"
182 #include "tree-into-ssa.h"
183 #include "tree-ssa.h"
184 #include "tree-inline.h"
185 #include "langhooks.h"
190 #include "diagnostic.h"
194 #include "alloc-pool.h"
195 #include "symbol-summary.h"
196 #include "ipa-prop.h"
197 #include "tree-iterator.h"
198 #include "tree-pass.h"
199 #include "tree-dump.h"
200 #include "gimple-pretty-print.h"
202 #include "coverage.h"
204 #include "ipa-inline.h"
205 #include "ipa-utils.h"
206 #include "lto-streamer.h"
209 #include "regset.h" /* FIXME: For reg_obstack. */
211 #include "pass_manager.h"
212 #include "tree-nested.h"
213 #include "gimplify.h"
215 #include "tree-chkp.h"
216 #include "lto-section-names.h"
218 #include "print-tree.h"
220 /* Queue of cgraph nodes scheduled to be added into cgraph. This is a
221 secondary queue used during optimization to accommodate passes that
222 may generate new functions that need to be optimized and expanded. */
223 vec
<cgraph_node
*> cgraph_new_nodes
;
225 static void expand_all_functions (void);
226 static void mark_functions_to_output (void);
227 static void handle_alias_pairs (void);
229 /* Used for vtable lookup in thunk adjusting. */
230 static GTY (()) tree vtable_entry_type
;
232 /* Determine if symbol declaration is needed. That is, visible to something
233 either outside this translation unit, something magic in the system
236 symtab_node::needed_p (void)
238 /* Double check that no one output the function into assembly file
240 gcc_checking_assert (!DECL_ASSEMBLER_NAME_SET_P (decl
)
241 || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl
)));
246 if (DECL_EXTERNAL (decl
))
249 /* If the user told us it is used, then it must be so. */
253 /* ABI forced symbols are needed when they are external. */
254 if (forced_by_abi
&& TREE_PUBLIC (decl
))
257 /* Keep constructors, destructors and virtual functions. */
258 if (TREE_CODE (decl
) == FUNCTION_DECL
259 && (DECL_STATIC_CONSTRUCTOR (decl
) || DECL_STATIC_DESTRUCTOR (decl
)))
262 /* Externally visible variables must be output. The exception is
263 COMDAT variables that must be output only when they are needed. */
264 if (TREE_PUBLIC (decl
) && !DECL_COMDAT (decl
))
270 /* Head and terminator of the queue of nodes to be processed while building
273 static symtab_node symtab_terminator
;
274 static symtab_node
*queued_nodes
= &symtab_terminator
;
276 /* Add NODE to queue starting at QUEUED_NODES.
277 The queue is linked via AUX pointers and terminated by pointer to 1. */
280 enqueue_node (symtab_node
*node
)
284 gcc_checking_assert (queued_nodes
);
285 node
->aux
= queued_nodes
;
289 /* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
290 functions into callgraph in a way so they look like ordinary reachable
291 functions inserted into callgraph already at construction time. */
294 symbol_table::process_new_functions (void)
298 if (!cgraph_new_nodes
.exists ())
301 handle_alias_pairs ();
302 /* Note that this queue may grow as its being processed, as the new
303 functions may generate new ones. */
304 for (unsigned i
= 0; i
< cgraph_new_nodes
.length (); i
++)
306 cgraph_node
*node
= cgraph_new_nodes
[i
];
311 /* At construction time we just need to finalize function and move
312 it into reachable functions list. */
314 cgraph_node::finalize_function (fndecl
, false);
315 call_cgraph_insertion_hooks (node
);
321 case IPA_SSA_AFTER_INLINING
:
322 /* When IPA optimization already started, do all essential
323 transformations that has been already performed on the whole
324 cgraph but not on this function. */
326 gimple_register_cfg_hooks ();
329 push_cfun (DECL_STRUCT_FUNCTION (fndecl
));
330 if ((state
== IPA_SSA
|| state
== IPA_SSA_AFTER_INLINING
)
331 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl
)))
332 g
->get_passes ()->execute_early_local_passes ();
333 else if (inline_summaries
!= NULL
)
334 compute_inline_parameters (node
, true);
335 free_dominance_info (CDI_POST_DOMINATORS
);
336 free_dominance_info (CDI_DOMINATORS
);
338 call_cgraph_insertion_hooks (node
);
342 /* Functions created during expansion shall be compiled
345 call_cgraph_insertion_hooks (node
);
355 cgraph_new_nodes
.release ();
358 /* As an GCC extension we allow redefinition of the function. The
359 semantics when both copies of bodies differ is not well defined.
360 We replace the old body with new body so in unit at a time mode
361 we always use new body, while in normal mode we may end up with
362 old body inlined into some functions and new body expanded and
365 ??? It may make more sense to use one body for inlining and other
366 body for expanding the function but this is difficult to do. */
369 cgraph_node::reset (void)
371 /* If process is set, then we have already begun whole-unit analysis.
372 This is *not* testing for whether we've already emitted the function.
373 That case can be sort-of legitimately seen with real function redefinition
374 errors. I would argue that the front end should never present us with
375 such a case, but don't enforce that for now. */
376 gcc_assert (!process
);
378 /* Reset our data structures so we can analyze the function again. */
379 memset (&local
, 0, sizeof (local
));
380 memset (&global
, 0, sizeof (global
));
381 memset (&rtl
, 0, sizeof (rtl
));
386 cpp_implicit_alias
= false;
389 remove_all_references ();
392 /* Return true when there are references to the node. INCLUDE_SELF is
393 true if a self reference counts as a reference. */
396 symtab_node::referred_to_p (bool include_self
)
400 /* See if there are any references at all. */
401 if (iterate_referring (0, ref
))
403 /* For functions check also calls. */
404 cgraph_node
*cn
= dyn_cast
<cgraph_node
*> (this);
405 if (cn
&& cn
->callers
)
409 for (cgraph_edge
*e
= cn
->callers
; e
; e
= e
->next_caller
)
410 if (e
->caller
!= this)
416 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
417 logic in effect. If NO_COLLECT is true, then our caller cannot stand to have
418 the garbage collector run at the moment. We would need to either create
419 a new GC context, or just not compile right now. */
422 cgraph_node::finalize_function (tree decl
, bool no_collect
)
424 cgraph_node
*node
= cgraph_node::get_create (decl
);
426 if (node
->definition
)
428 /* Nested functions should only be defined once. */
429 gcc_assert (!DECL_CONTEXT (decl
)
430 || TREE_CODE (DECL_CONTEXT (decl
)) != FUNCTION_DECL
);
432 node
->local
.redefined_extern_inline
= true;
435 /* Set definition first before calling notice_global_symbol so that
436 it is available to notice_global_symbol. */
437 node
->definition
= true;
438 notice_global_symbol (decl
);
439 node
->lowered
= DECL_STRUCT_FUNCTION (decl
)->cfg
!= NULL
;
441 /* With -fkeep-inline-functions we are keeping all inline functions except
442 for extern inline ones. */
443 if (flag_keep_inline_functions
444 && DECL_DECLARED_INLINE_P (decl
)
445 && !DECL_EXTERNAL (decl
)
446 && !DECL_DISREGARD_INLINE_LIMITS (decl
))
447 node
->force_output
= 1;
449 /* When not optimizing, also output the static functions. (see
450 PR24561), but don't do so for always_inline functions, functions
451 declared inline and nested functions. These were optimized out
452 in the original implementation and it is unclear whether we want
453 to change the behavior here. */
454 if ((!opt_for_fn (decl
, optimize
)
455 && !node
->cpp_implicit_alias
456 && !DECL_DISREGARD_INLINE_LIMITS (decl
)
457 && !DECL_DECLARED_INLINE_P (decl
)
458 && !(DECL_CONTEXT (decl
)
459 && TREE_CODE (DECL_CONTEXT (decl
)) == FUNCTION_DECL
))
460 && !DECL_COMDAT (decl
) && !DECL_EXTERNAL (decl
))
461 node
->force_output
= 1;
463 /* If we've not yet emitted decl, tell the debug info about it. */
464 if (!TREE_ASM_WRITTEN (decl
))
465 (*debug_hooks
->deferred_inline_function
) (decl
);
470 if (symtab
->state
== CONSTRUCTION
471 && (node
->needed_p () || node
->referred_to_p ()))
475 /* Add the function FNDECL to the call graph.
476 Unlike finalize_function, this function is intended to be used
477 by middle end and allows insertion of new function at arbitrary point
478 of compilation. The function can be either in high, low or SSA form
481 The function is assumed to be reachable and have address taken (so no
482 API breaking optimizations are performed on it).
484 Main work done by this function is to enqueue the function for later
485 processing to avoid need the passes to be re-entrant. */
488 cgraph_node::add_new_function (tree fndecl
, bool lowered
)
490 gcc::pass_manager
*passes
= g
->get_passes ();
495 struct function
*fn
= DECL_STRUCT_FUNCTION (fndecl
);
496 const char *function_type
= ((gimple_has_body_p (fndecl
))
498 ? (gimple_in_ssa_p (fn
)
502 : "to-be-gimplified");
504 "Added new %s function %s to callgraph\n",
506 fndecl_name (fndecl
));
509 switch (symtab
->state
)
512 cgraph_node::finalize_function (fndecl
, false);
515 /* Just enqueue function to be processed at nearest occurrence. */
516 node
= cgraph_node::get_create (fndecl
);
518 node
->lowered
= true;
519 cgraph_new_nodes
.safe_push (node
);
524 case IPA_SSA_AFTER_INLINING
:
526 /* Bring the function into finalized state and enqueue for later
527 analyzing and compilation. */
528 node
= cgraph_node::get_create (fndecl
);
529 node
->local
.local
= false;
530 node
->definition
= true;
531 node
->force_output
= true;
532 if (!lowered
&& symtab
->state
== EXPANSION
)
534 push_cfun (DECL_STRUCT_FUNCTION (fndecl
));
535 gimple_register_cfg_hooks ();
536 bitmap_obstack_initialize (NULL
);
537 execute_pass_list (cfun
, passes
->all_lowering_passes
);
538 passes
->execute_early_local_passes ();
539 bitmap_obstack_release (NULL
);
545 node
->lowered
= true;
546 cgraph_new_nodes
.safe_push (node
);
550 /* At the very end of compilation we have to do all the work up
552 node
= cgraph_node::create (fndecl
);
554 node
->lowered
= true;
555 node
->definition
= true;
557 push_cfun (DECL_STRUCT_FUNCTION (fndecl
));
558 gimple_register_cfg_hooks ();
559 bitmap_obstack_initialize (NULL
);
560 if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl
)))
561 g
->get_passes ()->execute_early_local_passes ();
562 bitmap_obstack_release (NULL
);
571 /* Set a personality if required and we already passed EH lowering. */
573 && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl
))
574 == eh_personality_lang
))
575 DECL_FUNCTION_PERSONALITY (fndecl
) = lang_hooks
.eh_personality ();
578 /* Analyze the function scheduled to be output. */
580 cgraph_node::analyze (void)
582 tree decl
= this->decl
;
583 location_t saved_loc
= input_location
;
584 input_location
= DECL_SOURCE_LOCATION (decl
);
588 cgraph_node
*t
= cgraph_node::get (thunk
.alias
);
590 create_edge (t
, NULL
, 0, CGRAPH_FREQ_BASE
);
591 /* Target code in expand_thunk may need the thunk's target
592 to be analyzed, so recurse here. */
597 t
= t
->get_alias_target ();
601 if (!expand_thunk (false, false))
609 resolve_alias (cgraph_node::get (alias_target
));
610 else if (dispatcher_function
)
612 /* Generate the dispatcher body of multi-versioned functions. */
613 cgraph_function_version_info
*dispatcher_version_info
614 = function_version ();
615 if (dispatcher_version_info
!= NULL
616 && (dispatcher_version_info
->dispatcher_resolver
619 tree resolver
= NULL_TREE
;
620 gcc_assert (targetm
.generate_version_dispatcher_body
);
621 resolver
= targetm
.generate_version_dispatcher_body (this);
622 gcc_assert (resolver
!= NULL_TREE
);
627 push_cfun (DECL_STRUCT_FUNCTION (decl
));
629 assign_assembler_name_if_neeeded (decl
);
631 /* Make sure to gimplify bodies only once. During analyzing a
632 function we lower it, which will require gimplified nested
633 functions, so we can end up here with an already gimplified
635 if (!gimple_has_body_p (decl
))
636 gimplify_function_tree (decl
);
638 /* Lower the function. */
642 lower_nested_functions (decl
);
643 gcc_assert (!nested
);
645 gimple_register_cfg_hooks ();
646 bitmap_obstack_initialize (NULL
);
647 execute_pass_list (cfun
, g
->get_passes ()->all_lowering_passes
);
648 free_dominance_info (CDI_POST_DOMINATORS
);
649 free_dominance_info (CDI_DOMINATORS
);
651 bitmap_obstack_release (NULL
);
659 input_location
= saved_loc
;
662 /* C++ frontend produce same body aliases all over the place, even before PCH
663 gets streamed out. It relies on us linking the aliases with their function
664 in order to do the fixups, but ipa-ref is not PCH safe. Consequently we
665 first produce aliases without links, but once the C++ FE is sure it won't stream
666 PCH we build the links via this function. */
669 symbol_table::process_same_body_aliases (void)
672 FOR_EACH_SYMBOL (node
)
673 if (node
->cpp_implicit_alias
&& !node
->analyzed
)
675 (TREE_CODE (node
->alias_target
) == VAR_DECL
676 ? (symtab_node
*)varpool_node::get_create (node
->alias_target
)
677 : (symtab_node
*)cgraph_node::get_create (node
->alias_target
));
678 cpp_implicit_aliases_done
= true;
681 /* Process attributes common for vars and functions. */
684 process_common_attributes (symtab_node
*node
, tree decl
)
686 tree weakref
= lookup_attribute ("weakref", DECL_ATTRIBUTES (decl
));
688 if (weakref
&& !lookup_attribute ("alias", DECL_ATTRIBUTES (decl
)))
690 warning_at (DECL_SOURCE_LOCATION (decl
), OPT_Wattributes
,
691 "%<weakref%> attribute should be accompanied with"
692 " an %<alias%> attribute");
693 DECL_WEAK (decl
) = 0;
694 DECL_ATTRIBUTES (decl
) = remove_attribute ("weakref",
695 DECL_ATTRIBUTES (decl
));
698 if (lookup_attribute ("no_reorder", DECL_ATTRIBUTES (decl
)))
699 node
->no_reorder
= 1;
702 /* Look for externally_visible and used attributes and mark cgraph nodes
705 We cannot mark the nodes at the point the attributes are processed (in
706 handle_*_attribute) because the copy of the declarations available at that
707 point may not be canonical. For example, in:
710 void f() __attribute__((used));
712 the declaration we see in handle_used_attribute will be the second
713 declaration -- but the front end will subsequently merge that declaration
714 with the original declaration and discard the second declaration.
716 Furthermore, we can't mark these nodes in finalize_function because:
719 void f() __attribute__((externally_visible));
723 So, we walk the nodes at the end of the translation unit, applying the
724 attributes at that point. */
727 process_function_and_variable_attributes (cgraph_node
*first
,
728 varpool_node
*first_var
)
733 for (node
= symtab
->first_function (); node
!= first
;
734 node
= symtab
->next_function (node
))
736 tree decl
= node
->decl
;
737 if (DECL_PRESERVE_P (decl
))
738 node
->mark_force_output ();
739 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl
)))
741 if (! TREE_PUBLIC (node
->decl
))
742 warning_at (DECL_SOURCE_LOCATION (node
->decl
), OPT_Wattributes
,
743 "%<externally_visible%>"
744 " attribute have effect only on public objects");
746 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl
))
747 && (node
->definition
&& !node
->alias
))
749 warning_at (DECL_SOURCE_LOCATION (node
->decl
), OPT_Wattributes
,
750 "%<weakref%> attribute ignored"
751 " because function is defined");
752 DECL_WEAK (decl
) = 0;
753 DECL_ATTRIBUTES (decl
) = remove_attribute ("weakref",
754 DECL_ATTRIBUTES (decl
));
757 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl
))
758 && !DECL_DECLARED_INLINE_P (decl
)
759 /* redefining extern inline function makes it DECL_UNINLINABLE. */
760 && !DECL_UNINLINABLE (decl
))
761 warning_at (DECL_SOURCE_LOCATION (decl
), OPT_Wattributes
,
762 "always_inline function might not be inlinable");
764 process_common_attributes (node
, decl
);
766 for (vnode
= symtab
->first_variable (); vnode
!= first_var
;
767 vnode
= symtab
->next_variable (vnode
))
769 tree decl
= vnode
->decl
;
770 if (DECL_EXTERNAL (decl
)
771 && DECL_INITIAL (decl
))
772 varpool_node::finalize_decl (decl
);
773 if (DECL_PRESERVE_P (decl
))
774 vnode
->force_output
= true;
775 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl
)))
777 if (! TREE_PUBLIC (vnode
->decl
))
778 warning_at (DECL_SOURCE_LOCATION (vnode
->decl
), OPT_Wattributes
,
779 "%<externally_visible%>"
780 " attribute have effect only on public objects");
782 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl
))
784 && DECL_INITIAL (decl
))
786 warning_at (DECL_SOURCE_LOCATION (vnode
->decl
), OPT_Wattributes
,
787 "%<weakref%> attribute ignored"
788 " because variable is initialized");
789 DECL_WEAK (decl
) = 0;
790 DECL_ATTRIBUTES (decl
) = remove_attribute ("weakref",
791 DECL_ATTRIBUTES (decl
));
793 process_common_attributes (vnode
, decl
);
797 /* Mark DECL as finalized. By finalizing the declaration, frontend instruct the
798 middle end to output the variable to asm file, if needed or externally
802 varpool_node::finalize_decl (tree decl
)
804 varpool_node
*node
= varpool_node::get_create (decl
);
806 gcc_assert (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
));
808 if (node
->definition
)
810 /* Set definition first before calling notice_global_symbol so that
811 it is available to notice_global_symbol. */
812 node
->definition
= true;
813 notice_global_symbol (decl
);
814 if (TREE_THIS_VOLATILE (decl
) || DECL_PRESERVE_P (decl
)
815 /* Traditionally we do not eliminate static variables when not
816 optimizing and when not doing toplevel reoder. */
818 || ((!flag_toplevel_reorder
819 && !DECL_COMDAT (node
->decl
)
820 && !DECL_ARTIFICIAL (node
->decl
))))
821 node
->force_output
= true;
823 if (symtab
->state
== CONSTRUCTION
824 && (node
->needed_p () || node
->referred_to_p ()))
826 if (symtab
->state
>= IPA_SSA
)
828 /* Some frontends produce various interface variables after compilation
830 if (symtab
->state
== FINISHED
831 || (!flag_toplevel_reorder
832 && symtab
->state
== EXPANSION
))
833 node
->assemble_decl ();
835 if (DECL_INITIAL (decl
))
836 chkp_register_var_initializer (decl
);
839 /* EDGE is a polymorphic call. Mark all possible targets as reachable
840 and if there is only one target, perform trivial devirtualization.
841 REACHABLE_CALL_TARGETS collects target lists we already walked to
842 avoid duplicate work. */
845 walk_polymorphic_call_targets (hash_set
<void *> *reachable_call_targets
,
851 vec
<cgraph_node
*>targets
852 = possible_polymorphic_call_targets
853 (edge
, &final
, &cache_token
);
855 if (!reachable_call_targets
->add (cache_token
))
857 if (symtab
->dump_file
)
858 dump_possible_polymorphic_call_targets
859 (symtab
->dump_file
, edge
);
861 for (i
= 0; i
< targets
.length (); i
++)
863 /* Do not bother to mark virtual methods in anonymous namespace;
864 either we will find use of virtual table defining it, or it is
866 if (targets
[i
]->definition
868 (TREE_TYPE (targets
[i
]->decl
))
870 && !type_in_anonymous_namespace_p
871 (TYPE_METHOD_BASETYPE (TREE_TYPE (targets
[i
]->decl
))))
872 enqueue_node (targets
[i
]);
876 /* Very trivial devirtualization; when the type is
877 final or anonymous (so we know all its derivation)
878 and there is only one possible virtual call target,
879 make the edge direct. */
882 if (targets
.length () <= 1 && dbg_cnt (devirt
))
885 if (targets
.length () == 1)
888 target
= cgraph_node::create
889 (builtin_decl_implicit (BUILT_IN_UNREACHABLE
));
891 if (symtab
->dump_file
)
893 fprintf (symtab
->dump_file
,
894 "Devirtualizing call: ");
895 print_gimple_stmt (symtab
->dump_file
,
899 if (dump_enabled_p ())
901 location_t locus
= gimple_location_safe (edge
->call_stmt
);
902 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, locus
,
903 "devirtualizing call in %s to %s\n",
904 edge
->caller
->name (), target
->name ());
907 edge
->make_direct (target
);
908 edge
->redirect_call_stmt_to_callee ();
910 /* Call to __builtin_unreachable shouldn't be instrumented. */
911 if (!targets
.length ())
912 gimple_call_set_with_bounds (edge
->call_stmt
, false);
914 if (symtab
->dump_file
)
916 fprintf (symtab
->dump_file
,
917 "Devirtualized as: ");
918 print_gimple_stmt (symtab
->dump_file
,
927 /* Discover all functions and variables that are trivially needed, analyze
928 them as well as all functions and variables referred by them */
929 static cgraph_node
*first_analyzed
;
930 static varpool_node
*first_analyzed_var
;
932 /* FIRST_TIME is set to TRUE for the first time we are called for a
933 translation unit from finalize_compilation_unit() or false
937 analyze_functions (bool first_time
)
939 /* Keep track of already processed nodes when called multiple times for
940 intermodule optimization. */
941 cgraph_node
*first_handled
= first_analyzed
;
942 varpool_node
*first_handled_var
= first_analyzed_var
;
943 hash_set
<void *> reachable_call_targets
;
950 location_t saved_loc
= input_location
;
952 bitmap_obstack_initialize (NULL
);
953 symtab
->state
= CONSTRUCTION
;
954 input_location
= UNKNOWN_LOCATION
;
956 /* Ugly, but the fixup cannot happen at the time the same body alias is created;
957 the C++ FE is confused about the COMDAT groups being right. */
958 if (symtab
->cpp_implicit_aliases_done
)
959 FOR_EACH_SYMBOL (node
)
960 if (node
->cpp_implicit_alias
)
961 node
->fixup_same_cpp_alias_visibility (node
->get_alias_target ());
962 build_type_inheritance_graph ();
964 /* Analysis adds static variables that in turn add references to new functions.
965 So we need to iterate the process until it stabilizes. */
969 process_function_and_variable_attributes (first_analyzed
,
972 /* First identify the trivially needed symbols. */
973 for (node
= symtab
->first_symbol ();
974 node
!= first_analyzed
975 && node
!= first_analyzed_var
; node
= node
->next
)
977 /* Convert COMDAT group designators to IDENTIFIER_NODEs. */
978 node
->get_comdat_group_id ();
979 if (node
->needed_p ())
982 if (!changed
&& symtab
->dump_file
)
983 fprintf (symtab
->dump_file
, "Trivially needed symbols:");
985 if (symtab
->dump_file
)
986 fprintf (symtab
->dump_file
, " %s", node
->asm_name ());
987 if (!changed
&& symtab
->dump_file
)
988 fprintf (symtab
->dump_file
, "\n");
990 if (node
== first_analyzed
991 || node
== first_analyzed_var
)
994 symtab
->process_new_functions ();
995 first_analyzed_var
= symtab
->first_variable ();
996 first_analyzed
= symtab
->first_function ();
998 if (changed
&& symtab
->dump_file
)
999 fprintf (symtab
->dump_file
, "\n");
1001 /* Lower representation, build callgraph edges and references for all trivially
1002 needed symbols and all symbols referred by them. */
1003 while (queued_nodes
!= &symtab_terminator
)
1006 node
= queued_nodes
;
1007 queued_nodes
= (symtab_node
*)queued_nodes
->aux
;
1008 cgraph_node
*cnode
= dyn_cast
<cgraph_node
*> (node
);
1009 if (cnode
&& cnode
->definition
)
1012 tree decl
= cnode
->decl
;
1014 /* ??? It is possible to create extern inline function
1015 and later using weak alias attribute to kill its body.
1016 See gcc.c-torture/compile/20011119-1.c */
1017 if (!DECL_STRUCT_FUNCTION (decl
)
1019 && !cnode
->thunk
.thunk_p
1020 && !cnode
->dispatcher_function
)
1023 cnode
->local
.redefined_extern_inline
= true;
1027 if (!cnode
->analyzed
)
1030 for (edge
= cnode
->callees
; edge
; edge
= edge
->next_callee
)
1031 if (edge
->callee
->definition
1032 && (!DECL_EXTERNAL (edge
->callee
->decl
)
1033 /* When not optimizing, do not try to analyze extern
1034 inline functions. Doing so is pointless. */
1035 || opt_for_fn (edge
->callee
->decl
, optimize
)
1036 /* Weakrefs needs to be preserved. */
1037 || edge
->callee
->alias
1038 /* always_inline functions are inlined even at -O0. */
1041 DECL_ATTRIBUTES (edge
->callee
->decl
))
1042 /* Multiversioned functions needs the dispatcher to
1043 be produced locally even for extern functions. */
1044 || edge
->callee
->function_version ()))
1045 enqueue_node (edge
->callee
);
1046 if (opt_for_fn (cnode
->decl
, optimize
)
1047 && opt_for_fn (cnode
->decl
, flag_devirtualize
))
1051 for (edge
= cnode
->indirect_calls
; edge
; edge
= next
)
1053 next
= edge
->next_callee
;
1054 if (edge
->indirect_info
->polymorphic
)
1055 walk_polymorphic_call_targets (&reachable_call_targets
,
1060 /* If decl is a clone of an abstract function,
1061 mark that abstract function so that we don't release its body.
1062 The DECL_INITIAL() of that abstract function declaration
1063 will be later needed to output debug info. */
1064 if (DECL_ABSTRACT_ORIGIN (decl
))
1066 cgraph_node
*origin_node
1067 = cgraph_node::get_create (DECL_ABSTRACT_ORIGIN (decl
));
1068 origin_node
->used_as_abstract_origin
= true;
1073 varpool_node
*vnode
= dyn_cast
<varpool_node
*> (node
);
1074 if (vnode
&& vnode
->definition
&& !vnode
->analyzed
)
1078 if (node
->same_comdat_group
)
1081 for (next
= node
->same_comdat_group
;
1083 next
= next
->same_comdat_group
)
1084 if (!next
->comdat_local_p ())
1085 enqueue_node (next
);
1087 for (i
= 0; node
->iterate_reference (i
, ref
); i
++)
1088 if (ref
->referred
->definition
1089 && (!DECL_EXTERNAL (ref
->referred
->decl
)
1090 || ((TREE_CODE (ref
->referred
->decl
) != FUNCTION_DECL
1092 || (TREE_CODE (ref
->referred
->decl
) == FUNCTION_DECL
1093 && opt_for_fn (ref
->referred
->decl
, optimize
))
1095 || ref
->referred
->alias
)))
1096 enqueue_node (ref
->referred
);
1097 symtab
->process_new_functions ();
1100 update_type_inheritance_graph ();
1102 /* Collect entry points to the unit. */
1103 if (symtab
->dump_file
)
1105 fprintf (symtab
->dump_file
, "\n\nInitial ");
1106 symtab_node::dump_table (symtab
->dump_file
);
1112 FOR_EACH_SYMBOL (snode
)
1113 check_global_declaration (snode
->decl
);
1116 if (symtab
->dump_file
)
1117 fprintf (symtab
->dump_file
, "\nRemoving unused symbols:");
1119 for (node
= symtab
->first_symbol ();
1120 node
!= first_handled
1121 && node
!= first_handled_var
; node
= next
)
1124 if (!node
->aux
&& !node
->referred_to_p ())
1126 if (symtab
->dump_file
)
1127 fprintf (symtab
->dump_file
, " %s", node
->name ());
1129 /* See if the debugger can use anything before the DECL
1130 passes away. Perhaps it can notice a DECL that is now a
1131 constant and can tag the early DIE with an appropriate
1134 Otherwise, this is the last chance the debug_hooks have
1135 at looking at optimized away DECLs, since
1136 late_global_decl will subsequently be called from the
1137 contents of the now pruned symbol table. */
1138 if (!decl_function_context (node
->decl
))
1139 (*debug_hooks
->late_global_decl
) (node
->decl
);
1144 if (cgraph_node
*cnode
= dyn_cast
<cgraph_node
*> (node
))
1146 tree decl
= node
->decl
;
1148 if (cnode
->definition
&& !gimple_has_body_p (decl
)
1150 && !cnode
->thunk
.thunk_p
)
1153 gcc_assert (!cnode
->definition
|| cnode
->thunk
.thunk_p
1155 || gimple_has_body_p (decl
));
1156 gcc_assert (cnode
->analyzed
== cnode
->definition
);
1160 for (;node
; node
= node
->next
)
1162 first_analyzed
= symtab
->first_function ();
1163 first_analyzed_var
= symtab
->first_variable ();
1164 if (symtab
->dump_file
)
1166 fprintf (symtab
->dump_file
, "\n\nReclaimed ");
1167 symtab_node::dump_table (symtab
->dump_file
);
1169 bitmap_obstack_release (NULL
);
1171 /* Initialize assembler name hash, in particular we want to trigger C++
1172 mangling and same body alias creation before we free DECL_ARGUMENTS
1175 symtab
->symtab_initialize_asm_name_hash ();
1177 input_location
= saved_loc
;
1180 /* Translate the ugly representation of aliases as alias pairs into nice
1181 representation in callgraph. We don't handle all cases yet,
1185 handle_alias_pairs (void)
1190 for (i
= 0; alias_pairs
&& alias_pairs
->iterate (i
, &p
);)
1192 symtab_node
*target_node
= symtab_node::get_for_asmname (p
->target
);
1194 /* Weakrefs with target not defined in current unit are easy to handle:
1195 they behave just as external variables except we need to note the
1196 alias flag to later output the weakref pseudo op into asm file. */
1198 && lookup_attribute ("weakref", DECL_ATTRIBUTES (p
->decl
)) != NULL
)
1200 symtab_node
*node
= symtab_node::get (p
->decl
);
1203 node
->alias_target
= p
->target
;
1204 node
->weakref
= true;
1207 alias_pairs
->unordered_remove (i
);
1210 else if (!target_node
)
1212 error ("%q+D aliased to undefined symbol %qE", p
->decl
, p
->target
);
1213 symtab_node
*node
= symtab_node::get (p
->decl
);
1215 node
->alias
= false;
1216 alias_pairs
->unordered_remove (i
);
1220 if (DECL_EXTERNAL (target_node
->decl
)
1221 /* We use local aliases for C++ thunks to force the tailcall
1222 to bind locally. This is a hack - to keep it working do
1223 the following (which is not strictly correct). */
1224 && (TREE_CODE (target_node
->decl
) != FUNCTION_DECL
1225 || ! DECL_VIRTUAL_P (target_node
->decl
))
1226 && ! lookup_attribute ("weakref", DECL_ATTRIBUTES (p
->decl
)))
1228 error ("%q+D aliased to external symbol %qE",
1229 p
->decl
, p
->target
);
1232 if (TREE_CODE (p
->decl
) == FUNCTION_DECL
1233 && target_node
&& is_a
<cgraph_node
*> (target_node
))
1235 cgraph_node
*src_node
= cgraph_node::get (p
->decl
);
1236 if (src_node
&& src_node
->definition
)
1238 cgraph_node::create_alias (p
->decl
, target_node
->decl
);
1239 alias_pairs
->unordered_remove (i
);
1241 else if (TREE_CODE (p
->decl
) == VAR_DECL
1242 && target_node
&& is_a
<varpool_node
*> (target_node
))
1244 varpool_node::create_alias (p
->decl
, target_node
->decl
);
1245 alias_pairs
->unordered_remove (i
);
1249 error ("%q+D alias in between function and variable is not supported",
1251 warning (0, "%q+D aliased declaration",
1253 alias_pairs
->unordered_remove (i
);
1256 vec_free (alias_pairs
);
1260 /* Figure out what functions we want to assemble. */
1263 mark_functions_to_output (void)
1266 #ifdef ENABLE_CHECKING
1267 bool check_same_comdat_groups
= false;
1269 FOR_EACH_FUNCTION (node
)
1270 gcc_assert (!node
->process
);
1273 FOR_EACH_FUNCTION (node
)
1275 tree decl
= node
->decl
;
1277 gcc_assert (!node
->process
|| node
->same_comdat_group
);
1281 /* We need to output all local functions that are used and not
1282 always inlined, as well as those that are reachable from
1283 outside the current compilation unit. */
1285 && !node
->thunk
.thunk_p
1287 && !node
->global
.inlined_to
1288 && !TREE_ASM_WRITTEN (decl
)
1289 && !DECL_EXTERNAL (decl
))
1292 if (node
->same_comdat_group
)
1295 for (next
= dyn_cast
<cgraph_node
*> (node
->same_comdat_group
);
1297 next
= dyn_cast
<cgraph_node
*> (next
->same_comdat_group
))
1298 if (!next
->thunk
.thunk_p
&& !next
->alias
1299 && !next
->comdat_local_p ())
1303 else if (node
->same_comdat_group
)
1305 #ifdef ENABLE_CHECKING
1306 check_same_comdat_groups
= true;
1311 /* We should've reclaimed all functions that are not needed. */
1312 #ifdef ENABLE_CHECKING
1313 if (!node
->global
.inlined_to
1314 && gimple_has_body_p (decl
)
1315 /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
1316 are inside partition, we can end up not removing the body since we no longer
1317 have analyzed node pointing to it. */
1318 && !node
->in_other_partition
1321 && !DECL_EXTERNAL (decl
))
1324 internal_error ("failed to reclaim unneeded function");
1327 gcc_assert (node
->global
.inlined_to
1328 || !gimple_has_body_p (decl
)
1329 || node
->in_other_partition
1331 || DECL_ARTIFICIAL (decl
)
1332 || DECL_EXTERNAL (decl
));
1337 #ifdef ENABLE_CHECKING
1338 if (check_same_comdat_groups
)
1339 FOR_EACH_FUNCTION (node
)
1340 if (node
->same_comdat_group
&& !node
->process
)
1342 tree decl
= node
->decl
;
1343 if (!node
->global
.inlined_to
1344 && gimple_has_body_p (decl
)
1345 /* FIXME: in an ltrans unit when the offline copy is outside a
1346 partition but inline copies are inside a partition, we can
1347 end up not removing the body since we no longer have an
1348 analyzed node pointing to it. */
1349 && !node
->in_other_partition
1351 && !DECL_EXTERNAL (decl
))
1354 internal_error ("failed to reclaim unneeded function in same "
1361 /* DECL is FUNCTION_DECL. Initialize datastructures so DECL is a function
1362 in lowered gimple form. IN_SSA is true if the gimple is in SSA.
1364 Set current_function_decl and cfun to newly constructed empty function body.
1365 return basic block in the function body. */
/* NOTE(review): extraction dropped the return-type line, braces and several
   statements here (original lines 1369-1372, 1377-1379, 1384-1385, 1387,
   1391, 1395, 1402, 1405, 1408, 1411-1414 are absent) — compare against the
   upstream cgraphunit.c before relying on this text.  */
1368 init_lowered_empty_function (tree decl
, bool in_ssa
, gcov_type count
)
/* Make DECL the function being compiled and give it an empty CFG.  */
1373 current_function_decl
= decl
;
1374 allocate_struct_function (decl
, false);
1375 gimple_register_cfg_hooks ();
1376 init_empty_tree_cfg ();
/* SSA machinery setup.  NOTE(review): presumably guarded by IN_SSA in the
   original (the guard line is missing from this extraction) — confirm.  */
1380 init_tree_ssa (cfun
);
1381 init_ssa_operands (cfun
);
1382 cfun
->gimple_df
->in_ssa_p
= true;
1383 cfun
->curr_properties
|= PROP_ssa
;
/* Mark the body as already lowered/gimplified so no lowering passes rerun.  */
1386 DECL_INITIAL (decl
) = make_node (BLOCK
);
1388 DECL_SAVED_TREE (decl
) = error_mark_node
;
1389 cfun
->curr_properties
|= (PROP_gimple_lcf
| PROP_gimple_leh
| PROP_gimple_any
1390 | PROP_cfg
| PROP_loops
);
/* Build a trivial loop tree so PROP_loops holds.  */
1392 set_loops_for_fn (cfun
, ggc_cleared_alloc
<loops
> ());
1393 init_loops_structure (cfun
, loops_for_fn (cfun
), 1);
1394 loops_for_fn (cfun
)->state
|= LOOPS_MAY_HAVE_MULTIPLE_LATCHES
;
1396 /* Create BB for body of the function and connect it properly. */
/* Seed entry/exit profile with COUNT and maximal frequency.  */
1397 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
= count
;
1398 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->frequency
= REG_BR_PROB_BASE
;
1399 EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
= count
;
1400 EXIT_BLOCK_PTR_FOR_FN (cfun
)->frequency
= REG_BR_PROB_BASE
;
/* Single body block: ENTRY -> bb -> EXIT, both edges always taken.  */
1401 bb
= create_basic_block (NULL
, ENTRY_BLOCK_PTR_FOR_FN (cfun
));
1403 bb
->frequency
= BB_FREQ_MAX
;
1404 e
= make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
), bb
, EDGE_FALLTHRU
);
1406 e
->probability
= REG_BR_PROB_BASE
;
1407 e
= make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
1409 e
->probability
= REG_BR_PROB_BASE
;
1410 add_bb_to_loop (bb
, ENTRY_BLOCK_PTR_FOR_FN (cfun
)->loop_father
);
1415 /* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1416 offset indicated by VIRTUAL_OFFSET, if that is
1417 non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
1418 zero for a result adjusting thunk. */
1421 thunk_adjust (gimple_stmt_iterator
* bsi
,
1422 tree ptr
, bool this_adjusting
,
1423 HOST_WIDE_INT fixed_offset
, tree virtual_offset
)
1429 && fixed_offset
!= 0)
1431 stmt
= gimple_build_assign
1432 (ptr
, fold_build_pointer_plus_hwi_loc (input_location
,
1435 gsi_insert_after (bsi
, stmt
, GSI_NEW_STMT
);
1438 /* If there's a virtual offset, look up that value in the vtable and
1439 adjust the pointer again. */
1446 if (!vtable_entry_type
)
1448 tree vfunc_type
= make_node (FUNCTION_TYPE
);
1449 TREE_TYPE (vfunc_type
) = integer_type_node
;
1450 TYPE_ARG_TYPES (vfunc_type
) = NULL_TREE
;
1451 layout_type (vfunc_type
);
1453 vtable_entry_type
= build_pointer_type (vfunc_type
);
1457 create_tmp_reg (build_pointer_type
1458 (build_pointer_type (vtable_entry_type
)), "vptr");
1460 /* The vptr is always at offset zero in the object. */
1461 stmt
= gimple_build_assign (vtabletmp
,
1462 build1 (NOP_EXPR
, TREE_TYPE (vtabletmp
),
1464 gsi_insert_after (bsi
, stmt
, GSI_NEW_STMT
);
1466 /* Form the vtable address. */
1467 vtabletmp2
= create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp
)),
1469 stmt
= gimple_build_assign (vtabletmp2
,
1470 build_simple_mem_ref (vtabletmp
));
1471 gsi_insert_after (bsi
, stmt
, GSI_NEW_STMT
);
1473 /* Find the entry with the vcall offset. */
1474 stmt
= gimple_build_assign (vtabletmp2
,
1475 fold_build_pointer_plus_loc (input_location
,
1478 gsi_insert_after (bsi
, stmt
, GSI_NEW_STMT
);
1480 /* Get the offset itself. */
1481 vtabletmp3
= create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp2
)),
1483 stmt
= gimple_build_assign (vtabletmp3
,
1484 build_simple_mem_ref (vtabletmp2
));
1485 gsi_insert_after (bsi
, stmt
, GSI_NEW_STMT
);
1487 /* Adjust the `this' pointer. */
1488 ptr
= fold_build_pointer_plus_loc (input_location
, ptr
, vtabletmp3
);
1489 ptr
= force_gimple_operand_gsi (bsi
, ptr
, true, NULL_TREE
, false,
1490 GSI_CONTINUE_LINKING
);
1494 && fixed_offset
!= 0)
1495 /* Adjust the pointer by the constant. */
1499 if (TREE_CODE (ptr
) == VAR_DECL
)
1503 ptrtmp
= create_tmp_reg (TREE_TYPE (ptr
), "ptr");
1504 stmt
= gimple_build_assign (ptrtmp
, ptr
);
1505 gsi_insert_after (bsi
, stmt
, GSI_NEW_STMT
);
1507 ptr
= fold_build_pointer_plus_hwi_loc (input_location
,
1508 ptrtmp
, fixed_offset
);
1511 /* Emit the statement and gimplify the adjustment expression. */
1512 ret
= create_tmp_reg (TREE_TYPE (ptr
), "adjusted_this");
1513 stmt
= gimple_build_assign (ret
, ptr
);
1514 gsi_insert_after (bsi
, stmt
, GSI_NEW_STMT
);
1519 /* Expand thunk NODE to gimple if possible.
1520 When FORCE_GIMPLE_THUNK is true, gimple thunk is created and
1521 no assembler is produced.
1522 When OUTPUT_ASM_THUNK is true, also produce assembler for
1523 thunks that are not lowered. */
1526 cgraph_node::expand_thunk (bool output_asm_thunks
, bool force_gimple_thunk
)
1528 bool this_adjusting
= thunk
.this_adjusting
;
1529 HOST_WIDE_INT fixed_offset
= thunk
.fixed_offset
;
1530 HOST_WIDE_INT virtual_value
= thunk
.virtual_value
;
1531 tree virtual_offset
= NULL
;
1532 tree alias
= callees
->callee
->decl
;
1533 tree thunk_fndecl
= decl
;
1536 /* Instrumentation thunk is the same function with
1537 a different signature. Never need to expand it. */
1538 if (thunk
.add_pointer_bounds_args
)
1541 if (!force_gimple_thunk
&& this_adjusting
1542 && targetm
.asm_out
.can_output_mi_thunk (thunk_fndecl
, fixed_offset
,
1543 virtual_value
, alias
))
1547 tree restype
= TREE_TYPE (TREE_TYPE (thunk_fndecl
));
1549 if (!output_asm_thunks
)
1556 get_untransformed_body ();
1557 a
= DECL_ARGUMENTS (thunk_fndecl
);
1559 current_function_decl
= thunk_fndecl
;
1561 /* Ensure thunks are emitted in their correct sections. */
1562 resolve_unique_section (thunk_fndecl
, 0,
1563 flag_function_sections
);
1565 DECL_RESULT (thunk_fndecl
)
1566 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl
),
1567 RESULT_DECL
, 0, restype
);
1568 DECL_CONTEXT (DECL_RESULT (thunk_fndecl
)) = thunk_fndecl
;
1569 fnname
= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl
));
1571 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1573 fn_block
= make_node (BLOCK
);
1574 BLOCK_VARS (fn_block
) = a
;
1575 DECL_INITIAL (thunk_fndecl
) = fn_block
;
1576 init_function_start (thunk_fndecl
);
1578 insn_locations_init ();
1579 set_curr_insn_location (DECL_SOURCE_LOCATION (thunk_fndecl
));
1580 prologue_location
= curr_insn_location ();
1581 assemble_start_function (thunk_fndecl
, fnname
);
1583 targetm
.asm_out
.output_mi_thunk (asm_out_file
, thunk_fndecl
,
1584 fixed_offset
, virtual_value
, alias
);
1586 assemble_end_function (thunk_fndecl
, fnname
);
1587 insn_locations_finalize ();
1588 init_insn_lengths ();
1589 free_after_compilation (cfun
);
1591 TREE_ASM_WRITTEN (thunk_fndecl
) = 1;
1592 thunk
.thunk_p
= false;
1595 else if (stdarg_p (TREE_TYPE (thunk_fndecl
)))
1597 error ("generic thunk code fails for method %qD which uses %<...%>",
1599 TREE_ASM_WRITTEN (thunk_fndecl
) = 1;
1606 basic_block bb
, then_bb
, else_bb
, return_bb
;
1607 gimple_stmt_iterator bsi
;
1617 bool alias_is_noreturn
= TREE_THIS_VOLATILE (alias
);
1620 get_untransformed_body ();
1621 a
= DECL_ARGUMENTS (thunk_fndecl
);
1623 current_function_decl
= thunk_fndecl
;
1625 /* Ensure thunks are emitted in their correct sections. */
1626 resolve_unique_section (thunk_fndecl
, 0,
1627 flag_function_sections
);
1629 DECL_IGNORED_P (thunk_fndecl
) = 1;
1630 bitmap_obstack_initialize (NULL
);
1632 if (thunk
.virtual_offset_p
)
1633 virtual_offset
= size_int (virtual_value
);
1635 /* Build the return declaration for the function. */
1636 restype
= TREE_TYPE (TREE_TYPE (thunk_fndecl
));
1637 if (DECL_RESULT (thunk_fndecl
) == NULL_TREE
)
1639 resdecl
= build_decl (input_location
, RESULT_DECL
, 0, restype
);
1640 DECL_ARTIFICIAL (resdecl
) = 1;
1641 DECL_IGNORED_P (resdecl
) = 1;
1642 DECL_RESULT (thunk_fndecl
) = resdecl
;
1643 DECL_CONTEXT (DECL_RESULT (thunk_fndecl
)) = thunk_fndecl
;
1646 resdecl
= DECL_RESULT (thunk_fndecl
);
1648 bb
= then_bb
= else_bb
= return_bb
1649 = init_lowered_empty_function (thunk_fndecl
, true, count
);
1651 bsi
= gsi_start_bb (bb
);
1653 /* Build call to the function being thunked. */
1654 if (!VOID_TYPE_P (restype
) && !alias_is_noreturn
)
1656 if (DECL_BY_REFERENCE (resdecl
))
1658 restmp
= gimple_fold_indirect_ref (resdecl
);
1660 restmp
= build2 (MEM_REF
,
1661 TREE_TYPE (TREE_TYPE (DECL_RESULT (alias
))),
1663 build_int_cst (TREE_TYPE
1664 (DECL_RESULT (alias
)), 0));
1666 else if (!is_gimple_reg_type (restype
))
1668 if (aggregate_value_p (resdecl
, TREE_TYPE (thunk_fndecl
)))
1672 if (TREE_CODE (restmp
) == VAR_DECL
)
1673 add_local_decl (cfun
, restmp
);
1674 BLOCK_VARS (DECL_INITIAL (current_function_decl
)) = restmp
;
1677 restmp
= create_tmp_var (restype
, "retval");
1680 restmp
= create_tmp_reg (restype
, "retval");
1683 for (arg
= a
; arg
; arg
= DECL_CHAIN (arg
))
1685 auto_vec
<tree
> vargs (nargs
);
1690 vargs
.quick_push (thunk_adjust (&bsi
, a
, 1, fixed_offset
,
1692 arg
= DECL_CHAIN (a
);
1697 for (; i
< nargs
; i
++, arg
= DECL_CHAIN (arg
))
1700 if (!is_gimple_val (arg
))
1702 tmp
= create_tmp_reg (TYPE_MAIN_VARIANT
1703 (TREE_TYPE (arg
)), "arg");
1704 gimple stmt
= gimple_build_assign (tmp
, arg
);
1705 gsi_insert_after (&bsi
, stmt
, GSI_NEW_STMT
);
1707 vargs
.quick_push (tmp
);
1709 call
= gimple_build_call_vec (build_fold_addr_expr_loc (0, alias
), vargs
);
1710 callees
->call_stmt
= call
;
1711 gimple_call_set_from_thunk (call
, true);
1712 gimple_call_set_with_bounds (call
, instrumentation_clone
);
1714 /* Return slot optimization is always possible and in fact requred to
1715 return values with DECL_BY_REFERENCE. */
1716 if (aggregate_value_p (resdecl
, TREE_TYPE (thunk_fndecl
))
1717 && (!is_gimple_reg_type (TREE_TYPE (resdecl
))
1718 || DECL_BY_REFERENCE (resdecl
)))
1719 gimple_call_set_return_slot_opt (call
, true);
1721 if (restmp
&& !alias_is_noreturn
)
1723 gimple_call_set_lhs (call
, restmp
);
1724 gcc_assert (useless_type_conversion_p (TREE_TYPE (restmp
),
1725 TREE_TYPE (TREE_TYPE (alias
))));
1727 gsi_insert_after (&bsi
, call
, GSI_NEW_STMT
);
1728 if (!alias_is_noreturn
)
1730 if (instrumentation_clone
1731 && !DECL_BY_REFERENCE (resdecl
)
1733 && BOUNDED_P (restmp
))
1735 resbnd
= chkp_insert_retbnd_call (NULL
, restmp
, &bsi
);
1736 create_edge (get_create (gimple_call_fndecl (gsi_stmt (bsi
))),
1737 as_a
<gcall
*> (gsi_stmt (bsi
)),
1738 callees
->count
, callees
->frequency
);
1741 if (restmp
&& !this_adjusting
1742 && (fixed_offset
|| virtual_offset
))
1744 tree true_label
= NULL_TREE
;
1746 if (TREE_CODE (TREE_TYPE (restmp
)) == POINTER_TYPE
)
1750 /* If the return type is a pointer, we need to
1751 protect against NULL. We know there will be an
1752 adjustment, because that's why we're emitting a
1754 then_bb
= create_basic_block (NULL
, bb
);
1755 then_bb
->count
= count
- count
/ 16;
1756 then_bb
->frequency
= BB_FREQ_MAX
- BB_FREQ_MAX
/ 16;
1757 return_bb
= create_basic_block (NULL
, then_bb
);
1758 return_bb
->count
= count
;
1759 return_bb
->frequency
= BB_FREQ_MAX
;
1760 else_bb
= create_basic_block (NULL
, else_bb
);
1761 then_bb
->count
= count
/ 16;
1762 then_bb
->frequency
= BB_FREQ_MAX
/ 16;
1763 add_bb_to_loop (then_bb
, bb
->loop_father
);
1764 add_bb_to_loop (return_bb
, bb
->loop_father
);
1765 add_bb_to_loop (else_bb
, bb
->loop_father
);
1766 remove_edge (single_succ_edge (bb
));
1767 true_label
= gimple_block_label (then_bb
);
1768 stmt
= gimple_build_cond (NE_EXPR
, restmp
,
1769 build_zero_cst (TREE_TYPE (restmp
)),
1770 NULL_TREE
, NULL_TREE
);
1771 gsi_insert_after (&bsi
, stmt
, GSI_NEW_STMT
);
1772 e
= make_edge (bb
, then_bb
, EDGE_TRUE_VALUE
);
1773 e
->probability
= REG_BR_PROB_BASE
- REG_BR_PROB_BASE
/ 16;
1774 e
->count
= count
- count
/ 16;
1775 e
= make_edge (bb
, else_bb
, EDGE_FALSE_VALUE
);
1776 e
->probability
= REG_BR_PROB_BASE
/ 16;
1777 e
->count
= count
/ 16;
1778 e
= make_edge (return_bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
1779 e
->probability
= REG_BR_PROB_BASE
;
1781 e
= make_edge (then_bb
, return_bb
, EDGE_FALLTHRU
);
1782 e
->probability
= REG_BR_PROB_BASE
;
1783 e
->count
= count
- count
/ 16;
1784 e
= make_edge (else_bb
, return_bb
, EDGE_FALLTHRU
);
1785 e
->probability
= REG_BR_PROB_BASE
;
1786 e
->count
= count
/ 16;
1787 bsi
= gsi_last_bb (then_bb
);
1790 restmp
= thunk_adjust (&bsi
, restmp
, /*this_adjusting=*/0,
1791 fixed_offset
, virtual_offset
);
1795 bsi
= gsi_last_bb (else_bb
);
1796 stmt
= gimple_build_assign (restmp
,
1797 build_zero_cst (TREE_TYPE (restmp
)));
1798 gsi_insert_after (&bsi
, stmt
, GSI_NEW_STMT
);
1799 bsi
= gsi_last_bb (return_bb
);
1803 gimple_call_set_tail (call
, true);
1805 /* Build return value. */
1806 if (!DECL_BY_REFERENCE (resdecl
))
1807 ret
= gimple_build_return (restmp
);
1809 ret
= gimple_build_return (resdecl
);
1810 gimple_return_set_retbnd (ret
, resbnd
);
1812 gsi_insert_after (&bsi
, ret
, GSI_NEW_STMT
);
1816 gimple_call_set_tail (call
, true);
1817 remove_edge (single_succ_edge (bb
));
1820 cfun
->gimple_df
->in_ssa_p
= true;
1821 profile_status_for_fn (cfun
)
1822 = count
? PROFILE_READ
: PROFILE_GUESSED
;
1823 /* FIXME: C++ FE should stop setting TREE_ASM_WRITTEN on thunks. */
1824 TREE_ASM_WRITTEN (thunk_fndecl
) = false;
1825 delete_unreachable_blocks ();
1826 update_ssa (TODO_update_ssa
);
1827 #ifdef ENABLE_CHECKING
1828 verify_flow_info ();
1830 free_dominance_info (CDI_DOMINATORS
);
1832 /* Since we want to emit the thunk, we explicitly mark its name as
1834 thunk
.thunk_p
= false;
1836 bitmap_obstack_release (NULL
);
1838 current_function_decl
= NULL
;
1843 /* Assemble thunks and aliases associated to node. */
/* NOTE(review): extraction dropped the return type, braces and local
   declarations (original lines 1844-1850, 1854, 1856-1857, 1860-1863,
   1865, 1868, 1876-1878 absent) — verify against upstream.
   Recurses: each emitted thunk/alias may itself carry thunks/aliases.  */
1846 cgraph_node::assemble_thunks_and_aliases (void)
/* Walk caller edges; thunks appear as callers of this node.
   Instrumentation thunks (add_pointer_bounds_args) are not expanded here.  */
1851 for (e
= callers
; e
;)
1852 if (e
->caller
->thunk
.thunk_p
1853 && !e
->caller
->thunk
.add_pointer_bounds_args
)
1855 cgraph_node
*thunk
= e
->caller
;
/* Expand the thunk to assembler, then its own thunks/aliases.  */
1858 thunk
->expand_thunk (true, false);
1859 thunk
->assemble_thunks_and_aliases ();
/* Emit every alias whose target is this node.  */
1864 FOR_EACH_ALIAS (this, ref
)
1866 cgraph_node
*alias
= dyn_cast
<cgraph_node
*> (ref
->referring
);
1867 bool saved_written
= TREE_ASM_WRITTEN (decl
);
1869 /* Force assemble_alias to really output the alias this time instead
1870 of buffering it in same alias pairs. */
1871 TREE_ASM_WRITTEN (decl
) = 1;
1872 do_assemble_alias (alias
->decl
,
1873 DECL_ASSEMBLER_NAME (decl
));
1874 alias
->assemble_thunks_and_aliases ();
/* Restore the flag so later logic still sees the real "written" state.  */
1875 TREE_ASM_WRITTEN (decl
) = saved_written
;
1879 /* Expand function specified by node. */
/* NOTE(review): extraction dropped braces and several lines (e.g. original
   1880-1883, 1885, 1934-1936, 1940-1941, 1947-1948, 1951-1953, 1957,
   1965-1966, 1968-1969, 1987-1990 absent) — verify against upstream.
   Drives a single function through the gimple pass pipeline down to
   assembler output, then releases its body.  */
1882 cgraph_node::expand (void)
1884 location_t saved_loc
;
1886 /* We ought to not compile any inline clones. */
1887 gcc_assert (!global
.inlined_to
);
1889 announce_function (decl
);
/* Body must already be lowered; fetch the pre-transform gimple body.  */
1891 gcc_assert (lowered
);
1892 get_untransformed_body ();
1894 /* Generate RTL for the body of DECL. */
1896 timevar_push (TV_REST_OF_COMPILATION
);
1898 gcc_assert (symtab
->global_info_ready
);
1900 /* Initialize the default bitmap obstack. */
1901 bitmap_obstack_initialize (NULL
);
1903 /* Initialize the RTL code for the function. */
1904 current_function_decl
= decl
;
1905 saved_loc
= input_location
;
1906 input_location
= DECL_SOURCE_LOCATION (decl
);
1907 init_function_start (decl
);
1909 gimple_register_cfg_hooks ();
/* NOTE(review): "®_obstack" below is mojibake for "&reg_obstack"
   (HTML-entity corruption in this extraction) — restore from upstream.  */
1911 bitmap_obstack_initialize (®_obstack
); /* FIXME, only at RTL generation*/
/* Apply queued IPA transform summaries to this body first.  */
1913 execute_all_ipa_transforms ();
1915 /* Perform all tree transforms and optimizations. */
1917 /* Signal the start of passes. */
1918 invoke_plugin_callbacks (PLUGIN_ALL_PASSES_START
, NULL
);
1920 execute_pass_list (cfun
, g
->get_passes ()->all_passes
);
1922 /* Signal the end of passes. */
1923 invoke_plugin_callbacks (PLUGIN_ALL_PASSES_END
, NULL
);
/* Same mojibake as above: should read "&reg_obstack".  */
1925 bitmap_obstack_release (®_obstack
);
1927 /* Release the default bitmap obstack. */
1928 bitmap_obstack_release (NULL
);
1930 /* If requested, warn about function definitions where the function will
1931 return a value (usually of some struct or union type) which itself will
1932 take up a lot of stack space. */
1933 if (warn_larger_than
&& !DECL_EXTERNAL (decl
) && TREE_TYPE (decl
))
1935 tree ret_type
= TREE_TYPE (TREE_TYPE (decl
));
1937 if (ret_type
&& TYPE_SIZE_UNIT (ret_type
)
1938 && TREE_CODE (TYPE_SIZE_UNIT (ret_type
)) == INTEGER_CST
1939 && 0 < compare_tree_int (TYPE_SIZE_UNIT (ret_type
),
/* Report the exact size when it fits in unsigned int, else a "larger
   than" message (second warning's decl/size args follow below).  */
1942 unsigned int size_as_int
1943 = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type
));
1945 if (compare_tree_int (TYPE_SIZE_UNIT (ret_type
), size_as_int
) == 0)
1946 warning (OPT_Wlarger_than_
, "size of return value of %q+D is %u bytes",
1949 warning (OPT_Wlarger_than_
, "size of return value of %q+D is larger than %wd bytes",
1950 decl
, larger_than_size
);
/* Free the gimple body now that assembler has been produced.  */
1954 gimple_set_body (decl
, NULL
);
1955 if (DECL_STRUCT_FUNCTION (decl
) == 0
1956 && !cgraph_node::get (decl
)->origin
)
1958 /* Stop pointing to the local nodes about to be freed.
1959 But DECL_INITIAL must remain nonzero so we know this
1960 was an actual function definition.
1961 For a nested function, this is done in c_pop_function_context.
1962 If rest_of_compilation set this to 0, leave it 0. */
1963 if (DECL_INITIAL (decl
) != 0)
1964 DECL_INITIAL (decl
) = error_mark_node
;
1967 input_location
= saved_loc
;
1970 timevar_pop (TV_REST_OF_COMPILATION
);
1972 /* Make sure that BE didn't give up on compiling. */
1973 gcc_assert (TREE_ASM_WRITTEN (decl
));
1975 current_function_decl
= NULL
;
1977 /* It would make a lot more sense to output thunks before function body to get more
1978 forward and lest backwarding jumps. This however would need solving problem
1979 with comdats. See PR48668. Also aliases must come after function itself to
1980 make one pass assemblers, like one on AIX, happy. See PR 50689.
1981 FIXME: Perhaps thunks should be move before function IFF they are not in comdat
1983 assemble_thunks_and_aliases ();
1985 /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
1986 points to the dead function body. */
1988 remove_all_references ();
1991 /* Node comparer that is responsible for the order that corresponds
1992 to time when a function was launched for the first time. */
/* NOTE(review): qsort-style comparator over cgraph_node* const* used by
   expand_all_functions under -fprofile-reorder-functions.  Extraction
   dropped the return-type line and braces (original 1993-1994, 1996,
   1999, 2003, 2007).  */
1995 node_cmp (const void *pa
, const void *pb
)
1997 const cgraph_node
*a
= *(const cgraph_node
* const *) pa
;
1998 const cgraph_node
*b
= *(const cgraph_node
* const *) pb
;
2000 /* Functions with time profile must be before these without profile. */
/* If either node lacks tp_first_run (== 0), order by the raw difference
   so profiled nodes sort relative to unprofiled ones.  */
2001 if (!a
->tp_first_run
|| !b
->tp_first_run
)
2002 return a
->tp_first_run
- b
->tp_first_run
;
/* Otherwise later first-run times sort first; ties break on node order.
   NOTE(review): both branches use b-minus-a, i.e. descending — confirm
   this inversion relative to the branch above is intended upstream.  */
2004 return a
->tp_first_run
!= b
->tp_first_run
2005 ? b
->tp_first_run
- a
->tp_first_run
2006 : b
->order
- a
->order
;
2009 /* Expand all functions that must be output.
2011 Attempt to topologically sort the nodes so function is output when
2012 all called functions are already assembled to allow data to be
2013 propagated across the callgraph. Use a stack to get smaller distance
2014 between a function and its callees (later we may choose to use a more
2015 sophisticated algorithm for function reordering; we will likely want
2016 to use subsections to make the output functions appear in top-down
2020 expand_all_functions (void)
2023 cgraph_node
**order
= XCNEWVEC (cgraph_node
*,
2024 symtab
->cgraph_count
);
2025 unsigned int expanded_func_count
= 0, profiled_func_count
= 0;
2026 int order_pos
, new_order_pos
= 0;
2029 order_pos
= ipa_reverse_postorder (order
);
2030 gcc_assert (order_pos
== symtab
->cgraph_count
);
2032 /* Garbage collector may remove inline clones we eliminate during
2033 optimization. So we must be sure to not reference them. */
2034 for (i
= 0; i
< order_pos
; i
++)
2035 if (order
[i
]->process
)
2036 order
[new_order_pos
++] = order
[i
];
2038 if (flag_profile_reorder_functions
)
2039 qsort (order
, new_order_pos
, sizeof (cgraph_node
*), node_cmp
);
2041 for (i
= new_order_pos
- 1; i
>= 0; i
--)
2047 expanded_func_count
++;
2048 if(node
->tp_first_run
)
2049 profiled_func_count
++;
2051 if (symtab
->dump_file
)
2052 fprintf (symtab
->dump_file
,
2053 "Time profile order in expand_all_functions:%s:%d\n",
2054 node
->asm_name (), node
->tp_first_run
);
2061 fprintf (dump_file
, "Expanded functions with time profile (%s):%u/%u\n",
2062 main_input_filename
, profiled_func_count
, expanded_func_count
);
2064 if (symtab
->dump_file
&& flag_profile_reorder_functions
)
2065 fprintf (symtab
->dump_file
, "Expanded functions with time profile:%u/%u\n",
2066 profiled_func_count
, expanded_func_count
);
2068 symtab
->process_new_functions ();
2069 free_gimplify_stack ();
2074 /* This is used to sort the node types by the cgraph order number. */
2076 enum cgraph_order_sort_kind
2078 ORDER_UNDEFINED
= 0,
2084 struct cgraph_order_sort
2086 enum cgraph_order_sort_kind kind
;
2095 /* Output all functions, variables, and asm statements in the order
2096 according to their order fields, which is the order in which they
2097 appeared in the file. This implements -fno-toplevel-reorder. In
2098 this mode we may output functions and variables which don't really
2100 When NO_REORDER is true only do this for symbols marked no reorder. */
2103 output_in_order (bool no_reorder
)
2106 cgraph_order_sort
*nodes
;
2111 max
= symtab
->order
;
2112 nodes
= XCNEWVEC (cgraph_order_sort
, max
);
2114 FOR_EACH_DEFINED_FUNCTION (pf
)
2116 if (pf
->process
&& !pf
->thunk
.thunk_p
&& !pf
->alias
)
2118 if (no_reorder
&& !pf
->no_reorder
)
2121 gcc_assert (nodes
[i
].kind
== ORDER_UNDEFINED
);
2122 nodes
[i
].kind
= ORDER_FUNCTION
;
2127 FOR_EACH_DEFINED_VARIABLE (pv
)
2128 if (!DECL_EXTERNAL (pv
->decl
))
2130 if (no_reorder
&& !pv
->no_reorder
)
2133 gcc_assert (nodes
[i
].kind
== ORDER_UNDEFINED
);
2134 nodes
[i
].kind
= ORDER_VAR
;
2138 for (pa
= symtab
->first_asm_symbol (); pa
; pa
= pa
->next
)
2141 gcc_assert (nodes
[i
].kind
== ORDER_UNDEFINED
);
2142 nodes
[i
].kind
= ORDER_ASM
;
2146 /* In toplevel reorder mode we output all statics; mark them as needed. */
2148 for (i
= 0; i
< max
; ++i
)
2149 if (nodes
[i
].kind
== ORDER_VAR
)
2150 nodes
[i
].u
.v
->finalize_named_section_flags ();
2152 for (i
= 0; i
< max
; ++i
)
2154 switch (nodes
[i
].kind
)
2156 case ORDER_FUNCTION
:
2157 nodes
[i
].u
.f
->process
= 0;
2158 nodes
[i
].u
.f
->expand ();
2162 nodes
[i
].u
.v
->assemble_decl ();
2166 assemble_asm (nodes
[i
].u
.a
->asm_str
);
2169 case ORDER_UNDEFINED
:
2177 symtab
->clear_asm_symbols ();
2185 gcc::pass_manager
*passes
= g
->get_passes ();
2188 current_function_decl
= NULL
;
2189 gimple_register_cfg_hooks ();
2190 bitmap_obstack_initialize (NULL
);
2192 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START
, NULL
);
2196 execute_ipa_pass_list (passes
->all_small_ipa_passes
);
2201 /* This extra symtab_remove_unreachable_nodes pass tends to catch some
2202 devirtualization and other changes where removal iterate. */
2203 symtab
->remove_unreachable_nodes (symtab
->dump_file
);
2205 /* If pass_all_early_optimizations was not scheduled, the state of
2206 the cgraph will not be properly updated. Update it now. */
2207 if (symtab
->state
< IPA_SSA
)
2208 symtab
->state
= IPA_SSA
;
2212 /* Generate coverage variables and constructors. */
2215 /* Process new functions added. */
2217 current_function_decl
= NULL
;
2218 symtab
->process_new_functions ();
2220 execute_ipa_summary_passes
2221 ((ipa_opt_pass_d
*) passes
->all_regular_ipa_passes
);
2224 /* Some targets need to handle LTO assembler output specially. */
2225 if (flag_generate_lto
|| flag_generate_offload
)
2226 targetm
.asm_out
.lto_start ();
2230 if (g
->have_offload
)
2232 section_name_prefix
= OFFLOAD_SECTION_NAME_PREFIX
;
2233 lto_stream_offload_p
= true;
2234 ipa_write_summaries ();
2235 lto_stream_offload_p
= false;
2239 section_name_prefix
= LTO_SECTION_NAME_PREFIX
;
2240 lto_stream_offload_p
= false;
2241 ipa_write_summaries ();
2245 if (flag_generate_lto
|| flag_generate_offload
)
2246 targetm
.asm_out
.lto_end ();
2248 if (!flag_ltrans
&& (in_lto_p
|| !flag_lto
|| flag_fat_lto_objects
))
2249 execute_ipa_pass_list (passes
->all_regular_ipa_passes
);
2250 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END
, NULL
);
2252 bitmap_obstack_release (NULL
);
2256 /* Return string alias is alias of. */
/* Reads the "alias" attribute string off DECL and interns it as an
   identifier.  NOTE(review): no null check on lookup_attribute's result —
   callers presumably guarantee the attribute exists; confirm upstream.
   (Extraction dropped the return-type line and braces.)  */
2259 get_alias_symbol (tree decl
)
2261 tree alias
= lookup_attribute ("alias", DECL_ATTRIBUTES (decl
));
2262 return get_identifier (TREE_STRING_POINTER
2263 (TREE_VALUE (TREE_VALUE (alias
))));
2267 /* Weakrefs may be associated to external decls and thus not output
2268 at expansion time. Emit all necessary aliases. */
/* NOTE(review): extraction dropped the return type, braces, local
   declarations and part of the guarding condition (original 2269-2274,
   2276, 2281-2284, 2287, 2296-2298, 2300, 2302-2303 absent) — verify
   against upstream before relying on this text.  */
2271 symbol_table::output_weakrefs (void)
2275 FOR_EACH_SYMBOL (node
)
/* Part of a larger condition: skip symbols already written, and for
   instrumented clones require the counterpart unwritten too.  */
2277 && !TREE_ASM_WRITTEN (node
->decl
)
2278 && (!(cnode
= dyn_cast
<cgraph_node
*> (node
))
2279 || !cnode
->instrumented_version
2280 || !TREE_ASM_WRITTEN (cnode
->instrumented_version
->decl
))
2285 /* Weakrefs are special by not requiring target definition in current
2286 compilation unit. It is thus bit hard to work out what we want to
2288 When alias target is defined, we need to fetch it from symtab reference,
2289 otherwise it is pointed to by alias_target. */
/* Resolve the assembler name of the alias target, in priority order:
   explicit alias_target, analyzed symtab target, or the "alias"
   attribute string.  */
2290 if (node
->alias_target
)
2291 target
= (DECL_P (node
->alias_target
)
2292 ? DECL_ASSEMBLER_NAME (node
->alias_target
)
2293 : node
->alias_target
);
2294 else if (node
->analyzed
)
2295 target
= DECL_ASSEMBLER_NAME (node
->get_alias_target ()->decl
);
2299 target
= get_alias_symbol (node
->decl
);
2301 do_assemble_alias (node
->decl
, target
);
2305 /* Perform simple optimizations based on callgraph. */
/* Drive the interprocedural (IPA) optimization phase and then emit the
   final assembler output for all functions and variables recorded in the
   symbol table.

   NOTE(review): this region of the file is a lossy extraction -- several
   original lines (guard conditions, braces, `return' statements and parts
   of comments) are missing between the numbered fragments below, so the
   control flow shown here is only partial.  Do not treat this text as
   compilable as-is; restore it from the pristine source before editing.  */
2308 symbol_table::compile (void)
2313 #ifdef ENABLE_CHECKING
2314 symtab_node::verify_symtab_nodes ();
/* Account the IPA phase to the TV_CGRAPHOPT timer.  */
2317 timevar_push (TV_CGRAPHOPT
);
/* Optional memory-consumption report before the IPA passes run
   (enabled by -fpre-ipa-mem-report).  */
2318 if (pre_ipa_mem_report
)
2320 fprintf (stderr
, "Memory consumption before IPA\n");
2321 dump_memory_report (false);
2324 fprintf (stderr
, "Performing interprocedural optimizations\n");
2327 /* Offloading requires LTO infrastructure. */
2328 if (!in_lto_p
&& g
->have_offload
)
2329 flag_generate_offload
= 1;
2331 /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE. */
2332 if (flag_generate_lto
|| flag_generate_offload
)
2333 lto_streamer_hooks_init ();
2335 /* Don't run the IPA passes if there was any error or sorry messages. */
/* NOTE(review): the call into the IPA pass queue that this comment guards
   is one of the lines missing from this extraction.  */
2339 /* Do nothing else if any IPA pass found errors or if we are just streaming LTO. */
2341 || (!in_lto_p
&& flag_lto
&& !flag_fat_lto_objects
))
2343 timevar_pop (TV_CGRAPHOPT
);
/* IPA analysis is done; per-node global information is valid from here on.  */
2347 global_info_ready
= true;
2350 fprintf (dump_file
, "Optimized ");
2351 symtab_node:: dump_table (dump_file
);
/* Optional memory-consumption report after the IPA passes
   (enabled by -fpost-ipa-mem-report).  */
2353 if (post_ipa_mem_report
)
2355 fprintf (stderr
, "Memory consumption after IPA\n");
2356 dump_memory_report (false);
2358 timevar_pop (TV_CGRAPHOPT
);
2360 /* Output everything. */
2361 (*debug_hooks
->assembly_start
) ();
2363 fprintf (stderr
, "Assembling functions:\n");
2364 #ifdef ENABLE_CHECKING
2365 symtab_node::verify_symtab_nodes ();
/* Materialize all clones and run the late (small) IPA passes before
   deciding which functions actually need to be emitted.  */
2368 materialize_all_clones ();
2369 bitmap_obstack_initialize (NULL
);
2370 execute_ipa_pass_list (g
->get_passes ()->all_late_ipa_passes
);
2371 bitmap_obstack_release (NULL
);
2372 mark_functions_to_output ();
2374 /* When weakref support is missing, we autmatically translate all
2375 references to NODE to references to its ultimate alias target.
2376 The renaming mechanizm uses flag IDENTIFIER_TRANSPARENT_ALIAS and
2379 Set up this mapping before we output any assembler but once we are sure
2380 that all symbol renaming is done.
2382 FIXME: All this uglyness can go away if we just do renaming at gimple
2383 level by physically rewritting the IL. At the moment we can only redirect
2384 calls, so we need infrastructure for renaming references as well. */
2385 #ifndef ASM_OUTPUT_WEAKREF
2388 FOR_EACH_SYMBOL (node
)
2390 && lookup_attribute ("weakref", DECL_ATTRIBUTES (node
->decl
)))
/* Mark the assembler name as a transparent alias and chain it to the
   name it must actually be emitted as (either the raw alias_target
   identifier or the assembler name of the ultimate alias target).  */
2392 IDENTIFIER_TRANSPARENT_ALIAS
2393 (DECL_ASSEMBLER_NAME (node
->decl
)) = 1;
2394 TREE_CHAIN (DECL_ASSEMBLER_NAME (node
->decl
))
2395 = (node
->alias_target
? node
->alias_target
2396 : DECL_ASSEMBLER_NAME (node
->get_alias_target ()->decl
));
/* With -fno-toplevel-reorder everything must come out in source order.  */
2402 if (!flag_toplevel_reorder
)
2403 output_in_order (false);
2406 /* Output first asm statements and anything ordered. The process
2407 flag is cleared for these nodes, so we skip them later. */
2408 output_in_order (true);
2409 expand_all_functions ();
2410 output_variables ();
/* Expansion may have created new functions; process them too.  */
2413 process_new_functions ();
2419 fprintf (dump_file
, "\nFinal ");
2420 symtab_node::dump_table (dump_file
);
2422 #ifdef ENABLE_CHECKING
2423 symtab_node::verify_symtab_nodes ();
2424 /* Double check that all inline clones are gone and that all
2425 function bodies have been released from memory. */
2429 bool error_found
= false;
2431 FOR_EACH_DEFINED_FUNCTION (node
)
2432 if (node
->global
.inlined_to
2433 || gimple_has_body_p (node
->decl
))
/* NOTE(review): the lines that dump the offending node and set
   error_found before this diagnostic appear to be missing from
   this extraction.  */
2439 internal_error ("nodes with unreleased memory found");
2445 /* Analyze the whole compilation unit once it is parsed completely. */
/* Front-end entry point called when the (source-level) translation unit
   is fully parsed: lower everything to GIMPLE, build callgraph/reference
   lists, emit early debug information, and hand off to the pass manager.

   NOTE(review): this is a lossy extraction -- braces and a few statements
   between the numbered fragments are missing; the comment at fragment
   2491 has lost its closing line.  Restore from pristine source before
   editing.  */
2448 symbol_table::finalize_compilation_unit (void)
/* Account this phase to the TV_CGRAPH timer.  */
2450 timevar_push (TV_CGRAPH
);
2452 /* If we're here there's no current function anymore. Some frontends
2453 are lazy in clearing these. */
2454 current_function_decl
= NULL
;
2457 /* Do not skip analyzing the functions if there were errors, we
2458 miss diagnostics for following functions otherwise. */
2460 /* Emit size functions we didn't inline. */
2461 finalize_size_functions ();
2463 /* Mark alias targets necessary and emit diagnostics. */
2464 handle_alias_pairs ();
2468 fprintf (stderr
, "\nAnalyzing compilation unit\n");
2472 if (flag_dump_passes
)
2475 /* Gimplify and lower all functions, compute reachability and
2476 remove unreachable nodes. */
2477 analyze_functions (/*first_time=*/true);
2479 /* Mark alias targets necessary and emit diagnostics. */
2480 handle_alias_pairs ();
2482 /* Gimplify and lower thunks. */
2483 analyze_functions (/*first_time=*/false);
2485 /* Emit early debug for reachable functions, and by consequence,
2486 locally scoped symbols. */
2487 struct cgraph_node
*cnode
;
2488 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (cnode
)
2489 (*debug_hooks
->early_global_decl
) (cnode
->decl
);
2491 /* Clean up anything that needs cleaning up after initial debug
2493 (*debug_hooks
->early_finish
) ();
2495 /* Finally drive the pass manager. */
2498 timevar_pop (TV_CGRAPH
);
2501 /* Reset all state within cgraphunit.c so that we can rerun the compiler
2502 within the same process. For use by toplev::finalize. */
/* NOTE(review): lossy extraction -- the return type line and braces of
   this function are missing from the numbered fragments below.  */
2505 cgraphunit_c_finalize (void)
/* No queued new functions may survive a full compilation cycle.  */
2507 gcc_assert (cgraph_new_nodes
.length () == 0);
2508 cgraph_new_nodes
.truncate (0);
/* Drop the cached vtable entry type so it is rebuilt on the next run.  */
2510 vtable_entry_type
= NULL
;
/* Reset the enqueue chain to its empty-sentinel state.  */
2511 queued_nodes
= &symtab_terminator
;
/* Forget the heads of the analyzed function/variable lists.  */
2513 first_analyzed
= NULL
;
2514 first_analyzed_var
= NULL
;
2517 /* Creates a wrapper from cgraph_node to TARGET node. Thunk is used for this
2518 kind of wrapper method. */
/* Replace this node's body with a simple thunk-style wrapper that
   forwards to TARGET: the old body is released, a call edge to TARGET is
   created, and the wrapper is expanded to GIMPLE.

   NOTE(review): lossy extraction -- the return type line, braces, and
   some statements between the numbered fragments are missing; the
   comment at fragment 2526 has lost its closing line.  */
2521 cgraph_node::create_wrapper (cgraph_node
*target
)
2523 /* Preserve DECL_RESULT so we get right by reference flag. */
2524 tree decl_result
= DECL_RESULT (decl
);
2526 /* Remove the function's body but keep arguments to be reused
2528 release_body (true);
/* The fresh wrapper body is trivially inlinable again; restore the
   saved result decl and give the decl a fresh struct function.  */
2531 DECL_UNINLINABLE (decl
) = false;
2532 DECL_RESULT (decl
) = decl_result
;
2533 DECL_INITIAL (decl
) = NULL
;
2534 allocate_struct_function (decl
, false);
2537 /* Turn alias into thunk and expand it into GIMPLE representation. */
/* A zeroed cgraph_thunk_info with thunk_p set describes a plain
   forwarding thunk (no this-adjustment).  */
2540 memset (&thunk
, 0, sizeof (cgraph_thunk_info
));
2541 thunk
.thunk_p
= true;
/* Record the call to TARGET in the callgraph with this node's count.  */
2542 create_edge (target
, NULL
, count
, CGRAPH_FREQ_BASE
);
2544 tree arguments
= DECL_ARGUMENTS (decl
);
/* Clear addressability on the reused arguments -- presumably so they
   can be passed through directly; the loop header for this walk is
   missing from the extraction (TODO confirm against pristine source).  */
2548 TREE_ADDRESSABLE (arguments
) = false;
2549 arguments
= TREE_CHAIN (arguments
);
/* Generate the GIMPLE body for the wrapper (no output, no assembler).  */
2552 expand_thunk (false, true);
2554 /* Inline summary set-up. */
2556 inline_analyze_function (this);
2559 #include "gt-cgraphunit.h"