1 /* Driver of optimization process
2 Copyright (C) 2003-2015 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* This module implements main driver of compilation process.
23 The main scope of this file is to act as an interface in between
24 tree based frontends and the backend.
26 The front-end is supposed to use following functionality:
30 This function is called once front-end has parsed whole body of function
31 and it is certain that neither the function body nor the declaration will change.
33 (There is one exception needed for implementing GCC extern inline
36 - varpool_finalize_decl
38 This function has the same behavior as the above but is used for static
43 Insert new toplevel ASM statement
45 - finalize_compilation_unit
47 This function is called once (source level) compilation unit is finalized
48 and it will no longer change.
50 The symbol table is constructed starting from the trivially needed
51 symbols finalized by the frontend. Functions are lowered into
52 GIMPLE representation and callgraph/reference lists are constructed.
53 Those are used to discover other necessary functions and variables.
55 At the end the bodies of unreachable functions are removed.
57 The function can be called multiple times when multiple source level
58 compilation units are combined.
62 This passes control to the back-end. Optimizations are performed and
63 final assembler is generated. This is done in the following way. Note
64 that with link time optimization the process is split into three
65 stages (compile time, linktime analysis and parallel linktime as
70 1) Inter-procedural optimization.
73 This part is further split into:
75 a) early optimizations. These are local passes executed in
76 the topological order on the callgraph.
78 The purpose of early optimizations is to optimize away simple
79 things that may otherwise confuse IP analysis. Very simple
80 propagation across the callgraph is done i.e. to discover
81 functions without side effects and simple inlining is performed.
83 b) early small interprocedural passes.
85 Those are interprocedural passes executed only at compilation
86 time. These include, for example, transactional memory lowering,
87 unreachable code removal and other simple transformations.
89 c) IP analysis stage. All interprocedural passes do their
92 Interprocedural passes differ from small interprocedural
93 passes by their ability to operate across whole program
94 at linktime. Their analysis stage is performed early to
95 both reduce linking times and linktime memory usage by
96 not having to represent whole program in memory.
98 d) LTO streaming. When doing LTO, everything important gets
99 streamed into the object file.
101 Compile time and or linktime analysis stage (WPA):
103 At linktime units gets streamed back and symbol table is
104 merged. Function bodies are not streamed in and not
106 e) IP propagation stage. All IP passes execute their
107 IP propagation. This is done based on the earlier analysis
108 without having function bodies at hand.
109 f) Ltrans streaming. When doing WHOPR LTO, the program
110 is partitioned and streamed into multiple object files.
112 Compile time and/or parallel linktime stage (ltrans)
114 Each of the object files is streamed back and compiled
115 separately. Now the function bodies become available
118 2) Virtual clone materialization
119 (cgraph_materialize_clone)
121 IP passes can produce copies of existing functions (such
122 as versioned clones or inline clones) without actually
123 manipulating their bodies by creating virtual clones in
124 the callgraph. At this time the virtual clones are
125 turned into real functions
128 All IP passes transform function bodies based on earlier
129 decision of the IP propagation.
131 4) late small IP passes
133 Simple IP passes working within single program partition.
136 (expand_all_functions)
138 At this stage functions that need to be output into
139 assembler are identified and compiled in topological order
140 6) Output of variables and aliases
141 Now it is known what variable references were not optimized
142 out and thus all variables are output to the file.
144 Note that with -fno-toplevel-reorder passes 5 and 6
145 are combined together in cgraph_output_in_order.
147 Finally there are functions to manipulate the callgraph from
149 - cgraph_add_new_function is used to add backend produced
150 functions introduced after the unit is finalized.
151 The functions are enqueued for later processing and inserted
152 into callgraph with cgraph_process_new_functions.
154 - cgraph_function_versioning
156 produces a copy of function into new one (a version)
157 and apply simple transformations
162 #include "coretypes.h"
168 #include "cfghooks.h"
169 #include "regset.h" /* FIXME: For reg_obstack. */
170 #include "alloc-pool.h"
171 #include "tree-pass.h"
172 #include "stringpool.h"
173 #include "gimple-ssa.h"
175 #include "coverage.h"
176 #include "lto-streamer.h"
177 #include "fold-const.h"
179 #include "stor-layout.h"
181 #include "cfgcleanup.h"
182 #include "gimple-fold.h"
183 #include "gimplify.h"
184 #include "gimple-iterator.h"
185 #include "gimplify-me.h"
186 #include "tree-cfg.h"
187 #include "tree-into-ssa.h"
188 #include "tree-ssa.h"
189 #include "langhooks.h"
192 #include "symbol-summary.h"
193 #include "ipa-prop.h"
194 #include "gimple-pretty-print.h"
196 #include "ipa-inline.h"
197 #include "ipa-utils.h"
201 #include "pass_manager.h"
202 #include "tree-nested.h"
204 #include "tree-chkp.h"
205 #include "lto-section-names.h"
207 /* Queue of cgraph nodes scheduled to be added into cgraph. This is a
208 secondary queue used during optimization to accommodate passes that
209 may generate new functions that need to be optimized and expanded. */
210 vec
<cgraph_node
*> cgraph_new_nodes
;
212 static void expand_all_functions (void);
213 static void mark_functions_to_output (void);
214 static void handle_alias_pairs (void);
216 /* Used for vtable lookup in thunk adjusting. */
217 static GTY (()) tree vtable_entry_type
;
219 /* Determine if symbol declaration is needed. That is, visible to something
220 either outside this translation unit, something magic in the system
223 symtab_node::needed_p (void)
225 /* Double check that no one output the function into assembly file
227 gcc_checking_assert (!DECL_ASSEMBLER_NAME_SET_P (decl
)
228 || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl
)));
233 if (DECL_EXTERNAL (decl
))
236 /* If the user told us it is used, then it must be so. */
240 /* ABI forced symbols are needed when they are external. */
241 if (forced_by_abi
&& TREE_PUBLIC (decl
))
244 /* Keep constructors, destructors and virtual functions. */
245 if (TREE_CODE (decl
) == FUNCTION_DECL
246 && (DECL_STATIC_CONSTRUCTOR (decl
) || DECL_STATIC_DESTRUCTOR (decl
)))
249 /* Externally visible variables must be output. The exception is
250 COMDAT variables that must be output only when they are needed. */
251 if (TREE_PUBLIC (decl
) && !DECL_COMDAT (decl
))
257 /* Head and terminator of the queue of nodes to be processed while building
260 static symtab_node symtab_terminator
;
261 static symtab_node
*queued_nodes
= &symtab_terminator
;
263 /* Add NODE to queue starting at QUEUED_NODES.
264 The queue is linked via AUX pointers and terminated by pointer to 1. */
267 enqueue_node (symtab_node
*node
)
271 gcc_checking_assert (queued_nodes
);
272 node
->aux
= queued_nodes
;
276 /* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
277 functions into callgraph in a way so they look like ordinary reachable
278 functions inserted into callgraph already at construction time. */
281 symbol_table::process_new_functions (void)
285 if (!cgraph_new_nodes
.exists ())
288 handle_alias_pairs ();
289 /* Note that this queue may grow as it is being processed, as the new
290 functions may generate new ones. */
291 for (unsigned i
= 0; i
< cgraph_new_nodes
.length (); i
++)
293 cgraph_node
*node
= cgraph_new_nodes
[i
];
298 /* At construction time we just need to finalize function and move
299 it into reachable functions list. */
301 cgraph_node::finalize_function (fndecl
, false);
302 call_cgraph_insertion_hooks (node
);
308 case IPA_SSA_AFTER_INLINING
:
309 /* When IPA optimization already started, do all essential
310 transformations that have already been performed on the whole
311 cgraph but not on this function. */
313 gimple_register_cfg_hooks ();
316 push_cfun (DECL_STRUCT_FUNCTION (fndecl
));
317 if ((state
== IPA_SSA
|| state
== IPA_SSA_AFTER_INLINING
)
318 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl
)))
319 g
->get_passes ()->execute_early_local_passes ();
320 else if (inline_summaries
!= NULL
)
321 compute_inline_parameters (node
, true);
322 free_dominance_info (CDI_POST_DOMINATORS
);
323 free_dominance_info (CDI_DOMINATORS
);
325 call_cgraph_insertion_hooks (node
);
329 /* Functions created during expansion shall be compiled
332 call_cgraph_insertion_hooks (node
);
342 cgraph_new_nodes
.release ();
345 /* As an GCC extension we allow redefinition of the function. The
346 semantics when both copies of bodies differ is not well defined.
347 We replace the old body with new body so in unit at a time mode
348 we always use new body, while in normal mode we may end up with
349 old body inlined into some functions and new body expanded and
352 ??? It may make more sense to use one body for inlining and other
353 body for expanding the function but this is difficult to do. */
356 cgraph_node::reset (void)
358 /* If process is set, then we have already begun whole-unit analysis.
359 This is *not* testing for whether we've already emitted the function.
360 That case can be sort-of legitimately seen with real function redefinition
361 errors. I would argue that the front end should never present us with
362 such a case, but don't enforce that for now. */
363 gcc_assert (!process
);
365 /* Reset our data structures so we can analyze the function again. */
366 memset (&local
, 0, sizeof (local
));
367 memset (&global
, 0, sizeof (global
));
368 memset (&rtl
, 0, sizeof (rtl
));
373 cpp_implicit_alias
= false;
376 remove_all_references ();
379 /* Return true when there are references to the node. INCLUDE_SELF is
380 true if a self reference counts as a reference. */
383 symtab_node::referred_to_p (bool include_self
)
387 /* See if there are any references at all. */
388 if (iterate_referring (0, ref
))
390 /* For functions check also calls. */
391 cgraph_node
*cn
= dyn_cast
<cgraph_node
*> (this);
392 if (cn
&& cn
->callers
)
396 for (cgraph_edge
*e
= cn
->callers
; e
; e
= e
->next_caller
)
397 if (e
->caller
!= this)
403 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
404 logic in effect. If NO_COLLECT is true, then our caller cannot stand to have
405 the garbage collector run at the moment. We would need to either create
406 a new GC context, or just not compile right now. */
409 cgraph_node::finalize_function (tree decl
, bool no_collect
)
411 cgraph_node
*node
= cgraph_node::get_create (decl
);
413 if (node
->definition
)
415 /* Nested functions should only be defined once. */
416 gcc_assert (!DECL_CONTEXT (decl
)
417 || TREE_CODE (DECL_CONTEXT (decl
)) != FUNCTION_DECL
);
419 node
->local
.redefined_extern_inline
= true;
422 /* Set definition first before calling notice_global_symbol so that
423 it is available to notice_global_symbol. */
424 node
->definition
= true;
425 notice_global_symbol (decl
);
426 node
->lowered
= DECL_STRUCT_FUNCTION (decl
)->cfg
!= NULL
;
428 /* With -fkeep-inline-functions we are keeping all inline functions except
429 for extern inline ones. */
430 if (flag_keep_inline_functions
431 && DECL_DECLARED_INLINE_P (decl
)
432 && !DECL_EXTERNAL (decl
)
433 && !DECL_DISREGARD_INLINE_LIMITS (decl
))
434 node
->force_output
= 1;
436 /* When not optimizing, also output the static functions. (see
437 PR24561), but don't do so for always_inline functions, functions
438 declared inline and nested functions. These were optimized out
439 in the original implementation and it is unclear whether we want
440 to change the behavior here. */
441 if (((!opt_for_fn (decl
, optimize
) || flag_keep_static_functions
)
442 && !node
->cpp_implicit_alias
443 && !DECL_DISREGARD_INLINE_LIMITS (decl
)
444 && !DECL_DECLARED_INLINE_P (decl
)
445 && !(DECL_CONTEXT (decl
)
446 && TREE_CODE (DECL_CONTEXT (decl
)) == FUNCTION_DECL
))
447 && !DECL_COMDAT (decl
) && !DECL_EXTERNAL (decl
))
448 node
->force_output
= 1;
450 /* If we've not yet emitted decl, tell the debug info about it. */
451 if (!TREE_ASM_WRITTEN (decl
))
452 (*debug_hooks
->deferred_inline_function
) (decl
);
457 if (symtab
->state
== CONSTRUCTION
458 && (node
->needed_p () || node
->referred_to_p ()))
462 /* Add the function FNDECL to the call graph.
463 Unlike finalize_function, this function is intended to be used
464 by middle end and allows insertion of new function at arbitrary point
465 of compilation. The function can be either in high, low or SSA form
468 The function is assumed to be reachable and have address taken (so no
469 API breaking optimizations are performed on it).
471 Main work done by this function is to enqueue the function for later
472 processing to avoid need the passes to be re-entrant. */
475 cgraph_node::add_new_function (tree fndecl
, bool lowered
)
477 gcc::pass_manager
*passes
= g
->get_passes ();
482 struct function
*fn
= DECL_STRUCT_FUNCTION (fndecl
);
483 const char *function_type
= ((gimple_has_body_p (fndecl
))
485 ? (gimple_in_ssa_p (fn
)
489 : "to-be-gimplified");
491 "Added new %s function %s to callgraph\n",
493 fndecl_name (fndecl
));
496 switch (symtab
->state
)
499 cgraph_node::finalize_function (fndecl
, false);
502 /* Just enqueue function to be processed at nearest occurrence. */
503 node
= cgraph_node::get_create (fndecl
);
505 node
->lowered
= true;
506 cgraph_new_nodes
.safe_push (node
);
511 case IPA_SSA_AFTER_INLINING
:
513 /* Bring the function into finalized state and enqueue for later
514 analyzing and compilation. */
515 node
= cgraph_node::get_create (fndecl
);
516 node
->local
.local
= false;
517 node
->definition
= true;
518 node
->force_output
= true;
519 if (!lowered
&& symtab
->state
== EXPANSION
)
521 push_cfun (DECL_STRUCT_FUNCTION (fndecl
));
522 gimple_register_cfg_hooks ();
523 bitmap_obstack_initialize (NULL
);
524 execute_pass_list (cfun
, passes
->all_lowering_passes
);
525 passes
->execute_early_local_passes ();
526 bitmap_obstack_release (NULL
);
532 node
->lowered
= true;
533 cgraph_new_nodes
.safe_push (node
);
537 /* At the very end of compilation we have to do all the work up
539 node
= cgraph_node::create (fndecl
);
541 node
->lowered
= true;
542 node
->definition
= true;
544 push_cfun (DECL_STRUCT_FUNCTION (fndecl
));
545 gimple_register_cfg_hooks ();
546 bitmap_obstack_initialize (NULL
);
547 if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl
)))
548 g
->get_passes ()->execute_early_local_passes ();
549 bitmap_obstack_release (NULL
);
558 /* Set a personality if required and we already passed EH lowering. */
560 && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl
))
561 == eh_personality_lang
))
562 DECL_FUNCTION_PERSONALITY (fndecl
) = lang_hooks
.eh_personality ();
565 /* Analyze the function scheduled to be output. */
567 cgraph_node::analyze (void)
569 tree decl
= this->decl
;
570 location_t saved_loc
= input_location
;
571 input_location
= DECL_SOURCE_LOCATION (decl
);
575 cgraph_node
*t
= cgraph_node::get (thunk
.alias
);
577 create_edge (t
, NULL
, 0, CGRAPH_FREQ_BASE
);
578 callees
->can_throw_external
= !TREE_NOTHROW (t
->decl
);
579 /* Target code in expand_thunk may need the thunk's target
580 to be analyzed, so recurse here. */
585 t
= t
->get_alias_target ();
589 if (!expand_thunk (false, false))
597 resolve_alias (cgraph_node::get (alias_target
));
598 else if (dispatcher_function
)
600 /* Generate the dispatcher body of multi-versioned functions. */
601 cgraph_function_version_info
*dispatcher_version_info
602 = function_version ();
603 if (dispatcher_version_info
!= NULL
604 && (dispatcher_version_info
->dispatcher_resolver
607 tree resolver
= NULL_TREE
;
608 gcc_assert (targetm
.generate_version_dispatcher_body
);
609 resolver
= targetm
.generate_version_dispatcher_body (this);
610 gcc_assert (resolver
!= NULL_TREE
);
615 push_cfun (DECL_STRUCT_FUNCTION (decl
));
617 assign_assembler_name_if_neeeded (decl
);
619 /* Make sure to gimplify bodies only once. During analyzing a
620 function we lower it, which will require gimplified nested
621 functions, so we can end up here with an already gimplified
623 if (!gimple_has_body_p (decl
))
624 gimplify_function_tree (decl
);
626 /* Lower the function. */
630 lower_nested_functions (decl
);
631 gcc_assert (!nested
);
633 gimple_register_cfg_hooks ();
634 bitmap_obstack_initialize (NULL
);
635 execute_pass_list (cfun
, g
->get_passes ()->all_lowering_passes
);
636 free_dominance_info (CDI_POST_DOMINATORS
);
637 free_dominance_info (CDI_DOMINATORS
);
639 bitmap_obstack_release (NULL
);
647 input_location
= saved_loc
;
650 /* C++ frontend produces same body aliases all over the place, even before PCH
651 gets streamed out. It relies on us linking the aliases with their function
652 in order to do the fixups, but ipa-ref is not PCH safe. Consequently we
653 first produce aliases without links, but once the C++ FE is sure it won't stream
654 PCH we build the links via this function. */
657 symbol_table::process_same_body_aliases (void)
660 FOR_EACH_SYMBOL (node
)
661 if (node
->cpp_implicit_alias
&& !node
->analyzed
)
663 (TREE_CODE (node
->alias_target
) == VAR_DECL
664 ? (symtab_node
*)varpool_node::get_create (node
->alias_target
)
665 : (symtab_node
*)cgraph_node::get_create (node
->alias_target
));
666 cpp_implicit_aliases_done
= true;
669 /* Process attributes common for vars and functions. */
672 process_common_attributes (symtab_node
*node
, tree decl
)
674 tree weakref
= lookup_attribute ("weakref", DECL_ATTRIBUTES (decl
));
676 if (weakref
&& !lookup_attribute ("alias", DECL_ATTRIBUTES (decl
)))
678 warning_at (DECL_SOURCE_LOCATION (decl
), OPT_Wattributes
,
679 "%<weakref%> attribute should be accompanied with"
680 " an %<alias%> attribute");
681 DECL_WEAK (decl
) = 0;
682 DECL_ATTRIBUTES (decl
) = remove_attribute ("weakref",
683 DECL_ATTRIBUTES (decl
));
686 if (lookup_attribute ("no_reorder", DECL_ATTRIBUTES (decl
)))
687 node
->no_reorder
= 1;
690 /* Look for externally_visible and used attributes and mark cgraph nodes
693 We cannot mark the nodes at the point the attributes are processed (in
694 handle_*_attribute) because the copy of the declarations available at that
695 point may not be canonical. For example, in:
698 void f() __attribute__((used));
700 the declaration we see in handle_used_attribute will be the second
701 declaration -- but the front end will subsequently merge that declaration
702 with the original declaration and discard the second declaration.
704 Furthermore, we can't mark these nodes in finalize_function because:
707 void f() __attribute__((externally_visible));
711 So, we walk the nodes at the end of the translation unit, applying the
712 attributes at that point. */
715 process_function_and_variable_attributes (cgraph_node
*first
,
716 varpool_node
*first_var
)
721 for (node
= symtab
->first_function (); node
!= first
;
722 node
= symtab
->next_function (node
))
724 tree decl
= node
->decl
;
725 if (DECL_PRESERVE_P (decl
))
726 node
->mark_force_output ();
727 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl
)))
729 if (! TREE_PUBLIC (node
->decl
))
730 warning_at (DECL_SOURCE_LOCATION (node
->decl
), OPT_Wattributes
,
731 "%<externally_visible%>"
732 " attribute have effect only on public objects");
734 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl
))
735 && (node
->definition
&& !node
->alias
))
737 warning_at (DECL_SOURCE_LOCATION (node
->decl
), OPT_Wattributes
,
738 "%<weakref%> attribute ignored"
739 " because function is defined");
740 DECL_WEAK (decl
) = 0;
741 DECL_ATTRIBUTES (decl
) = remove_attribute ("weakref",
742 DECL_ATTRIBUTES (decl
));
745 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl
))
746 && !DECL_DECLARED_INLINE_P (decl
)
747 /* redefining extern inline function makes it DECL_UNINLINABLE. */
748 && !DECL_UNINLINABLE (decl
))
749 warning_at (DECL_SOURCE_LOCATION (decl
), OPT_Wattributes
,
750 "always_inline function might not be inlinable");
752 process_common_attributes (node
, decl
);
754 for (vnode
= symtab
->first_variable (); vnode
!= first_var
;
755 vnode
= symtab
->next_variable (vnode
))
757 tree decl
= vnode
->decl
;
758 if (DECL_EXTERNAL (decl
)
759 && DECL_INITIAL (decl
))
760 varpool_node::finalize_decl (decl
);
761 if (DECL_PRESERVE_P (decl
))
762 vnode
->force_output
= true;
763 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl
)))
765 if (! TREE_PUBLIC (vnode
->decl
))
766 warning_at (DECL_SOURCE_LOCATION (vnode
->decl
), OPT_Wattributes
,
767 "%<externally_visible%>"
768 " attribute have effect only on public objects");
770 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl
))
772 && DECL_INITIAL (decl
))
774 warning_at (DECL_SOURCE_LOCATION (vnode
->decl
), OPT_Wattributes
,
775 "%<weakref%> attribute ignored"
776 " because variable is initialized");
777 DECL_WEAK (decl
) = 0;
778 DECL_ATTRIBUTES (decl
) = remove_attribute ("weakref",
779 DECL_ATTRIBUTES (decl
));
781 process_common_attributes (vnode
, decl
);
785 /* Mark DECL as finalized. By finalizing the declaration, the frontend instructs the
786 middle end to output the variable to asm file, if needed or externally
790 varpool_node::finalize_decl (tree decl
)
792 varpool_node
*node
= varpool_node::get_create (decl
);
794 gcc_assert (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
));
796 if (node
->definition
)
798 /* Set definition first before calling notice_global_symbol so that
799 it is available to notice_global_symbol. */
800 node
->definition
= true;
801 notice_global_symbol (decl
);
802 if (TREE_THIS_VOLATILE (decl
) || DECL_PRESERVE_P (decl
)
803 /* Traditionally we do not eliminate static variables when not
804 optimizing and when not doing toplevel reorder. */
806 || ((!flag_toplevel_reorder
807 && !DECL_COMDAT (node
->decl
)
808 && !DECL_ARTIFICIAL (node
->decl
))))
809 node
->force_output
= true;
811 if (symtab
->state
== CONSTRUCTION
812 && (node
->needed_p () || node
->referred_to_p ()))
814 if (symtab
->state
>= IPA_SSA
)
816 /* Some frontends produce various interface variables after compilation
818 if (symtab
->state
== FINISHED
819 || (!flag_toplevel_reorder
820 && symtab
->state
== EXPANSION
))
821 node
->assemble_decl ();
823 if (DECL_INITIAL (decl
))
824 chkp_register_var_initializer (decl
);
827 /* EDGE is a polymorphic call. Mark all possible targets as reachable
828 and if there is only one target, perform trivial devirtualization.
829 REACHABLE_CALL_TARGETS collects target lists we already walked to
830 avoid duplicate work. */
833 walk_polymorphic_call_targets (hash_set
<void *> *reachable_call_targets
,
839 vec
<cgraph_node
*>targets
840 = possible_polymorphic_call_targets
841 (edge
, &final
, &cache_token
);
843 if (!reachable_call_targets
->add (cache_token
))
845 if (symtab
->dump_file
)
846 dump_possible_polymorphic_call_targets
847 (symtab
->dump_file
, edge
);
849 for (i
= 0; i
< targets
.length (); i
++)
851 /* Do not bother to mark virtual methods in anonymous namespace;
852 either we will find use of virtual table defining it, or it is
854 if (targets
[i
]->definition
856 (TREE_TYPE (targets
[i
]->decl
))
858 && !type_in_anonymous_namespace_p
859 (TYPE_METHOD_BASETYPE (TREE_TYPE (targets
[i
]->decl
))))
860 enqueue_node (targets
[i
]);
864 /* Very trivial devirtualization; when the type is
865 final or anonymous (so we know all its derivation)
866 and there is only one possible virtual call target,
867 make the edge direct. */
870 if (targets
.length () <= 1 && dbg_cnt (devirt
))
873 if (targets
.length () == 1)
876 target
= cgraph_node::create
877 (builtin_decl_implicit (BUILT_IN_UNREACHABLE
));
879 if (symtab
->dump_file
)
881 fprintf (symtab
->dump_file
,
882 "Devirtualizing call: ");
883 print_gimple_stmt (symtab
->dump_file
,
887 if (dump_enabled_p ())
889 location_t locus
= gimple_location_safe (edge
->call_stmt
);
890 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, locus
,
891 "devirtualizing call in %s to %s\n",
892 edge
->caller
->name (), target
->name ());
895 edge
->make_direct (target
);
896 edge
->redirect_call_stmt_to_callee ();
898 /* Call to __builtin_unreachable shouldn't be instrumented. */
899 if (!targets
.length ())
900 gimple_call_set_with_bounds (edge
->call_stmt
, false);
902 if (symtab
->dump_file
)
904 fprintf (symtab
->dump_file
,
905 "Devirtualized as: ");
906 print_gimple_stmt (symtab
->dump_file
,
914 /* Issue appropriate warnings for the global declaration DECL. */
917 check_global_declaration (symtab_node
*snode
)
919 tree decl
= snode
->decl
;
921 /* Warn about any function declared static but not defined. We don't
922 warn about variables, because many programs have static variables
923 that exist only to get some text into the object file. */
924 if (TREE_CODE (decl
) == FUNCTION_DECL
925 && DECL_INITIAL (decl
) == 0
926 && DECL_EXTERNAL (decl
)
927 && ! DECL_ARTIFICIAL (decl
)
928 && ! TREE_NO_WARNING (decl
)
929 && ! TREE_PUBLIC (decl
)
930 && (warn_unused_function
931 || snode
->referred_to_p (/*include_self=*/false)))
933 if (snode
->referred_to_p (/*include_self=*/false))
934 pedwarn (input_location
, 0, "%q+F used but never defined", decl
);
936 warning (OPT_Wunused_function
, "%q+F declared %<static%> but never defined", decl
);
937 /* This symbol is effectively an "extern" declaration now. */
938 TREE_PUBLIC (decl
) = 1;
941 /* Warn about static fns or vars defined but not used. */
942 if (((warn_unused_function
&& TREE_CODE (decl
) == FUNCTION_DECL
)
943 || (((warn_unused_variable
&& ! TREE_READONLY (decl
))
944 || (warn_unused_const_variable
&& TREE_READONLY (decl
)))
945 && TREE_CODE (decl
) == VAR_DECL
))
946 && ! DECL_IN_SYSTEM_HEADER (decl
)
947 && ! snode
->referred_to_p (/*include_self=*/false)
948 /* This TREE_USED check is needed in addition to referred_to_p
949 above, because the `__unused__' attribute is not being
950 considered for referred_to_p. */
951 && ! TREE_USED (decl
)
952 /* The TREE_USED bit for file-scope decls is kept in the identifier,
953 to handle multiple external decls in different scopes. */
954 && ! (DECL_NAME (decl
) && TREE_USED (DECL_NAME (decl
)))
955 && ! DECL_EXTERNAL (decl
)
956 && ! DECL_ARTIFICIAL (decl
)
957 && ! DECL_ABSTRACT_ORIGIN (decl
)
958 && ! TREE_PUBLIC (decl
)
959 /* A volatile variable might be used in some non-obvious way. */
960 && (! VAR_P (decl
) || ! TREE_THIS_VOLATILE (decl
))
961 /* Global register variables must be declared to reserve them. */
962 && ! (TREE_CODE (decl
) == VAR_DECL
&& DECL_REGISTER (decl
))
963 /* Global ctors and dtors are called by the runtime. */
964 && (TREE_CODE (decl
) != FUNCTION_DECL
965 || (!DECL_STATIC_CONSTRUCTOR (decl
)
966 && !DECL_STATIC_DESTRUCTOR (decl
)))
967 /* Otherwise, ask the language. */
968 && lang_hooks
.decls
.warn_unused_global (decl
))
969 warning_at (DECL_SOURCE_LOCATION (decl
),
970 (TREE_CODE (decl
) == FUNCTION_DECL
)
971 ? OPT_Wunused_function
972 : (TREE_READONLY (decl
)
973 ? OPT_Wunused_const_variable
974 : OPT_Wunused_variable
),
975 "%qD defined but not used", decl
);
978 /* Discover all functions and variables that are trivially needed, analyze
979 them as well as all functions and variables referred by them */
980 static cgraph_node
*first_analyzed
;
981 static varpool_node
*first_analyzed_var
;
983 /* FIRST_TIME is set to TRUE for the first time we are called for a
984 translation unit from finalize_compilation_unit() or false
988 analyze_functions (bool first_time
)
990 /* Keep track of already processed nodes when called multiple times for
991 intermodule optimization. */
992 cgraph_node
*first_handled
= first_analyzed
;
993 varpool_node
*first_handled_var
= first_analyzed_var
;
994 hash_set
<void *> reachable_call_targets
;
1000 bool changed
= true;
1001 location_t saved_loc
= input_location
;
1003 bitmap_obstack_initialize (NULL
);
1004 symtab
->state
= CONSTRUCTION
;
1005 input_location
= UNKNOWN_LOCATION
;
1007 /* Ugly, but the fixup cannot happen at the time the same body alias is created;
1008 C++ FE is confused about the COMDAT groups being right. */
1009 if (symtab
->cpp_implicit_aliases_done
)
1010 FOR_EACH_SYMBOL (node
)
1011 if (node
->cpp_implicit_alias
)
1012 node
->fixup_same_cpp_alias_visibility (node
->get_alias_target ());
1013 build_type_inheritance_graph ();
1015 /* Analysis adds static variables that in turn add references to new functions.
1016 So we need to iterate the process until it stabilizes. */
1020 process_function_and_variable_attributes (first_analyzed
,
1021 first_analyzed_var
);
1023 /* First identify the trivially needed symbols. */
1024 for (node
= symtab
->first_symbol ();
1025 node
!= first_analyzed
1026 && node
!= first_analyzed_var
; node
= node
->next
)
1028 /* Convert COMDAT group designators to IDENTIFIER_NODEs. */
1029 node
->get_comdat_group_id ();
1030 if (node
->needed_p ())
1032 enqueue_node (node
);
1033 if (!changed
&& symtab
->dump_file
)
1034 fprintf (symtab
->dump_file
, "Trivially needed symbols:");
1036 if (symtab
->dump_file
)
1037 fprintf (symtab
->dump_file
, " %s", node
->asm_name ());
1038 if (!changed
&& symtab
->dump_file
)
1039 fprintf (symtab
->dump_file
, "\n");
1041 if (node
== first_analyzed
1042 || node
== first_analyzed_var
)
1045 symtab
->process_new_functions ();
1046 first_analyzed_var
= symtab
->first_variable ();
1047 first_analyzed
= symtab
->first_function ();
1049 if (changed
&& symtab
->dump_file
)
1050 fprintf (symtab
->dump_file
, "\n");
1052 /* Lower representation, build callgraph edges and references for all trivially
1053 needed symbols and all symbols referred by them. */
1054 while (queued_nodes
!= &symtab_terminator
)
1057 node
= queued_nodes
;
1058 queued_nodes
= (symtab_node
*)queued_nodes
->aux
;
1059 cgraph_node
*cnode
= dyn_cast
<cgraph_node
*> (node
);
1060 if (cnode
&& cnode
->definition
)
1063 tree decl
= cnode
->decl
;
1065 /* ??? It is possible to create extern inline function
1066 and later using weak alias attribute to kill its body.
1067 See gcc.c-torture/compile/20011119-1.c */
1068 if (!DECL_STRUCT_FUNCTION (decl
)
1070 && !cnode
->thunk
.thunk_p
1071 && !cnode
->dispatcher_function
)
1074 cnode
->local
.redefined_extern_inline
= true;
1078 if (!cnode
->analyzed
)
1081 for (edge
= cnode
->callees
; edge
; edge
= edge
->next_callee
)
1082 if (edge
->callee
->definition
1083 && (!DECL_EXTERNAL (edge
->callee
->decl
)
1084 /* When not optimizing, do not try to analyze extern
1085 inline functions. Doing so is pointless. */
1086 || opt_for_fn (edge
->callee
->decl
, optimize
)
1087 /* Weakrefs needs to be preserved. */
1088 || edge
->callee
->alias
1089 /* always_inline functions are inlined even at -O0. */
1092 DECL_ATTRIBUTES (edge
->callee
->decl
))
1093 /* Multiversioned functions needs the dispatcher to
1094 be produced locally even for extern functions. */
1095 || edge
->callee
->function_version ()))
1096 enqueue_node (edge
->callee
);
1097 if (opt_for_fn (cnode
->decl
, optimize
)
1098 && opt_for_fn (cnode
->decl
, flag_devirtualize
))
1102 for (edge
= cnode
->indirect_calls
; edge
; edge
= next
)
1104 next
= edge
->next_callee
;
1105 if (edge
->indirect_info
->polymorphic
)
1106 walk_polymorphic_call_targets (&reachable_call_targets
,
1111 /* If decl is a clone of an abstract function,
1112 mark that abstract function so that we don't release its body.
1113 The DECL_INITIAL() of that abstract function declaration
1114 will be later needed to output debug info. */
1115 if (DECL_ABSTRACT_ORIGIN (decl
))
1117 cgraph_node
*origin_node
1118 = cgraph_node::get_create (DECL_ABSTRACT_ORIGIN (decl
));
1119 origin_node
->used_as_abstract_origin
= true;
1124 varpool_node
*vnode
= dyn_cast
<varpool_node
*> (node
);
1125 if (vnode
&& vnode
->definition
&& !vnode
->analyzed
)
1129 if (node
->same_comdat_group
)
1132 for (next
= node
->same_comdat_group
;
1134 next
= next
->same_comdat_group
)
1135 if (!next
->comdat_local_p ())
1136 enqueue_node (next
);
1138 for (i
= 0; node
->iterate_reference (i
, ref
); i
++)
1139 if (ref
->referred
->definition
1140 && (!DECL_EXTERNAL (ref
->referred
->decl
)
1141 || ((TREE_CODE (ref
->referred
->decl
) != FUNCTION_DECL
1143 || (TREE_CODE (ref
->referred
->decl
) == FUNCTION_DECL
1144 && opt_for_fn (ref
->referred
->decl
, optimize
))
1146 || ref
->referred
->alias
)))
1147 enqueue_node (ref
->referred
);
1148 symtab
->process_new_functions ();
1151 update_type_inheritance_graph ();
1153 /* Collect entry points to the unit. */
1154 if (symtab
->dump_file
)
1156 fprintf (symtab
->dump_file
, "\n\nInitial ");
1157 symtab_node::dump_table (symtab
->dump_file
);
1163 FOR_EACH_SYMBOL (snode
)
1164 check_global_declaration (snode
);
1167 if (symtab
->dump_file
)
1168 fprintf (symtab
->dump_file
, "\nRemoving unused symbols:");
1170 for (node
= symtab
->first_symbol ();
1171 node
!= first_handled
1172 && node
!= first_handled_var
; node
= next
)
1175 if (!node
->aux
&& !node
->referred_to_p ())
1177 if (symtab
->dump_file
)
1178 fprintf (symtab
->dump_file
, " %s", node
->name ());
1180 /* See if the debugger can use anything before the DECL
1181 passes away. Perhaps it can notice a DECL that is now a
1182 constant and can tag the early DIE with an appropriate
1185 Otherwise, this is the last chance the debug_hooks have
1186 at looking at optimized away DECLs, since
1187 late_global_decl will subsequently be called from the
1188 contents of the now pruned symbol table. */
1189 if (!decl_function_context (node
->decl
))
1190 (*debug_hooks
->late_global_decl
) (node
->decl
);
1195 if (cgraph_node
*cnode
= dyn_cast
<cgraph_node
*> (node
))
1197 tree decl
= node
->decl
;
1199 if (cnode
->definition
&& !gimple_has_body_p (decl
)
1201 && !cnode
->thunk
.thunk_p
)
1204 gcc_assert (!cnode
->definition
|| cnode
->thunk
.thunk_p
1206 || gimple_has_body_p (decl
));
1207 gcc_assert (cnode
->analyzed
== cnode
->definition
);
1211 for (;node
; node
= node
->next
)
1213 first_analyzed
= symtab
->first_function ();
1214 first_analyzed_var
= symtab
->first_variable ();
1215 if (symtab
->dump_file
)
1217 fprintf (symtab
->dump_file
, "\n\nReclaimed ");
1218 symtab_node::dump_table (symtab
->dump_file
);
1220 bitmap_obstack_release (NULL
);
1222 /* Initialize assembler name hash, in particular we want to trigger C++
1223 mangling and same body alias creation before we free DECL_ARGUMENTS
1226 symtab
->symtab_initialize_asm_name_hash ();
1228 input_location
= saved_loc
;
1231 /* Translate the ugly representation of aliases as alias pairs into nice
1232 representation in callgraph. We don't handle all cases yet,
/* NOTE(review): lossy extraction -- original lines 1233-1235 (comment tail
   and the `static void` declarator) are missing here, as are several
   braces below.  Only comments were added in this review pass.  */
1236 handle_alias_pairs (void)
/* Iterate the global alias_pairs vector; handled/diagnosed pairs are
   removed via unordered_remove, so the index is only advanced implicitly
   by removal (the loop has no increment clause).  */
1241 for (i
= 0; alias_pairs
&& alias_pairs
->iterate (i
, &p
);)
1243 symtab_node
*target_node
= symtab_node::get_for_asmname (p
->target
);
1245 /* Weakrefs with target not defined in current unit are easy to handle:
1246 they behave just as external variables except we need to note the
1247 alias flag to later output the weakref pseudo op into asm file. */
1249 && lookup_attribute ("weakref", DECL_ATTRIBUTES (p
->decl
)) != NULL
)
1251 symtab_node
*node
= symtab_node::get (p
->decl
);
1254 node
->alias_target
= p
->target
;
1255 node
->weakref
= true;
1258 alias_pairs
->unordered_remove (i
);
/* Non-weakref alias whose target does not exist at all: diagnose and
   drop the pair, clearing the alias flag on the node.  */
1261 else if (!target_node
)
1263 error ("%q+D aliased to undefined symbol %qE", p
->decl
, p
->target
);
1264 symtab_node
*node
= symtab_node::get (p
->decl
);
1266 node
->alias
= false;
1267 alias_pairs
->unordered_remove (i
);
1271 if (DECL_EXTERNAL (target_node
->decl
)
1272 /* We use local aliases for C++ thunks to force the tailcall
1273 to bind locally. This is a hack - to keep it working do
1274 the following (which is not strictly correct). */
1275 && (TREE_CODE (target_node
->decl
) != FUNCTION_DECL
1276 || ! DECL_VIRTUAL_P (target_node
->decl
))
1277 && ! lookup_attribute ("weakref", DECL_ATTRIBUTES (p
->decl
)))
1279 error ("%q+D aliased to external symbol %qE",
1280 p
->decl
, p
->target
);
/* Function-to-function alias: materialize it in the callgraph, but only
   when the aliasing decl has a definition node of its own.  */
1283 if (TREE_CODE (p
->decl
) == FUNCTION_DECL
1284 && target_node
&& is_a
<cgraph_node
*> (target_node
))
1286 cgraph_node
*src_node
= cgraph_node::get (p
->decl
);
1287 if (src_node
&& src_node
->definition
)
1289 cgraph_node::create_alias (p
->decl
, target_node
->decl
);
1290 alias_pairs
->unordered_remove (i
);
/* Variable-to-variable alias: materialize it in the varpool.  */
1292 else if (TREE_CODE (p
->decl
) == VAR_DECL
1293 && target_node
&& is_a
<varpool_node
*> (target_node
))
1295 varpool_node::create_alias (p
->decl
, target_node
->decl
);
1296 alias_pairs
->unordered_remove (i
);
/* Mixed function/variable alias is unsupported; the error below is
   followed by a warning whose argument lines are lost in extraction.  */
1300 error ("%q+D alias in between function and variable is not supported",
1302 warning (0, "%q+D aliased declaration",
1304 alias_pairs
->unordered_remove (i
);
/* All pairs consumed -- release the vector.  */
1307 vec_free (alias_pairs
);
1311 /* Figure out what functions we want to assemble. */
/* NOTE(review): lossy extraction -- the `static void` declarator and a
   number of condition/brace lines are missing (gaps in the original
   numbering).  Only comments were added in this review pass.  */
1314 mark_functions_to_output (void)
1316 bool check_same_comdat_groups
= false;
/* First pass (checking builds): no node may be marked `process` yet.  */
1320 FOR_EACH_FUNCTION (node
)
1321 gcc_assert (!node
->process
);
/* Second pass: decide which function bodies must be assembled.  */
1323 FOR_EACH_FUNCTION (node
)
1325 tree decl
= node
->decl
;
1327 gcc_assert (!node
->process
|| node
->same_comdat_group
)
;
1331 /* We need to output all local functions that are used and not
1332 always inlined, as well as those that are reachable from
1333 outside the current compilation unit. */
1335 && !node
->thunk
.thunk_p
1337 && !node
->global
.inlined_to
1338 && !TREE_ASM_WRITTEN (decl
)
1339 && !DECL_EXTERNAL (decl
))
/* For comdat groups, mark the sibling nodes of the group as well so the
   whole group is emitted together (thunks/aliases are skipped; comdat-
   local members are handled by their group leader).  */
1342 if (node
->same_comdat_group
)
1345 for (next
= dyn_cast
<cgraph_node
*> (node
->same_comdat_group
);
1347 next
= dyn_cast
<cgraph_node
*> (next
->same_comdat_group
))
1348 if (!next
->thunk
.thunk_p
&& !next
->alias
1349 && !next
->comdat_local_p ())
1353 else if (node
->same_comdat_group
)
1356 check_same_comdat_groups
= true;
1360 /* We should've reclaimed all functions that are not needed. */
1362 && !node
->global
.inlined_to
1363 && gimple_has_body_p (decl
)
1364 /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
1365 are inside partition, we can end up not removing the body since we no longer
1366 have analyzed node pointing to it. */
1367 && !node
->in_other_partition
1370 && !DECL_EXTERNAL (decl
))
/* A live body that was neither marked for output nor reclaimed is an
   internal consistency failure.  */
1373 internal_error ("failed to reclaim unneeded function");
1375 gcc_assert (node
->global
.inlined_to
1376 || !gimple_has_body_p (decl
)
1377 || node
->in_other_partition
1379 || DECL_ARTIFICIAL (decl
)
1380 || DECL_EXTERNAL (decl
));
/* Checking pass: every unprocessed member of a comdat group must also
   have had its body reclaimed, mirroring the conditions above.  */
1385 if (flag_checking
&& check_same_comdat_groups
)
1386 FOR_EACH_FUNCTION (node
)
1387 if (node
->same_comdat_group
&& !node
->process
)
1389 tree decl
= node
->decl
;
1390 if (!node
->global
.inlined_to
1391 && gimple_has_body_p (decl
)
1392 /* FIXME: in an ltrans unit when the offline copy is outside a
1393 partition but inline copies are inside a partition, we can
1394 end up not removing the body since we no longer have an
1395 analyzed node pointing to it. */
1396 && !node
->in_other_partition
1398 && !DECL_EXTERNAL (decl
))
1401 internal_error ("failed to reclaim unneeded function in same "
1407 /* DECL is FUNCTION_DECL. Initialize datastructures so DECL is a function
1408 in lowered gimple form. IN_SSA is true if the gimple is in SSA.
1410 Set current_function_decl and cfun to newly constructed empty function body.
1411 return basic block in the function body. */
/* NOTE(review): lossy extraction -- the return-type line and local
   declarations (bb, e) plus several braces are missing.  Only comments
   were added in this review pass.  */
1414 init_lowered_empty_function (tree decl
, bool in_ssa
, gcov_type count
)
/* Install DECL as the function being built and create an empty CFG.  */
1419 current_function_decl
= decl
;
1420 allocate_struct_function (decl
, false);
1421 gimple_register_cfg_hooks ();
1422 init_empty_tree_cfg ();
/* When IN_SSA, bring up SSA data structures immediately (guarding `if`
   lost in extraction -- gap at original lines 1423-1425).  */
1426 init_tree_ssa (cfun
);
1427 init_ssa_operands (cfun
);
1428 cfun
->gimple_df
->in_ssa_p
= true;
1429 cfun
->curr_properties
|= PROP_ssa
;
1432 DECL_INITIAL (decl
) = make_node (BLOCK
);
/* Non-NULL DECL_SAVED_TREE marks the decl as having a body; the actual
   body lives in the CFG, so error_mark_node is a sentinel.  */
1434 DECL_SAVED_TREE (decl
) = error_mark_node
;
1435 cfun
->curr_properties
|= (PROP_gimple_lcf
| PROP_gimple_leh
| PROP_gimple_any
1436 | PROP_cfg
| PROP_loops
);
/* Set up the (trivial) loop tree so later passes relying on PROP_loops
   find consistent state.  */
1438 set_loops_for_fn (cfun
, ggc_cleared_alloc
<loops
> ());
1439 init_loops_structure (cfun
, loops_for_fn (cfun
), 1);
1440 loops_for_fn (cfun
)->state
|= LOOPS_MAY_HAVE_MULTIPLE_LATCHES
;
1442 /* Create BB for body of the function and connect it properly. */
1443 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
= count
;
1444 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->frequency
= REG_BR_PROB_BASE
;
1445 EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
= count
;
1446 EXIT_BLOCK_PTR_FOR_FN (cfun
)->frequency
= REG_BR_PROB_BASE
;
1447 bb
= create_basic_block (NULL
, ENTRY_BLOCK_PTR_FOR_FN (cfun
));
1449 bb
->frequency
= BB_FREQ_MAX
;
/* ENTRY -> bb -> EXIT, each edge taken unconditionally.  */
1450 e
= make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
), bb
, EDGE_FALLTHRU
);
1452 e
->probability
= REG_BR_PROB_BASE
;
1453 e
= make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
1455 e
->probability
= REG_BR_PROB_BASE
;
1456 add_bb_to_loop (bb
, ENTRY_BLOCK_PTR_FOR_FN (cfun
)->loop_father
);
1461 /* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1462 offset indicated by VIRTUAL_OFFSET, if that is
1463 non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
1464 zero for a result adjusting thunk. */
/* NOTE(review): lossy extraction -- the return type, several local
   declarations (stmt, ret, the vtabletmp temporaries) and some guarding
   conditions are missing.  Only comments were added in this pass.  */
1467 thunk_adjust (gimple_stmt_iterator
* bsi
,
1468 tree ptr
, bool this_adjusting
,
1469 HOST_WIDE_INT fixed_offset
, tree virtual_offset
)
/* For a this-adjusting thunk the constant offset is applied first,
   before the vtable lookup (condition head lost in extraction).  */
1475 && fixed_offset
!= 0)
1477 stmt
= gimple_build_assign
1478 (ptr
, fold_build_pointer_plus_hwi_loc (input_location
,
1481 gsi_insert_after (bsi
, stmt
, GSI_NEW_STMT
);
1484 /* If there's a virtual offset, look up that value in the vtable and
1485 adjust the pointer again. */
/* Lazily build the global `vtable_entry_type' used to type vtable
   slots: pointer to a bare int-returning FUNCTION_TYPE.  */
1492 if (!vtable_entry_type
)
1494 tree vfunc_type
= make_node (FUNCTION_TYPE
);
1495 TREE_TYPE (vfunc_type
) = integer_type_node
;
1496 TYPE_ARG_TYPES (vfunc_type
) = NULL_TREE
;
1497 layout_type (vfunc_type
);
1499 vtable_entry_type
= build_pointer_type (vfunc_type
);
1503 create_tmp_reg (build_pointer_type
1504 (build_pointer_type (vtable_entry_type
)), "vptr");
1506 /* The vptr is always at offset zero in the object. */
1507 stmt
= gimple_build_assign (vtabletmp
,
1508 build1 (NOP_EXPR
, TREE_TYPE (vtabletmp
),
1510 gsi_insert_after (bsi
, stmt
, GSI_NEW_STMT
);
1512 /* Form the vtable address. */
1513 vtabletmp2
= create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp
)),
1515 stmt
= gimple_build_assign (vtabletmp2
,
1516 build_simple_mem_ref (vtabletmp
));
1517 gsi_insert_after (bsi
, stmt
, GSI_NEW_STMT
);
1519 /* Find the entry with the vcall offset. */
1520 stmt
= gimple_build_assign (vtabletmp2
,
1521 fold_build_pointer_plus_loc (input_location
,
1524 gsi_insert_after (bsi
, stmt
, GSI_NEW_STMT
);
1526 /* Get the offset itself. */
1527 vtabletmp3
= create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp2
)),
1529 stmt
= gimple_build_assign (vtabletmp3
,
1530 build_simple_mem_ref (vtabletmp2
));
1531 gsi_insert_after (bsi
, stmt
, GSI_NEW_STMT
);
1533 /* Adjust the `this' pointer. */
1534 ptr
= fold_build_pointer_plus_loc (input_location
, ptr
, vtabletmp3
);
1535 ptr
= force_gimple_operand_gsi (bsi
, ptr
, true, NULL_TREE
, false,
1536 GSI_CONTINUE_LINKING
);
/* For a result-adjusting thunk the constant offset is applied after
   the (optional) virtual adjustment (condition head lost).  */
1540 && fixed_offset
!= 0)
1541 /* Adjust the pointer by the constant. */
/* Copy PTR into a register temporary first when it is a VAR_DECL, so
   the pointer arithmetic below operates on a gimple value.  */
1545 if (TREE_CODE (ptr
) == VAR_DECL
)
1549 ptrtmp
= create_tmp_reg (TREE_TYPE (ptr
), "ptr");
1550 stmt
= gimple_build_assign (ptrtmp
, ptr
);
1551 gsi_insert_after (bsi
, stmt
, GSI_NEW_STMT
);
1553 ptr
= fold_build_pointer_plus_hwi_loc (input_location
,
1554 ptrtmp
, fixed_offset
);
1557 /* Emit the statement and gimplify the adjustment expression. */
1558 ret
= create_tmp_reg (TREE_TYPE (ptr
), "adjusted_this");
1559 stmt
= gimple_build_assign (ret
, ptr
);
1560 gsi_insert_after (bsi
, stmt
, GSI_NEW_STMT
);
1565 /* Expand thunk NODE to gimple if possible.
1566 When FORCE_GIMPLE_THUNK is true, gimple thunk is created and
1567 no assembler is produced.
1568 When OUTPUT_ASM_THUNK is true, also produce assembler for
1569 thunks that are not lowered. */
/* NOTE(review): lossy extraction -- the `bool' return type, many local
   declarations (restmp, resdecl, call, ret, resbnd, fnname, fn_block,
   count, nargs, i, e, ...), `return' statements and numerous braces are
   missing (gaps in the original numbering).  Only comments added.  */
1572 cgraph_node::expand_thunk (bool output_asm_thunks
, bool force_gimple_thunk
)
/* Snapshot the thunk descriptor before cfun is swapped around.  */
1574 bool this_adjusting
= thunk
.this_adjusting
;
1575 HOST_WIDE_INT fixed_offset
= thunk
.fixed_offset
;
1576 HOST_WIDE_INT virtual_value
= thunk
.virtual_value
;
1577 tree virtual_offset
= NULL
;
/* The thunk has exactly one callee edge: the function it forwards to.  */
1578 tree alias
= callees
->callee
->decl
;
1579 tree thunk_fndecl
= decl
;
1582 /* Instrumentation thunk is the same function with
1583 a different signature. Never need to expand it. */
1584 if (thunk
.add_pointer_bounds_args
)
/* Fast path: let the target emit the thunk directly as assembly when it
   can (this-adjusting only, and not forced to gimple).  */
1587 if (!force_gimple_thunk
&& this_adjusting
1588 && targetm
.asm_out
.can_output_mi_thunk (thunk_fndecl
, fixed_offset
,
1589 virtual_value
, alias
))
1593 tree restype
= TREE_TYPE (TREE_TYPE (thunk_fndecl
));
1595 if (!output_asm_thunks
)
1602 get_untransformed_body ();
1603 a
= DECL_ARGUMENTS (thunk_fndecl
);
1605 current_function_decl
= thunk_fndecl
;
1607 /* Ensure thunks are emitted in their correct sections. */
1608 resolve_unique_section (thunk_fndecl
, 0,
1609 flag_function_sections
);
1611 DECL_RESULT (thunk_fndecl
)
1612 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl
),
1613 RESULT_DECL
, 0, restype
);
1614 DECL_CONTEXT (DECL_RESULT (thunk_fndecl
)) = thunk_fndecl
;
1615 fnname
= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl
));
1617 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1619 fn_block
= make_node (BLOCK
);
1620 BLOCK_VARS (fn_block
) = a
;
1621 DECL_INITIAL (thunk_fndecl
) = fn_block
;
1622 allocate_struct_function (thunk_fndecl
, false);
1623 init_function_start (thunk_fndecl
);
1625 insn_locations_init ();
1626 set_curr_insn_location (DECL_SOURCE_LOCATION (thunk_fndecl
));
1627 prologue_location
= curr_insn_location ();
1628 assemble_start_function (thunk_fndecl
, fnname
);
/* Target hook writes the thunk body straight into the asm file.  */
1630 targetm
.asm_out
.output_mi_thunk (asm_out_file
, thunk_fndecl
,
1631 fixed_offset
, virtual_value
, alias
);
1633 assemble_end_function (thunk_fndecl
, fnname
);
1634 insn_locations_finalize ();
1635 init_insn_lengths ();
1636 free_after_compilation (cfun
);
1637 TREE_ASM_WRITTEN (thunk_fndecl
) = 1;
1638 thunk
.thunk_p
= false;
/* Generic gimple lowering cannot forward varargs; diagnose.  */
1641 else if (stdarg_p (TREE_TYPE (thunk_fndecl
)))
1643 error ("generic thunk code fails for method %qD which uses %<...%>",
1645 TREE_ASM_WRITTEN (thunk_fndecl
) = 1;
/* Slow path: lower the thunk to a gimple function body.  */
1652 basic_block bb
, then_bb
, else_bb
, return_bb
;
1653 gimple_stmt_iterator bsi
;
/* A noreturn callee (TREE_THIS_VOLATILE) needs no return handling.  */
1663 bool alias_is_noreturn
= TREE_THIS_VOLATILE (alias
);
1666 get_untransformed_body ();
1667 a
= DECL_ARGUMENTS (thunk_fndecl
);
1669 current_function_decl
= thunk_fndecl
;
1671 /* Ensure thunks are emitted in their correct sections. */
1672 resolve_unique_section (thunk_fndecl
, 0,
1673 flag_function_sections
);
1675 DECL_IGNORED_P (thunk_fndecl
) = 1;
1676 bitmap_obstack_initialize (NULL
);
1678 if (thunk
.virtual_offset_p
)
1679 virtual_offset
= size_int (virtual_value
);
1681 /* Build the return declaration for the function. */
1682 restype
= TREE_TYPE (TREE_TYPE (thunk_fndecl
));
1683 if (DECL_RESULT (thunk_fndecl
) == NULL_TREE
)
1685 resdecl
= build_decl (input_location
, RESULT_DECL
, 0, restype
);
1686 DECL_ARTIFICIAL (resdecl
) = 1;
1687 DECL_IGNORED_P (resdecl
) = 1;
1688 DECL_RESULT (thunk_fndecl
) = resdecl
;
1689 DECL_CONTEXT (DECL_RESULT (thunk_fndecl
)) = thunk_fndecl
;
1692 resdecl
= DECL_RESULT (thunk_fndecl
);
/* Start from a one-block SSA function; all four bb pointers begin at
   that single body block.  */
1694 bb
= then_bb
= else_bb
= return_bb
1695 = init_lowered_empty_function (thunk_fndecl
, true, count
);
1697 bsi
= gsi_start_bb (bb
);
1699 /* Build call to the function being thunked. */
1700 if (!VOID_TYPE_P (restype
) && !alias_is_noreturn
)
1702 if (DECL_BY_REFERENCE (resdecl
))
1704 restmp
= gimple_fold_indirect_ref (resdecl
);
1706 restmp
= build2 (MEM_REF
,
1707 TREE_TYPE (TREE_TYPE (DECL_RESULT (alias
))),
1709 build_int_cst (TREE_TYPE
1710 (DECL_RESULT (alias
)), 0));
1712 else if (!is_gimple_reg_type (restype
))
1714 if (aggregate_value_p (resdecl
, TREE_TYPE (thunk_fndecl
)))
1718 if (TREE_CODE (restmp
) == VAR_DECL
)
1719 add_local_decl (cfun
, restmp
);
1720 BLOCK_VARS (DECL_INITIAL (current_function_decl
)) = restmp
;
1723 restmp
= create_tmp_var (restype
, "retval");
1726 restmp
= create_tmp_reg (restype
, "retval");
/* Count the formal arguments (loop body counting nargs lost).  */
1729 for (arg
= a
; arg
; arg
= DECL_CHAIN (arg
))
1731 auto_vec
<tree
> vargs (nargs
);
/* For a this-adjusting thunk the first argument is `this', adjusted
   in the prologue; remaining arguments are forwarded unchanged.  */
1736 vargs
.quick_push (thunk_adjust (&bsi
, a
, 1, fixed_offset
,
1738 arg
= DECL_CHAIN (a
);
1743 for (; i
< nargs
; i
++, arg
= DECL_CHAIN (arg
))
/* Arguments that are not gimple values get copied into registers.  */
1746 if (!is_gimple_val (arg
))
1748 tmp
= create_tmp_reg (TYPE_MAIN_VARIANT
1749 (TREE_TYPE (arg
)), "arg");
1750 gimple
*stmt
= gimple_build_assign (tmp
, arg
);
1751 gsi_insert_after (&bsi
, stmt
, GSI_NEW_STMT
);
1753 vargs
.quick_push (tmp
);
1755 call
= gimple_build_call_vec (build_fold_addr_expr_loc (0, alias
), vargs
);
1756 callees
->call_stmt
= call
;
1757 gimple_call_set_from_thunk (call
, true);
1758 gimple_call_set_with_bounds (call
, instrumentation_clone
);
1760 /* Return slot optimization is always possible and in fact requred to
1761 return values with DECL_BY_REFERENCE. */
1762 if (aggregate_value_p (resdecl
, TREE_TYPE (thunk_fndecl
))
1763 && (!is_gimple_reg_type (TREE_TYPE (resdecl
))
1764 || DECL_BY_REFERENCE (resdecl
)))
1765 gimple_call_set_return_slot_opt (call
, true);
1767 if (restmp
&& !alias_is_noreturn
)
1769 gimple_call_set_lhs (call
, restmp
);
1770 gcc_assert (useless_type_conversion_p (TREE_TYPE (restmp
),
1771 TREE_TYPE (TREE_TYPE (alias
))));
1773 gsi_insert_after (&bsi
, call
, GSI_NEW_STMT
);
1774 if (!alias_is_noreturn
)
/* MPX instrumentation: fetch return bounds after the call and create a
   callgraph edge for the inserted retbnd call.  */
1776 if (instrumentation_clone
1777 && !DECL_BY_REFERENCE (resdecl
)
1779 && BOUNDED_P (restmp
))
1781 resbnd
= chkp_insert_retbnd_call (NULL
, restmp
, &bsi
);
1782 create_edge (get_create (gimple_call_fndecl (gsi_stmt (bsi
))),
1783 as_a
<gcall
*> (gsi_stmt (bsi
)),
1784 callees
->count
, callees
->frequency
);
/* Result-adjusting thunk: the returned pointer must itself be offset.  */
1787 if (restmp
&& !this_adjusting
1788 && (fixed_offset
|| virtual_offset
))
1790 tree true_label
= NULL_TREE
;
1792 if (TREE_CODE (TREE_TYPE (restmp
)) == POINTER_TYPE
)
1796 /* If the return type is a pointer, we need to
1797 protect against NULL. We know there will be an
1798 adjustment, because that's why we're emitting a
1800 then_bb
= create_basic_block (NULL
, bb
);
1801 then_bb
->count
= count
- count
/ 16;
1802 then_bb
->frequency
= BB_FREQ_MAX
- BB_FREQ_MAX
/ 16;
1803 return_bb
= create_basic_block (NULL
, then_bb
);
1804 return_bb
->count
= count
;
1805 return_bb
->frequency
= BB_FREQ_MAX
;
1806 else_bb
= create_basic_block (NULL
, else_bb
);
1807 then_bb
->count
= count
/ 16;
1808 then_bb
->frequency
= BB_FREQ_MAX
/ 16;
1809 add_bb_to_loop (then_bb
, bb
->loop_father
);
1810 add_bb_to_loop (return_bb
, bb
->loop_father
);
1811 add_bb_to_loop (else_bb
, bb
->loop_father
);
1812 remove_edge (single_succ_edge (bb
));
1813 true_label
= gimple_block_label (then_bb
);
/* Branch on restmp != NULL: the then-arm adjusts the pointer, the
   else-arm yields a literal zero.  */
1814 stmt
= gimple_build_cond (NE_EXPR
, restmp
,
1815 build_zero_cst (TREE_TYPE (restmp
)),
1816 NULL_TREE
, NULL_TREE
);
1817 gsi_insert_after (&bsi
, stmt
, GSI_NEW_STMT
);
1818 e
= make_edge (bb
, then_bb
, EDGE_TRUE_VALUE
);
1819 e
->probability
= REG_BR_PROB_BASE
- REG_BR_PROB_BASE
/ 16;
1820 e
->count
= count
- count
/ 16;
1821 e
= make_edge (bb
, else_bb
, EDGE_FALSE_VALUE
);
1822 e
->probability
= REG_BR_PROB_BASE
/ 16;
1823 e
->count
= count
/ 16;
1824 e
= make_edge (return_bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
1825 e
->probability
= REG_BR_PROB_BASE
;
1827 e
= make_edge (then_bb
, return_bb
, EDGE_FALLTHRU
);
1828 e
->probability
= REG_BR_PROB_BASE
;
1829 e
->count
= count
- count
/ 16;
1830 e
= make_edge (else_bb
, return_bb
, EDGE_FALLTHRU
);
1831 e
->probability
= REG_BR_PROB_BASE
;
1832 e
->count
= count
/ 16;
1833 bsi
= gsi_last_bb (then_bb
);
1836 restmp
= thunk_adjust (&bsi
, restmp
, /*this_adjusting=*/0,
1837 fixed_offset
, virtual_offset
);
1841 bsi
= gsi_last_bb (else_bb
);
1842 stmt
= gimple_build_assign (restmp
,
1843 build_zero_cst (TREE_TYPE (restmp
)));
1844 gsi_insert_after (&bsi
, stmt
, GSI_NEW_STMT
);
1845 bsi
= gsi_last_bb (return_bb
);
1849 gimple_call_set_tail (call
, true);
1851 /* Build return value. */
1852 if (!DECL_BY_REFERENCE (resdecl
))
1853 ret
= gimple_build_return (restmp
);
1855 ret
= gimple_build_return (resdecl
);
1856 gimple_return_set_retbnd (ret
, resbnd
);
1858 gsi_insert_after (&bsi
, ret
, GSI_NEW_STMT
);
/* Noreturn callee: no return statement; the call becomes a tail call
   and the edge to EXIT is severed.  */
1862 gimple_call_set_tail (call
, true);
1863 remove_edge (single_succ_edge (bb
));
1866 cfun
->gimple_df
->in_ssa_p
= true;
1867 profile_status_for_fn (cfun
)
1868 = count
? PROFILE_READ
: PROFILE_GUESSED
;
1869 /* FIXME: C++ FE should stop setting TREE_ASM_WRITTEN on thunks. */
1870 TREE_ASM_WRITTEN (thunk_fndecl
) = false;
1871 delete_unreachable_blocks ();
1872 update_ssa (TODO_update_ssa
);
1873 checking_verify_flow_info ();
1874 free_dominance_info (CDI_DOMINATORS
);
1876 /* Since we want to emit the thunk, we explicitly mark its name as
1878 thunk
.thunk_p
= false;
1880 bitmap_obstack_release (NULL
);
1882 current_function_decl
= NULL
;
1887 /* Assemble thunks and aliases associated to node. */
/* NOTE(review): lossy extraction -- the `void' return-type line, the
   caller-edge advance (`e = e->next_caller') and several braces are
   missing.  Only comments were added in this review pass.  */
1890 cgraph_node::assemble_thunks_and_aliases (void)
/* Every caller edge whose caller is a (non-MPX) thunk represents a thunk
   wrapping this function: expand it, then recurse for thunks-of-thunks.  */
1895 for (e
= callers
; e
;)
1896 if (e
->caller
->thunk
.thunk_p
1897 && !e
->caller
->thunk
.add_pointer_bounds_args
)
1899 cgraph_node
*thunk
= e
->caller
;
1902 thunk
->expand_thunk (true, false);
1903 thunk
->assemble_thunks_and_aliases ();
/* Then emit every alias that refers to this node.  */
1908 FOR_EACH_ALIAS (this, ref
)
1910 cgraph_node
*alias
= dyn_cast
<cgraph_node
*> (ref
->referring
);
1911 bool saved_written
= TREE_ASM_WRITTEN (decl
);
1913 /* Force assemble_alias to really output the alias this time instead
1914 of buffering it in same alias pairs. */
1915 TREE_ASM_WRITTEN (decl
) = 1;
1916 do_assemble_alias (alias
->decl
,
1917 DECL_ASSEMBLER_NAME (decl
));
1918 alias
->assemble_thunks_and_aliases ();
/* Restore the flag so normal emission logic is unaffected.  */
1919 TREE_ASM_WRITTEN (decl
) = saved_written
;
1923 /* Expand function specified by node. */
/* NOTE(review): lossy extraction -- the `void' return-type line, the
   release_body () call region and several braces are missing (gaps in
   the original numbering).  Only comments were added in this pass.  */
1926 cgraph_node::expand (void)
1928 location_t saved_loc
;
1930 /* We ought to not compile any inline clones. */
1931 gcc_assert (!global
.inlined_to
);
1933 announce_function (decl
);
1935 gcc_assert (lowered
);
1936 get_untransformed_body ();
1938 /* Generate RTL for the body of DECL. */
1940 timevar_push (TV_REST_OF_COMPILATION
);
1942 gcc_assert (symtab
->global_info_ready
);
1944 /* Initialize the default bitmap obstack. */
1945 bitmap_obstack_initialize (NULL
);
1947 /* Initialize the RTL code for the function. */
1948 saved_loc
= input_location
;
1949 input_location
= DECL_SOURCE_LOCATION (decl
);
1951 gcc_assert (DECL_STRUCT_FUNCTION (decl
));
1952 push_cfun (DECL_STRUCT_FUNCTION (decl
));
1953 init_function_start (decl
);
1955 gimple_register_cfg_hooks ();
1957 bitmap_obstack_initialize (&reg_obstack
); /* FIXME, only at RTL generation*/
/* Apply queued IPA transformations to this body before local passes.  */
1959 execute_all_ipa_transforms ();
1961 /* Perform all tree transforms and optimizations. */
1963 /* Signal the start of passes. */
1964 invoke_plugin_callbacks (PLUGIN_ALL_PASSES_START
, NULL
);
1966 execute_pass_list (cfun
, g
->get_passes ()->all_passes
);
1968 /* Signal the end of passes. */
1969 invoke_plugin_callbacks (PLUGIN_ALL_PASSES_END
, NULL
);
1971 bitmap_obstack_release (&reg_obstack
);
1973 /* Release the default bitmap obstack. */
1974 bitmap_obstack_release (NULL
);
1976 /* If requested, warn about function definitions where the function will
1977 return a value (usually of some struct or union type) which itself will
1978 take up a lot of stack space. */
1979 if (warn_larger_than
&& !DECL_EXTERNAL (decl
) && TREE_TYPE (decl
))
1981 tree ret_type
= TREE_TYPE (TREE_TYPE (decl
));
1983 if (ret_type
&& TYPE_SIZE_UNIT (ret_type
)
1984 && TREE_CODE (TYPE_SIZE_UNIT (ret_type
)) == INTEGER_CST
1985 && 0 < compare_tree_int (TYPE_SIZE_UNIT (ret_type
),
/* Two warning texts: an exact byte count when it fits an unsigned int,
   otherwise the "larger than" form with the threshold.  */
1988 unsigned int size_as_int
1989 = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type
));
1991 if (compare_tree_int (TYPE_SIZE_UNIT (ret_type
), size_as_int
) == 0)
1992 warning (OPT_Wlarger_than_
, "size of return value of %q+D is %u bytes",
1995 warning (OPT_Wlarger_than_
, "size of return value of %q+D is larger than %wd bytes",
1996 decl
, larger_than_size
);
/* The gimple body is dead after RTL expansion; drop it and, for
   non-nested functions, the local tree data as well.  */
2000 gimple_set_body (decl
, NULL
);
2001 if (DECL_STRUCT_FUNCTION (decl
) == 0
2002 && !cgraph_node::get (decl
)->origin
)
2004 /* Stop pointing to the local nodes about to be freed.
2005 But DECL_INITIAL must remain nonzero so we know this
2006 was an actual function definition.
2007 For a nested function, this is done in c_pop_function_context.
2008 If rest_of_compilation set this to 0, leave it 0. */
2009 if (DECL_INITIAL (decl
) != 0)
2010 DECL_INITIAL (decl
) = error_mark_node
;
2013 input_location
= saved_loc
;
2016 timevar_pop (TV_REST_OF_COMPILATION
);
2018 /* Make sure that BE didn't give up on compiling. */
2019 gcc_assert (TREE_ASM_WRITTEN (decl
));
2023 /* It would make a lot more sense to output thunks before function body to get more
2024 forward and lest backwarding jumps. This however would need solving problem
2025 with comdats. See PR48668. Also aliases must come after function itself to
2026 make one pass assemblers, like one on AIX, happy. See PR 50689.
2027 FIXME: Perhaps thunks should be move before function IFF they are not in comdat
2029 assemble_thunks_and_aliases ();
2031 /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
2032 points to the dead function body. */
2034 remove_all_references ();
2037 /* Node comparer that is responsible for the order that corresponds
2038 to time when a function was launched for the first time. */
/* NOTE(review): qsort comparator; the `static int' declarator line is
   lost in extraction.  Only comments were added in this review pass.  */
2041 node_cmp (const void *pa
, const void *pb
)
2043 const cgraph_node
*a
= *(const cgraph_node
* const *) pa
;
2044 const cgraph_node
*b
= *(const cgraph_node
* const *) pb
;
2046 /* Functions with time profile must be before these without profile. */
2047 if (!a
->tp_first_run
|| !b
->tp_first_run
)
2048 return a
->tp_first_run
- b
->tp_first_run
;
/* Both have a time profile: later tp_first_run sorts first; ties break
   on node order, descending.  NOTE(review): after the guard above the
   `!=' test can only be reached with both values nonzero, so equal
   values fall through to the order comparison -- presumably intended;
   confirm against upstream history if touching this.  */
2050 return a
->tp_first_run
!= b
->tp_first_run
2051 ? b
->tp_first_run
- a
->tp_first_run
2052 : b
->order
- a
->order
;
2055 /* Expand all functions that must be output.
2057 Attempt to topologically sort the nodes so function is output when
2058 all called functions are already assembled to allow data to be
2059 propagated across the callgraph. Use a stack to get smaller distance
2060 between a function and its callees (later we may choose to use a more
2061 sophisticated algorithm for function reordering; we will likely want
2062 to use subsections to make the output functions appear in top-down
/* NOTE(review): lossy extraction -- the `static void' declarator, the
   per-node expansion call inside the main loop, and several braces are
   missing.  Only comments were added in this review pass.  */
2066 expand_all_functions (void)
2069 cgraph_node
**order
= XCNEWVEC (cgraph_node
*,
2070 symtab
->cgraph_count
);
2071 unsigned int expanded_func_count
= 0, profiled_func_count
= 0;
2072 int order_pos
, new_order_pos
= 0;
2075 order_pos
= ipa_reverse_postorder (order
);
2076 gcc_assert (order_pos
== symtab
->cgraph_count
);
2078 /* Garbage collector may remove inline clones we eliminate during
2079 optimization. So we must be sure to not reference them. */
2080 for (i
= 0; i
< order_pos
; i
++)
2081 if (order
[i
]->process
)
2082 order
[new_order_pos
++] = order
[i
];
/* Optional reorder by first-run time profile (see node_cmp).  */
2084 if (flag_profile_reorder_functions
)
2085 qsort (order
, new_order_pos
, sizeof (cgraph_node
*), node_cmp
)
;
/* Expand in reverse so callees precede callers in the output.  */
2087 for (i
= new_order_pos
- 1; i
>= 0; i
--)
2093 expanded_func_count
++;
2094 if(node
->tp_first_run
)
2095 profiled_func_count
++;
2097 if (symtab
->dump_file
)
2098 fprintf (symtab
->dump_file
,
2099 "Time profile order in expand_all_functions:%s:%d\n",
2100 node
->asm_name (), node
->tp_first_run
);
2107 fprintf (dump_file
, "Expanded functions with time profile (%s):%u/%u\n",
2108 main_input_filename
, profiled_func_count
, expanded_func_count
);
2110 if (symtab
->dump_file
&& flag_profile_reorder_functions
)
2111 fprintf (symtab
->dump_file
, "Expanded functions with time profile:%u/%u\n",
2112 profiled_func_count
, expanded_func_count
);
/* Expansion can create new functions (e.g. via transforms); process
   them before returning.  */
2114 symtab
->process_new_functions ();
2115 free_gimplify_stack ();
2120 /* This is used to sort the node types by the cgraph order number. */
/* NOTE(review): lossy extraction -- the remaining enumerators
   (presumably ORDER_FUNCTION / ORDER_VAR / ORDER_ASM, used below in
   output_in_order) and the struct's union member `u' are missing.  */
2122 enum cgraph_order_sort_kind
2124 ORDER_UNDEFINED
= 0,
2130 struct cgraph_order_sort
2132 enum cgraph_order_sort_kind kind
;
2141 /* Output all functions, variables, and asm statements in the order
2142 according to their order fields, which is the order in which they
2143 appeared in the file. This implements -fno-toplevel-reorder. In
2144 this mode we may output functions and variables which don't really
2146 When NO_REORDER is true only do this for symbols marked no reorder. */
/* NOTE(review): lossy extraction -- the `static void' declarator,
   several loop-variable declarations, the `i = pf->order' style index
   assignments, case labels and braces are missing.  Comments only.  */
2149 output_in_order (bool no_reorder
)
2152 cgraph_order_sort
*nodes
;
/* Allocate one slot per symtab order number; unfilled slots stay
   ORDER_UNDEFINED (XCNEWVEC zero-fills).  */
2157 max
= symtab
->order
;
2158 nodes
= XCNEWVEC (cgraph_order_sort
, max
);
/* Record defined functions slated for output (skipping thunks and
   aliases, and honoring the NO_REORDER filter).  */
2160 FOR_EACH_DEFINED_FUNCTION (pf
)
2162 if (pf
->process
&& !pf
->thunk
.thunk_p
&& !pf
->alias
)
2164 if (no_reorder
&& !pf
->no_reorder
)
2167 gcc_assert (nodes
[i
].kind
== ORDER_UNDEFINED
);
2168 nodes
[i
].kind
= ORDER_FUNCTION
;
/* Record non-external defined variables.  */
2173 FOR_EACH_DEFINED_VARIABLE (pv
)
2174 if (!DECL_EXTERNAL (pv
->decl
))
2176 if (no_reorder
&& !pv
->no_reorder
)
2179 gcc_assert (nodes
[i
].kind
== ORDER_UNDEFINED
);
2180 nodes
[i
].kind
= ORDER_VAR
;
/* Record toplevel asm statements.  */
2184 for (pa
= symtab
->first_asm_symbol (); pa
; pa
= pa
->next
)
2187 gcc_assert (nodes
[i
].kind
== ORDER_UNDEFINED
);
2188 nodes
[i
].kind
= ORDER_ASM
;
2192 /* In toplevel reorder mode we output all statics; mark them as needed. */
2194 for (i
= 0; i
< max
; ++i
)
2195 if (nodes
[i
].kind
== ORDER_VAR
)
2196 nodes
[i
].u
.v
->finalize_named_section_flags ();
/* Emit everything in original declaration order.  */
2198 for (i
= 0; i
< max
; ++i
)
2200 switch (nodes
[i
].kind
)
2202 case ORDER_FUNCTION
:
2203 nodes
[i
].u
.f
->process
= 0;
2204 nodes
[i
].u
.f
->expand ();
2208 nodes
[i
].u
.v
->assemble_decl ();
2212 assemble_asm (nodes
[i
].u
.a
->asm_str
);
2215 case ORDER_UNDEFINED
:
2223 symtab
->clear_asm_symbols ();
/* NOTE(review): function header lost to an extraction gap (original
   lines ~2227-2230); from the body this is the driver that runs all IPA
   passes, writes LTO/offload summaries, and runs regular IPA passes when
   not streaming -- presumably `ipa_passes'; confirm against upstream.
   Only comments were added in this review pass.  */
2231 gcc::pass_manager
*passes
= g
->get_passes ();
2234 current_function_decl
= NULL
;
2235 gimple_register_cfg_hooks ();
2236 bitmap_obstack_initialize (NULL
);
2238 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START
, NULL
);
2242 execute_ipa_pass_list (passes
->all_small_ipa_passes
);
2247 /* This extra symtab_remove_unreachable_nodes pass tends to catch some
2248 devirtualization and other changes where removal iterate. */
2249 symtab
->remove_unreachable_nodes (symtab
->dump_file
);
2251 /* If pass_all_early_optimizations was not scheduled, the state of
2252 the cgraph will not be properly updated. Update it now. */
2253 if (symtab
->state
< IPA_SSA
)
2254 symtab
->state
= IPA_SSA
;
2258 /* Generate coverage variables and constructors. */
2261 /* Process new functions added. */
2263 current_function_decl
= NULL
;
2264 symtab
->process_new_functions ();
2266 execute_ipa_summary_passes
2267 ((ipa_opt_pass_d
*) passes
->all_regular_ipa_passes
);
2270 /* Some targets need to handle LTO assembler output specially. */
2271 if (flag_generate_lto
|| flag_generate_offload
)
2272 targetm
.asm_out
.lto_start ();
/* Write offload summaries first (into OFFLOAD_SECTION_NAME_PREFIX
   sections), then the regular LTO summaries.  */
2276 if (g
->have_offload
)
2278 section_name_prefix
= OFFLOAD_SECTION_NAME_PREFIX
;
2279 lto_stream_offload_p
= true;
2280 ipa_write_summaries ();
2281 lto_stream_offload_p
= false;
2285 section_name_prefix
= LTO_SECTION_NAME_PREFIX
;
2286 lto_stream_offload_p
= false;
2287 ipa_write_summaries ();
2291 if (flag_generate_lto
|| flag_generate_offload
)
2292 targetm
.asm_out
.lto_end ();
/* Run the regular IPA passes now unless this is a non-fat -flto compile
   (then they run at link time in the ltrans stage).  */
2294 if (!flag_ltrans
&& (in_lto_p
|| !flag_lto
|| flag_fat_lto_objects
))
2295 execute_ipa_pass_list (passes
->all_regular_ipa_passes
);
2296 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END
, NULL
);
2298 bitmap_obstack_release (NULL
);
2302 /* Return string alias is alias of. */
2305 get_alias_symbol (tree decl
)
2307 tree alias
= lookup_attribute ("alias", DECL_ATTRIBUTES (decl
));
2308 return get_identifier (TREE_STRING_POINTER
2309 (TREE_VALUE (TREE_VALUE (alias
))));
2313 /* Weakrefs may be associated to external decls and thus not output
2314 at expansion time. Emit all necessary aliases. */
/* NOTE(review): fragment — the return type, braces and the opening of
   the per-symbol guard (the `if (node->alias ...' head) are not visible
   in this extract.  */
2317 symbol_table::output_weakrefs (void)
/* Walk every symtab entry; the visible guard requires the decl not to
   have been written to asm yet (and, for cgraph nodes with an
   instrumented_version, that counterpart's decl to be unwritten too).  */
2321 FOR_EACH_SYMBOL (node
)
2323 && !TREE_ASM_WRITTEN (node
->decl
)
2324 && (!(cnode
= dyn_cast
<cgraph_node
*> (node
))
2325 || !cnode
->instrumented_version
2326 || !TREE_ASM_WRITTEN (cnode
->instrumented_version
->decl
))
2331 /* Weakrefs are special by not requiring target definition in current
2332 compilation unit. It is thus bit hard to work out what we want to
2334 When alias target is defined, we need to fetch it from symtab reference,
2335 otherwise it is pointed to by alias_target. */
/* Prefer the explicit alias_target field; a DECL there is reduced to
   its assembler name.  */
2336 if (node
->alias_target
)
2337 target
= (DECL_P (node
->alias_target
)
2338 ? DECL_ASSEMBLER_NAME (node
->alias_target
)
2339 : node
->alias_target
);
/* For analyzed nodes the target is fetched via the symtab alias
   reference.  */
2340 else if (node
->analyzed
)
2341 target
= DECL_ASSEMBLER_NAME (node
->get_alias_target ()->decl
);
/* Last resort: dig the name out of the decl's "alias" attribute.  */
2345 target
= get_alias_symbol (node
->decl
);
2347 do_assemble_alias (node
->decl
, target
);
2351 /* Perform simple optimizations based on callgraph. */
/* NOTE(review): fragment of symbol_table::compile — braces and several
   guards (e.g. the seen_error / dump-file conditions whose heads are
   missing here) fall outside this extract; comments describe only the
   visible code.  */
2354 symbol_table::compile (void)
2359 symtab_node::checking_verify_symtab_nodes ();
/* Time the whole IPA phase under TV_CGRAPHOPT.  */
2361 timevar_push (TV_CGRAPHOPT
);
2362 if (pre_ipa_mem_report
)
2364 fprintf (stderr
, "Memory consumption before IPA\n");
2365 dump_memory_report (false);
2368 fprintf (stderr
, "Performing interprocedural optimizations\n");
2371 /* Offloading requires LTO infrastructure. */
2372 if (!in_lto_p
&& g
->have_offload
)
2373 flag_generate_offload
= 1;
2375 /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE. */
2376 if (flag_generate_lto
|| flag_generate_offload
)
2377 lto_streamer_hooks_init ();
2379 /* Don't run the IPA passes if there was any error or sorry messages. */
2383 /* Do nothing else if any IPA pass found errors or if we are just streaming LTO. */
2385 || (!in_lto_p
&& flag_lto
&& !flag_fat_lto_objects
))
/* Early-exit path: stop the timer and bail out (the head of the
   enclosing condition is not fully visible in this extract).  */
2387 timevar_pop (TV_CGRAPHOPT
);
2391 global_info_ready
= true;
2394 fprintf (dump_file
, "Optimized ");
2395 symtab_node:: dump_table (dump_file
);
2397 if (post_ipa_mem_report
)
2399 fprintf (stderr
, "Memory consumption after IPA\n");
2400 dump_memory_report (false);
2402 timevar_pop (TV_CGRAPHOPT
);
2404 /* Output everything. */
2405 (*debug_hooks
->assembly_start
) ();
2407 fprintf (stderr
, "Assembling functions:\n");
2408 symtab_node::checking_verify_symtab_nodes ();
/* Materialize clones and run the late IPA passes before deciding which
   functions actually get output.  */
2410 materialize_all_clones ();
2411 bitmap_obstack_initialize (NULL
);
2412 execute_ipa_pass_list (g
->get_passes ()->all_late_ipa_passes
);
2413 bitmap_obstack_release (NULL
);
2414 mark_functions_to_output ();
2416 /* When weakref support is missing, we autmatically translate all
2417 references to NODE to references to its ultimate alias target.
2418 The renaming mechanizm uses flag IDENTIFIER_TRANSPARENT_ALIAS and
2421 Set up this mapping before we output any assembler but once we are sure
2422 that all symbol renaming is done.
2424 FIXME: All this uglyness can go away if we just do renaming at gimple
2425 level by physically rewritting the IL. At the moment we can only redirect
2426 calls, so we need infrastructure for renaming references as well. */
2427 #ifndef ASM_OUTPUT_WEAKREF
2430 FOR_EACH_SYMBOL (node
)
2432 && lookup_attribute ("weakref", DECL_ATTRIBUTES (node
->decl
)))
/* Mark the assembler name as a transparent alias and chain it to the
   name of the ultimate target.  */
2434 IDENTIFIER_TRANSPARENT_ALIAS
2435 (DECL_ASSEMBLER_NAME (node
->decl
)) = 1;
2436 TREE_CHAIN (DECL_ASSEMBLER_NAME (node
->decl
))
2437 = (node
->alias_target
? node
->alias_target
2438 : DECL_ASSEMBLER_NAME (node
->get_alias_target ()->decl
));
2444 if (!flag_toplevel_reorder
)
2445 output_in_order (false);
2448 /* Output first asm statements and anything ordered. The process
2449 flag is cleared for these nodes, so we skip them later. */
2450 output_in_order (true);
2451 expand_all_functions ();
2452 output_variables ();
2455 process_new_functions ();
2461 fprintf (dump_file
, "\nFinal ");
2462 symtab_node::dump_table (dump_file
);
2466 symtab_node::verify_symtab_nodes ();
2467 /* Double check that all inline clones are gone and that all
2468 function bodies have been released from memory. */
2472 bool error_found
= false;
2474 FOR_EACH_DEFINED_FUNCTION (node
)
2475 if (node
->global
.inlined_to
2476 || gimple_has_body_p (node
->decl
))
2482 internal_error ("nodes with unreleased memory found");
2487 /* Analyze the whole compilation unit once it is parsed completely. */
/* NOTE(review): fragment — the return type, braces and a few guards
   (e.g. around the stderr message and the early-debug hooks) are not
   visible in this extract.  */
2490 symbol_table::finalize_compilation_unit (void)
2492 timevar_push (TV_CGRAPH
);
2494 /* If we're here there's no current function anymore. Some frontends
2495 are lazy in clearing these. */
2496 current_function_decl
= NULL
;
2499 /* Do not skip analyzing the functions if there were errors, we
2500 miss diagnostics for following functions otherwise. */
2502 /* Emit size functions we didn't inline. */
2503 finalize_size_functions ();
2505 /* Mark alias targets necessary and emit diagnostics. */
2506 handle_alias_pairs ();
2510 fprintf (stderr
, "\nAnalyzing compilation unit\n");
2514 if (flag_dump_passes
)
2517 /* Gimplify and lower all functions, compute reachability and
2518 remove unreachable nodes. */
2519 analyze_functions (/*first_time=*/true);
2521 /* Mark alias targets necessary and emit diagnostics. */
2522 handle_alias_pairs ();
2524 /* Gimplify and lower thunks. */
2525 analyze_functions (/*first_time=*/false);
2529 /* Emit early debug for reachable functions, and by consequence,
2530 locally scoped symbols. */
/* The early_global_decl debug hook is invoked once per function that
   still has a gimple body at this point.  */
2531 struct cgraph_node
*cnode
;
2532 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (cnode
)
2533 (*debug_hooks
->early_global_decl
) (cnode
->decl
);
2535 /* Clean up anything that needs cleaning up after initial debug
2537 (*debug_hooks
->early_finish
) ();
2540 /* Finally drive the pass manager. */
2543 timevar_pop (TV_CGRAPH
);
2546 /* Reset all state within cgraphunit.c so that we can rerun the compiler
2547 within the same process. For use by toplev::finalize. */
2550 cgraphunit_c_finalize (void)
2552 gcc_assert (cgraph_new_nodes
.length () == 0);
2553 cgraph_new_nodes
.truncate (0);
2555 vtable_entry_type
= NULL
;
2556 queued_nodes
= &symtab_terminator
;
2558 first_analyzed
= NULL
;
2559 first_analyzed_var
= NULL
;
2562 /* Creates a wrapper from cgraph_node to TARGET node. Thunk is used for this
2563 kind of wrapper method. */
/* NOTE(review): fragment — the return type, braces, the loop head over
   DECL_ARGUMENTS and a few statements between the visible ones fall
   outside this extract.  */
2566 cgraph_node::create_wrapper (cgraph_node
*target
)
2568 /* Preserve DECL_RESULT so we get right by reference flag. */
2569 tree decl_result
= DECL_RESULT (decl
);
2571 /* Remove the function's body but keep arguments to be reused
2573 release_body (true);
2576 DECL_UNINLINABLE (decl
) = false;
2577 DECL_RESULT (decl
) = decl_result
;
2578 DECL_INITIAL (decl
) = NULL
;
2579 allocate_struct_function (decl
, false);
2582 /* Turn alias into thunk and expand it into GIMPLE representation. */
2585 memset (&thunk
, 0, sizeof (cgraph_thunk_info
));
2586 thunk
.thunk_p
= true;
/* The wrapper's only callee is TARGET; it can throw externally unless
   TARGET's decl is declared nothrow.  */
2587 create_edge (target
, NULL
, count
, CGRAPH_FREQ_BASE
);
2588 callees
->can_throw_external
= !TREE_NOTHROW (target
->decl
);
/* Clear TREE_ADDRESSABLE on each argument while walking the chain (the
   loop head itself is in a gap of this extract).  */
2590 tree arguments
= DECL_ARGUMENTS (decl
);
2594 TREE_ADDRESSABLE (arguments
) = false;
2595 arguments
= TREE_CHAIN (arguments
);
2598 expand_thunk (false, true);
2600 /* Inline summary set-up. */
2602 inline_analyze_function (this);
2605 #include "gt-cgraphunit.h"