gcc/cgraphunit.c
1 /* Driver of optimization process
2 Copyright (C) 2003-2018 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* This module implements main driver of compilation process.
23 The main scope of this file is to act as an interface in between
24 tree based frontends and the backend.
26 The front-end is supposed to use following functionality:
28 - finalize_function
30 This function is called once the front-end has parsed the whole body of the function
31 and it is certain that neither the function body nor the declaration will change.
33 (There is one exception needed for implementing GCC extern inline
34 function.)
36 - varpool_finalize_decl
38 This function has same behavior as the above but is used for static
39 variables.
41 - add_asm_node
43 Insert new toplevel ASM statement
45 - finalize_compilation_unit
47 This function is called once (source level) compilation unit is finalized
48 and it will no longer change.
50 The symbol table is constructed starting from the trivially needed
51 symbols finalized by the frontend. Functions are lowered into
52 GIMPLE representation and callgraph/reference lists are constructed.
53 Those are used to discover other necessary functions and variables.
55 At the end the bodies of unreachable functions are removed.
57 The function can be called multiple times when multiple source level
58 compilation units are combined.
60 - compile
62 This passes control to the back-end. Optimizations are performed and
63 final assembler is generated. This is done in the following way. Note
64 that with link time optimization the process is split into three
65 stages (compile time, linktime analysis and parallel linktime as
66 indicated below).
68 Compile time:
70 1) Inter-procedural optimization.
71 (ipa_passes)
73 This part is further split into:
75 a) early optimizations. These are local passes executed in
76 the topological order on the callgraph.
78 The purpose of early optimizations is to optimize away simple
79 things that may otherwise confuse IP analysis. Very simple
80 propagation across the callgraph is done i.e. to discover
81 functions without side effects and simple inlining is performed.
83 b) early small interprocedural passes.
85 Those are interprocedural passes executed only at compilation
86 time. These include, for example, transactional memory lowering,
87 unreachable code removal and other simple transformations.
89 c) IP analysis stage. All interprocedural passes do their
90 analysis.
92 Interprocedural passes differ from small interprocedural
93 passes by their ability to operate across whole program
94 at linktime. Their analysis stage is performed early to
95 both reduce linking times and linktime memory usage by
96 not having to represent whole program in memory.
98 d) LTO streaming. When doing LTO, everything important gets
99 streamed into the object file.
101 Compile time and/or linktime analysis stage (WPA):
103 At linktime the units get streamed back and the symbol table is
104 merged. Function bodies are not streamed in and not
105 available.
106 e) IP propagation stage. All IP passes execute their
107 IP propagation. This is done based on the earlier analysis
108 without having function bodies at hand.
109 f) Ltrans streaming. When doing WHOPR LTO, the program
110 is partitioned and streamed into multiple object files.
112 Compile time and/or parallel linktime stage (ltrans)
114 Each of the object files is streamed back and compiled
115 separately. Now the function bodies become available
116 again.
118 2) Virtual clone materialization
119 (cgraph_materialize_clone)
121 IP passes can produce copies of existing functions (such
122 as versioned clones or inline clones) without actually
123 manipulating their bodies by creating virtual clones in
124 the callgraph. At this time the virtual clones are
125 turned into real functions.
126 3) IP transformation
128 All IP passes transform function bodies based on earlier
129 decision of the IP propagation.
131 4) late small IP passes
133 Simple IP passes working within single program partition.
135 5) Expansion
136 (expand_all_functions)
138 At this stage functions that need to be output into
139 assembler are identified and compiled in topological order.
140 6) Output of variables and aliases
141 Now it is known what variable references were not optimized
142 out and thus all variables are output to the file.
144 Note that with -fno-toplevel-reorder passes 5 and 6
145 are combined together in cgraph_output_in_order.
147 Finally there are functions to manipulate the callgraph from
148 backend.
149 - cgraph_add_new_function is used to add backend produced
150 functions introduced after the unit is finalized.
151 The functions are enqueued for later processing and inserted
152 into callgraph with cgraph_process_new_functions.
154 - cgraph_function_versioning
156 produces a copy of a function into a new one (a version)
157 and applies simple transformations. */
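/* Illustrative sketch only (not part of the code in this file): a frontend
   typically drives the interface documented above along these lines. Exact
   entry points and signatures vary between releases, so treat the names
   below as approximations of the documented hooks:

       tree fndecl = ...;                      parsed function definition
       cgraph_node::finalize_function (fndecl, false);

       tree vardecl = ...;                     parsed file-scope static
       varpool_node::finalize_decl (vardecl);

       ... toplevel asm statements are added via the asm node hook ...

       Once the whole translation unit has been parsed:
       symtab->finalize_compilation_unit ();  */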
160 #include "config.h"
161 #include "system.h"
162 #include "coretypes.h"
163 #include "backend.h"
164 #include "target.h"
165 #include "rtl.h"
166 #include "tree.h"
167 #include "gimple.h"
168 #include "cfghooks.h"
169 #include "regset.h" /* FIXME: For reg_obstack. */
170 #include "alloc-pool.h"
171 #include "tree-pass.h"
172 #include "stringpool.h"
173 #include "gimple-ssa.h"
174 #include "cgraph.h"
175 #include "coverage.h"
176 #include "lto-streamer.h"
177 #include "fold-const.h"
178 #include "varasm.h"
179 #include "stor-layout.h"
180 #include "output.h"
181 #include "cfgcleanup.h"
182 #include "gimple-fold.h"
183 #include "gimplify.h"
184 #include "gimple-iterator.h"
185 #include "gimplify-me.h"
186 #include "tree-cfg.h"
187 #include "tree-into-ssa.h"
188 #include "tree-ssa.h"
189 #include "langhooks.h"
190 #include "toplev.h"
191 #include "debug.h"
192 #include "symbol-summary.h"
193 #include "tree-vrp.h"
194 #include "ipa-prop.h"
195 #include "gimple-pretty-print.h"
196 #include "plugin.h"
197 #include "ipa-fnsummary.h"
198 #include "ipa-utils.h"
199 #include "except.h"
200 #include "cfgloop.h"
201 #include "context.h"
202 #include "pass_manager.h"
203 #include "tree-nested.h"
204 #include "dbgcnt.h"
205 #include "lto-section-names.h"
206 #include "stringpool.h"
207 #include "attribs.h"
209 /* Queue of cgraph nodes scheduled to be added into cgraph. This is a
210 secondary queue used during optimization to accommodate passes that
211 may generate new functions that need to be optimized and expanded. */
212 vec<cgraph_node *> cgraph_new_nodes;
214 static void expand_all_functions (void);
215 static void mark_functions_to_output (void);
216 static void handle_alias_pairs (void);
218 /* Used for vtable lookup in thunk adjusting. */
219 static GTY (()) tree vtable_entry_type;
221 /* Return true if this symbol is a function from the C frontend specified
222 directly in RTL form (with "__RTL"). */
224 bool
225 symtab_node::native_rtl_p () const
227 if (TREE_CODE (decl) != FUNCTION_DECL)
228 return false;
229 if (!DECL_STRUCT_FUNCTION (decl))
230 return false;
231 return DECL_STRUCT_FUNCTION (decl)->curr_properties & PROP_rtl;
234 /* Determine if symbol declaration is needed. That is, visible to something
235 either outside this translation unit or something magic in the system
236 configury. */
237 bool
238 symtab_node::needed_p (void)
240 /* Double check that no one output the function into assembly file
241 early. */
242 if (!native_rtl_p ())
243 gcc_checking_assert
244 (!DECL_ASSEMBLER_NAME_SET_P (decl)
245 || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)));
247 if (!definition)
248 return false;
250 if (DECL_EXTERNAL (decl))
251 return false;
253 /* If the user told us it is used, then it must be so. */
254 if (force_output)
255 return true;
257 /* ABI forced symbols are needed when they are external. */
258 if (forced_by_abi && TREE_PUBLIC (decl))
259 return true;
261 /* Keep constructors, destructors and virtual functions. */
262 if (TREE_CODE (decl) == FUNCTION_DECL
263 && (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl)))
264 return true;
266 /* Externally visible variables must be output. The exception is
267 COMDAT variables that must be output only when they are needed. */
268 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
269 return true;
271 return false;
274 /* Head and terminator of the queue of nodes to be processed while building
275 callgraph. */
277 static symtab_node symtab_terminator;
278 static symtab_node *queued_nodes = &symtab_terminator;
280 /* Add NODE to queue starting at QUEUED_NODES.
281 The queue is linked via AUX pointers and terminated by a pointer to SYMTAB_TERMINATOR. */
283 static void
284 enqueue_node (symtab_node *node)
286 if (node->aux)
287 return;
288 gcc_checking_assert (queued_nodes);
289 node->aux = queued_nodes;
290 queued_nodes = node;
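/* For illustration, the queue built here is drained later in
   analyze_functions with a loop of roughly this shape (simplified, not a
   verbatim copy of the code below):

       while (queued_nodes != &symtab_terminator)
         {
           symtab_node *node = queued_nodes;
           queued_nodes = (symtab_node *) node->aux;
           ... analyze NODE, possibly enqueueing further symbols ...
         }  */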
293 /* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
294 functions into the callgraph so that they look like ordinary reachable
295 functions inserted into the callgraph already at construction time. */
297 void
298 symbol_table::process_new_functions (void)
300 tree fndecl;
302 if (!cgraph_new_nodes.exists ())
303 return;
305 handle_alias_pairs ();
306 /* Note that this queue may grow as it is being processed, as the new
307 functions may generate new ones. */
308 for (unsigned i = 0; i < cgraph_new_nodes.length (); i++)
310 cgraph_node *node = cgraph_new_nodes[i];
311 fndecl = node->decl;
312 switch (state)
314 case CONSTRUCTION:
315 /* At construction time we just need to finalize the function and move
316 it into the reachable functions list. */
318 cgraph_node::finalize_function (fndecl, false);
319 call_cgraph_insertion_hooks (node);
320 enqueue_node (node);
321 break;
323 case IPA:
324 case IPA_SSA:
325 case IPA_SSA_AFTER_INLINING:
326 /* When IPA optimization has already started, do all essential
327 transformations that have already been performed on the whole
328 cgraph but not on this function. */
330 gimple_register_cfg_hooks ();
331 if (!node->analyzed)
332 node->analyze ();
333 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
334 if ((state == IPA_SSA || state == IPA_SSA_AFTER_INLINING)
335 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
337 bool summaried_computed = ipa_fn_summaries != NULL;
338 g->get_passes ()->execute_early_local_passes ();
339 /* Early passes compute inline parameters to do inlining
340 and splitting. This is redundant for functions added late.
341 Just throw away whatever it did. */
342 if (!summaried_computed)
343 ipa_free_fn_summary ();
345 else if (ipa_fn_summaries != NULL)
346 compute_fn_summary (node, true);
347 free_dominance_info (CDI_POST_DOMINATORS);
348 free_dominance_info (CDI_DOMINATORS);
349 pop_cfun ();
350 call_cgraph_insertion_hooks (node);
351 break;
353 case EXPANSION:
354 /* Functions created during expansion shall be compiled
355 directly. */
356 node->process = 0;
357 call_cgraph_insertion_hooks (node);
358 node->expand ();
359 break;
361 default:
362 gcc_unreachable ();
363 break;
367 cgraph_new_nodes.release ();
370 /* As a GCC extension we allow redefinition of the function. The
371 semantics when both copies of the body differ are not well defined.
372 We replace the old body with the new body so in unit-at-a-time mode
373 we always use the new body, while in normal mode we may end up with
374 the old body inlined into some functions and the new body expanded and
375 inlined in others.
377 ??? It may make more sense to use one body for inlining and the other
378 body for expanding the function but this is difficult to do. */
380 void
381 cgraph_node::reset (void)
383 /* If process is set, then we have already begun whole-unit analysis.
384 This is *not* testing for whether we've already emitted the function.
385 That case can be sort-of legitimately seen with real function redefinition
386 errors. I would argue that the front end should never present us with
387 such a case, but don't enforce that for now. */
388 gcc_assert (!process);
390 /* Reset our data structures so we can analyze the function again. */
391 memset (&local, 0, sizeof (local));
392 memset (&global, 0, sizeof (global));
393 memset (&rtl, 0, sizeof (rtl));
394 analyzed = false;
395 definition = false;
396 alias = false;
397 transparent_alias = false;
398 weakref = false;
399 cpp_implicit_alias = false;
401 remove_callees ();
402 remove_all_references ();
405 /* Return true when there are references to the node. INCLUDE_SELF is
406 true if a self reference counts as a reference. */
408 bool
409 symtab_node::referred_to_p (bool include_self)
411 ipa_ref *ref = NULL;
413 /* See if there are any references at all. */
414 if (iterate_referring (0, ref))
415 return true;
416 /* For functions check also calls. */
417 cgraph_node *cn = dyn_cast <cgraph_node *> (this);
418 if (cn && cn->callers)
420 if (include_self)
421 return true;
422 for (cgraph_edge *e = cn->callers; e; e = e->next_caller)
423 if (e->caller != this)
424 return true;
426 return false;
429 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
430 logic in effect. If NO_COLLECT is true, then our caller cannot stand to have
431 the garbage collector run at the moment. We would need to either create
432 a new GC context, or just not compile right now. */
434 void
435 cgraph_node::finalize_function (tree decl, bool no_collect)
437 cgraph_node *node = cgraph_node::get_create (decl);
439 if (node->definition)
441 /* Nested functions should only be defined once. */
442 gcc_assert (!DECL_CONTEXT (decl)
443 || TREE_CODE (DECL_CONTEXT (decl)) != FUNCTION_DECL);
444 node->reset ();
445 node->local.redefined_extern_inline = true;
448 /* Set definition first before calling notice_global_symbol so that
449 it is available to notice_global_symbol. */
450 node->definition = true;
451 notice_global_symbol (decl);
452 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
453 if (!flag_toplevel_reorder)
454 node->no_reorder = true;
456 /* With -fkeep-inline-functions we are keeping all inline functions except
457 for extern inline ones. */
458 if (flag_keep_inline_functions
459 && DECL_DECLARED_INLINE_P (decl)
460 && !DECL_EXTERNAL (decl)
461 && !DECL_DISREGARD_INLINE_LIMITS (decl))
462 node->force_output = 1;
464 /* __RTL functions were already output as soon as they were parsed (due
465 to the large amount of global state in the backend).
466 Mark such functions as "force_output" to reflect the fact that they
467 will be in the asm file when considering the symbols they reference.
468 The attempt to output them later on will bail out immediately. */
469 if (node->native_rtl_p ())
470 node->force_output = 1;
472 /* When not optimizing, also output the static functions. (see
473 PR24561), but don't do so for always_inline functions, functions
474 declared inline and nested functions. These were optimized out
475 in the original implementation and it is unclear whether we want
476 to change the behavior here. */
477 if (((!opt_for_fn (decl, optimize) || flag_keep_static_functions
478 || node->no_reorder)
479 && !node->cpp_implicit_alias
480 && !DECL_DISREGARD_INLINE_LIMITS (decl)
481 && !DECL_DECLARED_INLINE_P (decl)
482 && !(DECL_CONTEXT (decl)
483 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL))
484 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
485 node->force_output = 1;
487 /* If we've not yet emitted decl, tell the debug info about it. */
488 if (!TREE_ASM_WRITTEN (decl))
489 (*debug_hooks->deferred_inline_function) (decl);
491 if (!no_collect)
492 ggc_collect ();
494 if (symtab->state == CONSTRUCTION
495 && (node->needed_p () || node->referred_to_p ()))
496 enqueue_node (node);
499 /* Add the function FNDECL to the call graph.
500 Unlike finalize_function, this function is intended to be used
501 by the middle end and allows insertion of a new function at an arbitrary point
502 of compilation. The function can be either in high, low or SSA form
503 GIMPLE.
505 The function is assumed to be reachable and have address taken (so no
506 API breaking optimizations are performed on it).
508 The main work done by this function is to enqueue the function for later
509 processing to avoid the need for the passes to be re-entrant. */
511 void
512 cgraph_node::add_new_function (tree fndecl, bool lowered)
514 gcc::pass_manager *passes = g->get_passes ();
515 cgraph_node *node;
517 if (dump_file)
519 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
520 const char *function_type = ((gimple_has_body_p (fndecl))
521 ? (lowered
522 ? (gimple_in_ssa_p (fn)
523 ? "ssa gimple"
524 : "low gimple")
525 : "high gimple")
526 : "to-be-gimplified");
527 fprintf (dump_file,
528 "Added new %s function %s to callgraph\n",
529 function_type,
530 fndecl_name (fndecl));
533 switch (symtab->state)
535 case PARSING:
536 cgraph_node::finalize_function (fndecl, false);
537 break;
538 case CONSTRUCTION:
539 /* Just enqueue function to be processed at nearest occurrence. */
540 node = cgraph_node::get_create (fndecl);
541 if (lowered)
542 node->lowered = true;
543 cgraph_new_nodes.safe_push (node);
544 break;
546 case IPA:
547 case IPA_SSA:
548 case IPA_SSA_AFTER_INLINING:
549 case EXPANSION:
550 /* Bring the function into finalized state and enqueue for later
551 analyzing and compilation. */
552 node = cgraph_node::get_create (fndecl);
553 node->local.local = false;
554 node->definition = true;
555 node->force_output = true;
556 if (TREE_PUBLIC (fndecl))
557 node->externally_visible = true;
558 if (!lowered && symtab->state == EXPANSION)
560 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
561 gimple_register_cfg_hooks ();
562 bitmap_obstack_initialize (NULL);
563 execute_pass_list (cfun, passes->all_lowering_passes);
564 passes->execute_early_local_passes ();
565 bitmap_obstack_release (NULL);
566 pop_cfun ();
568 lowered = true;
570 if (lowered)
571 node->lowered = true;
572 cgraph_new_nodes.safe_push (node);
573 break;
575 case FINISHED:
576 /* At the very end of compilation we have to do all the work up
577 to expansion. */
578 node = cgraph_node::create (fndecl);
579 if (lowered)
580 node->lowered = true;
581 node->definition = true;
582 node->analyze ();
583 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
584 gimple_register_cfg_hooks ();
585 bitmap_obstack_initialize (NULL);
586 if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
587 g->get_passes ()->execute_early_local_passes ();
588 bitmap_obstack_release (NULL);
589 pop_cfun ();
590 node->expand ();
591 break;
593 default:
594 gcc_unreachable ();
597 /* Set a personality if required and we already passed EH lowering. */
598 if (lowered
599 && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
600 == eh_personality_lang))
601 DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
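/* Illustrative use only: a pass that synthesizes a helper function after
   the unit has been finalized would do something along these lines; the
   build_helper_* names are hypothetical placeholders:

       tree fndecl = build_helper_fndecl ();              hypothetical
       gimple_set_body (fndecl, build_helper_body ());    hypothetical body
       cgraph_node::add_new_function (fndecl, false);     not yet lowered

   In the IPA and EXPANSION states the new node is queued and later picked
   up by symbol_table::process_new_functions above.  */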
604 /* Analyze the function scheduled to be output. */
605 void
606 cgraph_node::analyze (void)
608 if (native_rtl_p ())
610 analyzed = true;
611 return;
614 tree decl = this->decl;
615 location_t saved_loc = input_location;
616 input_location = DECL_SOURCE_LOCATION (decl);
618 if (thunk.thunk_p)
620 cgraph_node *t = cgraph_node::get (thunk.alias);
622 create_edge (t, NULL, t->count);
623 callees->can_throw_external = !TREE_NOTHROW (t->decl);
624 /* Target code in expand_thunk may need the thunk's target
625 to be analyzed, so recurse here. */
626 if (!t->analyzed)
627 t->analyze ();
628 if (t->alias)
630 t = t->get_alias_target ();
631 if (!t->analyzed)
632 t->analyze ();
634 if (!expand_thunk (false, false))
636 thunk.alias = NULL;
637 return;
639 thunk.alias = NULL;
641 if (alias)
642 resolve_alias (cgraph_node::get (alias_target), transparent_alias);
643 else if (dispatcher_function)
645 /* Generate the dispatcher body of multi-versioned functions. */
646 cgraph_function_version_info *dispatcher_version_info
647 = function_version ();
648 if (dispatcher_version_info != NULL
649 && (dispatcher_version_info->dispatcher_resolver
650 == NULL_TREE))
652 tree resolver = NULL_TREE;
653 gcc_assert (targetm.generate_version_dispatcher_body);
654 resolver = targetm.generate_version_dispatcher_body (this);
655 gcc_assert (resolver != NULL_TREE);
658 else
660 push_cfun (DECL_STRUCT_FUNCTION (decl));
662 assign_assembler_name_if_needed (decl);
664 /* Make sure to gimplify bodies only once. While analyzing a
665 function we lower it, which will require gimplified nested
666 functions, so we can end up here with an already gimplified
667 body. */
668 if (!gimple_has_body_p (decl))
669 gimplify_function_tree (decl);
671 /* Lower the function. */
672 if (!lowered)
674 if (nested)
675 lower_nested_functions (decl);
676 gcc_assert (!nested);
678 gimple_register_cfg_hooks ();
679 bitmap_obstack_initialize (NULL);
680 execute_pass_list (cfun, g->get_passes ()->all_lowering_passes);
681 free_dominance_info (CDI_POST_DOMINATORS);
682 free_dominance_info (CDI_DOMINATORS);
683 compact_blocks ();
684 bitmap_obstack_release (NULL);
685 lowered = true;
688 pop_cfun ();
690 analyzed = true;
692 input_location = saved_loc;
695 /* The C++ frontend produces same body aliases all over the place, even before PCH
696 gets streamed out. It relies on us linking the aliases with their function
697 in order to do the fixups, but ipa-ref is not PCH safe. Consequently we
698 first produce aliases without links, but once the C++ FE is sure it won't stream
699 PCH we build the links via this function. */
701 void
702 symbol_table::process_same_body_aliases (void)
704 symtab_node *node;
705 FOR_EACH_SYMBOL (node)
706 if (node->cpp_implicit_alias && !node->analyzed)
707 node->resolve_alias
708 (VAR_P (node->alias_target)
709 ? (symtab_node *)varpool_node::get_create (node->alias_target)
710 : (symtab_node *)cgraph_node::get_create (node->alias_target));
711 cpp_implicit_aliases_done = true;
714 /* Process attributes common for vars and functions. */
716 static void
717 process_common_attributes (symtab_node *node, tree decl)
719 tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
721 if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
723 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
724 "%<weakref%> attribute should be accompanied with"
725 " an %<alias%> attribute");
726 DECL_WEAK (decl) = 0;
727 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
728 DECL_ATTRIBUTES (decl));
731 if (lookup_attribute ("no_reorder", DECL_ATTRIBUTES (decl)))
732 node->no_reorder = 1;
735 /* Look for externally_visible and used attributes and mark cgraph nodes
736 accordingly.
738 We cannot mark the nodes at the point the attributes are processed (in
739 handle_*_attribute) because the copy of the declarations available at that
740 point may not be canonical. For example, in:
742 void f();
743 void f() __attribute__((used));
745 the declaration we see in handle_used_attribute will be the second
746 declaration -- but the front end will subsequently merge that declaration
747 with the original declaration and discard the second declaration.
749 Furthermore, we can't mark these nodes in finalize_function because:
751 void f() {}
752 void f() __attribute__((externally_visible));
754 is valid.
756 So, we walk the nodes at the end of the translation unit, applying the
757 attributes at that point. */
759 static void
760 process_function_and_variable_attributes (cgraph_node *first,
761 varpool_node *first_var)
763 cgraph_node *node;
764 varpool_node *vnode;
766 for (node = symtab->first_function (); node != first;
767 node = symtab->next_function (node))
769 tree decl = node->decl;
770 if (DECL_PRESERVE_P (decl))
771 node->mark_force_output ();
772 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
774 if (! TREE_PUBLIC (node->decl))
775 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
776 "%<externally_visible%>"
777 " attribute have effect only on public objects");
779 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
780 && (node->definition && !node->alias))
782 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
783 "%<weakref%> attribute ignored"
784 " because function is defined");
785 DECL_WEAK (decl) = 0;
786 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
787 DECL_ATTRIBUTES (decl));
790 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
791 && !DECL_DECLARED_INLINE_P (decl)
792 /* redefining extern inline function makes it DECL_UNINLINABLE. */
793 && !DECL_UNINLINABLE (decl))
794 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
795 "always_inline function might not be inlinable");
797 process_common_attributes (node, decl);
799 for (vnode = symtab->first_variable (); vnode != first_var;
800 vnode = symtab->next_variable (vnode))
802 tree decl = vnode->decl;
803 if (DECL_EXTERNAL (decl)
804 && DECL_INITIAL (decl))
805 varpool_node::finalize_decl (decl);
806 if (DECL_PRESERVE_P (decl))
807 vnode->force_output = true;
808 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
810 if (! TREE_PUBLIC (vnode->decl))
811 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
812 "%<externally_visible%>"
813 " attribute have effect only on public objects");
815 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
816 && vnode->definition
817 && DECL_INITIAL (decl))
819 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
820 "%<weakref%> attribute ignored"
821 " because variable is initialized");
822 DECL_WEAK (decl) = 0;
823 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
824 DECL_ATTRIBUTES (decl));
826 process_common_attributes (vnode, decl);
830 /* Mark DECL as finalized. By finalizing the declaration, the frontend instructs
831 the middle end to output the variable to the asm file, if needed or externally
832 visible. */
834 void
835 varpool_node::finalize_decl (tree decl)
837 varpool_node *node = varpool_node::get_create (decl);
839 gcc_assert (TREE_STATIC (decl) || DECL_EXTERNAL (decl));
841 if (node->definition)
842 return;
843 /* Set definition first before calling notice_global_symbol so that
844 it is available to notice_global_symbol. */
845 node->definition = true;
846 notice_global_symbol (decl);
847 if (!flag_toplevel_reorder)
848 node->no_reorder = true;
849 if (TREE_THIS_VOLATILE (decl) || DECL_PRESERVE_P (decl)
850 /* Traditionally we do not eliminate static variables when not
851 optimizing and when not doing toplevel reorder. */
852 || (node->no_reorder && !DECL_COMDAT (node->decl)
853 && !DECL_ARTIFICIAL (node->decl)))
854 node->force_output = true;
856 if (symtab->state == CONSTRUCTION
857 && (node->needed_p () || node->referred_to_p ()))
858 enqueue_node (node);
859 if (symtab->state >= IPA_SSA)
860 node->analyze ();
861 /* Some frontends produce various interface variables after compilation
862 finished. */
863 if (symtab->state == FINISHED
864 || (node->no_reorder
865 && symtab->state == EXPANSION))
866 node->assemble_decl ();
869 /* EDGE is a polymorphic call. Mark all possible targets as reachable,
870 and if there is only one target, perform trivial devirtualization.
871 REACHABLE_CALL_TARGETS collects target lists we already walked to
872 avoid duplicate work. */
874 static void
875 walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
876 cgraph_edge *edge)
878 unsigned int i;
879 void *cache_token;
880 bool final;
881 vec <cgraph_node *>targets
882 = possible_polymorphic_call_targets
883 (edge, &final, &cache_token);
885 if (!reachable_call_targets->add (cache_token))
887 if (symtab->dump_file)
888 dump_possible_polymorphic_call_targets
889 (symtab->dump_file, edge);
891 for (i = 0; i < targets.length (); i++)
893 /* Do not bother to mark virtual methods in anonymous namespace;
894 either we will find use of virtual table defining it, or it is
895 unused. */
896 if (targets[i]->definition
897 && TREE_CODE
898 (TREE_TYPE (targets[i]->decl))
899 == METHOD_TYPE
900 && !type_in_anonymous_namespace_p
901 (TYPE_METHOD_BASETYPE (TREE_TYPE (targets[i]->decl))))
902 enqueue_node (targets[i]);
906 /* Very trivial devirtualization; when the type is
907 final or anonymous (so we know all its derivation)
908 and there is only one possible virtual call target,
909 make the edge direct. */
910 if (final)
912 if (targets.length () <= 1 && dbg_cnt (devirt))
914 cgraph_node *target;
915 if (targets.length () == 1)
916 target = targets[0];
917 else
918 target = cgraph_node::create
919 (builtin_decl_implicit (BUILT_IN_UNREACHABLE));
921 if (symtab->dump_file)
923 fprintf (symtab->dump_file,
924 "Devirtualizing call: ");
925 print_gimple_stmt (symtab->dump_file,
926 edge->call_stmt, 0,
927 TDF_SLIM);
929 if (dump_enabled_p ())
931 location_t locus = gimple_location_safe (edge->call_stmt);
932 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
933 "devirtualizing call in %s to %s\n",
934 edge->caller->name (), target->name ());
937 edge->make_direct (target);
938 edge->redirect_call_stmt_to_callee ();
940 if (symtab->dump_file)
942 fprintf (symtab->dump_file,
943 "Devirtualized as: ");
944 print_gimple_stmt (symtab->dump_file,
945 edge->call_stmt, 0,
946 TDF_SLIM);
952 /* Issue appropriate warnings for the global declaration DECL. */
954 static void
955 check_global_declaration (symtab_node *snode)
957 const char *decl_file;
958 tree decl = snode->decl;
960 /* Warn about any function declared static but not defined. We don't
961 warn about variables, because many programs have static variables
962 that exist only to get some text into the object file. */
963 if (TREE_CODE (decl) == FUNCTION_DECL
964 && DECL_INITIAL (decl) == 0
965 && DECL_EXTERNAL (decl)
966 && ! DECL_ARTIFICIAL (decl)
967 && ! TREE_NO_WARNING (decl)
968 && ! TREE_PUBLIC (decl)
969 && (warn_unused_function
970 || snode->referred_to_p (/*include_self=*/false)))
972 if (snode->referred_to_p (/*include_self=*/false))
973 pedwarn (input_location, 0, "%q+F used but never defined", decl);
974 else
975 warning (OPT_Wunused_function, "%q+F declared %<static%> but never defined", decl);
976 /* This symbol is effectively an "extern" declaration now. */
977 TREE_PUBLIC (decl) = 1;
980 /* Warn about static fns or vars defined but not used. */
981 if (((warn_unused_function && TREE_CODE (decl) == FUNCTION_DECL)
982 || (((warn_unused_variable && ! TREE_READONLY (decl))
983 || (warn_unused_const_variable > 0 && TREE_READONLY (decl)
984 && (warn_unused_const_variable == 2
985 || (main_input_filename != NULL
986 && (decl_file = DECL_SOURCE_FILE (decl)) != NULL
987 && filename_cmp (main_input_filename,
988 decl_file) == 0))))
989 && VAR_P (decl)))
990 && ! DECL_IN_SYSTEM_HEADER (decl)
991 && ! snode->referred_to_p (/*include_self=*/false)
992 /* This TREE_USED check is needed in addition to referred_to_p
993 above, because the `__unused__' attribute is not being
994 considered for referred_to_p. */
995 && ! TREE_USED (decl)
996 /* The TREE_USED bit for file-scope decls is kept in the identifier,
997 to handle multiple external decls in different scopes. */
998 && ! (DECL_NAME (decl) && TREE_USED (DECL_NAME (decl)))
999 && ! DECL_EXTERNAL (decl)
1000 && ! DECL_ARTIFICIAL (decl)
1001 && ! DECL_ABSTRACT_ORIGIN (decl)
1002 && ! TREE_PUBLIC (decl)
1003 /* A volatile variable might be used in some non-obvious way. */
1004 && (! VAR_P (decl) || ! TREE_THIS_VOLATILE (decl))
1005 /* Global register variables must be declared to reserve them. */
1006 && ! (VAR_P (decl) && DECL_REGISTER (decl))
1007 /* Global ctors and dtors are called by the runtime. */
1008 && (TREE_CODE (decl) != FUNCTION_DECL
1009 || (!DECL_STATIC_CONSTRUCTOR (decl)
1010 && !DECL_STATIC_DESTRUCTOR (decl)))
1011 /* Otherwise, ask the language. */
1012 && lang_hooks.decls.warn_unused_global (decl))
1013 warning_at (DECL_SOURCE_LOCATION (decl),
1014 (TREE_CODE (decl) == FUNCTION_DECL)
1015 ? OPT_Wunused_function
1016 : (TREE_READONLY (decl)
1017 ? OPT_Wunused_const_variable_
1018 : OPT_Wunused_variable),
1019 "%qD defined but not used", decl);
1022 /* Discover all functions and variables that are trivially needed, analyze
1023 them as well as all functions and variables referred to by them. */
1024 static cgraph_node *first_analyzed;
1025 static varpool_node *first_analyzed_var;
1027 /* FIRST_TIME is set to TRUE for the first time we are called for a
1028 translation unit from finalize_compilation_unit() or false
1029 otherwise. */
1031 static void
1032 analyze_functions (bool first_time)
1034 /* Keep track of already processed nodes when called multiple times for
1035 intermodule optimization. */
1036 cgraph_node *first_handled = first_analyzed;
1037 varpool_node *first_handled_var = first_analyzed_var;
1038 hash_set<void *> reachable_call_targets;
1040 symtab_node *node;
1041 symtab_node *next;
1042 int i;
1043 ipa_ref *ref;
1044 bool changed = true;
1045 location_t saved_loc = input_location;
1047 bitmap_obstack_initialize (NULL);
1048 symtab->state = CONSTRUCTION;
1049 input_location = UNKNOWN_LOCATION;
1051 /* Ugly, but the fixup cannot happen at the time the same body alias is created;
1052 the C++ FE is confused about the COMDAT groups being right. */
1053 if (symtab->cpp_implicit_aliases_done)
1054 FOR_EACH_SYMBOL (node)
1055 if (node->cpp_implicit_alias)
1056 node->fixup_same_cpp_alias_visibility (node->get_alias_target ());
1057 build_type_inheritance_graph ();
1059 /* Analysis adds static variables that in turn add references to new functions.
1060 So we need to iterate the process until it stabilizes. */
1061 while (changed)
1063 changed = false;
1064 process_function_and_variable_attributes (first_analyzed,
1065 first_analyzed_var);
1067 /* First identify the trivially needed symbols. */
1068 for (node = symtab->first_symbol ();
1069 node != first_analyzed
1070 && node != first_analyzed_var; node = node->next)
1072 /* Convert COMDAT group designators to IDENTIFIER_NODEs. */
1073 node->get_comdat_group_id ();
1074 if (node->needed_p ())
1076 enqueue_node (node);
1077 if (!changed && symtab->dump_file)
1078 fprintf (symtab->dump_file, "Trivially needed symbols:");
1079 changed = true;
1080 if (symtab->dump_file)
1081 fprintf (symtab->dump_file, " %s", node->asm_name ());
1082 if (!changed && symtab->dump_file)
1083 fprintf (symtab->dump_file, "\n");
1085 if (node == first_analyzed
1086 || node == first_analyzed_var)
1087 break;
1089 symtab->process_new_functions ();
1090 first_analyzed_var = symtab->first_variable ();
1091 first_analyzed = symtab->first_function ();
1093 if (changed && symtab->dump_file)
1094 fprintf (symtab->dump_file, "\n");
1096 /* Lower representation, build callgraph edges and references for all trivially
1097 needed symbols and all symbols referred to by them. */
1098 while (queued_nodes != &symtab_terminator)
1100 changed = true;
1101 node = queued_nodes;
1102 queued_nodes = (symtab_node *)queued_nodes->aux;
1103 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
1104 if (cnode && cnode->definition)
1106 cgraph_edge *edge;
1107 tree decl = cnode->decl;
1109 /* ??? It is possible to create extern inline function
1110 and later use the weak alias attribute to kill its body.
1111 See gcc.c-torture/compile/20011119-1.c */
1112 if (!DECL_STRUCT_FUNCTION (decl)
1113 && !cnode->alias
1114 && !cnode->thunk.thunk_p
1115 && !cnode->dispatcher_function)
1117 cnode->reset ();
1118 cnode->local.redefined_extern_inline = true;
1119 continue;
1122 if (!cnode->analyzed)
1123 cnode->analyze ();
1125 for (edge = cnode->callees; edge; edge = edge->next_callee)
1126 if (edge->callee->definition
1127 && (!DECL_EXTERNAL (edge->callee->decl)
1128 /* When not optimizing, do not try to analyze extern
1129 inline functions. Doing so is pointless. */
1130 || opt_for_fn (edge->callee->decl, optimize)
1131 /* Weakrefs need to be preserved. */
1132 || edge->callee->alias
1133 /* always_inline functions are inlined even at -O0. */
1134 || lookup_attribute
1135 ("always_inline",
1136 DECL_ATTRIBUTES (edge->callee->decl))
1137 /* Multiversioned functions need the dispatcher to
1138 be produced locally even for extern functions. */
1139 || edge->callee->function_version ()))
1140 enqueue_node (edge->callee);
1141 if (opt_for_fn (cnode->decl, optimize)
1142 && opt_for_fn (cnode->decl, flag_devirtualize))
1144 cgraph_edge *next;
1146 for (edge = cnode->indirect_calls; edge; edge = next)
1148 next = edge->next_callee;
1149 if (edge->indirect_info->polymorphic)
1150 walk_polymorphic_call_targets (&reachable_call_targets,
1151 edge);
1155 /* If decl is a clone of an abstract function,
1156 mark that abstract function so that we don't release its body.
1157 The DECL_INITIAL() of that abstract function declaration
1158 will be later needed to output debug info. */
1159 if (DECL_ABSTRACT_ORIGIN (decl))
1161 cgraph_node *origin_node
1162 = cgraph_node::get_create (DECL_ABSTRACT_ORIGIN (decl));
1163 origin_node->used_as_abstract_origin = true;
1165 /* Preserve a function's function context node. It will
1166 later be needed to output debug info. */
1167 if (tree fn = decl_function_context (decl))
1169 cgraph_node *origin_node = cgraph_node::get_create (fn);
1170 enqueue_node (origin_node);
1173 else
1175 varpool_node *vnode = dyn_cast <varpool_node *> (node);
1176 if (vnode && vnode->definition && !vnode->analyzed)
1177 vnode->analyze ();
1180 if (node->same_comdat_group)
1182 symtab_node *next;
1183 for (next = node->same_comdat_group;
1184 next != node;
1185 next = next->same_comdat_group)
1186 if (!next->comdat_local_p ())
1187 enqueue_node (next);
1189 for (i = 0; node->iterate_reference (i, ref); i++)
1190 if (ref->referred->definition
1191 && (!DECL_EXTERNAL (ref->referred->decl)
1192 || ((TREE_CODE (ref->referred->decl) != FUNCTION_DECL
1193 && optimize)
1194 || (TREE_CODE (ref->referred->decl) == FUNCTION_DECL
1195 && opt_for_fn (ref->referred->decl, optimize))
1196 || node->alias
1197 || ref->referred->alias)))
1198 enqueue_node (ref->referred);
1199 symtab->process_new_functions ();
1202 update_type_inheritance_graph ();
1204 /* Collect entry points to the unit. */
1205 if (symtab->dump_file)
1207 fprintf (symtab->dump_file, "\n\nInitial ");
1208 symtab->dump (symtab->dump_file);
1211 if (first_time)
1213 symtab_node *snode;
1214 FOR_EACH_SYMBOL (snode)
1215 check_global_declaration (snode);
1218 if (symtab->dump_file)
1219 fprintf (symtab->dump_file, "\nRemoving unused symbols:");
1221 for (node = symtab->first_symbol ();
1222 node != first_handled
1223 && node != first_handled_var; node = next)
1225 next = node->next;
1226 if (!node->aux && !node->referred_to_p ())
1228 if (symtab->dump_file)
1229 fprintf (symtab->dump_file, " %s", node->name ());
1231 /* See if the debugger can use anything before the DECL
1232 passes away. Perhaps it can notice a DECL that is now a
1233 constant and can tag the early DIE with an appropriate
1234 attribute.
1236 Otherwise, this is the last chance the debug_hooks have
1237 at looking at optimized away DECLs, since
1238 late_global_decl will subsequently be called from the
1239 contents of the now pruned symbol table. */
1240 if (VAR_P (node->decl)
1241 && !decl_function_context (node->decl))
1243 /* We are reclaiming totally unreachable code and variables
1244 so they effectively appear as readonly. Show that to
1245 the debug machinery. */
1246 TREE_READONLY (node->decl) = 1;
1247 node->definition = false;
1248 (*debug_hooks->late_global_decl) (node->decl);
1251 node->remove ();
1252 continue;
1254 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
1256 tree decl = node->decl;
1258 if (cnode->definition && !gimple_has_body_p (decl)
1259 && !cnode->alias
1260 && !cnode->thunk.thunk_p)
1261 cnode->reset ();
1263 gcc_assert (!cnode->definition || cnode->thunk.thunk_p
1264 || cnode->alias
1265 || gimple_has_body_p (decl)
1266 || cnode->native_rtl_p ());
1267 gcc_assert (cnode->analyzed == cnode->definition);
1269 node->aux = NULL;
1271 for (;node; node = node->next)
1272 node->aux = NULL;
1273 first_analyzed = symtab->first_function ();
1274 first_analyzed_var = symtab->first_variable ();
1275 if (symtab->dump_file)
1277 fprintf (symtab->dump_file, "\n\nReclaimed ");
1278 symtab->dump (symtab->dump_file);
1280 bitmap_obstack_release (NULL);
1281 ggc_collect ();
1282 /* Initialize assembler name hash, in particular we want to trigger C++
1283 mangling and same body alias creation before we free DECL_ARGUMENTS
1284 used by it. */
1285 if (!seen_error ())
1286 symtab->symtab_initialize_asm_name_hash ();
1288 input_location = saved_loc;
1291 /* Check declaration of the type of ALIAS for compatibility with its TARGET
1292 (which may be an ifunc resolver) and issue a diagnostic when they are
1293 not compatible according to language rules (plus a C++ extension for
1294 non-static member functions). */
1296 static void
1297 maybe_diag_incompatible_alias (tree alias, tree target)
1299 tree altype = TREE_TYPE (alias);
1300 tree targtype = TREE_TYPE (target);
1302 bool ifunc = cgraph_node::get (alias)->ifunc_resolver;
1303 tree funcptr = altype;
1305 if (ifunc)
1307 /* Handle attribute ifunc first. */
1308 if (TREE_CODE (altype) == METHOD_TYPE)
1310 /* Set FUNCPTR to the type of the alias target. If the type
1311 is a non-static member function of class C, construct a type
1312 of an ordinary function taking C* as the first argument,
1313 followed by the member function argument list, and use it
1314 instead to check for incompatibility. This conversion is
1315 not defined by the language but an extension provided by
1316 G++. */
1318 tree rettype = TREE_TYPE (altype);
1319 tree args = TYPE_ARG_TYPES (altype);
1320 altype = build_function_type (rettype, args);
1321 funcptr = altype;
1324 targtype = TREE_TYPE (targtype);
1326 if (POINTER_TYPE_P (targtype))
1328 targtype = TREE_TYPE (targtype);
1330 /* Only issue Wattribute-alias for conversions to void* with
1331 -Wextra. */
1332 if (VOID_TYPE_P (targtype) && !extra_warnings)
1333 return;
1335 /* Proceed to handle incompatible ifunc resolvers below. */
1337 else
1339 funcptr = build_pointer_type (funcptr);
1341 error_at (DECL_SOURCE_LOCATION (target),
1342 "%<ifunc%> resolver for %qD must return %qT",
1343 alias, funcptr);
1344 inform (DECL_SOURCE_LOCATION (alias),
1345 "resolver indirect function declared here");
1346 return;
1350 if ((!FUNC_OR_METHOD_TYPE_P (targtype)
1351 || (prototype_p (altype)
1352 && prototype_p (targtype)
1353 && !types_compatible_p (altype, targtype))))
1355 /* Warn for incompatibilities. Avoid warning for functions
1356 without a prototype to make it possible to declare aliases
1357 without knowing the exact type, as libstdc++ does. */
1358 if (ifunc)
1360 funcptr = build_pointer_type (funcptr);
1362 if (warning_at (DECL_SOURCE_LOCATION (target),
1363 OPT_Wattribute_alias,
1364 "%<ifunc%> resolver for %qD should return %qT",
1365 alias, funcptr))
1366 inform (DECL_SOURCE_LOCATION (alias),
1367 "resolver indirect function declared here");
1369 else if (warning_at (DECL_SOURCE_LOCATION (alias),
1370 OPT_Wattribute_alias,
1371 "%qD alias between functions of incompatible "
1372 "types %qT and %qT", alias, altype, targtype))
1373 inform (DECL_SOURCE_LOCATION (target),
1374 "aliased declaration here");
1378 /* Translate the ugly representation of aliases as alias pairs into nice
1379 representation in callgraph. We don't handle all cases yet,
1380 unfortunately. */
1382 static void
1383 handle_alias_pairs (void)
1385 alias_pair *p;
1386 unsigned i;
1388 for (i = 0; alias_pairs && alias_pairs->iterate (i, &p);)
1390 symtab_node *target_node = symtab_node::get_for_asmname (p->target);
1392 /* Weakrefs with target not defined in current unit are easy to handle:
1393 they behave just as external variables except we need to note the
1394 alias flag to later output the weakref pseudo op into asm file. */
1395 if (!target_node
1396 && lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL)
1398 symtab_node *node = symtab_node::get (p->decl);
1399 if (node)
1401 node->alias_target = p->target;
1402 node->weakref = true;
1403 node->alias = true;
1404 node->transparent_alias = true;
1406 alias_pairs->unordered_remove (i);
1407 continue;
1409 else if (!target_node)
1411 error ("%q+D aliased to undefined symbol %qE", p->decl, p->target);
1412 symtab_node *node = symtab_node::get (p->decl);
1413 if (node)
1414 node->alias = false;
1415 alias_pairs->unordered_remove (i);
1416 continue;
1419 if (DECL_EXTERNAL (target_node->decl)
1420 /* We use local aliases for C++ thunks to force the tailcall
1421 to bind locally. This is a hack - to keep it working do
1422 the following (which is not strictly correct). */
1423 && (TREE_CODE (target_node->decl) != FUNCTION_DECL
1424 || ! DECL_VIRTUAL_P (target_node->decl))
1425 && ! lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)))
1427 error ("%q+D aliased to external symbol %qE",
1428 p->decl, p->target);
1431 if (TREE_CODE (p->decl) == FUNCTION_DECL
1432 && target_node && is_a <cgraph_node *> (target_node))
1434 maybe_diag_incompatible_alias (p->decl, target_node->decl);
1436 cgraph_node *src_node = cgraph_node::get (p->decl);
1437 if (src_node && src_node->definition)
1438 src_node->reset ();
1439 cgraph_node::create_alias (p->decl, target_node->decl);
1440 alias_pairs->unordered_remove (i);
1442 else if (VAR_P (p->decl)
1443 && target_node && is_a <varpool_node *> (target_node))
1445 varpool_node::create_alias (p->decl, target_node->decl);
1446 alias_pairs->unordered_remove (i);
1448 else
1450 error ("%q+D alias between function and variable is not supported",
1451 p->decl);
1452 inform (DECL_SOURCE_LOCATION (target_node->decl),
1453 "aliased declaration here");
1455 alias_pairs->unordered_remove (i);
1458 vec_free (alias_pairs);
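/* For reference, the source-level forms that end up in ALIAS_PAIRS look
   like the following (illustrative only):

       void target (void) { }
       void alias_fn (void) __attribute__ ((alias ("target")));
       static void weak_fn (void) __attribute__ ((weakref ("target")));

   The alias form becomes a cgraph alias of TARGET; a weakref whose target
   is not defined in the current unit takes the early exit above and is
   only noted for the later weakref pseudo-op.  */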
1462 /* Figure out what functions we want to assemble. */
1464 static void
1465 mark_functions_to_output (void)
1467 bool check_same_comdat_groups = false;
1468 cgraph_node *node;
1470 if (flag_checking)
1471 FOR_EACH_FUNCTION (node)
1472 gcc_assert (!node->process);
1474 FOR_EACH_FUNCTION (node)
1476 tree decl = node->decl;
1478 gcc_assert (!node->process || node->same_comdat_group);
1479 if (node->process)
1480 continue;
1482 /* We need to output all local functions that are used and not
1483 always inlined, as well as those that are reachable from
1484 outside the current compilation unit. */
1485 if (node->analyzed
1486 && !node->thunk.thunk_p
1487 && !node->alias
1488 && !node->global.inlined_to
1489 && !TREE_ASM_WRITTEN (decl)
1490 && !DECL_EXTERNAL (decl))
1492 node->process = 1;
1493 if (node->same_comdat_group)
1495 cgraph_node *next;
1496 for (next = dyn_cast<cgraph_node *> (node->same_comdat_group);
1497 next != node;
1498 next = dyn_cast<cgraph_node *> (next->same_comdat_group))
1499 if (!next->thunk.thunk_p && !next->alias
1500 && !next->comdat_local_p ())
1501 next->process = 1;
1504 else if (node->same_comdat_group)
1506 if (flag_checking)
1507 check_same_comdat_groups = true;
1509 else
1511 /* We should've reclaimed all functions that are not needed. */
1512 if (flag_checking
1513 && !node->global.inlined_to
1514 && gimple_has_body_p (decl)
1515 /* FIXME: in an ltrans unit when the offline copy is outside a partition but inline copies
1516 are inside a partition, we can end up not removing the body since we no longer
1517 have an analyzed node pointing to it. */
1518 && !node->in_other_partition
1519 && !node->alias
1520 && !node->clones
1521 && !DECL_EXTERNAL (decl))
1523 node->debug ();
1524 internal_error ("failed to reclaim unneeded function");
1526 gcc_assert (node->global.inlined_to
1527 || !gimple_has_body_p (decl)
1528 || node->in_other_partition
1529 || node->clones
1530 || DECL_ARTIFICIAL (decl)
1531 || DECL_EXTERNAL (decl));
1536 if (flag_checking && check_same_comdat_groups)
1537 FOR_EACH_FUNCTION (node)
1538 if (node->same_comdat_group && !node->process)
1540 tree decl = node->decl;
1541 if (!node->global.inlined_to
1542 && gimple_has_body_p (decl)
1543 /* FIXME: in an ltrans unit when the offline copy is outside a
1544 partition but inline copies are inside a partition, we can
1545 end up not removing the body since we no longer have an
1546 analyzed node pointing to it. */
1547 && !node->in_other_partition
1548 && !node->clones
1549 && !DECL_EXTERNAL (decl))
1551 node->debug ();
1552 internal_error ("failed to reclaim unneeded function in same "
1553 "comdat group");
1558 /* DECL is a FUNCTION_DECL. Initialize datastructures so DECL is a function
1559 in lowered gimple form. IN_SSA is true if the gimple is in SSA.
1561 Set current_function_decl and cfun to the newly constructed empty function body.
1562 Return the basic block in the function body. */
1564 basic_block
1565 init_lowered_empty_function (tree decl, bool in_ssa, profile_count count)
1567 basic_block bb;
1568 edge e;
1570 current_function_decl = decl;
1571 allocate_struct_function (decl, false);
1572 gimple_register_cfg_hooks ();
1573 init_empty_tree_cfg ();
1574 init_tree_ssa (cfun);
1576 if (in_ssa)
1578 init_ssa_operands (cfun);
1579 cfun->gimple_df->in_ssa_p = true;
1580 cfun->curr_properties |= PROP_ssa;
1583 DECL_INITIAL (decl) = make_node (BLOCK);
1584 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
1586 DECL_SAVED_TREE (decl) = error_mark_node;
1587 cfun->curr_properties |= (PROP_gimple_lcf | PROP_gimple_leh | PROP_gimple_any
1588 | PROP_cfg | PROP_loops);
1590 set_loops_for_fn (cfun, ggc_cleared_alloc<loops> ());
1591 init_loops_structure (cfun, loops_for_fn (cfun), 1);
1592 loops_for_fn (cfun)->state |= LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
1594 /* Create BB for body of the function and connect it properly. */
1595 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = count;
1596 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = count;
1597 bb = create_basic_block (NULL, ENTRY_BLOCK_PTR_FOR_FN (cfun));
1598 bb->count = count;
1599 e = make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, EDGE_FALLTHRU);
1600 e->probability = profile_probability::always ();
1601 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
1602 e->probability = profile_probability::always ();
1603 add_bb_to_loop (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
1605 return bb;
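/* The body constructed above is minimal: one empty basic block wired
   between the entry and exit blocks,

       ENTRY  --fallthru-->  BB  -------->  EXIT

   with both edges given probability "always" and BB added to the root
   loop. Callers such as expand_thunk then fill BB with statements.  */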
1608 /* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1609 offset indicated by VIRTUAL_OFFSET, if that is
1610 non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
1611 zero for a result adjusting thunk. */
1613 tree
1614 thunk_adjust (gimple_stmt_iterator * bsi,
1615 tree ptr, bool this_adjusting,
1616 HOST_WIDE_INT fixed_offset, tree virtual_offset)
1618 gassign *stmt;
1619 tree ret;
1621 if (this_adjusting
1622 && fixed_offset != 0)
1624 stmt = gimple_build_assign
1625 (ptr, fold_build_pointer_plus_hwi_loc (input_location,
1626 ptr,
1627 fixed_offset));
1628 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1631 /* If there's a virtual offset, look up that value in the vtable and
1632 adjust the pointer again. */
1633 if (virtual_offset)
1635 tree vtabletmp;
1636 tree vtabletmp2;
1637 tree vtabletmp3;
1639 if (!vtable_entry_type)
1641 tree vfunc_type = make_node (FUNCTION_TYPE);
1642 TREE_TYPE (vfunc_type) = integer_type_node;
1643 TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
1644 layout_type (vfunc_type);
1646 vtable_entry_type = build_pointer_type (vfunc_type);
1649 vtabletmp =
1650 create_tmp_reg (build_pointer_type
1651 (build_pointer_type (vtable_entry_type)), "vptr");
1653 /* The vptr is always at offset zero in the object. */
1654 stmt = gimple_build_assign (vtabletmp,
1655 build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
1656 ptr));
1657 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1659 /* Form the vtable address. */
1660 vtabletmp2 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp)),
1661 "vtableaddr");
1662 stmt = gimple_build_assign (vtabletmp2,
1663 build_simple_mem_ref (vtabletmp));
1664 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1666 /* Find the entry with the vcall offset. */
1667 stmt = gimple_build_assign (vtabletmp2,
1668 fold_build_pointer_plus_loc (input_location,
1669 vtabletmp2,
1670 virtual_offset));
1671 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1673 /* Get the offset itself. */
1674 vtabletmp3 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp2)),
1675 "vcalloffset");
1676 stmt = gimple_build_assign (vtabletmp3,
1677 build_simple_mem_ref (vtabletmp2));
1678 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1680 /* Adjust the `this' pointer. */
1681 ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
1682 ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
1683 GSI_CONTINUE_LINKING);
1686 if (!this_adjusting
1687 && fixed_offset != 0)
1688 /* Adjust the pointer by the constant. */
1690 tree ptrtmp;
1692 if (VAR_P (ptr))
1693 ptrtmp = ptr;
1694 else
1696 ptrtmp = create_tmp_reg (TREE_TYPE (ptr), "ptr");
1697 stmt = gimple_build_assign (ptrtmp, ptr);
1698 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1700 ptr = fold_build_pointer_plus_hwi_loc (input_location,
1701 ptrtmp, fixed_offset);
1704 /* Emit the statement and gimplify the adjustment expression. */
1705 ret = create_tmp_reg (TREE_TYPE (ptr), "adjusted_this");
1706 stmt = gimple_build_assign (ret, ptr);
1707 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1709 return ret;
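/* As an illustration, for a this-adjusting thunk with both a fixed and a
   virtual offset the statements built above correspond roughly to:

       this' = this + fixed_offset;
       vptr = (cast) this';                        the "vptr" temporary
       vtableaddr = *vptr;                         load the vtable pointer
       vtableaddr = vtableaddr + virtual_offset;   step to the vcall slot
       vcalloffset = *vtableaddr;                  load the vcall offset
       adjusted_this = this' + vcalloffset;

   The returned tree is the final "adjusted_this" temporary.  */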
1712 /* Expand thunk NODE to gimple if possible.
1713 When FORCE_GIMPLE_THUNK is true, gimple thunk is created and
1714 no assembler is produced.
1715 When OUTPUT_ASM_THUNK is true, also produce assembler for
1716 thunks that are not lowered. */
1718 bool
1719 cgraph_node::expand_thunk (bool output_asm_thunks, bool force_gimple_thunk)
1721 bool this_adjusting = thunk.this_adjusting;
1722 HOST_WIDE_INT fixed_offset = thunk.fixed_offset;
1723 HOST_WIDE_INT virtual_value = thunk.virtual_value;
1724 tree virtual_offset = NULL;
1725 tree alias = callees->callee->decl;
1726 tree thunk_fndecl = decl;
1727 tree a;
1729 /* Instrumentation thunk is the same function with
1730 a different signature. Never need to expand it. */
1731 if (thunk.add_pointer_bounds_args)
1732 return false;
1734 if (!force_gimple_thunk && this_adjusting
1735 && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1736 virtual_value, alias))
1738 const char *fnname;
1739 tree fn_block;
1740 tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1742 if (!output_asm_thunks)
1744 analyzed = true;
1745 return false;
1748 if (in_lto_p)
1749 get_untransformed_body ();
1750 a = DECL_ARGUMENTS (thunk_fndecl);
1752 current_function_decl = thunk_fndecl;
1754 /* Ensure thunks are emitted in their correct sections. */
1755 resolve_unique_section (thunk_fndecl, 0,
1756 flag_function_sections);
1758 DECL_RESULT (thunk_fndecl)
1759 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1760 RESULT_DECL, 0, restype);
1761 DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
1762 fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
1764 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1765 create one. */
1766 fn_block = make_node (BLOCK);
1767 BLOCK_VARS (fn_block) = a;
1768 DECL_INITIAL (thunk_fndecl) = fn_block;
1769 BLOCK_SUPERCONTEXT (fn_block) = thunk_fndecl;
1770 allocate_struct_function (thunk_fndecl, false);
1771 init_function_start (thunk_fndecl);
1772 cfun->is_thunk = 1;
1773 insn_locations_init ();
1774 set_curr_insn_location (DECL_SOURCE_LOCATION (thunk_fndecl));
1775 prologue_location = curr_insn_location ();
1776 assemble_start_function (thunk_fndecl, fnname);
1778 targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1779 fixed_offset, virtual_value, alias);
1781 assemble_end_function (thunk_fndecl, fnname);
1782 insn_locations_finalize ();
1783 init_insn_lengths ();
1784 free_after_compilation (cfun);
1785 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1786 thunk.thunk_p = false;
1787 analyzed = false;
1789 else if (stdarg_p (TREE_TYPE (thunk_fndecl)))
1791 error ("generic thunk code fails for method %qD which uses %<...%>",
1792 thunk_fndecl);
1793 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1794 analyzed = true;
1795 return false;
1797 else
1799 tree restype;
1800 basic_block bb, then_bb, else_bb, return_bb;
1801 gimple_stmt_iterator bsi;
1802 int nargs = 0;
1803 tree arg;
1804 int i;
1805 tree resdecl;
1806 tree restmp = NULL;
1807 tree resbnd = NULL;
1809 gcall *call;
1810 greturn *ret;
1811 bool alias_is_noreturn = TREE_THIS_VOLATILE (alias);
1813 /* We may be called from expand_thunk that releases the body except for
1814 DECL_ARGUMENTS. In this case force_gimple_thunk is true. */
1815 if (in_lto_p && !force_gimple_thunk)
1816 get_untransformed_body ();
1817 a = DECL_ARGUMENTS (thunk_fndecl);
1819 current_function_decl = thunk_fndecl;
1821 /* Ensure thunks are emitted in their correct sections. */
1822 resolve_unique_section (thunk_fndecl, 0,
1823 flag_function_sections);
1825 DECL_IGNORED_P (thunk_fndecl) = 1;
1826 bitmap_obstack_initialize (NULL);
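/* For thunks that adjust through the vtable, represent the vtable slot
   offset as a tree for use by thunk_adjust.  */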
1828 if (thunk.virtual_offset_p)
1829 virtual_offset = size_int (virtual_value);
1831 /* Build the return declaration for the function. */
1832 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1833 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1835 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1836 DECL_ARTIFICIAL (resdecl) = 1;
1837 DECL_IGNORED_P (resdecl) = 1;
1838 DECL_RESULT (thunk_fndecl) = resdecl;
1839 DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
1841 else
1842 resdecl = DECL_RESULT (thunk_fndecl);
1844 profile_count cfg_count = count;
1845 if (!cfg_count.initialized_p ())
1846 cfg_count = profile_count::from_gcov_type (BB_FREQ_MAX).guessed_local ();
1848 bb = then_bb = else_bb = return_bb
1849 = init_lowered_empty_function (thunk_fndecl, true, cfg_count);
1851 bsi = gsi_start_bb (bb);
1853 /* Build call to the function being thunked. */
1854 if (!VOID_TYPE_P (restype)
1855 && (!alias_is_noreturn
1856 || TREE_ADDRESSABLE (restype)
1857 || TREE_CODE (TYPE_SIZE_UNIT (restype)) != INTEGER_CST))
1859 if (DECL_BY_REFERENCE (resdecl))
1861 restmp = gimple_fold_indirect_ref (resdecl);
1862 if (!restmp)
1863 restmp = build2 (MEM_REF,
1864 TREE_TYPE (TREE_TYPE (DECL_RESULT (alias))),
1865 resdecl,
1866 build_int_cst (TREE_TYPE
1867 (DECL_RESULT (alias)), 0));
1869 else if (!is_gimple_reg_type (restype))
1871 if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl)))
1873 restmp = resdecl;
1875 if (VAR_P (restmp))
1876 add_local_decl (cfun, restmp);
1877 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
1879 else
1880 restmp = create_tmp_var (restype, "retval");
1882 else
1883 restmp = create_tmp_reg (restype, "retval");
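/* Count the formal parameters so the argument vector can be allocated
   with the exact size.  */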
1886 for (arg = a; arg; arg = DECL_CHAIN (arg))
1887 nargs++;
1888 auto_vec<tree> vargs (nargs);
1889 i = 0;
1890 arg = a;
1891 if (this_adjusting)
1893 vargs.quick_push (thunk_adjust (&bsi, a, 1, fixed_offset,
1894 virtual_offset));
1895 arg = DECL_CHAIN (a);
1896 i = 1;
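/* Forward the remaining arguments, copying values that are not valid
   GIMPLE operands into temporaries first.  */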
1899 if (nargs)
1900 for (; i < nargs; i++, arg = DECL_CHAIN (arg))
1902 tree tmp = arg;
1903 if (VECTOR_TYPE_P (TREE_TYPE (arg))
1904 || TREE_CODE (TREE_TYPE (arg)) == COMPLEX_TYPE)
1905 DECL_GIMPLE_REG_P (arg) = 1;
1907 if (!is_gimple_val (arg))
1909 tmp = create_tmp_reg (TYPE_MAIN_VARIANT
1910 (TREE_TYPE (arg)), "arg");
1911 gimple *stmt = gimple_build_assign (tmp, arg);
1912 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1914 vargs.quick_push (tmp);
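/* Build the call to the thunked-to function and record it as the call
   statement of the pre-existing callgraph edge.  */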
1916 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
1917 callees->call_stmt = call;
1918 gimple_call_set_from_thunk (call, true);
1920 /* Return slot optimization is always possible and in fact required to
1921 return values with DECL_BY_REFERENCE. */
1922 if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl))
1923 && (!is_gimple_reg_type (TREE_TYPE (resdecl))
1924 || DECL_BY_REFERENCE (resdecl)))
1925 gimple_call_set_return_slot_opt (call, true);
1927 if (restmp)
1929 gimple_call_set_lhs (call, restmp);
1930 gcc_assert (useless_type_conversion_p (TREE_TYPE (restmp),
1931 TREE_TYPE (TREE_TYPE (alias))));
1933 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
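/* Unless the target is noreturn, adjust the returned value if needed and
   emit the return statement; calls to noreturn targets are simply marked
   as tail calls.  */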
1934 if (!alias_is_noreturn)
1936 if (restmp && !this_adjusting
1937 && (fixed_offset || virtual_offset))
1939 tree true_label = NULL_TREE;
1941 if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
1943 gimple *stmt;
1944 edge e;
1945 /* If the return type is a pointer, we need to
1946 protect against NULL. We know there will be an
1947 adjustment, because that's why we're emitting a
1948 thunk. */
1949 then_bb = create_basic_block (NULL, bb);
1950 then_bb->count = cfg_count - cfg_count.apply_scale (1, 16);
1951 return_bb = create_basic_block (NULL, then_bb);
1952 return_bb->count = cfg_count;
1953 else_bb = create_basic_block (NULL, else_bb);
1954 else_bb->count = cfg_count.apply_scale (1, 16);
1955 add_bb_to_loop (then_bb, bb->loop_father);
1956 add_bb_to_loop (return_bb, bb->loop_father);
1957 add_bb_to_loop (else_bb, bb->loop_father);
1958 remove_edge (single_succ_edge (bb));
1959 true_label = gimple_block_label (then_bb);
1960 stmt = gimple_build_cond (NE_EXPR, restmp,
1961 build_zero_cst (TREE_TYPE (restmp)),
1962 NULL_TREE, NULL_TREE);
1963 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1964 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1965 e->probability = profile_probability::guessed_always ()
1966 .apply_scale (1, 16);
1967 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1968 e->probability = profile_probability::guessed_always ()
1969 .apply_scale (1, 16);
1970 make_single_succ_edge (return_bb,
1971 EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
1972 make_single_succ_edge (then_bb, return_bb, EDGE_FALLTHRU);
1973 e = make_edge (else_bb, return_bb, EDGE_FALLTHRU);
1974 e->probability = profile_probability::always ();
1975 bsi = gsi_last_bb (then_bb);
1978 restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
1979 fixed_offset, virtual_offset);
1980 if (true_label)
1982 gimple *stmt;
1983 bsi = gsi_last_bb (else_bb);
1984 stmt = gimple_build_assign (restmp,
1985 build_zero_cst (TREE_TYPE (restmp)));
1986 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1987 bsi = gsi_last_bb (return_bb);
1990 else
1991 gimple_call_set_tail (call, true);
1993 /* Build return value. */
1994 if (!DECL_BY_REFERENCE (resdecl))
1995 ret = gimple_build_return (restmp);
1996 else
1997 ret = gimple_build_return (resdecl);
1998 gimple_return_set_retbnd (ret, resbnd);
2000 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
2002 else
2004 gimple_call_set_tail (call, true);
2005 remove_edge (single_succ_edge (bb));
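/* The thunk body is now complete; mark the function as being in SSA form,
   set its profile status and clean up the CFG.  */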
2008 cfun->gimple_df->in_ssa_p = true;
2009 update_max_bb_count ();
2010 profile_status_for_fn (cfun)
2011 = cfg_count.initialized_p () && cfg_count.ipa_p ()
2012 ? PROFILE_READ : PROFILE_GUESSED;
2013 /* FIXME: C++ FE should stop setting TREE_ASM_WRITTEN on thunks. */
2014 TREE_ASM_WRITTEN (thunk_fndecl) = false;
2015 delete_unreachable_blocks ();
2016 update_ssa (TODO_update_ssa);
2017 checking_verify_flow_info ();
2018 free_dominance_info (CDI_DOMINATORS);
2020 /* Since we want to emit the thunk, we explicitly mark its name as
2021 referenced. */
2022 thunk.thunk_p = false;
2023 lowered = true;
2024 bitmap_obstack_release (NULL);
2026 current_function_decl = NULL;
2027 set_cfun (NULL);
2028 return true;
2031 /* Assemble thunks and aliases associated with the node. */
2033 void
2034 cgraph_node::assemble_thunks_and_aliases (void)
2036 cgraph_edge *e;
2037 ipa_ref *ref;
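/* Expand and assemble every thunk whose call target is this node,
   recursing so that thunks of those thunks are emitted too; then output
   the non-transparent aliases.  */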
2039 for (e = callers; e;)
2040 if (e->caller->thunk.thunk_p
2041 && !e->caller->global.inlined_to
2042 && !e->caller->thunk.add_pointer_bounds_args)
2044 cgraph_node *thunk = e->caller;
2046 e = e->next_caller;
2047 thunk->expand_thunk (true, false);
2048 thunk->assemble_thunks_and_aliases ();
2050 else
2051 e = e->next_caller;
2053 FOR_EACH_ALIAS (this, ref)
2055 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2056 if (!alias->transparent_alias)
2058 bool saved_written = TREE_ASM_WRITTEN (decl);
2060 /* Force assemble_alias to really output the alias this time instead
2061 of buffering it in the alias pairs list. */
2062 TREE_ASM_WRITTEN (decl) = 1;
2063 do_assemble_alias (alias->decl,
2064 DECL_ASSEMBLER_NAME (decl));
2065 alias->assemble_thunks_and_aliases ();
2066 TREE_ASM_WRITTEN (decl) = saved_written;
2071 /* Expand function specified by node. */
2073 void
2074 cgraph_node::expand (void)
2076 location_t saved_loc;
2078 /* We ought not to compile any inline clones. */
2079 gcc_assert (!global.inlined_to);
2081 /* __RTL functions are compiled as soon as they are parsed, so don't
2082 do it again. */
2083 if (native_rtl_p ())
2084 return;
2086 announce_function (decl);
2087 process = 0;
2088 gcc_assert (lowered);
2089 get_untransformed_body ();
2091 /* Generate RTL for the body of DECL. */
2093 timevar_push (TV_REST_OF_COMPILATION);
2095 gcc_assert (symtab->global_info_ready);
2097 /* Initialize the default bitmap obstack. */
2098 bitmap_obstack_initialize (NULL);
2100 /* Initialize the RTL code for the function. */
2101 saved_loc = input_location;
2102 input_location = DECL_SOURCE_LOCATION (decl);
2104 gcc_assert (DECL_STRUCT_FUNCTION (decl));
2105 push_cfun (DECL_STRUCT_FUNCTION (decl));
2106 init_function_start (decl);
2108 gimple_register_cfg_hooks ();
2110 bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation*/
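/* Apply the per-function transformations that the IPA passes queued for
   this node before generating RTL.  */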
2112 execute_all_ipa_transforms ();
2114 /* Perform all tree transforms and optimizations. */
2116 /* Signal the start of passes. */
2117 invoke_plugin_callbacks (PLUGIN_ALL_PASSES_START, NULL);
2119 execute_pass_list (cfun, g->get_passes ()->all_passes);
2121 /* Signal the end of passes. */
2122 invoke_plugin_callbacks (PLUGIN_ALL_PASSES_END, NULL);
2124 bitmap_obstack_release (&reg_obstack);
2126 /* Release the default bitmap obstack. */
2127 bitmap_obstack_release (NULL);
2129 /* If requested, warn about function definitions where the function will
2130 return a value (usually of some struct or union type) which itself will
2131 take up a lot of stack space. */
2132 if (warn_larger_than && !DECL_EXTERNAL (decl) && TREE_TYPE (decl))
2134 tree ret_type = TREE_TYPE (TREE_TYPE (decl));
2136 if (ret_type && TYPE_SIZE_UNIT (ret_type)
2137 && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
2138 && compare_tree_int (TYPE_SIZE_UNIT (ret_type),
2139 larger_than_size) > 0)
2141 unsigned int size_as_int
2142 = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));
2144 if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
2145 warning (OPT_Wlarger_than_, "size of return value of %q+D is %u bytes",
2146 decl, size_as_int);
2147 else
2148 warning (OPT_Wlarger_than_, "size of return value of %q+D is larger than %wd bytes",
2149 decl, larger_than_size);
2153 gimple_set_body (decl, NULL);
2154 if (DECL_STRUCT_FUNCTION (decl) == 0
2155 && !cgraph_node::get (decl)->origin)
2157 /* Stop pointing to the local nodes about to be freed.
2158 But DECL_INITIAL must remain nonzero so we know this
2159 was an actual function definition.
2160 For a nested function, this is done in c_pop_function_context.
2161 If rest_of_compilation set this to 0, leave it 0. */
2162 if (DECL_INITIAL (decl) != 0)
2163 DECL_INITIAL (decl) = error_mark_node;
2166 input_location = saved_loc;
2168 ggc_collect ();
2169 timevar_pop (TV_REST_OF_COMPILATION);
2171 /* Make sure that BE didn't give up on compiling. */
2172 gcc_assert (TREE_ASM_WRITTEN (decl));
2173 if (cfun)
2174 pop_cfun ();
2176 /* It would make a lot more sense to output thunks before the function body,
2177 to get more forward and fewer backward jumps. That however would require
2178 solving a problem with comdats; see PR48668. Also, aliases must come after
2179 the function itself to keep one-pass assemblers, like the one on AIX, happy;
2180 see PR 50689. FIXME: Perhaps thunks should be moved before the function
2181 IFF they are not in comdat groups. */
2182 assemble_thunks_and_aliases ();
2183 release_body ();
2184 /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
2185 points to the dead function body. */
2186 remove_callees ();
2187 remove_all_references ();
2190 /* Node comparator responsible for producing the order that corresponds
2191 to the time when a function was first executed. */
2193 static int
2194 node_cmp (const void *pa, const void *pb)
2196 const cgraph_node *a = *(const cgraph_node * const *) pa;
2197 const cgraph_node *b = *(const cgraph_node * const *) pb;
2199 /* Functions with a time profile must come before those without one. */
2200 if (!a->tp_first_run || !b->tp_first_run)
2201 return a->tp_first_run - b->tp_first_run;
2203 return a->tp_first_run != b->tp_first_run
2204 ? b->tp_first_run - a->tp_first_run
2205 : b->order - a->order;
2208 /* Expand all functions that must be output.
2210 Attempt to topologically sort the nodes so that a function is output once
2211 all of its callees have already been assembled, allowing data to be
2212 propagated across the callgraph. Use a stack to get a smaller distance
2213 between a function and its callees (later we may choose to use a more
2214 sophisticated algorithm for function reordering; we will likely want
2215 to use subsections to make the output functions appear in top-down
2216 order). */
2218 static void
2219 expand_all_functions (void)
2221 cgraph_node *node;
2222 cgraph_node **order = XCNEWVEC (cgraph_node *,
2223 symtab->cgraph_count);
2224 unsigned int expanded_func_count = 0, profiled_func_count = 0;
2225 int order_pos, new_order_pos = 0;
2226 int i;
2228 order_pos = ipa_reverse_postorder (order);
2229 gcc_assert (order_pos == symtab->cgraph_count);
2231 /* The garbage collector may remove inline clones that we eliminate during
2232 optimization, so we must be sure not to reference them. */
2233 for (i = 0; i < order_pos; i++)
2234 if (order[i]->process)
2235 order[new_order_pos++] = order[i];
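/* When reordering by time profile, sort so that (given the backward walk
   below) profiled functions are expanded in order of first execution,
   followed by functions without a profile.  */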
2237 if (flag_profile_reorder_functions)
2238 qsort (order, new_order_pos, sizeof (cgraph_node *), node_cmp);
2240 for (i = new_order_pos - 1; i >= 0; i--)
2242 node = order[i];
2244 if (node->process)
2246 expanded_func_count++;
2247 if (node->tp_first_run)
2248 profiled_func_count++;
2250 if (symtab->dump_file)
2251 fprintf (symtab->dump_file,
2252 "Time profile order in expand_all_functions:%s:%d\n",
2253 node->asm_name (), node->tp_first_run);
2254 node->process = 0;
2255 node->expand ();
2259 if (dump_file)
2260 fprintf (dump_file, "Expanded functions with time profile (%s):%u/%u\n",
2261 main_input_filename, profiled_func_count, expanded_func_count);
2263 if (symtab->dump_file && flag_profile_reorder_functions)
2264 fprintf (symtab->dump_file, "Expanded functions with time profile:%u/%u\n",
2265 profiled_func_count, expanded_func_count);
2267 symtab->process_new_functions ();
2268 free_gimplify_stack ();
2270 free (order);
2273 /* This is used to sort the node types by the cgraph order number. */
2275 enum cgraph_order_sort_kind
2277 ORDER_UNDEFINED = 0,
2278 ORDER_FUNCTION,
2279 ORDER_VAR,
2280 ORDER_VAR_UNDEF,
2281 ORDER_ASM
2284 struct cgraph_order_sort
2286 enum cgraph_order_sort_kind kind;
2287 union
2289 cgraph_node *f;
2290 varpool_node *v;
2291 asm_node *a;
2292 } u;
2295 /* Output all functions, variables, and asm statements in the order
2296 according to their order fields, which is the order in which they
2297 appeared in the file. This implements -fno-toplevel-reorder. In
2298 this mode we may output functions and variables which don't really
2299 need to be output. */
2301 static void
2302 output_in_order (void)
2304 int max;
2305 cgraph_order_sort *nodes;
2306 int i;
2307 cgraph_node *pf;
2308 varpool_node *pv;
2309 asm_node *pa;
2310 max = symtab->order;
2311 nodes = XCNEWVEC (cgraph_order_sort, max);
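/* Collect the functions and variables marked no_reorder, together with
   all toplevel asm statements, indexed by their order numbers.  */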
2313 FOR_EACH_DEFINED_FUNCTION (pf)
2315 if (pf->process && !pf->thunk.thunk_p && !pf->alias)
2317 if (!pf->no_reorder)
2318 continue;
2319 i = pf->order;
2320 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
2321 nodes[i].kind = ORDER_FUNCTION;
2322 nodes[i].u.f = pf;
2326 /* There is a similar loop in symbol_table::output_variables.
2327 Please keep them in sync. */
2328 FOR_EACH_VARIABLE (pv)
2330 if (!pv->no_reorder)
2331 continue;
2332 if (DECL_HARD_REGISTER (pv->decl)
2333 || DECL_HAS_VALUE_EXPR_P (pv->decl))
2334 continue;
2335 i = pv->order;
2336 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
2337 nodes[i].kind = pv->definition ? ORDER_VAR : ORDER_VAR_UNDEF;
2338 nodes[i].u.v = pv;
2341 for (pa = symtab->first_asm_symbol (); pa; pa = pa->next)
2343 i = pa->order;
2344 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
2345 nodes[i].kind = ORDER_ASM;
2346 nodes[i].u.a = pa;
2349 /* In -fno-toplevel-reorder mode we output all statics; mark them as needed. */
2351 for (i = 0; i < max; ++i)
2352 if (nodes[i].kind == ORDER_VAR)
2353 nodes[i].u.v->finalize_named_section_flags ();
2355 for (i = 0; i < max; ++i)
2357 switch (nodes[i].kind)
2359 case ORDER_FUNCTION:
2360 nodes[i].u.f->process = 0;
2361 nodes[i].u.f->expand ();
2362 break;
2364 case ORDER_VAR:
2365 nodes[i].u.v->assemble_decl ();
2366 break;
2368 case ORDER_VAR_UNDEF:
2369 assemble_undefined_decl (nodes[i].u.v->decl);
2370 break;
2372 case ORDER_ASM:
2373 assemble_asm (nodes[i].u.a->asm_str);
2374 break;
2376 case ORDER_UNDEFINED:
2377 break;
2379 default:
2380 gcc_unreachable ();
2384 symtab->clear_asm_symbols ();
2386 free (nodes);
2389 static void
2390 ipa_passes (void)
2392 gcc::pass_manager *passes = g->get_passes ();
2394 set_cfun (NULL);
2395 current_function_decl = NULL;
2396 gimple_register_cfg_hooks ();
2397 bitmap_obstack_initialize (NULL);
2399 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);
2401 if (!in_lto_p)
2403 execute_ipa_pass_list (passes->all_small_ipa_passes);
2404 if (seen_error ())
2405 return;
2408 /* This extra symtab_remove_unreachable_nodes pass tends to catch nodes made
2409 unreachable by devirtualization and other changes, where removal must iterate. */
2410 symtab->remove_unreachable_nodes (symtab->dump_file);
2412 /* If pass_all_early_optimizations was not scheduled, the state of
2413 the cgraph will not be properly updated. Update it now. */
2414 if (symtab->state < IPA_SSA)
2415 symtab->state = IPA_SSA;
2417 if (!in_lto_p)
2419 /* Generate coverage variables and constructors. */
2420 coverage_finish ();
2422 /* Process new functions added. */
2423 set_cfun (NULL);
2424 current_function_decl = NULL;
2425 symtab->process_new_functions ();
2427 execute_ipa_summary_passes
2428 ((ipa_opt_pass_d *) passes->all_regular_ipa_passes);
2431 /* Some targets need to handle LTO assembler output specially. */
2432 if (flag_generate_lto || flag_generate_offload)
2433 targetm.asm_out.lto_start ();
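/* Write the IPA summaries: first into the offload sections when offloading
   is enabled, then into the host LTO sections when -flto is given.  */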
2435 if (!in_lto_p
2436 || flag_incremental_link == INCREMENTAL_LINK_LTO)
2438 if (!quiet_flag)
2439 fprintf (stderr, "Streaming LTO\n");
2440 if (g->have_offload)
2442 section_name_prefix = OFFLOAD_SECTION_NAME_PREFIX;
2443 lto_stream_offload_p = true;
2444 ipa_write_summaries ();
2445 lto_stream_offload_p = false;
2447 if (flag_lto)
2449 section_name_prefix = LTO_SECTION_NAME_PREFIX;
2450 lto_stream_offload_p = false;
2451 ipa_write_summaries ();
2455 if (flag_generate_lto || flag_generate_offload)
2456 targetm.asm_out.lto_end ();
2458 if (!flag_ltrans
2459 && ((in_lto_p && flag_incremental_link != INCREMENTAL_LINK_LTO)
2460 || !flag_lto || flag_fat_lto_objects))
2461 execute_ipa_pass_list (passes->all_regular_ipa_passes);
2462 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
2464 bitmap_obstack_release (NULL);
2468 /* Return the identifier of the symbol that DECL's "alias" attribute refers to. */
2470 static tree
2471 get_alias_symbol (tree decl)
2473 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
2474 return get_identifier (TREE_STRING_POINTER
2475 (TREE_VALUE (TREE_VALUE (alias))));
2479 /* Weakrefs may be associated with external decls and thus not output
2480 at expansion time. Emit all necessary aliases. */
2482 void
2483 symbol_table::output_weakrefs (void)
2485 symtab_node *node;
2486 FOR_EACH_SYMBOL (node)
2487 if (node->alias
2488 && !TREE_ASM_WRITTEN (node->decl)
2489 && node->weakref)
2491 tree target;
2493 /* Weakrefs are special in that they do not require the target to be defined
2494 in the current compilation unit, so it is a bit hard to work out what we
2495 want to alias.
2496 When the alias target is defined, we need to fetch it from the symtab
2497 reference; otherwise it is pointed to by alias_target. */
2498 if (node->alias_target)
2499 target = (DECL_P (node->alias_target)
2500 ? DECL_ASSEMBLER_NAME (node->alias_target)
2501 : node->alias_target);
2502 else if (node->analyzed)
2503 target = DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl);
2504 else
2506 gcc_unreachable ();
2507 target = get_alias_symbol (node->decl);
2509 do_assemble_alias (node->decl, target);
2513 /* Perform simple optimizations based on callgraph. */
2515 void
2516 symbol_table::compile (void)
2518 if (seen_error ())
2519 return;
2521 symtab_node::checking_verify_symtab_nodes ();
2523 timevar_push (TV_CGRAPHOPT);
2524 if (pre_ipa_mem_report)
2526 fprintf (stderr, "Memory consumption before IPA\n");
2527 dump_memory_report (false);
2529 if (!quiet_flag)
2530 fprintf (stderr, "Performing interprocedural optimizations\n");
2531 state = IPA;
2533 /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE. */
2534 if (flag_generate_lto || flag_generate_offload)
2535 lto_streamer_hooks_init ();
2537 /* Don't run the IPA passes if there were any errors or sorry messages. */
2538 if (!seen_error ())
2539 ipa_passes ();
2541 /* Do nothing else if any IPA pass found errors or if we are just streaming LTO. */
2542 if (seen_error ()
2543 || ((!in_lto_p || flag_incremental_link == INCREMENTAL_LINK_LTO)
2544 && flag_lto && !flag_fat_lto_objects))
2546 timevar_pop (TV_CGRAPHOPT);
2547 return;
2550 global_info_ready = true;
2551 if (dump_file)
2553 fprintf (dump_file, "Optimized ");
2554 symtab->dump (dump_file);
2556 if (post_ipa_mem_report)
2558 fprintf (stderr, "Memory consumption after IPA\n");
2559 dump_memory_report (false);
2561 timevar_pop (TV_CGRAPHOPT);
2563 /* Output everything. */
2564 switch_to_section (text_section);
2565 (*debug_hooks->assembly_start) ();
2566 if (!quiet_flag)
2567 fprintf (stderr, "Assembling functions:\n");
2568 symtab_node::checking_verify_symtab_nodes ();
2570 bitmap_obstack_initialize (NULL);
2571 execute_ipa_pass_list (g->get_passes ()->all_late_ipa_passes);
2572 bitmap_obstack_release (NULL);
2573 mark_functions_to_output ();
2575 /* When weakref support is missing, we automatically translate all
2576 references to NODE into references to its ultimate alias target.
2577 The renaming mechanism uses the flag IDENTIFIER_TRANSPARENT_ALIAS and
2578 TREE_CHAIN.
2580 Set up this mapping before we output any assembler, but only once we are
2581 sure that all symbol renaming is done.
2583 FIXME: All this ugliness can go away if we just do the renaming at the
2584 GIMPLE level by physically rewriting the IL. At the moment we can only
2585 redirect calls, so we need infrastructure for renaming references as well. */
2586 #ifndef ASM_OUTPUT_WEAKREF
2587 symtab_node *node;
2589 FOR_EACH_SYMBOL (node)
2590 if (node->alias
2591 && lookup_attribute ("weakref", DECL_ATTRIBUTES (node->decl)))
2593 IDENTIFIER_TRANSPARENT_ALIAS
2594 (DECL_ASSEMBLER_NAME (node->decl)) = 1;
2595 TREE_CHAIN (DECL_ASSEMBLER_NAME (node->decl))
2596 = (node->alias_target ? node->alias_target
2597 : DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl));
2599 #endif
2601 state = EXPANSION;
2603 /* Output first asm statements and anything ordered. The process
2604 flag is cleared for these nodes, so we skip them later. */
2605 output_in_order ();
2606 expand_all_functions ();
2607 output_variables ();
2609 process_new_functions ();
2610 state = FINISHED;
2611 output_weakrefs ();
2613 if (dump_file)
2615 fprintf (dump_file, "\nFinal ");
2616 symtab->dump (dump_file);
2618 if (!flag_checking)
2619 return;
2620 symtab_node::verify_symtab_nodes ();
2621 /* Double check that all inline clones are gone and that all
2622 function bodies have been released from memory. */
2623 if (!seen_error ())
2625 cgraph_node *node;
2626 bool error_found = false;
2628 FOR_EACH_DEFINED_FUNCTION (node)
2629 if (node->global.inlined_to
2630 || gimple_has_body_p (node->decl))
2632 error_found = true;
2633 node->debug ();
2635 if (error_found)
2636 internal_error ("nodes with unreleased memory found");
2641 /* Analyze the whole compilation unit once it is parsed completely. */
2643 void
2644 symbol_table::finalize_compilation_unit (void)
2646 timevar_push (TV_CGRAPH);
2648 /* If we're here there's no current function anymore. Some frontends
2649 are lazy in clearing these. */
2650 current_function_decl = NULL;
2651 set_cfun (NULL);
2653 /* Do not skip analyzing the functions if there were errors; we would
2654 otherwise miss diagnostics for the following functions. */
2656 /* Emit size functions we didn't inline. */
2657 finalize_size_functions ();
2659 /* Mark alias targets necessary and emit diagnostics. */
2660 handle_alias_pairs ();
2662 if (!quiet_flag)
2664 fprintf (stderr, "\nAnalyzing compilation unit\n");
2665 fflush (stderr);
2668 if (flag_dump_passes)
2669 dump_passes ();
2671 /* Gimplify and lower all functions, compute reachability and
2672 remove unreachable nodes. */
2673 analyze_functions (/*first_time=*/true);
2675 /* Mark alias targets necessary and emit diagnostics. */
2676 handle_alias_pairs ();
2678 /* Gimplify and lower thunks. */
2679 analyze_functions (/*first_time=*/false);
2681 /* Offloading requires LTO infrastructure. */
2682 if (!in_lto_p && g->have_offload)
2683 flag_generate_offload = 1;
2685 if (!seen_error ())
2687 /* Emit early debug for reachable functions, and by consequence,
2688 locally scoped symbols. */
2689 struct cgraph_node *cnode;
2690 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (cnode)
2691 (*debug_hooks->early_global_decl) (cnode->decl);
2693 /* Clean up anything that needs cleaning up after initial debug
2694 generation. */
2695 (*debug_hooks->early_finish) (main_input_filename);
2698 /* Finally drive the pass manager. */
2699 compile ();
2701 timevar_pop (TV_CGRAPH);
2704 /* Reset all state within cgraphunit.c so that we can rerun the compiler
2705 within the same process. For use by toplev::finalize. */
2707 void
2708 cgraphunit_c_finalize (void)
2710 gcc_assert (cgraph_new_nodes.length () == 0);
2711 cgraph_new_nodes.truncate (0);
2713 vtable_entry_type = NULL;
2714 queued_nodes = &symtab_terminator;
2716 first_analyzed = NULL;
2717 first_analyzed_var = NULL;
2720 /* Create a wrapper that redirects this cgraph_node to the TARGET node.
2721 A thunk is used to implement this kind of wrapper method. */
2723 void
2724 cgraph_node::create_wrapper (cgraph_node *target)
2726 /* Preserve DECL_RESULT so we get the right by-reference flag. */
2727 tree decl_result = DECL_RESULT (decl);
2729 /* Remove the function's body but keep its arguments so they can be
2730 reused by the thunk. */
2731 release_body (true);
2732 reset ();
2734 DECL_UNINLINABLE (decl) = false;
2735 DECL_RESULT (decl) = decl_result;
2736 DECL_INITIAL (decl) = NULL;
2737 allocate_struct_function (decl, false);
2738 set_cfun (NULL);
2740 /* Turn alias into thunk and expand it into GIMPLE representation. */
2741 definition = true;
2743 memset (&thunk, 0, sizeof (cgraph_thunk_info));
2744 thunk.thunk_p = true;
2745 create_edge (target, NULL, count);
2746 callees->can_throw_external = !TREE_NOTHROW (target->decl);
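/* The thunk body never takes the address of its arguments, so drop any
   TREE_ADDRESSABLE flags inherited from the original function.  */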
2748 tree arguments = DECL_ARGUMENTS (decl);
2750 while (arguments)
2752 TREE_ADDRESSABLE (arguments) = false;
2753 arguments = TREE_CHAIN (arguments);
2756 expand_thunk (false, true);
2758 /* Inline summary set-up. */
2759 analyze ();
2760 inline_analyze_function (this);
2763 #include "gt-cgraphunit.h"