* config/riscv/riscv.c: Remove unnecessary includes. Reorder
[official-gcc.git] / gcc / cgraphunit.c
blob6072c567bc36660cbf8b6f4d5fcba42544cef751
1 /* Driver of optimization process
2 Copyright (C) 2003-2017 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* This module implements main driver of compilation process.
23 The main scope of this file is to act as an interface in between
24 tree based frontends and the backend.
26 The front-end is supposed to use following functionality:
28 - finalize_function
30 This function is called once front-end has parsed whole body of function
31 and it is certain that the function body nor the declaration will change.
33 (There is one exception needed for implementing GCC extern inline
34 function.)
36 - varpool_finalize_decl
38 This function has same behavior as the above but is used for static
39 variables.
41 - add_asm_node
43 Insert new toplevel ASM statement
45 - finalize_compilation_unit
47 This function is called once (source level) compilation unit is finalized
48 and it will no longer change.
50 The symbol table is constructed starting from the trivially needed
51 symbols finalized by the frontend. Functions are lowered into
52 GIMPLE representation and callgraph/reference lists are constructed.
53 Those are used to discover other necessary functions and variables.
55 At the end the bodies of unreachable functions are removed.
57 The function can be called multiple times when multiple source level
58 compilation units are combined.
60 - compile
62 This passes control to the back-end. Optimizations are performed and
63 final assembler is generated. This is done in the following way. Note
64 that with link time optimization the process is split into three
65 stages (compile time, linktime analysis and parallel linktime as
 66    indicated below).
68 Compile time:
70 1) Inter-procedural optimization.
71 (ipa_passes)
73 This part is further split into:
75 a) early optimizations. These are local passes executed in
76 the topological order on the callgraph.
 78       The purpose of early optimizations is to optimize away simple
79 things that may otherwise confuse IP analysis. Very simple
80 propagation across the callgraph is done i.e. to discover
81 functions without side effects and simple inlining is performed.
83 b) early small interprocedural passes.
85 Those are interprocedural passes executed only at compilation
 86       time.  These include, for example, transactional memory lowering,
87 unreachable code removal and other simple transformations.
89 c) IP analysis stage. All interprocedural passes do their
90 analysis.
92 Interprocedural passes differ from small interprocedural
93 passes by their ability to operate across whole program
94 at linktime. Their analysis stage is performed early to
95 both reduce linking times and linktime memory usage by
96 not having to represent whole program in memory.
 98     d) LTO streaming.  When doing LTO, everything important gets
99 streamed into the object file.
101 Compile time and or linktime analysis stage (WPA):
103 At linktime units gets streamed back and symbol table is
104 merged. Function bodies are not streamed in and not
105 available.
106 e) IP propagation stage. All IP passes execute their
107 IP propagation. This is done based on the earlier analysis
108 without having function bodies at hand.
109 f) Ltrans streaming. When doing WHOPR LTO, the program
110       is partitioned and streamed into multiple object files.
112 Compile time and/or parallel linktime stage (ltrans)
114 Each of the object files is streamed back and compiled
115 separately. Now the function bodies becomes available
116 again.
118 2) Virtual clone materialization
119 (cgraph_materialize_clone)
121       IP passes can produce copies of existing functions (such
122 as versioned clones or inline clones) without actually
123 manipulating their bodies by creating virtual clones in
124 the callgraph. At this time the virtual clones are
125 turned into real functions
126 3) IP transformation
128 All IP passes transform function bodies based on earlier
129 decision of the IP propagation.
131 4) late small IP passes
133 Simple IP passes working within single program partition.
135 5) Expansion
136 (expand_all_functions)
138 At this stage functions that needs to be output into
139 assembler are identified and compiled in topological order
140 6) Output of variables and aliases
141 Now it is known what variable references was not optimized
142 out and thus all variables are output to the file.
144 Note that with -fno-toplevel-reorder passes 5 and 6
145 are combined together in cgraph_output_in_order.
147 Finally there are functions to manipulate the callgraph from
148 backend.
149 - cgraph_add_new_function is used to add backend produced
150 functions introduced after the unit is finalized.
151 The functions are enqueue for later processing and inserted
152 into callgraph with cgraph_process_new_functions.
154 - cgraph_function_versioning
156 produces a copy of function into new one (a version)
157       and apply simple transformations.  */
160 #include "config.h"
161 #include "system.h"
162 #include "coretypes.h"
163 #include "backend.h"
164 #include "target.h"
165 #include "rtl.h"
166 #include "tree.h"
167 #include "gimple.h"
168 #include "cfghooks.h"
169 #include "regset.h" /* FIXME: For reg_obstack. */
170 #include "alloc-pool.h"
171 #include "tree-pass.h"
172 #include "stringpool.h"
173 #include "gimple-ssa.h"
174 #include "cgraph.h"
175 #include "coverage.h"
176 #include "lto-streamer.h"
177 #include "fold-const.h"
178 #include "varasm.h"
179 #include "stor-layout.h"
180 #include "output.h"
181 #include "cfgcleanup.h"
182 #include "gimple-fold.h"
183 #include "gimplify.h"
184 #include "gimple-iterator.h"
185 #include "gimplify-me.h"
186 #include "tree-cfg.h"
187 #include "tree-into-ssa.h"
188 #include "tree-ssa.h"
189 #include "langhooks.h"
190 #include "toplev.h"
191 #include "debug.h"
192 #include "symbol-summary.h"
193 #include "tree-vrp.h"
194 #include "ipa-prop.h"
195 #include "gimple-pretty-print.h"
196 #include "plugin.h"
197 #include "ipa-fnsummary.h"
198 #include "ipa-utils.h"
199 #include "except.h"
200 #include "cfgloop.h"
201 #include "context.h"
202 #include "pass_manager.h"
203 #include "tree-nested.h"
204 #include "dbgcnt.h"
205 #include "tree-chkp.h"
206 #include "lto-section-names.h"
208 /* Queue of cgraph nodes scheduled to be added into cgraph. This is a
209 secondary queue used during optimization to accommodate passes that
210 may generate new functions that need to be optimized and expanded. */
211 vec<cgraph_node *> cgraph_new_nodes;
213 static void expand_all_functions (void);
214 static void mark_functions_to_output (void);
215 static void handle_alias_pairs (void);
217 /* Used for vtable lookup in thunk adjusting. */
218 static GTY (()) tree vtable_entry_type;
220 /* Return true if this symbol is a function from the C frontend specified
221 directly in RTL form (with "__RTL"). */
223 bool
224 symtab_node::native_rtl_p () const
226 if (TREE_CODE (decl) != FUNCTION_DECL)
227 return false;
228 if (!DECL_STRUCT_FUNCTION (decl))
229 return false;
230 return DECL_STRUCT_FUNCTION (decl)->curr_properties & PROP_rtl;
233 /* Determine if symbol declaration is needed. That is, visible to something
234 either outside this translation unit, something magic in the system
235 configury */
236 bool
237 symtab_node::needed_p (void)
239 /* Double check that no one output the function into assembly file
240 early. */
241 if (!native_rtl_p ())
242 gcc_checking_assert
243 (!DECL_ASSEMBLER_NAME_SET_P (decl)
244 || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)));
246 if (!definition)
247 return false;
249 if (DECL_EXTERNAL (decl))
250 return false;
252 /* If the user told us it is used, then it must be so. */
253 if (force_output)
254 return true;
256 /* ABI forced symbols are needed when they are external. */
257 if (forced_by_abi && TREE_PUBLIC (decl))
258 return true;
260 /* Keep constructors, destructors and virtual functions. */
261 if (TREE_CODE (decl) == FUNCTION_DECL
262 && (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl)))
263 return true;
265 /* Externally visible variables must be output. The exception is
266 COMDAT variables that must be output only when they are needed. */
267 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
268 return true;
270 return false;
273 /* Head and terminator of the queue of nodes to be processed while building
274 callgraph. */
276 static symtab_node symtab_terminator;
277 static symtab_node *queued_nodes = &symtab_terminator;
279 /* Add NODE to queue starting at QUEUED_NODES.
280 The queue is linked via AUX pointers and terminated by pointer to 1. */
282 static void
283 enqueue_node (symtab_node *node)
285 if (node->aux)
286 return;
287 gcc_checking_assert (queued_nodes);
288 node->aux = queued_nodes;
289 queued_nodes = node;
292 /* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
293 functions into callgraph in a way so they look like ordinary reachable
294 functions inserted into callgraph already at construction time. */
296 void
297 symbol_table::process_new_functions (void)
299 tree fndecl;
301 if (!cgraph_new_nodes.exists ())
302 return;
304 handle_alias_pairs ();
305 /* Note that this queue may grow as its being processed, as the new
306 functions may generate new ones. */
307 for (unsigned i = 0; i < cgraph_new_nodes.length (); i++)
309 cgraph_node *node = cgraph_new_nodes[i];
310 fndecl = node->decl;
311 switch (state)
313 case CONSTRUCTION:
314 /* At construction time we just need to finalize function and move
315 it into reachable functions list. */
317 cgraph_node::finalize_function (fndecl, false);
318 call_cgraph_insertion_hooks (node);
319 enqueue_node (node);
320 break;
322 case IPA:
323 case IPA_SSA:
324 case IPA_SSA_AFTER_INLINING:
325 /* When IPA optimization already started, do all essential
326 transformations that has been already performed on the whole
327 cgraph but not on this function. */
329 gimple_register_cfg_hooks ();
330 if (!node->analyzed)
331 node->analyze ();
332 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
333 if ((state == IPA_SSA || state == IPA_SSA_AFTER_INLINING)
334 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
336 bool summaried_computed = ipa_fn_summaries != NULL;
337 g->get_passes ()->execute_early_local_passes ();
338 /* Early passes compure inline parameters to do inlining
339 and splitting. This is redundant for functions added late.
340 Just throw away whatever it did. */
341 if (!summaried_computed)
342 ipa_free_fn_summary ();
344 else if (ipa_fn_summaries != NULL)
345 compute_fn_summary (node, true);
346 free_dominance_info (CDI_POST_DOMINATORS);
347 free_dominance_info (CDI_DOMINATORS);
348 pop_cfun ();
349 call_cgraph_insertion_hooks (node);
350 break;
352 case EXPANSION:
353 /* Functions created during expansion shall be compiled
354 directly. */
355 node->process = 0;
356 call_cgraph_insertion_hooks (node);
357 node->expand ();
358 break;
360 default:
361 gcc_unreachable ();
362 break;
366 cgraph_new_nodes.release ();
369 /* As an GCC extension we allow redefinition of the function. The
370 semantics when both copies of bodies differ is not well defined.
371 We replace the old body with new body so in unit at a time mode
372 we always use new body, while in normal mode we may end up with
373 old body inlined into some functions and new body expanded and
374 inlined in others.
376 ??? It may make more sense to use one body for inlining and other
377 body for expanding the function but this is difficult to do. */
379 void
380 cgraph_node::reset (void)
382 /* If process is set, then we have already begun whole-unit analysis.
383 This is *not* testing for whether we've already emitted the function.
384 That case can be sort-of legitimately seen with real function redefinition
385 errors. I would argue that the front end should never present us with
386 such a case, but don't enforce that for now. */
387 gcc_assert (!process);
389 /* Reset our data structures so we can analyze the function again. */
390 memset (&local, 0, sizeof (local));
391 memset (&global, 0, sizeof (global));
392 memset (&rtl, 0, sizeof (rtl));
393 analyzed = false;
394 definition = false;
395 alias = false;
396 transparent_alias = false;
397 weakref = false;
398 cpp_implicit_alias = false;
400 remove_callees ();
401 remove_all_references ();
404 /* Return true when there are references to the node. INCLUDE_SELF is
405 true if a self reference counts as a reference. */
407 bool
408 symtab_node::referred_to_p (bool include_self)
410 ipa_ref *ref = NULL;
412 /* See if there are any references at all. */
413 if (iterate_referring (0, ref))
414 return true;
415 /* For functions check also calls. */
416 cgraph_node *cn = dyn_cast <cgraph_node *> (this);
417 if (cn && cn->callers)
419 if (include_self)
420 return true;
421 for (cgraph_edge *e = cn->callers; e; e = e->next_caller)
422 if (e->caller != this)
423 return true;
425 return false;
428 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
429 logic in effect. If NO_COLLECT is true, then our caller cannot stand to have
430 the garbage collector run at the moment. We would need to either create
431 a new GC context, or just not compile right now. */
433 void
434 cgraph_node::finalize_function (tree decl, bool no_collect)
436 cgraph_node *node = cgraph_node::get_create (decl);
438 if (node->definition)
440 /* Nested functions should only be defined once. */
441 gcc_assert (!DECL_CONTEXT (decl)
442 || TREE_CODE (DECL_CONTEXT (decl)) != FUNCTION_DECL);
443 node->reset ();
444 node->local.redefined_extern_inline = true;
447 /* Set definition first before calling notice_global_symbol so that
448 it is available to notice_global_symbol. */
449 node->definition = true;
450 notice_global_symbol (decl);
451 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
452 if (!flag_toplevel_reorder)
453 node->no_reorder = true;
455 /* With -fkeep-inline-functions we are keeping all inline functions except
456 for extern inline ones. */
457 if (flag_keep_inline_functions
458 && DECL_DECLARED_INLINE_P (decl)
459 && !DECL_EXTERNAL (decl)
460 && !DECL_DISREGARD_INLINE_LIMITS (decl))
461 node->force_output = 1;
463 /* __RTL functions were already output as soon as they were parsed (due
464 to the large amount of global state in the backend).
465 Mark such functions as "force_output" to reflect the fact that they
466 will be in the asm file when considering the symbols they reference.
467 The attempt to output them later on will bail out immediately. */
468 if (node->native_rtl_p ())
469 node->force_output = 1;
471 /* When not optimizing, also output the static functions. (see
472 PR24561), but don't do so for always_inline functions, functions
473 declared inline and nested functions. These were optimized out
474 in the original implementation and it is unclear whether we want
475 to change the behavior here. */
476 if (((!opt_for_fn (decl, optimize) || flag_keep_static_functions
477 || node->no_reorder)
478 && !node->cpp_implicit_alias
479 && !DECL_DISREGARD_INLINE_LIMITS (decl)
480 && !DECL_DECLARED_INLINE_P (decl)
481 && !(DECL_CONTEXT (decl)
482 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL))
483 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
484 node->force_output = 1;
486 /* If we've not yet emitted decl, tell the debug info about it. */
487 if (!TREE_ASM_WRITTEN (decl))
488 (*debug_hooks->deferred_inline_function) (decl);
490 if (!no_collect)
491 ggc_collect ();
493 if (symtab->state == CONSTRUCTION
494 && (node->needed_p () || node->referred_to_p ()))
495 enqueue_node (node);
498 /* Add the function FNDECL to the call graph.
499 Unlike finalize_function, this function is intended to be used
500 by middle end and allows insertion of new function at arbitrary point
501 of compilation. The function can be either in high, low or SSA form
502 GIMPLE.
504 The function is assumed to be reachable and have address taken (so no
505 API breaking optimizations are performed on it).
507 Main work done by this function is to enqueue the function for later
508 processing to avoid need the passes to be re-entrant. */
510 void
511 cgraph_node::add_new_function (tree fndecl, bool lowered)
513 gcc::pass_manager *passes = g->get_passes ();
514 cgraph_node *node;
516 if (dump_file)
518 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
519 const char *function_type = ((gimple_has_body_p (fndecl))
520 ? (lowered
521 ? (gimple_in_ssa_p (fn)
522 ? "ssa gimple"
523 : "low gimple")
524 : "high gimple")
525 : "to-be-gimplified");
526 fprintf (dump_file,
527 "Added new %s function %s to callgraph\n",
528 function_type,
529 fndecl_name (fndecl));
532 switch (symtab->state)
534 case PARSING:
535 cgraph_node::finalize_function (fndecl, false);
536 break;
537 case CONSTRUCTION:
538 /* Just enqueue function to be processed at nearest occurrence. */
539 node = cgraph_node::get_create (fndecl);
540 if (lowered)
541 node->lowered = true;
542 cgraph_new_nodes.safe_push (node);
543 break;
545 case IPA:
546 case IPA_SSA:
547 case IPA_SSA_AFTER_INLINING:
548 case EXPANSION:
549 /* Bring the function into finalized state and enqueue for later
550 analyzing and compilation. */
551 node = cgraph_node::get_create (fndecl);
552 node->local.local = false;
553 node->definition = true;
554 node->force_output = true;
555 if (TREE_PUBLIC (fndecl))
556 node->externally_visible = true;
557 if (!lowered && symtab->state == EXPANSION)
559 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
560 gimple_register_cfg_hooks ();
561 bitmap_obstack_initialize (NULL);
562 execute_pass_list (cfun, passes->all_lowering_passes);
563 passes->execute_early_local_passes ();
564 bitmap_obstack_release (NULL);
565 pop_cfun ();
567 lowered = true;
569 if (lowered)
570 node->lowered = true;
571 cgraph_new_nodes.safe_push (node);
572 break;
574 case FINISHED:
575 /* At the very end of compilation we have to do all the work up
576 to expansion. */
577 node = cgraph_node::create (fndecl);
578 if (lowered)
579 node->lowered = true;
580 node->definition = true;
581 node->analyze ();
582 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
583 gimple_register_cfg_hooks ();
584 bitmap_obstack_initialize (NULL);
585 if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
586 g->get_passes ()->execute_early_local_passes ();
587 bitmap_obstack_release (NULL);
588 pop_cfun ();
589 node->expand ();
590 break;
592 default:
593 gcc_unreachable ();
596 /* Set a personality if required and we already passed EH lowering. */
597 if (lowered
598 && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
599 == eh_personality_lang))
600 DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
603 /* Analyze the function scheduled to be output. */
604 void
605 cgraph_node::analyze (void)
607 if (native_rtl_p ())
609 analyzed = true;
610 return;
613 tree decl = this->decl;
614 location_t saved_loc = input_location;
615 input_location = DECL_SOURCE_LOCATION (decl);
617 if (thunk.thunk_p)
619 cgraph_node *t = cgraph_node::get (thunk.alias);
621 create_edge (t, NULL, t->count, CGRAPH_FREQ_BASE);
622 callees->can_throw_external = !TREE_NOTHROW (t->decl);
623 /* Target code in expand_thunk may need the thunk's target
624 to be analyzed, so recurse here. */
625 if (!t->analyzed)
626 t->analyze ();
627 if (t->alias)
629 t = t->get_alias_target ();
630 if (!t->analyzed)
631 t->analyze ();
633 if (!expand_thunk (false, false))
635 thunk.alias = NULL;
636 return;
638 thunk.alias = NULL;
640 if (alias)
641 resolve_alias (cgraph_node::get (alias_target), transparent_alias);
642 else if (dispatcher_function)
644 /* Generate the dispatcher body of multi-versioned functions. */
645 cgraph_function_version_info *dispatcher_version_info
646 = function_version ();
647 if (dispatcher_version_info != NULL
648 && (dispatcher_version_info->dispatcher_resolver
649 == NULL_TREE))
651 tree resolver = NULL_TREE;
652 gcc_assert (targetm.generate_version_dispatcher_body);
653 resolver = targetm.generate_version_dispatcher_body (this);
654 gcc_assert (resolver != NULL_TREE);
657 else
659 push_cfun (DECL_STRUCT_FUNCTION (decl));
661 assign_assembler_name_if_needed (decl);
663 /* Make sure to gimplify bodies only once. During analyzing a
664 function we lower it, which will require gimplified nested
665 functions, so we can end up here with an already gimplified
666 body. */
667 if (!gimple_has_body_p (decl))
668 gimplify_function_tree (decl);
670 /* Lower the function. */
671 if (!lowered)
673 if (nested)
674 lower_nested_functions (decl);
675 gcc_assert (!nested);
677 gimple_register_cfg_hooks ();
678 bitmap_obstack_initialize (NULL);
679 execute_pass_list (cfun, g->get_passes ()->all_lowering_passes);
680 free_dominance_info (CDI_POST_DOMINATORS);
681 free_dominance_info (CDI_DOMINATORS);
682 compact_blocks ();
683 bitmap_obstack_release (NULL);
684 lowered = true;
687 pop_cfun ();
689 analyzed = true;
691 input_location = saved_loc;
694 /* C++ frontend produce same body aliases all over the place, even before PCH
695 gets streamed out. It relies on us linking the aliases with their function
696 in order to do the fixups, but ipa-ref is not PCH safe. Consequentely we
697 first produce aliases without links, but once C++ FE is sure he won't sream
698 PCH we build the links via this function. */
700 void
701 symbol_table::process_same_body_aliases (void)
703 symtab_node *node;
704 FOR_EACH_SYMBOL (node)
705 if (node->cpp_implicit_alias && !node->analyzed)
706 node->resolve_alias
707 (VAR_P (node->alias_target)
708 ? (symtab_node *)varpool_node::get_create (node->alias_target)
709 : (symtab_node *)cgraph_node::get_create (node->alias_target));
710 cpp_implicit_aliases_done = true;
713 /* Process attributes common for vars and functions. */
715 static void
716 process_common_attributes (symtab_node *node, tree decl)
718 tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
720 if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
722 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
723 "%<weakref%> attribute should be accompanied with"
724 " an %<alias%> attribute");
725 DECL_WEAK (decl) = 0;
726 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
727 DECL_ATTRIBUTES (decl));
730 if (lookup_attribute ("no_reorder", DECL_ATTRIBUTES (decl)))
731 node->no_reorder = 1;
734 /* Look for externally_visible and used attributes and mark cgraph nodes
735 accordingly.
737 We cannot mark the nodes at the point the attributes are processed (in
738 handle_*_attribute) because the copy of the declarations available at that
739 point may not be canonical. For example, in:
741 void f();
742 void f() __attribute__((used));
744 the declaration we see in handle_used_attribute will be the second
745 declaration -- but the front end will subsequently merge that declaration
746 with the original declaration and discard the second declaration.
748 Furthermore, we can't mark these nodes in finalize_function because:
750 void f() {}
751 void f() __attribute__((externally_visible));
753 is valid.
755 So, we walk the nodes at the end of the translation unit, applying the
756 attributes at that point. */
758 static void
759 process_function_and_variable_attributes (cgraph_node *first,
760 varpool_node *first_var)
762 cgraph_node *node;
763 varpool_node *vnode;
765 for (node = symtab->first_function (); node != first;
766 node = symtab->next_function (node))
768 tree decl = node->decl;
769 if (DECL_PRESERVE_P (decl))
770 node->mark_force_output ();
771 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
773 if (! TREE_PUBLIC (node->decl))
774 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
775 "%<externally_visible%>"
776 " attribute have effect only on public objects");
778 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
779 && (node->definition && !node->alias))
781 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
782 "%<weakref%> attribute ignored"
783 " because function is defined");
784 DECL_WEAK (decl) = 0;
785 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
786 DECL_ATTRIBUTES (decl));
789 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
790 && !DECL_DECLARED_INLINE_P (decl)
791 /* redefining extern inline function makes it DECL_UNINLINABLE. */
792 && !DECL_UNINLINABLE (decl))
793 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
794 "always_inline function might not be inlinable");
796 process_common_attributes (node, decl);
798 for (vnode = symtab->first_variable (); vnode != first_var;
799 vnode = symtab->next_variable (vnode))
801 tree decl = vnode->decl;
802 if (DECL_EXTERNAL (decl)
803 && DECL_INITIAL (decl))
804 varpool_node::finalize_decl (decl);
805 if (DECL_PRESERVE_P (decl))
806 vnode->force_output = true;
807 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
809 if (! TREE_PUBLIC (vnode->decl))
810 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
811 "%<externally_visible%>"
812 " attribute have effect only on public objects");
814 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
815 && vnode->definition
816 && DECL_INITIAL (decl))
818 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
819 "%<weakref%> attribute ignored"
820 " because variable is initialized");
821 DECL_WEAK (decl) = 0;
822 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
823 DECL_ATTRIBUTES (decl));
825 process_common_attributes (vnode, decl);
829 /* Mark DECL as finalized. By finalizing the declaration, frontend instruct the
830 middle end to output the variable to asm file, if needed or externally
831 visible. */
833 void
834 varpool_node::finalize_decl (tree decl)
836 varpool_node *node = varpool_node::get_create (decl);
838 gcc_assert (TREE_STATIC (decl) || DECL_EXTERNAL (decl));
840 if (node->definition)
841 return;
842 /* Set definition first before calling notice_global_symbol so that
843 it is available to notice_global_symbol. */
844 node->definition = true;
845 notice_global_symbol (decl);
846 if (!flag_toplevel_reorder)
847 node->no_reorder = true;
848 if (TREE_THIS_VOLATILE (decl) || DECL_PRESERVE_P (decl)
849 /* Traditionally we do not eliminate static variables when not
850 optimizing and when not doing toplevel reoder. */
851 || (node->no_reorder && !DECL_COMDAT (node->decl)
852 && !DECL_ARTIFICIAL (node->decl)))
853 node->force_output = true;
855 if (symtab->state == CONSTRUCTION
856 && (node->needed_p () || node->referred_to_p ()))
857 enqueue_node (node);
858 if (symtab->state >= IPA_SSA)
859 node->analyze ();
860 /* Some frontends produce various interface variables after compilation
861 finished. */
862 if (symtab->state == FINISHED
863 || (node->no_reorder
864 && symtab->state == EXPANSION))
865 node->assemble_decl ();
867 if (DECL_INITIAL (decl))
868 chkp_register_var_initializer (decl);
871 /* EDGE is an polymorphic call. Mark all possible targets as reachable
872 and if there is only one target, perform trivial devirtualization.
873 REACHABLE_CALL_TARGETS collects target lists we already walked to
874 avoid udplicate work. */
876 static void
877 walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
878 cgraph_edge *edge)
880 unsigned int i;
881 void *cache_token;
882 bool final;
883 vec <cgraph_node *>targets
884 = possible_polymorphic_call_targets
885 (edge, &final, &cache_token);
887 if (!reachable_call_targets->add (cache_token))
889 if (symtab->dump_file)
890 dump_possible_polymorphic_call_targets
891 (symtab->dump_file, edge);
893 for (i = 0; i < targets.length (); i++)
895 /* Do not bother to mark virtual methods in anonymous namespace;
896 either we will find use of virtual table defining it, or it is
897 unused. */
898 if (targets[i]->definition
899 && TREE_CODE
900 (TREE_TYPE (targets[i]->decl))
901 == METHOD_TYPE
902 && !type_in_anonymous_namespace_p
903 (TYPE_METHOD_BASETYPE (TREE_TYPE (targets[i]->decl))))
904 enqueue_node (targets[i]);
908 /* Very trivial devirtualization; when the type is
909 final or anonymous (so we know all its derivation)
910 and there is only one possible virtual call target,
911 make the edge direct. */
912 if (final)
914 if (targets.length () <= 1 && dbg_cnt (devirt))
916 cgraph_node *target;
917 if (targets.length () == 1)
918 target = targets[0];
919 else
920 target = cgraph_node::create
921 (builtin_decl_implicit (BUILT_IN_UNREACHABLE));
923 if (symtab->dump_file)
925 fprintf (symtab->dump_file,
926 "Devirtualizing call: ");
927 print_gimple_stmt (symtab->dump_file,
928 edge->call_stmt, 0,
929 TDF_SLIM);
931 if (dump_enabled_p ())
933 location_t locus = gimple_location_safe (edge->call_stmt);
934 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
935 "devirtualizing call in %s to %s\n",
936 edge->caller->name (), target->name ());
939 edge->make_direct (target);
940 edge->redirect_call_stmt_to_callee ();
942 /* Call to __builtin_unreachable shouldn't be instrumented. */
943 if (!targets.length ())
944 gimple_call_set_with_bounds (edge->call_stmt, false);
946 if (symtab->dump_file)
948 fprintf (symtab->dump_file,
949 "Devirtualized as: ");
950 print_gimple_stmt (symtab->dump_file,
951 edge->call_stmt, 0,
952 TDF_SLIM);
958 /* Issue appropriate warnings for the global declaration DECL. */
/* Diagnose problems with the file-scope symbol SNODE once the whole
   translation unit has been seen: (1) a static function declared but never
   defined, and (2) a static function or variable defined but never used.
   May set TREE_PUBLIC on an undefined static to silence later errors.  */
static void
check_global_declaration (symtab_node *snode)
{
  const char *decl_file;
  tree decl = snode->decl;

  /* Warn about any function declared static but not defined.  We don't
     warn about variables, because many programs have static variables
     that exist only to get some text into the object file.  */
  if (TREE_CODE (decl) == FUNCTION_DECL
      && DECL_INITIAL (decl) == 0
      && DECL_EXTERNAL (decl)
      && ! DECL_ARTIFICIAL (decl)
      && ! TREE_NO_WARNING (decl)
      && ! TREE_PUBLIC (decl)
      && (warn_unused_function
	  || snode->referred_to_p (/*include_self=*/false)))
    {
      /* Referenced but undefined is an error-ish pedwarn; merely declared
	 and unused is only -Wunused-function material.  */
      if (snode->referred_to_p (/*include_self=*/false))
	pedwarn (input_location, 0, "%q+F used but never defined", decl);
      else
	warning (OPT_Wunused_function, "%q+F declared %<static%> but never defined", decl);
      /* This symbol is effectively an "extern" declaration now.  */
      TREE_PUBLIC (decl) = 1;
    }

  /* Warn about static fns or vars defined but not used.  */
  if (((warn_unused_function && TREE_CODE (decl) == FUNCTION_DECL)
       /* Constants are only warned about when -Wunused-const-variable is
	  active; =1 restricts the warning to the main file, =2 covers
	  headers too.  */
       || (((warn_unused_variable && ! TREE_READONLY (decl))
	    || (warn_unused_const_variable > 0 && TREE_READONLY (decl)
		&& (warn_unused_const_variable == 2
		    || (main_input_filename != NULL
			&& (decl_file = DECL_SOURCE_FILE (decl)) != NULL
			&& filename_cmp (main_input_filename,
					 decl_file) == 0))))
	   && VAR_P (decl)))
      && ! DECL_IN_SYSTEM_HEADER (decl)
      && ! snode->referred_to_p (/*include_self=*/false)
      /* This TREE_USED check is needed in addition to referred_to_p
	 above, because the `__unused__' attribute is not being
	 considered for referred_to_p.  */
      && ! TREE_USED (decl)
      /* The TREE_USED bit for file-scope decls is kept in the identifier,
	 to handle multiple external decls in different scopes.  */
      && ! (DECL_NAME (decl) && TREE_USED (DECL_NAME (decl)))
      && ! DECL_EXTERNAL (decl)
      && ! DECL_ARTIFICIAL (decl)
      && ! DECL_ABSTRACT_ORIGIN (decl)
      && ! TREE_PUBLIC (decl)
      /* A volatile variable might be used in some non-obvious way.  */
      && (! VAR_P (decl) || ! TREE_THIS_VOLATILE (decl))
      /* Global register variables must be declared to reserve them.  */
      && ! (VAR_P (decl) && DECL_REGISTER (decl))
      /* Global ctors and dtors are called by the runtime.  */
      && (TREE_CODE (decl) != FUNCTION_DECL
	  || (!DECL_STATIC_CONSTRUCTOR (decl)
	      && !DECL_STATIC_DESTRUCTOR (decl)))
      /* Otherwise, ask the language.  */
      && lang_hooks.decls.warn_unused_global (decl))
    warning_at (DECL_SOURCE_LOCATION (decl),
		(TREE_CODE (decl) == FUNCTION_DECL)
		? OPT_Wunused_function
		: (TREE_READONLY (decl)
		   ? OPT_Wunused_const_variable_
		   : OPT_Wunused_variable),
		"%qD defined but not used", decl);
}
/* Discover all functions and variables that are trivially needed, analyze
   them as well as all functions and variables referred by them */

/* High-water marks for analyze_functions: symbols created before these
   points were already handled by a previous call, so repeated invocations
   (intermodule compilation) only walk the newly added tail of the symbol
   table.  Updated at the end of each analyze_functions run.  */
static cgraph_node *first_analyzed;
static varpool_node *first_analyzed_var;
/* FIRST_TIME is set to TRUE for the first time we are called for a
   translation unit from finalize_compilation_unit() or false
   otherwise.

   Worklist-based closure: enqueue every trivially needed symbol, lower and
   analyze each dequeued definition while enqueueing everything it calls or
   references, iterate until the symbol table stabilizes, then physically
   remove the symbols that never became reachable.  */
static void
analyze_functions (bool first_time)
{
  /* Keep track of already processed nodes when called multiple times for
     intermodule optimization.  */
  cgraph_node *first_handled = first_analyzed;
  varpool_node *first_handled_var = first_analyzed_var;
  hash_set<void *> reachable_call_targets;

  symtab_node *node;
  symtab_node *next;
  int i;
  ipa_ref *ref;
  bool changed = true;
  location_t saved_loc = input_location;

  bitmap_obstack_initialize (NULL);
  symtab->state = CONSTRUCTION;
  input_location = UNKNOWN_LOCATION;

  /* Ugly, but the fixup can not happen at a time same body alias is created;
     C++ FE is confused about the COMDAT groups being right.  */
  if (symtab->cpp_implicit_aliases_done)
    FOR_EACH_SYMBOL (node)
      if (node->cpp_implicit_alias)
	node->fixup_same_cpp_alias_visibility (node->get_alias_target ());
  build_type_inheritance_graph ();

  /* Analysis adds static variables that in turn adds references to new functions.
     So we need to iterate the process until it stabilize.  */
  while (changed)
    {
      changed = false;
      process_function_and_variable_attributes (first_analyzed,
						first_analyzed_var);

      /* First identify the trivially needed symbols.  */
      for (node = symtab->first_symbol ();
	   node != first_analyzed
	   && node != first_analyzed_var; node = node->next)
	{
	  /* Convert COMDAT group designators to IDENTIFIER_NODEs.  */
	  node->get_comdat_group_id ();
	  if (node->needed_p ())
	    {
	      enqueue_node (node);
	      /* Emit the dump header only once, before the first needed
		 symbol of this pass.  */
	      if (!changed && symtab->dump_file)
		fprintf (symtab->dump_file, "Trivially needed symbols:");
	      changed = true;
	      if (symtab->dump_file)
		fprintf (symtab->dump_file, " %s", node->asm_name ());
	      /* NOTE(review): 'changed' was just set true above, so this
		 condition is always false and the newline is never printed
		 here; the trailing "\n" is emitted after the loop instead.
		 Looks like dead code — verify intent.  */
	      if (!changed && symtab->dump_file)
		fprintf (symtab->dump_file, "\n");
	    }
	  if (node == first_analyzed
	      || node == first_analyzed_var)
	    break;
	}
      symtab->process_new_functions ();
      first_analyzed_var = symtab->first_variable ();
      first_analyzed = symtab->first_function ();

      if (changed && symtab->dump_file)
	fprintf (symtab->dump_file, "\n");

      /* Lower representation, build callgraph edges and references for all trivially
         needed symbols and all symbols referred by them.  */
      while (queued_nodes != &symtab_terminator)
	{
	  changed = true;
	  node = queued_nodes;
	  /* Pop the worklist; enqueue_node links nodes through ->aux.  */
	  queued_nodes = (symtab_node *)queued_nodes->aux;
	  cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
	  if (cnode && cnode->definition)
	    {
	      cgraph_edge *edge;
	      tree decl = cnode->decl;

	      /* ??? It is possible to create extern inline function
	         and later using weak alias attribute to kill its body.
	         See gcc.c-torture/compile/20011119-1.c  */
	      if (!DECL_STRUCT_FUNCTION (decl)
		  && !cnode->alias
		  && !cnode->thunk.thunk_p
		  && !cnode->dispatcher_function)
		{
		  cnode->reset ();
		  cnode->local.redefined_extern_inline = true;
		  continue;
		}

	      if (!cnode->analyzed)
		cnode->analyze ();

	      /* Pull in callees that will need a local body.  */
	      for (edge = cnode->callees; edge; edge = edge->next_callee)
		if (edge->callee->definition
		    && (!DECL_EXTERNAL (edge->callee->decl)
			/* When not optimizing, do not try to analyze extern
			   inline functions.  Doing so is pointless.  */
			|| opt_for_fn (edge->callee->decl, optimize)
			/* Weakrefs needs to be preserved.  */
			|| edge->callee->alias
			/* always_inline functions are inlined aven at -O0.  */
			|| lookup_attribute
				("always_inline",
				 DECL_ATTRIBUTES (edge->callee->decl))
			/* Multiversioned functions needs the dispatcher to
			   be produced locally even for extern functions.  */
			|| edge->callee->function_version ()))
		  enqueue_node (edge->callee);
	      if (opt_for_fn (cnode->decl, optimize)
		  && opt_for_fn (cnode->decl, flag_devirtualize))
		{
		  cgraph_edge *next;

		  for (edge = cnode->indirect_calls; edge; edge = next)
		    {
		      next = edge->next_callee;
		      if (edge->indirect_info->polymorphic)
			walk_polymorphic_call_targets (&reachable_call_targets,
						       edge);
		    }
		}

	      /* If decl is a clone of an abstract function,
	         mark that abstract function so that we don't release its body.
	         The DECL_INITIAL() of that abstract function declaration
	         will be later needed to output debug info.  */
	      if (DECL_ABSTRACT_ORIGIN (decl))
		{
		  cgraph_node *origin_node
		    = cgraph_node::get_create (DECL_ABSTRACT_ORIGIN (decl));
		  origin_node->used_as_abstract_origin = true;
		}
	      /* Preserve a functions function context node.  It will
	         later be needed to output debug info.  */
	      if (tree fn = decl_function_context (decl))
		{
		  cgraph_node *origin_node = cgraph_node::get_create (fn);
		  enqueue_node (origin_node);
		}
	    }
	  else
	    {
	      varpool_node *vnode = dyn_cast <varpool_node *> (node);
	      if (vnode && vnode->definition && !vnode->analyzed)
		vnode->analyze ();
	    }

	  /* Keep whole COMDAT groups together.  */
	  if (node->same_comdat_group)
	    {
	      symtab_node *next;
	      for (next = node->same_comdat_group;
		   next != node;
		   next = next->same_comdat_group)
		if (!next->comdat_local_p ())
		  enqueue_node (next);
	    }
	  for (i = 0; node->iterate_reference (i, ref); i++)
	    if (ref->referred->definition
		&& (!DECL_EXTERNAL (ref->referred->decl)
		    || ((TREE_CODE (ref->referred->decl) != FUNCTION_DECL
			 && optimize)
			|| (TREE_CODE (ref->referred->decl) == FUNCTION_DECL
			    && opt_for_fn (ref->referred->decl, optimize))
			|| node->alias
			|| ref->referred->alias)))
	      enqueue_node (ref->referred);
	  symtab->process_new_functions ();
	}
    }
  update_type_inheritance_graph ();

  /* Collect entry points to the unit.  */
  if (symtab->dump_file)
    {
      fprintf (symtab->dump_file, "\n\nInitial ");
      symtab->dump (symtab->dump_file);
    }

  if (first_time)
    {
      symtab_node *snode;
      FOR_EACH_SYMBOL (snode)
	check_global_declaration (snode);
    }

  if (symtab->dump_file)
    fprintf (symtab->dump_file, "\nRemoving unused symbols:");

  /* Anything not reached by the worklist (->aux still NULL) and not
     referenced is dead; let the debug machinery look at it, then drop it.  */
  for (node = symtab->first_symbol ();
       node != first_handled
       && node != first_handled_var; node = next)
    {
      next = node->next;
      if (!node->aux && !node->referred_to_p ())
	{
	  if (symtab->dump_file)
	    fprintf (symtab->dump_file, " %s", node->name ());

	  /* See if the debugger can use anything before the DECL
	     passes away.  Perhaps it can notice a DECL that is now a
	     constant and can tag the early DIE with an appropriate
	     attribute.

	     Otherwise, this is the last chance the debug_hooks have
	     at looking at optimized away DECLs, since
	     late_global_decl will subsequently be called from the
	     contents of the now pruned symbol table.  */
	  if (VAR_P (node->decl)
	      && !decl_function_context (node->decl))
	    {
	      /* We are reclaiming totally unreachable code and variables
	         so they effectively appear as readonly.  Show that to
	         the debug machinery.  */
	      TREE_READONLY (node->decl) = 1;
	      node->definition = false;
	      (*debug_hooks->late_global_decl) (node->decl);
	    }

	  node->remove ();
	  continue;
	}
      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
	{
	  tree decl = node->decl;

	  if (cnode->definition && !gimple_has_body_p (decl)
	      && !cnode->alias
	      && !cnode->thunk.thunk_p)
	    cnode->reset ();

	  gcc_assert (!cnode->definition || cnode->thunk.thunk_p
		      || cnode->alias
		      || gimple_has_body_p (decl)
		      || cnode->native_rtl_p ());
	  gcc_assert (cnode->analyzed == cnode->definition);
	}
      /* Clear the worklist link so the bit can be reused next round.  */
      node->aux = NULL;
    }
  /* Also clear ->aux on the previously handled tail of the list.  */
  for (;node; node = node->next)
    node->aux = NULL;
  first_analyzed = symtab->first_function ();
  first_analyzed_var = symtab->first_variable ();
  if (symtab->dump_file)
    {
      fprintf (symtab->dump_file, "\n\nReclaimed ");
      symtab->dump (symtab->dump_file);
    }
  bitmap_obstack_release (NULL);
  ggc_collect ();
  /* Initialize assembler name hash, in particular we want to trigger C++
     mangling and same body alias creation before we free DECL_ARGUMENTS
     used by it.  */
  if (!seen_error ())
    symtab->symtab_initialize_asm_name_hash ();

  input_location = saved_loc;
}
/* Translate the ugly representation of aliases as alias pairs into nice
   representation in callgraph.  We don't handle all cases yet,
   unfortunately.

   Consumes the global ALIAS_PAIRS vector: every pair is either turned into
   a callgraph/varpool alias, recorded as an unresolved weakref, or
   diagnosed.  The vector is freed at the end.  Note the loop only advances
   I implicitly: unordered_remove shrinks the vector, so removing keeps I
   in place for the element swapped in.  */
static void
handle_alias_pairs (void)
{
  alias_pair *p;
  unsigned i;

  for (i = 0; alias_pairs && alias_pairs->iterate (i, &p);)
    {
      symtab_node *target_node = symtab_node::get_for_asmname (p->target);

      /* Weakrefs with target not defined in current unit are easy to handle:
	 they behave just as external variables except we need to note the
	 alias flag to later output the weakref pseudo op into asm file.  */
      if (!target_node
	  && lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL)
	{
	  symtab_node *node = symtab_node::get (p->decl);
	  if (node)
	    {
	      node->alias_target = p->target;
	      node->weakref = true;
	      node->alias = true;
	      node->transparent_alias = true;
	    }
	  alias_pairs->unordered_remove (i);
	  continue;
	}
      else if (!target_node)
	{
	  error ("%q+D aliased to undefined symbol %qE", p->decl, p->target);
	  symtab_node *node = symtab_node::get (p->decl);
	  if (node)
	    node->alias = false;
	  alias_pairs->unordered_remove (i);
	  continue;
	}

      if (DECL_EXTERNAL (target_node->decl)
	  /* We use local aliases for C++ thunks to force the tailcall
	     to bind locally.  This is a hack - to keep it working do
	     the following (which is not strictly correct).  */
	  && (TREE_CODE (target_node->decl) != FUNCTION_DECL
	      || ! DECL_VIRTUAL_P (target_node->decl))
	  && ! lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)))
	{
	  error ("%q+D aliased to external symbol %qE",
		 p->decl, p->target);
	}

      if (TREE_CODE (p->decl) == FUNCTION_DECL
	  && target_node && is_a <cgraph_node *> (target_node))
	{
	  /* Function aliased to function: drop any stale definition of the
	     alias decl before creating the alias node.  */
	  cgraph_node *src_node = cgraph_node::get (p->decl);
	  if (src_node && src_node->definition)
	    src_node->reset ();
	  cgraph_node::create_alias (p->decl, target_node->decl);
	  alias_pairs->unordered_remove (i);
	}
      else if (VAR_P (p->decl)
	       && target_node && is_a <varpool_node *> (target_node))
	{
	  varpool_node::create_alias (p->decl, target_node->decl);
	  alias_pairs->unordered_remove (i);
	}
      else
	{
	  /* Mixed function/variable alias is unsupported.  */
	  error ("%q+D alias in between function and variable is not supported",
		 p->decl);
	  warning (0, "%q+D aliased declaration",
		   target_node->decl);
	  alias_pairs->unordered_remove (i);
	}
    }
  vec_free (alias_pairs);
}
/* Figure out what functions we want to assemble.

   Sets ->process on every analyzed, non-thunk, non-alias function that is
   neither inlined away, already written, nor external — plus the eligible
   members of its COMDAT group.  With checking enabled, verifies that every
   function left unmarked really was reclaimable.  */
static void
mark_functions_to_output (void)
{
  bool check_same_comdat_groups = false;
  cgraph_node *node;

  if (flag_checking)
    FOR_EACH_FUNCTION (node)
      gcc_assert (!node->process);

  FOR_EACH_FUNCTION (node)
    {
      tree decl = node->decl;

      /* ->process may already be set, but only via COMDAT-group
	 propagation from an earlier iteration.  */
      gcc_assert (!node->process || node->same_comdat_group);
      if (node->process)
	continue;

      /* We need to output all local functions that are used and not
	 always inlined, as well as those that are reachable from
	 outside the current compilation unit.  */
      if (node->analyzed
	  && !node->thunk.thunk_p
	  && !node->alias
	  && !node->global.inlined_to
	  && !TREE_ASM_WRITTEN (decl)
	  && !DECL_EXTERNAL (decl))
	{
	  node->process = 1;
	  if (node->same_comdat_group)
	    {
	      cgraph_node *next;
	      for (next = dyn_cast<cgraph_node *> (node->same_comdat_group);
		   next != node;
		   next = dyn_cast<cgraph_node *> (next->same_comdat_group))
		if (!next->thunk.thunk_p && !next->alias
		    && !next->comdat_local_p ())
		  next->process = 1;
	    }
	}
      else if (node->same_comdat_group)
	{
	  /* Defer verification of unmarked COMDAT members to the second
	     pass below, once the whole group has been visited.  */
	  if (flag_checking)
	    check_same_comdat_groups = true;
	}
      else
	{
	  /* We should've reclaimed all functions that are not needed.  */
	  if (flag_checking
	      && !node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
		 are inside partition, we can end up not removing the body since we no longer
		 have analyzed node pointing to it.  */
	      && !node->in_other_partition
	      && !node->alias
	      && !node->clones
	      && !DECL_EXTERNAL (decl))
	    {
	      node->debug ();
	      internal_error ("failed to reclaim unneeded function");
	    }
	  gcc_assert (node->global.inlined_to
		      || !gimple_has_body_p (decl)
		      || node->in_other_partition
		      || node->clones
		      || DECL_ARTIFICIAL (decl)
		      || DECL_EXTERNAL (decl));
	}
    }
  if (flag_checking && check_same_comdat_groups)
    FOR_EACH_FUNCTION (node)
      if (node->same_comdat_group && !node->process)
	{
	  tree decl = node->decl;
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in an ltrans unit when the offline copy is outside a
		 partition but inline copies are inside a partition, we can
		 end up not removing the body since we no longer have an
		 analyzed node pointing to it.  */
	      && !node->in_other_partition
	      && !node->clones
	      && !DECL_EXTERNAL (decl))
	    {
	      node->debug ();
	      internal_error ("failed to reclaim unneeded function in same "
			      "comdat group");
	    }
	}
}
/* DECL is FUNCTION_DECL.  Initialize datastructures so DECL is a function
   in lowered gimple form.  IN_SSA is true if the gimple is in SSA.

   Set current_function_decl and cfun to newly constructed empty function body.
   return basic block in the function body.

   COUNT becomes the profile count of the entry/exit blocks and of the
   single body block; all created edges are marked always-taken.  */
basic_block
init_lowered_empty_function (tree decl, bool in_ssa, profile_count count)
{
  basic_block bb;
  edge e;

  current_function_decl = decl;
  allocate_struct_function (decl, false);
  gimple_register_cfg_hooks ();
  init_empty_tree_cfg ();
  init_tree_ssa (cfun);

  if (in_ssa)
    {
      init_ssa_operands (cfun);
      cfun->gimple_df->in_ssa_p = true;
      cfun->curr_properties |= PROP_ssa;
    }

  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;

  DECL_SAVED_TREE (decl) = error_mark_node;
  cfun->curr_properties |= (PROP_gimple_lcf | PROP_gimple_leh | PROP_gimple_any
			    | PROP_cfg | PROP_loops);

  set_loops_for_fn (cfun, ggc_cleared_alloc<loops> ());
  init_loops_structure (cfun, loops_for_fn (cfun), 1);
  loops_for_fn (cfun)->state |= LOOPS_MAY_HAVE_MULTIPLE_LATCHES;

  /* Create BB for body of the function and connect it properly.  */
  ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = count;
  ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency = BB_FREQ_MAX;
  EXIT_BLOCK_PTR_FOR_FN (cfun)->count = count;
  EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency = BB_FREQ_MAX;
  bb = create_basic_block (NULL, ENTRY_BLOCK_PTR_FOR_FN (cfun));
  bb->count = count;
  bb->frequency = BB_FREQ_MAX;
  e = make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, EDGE_FALLTHRU);
  e->count = count;
  e->probability = profile_probability::always ();
  e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
  e->count = count;
  e->probability = profile_probability::always ();
  add_bb_to_loop (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);

  return bb;
}
/* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
   offset indicated by VIRTUAL_OFFSET, if that is
   non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
   zero for a result adjusting thunk.

   Emits the adjustment statements after BSI and returns a fresh SSA-ready
   temporary holding the adjusted pointer.  For this-adjusting thunks the
   fixed offset is applied before the virtual lookup; for result-adjusting
   thunks it is applied after.  */
tree
thunk_adjust (gimple_stmt_iterator * bsi,
	      tree ptr, bool this_adjusting,
	      HOST_WIDE_INT fixed_offset, tree virtual_offset)
{
  gassign *stmt;
  tree ret;

  if (this_adjusting
      && fixed_offset != 0)
    {
      stmt = gimple_build_assign
		(ptr, fold_build_pointer_plus_hwi_loc (input_location,
						       ptr,
						       fixed_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
    }

  /* If there's a virtual offset, look up that value in the vtable and
     adjust the pointer again.  */
  if (virtual_offset)
    {
      tree vtabletmp;
      tree vtabletmp2;
      tree vtabletmp3;

      /* Lazily build the type used to access vtable slots.  */
      if (!vtable_entry_type)
	{
	  tree vfunc_type = make_node (FUNCTION_TYPE);
	  TREE_TYPE (vfunc_type) = integer_type_node;
	  TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
	  layout_type (vfunc_type);

	  vtable_entry_type = build_pointer_type (vfunc_type);
	}

      vtabletmp =
	create_tmp_reg (build_pointer_type
			(build_pointer_type (vtable_entry_type)), "vptr");

      /* The vptr is always at offset zero in the object.  */
      stmt = gimple_build_assign (vtabletmp,
				  build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
					  ptr));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Form the vtable address.  */
      vtabletmp2 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp)),
				   "vtableaddr");
      stmt = gimple_build_assign (vtabletmp2,
				  build_simple_mem_ref (vtabletmp));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Find the entry with the vcall offset.  */
      stmt = gimple_build_assign (vtabletmp2,
				  fold_build_pointer_plus_loc (input_location,
							       vtabletmp2,
							       virtual_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Get the offset itself.  */
      vtabletmp3 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp2)),
				   "vcalloffset");
      stmt = gimple_build_assign (vtabletmp3,
				  build_simple_mem_ref (vtabletmp2));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Adjust the `this' pointer.  */
      ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
      ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
				      GSI_CONTINUE_LINKING);
    }

  if (!this_adjusting
      && fixed_offset != 0)
    /* Adjust the pointer by the constant.  */
    {
      tree ptrtmp;

      if (VAR_P (ptr))
	ptrtmp = ptr;
      else
	{
	  ptrtmp = create_tmp_reg (TREE_TYPE (ptr), "ptr");
	  stmt = gimple_build_assign (ptrtmp, ptr);
	  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
	}
      ptr = fold_build_pointer_plus_hwi_loc (input_location,
					     ptrtmp, fixed_offset);
    }

  /* Emit the statement and gimplify the adjustment expression.  */
  ret = create_tmp_reg (TREE_TYPE (ptr), "adjusted_this");
  stmt = gimple_build_assign (ret, ptr);
  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

  return ret;
}
/* Expand thunk NODE to gimple if possible.
   When FORCE_GIMPLE_THUNK is true, gimple thunk is created and
   no assembler is produced.
   When OUTPUT_ASM_THUNK is true, also produce assembler for
   thunks that are not lowered.

   Three strategies, tried in order: (1) target-provided asm mi-thunk,
   (2) error out for varargs methods, (3) build a generic GIMPLE body that
   adjusts `this' (or the result) via thunk_adjust and tail-calls the
   target.  Returns true iff a GIMPLE body was created.  */
bool
cgraph_node::expand_thunk (bool output_asm_thunks, bool force_gimple_thunk)
{
  bool this_adjusting = thunk.this_adjusting;
  HOST_WIDE_INT fixed_offset = thunk.fixed_offset;
  HOST_WIDE_INT virtual_value = thunk.virtual_value;
  tree virtual_offset = NULL;
  tree alias = callees->callee->decl;
  tree thunk_fndecl = decl;
  tree a;

  /* Instrumentation thunk is the same function with
     a different signature.  Never need to expand it.  */
  if (thunk.add_pointer_bounds_args)
    return false;

  if (!force_gimple_thunk && this_adjusting
      && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
					      virtual_value, alias))
    {
      /* Strategy 1: let the target emit the thunk directly as assembly.  */
      const char *fnname;
      tree fn_block;
      tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));

      if (!output_asm_thunks)
	{
	  analyzed = true;
	  return false;
	}

      if (in_lto_p)
	get_untransformed_body ();
      a = DECL_ARGUMENTS (thunk_fndecl);

      current_function_decl = thunk_fndecl;

      /* Ensure thunks are emitted in their correct sections.  */
      resolve_unique_section (thunk_fndecl, 0,
			      flag_function_sections);

      DECL_RESULT (thunk_fndecl)
	= build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
		      RESULT_DECL, 0, restype);
      DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
      fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));

      /* The back end expects DECL_INITIAL to contain a BLOCK, so we
	 create one.  */
      fn_block = make_node (BLOCK);
      BLOCK_VARS (fn_block) = a;
      DECL_INITIAL (thunk_fndecl) = fn_block;
      BLOCK_SUPERCONTEXT (fn_block) = thunk_fndecl;
      allocate_struct_function (thunk_fndecl, false);
      init_function_start (thunk_fndecl);
      cfun->is_thunk = 1;
      insn_locations_init ();
      set_curr_insn_location (DECL_SOURCE_LOCATION (thunk_fndecl));
      prologue_location = curr_insn_location ();
      assemble_start_function (thunk_fndecl, fnname);

      targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
				       fixed_offset, virtual_value, alias);

      assemble_end_function (thunk_fndecl, fnname);
      insn_locations_finalize ();
      init_insn_lengths ();
      free_after_compilation (cfun);
      TREE_ASM_WRITTEN (thunk_fndecl) = 1;
      thunk.thunk_p = false;
      analyzed = false;
    }
  else if (stdarg_p (TREE_TYPE (thunk_fndecl)))
    {
      /* Strategy 2: the generic path cannot forward varargs.  */
      error ("generic thunk code fails for method %qD which uses %<...%>",
	     thunk_fndecl);
      TREE_ASM_WRITTEN (thunk_fndecl) = 1;
      analyzed = true;
      return false;
    }
  else
    {
      /* Strategy 3: build a lowered GIMPLE body for the thunk.  */
      tree restype;
      basic_block bb, then_bb, else_bb, return_bb;
      gimple_stmt_iterator bsi;
      int nargs = 0;
      tree arg;
      int i;
      tree resdecl;
      tree restmp = NULL;
      tree resbnd = NULL;

      gcall *call;
      greturn *ret;
      bool alias_is_noreturn = TREE_THIS_VOLATILE (alias);

      /* We may be called from expand_thunk that releses body except for
	 DECL_ARGUMENTS.  In this case force_gimple_thunk is true.  */
      if (in_lto_p && !force_gimple_thunk)
	get_untransformed_body ();
      a = DECL_ARGUMENTS (thunk_fndecl);

      current_function_decl = thunk_fndecl;

      /* Ensure thunks are emitted in their correct sections.  */
      resolve_unique_section (thunk_fndecl, 0,
			      flag_function_sections);

      DECL_IGNORED_P (thunk_fndecl) = 1;
      bitmap_obstack_initialize (NULL);

      if (thunk.virtual_offset_p)
	virtual_offset = size_int (virtual_value);

      /* Build the return declaration for the function.  */
      restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
      if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
	{
	  resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
	  DECL_ARTIFICIAL (resdecl) = 1;
	  DECL_IGNORED_P (resdecl) = 1;
	  DECL_RESULT (thunk_fndecl) = resdecl;
	  DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
	}
      else
	resdecl = DECL_RESULT (thunk_fndecl);

      bb = then_bb = else_bb = return_bb
	= init_lowered_empty_function (thunk_fndecl, true, count);

      bsi = gsi_start_bb (bb);

      /* Build call to the function being thunked.  */
      if (!VOID_TYPE_P (restype)
	  && (!alias_is_noreturn
	      || TREE_ADDRESSABLE (restype)
	      || TREE_CODE (TYPE_SIZE_UNIT (restype)) != INTEGER_CST))
	{
	  if (DECL_BY_REFERENCE (resdecl))
	    {
	      restmp = gimple_fold_indirect_ref (resdecl);
	      if (!restmp)
		restmp = build2 (MEM_REF,
				 TREE_TYPE (TREE_TYPE (DECL_RESULT (alias))),
				 resdecl,
				 build_int_cst (TREE_TYPE
						(DECL_RESULT (alias)), 0));
	    }
	  else if (!is_gimple_reg_type (restype))
	    {
	      if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl)))
		{
		  restmp = resdecl;

		  if (VAR_P (restmp))
		    add_local_decl (cfun, restmp);
		  BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
		}
	      else
		restmp = create_tmp_var (restype, "retval");
	    }
	  else
	    restmp = create_tmp_reg (restype, "retval");
	}

      for (arg = a; arg; arg = DECL_CHAIN (arg))
	nargs++;
      auto_vec<tree> vargs (nargs);
      i = 0;
      arg = a;
      /* The first argument (`this') is adjusted; the rest are forwarded
	 verbatim, copied through temporaries when not gimple values.  */
      if (this_adjusting)
	{
	  vargs.quick_push (thunk_adjust (&bsi, a, 1, fixed_offset,
					  virtual_offset));
	  arg = DECL_CHAIN (a);
	  i = 1;
	}

      if (nargs)
	for (; i < nargs; i++, arg = DECL_CHAIN (arg))
	  {
	    tree tmp = arg;
	    if (VECTOR_TYPE_P (TREE_TYPE (arg))
		|| TREE_CODE (TREE_TYPE (arg)) == COMPLEX_TYPE)
	      DECL_GIMPLE_REG_P (arg) = 1;

	    if (!is_gimple_val (arg))
	      {
		tmp = create_tmp_reg (TYPE_MAIN_VARIANT
				      (TREE_TYPE (arg)), "arg");
		gimple *stmt = gimple_build_assign (tmp, arg);
		gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
	      }
	    vargs.quick_push (tmp);
	  }
      call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
      callees->call_stmt = call;
      gimple_call_set_from_thunk (call, true);
      gimple_call_set_with_bounds (call, instrumentation_clone);

      /* Return slot optimization is always possible and in fact requred to
	 return values with DECL_BY_REFERENCE.  */
      if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl))
	  && (!is_gimple_reg_type (TREE_TYPE (resdecl))
	      || DECL_BY_REFERENCE (resdecl)))
	gimple_call_set_return_slot_opt (call, true);

      if (restmp)
	{
	  gimple_call_set_lhs (call, restmp);
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (restmp),
						 TREE_TYPE (TREE_TYPE (alias))));
	}
      gsi_insert_after (&bsi, call, GSI_NEW_STMT);
      if (!alias_is_noreturn)
	{
	  if (instrumentation_clone
	      && !DECL_BY_REFERENCE (resdecl)
	      && restmp
	      && BOUNDED_P (restmp))
	    {
	      resbnd = chkp_insert_retbnd_call (NULL, restmp, &bsi);
	      create_edge (get_create (gimple_call_fndecl (gsi_stmt (bsi))),
			   as_a <gcall *> (gsi_stmt (bsi)),
			   callees->count, callees->frequency);
	    }

	  if (restmp && !this_adjusting
	      && (fixed_offset || virtual_offset))
	    {
	      tree true_label = NULL_TREE;

	      if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
		{
		  gimple *stmt;
		  edge e;
		  /* If the return type is a pointer, we need to
		     protect against NULL.  We know there will be an
		     adjustment, because that's why we're emitting a
		     thunk.  */
		  then_bb = create_basic_block (NULL, bb);
		  then_bb->count = count - count.apply_scale (1, 16);
		  then_bb->frequency = BB_FREQ_MAX - BB_FREQ_MAX / 16;
		  return_bb = create_basic_block (NULL, then_bb);
		  return_bb->count = count;
		  return_bb->frequency = BB_FREQ_MAX;
		  else_bb = create_basic_block (NULL, else_bb);
		  /* NOTE(review): these two stores overwrite then_bb's
		     count/frequency set just above; from the symmetry of the
		     surrounding code they look intended for else_bb —
		     verify.  */
		  then_bb->count = count.apply_scale (1, 16);
		  then_bb->frequency = BB_FREQ_MAX / 16;
		  add_bb_to_loop (then_bb, bb->loop_father);
		  add_bb_to_loop (return_bb, bb->loop_father);
		  add_bb_to_loop (else_bb, bb->loop_father);
		  remove_edge (single_succ_edge (bb));
		  true_label = gimple_block_label (then_bb);
		  stmt = gimple_build_cond (NE_EXPR, restmp,
					    build_zero_cst (TREE_TYPE (restmp)),
					    NULL_TREE, NULL_TREE);
		  gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
		  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
		  e->probability = profile_probability::guessed_always ()
					.apply_scale (1, 16);
		  e->count = count - count.apply_scale (1, 16);
		  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
		  e->probability = profile_probability::guessed_always ()
					.apply_scale (1, 16);
		  e->count = count.apply_scale (1, 16);
		  make_single_succ_edge (return_bb,
					 EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
		  make_single_succ_edge (then_bb, return_bb, EDGE_FALLTHRU);
		  e = make_edge (else_bb, return_bb, EDGE_FALLTHRU);
		  e->probability = profile_probability::always ();
		  e->count = count.apply_scale (1, 16);
		  bsi = gsi_last_bb (then_bb);
		}

	      restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
				     fixed_offset, virtual_offset);
	      if (true_label)
		{
		  /* NULL pointer: skip the adjustment and return zero.  */
		  gimple *stmt;
		  bsi = gsi_last_bb (else_bb);
		  stmt = gimple_build_assign (restmp,
					      build_zero_cst (TREE_TYPE (restmp)));
		  gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
		  bsi = gsi_last_bb (return_bb);
		}
	    }
	  else
	    gimple_call_set_tail (call, true);

	  /* Build return value.  */
	  if (!DECL_BY_REFERENCE (resdecl))
	    ret = gimple_build_return (restmp);
	  else
	    ret = gimple_build_return (resdecl);
	  gimple_return_set_retbnd (ret, resbnd);

	  gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
	}
      else
	{
	  /* Noreturn callee: tail-call it and drop the fallthrough edge.  */
	  gimple_call_set_tail (call, true);
	  remove_edge (single_succ_edge (bb));
	}

      cfun->gimple_df->in_ssa_p = true;
      profile_status_for_fn (cfun)
	= count.initialized_p () ? PROFILE_READ : PROFILE_GUESSED;
      /* FIXME: C++ FE should stop setting TREE_ASM_WRITTEN on thunks.  */
      TREE_ASM_WRITTEN (thunk_fndecl) = false;
      delete_unreachable_blocks ();
      update_ssa (TODO_update_ssa);
      checking_verify_flow_info ();
      free_dominance_info (CDI_DOMINATORS);

      /* Since we want to emit the thunk, we explicitly mark its name as
	 referenced.  */
      thunk.thunk_p = false;
      lowered = true;
      bitmap_obstack_release (NULL);
    }
  current_function_decl = NULL;
  set_cfun (NULL);
  return true;
}
/* Assemble thunks and aliases associated to node.

   Recursively expands every thunk that calls this node (skipping inlined
   thunks and instrumentation thunks) and emits every non-transparent
   alias referring to it, then recurses so thunks-of-thunks and
   aliases-of-aliases are emitted too.  */
void
cgraph_node::assemble_thunks_and_aliases (void)
{
  cgraph_edge *e;
  ipa_ref *ref;

  for (e = callers; e;)
    if (e->caller->thunk.thunk_p
	&& !e->caller->global.inlined_to
	&& !e->caller->thunk.add_pointer_bounds_args)
      {
	cgraph_node *thunk = e->caller;

	/* Advance before expanding: expand_thunk rewrites the thunk and
	   can invalidate the current edge.  */
	e = e->next_caller;
	thunk->expand_thunk (true, false);
	thunk->assemble_thunks_and_aliases ();
      }
    else
      e = e->next_caller;

  FOR_EACH_ALIAS (this, ref)
    {
      cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
      if (!alias->transparent_alias)
	{
	  bool saved_written = TREE_ASM_WRITTEN (decl);

	  /* Force assemble_alias to really output the alias this time instead
	     of buffering it in same alias pairs.  */
	  TREE_ASM_WRITTEN (decl) = 1;
	  do_assemble_alias (alias->decl,
			     DECL_ASSEMBLER_NAME (decl));
	  alias->assemble_thunks_and_aliases ();
	  TREE_ASM_WRITTEN (decl) = saved_written;
	}
    }
}
2004 /* Expand function specified by node. */
2006 void
2007 cgraph_node::expand (void)
2009 location_t saved_loc;
2011 /* We ought to not compile any inline clones. */
2012 gcc_assert (!global.inlined_to);
2014 /* __RTL functions are compiled as soon as they are parsed, so don't
2015 do it again. */
2016 if (native_rtl_p ())
2017 return;
2019 announce_function (decl);
2020 process = 0;
2021 gcc_assert (lowered);
2022 get_untransformed_body ();
2024 /* Generate RTL for the body of DECL. */
2026 timevar_push (TV_REST_OF_COMPILATION);
2028 gcc_assert (symtab->global_info_ready);
2030 /* Initialize the default bitmap obstack. */
2031 bitmap_obstack_initialize (NULL);
2033 /* Initialize the RTL code for the function. */
2034 saved_loc = input_location;
2035 input_location = DECL_SOURCE_LOCATION (decl);
2037 gcc_assert (DECL_STRUCT_FUNCTION (decl));
2038 push_cfun (DECL_STRUCT_FUNCTION (decl));
2039 init_function_start (decl);
2041 gimple_register_cfg_hooks ();
2043 bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation*/
2045 execute_all_ipa_transforms ();
2047 /* Perform all tree transforms and optimizations. */
2049 /* Signal the start of passes. */
2050 invoke_plugin_callbacks (PLUGIN_ALL_PASSES_START, NULL);
2052 execute_pass_list (cfun, g->get_passes ()->all_passes);
2054 /* Signal the end of passes. */
2055 invoke_plugin_callbacks (PLUGIN_ALL_PASSES_END, NULL);
2057 bitmap_obstack_release (&reg_obstack);
2059 /* Release the default bitmap obstack. */
2060 bitmap_obstack_release (NULL);
2062 /* If requested, warn about function definitions where the function will
2063 return a value (usually of some struct or union type) which itself will
2064 take up a lot of stack space. */
2065 if (warn_larger_than && !DECL_EXTERNAL (decl) && TREE_TYPE (decl))
2067 tree ret_type = TREE_TYPE (TREE_TYPE (decl));
2069 if (ret_type && TYPE_SIZE_UNIT (ret_type)
2070 && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
2071 && 0 < compare_tree_int (TYPE_SIZE_UNIT (ret_type),
2072 larger_than_size))
2074 unsigned int size_as_int
2075 = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));
2077 if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
2078 warning (OPT_Wlarger_than_, "size of return value of %q+D is %u bytes",
2079 decl, size_as_int);
2080 else
2081 warning (OPT_Wlarger_than_, "size of return value of %q+D is larger than %wd bytes",
2082 decl, larger_than_size);
2086 gimple_set_body (decl, NULL);
2087 if (DECL_STRUCT_FUNCTION (decl) == 0
2088 && !cgraph_node::get (decl)->origin)
2090 /* Stop pointing to the local nodes about to be freed.
2091 But DECL_INITIAL must remain nonzero so we know this
2092 was an actual function definition.
2093 For a nested function, this is done in c_pop_function_context.
2094 If rest_of_compilation set this to 0, leave it 0. */
2095 if (DECL_INITIAL (decl) != 0)
2096 DECL_INITIAL (decl) = error_mark_node;
2099 input_location = saved_loc;
2101 ggc_collect ();
2102 timevar_pop (TV_REST_OF_COMPILATION);
2104 /* Make sure that BE didn't give up on compiling. */
2105 gcc_assert (TREE_ASM_WRITTEN (decl));
2106 if (cfun)
2107 pop_cfun ();
2109 /* It would make a lot more sense to output thunks before function body to get more
2110 forward and lest backwarding jumps. This however would need solving problem
2111 with comdats. See PR48668. Also aliases must come after function itself to
2112 make one pass assemblers, like one on AIX, happy. See PR 50689.
2113 FIXME: Perhaps thunks should be move before function IFF they are not in comdat
2114 groups. */
2115 assemble_thunks_and_aliases ();
2116 release_body ();
2117 /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
2118 points to the dead function body. */
2119 remove_callees ();
2120 remove_all_references ();
2123 /* Node comparer that is responsible for the order that corresponds
2124 to time when a function was launched for the first time. */
2126 static int
2127 node_cmp (const void *pa, const void *pb)
2129 const cgraph_node *a = *(const cgraph_node * const *) pa;
2130 const cgraph_node *b = *(const cgraph_node * const *) pb;
2132 /* Functions with time profile must be before these without profile. */
2133 if (!a->tp_first_run || !b->tp_first_run)
2134 return a->tp_first_run - b->tp_first_run;
2136 return a->tp_first_run != b->tp_first_run
2137 ? b->tp_first_run - a->tp_first_run
2138 : b->order - a->order;
2141 /* Expand all functions that must be output.
2143 Attempt to topologically sort the nodes so function is output when
2144 all called functions are already assembled to allow data to be
2145 propagated across the callgraph. Use a stack to get smaller distance
2146 between a function and its callees (later we may choose to use a more
2147 sophisticated algorithm for function reordering; we will likely want
2148 to use subsections to make the output functions appear in top-down
2149 order). */
2151 static void
2152 expand_all_functions (void)
2154 cgraph_node *node;
2155 cgraph_node **order = XCNEWVEC (cgraph_node *,
2156 symtab->cgraph_count);
2157 unsigned int expanded_func_count = 0, profiled_func_count = 0;
2158 int order_pos, new_order_pos = 0;
2159 int i;
2161 order_pos = ipa_reverse_postorder (order);
2162 gcc_assert (order_pos == symtab->cgraph_count);
2164 /* Garbage collector may remove inline clones we eliminate during
2165 optimization. So we must be sure to not reference them. */
2166 for (i = 0; i < order_pos; i++)
2167 if (order[i]->process)
2168 order[new_order_pos++] = order[i];
2170 if (flag_profile_reorder_functions)
2171 qsort (order, new_order_pos, sizeof (cgraph_node *), node_cmp);
2173 for (i = new_order_pos - 1; i >= 0; i--)
2175 node = order[i];
2177 if (node->process)
2179 expanded_func_count++;
2180 if(node->tp_first_run)
2181 profiled_func_count++;
2183 if (symtab->dump_file)
2184 fprintf (symtab->dump_file,
2185 "Time profile order in expand_all_functions:%s:%d\n",
2186 node->asm_name (), node->tp_first_run);
2187 node->process = 0;
2188 node->expand ();
2192 if (dump_file)
2193 fprintf (dump_file, "Expanded functions with time profile (%s):%u/%u\n",
2194 main_input_filename, profiled_func_count, expanded_func_count);
2196 if (symtab->dump_file && flag_profile_reorder_functions)
2197 fprintf (symtab->dump_file, "Expanded functions with time profile:%u/%u\n",
2198 profiled_func_count, expanded_func_count);
2200 symtab->process_new_functions ();
2201 free_gimplify_stack ();
2203 free (order);
2206 /* This is used to sort the node types by the cgraph order number. */
2208 enum cgraph_order_sort_kind
2210 ORDER_UNDEFINED = 0,
2211 ORDER_FUNCTION,
2212 ORDER_VAR,
2213 ORDER_VAR_UNDEF,
2214 ORDER_ASM
2217 struct cgraph_order_sort
2219 enum cgraph_order_sort_kind kind;
2220 union
2222 cgraph_node *f;
2223 varpool_node *v;
2224 asm_node *a;
2225 } u;
2228 /* Output all functions, variables, and asm statements in the order
2229 according to their order fields, which is the order in which they
2230 appeared in the file. This implements -fno-toplevel-reorder. In
2231 this mode we may output functions and variables which don't really
2232 need to be output. */
2234 static void
2235 output_in_order (void)
2237 int max;
2238 cgraph_order_sort *nodes;
2239 int i;
2240 cgraph_node *pf;
2241 varpool_node *pv;
2242 asm_node *pa;
2243 max = symtab->order;
2244 nodes = XCNEWVEC (cgraph_order_sort, max);
2246 FOR_EACH_DEFINED_FUNCTION (pf)
2248 if (pf->process && !pf->thunk.thunk_p && !pf->alias)
2250 if (!pf->no_reorder)
2251 continue;
2252 i = pf->order;
2253 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
2254 nodes[i].kind = ORDER_FUNCTION;
2255 nodes[i].u.f = pf;
2259 /* There is a similar loop in symbol_table::output_variables.
2260 Please keep them in sync. */
2261 FOR_EACH_VARIABLE (pv)
2263 if (!pv->no_reorder)
2264 continue;
2265 if (DECL_HARD_REGISTER (pv->decl)
2266 || DECL_HAS_VALUE_EXPR_P (pv->decl))
2267 continue;
2268 i = pv->order;
2269 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
2270 nodes[i].kind = pv->definition ? ORDER_VAR : ORDER_VAR_UNDEF;
2271 nodes[i].u.v = pv;
2274 for (pa = symtab->first_asm_symbol (); pa; pa = pa->next)
2276 i = pa->order;
2277 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
2278 nodes[i].kind = ORDER_ASM;
2279 nodes[i].u.a = pa;
2282 /* In toplevel reorder mode we output all statics; mark them as needed. */
2284 for (i = 0; i < max; ++i)
2285 if (nodes[i].kind == ORDER_VAR)
2286 nodes[i].u.v->finalize_named_section_flags ();
2288 for (i = 0; i < max; ++i)
2290 switch (nodes[i].kind)
2292 case ORDER_FUNCTION:
2293 nodes[i].u.f->process = 0;
2294 nodes[i].u.f->expand ();
2295 break;
2297 case ORDER_VAR:
2298 nodes[i].u.v->assemble_decl ();
2299 break;
2301 case ORDER_VAR_UNDEF:
2302 assemble_undefined_decl (nodes[i].u.v->decl);
2303 break;
2305 case ORDER_ASM:
2306 assemble_asm (nodes[i].u.a->asm_str);
2307 break;
2309 case ORDER_UNDEFINED:
2310 break;
2312 default:
2313 gcc_unreachable ();
2317 symtab->clear_asm_symbols ();
2319 free (nodes);
2322 static void
2323 ipa_passes (void)
2325 gcc::pass_manager *passes = g->get_passes ();
2327 set_cfun (NULL);
2328 current_function_decl = NULL;
2329 gimple_register_cfg_hooks ();
2330 bitmap_obstack_initialize (NULL);
2332 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);
2334 if (!in_lto_p)
2336 execute_ipa_pass_list (passes->all_small_ipa_passes);
2337 if (seen_error ())
2338 return;
2341 /* This extra symtab_remove_unreachable_nodes pass tends to catch some
2342 devirtualization and other changes where removal iterate. */
2343 symtab->remove_unreachable_nodes (symtab->dump_file);
2345 /* If pass_all_early_optimizations was not scheduled, the state of
2346 the cgraph will not be properly updated. Update it now. */
2347 if (symtab->state < IPA_SSA)
2348 symtab->state = IPA_SSA;
2350 if (!in_lto_p)
2352 /* Generate coverage variables and constructors. */
2353 coverage_finish ();
2355 /* Process new functions added. */
2356 set_cfun (NULL);
2357 current_function_decl = NULL;
2358 symtab->process_new_functions ();
2360 execute_ipa_summary_passes
2361 ((ipa_opt_pass_d *) passes->all_regular_ipa_passes);
2364 /* Some targets need to handle LTO assembler output specially. */
2365 if (flag_generate_lto || flag_generate_offload)
2366 targetm.asm_out.lto_start ();
2368 if (!in_lto_p)
2370 if (g->have_offload)
2372 section_name_prefix = OFFLOAD_SECTION_NAME_PREFIX;
2373 lto_stream_offload_p = true;
2374 ipa_write_summaries ();
2375 lto_stream_offload_p = false;
2377 if (flag_lto)
2379 section_name_prefix = LTO_SECTION_NAME_PREFIX;
2380 lto_stream_offload_p = false;
2381 ipa_write_summaries ();
2385 if (flag_generate_lto || flag_generate_offload)
2386 targetm.asm_out.lto_end ();
2388 if (!flag_ltrans && (in_lto_p || !flag_lto || flag_fat_lto_objects))
2389 execute_ipa_pass_list (passes->all_regular_ipa_passes);
2390 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
2392 bitmap_obstack_release (NULL);
2396 /* Return string alias is alias of. */
2398 static tree
2399 get_alias_symbol (tree decl)
2401 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
2402 return get_identifier (TREE_STRING_POINTER
2403 (TREE_VALUE (TREE_VALUE (alias))));
2407 /* Weakrefs may be associated to external decls and thus not output
2408 at expansion time. Emit all necessary aliases. */
2410 void
2411 symbol_table::output_weakrefs (void)
2413 symtab_node *node;
2414 cgraph_node *cnode;
2415 FOR_EACH_SYMBOL (node)
2416 if (node->alias
2417 && !TREE_ASM_WRITTEN (node->decl)
2418 && (!(cnode = dyn_cast <cgraph_node *> (node))
2419 || !cnode->instrumented_version
2420 || !TREE_ASM_WRITTEN (cnode->instrumented_version->decl))
2421 && node->weakref)
2423 tree target;
2425 /* Weakrefs are special by not requiring target definition in current
2426 compilation unit. It is thus bit hard to work out what we want to
2427 alias.
2428 When alias target is defined, we need to fetch it from symtab reference,
2429 otherwise it is pointed to by alias_target. */
2430 if (node->alias_target)
2431 target = (DECL_P (node->alias_target)
2432 ? DECL_ASSEMBLER_NAME (node->alias_target)
2433 : node->alias_target);
2434 else if (node->analyzed)
2435 target = DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl);
2436 else
2438 gcc_unreachable ();
2439 target = get_alias_symbol (node->decl);
2441 do_assemble_alias (node->decl, target);
2445 /* Perform simple optimizations based on callgraph. */
2447 void
2448 symbol_table::compile (void)
2450 if (seen_error ())
2451 return;
2453 symtab_node::checking_verify_symtab_nodes ();
2455 timevar_push (TV_CGRAPHOPT);
2456 if (pre_ipa_mem_report)
2458 fprintf (stderr, "Memory consumption before IPA\n");
2459 dump_memory_report (false);
2461 if (!quiet_flag)
2462 fprintf (stderr, "Performing interprocedural optimizations\n");
2463 state = IPA;
2465 /* Offloading requires LTO infrastructure. */
2466 if (!in_lto_p && g->have_offload)
2467 flag_generate_offload = 1;
2469 /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE. */
2470 if (flag_generate_lto || flag_generate_offload)
2471 lto_streamer_hooks_init ();
2473 /* Don't run the IPA passes if there was any error or sorry messages. */
2474 if (!seen_error ())
2475 ipa_passes ();
2477 /* Do nothing else if any IPA pass found errors or if we are just streaming LTO. */
2478 if (seen_error ()
2479 || (!in_lto_p && flag_lto && !flag_fat_lto_objects))
2481 timevar_pop (TV_CGRAPHOPT);
2482 return;
2485 global_info_ready = true;
2486 if (dump_file)
2488 fprintf (dump_file, "Optimized ");
2489 symtab->dump (dump_file);
2491 if (post_ipa_mem_report)
2493 fprintf (stderr, "Memory consumption after IPA\n");
2494 dump_memory_report (false);
2496 timevar_pop (TV_CGRAPHOPT);
2498 /* Output everything. */
2499 (*debug_hooks->assembly_start) ();
2500 if (!quiet_flag)
2501 fprintf (stderr, "Assembling functions:\n");
2502 symtab_node::checking_verify_symtab_nodes ();
2504 bitmap_obstack_initialize (NULL);
2505 execute_ipa_pass_list (g->get_passes ()->all_late_ipa_passes);
2506 bitmap_obstack_release (NULL);
2507 mark_functions_to_output ();
2509 /* When weakref support is missing, we automatically translate all
2510 references to NODE to references to its ultimate alias target.
2511 The renaming mechanizm uses flag IDENTIFIER_TRANSPARENT_ALIAS and
2512 TREE_CHAIN.
2514 Set up this mapping before we output any assembler but once we are sure
2515 that all symbol renaming is done.
2517 FIXME: All this uglyness can go away if we just do renaming at gimple
2518 level by physically rewritting the IL. At the moment we can only redirect
2519 calls, so we need infrastructure for renaming references as well. */
2520 #ifndef ASM_OUTPUT_WEAKREF
2521 symtab_node *node;
2523 FOR_EACH_SYMBOL (node)
2524 if (node->alias
2525 && lookup_attribute ("weakref", DECL_ATTRIBUTES (node->decl)))
2527 IDENTIFIER_TRANSPARENT_ALIAS
2528 (DECL_ASSEMBLER_NAME (node->decl)) = 1;
2529 TREE_CHAIN (DECL_ASSEMBLER_NAME (node->decl))
2530 = (node->alias_target ? node->alias_target
2531 : DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl));
2533 #endif
2535 state = EXPANSION;
2537 /* Output first asm statements and anything ordered. The process
2538 flag is cleared for these nodes, so we skip them later. */
2539 output_in_order ();
2540 expand_all_functions ();
2541 output_variables ();
2543 process_new_functions ();
2544 state = FINISHED;
2545 output_weakrefs ();
2547 if (dump_file)
2549 fprintf (dump_file, "\nFinal ");
2550 symtab->dump (dump_file);
2552 if (!flag_checking)
2553 return;
2554 symtab_node::verify_symtab_nodes ();
2555 /* Double check that all inline clones are gone and that all
2556 function bodies have been released from memory. */
2557 if (!seen_error ())
2559 cgraph_node *node;
2560 bool error_found = false;
2562 FOR_EACH_DEFINED_FUNCTION (node)
2563 if (node->global.inlined_to
2564 || gimple_has_body_p (node->decl))
2566 error_found = true;
2567 node->debug ();
2569 if (error_found)
2570 internal_error ("nodes with unreleased memory found");
2575 /* Analyze the whole compilation unit once it is parsed completely. */
2577 void
2578 symbol_table::finalize_compilation_unit (void)
2580 timevar_push (TV_CGRAPH);
2582 /* If we're here there's no current function anymore. Some frontends
2583 are lazy in clearing these. */
2584 current_function_decl = NULL;
2585 set_cfun (NULL);
2587 /* Do not skip analyzing the functions if there were errors, we
2588 miss diagnostics for following functions otherwise. */
2590 /* Emit size functions we didn't inline. */
2591 finalize_size_functions ();
2593 /* Mark alias targets necessary and emit diagnostics. */
2594 handle_alias_pairs ();
2596 if (!quiet_flag)
2598 fprintf (stderr, "\nAnalyzing compilation unit\n");
2599 fflush (stderr);
2602 if (flag_dump_passes)
2603 dump_passes ();
2605 /* Gimplify and lower all functions, compute reachability and
2606 remove unreachable nodes. */
2607 analyze_functions (/*first_time=*/true);
2609 /* Mark alias targets necessary and emit diagnostics. */
2610 handle_alias_pairs ();
2612 /* Gimplify and lower thunks. */
2613 analyze_functions (/*first_time=*/false);
2615 if (!seen_error ())
2617 /* Emit early debug for reachable functions, and by consequence,
2618 locally scoped symbols. */
2619 struct cgraph_node *cnode;
2620 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (cnode)
2621 (*debug_hooks->early_global_decl) (cnode->decl);
2623 /* Clean up anything that needs cleaning up after initial debug
2624 generation. */
2625 (*debug_hooks->early_finish) (main_input_filename);
2628 /* Finally drive the pass manager. */
2629 compile ();
2631 timevar_pop (TV_CGRAPH);
2634 /* Reset all state within cgraphunit.c so that we can rerun the compiler
2635 within the same process. For use by toplev::finalize. */
2637 void
2638 cgraphunit_c_finalize (void)
2640 gcc_assert (cgraph_new_nodes.length () == 0);
2641 cgraph_new_nodes.truncate (0);
2643 vtable_entry_type = NULL;
2644 queued_nodes = &symtab_terminator;
2646 first_analyzed = NULL;
2647 first_analyzed_var = NULL;
2650 /* Creates a wrapper from cgraph_node to TARGET node. Thunk is used for this
2651 kind of wrapper method. */
2653 void
2654 cgraph_node::create_wrapper (cgraph_node *target)
2656 /* Preserve DECL_RESULT so we get right by reference flag. */
2657 tree decl_result = DECL_RESULT (decl);
2659 /* Remove the function's body but keep arguments to be reused
2660 for thunk. */
2661 release_body (true);
2662 reset ();
2664 DECL_UNINLINABLE (decl) = false;
2665 DECL_RESULT (decl) = decl_result;
2666 DECL_INITIAL (decl) = NULL;
2667 allocate_struct_function (decl, false);
2668 set_cfun (NULL);
2670 /* Turn alias into thunk and expand it into GIMPLE representation. */
2671 definition = true;
2673 memset (&thunk, 0, sizeof (cgraph_thunk_info));
2674 thunk.thunk_p = true;
2675 create_edge (target, NULL, count, CGRAPH_FREQ_BASE);
2676 callees->can_throw_external = !TREE_NOTHROW (target->decl);
2678 tree arguments = DECL_ARGUMENTS (decl);
2680 while (arguments)
2682 TREE_ADDRESSABLE (arguments) = false;
2683 arguments = TREE_CHAIN (arguments);
2686 expand_thunk (false, true);
2688 /* Inline summary set-up. */
2689 analyze ();
2690 inline_analyze_function (this);
2693 #include "gt-cgraphunit.h"