gcc/cgraphunit.c
/* Driver of optimization process
   Copyright (C) 2003-2017 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This module implements the main driver of the compilation process.

   The main scope of this file is to act as an interface between the
   tree based frontends and the backend.

   The front-end is supposed to use the following functionality:

    - finalize_function

      This function is called once the front-end has parsed the whole body
      of a function and it is certain that neither the function body nor
      the declaration will change.

      (There is one exception needed for implementing GCC extern inline
      functions.)

    - varpool_finalize_decl

      This function has the same behavior as the above but is used for
      static variables.

    - add_asm_node

      Insert a new toplevel ASM statement.

    - finalize_compilation_unit

      This function is called once the (source level) compilation unit is
      finalized and it will no longer change.

      The symbol table is constructed starting from the trivially needed
      symbols finalized by the frontend.  Functions are lowered into
      GIMPLE representation and callgraph/reference lists are constructed.
      Those are used to discover other necessary functions and variables.

      At the end the bodies of unreachable functions are removed.

      The function can be called multiple times when multiple source level
      compilation units are combined.

    - compile

      This passes control to the back-end.  Optimizations are performed and
      final assembler is generated.  This is done in the following way.  Note
      that with link time optimization the process is split into three
      stages (compile time, linktime analysis and parallel linktime as
      indicated below).

      Compile time:

	1) Inter-procedural optimization.
	   (ipa_passes)

	   This part is further split into:

	   a) early optimizations.  These are local passes executed in
	      the topological order on the callgraph.

	      The purpose of early optimizations is to optimize away simple
	      things that may otherwise confuse IP analysis.  Very simple
	      propagation across the callgraph is done, i.e. to discover
	      functions without side effects, and simple inlining is
	      performed.

	   b) early small interprocedural passes.

	      Those are interprocedural passes executed only at compilation
	      time.  These include, for example, transactional memory
	      lowering, unreachable code removal and other simple
	      transformations.

	   c) IP analysis stage.  All interprocedural passes do their
	      analysis.

	      Interprocedural passes differ from small interprocedural
	      passes by their ability to operate across the whole program
	      at linktime.  Their analysis stage is performed early to
	      both reduce linking times and linktime memory usage by
	      not having to represent the whole program in memory.

	   d) LTO streaming.  When doing LTO, everything important gets
	      streamed into the object file.

      Compile time and/or linktime analysis stage (WPA):

	   At linktime units get streamed back and the symbol table is
	   merged.  Function bodies are not streamed in and are not
	   available.

	   e) IP propagation stage.  All IP passes execute their
	      IP propagation.  This is done based on the earlier analysis
	      without having function bodies at hand.

	   f) Ltrans streaming.  When doing WHOPR LTO, the program
	      is partitioned and streamed into multiple object files.

      Compile time and/or parallel linktime stage (ltrans):

	   Each of the object files is streamed back and compiled
	   separately.  Now the function bodies become available
	   again.

	2) Virtual clone materialization
	   (cgraph_materialize_clone)

	   IP passes can produce copies of existing functions (such
	   as versioned clones or inline clones) without actually
	   manipulating their bodies by creating virtual clones in
	   the callgraph.  At this time the virtual clones are
	   turned into real functions.

	3) IP transformation

	   All IP passes transform function bodies based on earlier
	   decisions of the IP propagation.

	4) late small IP passes

	   Simple IP passes working within a single program partition.

	5) Expansion
	   (expand_all_functions)

	   At this stage functions that need to be output into
	   assembler are identified and compiled in topological order.

	6) Output of variables and aliases
	   Now it is known which variable references were not optimized
	   out and thus all variables are output to the file.

	   Note that with -fno-toplevel-reorder passes 5 and 6
	   are combined together in cgraph_output_in_order.

   Finally there are functions to manipulate the callgraph from
   the backend.

    - cgraph_add_new_function is used to add backend produced
      functions introduced after the unit is finalized.
      The functions are enqueued for later processing and inserted
      into the callgraph with cgraph_process_new_functions.

    - cgraph_function_versioning

      produces a copy of a function into a new one (a version)
      and applies simple transformations.  */
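/* Illustrative only: a minimal sketch (not part of the driver) of how a
   frontend is expected to use the interface described above.  The parser
   entry point my_fe_next_function_decl is hypothetical; the callgraph and
   symbol table calls are the ones documented in this file.  Kept under
   "#if 0" because it is an example, not code to be compiled.  */
#if 0
static void
my_fe_compile_unit (void)
{
  tree fndecl;

  /* Hand each function to the callgraph once its body and declaration can
     no longer change.  */
  while ((fndecl = my_fe_next_function_decl ()) != NULL_TREE)
    cgraph_node::finalize_function (fndecl, /*no_collect=*/false);

  /* After the whole unit is parsed, let the middle end analyze, optimize
     and expand everything that turns out to be reachable.  */
  symtab->finalize_compilation_unit ();
}
#endif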
160 #include "config.h"
161 #include "system.h"
162 #include "coretypes.h"
163 #include "backend.h"
164 #include "target.h"
165 #include "rtl.h"
166 #include "tree.h"
167 #include "gimple.h"
168 #include "cfghooks.h"
169 #include "regset.h" /* FIXME: For reg_obstack. */
170 #include "alloc-pool.h"
171 #include "tree-pass.h"
172 #include "stringpool.h"
173 #include "gimple-ssa.h"
174 #include "cgraph.h"
175 #include "coverage.h"
176 #include "lto-streamer.h"
177 #include "fold-const.h"
178 #include "varasm.h"
179 #include "stor-layout.h"
180 #include "output.h"
181 #include "cfgcleanup.h"
182 #include "gimple-fold.h"
183 #include "gimplify.h"
184 #include "gimple-iterator.h"
185 #include "gimplify-me.h"
186 #include "tree-cfg.h"
187 #include "tree-into-ssa.h"
188 #include "tree-ssa.h"
189 #include "langhooks.h"
190 #include "toplev.h"
191 #include "debug.h"
192 #include "symbol-summary.h"
193 #include "tree-vrp.h"
194 #include "ipa-prop.h"
195 #include "gimple-pretty-print.h"
196 #include "plugin.h"
197 #include "ipa-fnsummary.h"
198 #include "ipa-utils.h"
199 #include "except.h"
200 #include "cfgloop.h"
201 #include "context.h"
202 #include "pass_manager.h"
203 #include "tree-nested.h"
204 #include "dbgcnt.h"
205 #include "tree-chkp.h"
206 #include "lto-section-names.h"
207 #include "stringpool.h"
208 #include "attribs.h"
210 /* Queue of cgraph nodes scheduled to be added into cgraph. This is a
211 secondary queue used during optimization to accommodate passes that
212 may generate new functions that need to be optimized and expanded. */
213 vec<cgraph_node *> cgraph_new_nodes;
215 static void expand_all_functions (void);
216 static void mark_functions_to_output (void);
217 static void handle_alias_pairs (void);
219 /* Used for vtable lookup in thunk adjusting. */
220 static GTY (()) tree vtable_entry_type;
222 /* Return true if this symbol is a function from the C frontend specified
223 directly in RTL form (with "__RTL"). */
225 bool
226 symtab_node::native_rtl_p () const
228 if (TREE_CODE (decl) != FUNCTION_DECL)
229 return false;
230 if (!DECL_STRUCT_FUNCTION (decl))
231 return false;
232 return DECL_STRUCT_FUNCTION (decl)->curr_properties & PROP_rtl;
235 /* Determine if symbol declaration is needed. That is, visible to something
236 either outside this translation unit or something magic in the system
237 configury. */
238 bool
239 symtab_node::needed_p (void)
241 /* Double check that no one output the function into assembly file
242 early. */
243 if (!native_rtl_p ())
244 gcc_checking_assert
245 (!DECL_ASSEMBLER_NAME_SET_P (decl)
246 || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)));
248 if (!definition)
249 return false;
251 if (DECL_EXTERNAL (decl))
252 return false;
254 /* If the user told us it is used, then it must be so. */
255 if (force_output)
256 return true;
258 /* ABI forced symbols are needed when they are external. */
259 if (forced_by_abi && TREE_PUBLIC (decl))
260 return true;
262 /* Keep constructors, destructors and virtual functions. */
263 if (TREE_CODE (decl) == FUNCTION_DECL
264 && (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl)))
265 return true;
267 /* Externally visible variables must be output. The exception is
268 COMDAT variables that must be output only when they are needed. */
269 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
270 return true;
272 return false;
275 /* Head and terminator of the queue of nodes to be processed while building
276 callgraph. */
278 static symtab_node symtab_terminator;
279 static symtab_node *queued_nodes = &symtab_terminator;
281 /* Add NODE to the queue starting at QUEUED_NODES.  The queue is linked
282 via AUX pointers and terminated by the symtab_terminator sentinel. */
284 static void
285 enqueue_node (symtab_node *node)
287 if (node->aux)
288 return;
289 gcc_checking_assert (queued_nodes);
290 node->aux = queued_nodes;
291 queued_nodes = node;
294 /* Process CGRAPH_NEW_FUNCTIONS and perform the actions necessary to add
295 these functions into the callgraph so that they look like ordinary
296 reachable functions inserted into the callgraph at construction time. */
298 void
299 symbol_table::process_new_functions (void)
301 tree fndecl;
303 if (!cgraph_new_nodes.exists ())
304 return;
306 handle_alias_pairs ();
307 /* Note that this queue may grow as it is being processed, as the new
308 functions may generate new ones. */
309 for (unsigned i = 0; i < cgraph_new_nodes.length (); i++)
311 cgraph_node *node = cgraph_new_nodes[i];
312 fndecl = node->decl;
313 switch (state)
315 case CONSTRUCTION:
316 /* At construction time we just need to finalize function and move
317 it into reachable functions list. */
319 cgraph_node::finalize_function (fndecl, false);
320 call_cgraph_insertion_hooks (node);
321 enqueue_node (node);
322 break;
324 case IPA:
325 case IPA_SSA:
326 case IPA_SSA_AFTER_INLINING:
327 /* When IPA optimization has already started, do all essential
328 transformations that have already been performed on the whole
329 cgraph but not on this function. */
331 gimple_register_cfg_hooks ();
332 if (!node->analyzed)
333 node->analyze ();
334 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
335 if ((state == IPA_SSA || state == IPA_SSA_AFTER_INLINING)
336 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
338 bool summaried_computed = ipa_fn_summaries != NULL;
339 g->get_passes ()->execute_early_local_passes ();
340 /* Early passes compute inline parameters to do inlining
341 and splitting. This is redundant for functions added late.
342 Just throw away whatever it did. */
343 if (!summaried_computed)
344 ipa_free_fn_summary ();
346 else if (ipa_fn_summaries != NULL)
347 compute_fn_summary (node, true);
348 free_dominance_info (CDI_POST_DOMINATORS);
349 free_dominance_info (CDI_DOMINATORS);
350 pop_cfun ();
351 call_cgraph_insertion_hooks (node);
352 break;
354 case EXPANSION:
355 /* Functions created during expansion shall be compiled
356 directly. */
357 node->process = 0;
358 call_cgraph_insertion_hooks (node);
359 node->expand ();
360 break;
362 default:
363 gcc_unreachable ();
364 break;
368 cgraph_new_nodes.release ();
371 /* As a GCC extension we allow redefinition of the function. The
372 semantics when the two bodies differ are not well defined.
373 We replace the old body with the new body, so in unit-at-a-time mode
374 we always use the new body, while in normal mode we may end up with
375 the old body inlined into some functions and the new body expanded and
376 inlined in others.
378 ??? It may make more sense to use one body for inlining and the other
379 body for expanding the function but this is difficult to do. */
381 void
382 cgraph_node::reset (void)
384 /* If process is set, then we have already begun whole-unit analysis.
385 This is *not* testing for whether we've already emitted the function.
386 That case can be sort-of legitimately seen with real function redefinition
387 errors. I would argue that the front end should never present us with
388 such a case, but don't enforce that for now. */
389 gcc_assert (!process);
391 /* Reset our data structures so we can analyze the function again. */
392 memset (&local, 0, sizeof (local));
393 memset (&global, 0, sizeof (global));
394 memset (&rtl, 0, sizeof (rtl));
395 analyzed = false;
396 definition = false;
397 alias = false;
398 transparent_alias = false;
399 weakref = false;
400 cpp_implicit_alias = false;
402 remove_callees ();
403 remove_all_references ();
406 /* Return true when there are references to the node. INCLUDE_SELF is
407 true if a self reference counts as a reference. */
409 bool
410 symtab_node::referred_to_p (bool include_self)
412 ipa_ref *ref = NULL;
414 /* See if there are any references at all. */
415 if (iterate_referring (0, ref))
416 return true;
417 /* For functions check also calls. */
418 cgraph_node *cn = dyn_cast <cgraph_node *> (this);
419 if (cn && cn->callers)
421 if (include_self)
422 return true;
423 for (cgraph_edge *e = cn->callers; e; e = e->next_caller)
424 if (e->caller != this)
425 return true;
427 return false;
430 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
431 logic in effect. If NO_COLLECT is true, then our caller cannot stand to have
432 the garbage collector run at the moment. We would need to either create
433 a new GC context, or just not compile right now. */
435 void
436 cgraph_node::finalize_function (tree decl, bool no_collect)
438 cgraph_node *node = cgraph_node::get_create (decl);
440 if (node->definition)
442 /* Nested functions should only be defined once. */
443 gcc_assert (!DECL_CONTEXT (decl)
444 || TREE_CODE (DECL_CONTEXT (decl)) != FUNCTION_DECL);
445 node->reset ();
446 node->local.redefined_extern_inline = true;
449 /* Set definition first before calling notice_global_symbol so that
450 it is available to notice_global_symbol. */
451 node->definition = true;
452 notice_global_symbol (decl);
453 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
454 if (!flag_toplevel_reorder)
455 node->no_reorder = true;
457 /* With -fkeep-inline-functions we are keeping all inline functions except
458 for extern inline ones. */
459 if (flag_keep_inline_functions
460 && DECL_DECLARED_INLINE_P (decl)
461 && !DECL_EXTERNAL (decl)
462 && !DECL_DISREGARD_INLINE_LIMITS (decl))
463 node->force_output = 1;
465 /* __RTL functions were already output as soon as they were parsed (due
466 to the large amount of global state in the backend).
467 Mark such functions as "force_output" to reflect the fact that they
468 will be in the asm file when considering the symbols they reference.
469 The attempt to output them later on will bail out immediately. */
470 if (node->native_rtl_p ())
471 node->force_output = 1;
473 /* When not optimizing, also output the static functions. (see
474 PR24561), but don't do so for always_inline functions, functions
475 declared inline and nested functions. These were optimized out
476 in the original implementation and it is unclear whether we want
477 to change the behavior here. */
478 if (((!opt_for_fn (decl, optimize) || flag_keep_static_functions
479 || node->no_reorder)
480 && !node->cpp_implicit_alias
481 && !DECL_DISREGARD_INLINE_LIMITS (decl)
482 && !DECL_DECLARED_INLINE_P (decl)
483 && !(DECL_CONTEXT (decl)
484 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL))
485 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
486 node->force_output = 1;
488 /* If we've not yet emitted decl, tell the debug info about it. */
489 if (!TREE_ASM_WRITTEN (decl))
490 (*debug_hooks->deferred_inline_function) (decl);
492 if (!no_collect)
493 ggc_collect ();
495 if (symtab->state == CONSTRUCTION
496 && (node->needed_p () || node->referred_to_p ()))
497 enqueue_node (node);
500 /* Add the function FNDECL to the call graph.
501 Unlike finalize_function, this function is intended to be used
502 by the middle end and allows insertion of a new function at an arbitrary
503 point of compilation. The function can be either in high, low or SSA form
504 GIMPLE.
506 The function is assumed to be reachable and to have its address taken (so no
507 API breaking optimizations are performed on it).
509 The main work done by this function is to enqueue the function for later
510 processing to avoid the need for the passes to be re-entrant. */
512 void
513 cgraph_node::add_new_function (tree fndecl, bool lowered)
515 gcc::pass_manager *passes = g->get_passes ();
516 cgraph_node *node;
518 if (dump_file)
520 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
521 const char *function_type = ((gimple_has_body_p (fndecl))
522 ? (lowered
523 ? (gimple_in_ssa_p (fn)
524 ? "ssa gimple"
525 : "low gimple")
526 : "high gimple")
527 : "to-be-gimplified");
528 fprintf (dump_file,
529 "Added new %s function %s to callgraph\n",
530 function_type,
531 fndecl_name (fndecl));
534 switch (symtab->state)
536 case PARSING:
537 cgraph_node::finalize_function (fndecl, false);
538 break;
539 case CONSTRUCTION:
540 /* Just enqueue function to be processed at nearest occurrence. */
541 node = cgraph_node::get_create (fndecl);
542 if (lowered)
543 node->lowered = true;
544 cgraph_new_nodes.safe_push (node);
545 break;
547 case IPA:
548 case IPA_SSA:
549 case IPA_SSA_AFTER_INLINING:
550 case EXPANSION:
551 /* Bring the function into finalized state and enqueue for later
552 analyzing and compilation. */
553 node = cgraph_node::get_create (fndecl);
554 node->local.local = false;
555 node->definition = true;
556 node->force_output = true;
557 if (TREE_PUBLIC (fndecl))
558 node->externally_visible = true;
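/* During expansion a newly added body may still be in high GIMPLE; lower
   it and run the early local passes so it matches the state of the other
   functions being compiled at this point.  */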
559 if (!lowered && symtab->state == EXPANSION)
561 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
562 gimple_register_cfg_hooks ();
563 bitmap_obstack_initialize (NULL);
564 execute_pass_list (cfun, passes->all_lowering_passes);
565 passes->execute_early_local_passes ();
566 bitmap_obstack_release (NULL);
567 pop_cfun ();
569 lowered = true;
571 if (lowered)
572 node->lowered = true;
573 cgraph_new_nodes.safe_push (node);
574 break;
576 case FINISHED:
577 /* At the very end of compilation we have to do all the work up
578 to expansion. */
579 node = cgraph_node::create (fndecl);
580 if (lowered)
581 node->lowered = true;
582 node->definition = true;
583 node->analyze ();
584 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
585 gimple_register_cfg_hooks ();
586 bitmap_obstack_initialize (NULL);
587 if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
588 g->get_passes ()->execute_early_local_passes ();
589 bitmap_obstack_release (NULL);
590 pop_cfun ();
591 node->expand ();
592 break;
594 default:
595 gcc_unreachable ();
598 /* Set a personality if required and we already passed EH lowering. */
599 if (lowered
600 && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
601 == eh_personality_lang))
602 DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
605 /* Analyze the function scheduled to be output. */
606 void
607 cgraph_node::analyze (void)
609 if (native_rtl_p ())
611 analyzed = true;
612 return;
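/* Not an __RTL function: analyze thunks, aliases, dispatchers and regular
   bodies below, reporting any diagnostics at the function's definition.  */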
615 tree decl = this->decl;
616 location_t saved_loc = input_location;
617 input_location = DECL_SOURCE_LOCATION (decl);
619 if (thunk.thunk_p)
621 cgraph_node *t = cgraph_node::get (thunk.alias);
623 create_edge (t, NULL, t->count, CGRAPH_FREQ_BASE);
624 callees->can_throw_external = !TREE_NOTHROW (t->decl);
625 /* Target code in expand_thunk may need the thunk's target
626 to be analyzed, so recurse here. */
627 if (!t->analyzed)
628 t->analyze ();
629 if (t->alias)
631 t = t->get_alias_target ();
632 if (!t->analyzed)
633 t->analyze ();
635 if (!expand_thunk (false, false))
637 thunk.alias = NULL;
638 return;
640 thunk.alias = NULL;
642 if (alias)
643 resolve_alias (cgraph_node::get (alias_target), transparent_alias);
644 else if (dispatcher_function)
646 /* Generate the dispatcher body of multi-versioned functions. */
647 cgraph_function_version_info *dispatcher_version_info
648 = function_version ();
649 if (dispatcher_version_info != NULL
650 && (dispatcher_version_info->dispatcher_resolver
651 == NULL_TREE))
653 tree resolver = NULL_TREE;
654 gcc_assert (targetm.generate_version_dispatcher_body);
655 resolver = targetm.generate_version_dispatcher_body (this);
656 gcc_assert (resolver != NULL_TREE);
659 else
661 push_cfun (DECL_STRUCT_FUNCTION (decl));
663 assign_assembler_name_if_needed (decl);
665 /* Make sure to gimplify bodies only once. While analyzing a
666 function we lower it, which requires gimplified nested
667 functions, so we can end up here with an already gimplified
668 body. */
669 if (!gimple_has_body_p (decl))
670 gimplify_function_tree (decl);
672 /* Lower the function. */
673 if (!lowered)
675 if (nested)
676 lower_nested_functions (decl);
677 gcc_assert (!nested);
679 gimple_register_cfg_hooks ();
680 bitmap_obstack_initialize (NULL);
681 execute_pass_list (cfun, g->get_passes ()->all_lowering_passes);
682 free_dominance_info (CDI_POST_DOMINATORS);
683 free_dominance_info (CDI_DOMINATORS);
684 compact_blocks ();
685 bitmap_obstack_release (NULL);
686 lowered = true;
689 pop_cfun ();
691 analyzed = true;
693 input_location = saved_loc;
696 /* The C++ frontend produces same-body aliases all over the place, even before
697 PCH gets streamed out. It relies on us linking the aliases with their function
698 in order to do the fixups, but ipa-ref is not PCH safe. Consequently we
699 first produce aliases without links, but once the C++ FE is sure it won't
700 stream PCH we build the links via this function. */
702 void
703 symbol_table::process_same_body_aliases (void)
705 symtab_node *node;
706 FOR_EACH_SYMBOL (node)
707 if (node->cpp_implicit_alias && !node->analyzed)
708 node->resolve_alias
709 (VAR_P (node->alias_target)
710 ? (symtab_node *)varpool_node::get_create (node->alias_target)
711 : (symtab_node *)cgraph_node::get_create (node->alias_target));
712 cpp_implicit_aliases_done = true;
715 /* Process attributes common for vars and functions. */
717 static void
718 process_common_attributes (symtab_node *node, tree decl)
720 tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
722 if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
724 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
725 "%<weakref%> attribute should be accompanied with"
726 " an %<alias%> attribute");
727 DECL_WEAK (decl) = 0;
728 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
729 DECL_ATTRIBUTES (decl));
732 if (lookup_attribute ("no_reorder", DECL_ATTRIBUTES (decl)))
733 node->no_reorder = 1;
736 /* Look for externally_visible and used attributes and mark cgraph nodes
737 accordingly.
739 We cannot mark the nodes at the point the attributes are processed (in
740 handle_*_attribute) because the copy of the declarations available at that
741 point may not be canonical. For example, in:
743 void f();
744 void f() __attribute__((used));
746 the declaration we see in handle_used_attribute will be the second
747 declaration -- but the front end will subsequently merge that declaration
748 with the original declaration and discard the second declaration.
750 Furthermore, we can't mark these nodes in finalize_function because:
752 void f() {}
753 void f() __attribute__((externally_visible));
755 is valid.
757 So, we walk the nodes at the end of the translation unit, applying the
758 attributes at that point. */
760 static void
761 process_function_and_variable_attributes (cgraph_node *first,
762 varpool_node *first_var)
764 cgraph_node *node;
765 varpool_node *vnode;
767 for (node = symtab->first_function (); node != first;
768 node = symtab->next_function (node))
770 tree decl = node->decl;
771 if (DECL_PRESERVE_P (decl))
772 node->mark_force_output ();
773 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
775 if (! TREE_PUBLIC (node->decl))
776 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
777 "%<externally_visible%>"
778 " attribute have effect only on public objects");
780 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
781 && (node->definition && !node->alias))
783 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
784 "%<weakref%> attribute ignored"
785 " because function is defined");
786 DECL_WEAK (decl) = 0;
787 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
788 DECL_ATTRIBUTES (decl));
791 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
792 && !DECL_DECLARED_INLINE_P (decl)
793 /* redefining extern inline function makes it DECL_UNINLINABLE. */
794 && !DECL_UNINLINABLE (decl))
795 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
796 "always_inline function might not be inlinable");
798 process_common_attributes (node, decl);
800 for (vnode = symtab->first_variable (); vnode != first_var;
801 vnode = symtab->next_variable (vnode))
803 tree decl = vnode->decl;
804 if (DECL_EXTERNAL (decl)
805 && DECL_INITIAL (decl))
806 varpool_node::finalize_decl (decl);
807 if (DECL_PRESERVE_P (decl))
808 vnode->force_output = true;
809 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
811 if (! TREE_PUBLIC (vnode->decl))
812 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
813 "%<externally_visible%>"
814 " attribute have effect only on public objects");
816 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
817 && vnode->definition
818 && DECL_INITIAL (decl))
820 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
821 "%<weakref%> attribute ignored"
822 " because variable is initialized");
823 DECL_WEAK (decl) = 0;
824 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
825 DECL_ATTRIBUTES (decl));
827 process_common_attributes (vnode, decl);
831 /* Mark DECL as finalized. By finalizing the declaration, the frontend instructs
832 the middle end to output the variable to the asm file, if needed or externally
833 visible. */
835 void
836 varpool_node::finalize_decl (tree decl)
838 varpool_node *node = varpool_node::get_create (decl);
840 gcc_assert (TREE_STATIC (decl) || DECL_EXTERNAL (decl));
842 if (node->definition)
843 return;
844 /* Set definition first before calling notice_global_symbol so that
845 it is available to notice_global_symbol. */
846 node->definition = true;
847 notice_global_symbol (decl);
848 if (!flag_toplevel_reorder)
849 node->no_reorder = true;
850 if (TREE_THIS_VOLATILE (decl) || DECL_PRESERVE_P (decl)
851 /* Traditionally we do not eliminate static variables when not
852 optimizing and when not doing toplevel reorder. */
853 || (node->no_reorder && !DECL_COMDAT (node->decl)
854 && !DECL_ARTIFICIAL (node->decl)))
855 node->force_output = true;
857 if (symtab->state == CONSTRUCTION
858 && (node->needed_p () || node->referred_to_p ()))
859 enqueue_node (node);
860 if (symtab->state >= IPA_SSA)
861 node->analyze ();
862 /* Some frontends produce various interface variables after compilation
863 has finished. */
864 if (symtab->state == FINISHED
865 || (node->no_reorder
866 && symtab->state == EXPANSION))
867 node->assemble_decl ();
869 if (DECL_INITIAL (decl))
870 chkp_register_var_initializer (decl);
873 /* EDGE is a polymorphic call. Mark all possible targets as reachable
874 and if there is only one target, perform trivial devirtualization.
875 REACHABLE_CALL_TARGETS collects target lists we already walked to
876 avoid duplicate work. */
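/* As an illustration (not taken from the code below), given

     struct S final { virtual int f (); };

   a call p->f () through an S * has exactly one possible target, so the
   indirect edge can be turned into a direct call to S::f here.  */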
878 static void
879 walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
880 cgraph_edge *edge)
882 unsigned int i;
883 void *cache_token;
884 bool final;
885 vec <cgraph_node *>targets
886 = possible_polymorphic_call_targets
887 (edge, &final, &cache_token);
889 if (!reachable_call_targets->add (cache_token))
891 if (symtab->dump_file)
892 dump_possible_polymorphic_call_targets
893 (symtab->dump_file, edge);
895 for (i = 0; i < targets.length (); i++)
897 /* Do not bother to mark virtual methods in anonymous namespace;
898 either we will find use of virtual table defining it, or it is
899 unused. */
900 if (targets[i]->definition
901 && TREE_CODE
902 (TREE_TYPE (targets[i]->decl))
903 == METHOD_TYPE
904 && !type_in_anonymous_namespace_p
905 (TYPE_METHOD_BASETYPE (TREE_TYPE (targets[i]->decl))))
906 enqueue_node (targets[i]);
910 /* Very trivial devirtualization; when the type is
911 final or anonymous (so we know all of its derived types)
912 and there is only one possible virtual call target,
913 make the edge direct. */
914 if (final)
916 if (targets.length () <= 1 && dbg_cnt (devirt))
918 cgraph_node *target;
919 if (targets.length () == 1)
920 target = targets[0];
921 else
922 target = cgraph_node::create
923 (builtin_decl_implicit (BUILT_IN_UNREACHABLE));
925 if (symtab->dump_file)
927 fprintf (symtab->dump_file,
928 "Devirtualizing call: ");
929 print_gimple_stmt (symtab->dump_file,
930 edge->call_stmt, 0,
931 TDF_SLIM);
933 if (dump_enabled_p ())
935 location_t locus = gimple_location_safe (edge->call_stmt);
936 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
937 "devirtualizing call in %s to %s\n",
938 edge->caller->name (), target->name ());
941 edge->make_direct (target);
942 edge->redirect_call_stmt_to_callee ();
944 /* Call to __builtin_unreachable shouldn't be instrumented. */
945 if (!targets.length ())
946 gimple_call_set_with_bounds (edge->call_stmt, false);
948 if (symtab->dump_file)
950 fprintf (symtab->dump_file,
951 "Devirtualized as: ");
952 print_gimple_stmt (symtab->dump_file,
953 edge->call_stmt, 0,
954 TDF_SLIM);
960 /* Issue appropriate warnings for the global declaration DECL. */
962 static void
963 check_global_declaration (symtab_node *snode)
965 const char *decl_file;
966 tree decl = snode->decl;
968 /* Warn about any function declared static but not defined. We don't
969 warn about variables, because many programs have static variables
970 that exist only to get some text into the object file. */
971 if (TREE_CODE (decl) == FUNCTION_DECL
972 && DECL_INITIAL (decl) == 0
973 && DECL_EXTERNAL (decl)
974 && ! DECL_ARTIFICIAL (decl)
975 && ! TREE_NO_WARNING (decl)
976 && ! TREE_PUBLIC (decl)
977 && (warn_unused_function
978 || snode->referred_to_p (/*include_self=*/false)))
980 if (snode->referred_to_p (/*include_self=*/false))
981 pedwarn (input_location, 0, "%q+F used but never defined", decl);
982 else
983 warning (OPT_Wunused_function, "%q+F declared %<static%> but never defined", decl);
984 /* This symbol is effectively an "extern" declaration now. */
985 TREE_PUBLIC (decl) = 1;
988 /* Warn about static fns or vars defined but not used. */
989 if (((warn_unused_function && TREE_CODE (decl) == FUNCTION_DECL)
990 || (((warn_unused_variable && ! TREE_READONLY (decl))
991 || (warn_unused_const_variable > 0 && TREE_READONLY (decl)
992 && (warn_unused_const_variable == 2
993 || (main_input_filename != NULL
994 && (decl_file = DECL_SOURCE_FILE (decl)) != NULL
995 && filename_cmp (main_input_filename,
996 decl_file) == 0))))
997 && VAR_P (decl)))
998 && ! DECL_IN_SYSTEM_HEADER (decl)
999 && ! snode->referred_to_p (/*include_self=*/false)
1000 /* This TREE_USED check is needed in addition to referred_to_p
1001 above, because the `__unused__' attribute is not being
1002 considered for referred_to_p. */
1003 && ! TREE_USED (decl)
1004 /* The TREE_USED bit for file-scope decls is kept in the identifier,
1005 to handle multiple external decls in different scopes. */
1006 && ! (DECL_NAME (decl) && TREE_USED (DECL_NAME (decl)))
1007 && ! DECL_EXTERNAL (decl)
1008 && ! DECL_ARTIFICIAL (decl)
1009 && ! DECL_ABSTRACT_ORIGIN (decl)
1010 && ! TREE_PUBLIC (decl)
1011 /* A volatile variable might be used in some non-obvious way. */
1012 && (! VAR_P (decl) || ! TREE_THIS_VOLATILE (decl))
1013 /* Global register variables must be declared to reserve them. */
1014 && ! (VAR_P (decl) && DECL_REGISTER (decl))
1015 /* Global ctors and dtors are called by the runtime. */
1016 && (TREE_CODE (decl) != FUNCTION_DECL
1017 || (!DECL_STATIC_CONSTRUCTOR (decl)
1018 && !DECL_STATIC_DESTRUCTOR (decl)))
1019 /* Otherwise, ask the language. */
1020 && lang_hooks.decls.warn_unused_global (decl))
1021 warning_at (DECL_SOURCE_LOCATION (decl),
1022 (TREE_CODE (decl) == FUNCTION_DECL)
1023 ? OPT_Wunused_function
1024 : (TREE_READONLY (decl)
1025 ? OPT_Wunused_const_variable_
1026 : OPT_Wunused_variable),
1027 "%qD defined but not used", decl);
1030 /* Discover all functions and variables that are trivially needed, analyze
1031 them as well as all functions and variables referred to by them. */
1032 static cgraph_node *first_analyzed;
1033 static varpool_node *first_analyzed_var;
1035 /* FIRST_TIME is set to TRUE for the first time we are called for a
1036 translation unit from finalize_compilation_unit() or false
1037 otherwise. */
1039 static void
1040 analyze_functions (bool first_time)
1042 /* Keep track of already processed nodes when called multiple times for
1043 intermodule optimization. */
1044 cgraph_node *first_handled = first_analyzed;
1045 varpool_node *first_handled_var = first_analyzed_var;
1046 hash_set<void *> reachable_call_targets;
1048 symtab_node *node;
1049 symtab_node *next;
1050 int i;
1051 ipa_ref *ref;
1052 bool changed = true;
1053 location_t saved_loc = input_location;
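/* Temporaries used while building the callgraph live on the default
   bitmap obstack; the symbol table enters the construction state.  */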
1055 bitmap_obstack_initialize (NULL);
1056 symtab->state = CONSTRUCTION;
1057 input_location = UNKNOWN_LOCATION;
1059 /* Ugly, but the fixup cannot happen at the time the same-body alias is created;
1060 the C++ FE would be confused about the COMDAT groups being right. */
1061 if (symtab->cpp_implicit_aliases_done)
1062 FOR_EACH_SYMBOL (node)
1063 if (node->cpp_implicit_alias)
1064 node->fixup_same_cpp_alias_visibility (node->get_alias_target ());
1065 build_type_inheritance_graph ();
1067 /* Analysis adds static variables that in turn add references to new functions.
1068 So we need to iterate the process until it stabilizes. */
1069 while (changed)
1071 changed = false;
1072 process_function_and_variable_attributes (first_analyzed,
1073 first_analyzed_var);
1075 /* First identify the trivially needed symbols. */
1076 for (node = symtab->first_symbol ();
1077 node != first_analyzed
1078 && node != first_analyzed_var; node = node->next)
1080 /* Convert COMDAT group designators to IDENTIFIER_NODEs. */
1081 node->get_comdat_group_id ();
1082 if (node->needed_p ())
1084 enqueue_node (node);
1085 if (!changed && symtab->dump_file)
1086 fprintf (symtab->dump_file, "Trivially needed symbols:");
1087 changed = true;
1088 if (symtab->dump_file)
1089 fprintf (symtab->dump_file, " %s", node->asm_name ());
1090 if (!changed && symtab->dump_file)
1091 fprintf (symtab->dump_file, "\n");
1093 if (node == first_analyzed
1094 || node == first_analyzed_var)
1095 break;
1097 symtab->process_new_functions ();
1098 first_analyzed_var = symtab->first_variable ();
1099 first_analyzed = symtab->first_function ();
1101 if (changed && symtab->dump_file)
1102 fprintf (symtab->dump_file, "\n");
1104 /* Lower representation, build callgraph edges and references for all trivially
1105 needed symbols and all symbols referred by them. */
1106 while (queued_nodes != &symtab_terminator)
1108 changed = true;
1109 node = queued_nodes;
1110 queued_nodes = (symtab_node *)queued_nodes->aux;
1111 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
1112 if (cnode && cnode->definition)
1114 cgraph_edge *edge;
1115 tree decl = cnode->decl;
1117 /* ??? It is possible to create an extern inline function
1118 and later use the weak alias attribute to kill its body.
1119 See gcc.c-torture/compile/20011119-1.c */
1120 if (!DECL_STRUCT_FUNCTION (decl)
1121 && !cnode->alias
1122 && !cnode->thunk.thunk_p
1123 && !cnode->dispatcher_function)
1125 cnode->reset ();
1126 cnode->local.redefined_extern_inline = true;
1127 continue;
1130 if (!cnode->analyzed)
1131 cnode->analyze ();
1133 for (edge = cnode->callees; edge; edge = edge->next_callee)
1134 if (edge->callee->definition
1135 && (!DECL_EXTERNAL (edge->callee->decl)
1136 /* When not optimizing, do not try to analyze extern
1137 inline functions. Doing so is pointless. */
1138 || opt_for_fn (edge->callee->decl, optimize)
1139 /* Weakrefs need to be preserved. */
1140 || edge->callee->alias
1141 /* always_inline functions are inlined even at -O0. */
1142 || lookup_attribute
1143 ("always_inline",
1144 DECL_ATTRIBUTES (edge->callee->decl))
1145 /* Multiversioned functions need the dispatcher to
1146 be produced locally even for extern functions. */
1147 || edge->callee->function_version ()))
1148 enqueue_node (edge->callee);
1149 if (opt_for_fn (cnode->decl, optimize)
1150 && opt_for_fn (cnode->decl, flag_devirtualize))
1152 cgraph_edge *next;
1154 for (edge = cnode->indirect_calls; edge; edge = next)
1156 next = edge->next_callee;
1157 if (edge->indirect_info->polymorphic)
1158 walk_polymorphic_call_targets (&reachable_call_targets,
1159 edge);
1163 /* If decl is a clone of an abstract function,
1164 mark that abstract function so that we don't release its body.
1165 The DECL_INITIAL() of that abstract function declaration
1166 will be later needed to output debug info. */
1167 if (DECL_ABSTRACT_ORIGIN (decl))
1169 cgraph_node *origin_node
1170 = cgraph_node::get_create (DECL_ABSTRACT_ORIGIN (decl));
1171 origin_node->used_as_abstract_origin = true;
1173 /* Preserve the node of a function's function context (the function it
1174 is nested in). It will later be needed to output debug info. */
1175 if (tree fn = decl_function_context (decl))
1177 cgraph_node *origin_node = cgraph_node::get_create (fn);
1178 enqueue_node (origin_node);
1181 else
1183 varpool_node *vnode = dyn_cast <varpool_node *> (node);
1184 if (vnode && vnode->definition && !vnode->analyzed)
1185 vnode->analyze ();
1188 if (node->same_comdat_group)
1190 symtab_node *next;
1191 for (next = node->same_comdat_group;
1192 next != node;
1193 next = next->same_comdat_group)
1194 if (!next->comdat_local_p ())
1195 enqueue_node (next);
1197 for (i = 0; node->iterate_reference (i, ref); i++)
1198 if (ref->referred->definition
1199 && (!DECL_EXTERNAL (ref->referred->decl)
1200 || ((TREE_CODE (ref->referred->decl) != FUNCTION_DECL
1201 && optimize)
1202 || (TREE_CODE (ref->referred->decl) == FUNCTION_DECL
1203 && opt_for_fn (ref->referred->decl, optimize))
1204 || node->alias
1205 || ref->referred->alias)))
1206 enqueue_node (ref->referred);
1207 symtab->process_new_functions ();
1210 update_type_inheritance_graph ();
1212 /* Collect entry points to the unit. */
1213 if (symtab->dump_file)
1215 fprintf (symtab->dump_file, "\n\nInitial ");
1216 symtab->dump (symtab->dump_file);
1219 if (first_time)
1221 symtab_node *snode;
1222 FOR_EACH_SYMBOL (snode)
1223 check_global_declaration (snode);
1226 if (symtab->dump_file)
1227 fprintf (symtab->dump_file, "\nRemoving unused symbols:");
1229 for (node = symtab->first_symbol ();
1230 node != first_handled
1231 && node != first_handled_var; node = next)
1233 next = node->next;
1234 if (!node->aux && !node->referred_to_p ())
1236 if (symtab->dump_file)
1237 fprintf (symtab->dump_file, " %s", node->name ());
1239 /* See if the debugger can use anything before the DECL
1240 passes away. Perhaps it can notice a DECL that is now a
1241 constant and can tag the early DIE with an appropriate
1242 attribute.
1244 Otherwise, this is the last chance the debug_hooks have
1245 at looking at optimized away DECLs, since
1246 late_global_decl will subsequently be called from the
1247 contents of the now pruned symbol table. */
1248 if (VAR_P (node->decl)
1249 && !decl_function_context (node->decl))
1251 /* We are reclaiming totally unreachable code and variables
1252 so they effectively appear as readonly. Show that to
1253 the debug machinery. */
1254 TREE_READONLY (node->decl) = 1;
1255 node->definition = false;
1256 (*debug_hooks->late_global_decl) (node->decl);
1259 node->remove ();
1260 continue;
1262 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
1264 tree decl = node->decl;
1266 if (cnode->definition && !gimple_has_body_p (decl)
1267 && !cnode->alias
1268 && !cnode->thunk.thunk_p)
1269 cnode->reset ();
1271 gcc_assert (!cnode->definition || cnode->thunk.thunk_p
1272 || cnode->alias
1273 || gimple_has_body_p (decl)
1274 || cnode->native_rtl_p ());
1275 gcc_assert (cnode->analyzed == cnode->definition);
1277 node->aux = NULL;
1279 for (;node; node = node->next)
1280 node->aux = NULL;
1281 first_analyzed = symtab->first_function ();
1282 first_analyzed_var = symtab->first_variable ();
1283 if (symtab->dump_file)
1285 fprintf (symtab->dump_file, "\n\nReclaimed ");
1286 symtab->dump (symtab->dump_file);
1288 bitmap_obstack_release (NULL);
1289 ggc_collect ();
1290 /* Initialize assembler name hash, in particular we want to trigger C++
1291 mangling and same body alias creation before we free DECL_ARGUMENTS
1292 used by it. */
1293 if (!seen_error ())
1294 symtab->symtab_initialize_asm_name_hash ();
1296 input_location = saved_loc;
1299 /* Translate the ugly representation of aliases as alias pairs into a nice
1300 representation in the callgraph. We don't handle all cases yet,
1301 unfortunately. */
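/* For example (illustrative only), a unit containing

     int real_fn (void) { return 0; }
     int alias_fn (void) __attribute__ ((alias ("real_fn")));

   reaches this point with the pair (alias_fn, "real_fn") recorded in
   alias_pairs; the code below turns it into a cgraph alias of real_fn.  */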
1303 static void
1304 handle_alias_pairs (void)
1306 alias_pair *p;
1307 unsigned i;
1309 for (i = 0; alias_pairs && alias_pairs->iterate (i, &p);)
1311 symtab_node *target_node = symtab_node::get_for_asmname (p->target);
1313 /* Weakrefs with target not defined in current unit are easy to handle:
1314 they behave just as external variables except we need to note the
1315 alias flag to later output the weakref pseudo op into asm file. */
1316 if (!target_node
1317 && lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL)
1319 symtab_node *node = symtab_node::get (p->decl);
1320 if (node)
1322 node->alias_target = p->target;
1323 node->weakref = true;
1324 node->alias = true;
1325 node->transparent_alias = true;
1327 alias_pairs->unordered_remove (i);
1328 continue;
1330 else if (!target_node)
1332 error ("%q+D aliased to undefined symbol %qE", p->decl, p->target);
1333 symtab_node *node = symtab_node::get (p->decl);
1334 if (node)
1335 node->alias = false;
1336 alias_pairs->unordered_remove (i);
1337 continue;
1340 if (DECL_EXTERNAL (target_node->decl)
1341 /* We use local aliases for C++ thunks to force the tailcall
1342 to bind locally. This is a hack - to keep it working do
1343 the following (which is not strictly correct). */
1344 && (TREE_CODE (target_node->decl) != FUNCTION_DECL
1345 || ! DECL_VIRTUAL_P (target_node->decl))
1346 && ! lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)))
1348 error ("%q+D aliased to external symbol %qE",
1349 p->decl, p->target);
1352 if (TREE_CODE (p->decl) == FUNCTION_DECL
1353 && target_node && is_a <cgraph_node *> (target_node))
1355 tree t1 = TREE_TYPE (p->decl);
1356 tree t2 = TREE_TYPE (target_node->decl);
1358 if (lookup_attribute ("ifunc", DECL_ATTRIBUTES (p->decl)))
1360 t2 = TREE_TYPE (t2);
1361 if (POINTER_TYPE_P (t2))
1363 t2 = TREE_TYPE (t2);
1364 if (!FUNC_OR_METHOD_TYPE_P (t2))
1366 if (warning_at (DECL_SOURCE_LOCATION (p->decl),
1367 OPT_Wattributes,
1368 "%q+D %<ifunc%> resolver should return "
1369 "a function pointer",
1370 p->decl))
1371 inform (DECL_SOURCE_LOCATION (target_node->decl),
1372 "resolver declaration here");
1374 t2 = NULL_TREE;
1377 else
1379 /* Deal with static member function pointers. */
1380 if (TREE_CODE (t2) == RECORD_TYPE
1381 && TYPE_FIELDS (t2)
1382 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (t2))) == POINTER_TYPE
1383 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (t2))))
1384 == METHOD_TYPE))
1385 t2 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (t2)));
1386 else
1388 error ("%q+D %<ifunc%> resolver must return a function "
1389 "pointer",
1390 p->decl);
1391 inform (DECL_SOURCE_LOCATION (target_node->decl),
1392 "resolver declaration here");
1394 t2 = NULL_TREE;
1399 if (t2
1400 && (!FUNC_OR_METHOD_TYPE_P (t2)
1401 || (prototype_p (t1)
1402 && prototype_p (t2)
1403 && !types_compatible_p (t1, t2))))
1405 /* Warn for incompatibilities. Avoid warning for functions
1406 without a prototype to make it possible to declare aliases
1407 without knowing the exact type, as libstdc++ does. */
1408 if (warning_at (DECL_SOURCE_LOCATION (p->decl), OPT_Wattributes,
1409 "%q+D alias between functions of incompatible "
1410 "types %qT and %qT", p->decl, t1, t2))
1411 inform (DECL_SOURCE_LOCATION (target_node->decl),
1412 "aliased declaration here");
1415 cgraph_node *src_node = cgraph_node::get (p->decl);
1416 if (src_node && src_node->definition)
1417 src_node->reset ();
1418 cgraph_node::create_alias (p->decl, target_node->decl);
1419 alias_pairs->unordered_remove (i);
1421 else if (VAR_P (p->decl)
1422 && target_node && is_a <varpool_node *> (target_node))
1424 varpool_node::create_alias (p->decl, target_node->decl);
1425 alias_pairs->unordered_remove (i);
1427 else
1429 error ("%q+D alias between function and variable is not supported",
1430 p->decl);
1431 inform (DECL_SOURCE_LOCATION (target_node->decl),
1432 "aliased declaration here");
1434 alias_pairs->unordered_remove (i);
1437 vec_free (alias_pairs);
1441 /* Figure out what functions we want to assemble. */
1443 static void
1444 mark_functions_to_output (void)
1446 bool check_same_comdat_groups = false;
1447 cgraph_node *node;
1449 if (flag_checking)
1450 FOR_EACH_FUNCTION (node)
1451 gcc_assert (!node->process);
1453 FOR_EACH_FUNCTION (node)
1455 tree decl = node->decl;
1457 gcc_assert (!node->process || node->same_comdat_group);
1458 if (node->process)
1459 continue;
1461 /* We need to output all local functions that are used and not
1462 always inlined, as well as those that are reachable from
1463 outside the current compilation unit. */
1464 if (node->analyzed
1465 && !node->thunk.thunk_p
1466 && !node->alias
1467 && !node->global.inlined_to
1468 && !TREE_ASM_WRITTEN (decl)
1469 && !DECL_EXTERNAL (decl))
1471 node->process = 1;
1472 if (node->same_comdat_group)
1474 cgraph_node *next;
1475 for (next = dyn_cast<cgraph_node *> (node->same_comdat_group);
1476 next != node;
1477 next = dyn_cast<cgraph_node *> (next->same_comdat_group))
1478 if (!next->thunk.thunk_p && !next->alias
1479 && !next->comdat_local_p ())
1480 next->process = 1;
1483 else if (node->same_comdat_group)
1485 if (flag_checking)
1486 check_same_comdat_groups = true;
1488 else
1490 /* We should've reclaimed all functions that are not needed. */
1491 if (flag_checking
1492 && !node->global.inlined_to
1493 && gimple_has_body_p (decl)
1494 /* FIXME: in an ltrans unit when the offline copy is outside a partition but
1495 inline copies are inside a partition, we can end up not removing the body
1496 since we no longer have an analyzed node pointing to it. */
1497 && !node->in_other_partition
1498 && !node->alias
1499 && !node->clones
1500 && !DECL_EXTERNAL (decl))
1502 node->debug ();
1503 internal_error ("failed to reclaim unneeded function");
1505 gcc_assert (node->global.inlined_to
1506 || !gimple_has_body_p (decl)
1507 || node->in_other_partition
1508 || node->clones
1509 || DECL_ARTIFICIAL (decl)
1510 || DECL_EXTERNAL (decl));
1515 if (flag_checking && check_same_comdat_groups)
1516 FOR_EACH_FUNCTION (node)
1517 if (node->same_comdat_group && !node->process)
1519 tree decl = node->decl;
1520 if (!node->global.inlined_to
1521 && gimple_has_body_p (decl)
1522 /* FIXME: in an ltrans unit when the offline copy is outside a
1523 partition but inline copies are inside a partition, we can
1524 end up not removing the body since we no longer have an
1525 analyzed node pointing to it. */
1526 && !node->in_other_partition
1527 && !node->clones
1528 && !DECL_EXTERNAL (decl))
1530 node->debug ();
1531 internal_error ("failed to reclaim unneeded function in same "
1532 "comdat group");
1537 /* DECL is a FUNCTION_DECL. Initialize data structures so DECL is a function
1538 in lowered GIMPLE form. IN_SSA is true if the GIMPLE is in SSA.
1540 Set current_function_decl and cfun to the newly constructed empty function body.
1541 Return the basic block in the function body. */
1543 basic_block
1544 init_lowered_empty_function (tree decl, bool in_ssa, profile_count count)
1546 basic_block bb;
1547 edge e;
1549 current_function_decl = decl;
1550 allocate_struct_function (decl, false);
1551 gimple_register_cfg_hooks ();
1552 init_empty_tree_cfg ();
1553 init_tree_ssa (cfun);
1555 if (in_ssa)
1557 init_ssa_operands (cfun);
1558 cfun->gimple_df->in_ssa_p = true;
1559 cfun->curr_properties |= PROP_ssa;
1562 DECL_INITIAL (decl) = make_node (BLOCK);
1563 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
1565 DECL_SAVED_TREE (decl) = error_mark_node;
1566 cfun->curr_properties |= (PROP_gimple_lcf | PROP_gimple_leh | PROP_gimple_any
1567 | PROP_cfg | PROP_loops);
1569 set_loops_for_fn (cfun, ggc_cleared_alloc<loops> ());
1570 init_loops_structure (cfun, loops_for_fn (cfun), 1);
1571 loops_for_fn (cfun)->state |= LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
1573 /* Create BB for body of the function and connect it properly. */
1574 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = count;
1575 ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency = BB_FREQ_MAX;
1576 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = count;
1577 EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency = BB_FREQ_MAX;
1578 bb = create_basic_block (NULL, ENTRY_BLOCK_PTR_FOR_FN (cfun));
1579 bb->count = count;
1580 bb->frequency = BB_FREQ_MAX;
1581 e = make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, EDGE_FALLTHRU);
1582 e->count = count;
1583 e->probability = profile_probability::always ();
1584 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
1585 e->count = count;
1586 e->probability = profile_probability::always ();
1587 add_bb_to_loop (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
1589 return bb;
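/* Usage note: expand_thunk below relies on this helper; it obtains a
   gimple_stmt_iterator on the returned block and appends the thunk body
   to it, with the ENTRY -> bb -> EXIT edges already in place.  */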
1592 /* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1593 offset indicated by VIRTUAL_OFFSET, if that is
1594 non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
1595 zero for a result adjusting thunk. */
1597 tree
1598 thunk_adjust (gimple_stmt_iterator * bsi,
1599 tree ptr, bool this_adjusting,
1600 HOST_WIDE_INT fixed_offset, tree virtual_offset)
1602 gassign *stmt;
1603 tree ret;
1605 if (this_adjusting
1606 && fixed_offset != 0)
1608 stmt = gimple_build_assign
1609 (ptr, fold_build_pointer_plus_hwi_loc (input_location,
1610 ptr,
1611 fixed_offset));
1612 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1615 /* If there's a virtual offset, look up that value in the vtable and
1616 adjust the pointer again. */
1617 if (virtual_offset)
1619 tree vtabletmp;
1620 tree vtabletmp2;
1621 tree vtabletmp3;
1623 if (!vtable_entry_type)
1625 tree vfunc_type = make_node (FUNCTION_TYPE);
1626 TREE_TYPE (vfunc_type) = integer_type_node;
1627 TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
1628 layout_type (vfunc_type);
1630 vtable_entry_type = build_pointer_type (vfunc_type);
1633 vtabletmp =
1634 create_tmp_reg (build_pointer_type
1635 (build_pointer_type (vtable_entry_type)), "vptr");
1637 /* The vptr is always at offset zero in the object. */
1638 stmt = gimple_build_assign (vtabletmp,
1639 build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
1640 ptr));
1641 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1643 /* Form the vtable address. */
1644 vtabletmp2 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp)),
1645 "vtableaddr");
1646 stmt = gimple_build_assign (vtabletmp2,
1647 build_simple_mem_ref (vtabletmp));
1648 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1650 /* Find the entry with the vcall offset. */
1651 stmt = gimple_build_assign (vtabletmp2,
1652 fold_build_pointer_plus_loc (input_location,
1653 vtabletmp2,
1654 virtual_offset));
1655 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1657 /* Get the offset itself. */
1658 vtabletmp3 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp2)),
1659 "vcalloffset");
1660 stmt = gimple_build_assign (vtabletmp3,
1661 build_simple_mem_ref (vtabletmp2));
1662 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1664 /* Adjust the `this' pointer. */
1665 ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
1666 ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
1667 GSI_CONTINUE_LINKING);
1670 if (!this_adjusting
1671 && fixed_offset != 0)
1672 /* Adjust the pointer by the constant. */
1674 tree ptrtmp;
1676 if (VAR_P (ptr))
1677 ptrtmp = ptr;
1678 else
1680 ptrtmp = create_tmp_reg (TREE_TYPE (ptr), "ptr");
1681 stmt = gimple_build_assign (ptrtmp, ptr);
1682 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1684 ptr = fold_build_pointer_plus_hwi_loc (input_location,
1685 ptrtmp, fixed_offset);
1688 /* Emit the statement and gimplify the adjustment expression. */
1689 ret = create_tmp_reg (TREE_TYPE (ptr), "adjusted_this");
1690 stmt = gimple_build_assign (ret, ptr);
1691 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1693 return ret;
1696 /* Expand thunk NODE to GIMPLE if possible.
1697 When FORCE_GIMPLE_THUNK is true, a GIMPLE thunk is created and
1698 no assembler is produced.
1699 When OUTPUT_ASM_THUNKS is true, also produce assembler for
1700 thunks that are not lowered. */
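/* A typical source of such thunks (illustrative only):

     struct A { virtual void f (); };
     struct B { virtual void g (); };
     struct C : A, B { void f (); void g (); };

   The slot for C::g in the B-in-C vtable receives a pointer to the B
   subobject, so it points to a this-adjusting thunk that shifts the
   incoming pointer back to the start of C before transferring control
   to C::g.  */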
1702 bool
1703 cgraph_node::expand_thunk (bool output_asm_thunks, bool force_gimple_thunk)
1705 bool this_adjusting = thunk.this_adjusting;
1706 HOST_WIDE_INT fixed_offset = thunk.fixed_offset;
1707 HOST_WIDE_INT virtual_value = thunk.virtual_value;
1708 tree virtual_offset = NULL;
1709 tree alias = callees->callee->decl;
1710 tree thunk_fndecl = decl;
1711 tree a;
1713 /* An instrumentation thunk is the same function with
1714 a different signature. We never need to expand it. */
1715 if (thunk.add_pointer_bounds_args)
1716 return false;
1718 if (!force_gimple_thunk && this_adjusting
1719 && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1720 virtual_value, alias))
1722 const char *fnname;
1723 tree fn_block;
1724 tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1726 if (!output_asm_thunks)
1728 analyzed = true;
1729 return false;
1732 if (in_lto_p)
1733 get_untransformed_body ();
1734 a = DECL_ARGUMENTS (thunk_fndecl);
1736 current_function_decl = thunk_fndecl;
1738 /* Ensure thunks are emitted in their correct sections. */
1739 resolve_unique_section (thunk_fndecl, 0,
1740 flag_function_sections);
1742 DECL_RESULT (thunk_fndecl)
1743 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1744 RESULT_DECL, 0, restype);
1745 DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
1746 fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
1748 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1749 create one. */
1750 fn_block = make_node (BLOCK);
1751 BLOCK_VARS (fn_block) = a;
1752 DECL_INITIAL (thunk_fndecl) = fn_block;
1753 BLOCK_SUPERCONTEXT (fn_block) = thunk_fndecl;
1754 allocate_struct_function (thunk_fndecl, false);
1755 init_function_start (thunk_fndecl);
1756 cfun->is_thunk = 1;
1757 insn_locations_init ();
1758 set_curr_insn_location (DECL_SOURCE_LOCATION (thunk_fndecl));
1759 prologue_location = curr_insn_location ();
1760 assemble_start_function (thunk_fndecl, fnname);
1762 targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1763 fixed_offset, virtual_value, alias);
1765 assemble_end_function (thunk_fndecl, fnname);
1766 insn_locations_finalize ();
1767 init_insn_lengths ();
1768 free_after_compilation (cfun);
1769 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1770 thunk.thunk_p = false;
1771 analyzed = false;
1773 else if (stdarg_p (TREE_TYPE (thunk_fndecl)))
1775 error ("generic thunk code fails for method %qD which uses %<...%>",
1776 thunk_fndecl);
1777 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1778 analyzed = true;
1779 return false;
1781 else
1783 tree restype;
1784 basic_block bb, then_bb, else_bb, return_bb;
1785 gimple_stmt_iterator bsi;
1786 int nargs = 0;
1787 tree arg;
1788 int i;
1789 tree resdecl;
1790 tree restmp = NULL;
1791 tree resbnd = NULL;
1793 gcall *call;
1794 greturn *ret;
1795 bool alias_is_noreturn = TREE_THIS_VOLATILE (alias);
1797 /* We may be called from expand_thunk that releases the body except for
1798 DECL_ARGUMENTS. In this case force_gimple_thunk is true. */
1799 if (in_lto_p && !force_gimple_thunk)
1800 get_untransformed_body ();
1801 a = DECL_ARGUMENTS (thunk_fndecl);
1803 current_function_decl = thunk_fndecl;
1805 /* Ensure thunks are emitted in their correct sections. */
1806 resolve_unique_section (thunk_fndecl, 0,
1807 flag_function_sections);
1809 DECL_IGNORED_P (thunk_fndecl) = 1;
1810 bitmap_obstack_initialize (NULL);
1812 if (thunk.virtual_offset_p)
1813 virtual_offset = size_int (virtual_value);
1815 /* Build the return declaration for the function. */
1816 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1817 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1819 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1820 DECL_ARTIFICIAL (resdecl) = 1;
1821 DECL_IGNORED_P (resdecl) = 1;
1822 DECL_RESULT (thunk_fndecl) = resdecl;
1823 DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
1825 else
1826 resdecl = DECL_RESULT (thunk_fndecl);
1828 bb = then_bb = else_bb = return_bb
1829 = init_lowered_empty_function (thunk_fndecl, true, count);
1831 bsi = gsi_start_bb (bb);
1833 /* Build call to the function being thunked. */
1834 if (!VOID_TYPE_P (restype)
1835 && (!alias_is_noreturn
1836 || TREE_ADDRESSABLE (restype)
1837 || TREE_CODE (TYPE_SIZE_UNIT (restype)) != INTEGER_CST))
1839 if (DECL_BY_REFERENCE (resdecl))
1841 restmp = gimple_fold_indirect_ref (resdecl);
1842 if (!restmp)
1843 restmp = build2 (MEM_REF,
1844 TREE_TYPE (TREE_TYPE (DECL_RESULT (alias))),
1845 resdecl,
1846 build_int_cst (TREE_TYPE
1847 (DECL_RESULT (alias)), 0));
1849 else if (!is_gimple_reg_type (restype))
1851 if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl)))
1853 restmp = resdecl;
1855 if (VAR_P (restmp))
1856 add_local_decl (cfun, restmp);
1857 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
1859 else
1860 restmp = create_tmp_var (restype, "retval");
1862 else
1863 restmp = create_tmp_reg (restype, "retval");
1866 for (arg = a; arg; arg = DECL_CHAIN (arg))
1867 nargs++;
1868 auto_vec<tree> vargs (nargs);
1869 i = 0;
1870 arg = a;
1871 if (this_adjusting)
1873 vargs.quick_push (thunk_adjust (&bsi, a, 1, fixed_offset,
1874 virtual_offset));
1875 arg = DECL_CHAIN (a);
1876 i = 1;
1879 if (nargs)
1880 for (; i < nargs; i++, arg = DECL_CHAIN (arg))
1882 tree tmp = arg;
1883 if (VECTOR_TYPE_P (TREE_TYPE (arg))
1884 || TREE_CODE (TREE_TYPE (arg)) == COMPLEX_TYPE)
1885 DECL_GIMPLE_REG_P (arg) = 1;
1887 if (!is_gimple_val (arg))
1889 tmp = create_tmp_reg (TYPE_MAIN_VARIANT
1890 (TREE_TYPE (arg)), "arg");
1891 gimple *stmt = gimple_build_assign (tmp, arg);
1892 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1894 vargs.quick_push (tmp);
1896 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
1897 callees->call_stmt = call;
1898 gimple_call_set_from_thunk (call, true);
1899 gimple_call_set_with_bounds (call, instrumentation_clone);
1901 /* Return slot optimization is always possible and in fact required to
1902 return values with DECL_BY_REFERENCE. */
1903 if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl))
1904 && (!is_gimple_reg_type (TREE_TYPE (resdecl))
1905 || DECL_BY_REFERENCE (resdecl)))
1906 gimple_call_set_return_slot_opt (call, true);
1908 if (restmp)
1910 gimple_call_set_lhs (call, restmp);
1911 gcc_assert (useless_type_conversion_p (TREE_TYPE (restmp),
1912 TREE_TYPE (TREE_TYPE (alias))));
1914 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
1915 if (!alias_is_noreturn)
1917 if (instrumentation_clone
1918 && !DECL_BY_REFERENCE (resdecl)
1919 && restmp
1920 && BOUNDED_P (restmp))
1922 resbnd = chkp_insert_retbnd_call (NULL, restmp, &bsi);
1923 create_edge (get_create (gimple_call_fndecl (gsi_stmt (bsi))),
1924 as_a <gcall *> (gsi_stmt (bsi)),
1925 callees->count, callees->frequency);
1928 if (restmp && !this_adjusting
1929 && (fixed_offset || virtual_offset))
1931 tree true_label = NULL_TREE;
1933 if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
1935 gimple *stmt;
1936 edge e;
1937 /* If the return type is a pointer, we need to
1938 protect against NULL. We know there will be an
1939 adjustment, because that's why we're emitting a
1940 thunk. */
1941 then_bb = create_basic_block (NULL, bb);
1942 then_bb->count = count - count.apply_scale (1, 16);
1943 then_bb->frequency = BB_FREQ_MAX - BB_FREQ_MAX / 16;
1944 return_bb = create_basic_block (NULL, then_bb);
1945 return_bb->count = count;
1946 return_bb->frequency = BB_FREQ_MAX;
1947 else_bb = create_basic_block (NULL, else_bb);
1948 else_bb->count = count.apply_scale (1, 16);
1949 else_bb->frequency = BB_FREQ_MAX / 16;
1950 add_bb_to_loop (then_bb, bb->loop_father);
1951 add_bb_to_loop (return_bb, bb->loop_father);
1952 add_bb_to_loop (else_bb, bb->loop_father);
1953 remove_edge (single_succ_edge (bb));
1954 true_label = gimple_block_label (then_bb);
1955 stmt = gimple_build_cond (NE_EXPR, restmp,
1956 build_zero_cst (TREE_TYPE (restmp)),
1957 NULL_TREE, NULL_TREE);
1958 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1959 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1960 e->probability = profile_probability::guessed_always ()
1961 .apply_scale (1, 16);
1962 e->count = count - count.apply_scale (1, 16);
1963 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1964 e->probability = profile_probability::guessed_always ()
1965 .apply_scale (1, 16);
1966 e->count = count.apply_scale (1, 16);
1967 make_single_succ_edge (return_bb,
1968 EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
1969 make_single_succ_edge (then_bb, return_bb, EDGE_FALLTHRU);
1970 e = make_edge (else_bb, return_bb, EDGE_FALLTHRU);
1971 e->probability = profile_probability::always ();
1972 e->count = count.apply_scale (1, 16);
1973 bsi = gsi_last_bb (then_bb);
1976 restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
1977 fixed_offset, virtual_offset);
1978 if (true_label)
1980 gimple *stmt;
1981 bsi = gsi_last_bb (else_bb);
1982 stmt = gimple_build_assign (restmp,
1983 build_zero_cst (TREE_TYPE (restmp)));
1984 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1985 bsi = gsi_last_bb (return_bb);
1988 else
1989 gimple_call_set_tail (call, true);
1991 /* Build return value. */
1992 if (!DECL_BY_REFERENCE (resdecl))
1993 ret = gimple_build_return (restmp);
1994 else
1995 ret = gimple_build_return (resdecl);
1996 gimple_return_set_retbnd (ret, resbnd);
1998 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
2000 else
2002 gimple_call_set_tail (call, true);
2003 remove_edge (single_succ_edge (bb));
2006 cfun->gimple_df->in_ssa_p = true;
2007 profile_status_for_fn (cfun)
2008 = count.initialized_p () ? PROFILE_READ : PROFILE_GUESSED;
2009 /* FIXME: C++ FE should stop setting TREE_ASM_WRITTEN on thunks. */
2010 TREE_ASM_WRITTEN (thunk_fndecl) = false;
2011 delete_unreachable_blocks ();
2012 update_ssa (TODO_update_ssa);
2013 checking_verify_flow_info ();
2014 free_dominance_info (CDI_DOMINATORS);
2016 /* Since we want to emit the thunk, we explicitly mark its name as
2017 referenced. */
2018 thunk.thunk_p = false;
2019 lowered = true;
2020 bitmap_obstack_release (NULL);
2022 current_function_decl = NULL;
2023 set_cfun (NULL);
2024 return true;
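/* Illustrative sketch, not part of the original cgraphunit.c: a minimal
   C++ input for which the front end records the kind of this-adjusting
   thunk that expand_thunk above materializes, either directly via
   targetm.asm_out.output_mi_thunk or as lowered GIMPLE.  All names below
   are hypothetical.  */
#if 0
struct A { virtual int f () { return 1; } };
struct B { virtual int f () { return 2; } };

/* C::f overrides both A::f and B::f.  Calling f through a B * reaches a
   thunk whose fixed_offset adjustment converts the B-subobject pointer
   back into a C * before transferring control to the real C::f.  */
struct C : A, B { int f () { return 3; } };

int
call_through_b (B *b)
{
  return b->f ();	/* For a C object this dispatches via the thunk.  */
}
#endif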
2027 /* Assemble thunks and aliases associated with the node. */
2029 void
2030 cgraph_node::assemble_thunks_and_aliases (void)
2032 cgraph_edge *e;
2033 ipa_ref *ref;
2035 for (e = callers; e;)
2036 if (e->caller->thunk.thunk_p
2037 && !e->caller->global.inlined_to
2038 && !e->caller->thunk.add_pointer_bounds_args)
2040 cgraph_node *thunk = e->caller;
2042 e = e->next_caller;
2043 thunk->expand_thunk (true, false);
2044 thunk->assemble_thunks_and_aliases ();
2046 else
2047 e = e->next_caller;
2049 FOR_EACH_ALIAS (this, ref)
2051 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2052 if (!alias->transparent_alias)
2054 bool saved_written = TREE_ASM_WRITTEN (decl);
2056 /* Force assemble_alias to really output the alias this time instead
2057 of buffering it in the alias pairs list. */
2058 TREE_ASM_WRITTEN (decl) = 1;
2059 do_assemble_alias (alias->decl,
2060 DECL_ASSEMBLER_NAME (decl));
2061 alias->assemble_thunks_and_aliases ();
2062 TREE_ASM_WRITTEN (decl) = saved_written;
2067 /* Expand function specified by node. */
2069 void
2070 cgraph_node::expand (void)
2072 location_t saved_loc;
2074 /* We ought to not compile any inline clones. */
2075 gcc_assert (!global.inlined_to);
2077 /* __RTL functions are compiled as soon as they are parsed, so don't
2078 do it again. */
2079 if (native_rtl_p ())
2080 return;
2082 announce_function (decl);
2083 process = 0;
2084 gcc_assert (lowered);
2085 get_untransformed_body ();
2087 /* Generate RTL for the body of DECL. */
2089 timevar_push (TV_REST_OF_COMPILATION);
2091 gcc_assert (symtab->global_info_ready);
2093 /* Initialize the default bitmap obstack. */
2094 bitmap_obstack_initialize (NULL);
2096 /* Initialize the RTL code for the function. */
2097 saved_loc = input_location;
2098 input_location = DECL_SOURCE_LOCATION (decl);
2100 gcc_assert (DECL_STRUCT_FUNCTION (decl));
2101 push_cfun (DECL_STRUCT_FUNCTION (decl));
2102 init_function_start (decl);
2104 gimple_register_cfg_hooks ();
2106 bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation*/
2108 execute_all_ipa_transforms ();
2110 /* Perform all tree transforms and optimizations. */
2112 /* Signal the start of passes. */
2113 invoke_plugin_callbacks (PLUGIN_ALL_PASSES_START, NULL);
2115 execute_pass_list (cfun, g->get_passes ()->all_passes);
2117 /* Signal the end of passes. */
2118 invoke_plugin_callbacks (PLUGIN_ALL_PASSES_END, NULL);
2120 bitmap_obstack_release (&reg_obstack);
2122 /* Release the default bitmap obstack. */
2123 bitmap_obstack_release (NULL);
2125 /* If requested, warn about function definitions where the function will
2126 return a value (usually of some struct or union type) which itself will
2127 take up a lot of stack space. */
2128 if (warn_larger_than && !DECL_EXTERNAL (decl) && TREE_TYPE (decl))
2130 tree ret_type = TREE_TYPE (TREE_TYPE (decl));
2132 if (ret_type && TYPE_SIZE_UNIT (ret_type)
2133 && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
2134 && 0 < compare_tree_int (TYPE_SIZE_UNIT (ret_type),
2135 larger_than_size))
2137 unsigned int size_as_int
2138 = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));
2140 if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
2141 warning (OPT_Wlarger_than_, "size of return value of %q+D is %u bytes",
2142 decl, size_as_int);
2143 else
2144 warning (OPT_Wlarger_than_, "size of return value of %q+D is larger than %wd bytes",
2145 decl, larger_than_size);
2149 gimple_set_body (decl, NULL);
2150 if (DECL_STRUCT_FUNCTION (decl) == 0
2151 && !cgraph_node::get (decl)->origin)
2153 /* Stop pointing to the local nodes about to be freed.
2154 But DECL_INITIAL must remain nonzero so we know this
2155 was an actual function definition.
2156 For a nested function, this is done in c_pop_function_context.
2157 If rest_of_compilation set this to 0, leave it 0. */
2158 if (DECL_INITIAL (decl) != 0)
2159 DECL_INITIAL (decl) = error_mark_node;
2162 input_location = saved_loc;
2164 ggc_collect ();
2165 timevar_pop (TV_REST_OF_COMPILATION);
2167 /* Make sure that the back end didn't give up on compiling. */
2168 gcc_assert (TREE_ASM_WRITTEN (decl));
2169 if (cfun)
2170 pop_cfun ();
2172 /* It would make a lot more sense to output thunks before the function body
2173 to get more forward and fewer backward jumps. This, however, would require
2174 solving a problem with comdats. See PR48668. Also, aliases must come after
2175 the function itself to keep one-pass assemblers, like the one on AIX, happy.
2176 See PR 50689. FIXME: Perhaps thunks should be moved before the function IFF
2177 they are not in comdat groups. */
2178 assemble_thunks_and_aliases ();
2179 release_body ();
2180 /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
2181 points to the dead function body. */
2182 remove_callees ();
2183 remove_all_references ();
2186 /* Node comparator responsible for ordering nodes by the time at which a
2187 function was first executed (its time profile). */
2189 static int
2190 node_cmp (const void *pa, const void *pb)
2192 const cgraph_node *a = *(const cgraph_node * const *) pa;
2193 const cgraph_node *b = *(const cgraph_node * const *) pb;
2195 /* Functions with a time profile must come before those without one. */
2196 if (!a->tp_first_run || !b->tp_first_run)
2197 return a->tp_first_run - b->tp_first_run;
2199 return a->tp_first_run != b->tp_first_run
2200 ? b->tp_first_run - a->tp_first_run
2201 : b->order - a->order;
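/* Illustrative sketch, not part of the original file: the ordering that
   node_cmp produces when combined with the reverse walk over the sorted
   array in expand_all_functions below.  The data and names are made up.  */
#if 0
#include <stdio.h>
#include <stdlib.h>

struct fake_node { const char *name; int tp_first_run; int order; };

/* Same comparison logic as node_cmp, applied to the fake nodes.  */
static int
fake_node_cmp (const void *pa, const void *pb)
{
  const fake_node *a = (const fake_node *) pa;
  const fake_node *b = (const fake_node *) pb;
  if (!a->tp_first_run || !b->tp_first_run)
    return a->tp_first_run - b->tp_first_run;
  return a->tp_first_run != b->tp_first_run
	 ? b->tp_first_run - a->tp_first_run
	 : b->order - a->order;
}

int
main ()
{
  fake_node nodes[] = { { "main", 1, 0 }, { "helper", 3, 1 },
			{ "cold_init", 0, 2 }, { "hot_loop", 2, 3 } };
  qsort (nodes, 4, sizeof (fake_node), fake_node_cmp);
  /* Walking the sorted array backwards, as expand_all_functions does,
     prints: main, hot_loop, helper, cold_init -- i.e. profiled functions
     in the order they first ran, then the unprofiled one.  */
  for (int i = 3; i >= 0; i--)
    printf ("%s\n", nodes[i].name);
  return 0;
}
#endif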
2204 /* Expand all functions that must be output.
2206 Attempt to topologically sort the nodes so a function is output when
2207 all called functions are already assembled to allow data to be
2208 propagated across the callgraph. Use a stack to get smaller distance
2209 between a function and its callees (later we may choose to use a more
2210 sophisticated algorithm for function reordering; we will likely want
2211 to use subsections to make the output functions appear in top-down
2212 order). */
2214 static void
2215 expand_all_functions (void)
2217 cgraph_node *node;
2218 cgraph_node **order = XCNEWVEC (cgraph_node *,
2219 symtab->cgraph_count);
2220 unsigned int expanded_func_count = 0, profiled_func_count = 0;
2221 int order_pos, new_order_pos = 0;
2222 int i;
2224 order_pos = ipa_reverse_postorder (order);
2225 gcc_assert (order_pos == symtab->cgraph_count);
2227 /* The garbage collector may remove inline clones we eliminated during
2228 optimization, so we must be sure not to reference them. */
2229 for (i = 0; i < order_pos; i++)
2230 if (order[i]->process)
2231 order[new_order_pos++] = order[i];
2233 if (flag_profile_reorder_functions)
2234 qsort (order, new_order_pos, sizeof (cgraph_node *), node_cmp);
2236 for (i = new_order_pos - 1; i >= 0; i--)
2238 node = order[i];
2240 if (node->process)
2242 expanded_func_count++;
2243 if (node->tp_first_run)
2244 profiled_func_count++;
2246 if (symtab->dump_file)
2247 fprintf (symtab->dump_file,
2248 "Time profile order in expand_all_functions:%s:%d\n",
2249 node->asm_name (), node->tp_first_run);
2250 node->process = 0;
2251 node->expand ();
2255 if (dump_file)
2256 fprintf (dump_file, "Expanded functions with time profile (%s):%u/%u\n",
2257 main_input_filename, profiled_func_count, expanded_func_count);
2259 if (symtab->dump_file && flag_profile_reorder_functions)
2260 fprintf (symtab->dump_file, "Expanded functions with time profile:%u/%u\n",
2261 profiled_func_count, expanded_func_count);
2263 symtab->process_new_functions ();
2264 free_gimplify_stack ();
2266 free (order);
2269 /* This is used to sort the node types by the cgraph order number. */
2271 enum cgraph_order_sort_kind
2273 ORDER_UNDEFINED = 0,
2274 ORDER_FUNCTION,
2275 ORDER_VAR,
2276 ORDER_VAR_UNDEF,
2277 ORDER_ASM
2280 struct cgraph_order_sort
2282 enum cgraph_order_sort_kind kind;
2283 union
2285 cgraph_node *f;
2286 varpool_node *v;
2287 asm_node *a;
2288 } u;
2291 /* Output all functions, variables, and asm statements in the order
2292 according to their order fields, which is the order in which they
2293 appeared in the file. This implements -fno-toplevel-reorder. In
2294 this mode we may output functions and variables which don't really
2295 need to be output. */
2297 static void
2298 output_in_order (void)
2300 int max;
2301 cgraph_order_sort *nodes;
2302 int i;
2303 cgraph_node *pf;
2304 varpool_node *pv;
2305 asm_node *pa;
2306 max = symtab->order;
2307 nodes = XCNEWVEC (cgraph_order_sort, max);
2309 FOR_EACH_DEFINED_FUNCTION (pf)
2311 if (pf->process && !pf->thunk.thunk_p && !pf->alias)
2313 if (!pf->no_reorder)
2314 continue;
2315 i = pf->order;
2316 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
2317 nodes[i].kind = ORDER_FUNCTION;
2318 nodes[i].u.f = pf;
2322 /* There is a similar loop in symbol_table::output_variables.
2323 Please keep them in sync. */
2324 FOR_EACH_VARIABLE (pv)
2326 if (!pv->no_reorder)
2327 continue;
2328 if (DECL_HARD_REGISTER (pv->decl)
2329 || DECL_HAS_VALUE_EXPR_P (pv->decl))
2330 continue;
2331 i = pv->order;
2332 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
2333 nodes[i].kind = pv->definition ? ORDER_VAR : ORDER_VAR_UNDEF;
2334 nodes[i].u.v = pv;
2337 for (pa = symtab->first_asm_symbol (); pa; pa = pa->next)
2339 i = pa->order;
2340 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
2341 nodes[i].kind = ORDER_ASM;
2342 nodes[i].u.a = pa;
2345 /* In toplevel reorder mode we output all statics; mark them as needed. */
2347 for (i = 0; i < max; ++i)
2348 if (nodes[i].kind == ORDER_VAR)
2349 nodes[i].u.v->finalize_named_section_flags ();
2351 for (i = 0; i < max; ++i)
2353 switch (nodes[i].kind)
2355 case ORDER_FUNCTION:
2356 nodes[i].u.f->process = 0;
2357 nodes[i].u.f->expand ();
2358 break;
2360 case ORDER_VAR:
2361 nodes[i].u.v->assemble_decl ();
2362 break;
2364 case ORDER_VAR_UNDEF:
2365 assemble_undefined_decl (nodes[i].u.v->decl);
2366 break;
2368 case ORDER_ASM:
2369 assemble_asm (nodes[i].u.a->asm_str);
2370 break;
2372 case ORDER_UNDEFINED:
2373 break;
2375 default:
2376 gcc_unreachable ();
2380 symtab->clear_asm_symbols ();
2382 free (nodes);
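/* Illustrative sketch, not part of the original file: a translation unit
   where the -fno-toplevel-reorder behavior implemented by output_in_order
   matters, because the toplevel asm must stay between the two definitions
   exactly as written.  The symbol names are hypothetical.  */
#if 0
int before_marker = 1;

/* With -fno-toplevel-reorder the ORDER_ASM entry for this statement is
   emitted after before_marker and before after_marker; with reordering
   enabled the compiler may move it.  */
asm (".globl example_marker\nexample_marker:");

int after_marker = 2;
#endif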
2385 static void
2386 ipa_passes (void)
2388 gcc::pass_manager *passes = g->get_passes ();
2390 set_cfun (NULL);
2391 current_function_decl = NULL;
2392 gimple_register_cfg_hooks ();
2393 bitmap_obstack_initialize (NULL);
2395 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);
2397 if (!in_lto_p)
2399 execute_ipa_pass_list (passes->all_small_ipa_passes);
2400 if (seen_error ())
2401 return;
2404 /* This extra symtab_remove_unreachable_nodes pass tends to catch some
2405 devirtualization and other changes where removal needs to iterate. */
2406 symtab->remove_unreachable_nodes (symtab->dump_file);
2408 /* If pass_all_early_optimizations was not scheduled, the state of
2409 the cgraph will not be properly updated. Update it now. */
2410 if (symtab->state < IPA_SSA)
2411 symtab->state = IPA_SSA;
2413 if (!in_lto_p)
2415 /* Generate coverage variables and constructors. */
2416 coverage_finish ();
2418 /* Process new functions added. */
2419 set_cfun (NULL);
2420 current_function_decl = NULL;
2421 symtab->process_new_functions ();
2423 execute_ipa_summary_passes
2424 ((ipa_opt_pass_d *) passes->all_regular_ipa_passes);
2427 /* Some targets need to handle LTO assembler output specially. */
2428 if (flag_generate_lto || flag_generate_offload)
2429 targetm.asm_out.lto_start ();
2431 if (!in_lto_p)
2433 if (g->have_offload)
2435 section_name_prefix = OFFLOAD_SECTION_NAME_PREFIX;
2436 lto_stream_offload_p = true;
2437 ipa_write_summaries ();
2438 lto_stream_offload_p = false;
2440 if (flag_lto)
2442 section_name_prefix = LTO_SECTION_NAME_PREFIX;
2443 lto_stream_offload_p = false;
2444 ipa_write_summaries ();
2448 if (flag_generate_lto || flag_generate_offload)
2449 targetm.asm_out.lto_end ();
2451 if (!flag_ltrans && (in_lto_p || !flag_lto || flag_fat_lto_objects))
2452 execute_ipa_pass_list (passes->all_regular_ipa_passes);
2453 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
2455 bitmap_obstack_release (NULL);
2459 /* Return the identifier of the symbol that DECL's "alias" attribute names. */
2461 static tree
2462 get_alias_symbol (tree decl)
2464 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
2465 return get_identifier (TREE_STRING_POINTER
2466 (TREE_VALUE (TREE_VALUE (alias))));
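/* Illustrative sketch, not part of the original file: the user-level
   construct whose "alias" attribute string get_alias_symbol extracts.
   The names are hypothetical; extern "C" avoids C++ name mangling so the
   attribute string can name the target directly.  */
#if 0
extern "C" int real_impl (void) { return 42; }

/* DECL_ATTRIBUTES of old_name carries the attribute alias ("real_impl"),
   and get_alias_symbol returns the identifier "real_impl".  */
extern "C" int old_name (void) __attribute__ ((alias ("real_impl")));
#endif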
2470 /* Weakrefs may be associated with external decls and thus not output
2471 at expansion time. Emit all necessary aliases. */
2473 void
2474 symbol_table::output_weakrefs (void)
2476 symtab_node *node;
2477 cgraph_node *cnode;
2478 FOR_EACH_SYMBOL (node)
2479 if (node->alias
2480 && !TREE_ASM_WRITTEN (node->decl)
2481 && (!(cnode = dyn_cast <cgraph_node *> (node))
2482 || !cnode->instrumented_version
2483 || !TREE_ASM_WRITTEN (cnode->instrumented_version->decl))
2484 && node->weakref)
2486 tree target;
2488 /* Weakrefs are special in that they do not require the target to be
2489 defined in the current compilation unit, so it is a bit hard to work
2490 out what we want to alias.
2491 When the alias target is defined, we need to fetch it from the symtab
2492 reference; otherwise it is given by alias_target. */
2493 if (node->alias_target)
2494 target = (DECL_P (node->alias_target)
2495 ? DECL_ASSEMBLER_NAME (node->alias_target)
2496 : node->alias_target);
2497 else if (node->analyzed)
2498 target = DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl);
2499 else
2501 gcc_unreachable ();
2502 target = get_alias_symbol (node->decl);
2504 do_assemble_alias (node->decl, target);
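/* Illustrative sketch, not part of the original file: a weakref of the
   kind output_weakrefs has to emit.  Because the target does not have to
   be defined in this compilation unit, the alias may only be emittable
   late, after everything else has been output.  Names are hypothetical.  */
#if 0
/* Declare local_hook as a weak reference to maybe_present_hook; if that
   symbol is absent at link time, &local_hook evaluates to a null pointer
   instead of causing a link error.  */
static void local_hook (void) __attribute__ ((weakref ("maybe_present_hook")));

void
call_hook_if_available (void)
{
  if (local_hook)
    local_hook ();
}
#endif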
2508 /* Perform simple optimizations based on callgraph. */
2510 void
2511 symbol_table::compile (void)
2513 if (seen_error ())
2514 return;
2516 symtab_node::checking_verify_symtab_nodes ();
2518 timevar_push (TV_CGRAPHOPT);
2519 if (pre_ipa_mem_report)
2521 fprintf (stderr, "Memory consumption before IPA\n");
2522 dump_memory_report (false);
2524 if (!quiet_flag)
2525 fprintf (stderr, "Performing interprocedural optimizations\n");
2526 state = IPA;
2528 /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE. */
2529 if (flag_generate_lto || flag_generate_offload)
2530 lto_streamer_hooks_init ();
2532 /* Don't run the IPA passes if there were any error or sorry messages. */
2533 if (!seen_error ())
2534 ipa_passes ();
2536 /* Do nothing else if any IPA pass found errors or if we are just streaming LTO. */
2537 if (seen_error ()
2538 || (!in_lto_p && flag_lto && !flag_fat_lto_objects))
2540 timevar_pop (TV_CGRAPHOPT);
2541 return;
2544 global_info_ready = true;
2545 if (dump_file)
2547 fprintf (dump_file, "Optimized ");
2548 symtab->dump (dump_file);
2550 if (post_ipa_mem_report)
2552 fprintf (stderr, "Memory consumption after IPA\n");
2553 dump_memory_report (false);
2555 timevar_pop (TV_CGRAPHOPT);
2557 /* Output everything. */
2558 (*debug_hooks->assembly_start) ();
2559 if (!quiet_flag)
2560 fprintf (stderr, "Assembling functions:\n");
2561 symtab_node::checking_verify_symtab_nodes ();
2563 bitmap_obstack_initialize (NULL);
2564 execute_ipa_pass_list (g->get_passes ()->all_late_ipa_passes);
2565 bitmap_obstack_release (NULL);
2566 mark_functions_to_output ();
2568 /* When weakref support is missing, we automatically translate all
2569 references to NODE to references to its ultimate alias target.
2570 The renaming mechanism uses the flag IDENTIFIER_TRANSPARENT_ALIAS and
2571 TREE_CHAIN.
2573 Set up this mapping before we output any assembler but once we are sure
2574 that all symbol renaming is done.
2576 FIXME: All this ugliness can go away if we just do renaming at the GIMPLE
2577 level by physically rewriting the IL. At the moment we can only redirect
2578 calls, so we need infrastructure for renaming references as well. */
2579 #ifndef ASM_OUTPUT_WEAKREF
2580 symtab_node *node;
2582 FOR_EACH_SYMBOL (node)
2583 if (node->alias
2584 && lookup_attribute ("weakref", DECL_ATTRIBUTES (node->decl)))
2586 IDENTIFIER_TRANSPARENT_ALIAS
2587 (DECL_ASSEMBLER_NAME (node->decl)) = 1;
2588 TREE_CHAIN (DECL_ASSEMBLER_NAME (node->decl))
2589 = (node->alias_target ? node->alias_target
2590 : DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl));
2592 #endif
2594 state = EXPANSION;
2596 /* First output asm statements and anything ordered. The process flag
2597 is cleared for these nodes, so we skip them later. */
2598 output_in_order ();
2599 expand_all_functions ();
2600 output_variables ();
2602 process_new_functions ();
2603 state = FINISHED;
2604 output_weakrefs ();
2606 if (dump_file)
2608 fprintf (dump_file, "\nFinal ");
2609 symtab->dump (dump_file);
2611 if (!flag_checking)
2612 return;
2613 symtab_node::verify_symtab_nodes ();
2614 /* Double check that all inline clones are gone and that all
2615 function bodies have been released from memory. */
2616 if (!seen_error ())
2618 cgraph_node *node;
2619 bool error_found = false;
2621 FOR_EACH_DEFINED_FUNCTION (node)
2622 if (node->global.inlined_to
2623 || gimple_has_body_p (node->decl))
2625 error_found = true;
2626 node->debug ();
2628 if (error_found)
2629 internal_error ("nodes with unreleased memory found");
2634 /* Analyze the whole compilation unit once it is parsed completely. */
2636 void
2637 symbol_table::finalize_compilation_unit (void)
2639 timevar_push (TV_CGRAPH);
2641 /* If we're here there's no current function anymore. Some frontends
2642 are lazy in clearing these. */
2643 current_function_decl = NULL;
2644 set_cfun (NULL);
2646 /* Do not skip analyzing the functions if there were errors; otherwise we
2647 would miss diagnostics for the following functions. */
2649 /* Emit size functions we didn't inline. */
2650 finalize_size_functions ();
2652 /* Mark alias targets necessary and emit diagnostics. */
2653 handle_alias_pairs ();
2655 if (!quiet_flag)
2657 fprintf (stderr, "\nAnalyzing compilation unit\n");
2658 fflush (stderr);
2661 if (flag_dump_passes)
2662 dump_passes ();
2664 /* Gimplify and lower all functions, compute reachability and
2665 remove unreachable nodes. */
2666 analyze_functions (/*first_time=*/true);
2668 /* Mark alias targets necessary and emit diagnostics. */
2669 handle_alias_pairs ();
2671 /* Gimplify and lower thunks. */
2672 analyze_functions (/*first_time=*/false);
2674 /* Offloading requires LTO infrastructure. */
2675 if (!in_lto_p && g->have_offload)
2676 flag_generate_offload = 1;
2678 if (!seen_error ())
2680 /* Emit early debug for reachable functions, and by consequence,
2681 locally scoped symbols. */
2682 struct cgraph_node *cnode;
2683 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (cnode)
2684 (*debug_hooks->early_global_decl) (cnode->decl);
2686 /* Clean up anything that needs cleaning up after initial debug
2687 generation. */
2688 (*debug_hooks->early_finish) (main_input_filename);
2691 /* Finally drive the pass manager. */
2692 compile ();
2694 timevar_pop (TV_CGRAPH);
2697 /* Reset all state within cgraphunit.c so that we can rerun the compiler
2698 within the same process. For use by toplev::finalize. */
2700 void
2701 cgraphunit_c_finalize (void)
2703 gcc_assert (cgraph_new_nodes.length () == 0);
2704 cgraph_new_nodes.truncate (0);
2706 vtable_entry_type = NULL;
2707 queued_nodes = &symtab_terminator;
2709 first_analyzed = NULL;
2710 first_analyzed_var = NULL;
2713 /* Create a wrapper that turns this cgraph_node into a call to the TARGET
2714 node. A thunk is used for this kind of wrapper method. */
2716 void
2717 cgraph_node::create_wrapper (cgraph_node *target)
2719 /* Preserve DECL_RESULT so we get the right by-reference flag. */
2720 tree decl_result = DECL_RESULT (decl);
2722 /* Remove the function's body but keep its arguments to be reused
2723 for the thunk. */
2724 release_body (true);
2725 reset ();
2727 DECL_UNINLINABLE (decl) = false;
2728 DECL_RESULT (decl) = decl_result;
2729 DECL_INITIAL (decl) = NULL;
2730 allocate_struct_function (decl, false);
2731 set_cfun (NULL);
2733 /* Turn alias into thunk and expand it into GIMPLE representation. */
2734 definition = true;
2736 memset (&thunk, 0, sizeof (cgraph_thunk_info));
2737 thunk.thunk_p = true;
2738 create_edge (target, NULL, count, CGRAPH_FREQ_BASE);
2739 callees->can_throw_external = !TREE_NOTHROW (target->decl);
2741 tree arguments = DECL_ARGUMENTS (decl);
2743 while (arguments)
2745 TREE_ADDRESSABLE (arguments) = false;
2746 arguments = TREE_CHAIN (arguments);
2749 expand_thunk (false, true);
2751 /* Inline summary set-up. */
2752 analyze ();
2753 inline_analyze_function (this);
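/* Illustrative sketch, not part of the original file: what create_wrapper
   conceptually turns the node into.  After the thunk is expanded, the
   wrapper's body is just a pass-through call that forwards the arguments
   and the return value to TARGET.  Names are hypothetical.  */
#if 0
int target_impl (int x) { return x + 1; }

/* Rough source-level equivalent of the GIMPLE body built for the wrapper
   by expand_thunk (no this-adjustment, no virtual offset).  */
int wrapped (int x) { return target_impl (x); }
#endif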
2756 #include "gt-cgraphunit.h"