PR82045: Avoid passing machine modes through "..."
[official-gcc.git] / gcc / cgraphunit.c
1 /* Driver of optimization process
2 Copyright (C) 2003-2017 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* This module implements main driver of compilation process.
23 The main scope of this file is to act as an interface in between
24 tree based frontends and the backend.
26 The front-end is supposed to use the following functionality (an illustrative usage sketch follows this comment):
28 - finalize_function
30 This function is called once the front-end has parsed the whole body of a function
31 and it is certain that neither the function body nor the declaration will change.
33 (There is one exception needed for implementing GCC extern inline
34 function.)
36 - varpool_finalize_decl
38 This function has same behavior as the above but is used for static
39 variables.
41 - add_asm_node
43 Insert new toplevel ASM statement
45 - finalize_compilation_unit
47 This function is called once (source level) compilation unit is finalized
48 and it will no longer change.
50 The symbol table is constructed starting from the trivially needed
51 symbols finalized by the frontend. Functions are lowered into
52 GIMPLE representation and callgraph/reference lists are constructed.
53 Those are used to discover other necessary functions and variables.
55 At the end the bodies of unreachable functions are removed.
57 The function can be called multiple times when multiple source level
58 compilation units are combined.
60 - compile
62 This passes control to the back-end. Optimizations are performed and
63 final assembler is generated. This is done in the following way. Note
64 that with link time optimization the process is split into three
65 stages (compile time, linktime analysis and parallel linktime as
66 indicated below).
68 Compile time:
70 1) Inter-procedural optimization.
71 (ipa_passes)
73 This part is further split into:
75 a) early optimizations. These are local passes executed in
76 the topological order on the callgraph.
78 The purpose of early optimizations is to optimize away simple
79 things that may otherwise confuse IP analysis. Very simple
80 propagation across the callgraph is done, e.g. to discover
81 functions without side effects, and simple inlining is performed.
83 b) early small interprocedural passes.
85 Those are interprocedural passes executed only at compilation
86 time. These include, for example, transactional memory lowering,
87 unreachable code removal and other simple transformations.
89 c) IP analysis stage. All interprocedural passes do their
90 analysis.
92 Interprocedural passes differ from small interprocedural
93 passes by their ability to operate across the whole program
94 at linktime. Their analysis stage is performed early to
95 reduce both linking times and linktime memory usage by
96 not having to represent the whole program in memory.
98 d) LTO streaming. When doing LTO, everything important gets
99 streamed into the object file.
101 Compile time and/or linktime analysis stage (WPA):
103 At linktime, units get streamed back and the symbol table is
104 merged. Function bodies are not streamed in and are not
105 available.
106 e) IP propagation stage. All IP passes execute their
107 IP propagation. This is done based on the earlier analysis
108 without having function bodies at hand.
109 f) Ltrans streaming. When doing WHOPR LTO, the program
110 is partitioned and streamed into multiple object files.
112 Compile time and/or parallel linktime stage (ltrans)
114 Each of the object files is streamed back and compiled
115 separately. Now the function bodies become available
116 again.
118 2) Virtual clone materialization
119 (cgraph_materialize_clone)
121 IP passes can produce copies of existing functions (such
122 as versioned clones or inline clones) without actually
123 manipulating their bodies by creating virtual clones in
124 the callgraph. At this time the virtual clones are
125 turned into real functions
126 3) IP transformation
128 All IP passes transform function bodies based on earlier
129 decision of the IP propagation.
131 4) late small IP passes
133 Simple IP passes working within single program partition.
135 5) Expansion
136 (expand_all_functions)
138 At this stage functions that need to be output into
139 assembler are identified and compiled in topological order.
140 6) Output of variables and aliases
141 Now it is known which variable references were not optimized
142 out and thus all variables are output to the file.
144 Note that with -fno-toplevel-reorder passes 5 and 6
145 are combined together in cgraph_output_in_order.
147 Finally there are functions to manipulate the callgraph from
148 the backend.
149 - cgraph_add_new_function is used to add backend produced
150 functions introduced after the unit is finalized.
151 The functions are enqueued for later processing and inserted
152 into callgraph with cgraph_process_new_functions.
154 - cgraph_function_versioning
156 produces a copy of a function into a new one (a version)
157 and applies simple transformations.
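/* Illustrative sketch only, not part of the driver itself: roughly how a
   front end is expected to drive the interface described above (spelled
   cgraph_node::finalize_function and varpool_node::finalize_decl in the
   current sources).  The parse_* helpers are hypothetical front-end code.

       tree fndecl = parse_function ();
       cgraph_node::finalize_function (fndecl, false);

       tree vdecl = parse_file_scope_variable ();
       varpool_node::finalize_decl (vdecl);

   and, once the whole translation unit has been parsed:

       symtab->finalize_compilation_unit ();

   after which this driver performs stages 1) through 6) outlined above.  */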
160 #include "config.h"
161 #include "system.h"
162 #include "coretypes.h"
163 #include "backend.h"
164 #include "target.h"
165 #include "rtl.h"
166 #include "tree.h"
167 #include "gimple.h"
168 #include "cfghooks.h"
169 #include "regset.h" /* FIXME: For reg_obstack. */
170 #include "alloc-pool.h"
171 #include "tree-pass.h"
172 #include "stringpool.h"
173 #include "gimple-ssa.h"
174 #include "cgraph.h"
175 #include "coverage.h"
176 #include "lto-streamer.h"
177 #include "fold-const.h"
178 #include "varasm.h"
179 #include "stor-layout.h"
180 #include "output.h"
181 #include "cfgcleanup.h"
182 #include "gimple-fold.h"
183 #include "gimplify.h"
184 #include "gimple-iterator.h"
185 #include "gimplify-me.h"
186 #include "tree-cfg.h"
187 #include "tree-into-ssa.h"
188 #include "tree-ssa.h"
189 #include "langhooks.h"
190 #include "toplev.h"
191 #include "debug.h"
192 #include "symbol-summary.h"
193 #include "tree-vrp.h"
194 #include "ipa-prop.h"
195 #include "gimple-pretty-print.h"
196 #include "plugin.h"
197 #include "ipa-fnsummary.h"
198 #include "ipa-utils.h"
199 #include "except.h"
200 #include "cfgloop.h"
201 #include "context.h"
202 #include "pass_manager.h"
203 #include "tree-nested.h"
204 #include "dbgcnt.h"
205 #include "tree-chkp.h"
206 #include "lto-section-names.h"
207 #include "stringpool.h"
208 #include "attribs.h"
210 /* Queue of cgraph nodes scheduled to be added into cgraph. This is a
211 secondary queue used during optimization to accommodate passes that
212 may generate new functions that need to be optimized and expanded. */
213 vec<cgraph_node *> cgraph_new_nodes;
215 static void expand_all_functions (void);
216 static void mark_functions_to_output (void);
217 static void handle_alias_pairs (void);
219 /* Used for vtable lookup in thunk adjusting. */
220 static GTY (()) tree vtable_entry_type;
222 /* Return true if this symbol is a function from the C frontend specified
223 directly in RTL form (with "__RTL"). */
225 bool
226 symtab_node::native_rtl_p () const
228 if (TREE_CODE (decl) != FUNCTION_DECL)
229 return false;
230 if (!DECL_STRUCT_FUNCTION (decl))
231 return false;
232 return DECL_STRUCT_FUNCTION (decl)->curr_properties & PROP_rtl;
235 /* Determine if symbol declaration is needed. That is, visible to something
236 either outside this translation unit, or to something magic in the system
237 configury. */
238 bool
239 symtab_node::needed_p (void)
241 /* Double check that no one output the function into assembly file
242 early. */
243 if (!native_rtl_p ())
244 gcc_checking_assert
245 (!DECL_ASSEMBLER_NAME_SET_P (decl)
246 || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)));
248 if (!definition)
249 return false;
251 if (DECL_EXTERNAL (decl))
252 return false;
254 /* If the user told us it is used, then it must be so. */
255 if (force_output)
256 return true;
258 /* ABI forced symbols are needed when they are external. */
259 if (forced_by_abi && TREE_PUBLIC (decl))
260 return true;
262 /* Keep constructors, destructors and virtual functions. */
263 if (TREE_CODE (decl) == FUNCTION_DECL
264 && (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl)))
265 return true;
267 /* Externally visible variables must be output. The exception is
268 COMDAT variables that must be output only when they are needed. */
269 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
270 return true;
272 return false;
275 /* Head and terminator of the queue of nodes to be processed while building
276 callgraph. */
278 static symtab_node symtab_terminator;
279 static symtab_node *queued_nodes = &symtab_terminator;
281 /* Add NODE to queue starting at QUEUED_NODES.
282 The queue is linked via AUX pointers and terminated by a pointer to symtab_terminator. */
284 static void
285 enqueue_node (symtab_node *node)
287 if (node->aux)
288 return;
289 gcc_checking_assert (queued_nodes);
290 node->aux = queued_nodes;
291 queued_nodes = node;
294 /* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
295 functions into callgraph in a way so they look like ordinary reachable
296 functions inserted into callgraph already at construction time. */
298 void
299 symbol_table::process_new_functions (void)
301 tree fndecl;
303 if (!cgraph_new_nodes.exists ())
304 return;
306 handle_alias_pairs ();
307 /* Note that this queue may grow as it is being processed, as the new
308 functions may generate new ones. */
309 for (unsigned i = 0; i < cgraph_new_nodes.length (); i++)
311 cgraph_node *node = cgraph_new_nodes[i];
312 fndecl = node->decl;
313 switch (state)
315 case CONSTRUCTION:
316 /* At construction time we just need to finalize function and move
317 it into reachable functions list. */
319 cgraph_node::finalize_function (fndecl, false);
320 call_cgraph_insertion_hooks (node);
321 enqueue_node (node);
322 break;
324 case IPA:
325 case IPA_SSA:
326 case IPA_SSA_AFTER_INLINING:
327 /* When IPA optimization has already started, do all essential
328 transformations that have already been performed on the whole
329 cgraph but not on this function. */
331 gimple_register_cfg_hooks ();
332 if (!node->analyzed)
333 node->analyze ();
334 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
335 if ((state == IPA_SSA || state == IPA_SSA_AFTER_INLINING)
336 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
338 bool summaried_computed = ipa_fn_summaries != NULL;
339 g->get_passes ()->execute_early_local_passes ();
340 /* Early passes compute inline parameters to do inlining
341 and splitting. This is redundant for functions added late.
342 Just throw away whatever it did. */
343 if (!summaried_computed)
344 ipa_free_fn_summary ();
346 else if (ipa_fn_summaries != NULL)
347 compute_fn_summary (node, true);
348 free_dominance_info (CDI_POST_DOMINATORS);
349 free_dominance_info (CDI_DOMINATORS);
350 pop_cfun ();
351 call_cgraph_insertion_hooks (node);
352 break;
354 case EXPANSION:
355 /* Functions created during expansion shall be compiled
356 directly. */
357 node->process = 0;
358 call_cgraph_insertion_hooks (node);
359 node->expand ();
360 break;
362 default:
363 gcc_unreachable ();
364 break;
368 cgraph_new_nodes.release ();
371 /* As a GCC extension we allow redefinition of the function. The
372 semantics when both copies of bodies differ is not well defined.
373 We replace the old body with new body so in unit at a time mode
374 we always use new body, while in normal mode we may end up with
375 old body inlined into some functions and new body expanded and
376 inlined in others.
378 ??? It may make more sense to use one body for inlining and other
379 body for expanding the function but this is difficult to do. */
381 void
382 cgraph_node::reset (void)
384 /* If process is set, then we have already begun whole-unit analysis.
385 This is *not* testing for whether we've already emitted the function.
386 That case can be sort-of legitimately seen with real function redefinition
387 errors. I would argue that the front end should never present us with
388 such a case, but don't enforce that for now. */
389 gcc_assert (!process);
391 /* Reset our data structures so we can analyze the function again. */
392 memset (&local, 0, sizeof (local));
393 memset (&global, 0, sizeof (global));
394 memset (&rtl, 0, sizeof (rtl));
395 analyzed = false;
396 definition = false;
397 alias = false;
398 transparent_alias = false;
399 weakref = false;
400 cpp_implicit_alias = false;
402 remove_callees ();
403 remove_all_references ();
406 /* Return true when there are references to the node. INCLUDE_SELF is
407 true if a self reference counts as a reference. */
409 bool
410 symtab_node::referred_to_p (bool include_self)
412 ipa_ref *ref = NULL;
414 /* See if there are any references at all. */
415 if (iterate_referring (0, ref))
416 return true;
417 /* For functions check also calls. */
418 cgraph_node *cn = dyn_cast <cgraph_node *> (this);
419 if (cn && cn->callers)
421 if (include_self)
422 return true;
423 for (cgraph_edge *e = cn->callers; e; e = e->next_caller)
424 if (e->caller != this)
425 return true;
427 return false;
430 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
431 logic in effect. If NO_COLLECT is true, then our caller cannot stand to have
432 the garbage collector run at the moment. We would need to either create
433 a new GC context, or just not compile right now. */
435 void
436 cgraph_node::finalize_function (tree decl, bool no_collect)
438 cgraph_node *node = cgraph_node::get_create (decl);
440 if (node->definition)
442 /* Nested functions should only be defined once. */
443 gcc_assert (!DECL_CONTEXT (decl)
444 || TREE_CODE (DECL_CONTEXT (decl)) != FUNCTION_DECL);
445 node->reset ();
446 node->local.redefined_extern_inline = true;
449 /* Set definition first before calling notice_global_symbol so that
450 it is available to notice_global_symbol. */
451 node->definition = true;
452 notice_global_symbol (decl);
453 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
454 if (!flag_toplevel_reorder)
455 node->no_reorder = true;
457 /* With -fkeep-inline-functions we are keeping all inline functions except
458 for extern inline ones. */
459 if (flag_keep_inline_functions
460 && DECL_DECLARED_INLINE_P (decl)
461 && !DECL_EXTERNAL (decl)
462 && !DECL_DISREGARD_INLINE_LIMITS (decl))
463 node->force_output = 1;
465 /* __RTL functions were already output as soon as they were parsed (due
466 to the large amount of global state in the backend).
467 Mark such functions as "force_output" to reflect the fact that they
468 will be in the asm file when considering the symbols they reference.
469 The attempt to output them later on will bail out immediately. */
470 if (node->native_rtl_p ())
471 node->force_output = 1;
473 /* When not optimizing, also output the static functions (see
474 PR24561), but don't do so for always_inline functions, functions
475 declared inline, and nested functions. These were optimized out
476 in the original implementation and it is unclear whether we want
477 to change the behavior here. */
478 if (((!opt_for_fn (decl, optimize) || flag_keep_static_functions
479 || node->no_reorder)
480 && !node->cpp_implicit_alias
481 && !DECL_DISREGARD_INLINE_LIMITS (decl)
482 && !DECL_DECLARED_INLINE_P (decl)
483 && !(DECL_CONTEXT (decl)
484 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL))
485 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
486 node->force_output = 1;
488 /* If we've not yet emitted decl, tell the debug info about it. */
489 if (!TREE_ASM_WRITTEN (decl))
490 (*debug_hooks->deferred_inline_function) (decl);
492 if (!no_collect)
493 ggc_collect ();
495 if (symtab->state == CONSTRUCTION
496 && (node->needed_p () || node->referred_to_p ()))
497 enqueue_node (node);
500 /* Add the function FNDECL to the call graph.
501 Unlike finalize_function, this function is intended to be used
502 by the middle end and allows insertion of a new function at an arbitrary point
503 of compilation. The function can be either in high, low or SSA form
504 GIMPLE.
506 The function is assumed to be reachable and have address taken (so no
507 API breaking optimizations are performed on it).
509 The main work done by this function is to enqueue the function for later
510 processing, to avoid the need for the passes to be re-entrant. */
512 void
513 cgraph_node::add_new_function (tree fndecl, bool lowered)
515 gcc::pass_manager *passes = g->get_passes ();
516 cgraph_node *node;
518 if (dump_file)
520 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
521 const char *function_type = ((gimple_has_body_p (fndecl))
522 ? (lowered
523 ? (gimple_in_ssa_p (fn)
524 ? "ssa gimple"
525 : "low gimple")
526 : "high gimple")
527 : "to-be-gimplified");
528 fprintf (dump_file,
529 "Added new %s function %s to callgraph\n",
530 function_type,
531 fndecl_name (fndecl));
534 switch (symtab->state)
536 case PARSING:
537 cgraph_node::finalize_function (fndecl, false);
538 break;
539 case CONSTRUCTION:
540 /* Just enqueue function to be processed at nearest occurrence. */
541 node = cgraph_node::get_create (fndecl);
542 if (lowered)
543 node->lowered = true;
544 cgraph_new_nodes.safe_push (node);
545 break;
547 case IPA:
548 case IPA_SSA:
549 case IPA_SSA_AFTER_INLINING:
550 case EXPANSION:
551 /* Bring the function into finalized state and enqueue for later
552 analyzing and compilation. */
553 node = cgraph_node::get_create (fndecl);
554 node->local.local = false;
555 node->definition = true;
556 node->force_output = true;
557 if (TREE_PUBLIC (fndecl))
558 node->externally_visible = true;
559 if (!lowered && symtab->state == EXPANSION)
561 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
562 gimple_register_cfg_hooks ();
563 bitmap_obstack_initialize (NULL);
564 execute_pass_list (cfun, passes->all_lowering_passes);
565 passes->execute_early_local_passes ();
566 bitmap_obstack_release (NULL);
567 pop_cfun ();
569 lowered = true;
571 if (lowered)
572 node->lowered = true;
573 cgraph_new_nodes.safe_push (node);
574 break;
576 case FINISHED:
577 /* At the very end of compilation we have to do all the work up
578 to expansion. */
579 node = cgraph_node::create (fndecl);
580 if (lowered)
581 node->lowered = true;
582 node->definition = true;
583 node->analyze ();
584 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
585 gimple_register_cfg_hooks ();
586 bitmap_obstack_initialize (NULL);
587 if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
588 g->get_passes ()->execute_early_local_passes ();
589 bitmap_obstack_release (NULL);
590 pop_cfun ();
591 node->expand ();
592 break;
594 default:
595 gcc_unreachable ();
598 /* Set a personality if required and we already passed EH lowering. */
599 if (lowered
600 && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
601 == eh_personality_lang))
602 DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
605 /* Analyze the function scheduled to be output. */
606 void
607 cgraph_node::analyze (void)
609 if (native_rtl_p ())
611 analyzed = true;
612 return;
615 tree decl = this->decl;
616 location_t saved_loc = input_location;
617 input_location = DECL_SOURCE_LOCATION (decl);
619 if (thunk.thunk_p)
621 cgraph_node *t = cgraph_node::get (thunk.alias);
623 create_edge (t, NULL, t->count, CGRAPH_FREQ_BASE);
624 callees->can_throw_external = !TREE_NOTHROW (t->decl);
625 /* Target code in expand_thunk may need the thunk's target
626 to be analyzed, so recurse here. */
627 if (!t->analyzed)
628 t->analyze ();
629 if (t->alias)
631 t = t->get_alias_target ();
632 if (!t->analyzed)
633 t->analyze ();
635 if (!expand_thunk (false, false))
637 thunk.alias = NULL;
638 return;
640 thunk.alias = NULL;
642 if (alias)
643 resolve_alias (cgraph_node::get (alias_target), transparent_alias);
644 else if (dispatcher_function)
646 /* Generate the dispatcher body of multi-versioned functions. */
647 cgraph_function_version_info *dispatcher_version_info
648 = function_version ();
649 if (dispatcher_version_info != NULL
650 && (dispatcher_version_info->dispatcher_resolver
651 == NULL_TREE))
653 tree resolver = NULL_TREE;
654 gcc_assert (targetm.generate_version_dispatcher_body);
655 resolver = targetm.generate_version_dispatcher_body (this);
656 gcc_assert (resolver != NULL_TREE);
659 else
661 push_cfun (DECL_STRUCT_FUNCTION (decl));
663 assign_assembler_name_if_needed (decl);
665 /* Make sure to gimplify bodies only once. During analyzing a
666 function we lower it, which will require gimplified nested
667 functions, so we can end up here with an already gimplified
668 body. */
669 if (!gimple_has_body_p (decl))
670 gimplify_function_tree (decl);
672 /* Lower the function. */
673 if (!lowered)
675 if (nested)
676 lower_nested_functions (decl);
677 gcc_assert (!nested);
679 gimple_register_cfg_hooks ();
680 bitmap_obstack_initialize (NULL);
681 execute_pass_list (cfun, g->get_passes ()->all_lowering_passes);
682 free_dominance_info (CDI_POST_DOMINATORS);
683 free_dominance_info (CDI_DOMINATORS);
684 compact_blocks ();
685 bitmap_obstack_release (NULL);
686 lowered = true;
689 pop_cfun ();
691 analyzed = true;
693 input_location = saved_loc;
696 /* The C++ frontend produces same-body aliases all over the place, even before PCH
697 gets streamed out. It relies on us linking the aliases with their function
698 in order to do the fixups, but ipa-ref is not PCH safe. Consequently we
699 first produce aliases without links, but once the C++ FE is sure it won't stream
700 PCH we build the links via this function. */
702 void
703 symbol_table::process_same_body_aliases (void)
705 symtab_node *node;
706 FOR_EACH_SYMBOL (node)
707 if (node->cpp_implicit_alias && !node->analyzed)
708 node->resolve_alias
709 (VAR_P (node->alias_target)
710 ? (symtab_node *)varpool_node::get_create (node->alias_target)
711 : (symtab_node *)cgraph_node::get_create (node->alias_target));
712 cpp_implicit_aliases_done = true;
715 /* Process attributes common for vars and functions. */
717 static void
718 process_common_attributes (symtab_node *node, tree decl)
720 tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
722 if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
724 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
725 "%<weakref%> attribute should be accompanied with"
726 " an %<alias%> attribute");
727 DECL_WEAK (decl) = 0;
728 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
729 DECL_ATTRIBUTES (decl));
732 if (lookup_attribute ("no_reorder", DECL_ATTRIBUTES (decl)))
733 node->no_reorder = 1;
736 /* Look for externally_visible and used attributes and mark cgraph nodes
737 accordingly.
739 We cannot mark the nodes at the point the attributes are processed (in
740 handle_*_attribute) because the copy of the declarations available at that
741 point may not be canonical. For example, in:
743 void f();
744 void f() __attribute__((used));
746 the declaration we see in handle_used_attribute will be the second
747 declaration -- but the front end will subsequently merge that declaration
748 with the original declaration and discard the second declaration.
750 Furthermore, we can't mark these nodes in finalize_function because:
752 void f() {}
753 void f() __attribute__((externally_visible));
755 is valid.
757 So, we walk the nodes at the end of the translation unit, applying the
758 attributes at that point. */
760 static void
761 process_function_and_variable_attributes (cgraph_node *first,
762 varpool_node *first_var)
764 cgraph_node *node;
765 varpool_node *vnode;
767 for (node = symtab->first_function (); node != first;
768 node = symtab->next_function (node))
770 tree decl = node->decl;
771 if (DECL_PRESERVE_P (decl))
772 node->mark_force_output ();
773 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
775 if (! TREE_PUBLIC (node->decl))
776 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
777 "%<externally_visible%>"
778 " attribute have effect only on public objects");
780 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
781 && (node->definition && !node->alias))
783 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
784 "%<weakref%> attribute ignored"
785 " because function is defined");
786 DECL_WEAK (decl) = 0;
787 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
788 DECL_ATTRIBUTES (decl));
791 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
792 && !DECL_DECLARED_INLINE_P (decl)
793 /* redefining extern inline function makes it DECL_UNINLINABLE. */
794 && !DECL_UNINLINABLE (decl))
795 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
796 "always_inline function might not be inlinable");
798 process_common_attributes (node, decl);
800 for (vnode = symtab->first_variable (); vnode != first_var;
801 vnode = symtab->next_variable (vnode))
803 tree decl = vnode->decl;
804 if (DECL_EXTERNAL (decl)
805 && DECL_INITIAL (decl))
806 varpool_node::finalize_decl (decl);
807 if (DECL_PRESERVE_P (decl))
808 vnode->force_output = true;
809 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
811 if (! TREE_PUBLIC (vnode->decl))
812 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
813 "%<externally_visible%>"
814 " attribute have effect only on public objects");
816 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
817 && vnode->definition
818 && DECL_INITIAL (decl))
820 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
821 "%<weakref%> attribute ignored"
822 " because variable is initialized");
823 DECL_WEAK (decl) = 0;
824 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
825 DECL_ATTRIBUTES (decl));
827 process_common_attributes (vnode, decl);
831 /* Mark DECL as finalized. By finalizing the declaration, the frontend instructs the
832 middle end to output the variable to the asm file, if it is needed or externally
833 visible. */
835 void
836 varpool_node::finalize_decl (tree decl)
838 varpool_node *node = varpool_node::get_create (decl);
840 gcc_assert (TREE_STATIC (decl) || DECL_EXTERNAL (decl));
842 if (node->definition)
843 return;
844 /* Set definition first before calling notice_global_symbol so that
845 it is available to notice_global_symbol. */
846 node->definition = true;
847 notice_global_symbol (decl);
848 if (!flag_toplevel_reorder)
849 node->no_reorder = true;
850 if (TREE_THIS_VOLATILE (decl) || DECL_PRESERVE_P (decl)
851 /* Traditionally we do not eliminate static variables when not
852 optimizing and when not doing toplevel reorder. */
853 || (node->no_reorder && !DECL_COMDAT (node->decl)
854 && !DECL_ARTIFICIAL (node->decl)))
855 node->force_output = true;
857 if (symtab->state == CONSTRUCTION
858 && (node->needed_p () || node->referred_to_p ()))
859 enqueue_node (node);
860 if (symtab->state >= IPA_SSA)
861 node->analyze ();
862 /* Some frontends produce various interface variables after compilation
863 has finished. */
864 if (symtab->state == FINISHED
865 || (node->no_reorder
866 && symtab->state == EXPANSION))
867 node->assemble_decl ();
869 if (DECL_INITIAL (decl))
870 chkp_register_var_initializer (decl);
873 /* EDGE is a polymorphic call. Mark all possible targets as reachable
874 and, if there is only one target, perform trivial devirtualization.
875 REACHABLE_CALL_TARGETS collects target lists we already walked to
876 avoid duplicate work. */
878 static void
879 walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
880 cgraph_edge *edge)
882 unsigned int i;
883 void *cache_token;
884 bool final;
885 vec <cgraph_node *>targets
886 = possible_polymorphic_call_targets
887 (edge, &final, &cache_token);
889 if (!reachable_call_targets->add (cache_token))
891 if (symtab->dump_file)
892 dump_possible_polymorphic_call_targets
893 (symtab->dump_file, edge);
895 for (i = 0; i < targets.length (); i++)
897 /* Do not bother to mark virtual methods in anonymous namespace;
898 either we will find use of virtual table defining it, or it is
899 unused. */
900 if (targets[i]->definition
901 && TREE_CODE
902 (TREE_TYPE (targets[i]->decl))
903 == METHOD_TYPE
904 && !type_in_anonymous_namespace_p
905 (TYPE_METHOD_BASETYPE (TREE_TYPE (targets[i]->decl))))
906 enqueue_node (targets[i]);
910 /* Very trivial devirtualization; when the type is
911 final or anonymous (so we know all its derivation)
912 and there is only one possible virtual call target,
913 make the edge direct. */
914 if (final)
916 if (targets.length () <= 1 && dbg_cnt (devirt))
918 cgraph_node *target;
919 if (targets.length () == 1)
920 target = targets[0];
921 else
922 target = cgraph_node::create
923 (builtin_decl_implicit (BUILT_IN_UNREACHABLE));
925 if (symtab->dump_file)
927 fprintf (symtab->dump_file,
928 "Devirtualizing call: ");
929 print_gimple_stmt (symtab->dump_file,
930 edge->call_stmt, 0,
931 TDF_SLIM);
933 if (dump_enabled_p ())
935 location_t locus = gimple_location_safe (edge->call_stmt);
936 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
937 "devirtualizing call in %s to %s\n",
938 edge->caller->name (), target->name ());
941 edge->make_direct (target);
942 edge->redirect_call_stmt_to_callee ();
944 /* Call to __builtin_unreachable shouldn't be instrumented. */
945 if (!targets.length ())
946 gimple_call_set_with_bounds (edge->call_stmt, false);
948 if (symtab->dump_file)
950 fprintf (symtab->dump_file,
951 "Devirtualized as: ");
952 print_gimple_stmt (symtab->dump_file,
953 edge->call_stmt, 0,
954 TDF_SLIM);
960 /* Issue appropriate warnings for the global declaration DECL. */
962 static void
963 check_global_declaration (symtab_node *snode)
965 const char *decl_file;
966 tree decl = snode->decl;
968 /* Warn about any function declared static but not defined. We don't
969 warn about variables, because many programs have static variables
970 that exist only to get some text into the object file. */
971 if (TREE_CODE (decl) == FUNCTION_DECL
972 && DECL_INITIAL (decl) == 0
973 && DECL_EXTERNAL (decl)
974 && ! DECL_ARTIFICIAL (decl)
975 && ! TREE_NO_WARNING (decl)
976 && ! TREE_PUBLIC (decl)
977 && (warn_unused_function
978 || snode->referred_to_p (/*include_self=*/false)))
980 if (snode->referred_to_p (/*include_self=*/false))
981 pedwarn (input_location, 0, "%q+F used but never defined", decl);
982 else
983 warning (OPT_Wunused_function, "%q+F declared %<static%> but never defined", decl);
984 /* This symbol is effectively an "extern" declaration now. */
985 TREE_PUBLIC (decl) = 1;
988 /* Warn about static fns or vars defined but not used. */
989 if (((warn_unused_function && TREE_CODE (decl) == FUNCTION_DECL)
990 || (((warn_unused_variable && ! TREE_READONLY (decl))
991 || (warn_unused_const_variable > 0 && TREE_READONLY (decl)
992 && (warn_unused_const_variable == 2
993 || (main_input_filename != NULL
994 && (decl_file = DECL_SOURCE_FILE (decl)) != NULL
995 && filename_cmp (main_input_filename,
996 decl_file) == 0))))
997 && VAR_P (decl)))
998 && ! DECL_IN_SYSTEM_HEADER (decl)
999 && ! snode->referred_to_p (/*include_self=*/false)
1000 /* This TREE_USED check is needed in addition to referred_to_p
1001 above, because the `__unused__' attribute is not being
1002 considered for referred_to_p. */
1003 && ! TREE_USED (decl)
1004 /* The TREE_USED bit for file-scope decls is kept in the identifier,
1005 to handle multiple external decls in different scopes. */
1006 && ! (DECL_NAME (decl) && TREE_USED (DECL_NAME (decl)))
1007 && ! DECL_EXTERNAL (decl)
1008 && ! DECL_ARTIFICIAL (decl)
1009 && ! DECL_ABSTRACT_ORIGIN (decl)
1010 && ! TREE_PUBLIC (decl)
1011 /* A volatile variable might be used in some non-obvious way. */
1012 && (! VAR_P (decl) || ! TREE_THIS_VOLATILE (decl))
1013 /* Global register variables must be declared to reserve them. */
1014 && ! (VAR_P (decl) && DECL_REGISTER (decl))
1015 /* Global ctors and dtors are called by the runtime. */
1016 && (TREE_CODE (decl) != FUNCTION_DECL
1017 || (!DECL_STATIC_CONSTRUCTOR (decl)
1018 && !DECL_STATIC_DESTRUCTOR (decl)))
1019 /* Otherwise, ask the language. */
1020 && lang_hooks.decls.warn_unused_global (decl))
1021 warning_at (DECL_SOURCE_LOCATION (decl),
1022 (TREE_CODE (decl) == FUNCTION_DECL)
1023 ? OPT_Wunused_function
1024 : (TREE_READONLY (decl)
1025 ? OPT_Wunused_const_variable_
1026 : OPT_Wunused_variable),
1027 "%qD defined but not used", decl);
1030 /* Discover all functions and variables that are trivially needed, analyze
1031 them as well as all functions and variables referred to by them. */
1032 static cgraph_node *first_analyzed;
1033 static varpool_node *first_analyzed_var;
1035 /* FIRST_TIME is set to TRUE the first time we are called for a
1036 translation unit from finalize_compilation_unit(), and to false
1037 otherwise. */
1039 static void
1040 analyze_functions (bool first_time)
1042 /* Keep track of already processed nodes when called multiple times for
1043 intermodule optimization. */
1044 cgraph_node *first_handled = first_analyzed;
1045 varpool_node *first_handled_var = first_analyzed_var;
1046 hash_set<void *> reachable_call_targets;
1048 symtab_node *node;
1049 symtab_node *next;
1050 int i;
1051 ipa_ref *ref;
1052 bool changed = true;
1053 location_t saved_loc = input_location;
1055 bitmap_obstack_initialize (NULL);
1056 symtab->state = CONSTRUCTION;
1057 input_location = UNKNOWN_LOCATION;
1059 /* Ugly, but the fixup cannot happen at the time the same-body alias is created;
1060 the C++ FE is confused about the COMDAT groups being right. */
1061 if (symtab->cpp_implicit_aliases_done)
1062 FOR_EACH_SYMBOL (node)
1063 if (node->cpp_implicit_alias)
1064 node->fixup_same_cpp_alias_visibility (node->get_alias_target ());
1065 build_type_inheritance_graph ();
1067 /* Analysis adds static variables that in turn add references to new functions.
1068 So we need to iterate the process until it stabilizes. */
1069 while (changed)
1071 changed = false;
1072 process_function_and_variable_attributes (first_analyzed,
1073 first_analyzed_var);
1075 /* First identify the trivially needed symbols. */
1076 for (node = symtab->first_symbol ();
1077 node != first_analyzed
1078 && node != first_analyzed_var; node = node->next)
1080 /* Convert COMDAT group designators to IDENTIFIER_NODEs. */
1081 node->get_comdat_group_id ();
1082 if (node->needed_p ())
1084 enqueue_node (node);
1085 if (!changed && symtab->dump_file)
1086 fprintf (symtab->dump_file, "Trivially needed symbols:");
1087 changed = true;
1088 if (symtab->dump_file)
1089 fprintf (symtab->dump_file, " %s", node->asm_name ());
1090 if (!changed && symtab->dump_file)
1091 fprintf (symtab->dump_file, "\n");
1093 if (node == first_analyzed
1094 || node == first_analyzed_var)
1095 break;
1097 symtab->process_new_functions ();
1098 first_analyzed_var = symtab->first_variable ();
1099 first_analyzed = symtab->first_function ();
1101 if (changed && symtab->dump_file)
1102 fprintf (symtab->dump_file, "\n");
1104 /* Lower representation, build callgraph edges and references for all trivially
1105 needed symbols and all symbols referred to by them. */
1106 while (queued_nodes != &symtab_terminator)
1108 changed = true;
1109 node = queued_nodes;
1110 queued_nodes = (symtab_node *)queued_nodes->aux;
1111 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
1112 if (cnode && cnode->definition)
1114 cgraph_edge *edge;
1115 tree decl = cnode->decl;
1117 /* ??? It is possible to create an extern inline function
1118 and later use the weak alias attribute to kill its body.
1119 See gcc.c-torture/compile/20011119-1.c */
1120 if (!DECL_STRUCT_FUNCTION (decl)
1121 && !cnode->alias
1122 && !cnode->thunk.thunk_p
1123 && !cnode->dispatcher_function)
1125 cnode->reset ();
1126 cnode->local.redefined_extern_inline = true;
1127 continue;
1130 if (!cnode->analyzed)
1131 cnode->analyze ();
1133 for (edge = cnode->callees; edge; edge = edge->next_callee)
1134 if (edge->callee->definition
1135 && (!DECL_EXTERNAL (edge->callee->decl)
1136 /* When not optimizing, do not try to analyze extern
1137 inline functions. Doing so is pointless. */
1138 || opt_for_fn (edge->callee->decl, optimize)
1139 /* Weakrefs need to be preserved. */
1140 || edge->callee->alias
1141 /* always_inline functions are inlined even at -O0. */
1142 || lookup_attribute
1143 ("always_inline",
1144 DECL_ATTRIBUTES (edge->callee->decl))
1145 /* Multiversioned functions need the dispatcher to
1146 be produced locally even for extern functions. */
1147 || edge->callee->function_version ()))
1148 enqueue_node (edge->callee);
1149 if (opt_for_fn (cnode->decl, optimize)
1150 && opt_for_fn (cnode->decl, flag_devirtualize))
1152 cgraph_edge *next;
1154 for (edge = cnode->indirect_calls; edge; edge = next)
1156 next = edge->next_callee;
1157 if (edge->indirect_info->polymorphic)
1158 walk_polymorphic_call_targets (&reachable_call_targets,
1159 edge);
1163 /* If decl is a clone of an abstract function,
1164 mark that abstract function so that we don't release its body.
1165 The DECL_INITIAL() of that abstract function declaration
1166 will be later needed to output debug info. */
1167 if (DECL_ABSTRACT_ORIGIN (decl))
1169 cgraph_node *origin_node
1170 = cgraph_node::get_create (DECL_ABSTRACT_ORIGIN (decl));
1171 origin_node->used_as_abstract_origin = true;
1173 /* Preserve a function's function context node. It will
1174 later be needed to output debug info. */
1175 if (tree fn = decl_function_context (decl))
1177 cgraph_node *origin_node = cgraph_node::get_create (fn);
1178 enqueue_node (origin_node);
1181 else
1183 varpool_node *vnode = dyn_cast <varpool_node *> (node);
1184 if (vnode && vnode->definition && !vnode->analyzed)
1185 vnode->analyze ();
1188 if (node->same_comdat_group)
1190 symtab_node *next;
1191 for (next = node->same_comdat_group;
1192 next != node;
1193 next = next->same_comdat_group)
1194 if (!next->comdat_local_p ())
1195 enqueue_node (next);
1197 for (i = 0; node->iterate_reference (i, ref); i++)
1198 if (ref->referred->definition
1199 && (!DECL_EXTERNAL (ref->referred->decl)
1200 || ((TREE_CODE (ref->referred->decl) != FUNCTION_DECL
1201 && optimize)
1202 || (TREE_CODE (ref->referred->decl) == FUNCTION_DECL
1203 && opt_for_fn (ref->referred->decl, optimize))
1204 || node->alias
1205 || ref->referred->alias)))
1206 enqueue_node (ref->referred);
1207 symtab->process_new_functions ();
1210 update_type_inheritance_graph ();
1212 /* Collect entry points to the unit. */
1213 if (symtab->dump_file)
1215 fprintf (symtab->dump_file, "\n\nInitial ");
1216 symtab->dump (symtab->dump_file);
1219 if (first_time)
1221 symtab_node *snode;
1222 FOR_EACH_SYMBOL (snode)
1223 check_global_declaration (snode);
1226 if (symtab->dump_file)
1227 fprintf (symtab->dump_file, "\nRemoving unused symbols:");
1229 for (node = symtab->first_symbol ();
1230 node != first_handled
1231 && node != first_handled_var; node = next)
1233 next = node->next;
1234 if (!node->aux && !node->referred_to_p ())
1236 if (symtab->dump_file)
1237 fprintf (symtab->dump_file, " %s", node->name ());
1239 /* See if the debugger can use anything before the DECL
1240 passes away. Perhaps it can notice a DECL that is now a
1241 constant and can tag the early DIE with an appropriate
1242 attribute.
1244 Otherwise, this is the last chance the debug_hooks have
1245 at looking at optimized away DECLs, since
1246 late_global_decl will subsequently be called from the
1247 contents of the now pruned symbol table. */
1248 if (VAR_P (node->decl)
1249 && !decl_function_context (node->decl))
1251 /* We are reclaiming totally unreachable code and variables
1252 so they effectively appear as readonly. Show that to
1253 the debug machinery. */
1254 TREE_READONLY (node->decl) = 1;
1255 node->definition = false;
1256 (*debug_hooks->late_global_decl) (node->decl);
1259 node->remove ();
1260 continue;
1262 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
1264 tree decl = node->decl;
1266 if (cnode->definition && !gimple_has_body_p (decl)
1267 && !cnode->alias
1268 && !cnode->thunk.thunk_p)
1269 cnode->reset ();
1271 gcc_assert (!cnode->definition || cnode->thunk.thunk_p
1272 || cnode->alias
1273 || gimple_has_body_p (decl)
1274 || cnode->native_rtl_p ());
1275 gcc_assert (cnode->analyzed == cnode->definition);
1277 node->aux = NULL;
1279 for (;node; node = node->next)
1280 node->aux = NULL;
1281 first_analyzed = symtab->first_function ();
1282 first_analyzed_var = symtab->first_variable ();
1283 if (symtab->dump_file)
1285 fprintf (symtab->dump_file, "\n\nReclaimed ");
1286 symtab->dump (symtab->dump_file);
1288 bitmap_obstack_release (NULL);
1289 ggc_collect ();
1290 /* Initialize assembler name hash, in particular we want to trigger C++
1291 mangling and same body alias creation before we free DECL_ARGUMENTS
1292 used by it. */
1293 if (!seen_error ())
1294 symtab->symtab_initialize_asm_name_hash ();
1296 input_location = saved_loc;
1299 /* Translate the ugly representation of aliases as alias pairs into nice
1300 representation in callgraph. We don't handle all cases yet,
1301 unfortunately. */
1303 static void
1304 handle_alias_pairs (void)
1306 alias_pair *p;
1307 unsigned i;
1309 for (i = 0; alias_pairs && alias_pairs->iterate (i, &p);)
1311 symtab_node *target_node = symtab_node::get_for_asmname (p->target);
1313 /* Weakrefs with target not defined in current unit are easy to handle:
1314 they behave just as external variables except we need to note the
1315 alias flag to later output the weakref pseudo op into asm file. */
1316 if (!target_node
1317 && lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL)
1319 symtab_node *node = symtab_node::get (p->decl);
1320 if (node)
1322 node->alias_target = p->target;
1323 node->weakref = true;
1324 node->alias = true;
1325 node->transparent_alias = true;
1327 alias_pairs->unordered_remove (i);
1328 continue;
1330 else if (!target_node)
1332 error ("%q+D aliased to undefined symbol %qE", p->decl, p->target);
1333 symtab_node *node = symtab_node::get (p->decl);
1334 if (node)
1335 node->alias = false;
1336 alias_pairs->unordered_remove (i);
1337 continue;
1340 if (DECL_EXTERNAL (target_node->decl)
1341 /* We use local aliases for C++ thunks to force the tailcall
1342 to bind locally. This is a hack - to keep it working do
1343 the following (which is not strictly correct). */
1344 && (TREE_CODE (target_node->decl) != FUNCTION_DECL
1345 || ! DECL_VIRTUAL_P (target_node->decl))
1346 && ! lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)))
1348 error ("%q+D aliased to external symbol %qE",
1349 p->decl, p->target);
1352 if (TREE_CODE (p->decl) == FUNCTION_DECL
1353 && target_node && is_a <cgraph_node *> (target_node))
1355 cgraph_node *src_node = cgraph_node::get (p->decl);
1356 if (src_node && src_node->definition)
1357 src_node->reset ();
1358 cgraph_node::create_alias (p->decl, target_node->decl);
1359 alias_pairs->unordered_remove (i);
1361 else if (VAR_P (p->decl)
1362 && target_node && is_a <varpool_node *> (target_node))
1364 varpool_node::create_alias (p->decl, target_node->decl);
1365 alias_pairs->unordered_remove (i);
1367 else
1369 error ("%q+D alias in between function and variable is not supported",
1370 p->decl);
1371 warning (0, "%q+D aliased declaration",
1372 target_node->decl);
1373 alias_pairs->unordered_remove (i);
1376 vec_free (alias_pairs);
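/* For illustration only (an informal example, not part of this file): the
   kind of source that produces the alias pairs handled above is

       static int impl (int x) { return x + 1; }
       int visible (int) __attribute__ ((alias ("impl")));

       extern void ext_target (void);
       static void local_ref (void) __attribute__ ((weakref ("ext_target")));

   The first pair becomes a cgraph alias of visible to impl; the weakref,
   whose target may remain undefined in this unit, is only noted with the
   alias and weakref flags so the .weakref pseudo-op can be emitted later.  */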
1380 /* Figure out what functions we want to assemble. */
1382 static void
1383 mark_functions_to_output (void)
1385 bool check_same_comdat_groups = false;
1386 cgraph_node *node;
1388 if (flag_checking)
1389 FOR_EACH_FUNCTION (node)
1390 gcc_assert (!node->process);
1392 FOR_EACH_FUNCTION (node)
1394 tree decl = node->decl;
1396 gcc_assert (!node->process || node->same_comdat_group);
1397 if (node->process)
1398 continue;
1400 /* We need to output all local functions that are used and not
1401 always inlined, as well as those that are reachable from
1402 outside the current compilation unit. */
1403 if (node->analyzed
1404 && !node->thunk.thunk_p
1405 && !node->alias
1406 && !node->global.inlined_to
1407 && !TREE_ASM_WRITTEN (decl)
1408 && !DECL_EXTERNAL (decl))
1410 node->process = 1;
1411 if (node->same_comdat_group)
1413 cgraph_node *next;
1414 for (next = dyn_cast<cgraph_node *> (node->same_comdat_group);
1415 next != node;
1416 next = dyn_cast<cgraph_node *> (next->same_comdat_group))
1417 if (!next->thunk.thunk_p && !next->alias
1418 && !next->comdat_local_p ())
1419 next->process = 1;
1422 else if (node->same_comdat_group)
1424 if (flag_checking)
1425 check_same_comdat_groups = true;
1427 else
1429 /* We should've reclaimed all functions that are not needed. */
1430 if (flag_checking
1431 && !node->global.inlined_to
1432 && gimple_has_body_p (decl)
1433 /* FIXME: in an ltrans unit when the offline copy is outside a partition but inline copies
1434 are inside a partition, we can end up not removing the body since we no longer
1435 have an analyzed node pointing to it. */
1436 && !node->in_other_partition
1437 && !node->alias
1438 && !node->clones
1439 && !DECL_EXTERNAL (decl))
1441 node->debug ();
1442 internal_error ("failed to reclaim unneeded function");
1444 gcc_assert (node->global.inlined_to
1445 || !gimple_has_body_p (decl)
1446 || node->in_other_partition
1447 || node->clones
1448 || DECL_ARTIFICIAL (decl)
1449 || DECL_EXTERNAL (decl));
1454 if (flag_checking && check_same_comdat_groups)
1455 FOR_EACH_FUNCTION (node)
1456 if (node->same_comdat_group && !node->process)
1458 tree decl = node->decl;
1459 if (!node->global.inlined_to
1460 && gimple_has_body_p (decl)
1461 /* FIXME: in an ltrans unit when the offline copy is outside a
1462 partition but inline copies are inside a partition, we can
1463 end up not removing the body since we no longer have an
1464 analyzed node pointing to it. */
1465 && !node->in_other_partition
1466 && !node->clones
1467 && !DECL_EXTERNAL (decl))
1469 node->debug ();
1470 internal_error ("failed to reclaim unneeded function in same "
1471 "comdat group");
1476 /* DECL is a FUNCTION_DECL. Initialize data structures so DECL is a function
1477 in lowered GIMPLE form. IN_SSA is true if the GIMPLE is in SSA form.
1479 Set current_function_decl and cfun to the newly constructed empty function body.
1480 Return the basic block in the function body. */
1482 basic_block
1483 init_lowered_empty_function (tree decl, bool in_ssa, profile_count count)
1485 basic_block bb;
1486 edge e;
1488 current_function_decl = decl;
1489 allocate_struct_function (decl, false);
1490 gimple_register_cfg_hooks ();
1491 init_empty_tree_cfg ();
1492 init_tree_ssa (cfun);
1494 if (in_ssa)
1496 init_ssa_operands (cfun);
1497 cfun->gimple_df->in_ssa_p = true;
1498 cfun->curr_properties |= PROP_ssa;
1501 DECL_INITIAL (decl) = make_node (BLOCK);
1502 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
1504 DECL_SAVED_TREE (decl) = error_mark_node;
1505 cfun->curr_properties |= (PROP_gimple_lcf | PROP_gimple_leh | PROP_gimple_any
1506 | PROP_cfg | PROP_loops);
1508 set_loops_for_fn (cfun, ggc_cleared_alloc<loops> ());
1509 init_loops_structure (cfun, loops_for_fn (cfun), 1);
1510 loops_for_fn (cfun)->state |= LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
1512 /* Create BB for body of the function and connect it properly. */
1513 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = count;
1514 ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency = BB_FREQ_MAX;
1515 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = count;
1516 EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency = BB_FREQ_MAX;
1517 bb = create_basic_block (NULL, ENTRY_BLOCK_PTR_FOR_FN (cfun));
1518 bb->count = count;
1519 bb->frequency = BB_FREQ_MAX;
1520 e = make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, EDGE_FALLTHRU);
1521 e->count = count;
1522 e->probability = profile_probability::always ();
1523 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
1524 e->count = count;
1525 e->probability = profile_probability::always ();
1526 add_bb_to_loop (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
1528 return bb;
1531 /* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1532 offset indicated by VIRTUAL_OFFSET, if that is
1533 non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
1534 zero for a result adjusting thunk. */
1536 tree
1537 thunk_adjust (gimple_stmt_iterator * bsi,
1538 tree ptr, bool this_adjusting,
1539 HOST_WIDE_INT fixed_offset, tree virtual_offset)
1541 gassign *stmt;
1542 tree ret;
1544 if (this_adjusting
1545 && fixed_offset != 0)
1547 stmt = gimple_build_assign
1548 (ptr, fold_build_pointer_plus_hwi_loc (input_location,
1549 ptr,
1550 fixed_offset));
1551 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1554 /* If there's a virtual offset, look up that value in the vtable and
1555 adjust the pointer again. */
1556 if (virtual_offset)
1558 tree vtabletmp;
1559 tree vtabletmp2;
1560 tree vtabletmp3;
1562 if (!vtable_entry_type)
1564 tree vfunc_type = make_node (FUNCTION_TYPE);
1565 TREE_TYPE (vfunc_type) = integer_type_node;
1566 TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
1567 layout_type (vfunc_type);
1569 vtable_entry_type = build_pointer_type (vfunc_type);
1572 vtabletmp =
1573 create_tmp_reg (build_pointer_type
1574 (build_pointer_type (vtable_entry_type)), "vptr");
1576 /* The vptr is always at offset zero in the object. */
1577 stmt = gimple_build_assign (vtabletmp,
1578 build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
1579 ptr));
1580 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1582 /* Form the vtable address. */
1583 vtabletmp2 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp)),
1584 "vtableaddr");
1585 stmt = gimple_build_assign (vtabletmp2,
1586 build_simple_mem_ref (vtabletmp));
1587 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1589 /* Find the entry with the vcall offset. */
1590 stmt = gimple_build_assign (vtabletmp2,
1591 fold_build_pointer_plus_loc (input_location,
1592 vtabletmp2,
1593 virtual_offset));
1594 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1596 /* Get the offset itself. */
1597 vtabletmp3 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp2)),
1598 "vcalloffset");
1599 stmt = gimple_build_assign (vtabletmp3,
1600 build_simple_mem_ref (vtabletmp2));
1601 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1603 /* Adjust the `this' pointer. */
1604 ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
1605 ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
1606 GSI_CONTINUE_LINKING);
1609 if (!this_adjusting
1610 && fixed_offset != 0)
1611 /* Adjust the pointer by the constant. */
1613 tree ptrtmp;
1615 if (VAR_P (ptr))
1616 ptrtmp = ptr;
1617 else
1619 ptrtmp = create_tmp_reg (TREE_TYPE (ptr), "ptr");
1620 stmt = gimple_build_assign (ptrtmp, ptr);
1621 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1623 ptr = fold_build_pointer_plus_hwi_loc (input_location,
1624 ptrtmp, fixed_offset);
1627 /* Emit the statement and gimplify the adjustment expression. */
1628 ret = create_tmp_reg (TREE_TYPE (ptr), "adjusted_this");
1629 stmt = gimple_build_assign (ret, ptr);
1630 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1632 return ret;
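/* For illustration only (a hedged example, not taken from this file): a
   this-adjusting thunk with a fixed offset typically comes from multiple
   inheritance, e.g.

       struct A { virtual void f (); };
       struct B { virtual void g (); };
       struct C : A, B { void g () override; };

   The entry for C::g in B's vtable is a thunk that adds the (negative)
   offset of the B subobject within C to `this' before transferring to
   C::g; that constant is the FIXED_OFFSET adjusted above, while virtual
   bases additionally require the VIRTUAL_OFFSET vtable lookup.  */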
1635 /* Expand thunk NODE to gimple if possible.
1636 When FORCE_GIMPLE_THUNK is true, gimple thunk is created and
1637 no assembler is produced.
1638 When OUTPUT_ASM_THUNK is true, also produce assembler for
1639 thunks that are not lowered. */
1641 bool
1642 cgraph_node::expand_thunk (bool output_asm_thunks, bool force_gimple_thunk)
1644 bool this_adjusting = thunk.this_adjusting;
1645 HOST_WIDE_INT fixed_offset = thunk.fixed_offset;
1646 HOST_WIDE_INT virtual_value = thunk.virtual_value;
1647 tree virtual_offset = NULL;
1648 tree alias = callees->callee->decl;
1649 tree thunk_fndecl = decl;
1650 tree a;
1652 /* Instrumentation thunk is the same function with
1653 a different signature. Never need to expand it. */
1654 if (thunk.add_pointer_bounds_args)
1655 return false;
1657 if (!force_gimple_thunk && this_adjusting
1658 && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1659 virtual_value, alias))
1661 const char *fnname;
1662 tree fn_block;
1663 tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1665 if (!output_asm_thunks)
1667 analyzed = true;
1668 return false;
1671 if (in_lto_p)
1672 get_untransformed_body ();
1673 a = DECL_ARGUMENTS (thunk_fndecl);
1675 current_function_decl = thunk_fndecl;
1677 /* Ensure thunks are emitted in their correct sections. */
1678 resolve_unique_section (thunk_fndecl, 0,
1679 flag_function_sections);
1681 DECL_RESULT (thunk_fndecl)
1682 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1683 RESULT_DECL, 0, restype);
1684 DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
1685 fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
1687 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1688 create one. */
1689 fn_block = make_node (BLOCK);
1690 BLOCK_VARS (fn_block) = a;
1691 DECL_INITIAL (thunk_fndecl) = fn_block;
1692 BLOCK_SUPERCONTEXT (fn_block) = thunk_fndecl;
1693 allocate_struct_function (thunk_fndecl, false);
1694 init_function_start (thunk_fndecl);
1695 cfun->is_thunk = 1;
1696 insn_locations_init ();
1697 set_curr_insn_location (DECL_SOURCE_LOCATION (thunk_fndecl));
1698 prologue_location = curr_insn_location ();
1699 assemble_start_function (thunk_fndecl, fnname);
1701 targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1702 fixed_offset, virtual_value, alias);
1704 assemble_end_function (thunk_fndecl, fnname);
1705 insn_locations_finalize ();
1706 init_insn_lengths ();
1707 free_after_compilation (cfun);
1708 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1709 thunk.thunk_p = false;
1710 analyzed = false;
1712 else if (stdarg_p (TREE_TYPE (thunk_fndecl)))
1714 error ("generic thunk code fails for method %qD which uses %<...%>",
1715 thunk_fndecl);
1716 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1717 analyzed = true;
1718 return false;
1720 else
1722 tree restype;
1723 basic_block bb, then_bb, else_bb, return_bb;
1724 gimple_stmt_iterator bsi;
1725 int nargs = 0;
1726 tree arg;
1727 int i;
1728 tree resdecl;
1729 tree restmp = NULL;
1730 tree resbnd = NULL;
1732 gcall *call;
1733 greturn *ret;
1734 bool alias_is_noreturn = TREE_THIS_VOLATILE (alias);
1736 /* We may be called from expand_thunk that releases the body except for
1737 DECL_ARGUMENTS. In this case force_gimple_thunk is true. */
1738 if (in_lto_p && !force_gimple_thunk)
1739 get_untransformed_body ();
1740 a = DECL_ARGUMENTS (thunk_fndecl);
1742 current_function_decl = thunk_fndecl;
1744 /* Ensure thunks are emitted in their correct sections. */
1745 resolve_unique_section (thunk_fndecl, 0,
1746 flag_function_sections);
1748 DECL_IGNORED_P (thunk_fndecl) = 1;
1749 bitmap_obstack_initialize (NULL);
1751 if (thunk.virtual_offset_p)
1752 virtual_offset = size_int (virtual_value);
1754 /* Build the return declaration for the function. */
1755 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1756 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1758 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1759 DECL_ARTIFICIAL (resdecl) = 1;
1760 DECL_IGNORED_P (resdecl) = 1;
1761 DECL_RESULT (thunk_fndecl) = resdecl;
1762 DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
1764 else
1765 resdecl = DECL_RESULT (thunk_fndecl);
1767 bb = then_bb = else_bb = return_bb
1768 = init_lowered_empty_function (thunk_fndecl, true, count);
1770 bsi = gsi_start_bb (bb);
1772 /* Build call to the function being thunked. */
1773 if (!VOID_TYPE_P (restype)
1774 && (!alias_is_noreturn
1775 || TREE_ADDRESSABLE (restype)
1776 || TREE_CODE (TYPE_SIZE_UNIT (restype)) != INTEGER_CST))
1778 if (DECL_BY_REFERENCE (resdecl))
1780 restmp = gimple_fold_indirect_ref (resdecl);
1781 if (!restmp)
1782 restmp = build2 (MEM_REF,
1783 TREE_TYPE (TREE_TYPE (DECL_RESULT (alias))),
1784 resdecl,
1785 build_int_cst (TREE_TYPE
1786 (DECL_RESULT (alias)), 0));
1788 else if (!is_gimple_reg_type (restype))
1790 if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl)))
1792 restmp = resdecl;
1794 if (VAR_P (restmp))
1795 add_local_decl (cfun, restmp);
1796 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
1798 else
1799 restmp = create_tmp_var (restype, "retval");
1801 else
1802 restmp = create_tmp_reg (restype, "retval");
1805 for (arg = a; arg; arg = DECL_CHAIN (arg))
1806 nargs++;
1807 auto_vec<tree> vargs (nargs);
1808 i = 0;
1809 arg = a;
1810 if (this_adjusting)
1812 vargs.quick_push (thunk_adjust (&bsi, a, 1, fixed_offset,
1813 virtual_offset));
1814 arg = DECL_CHAIN (a);
1815 i = 1;
1818 if (nargs)
1819 for (; i < nargs; i++, arg = DECL_CHAIN (arg))
1821 tree tmp = arg;
1822 if (VECTOR_TYPE_P (TREE_TYPE (arg))
1823 || TREE_CODE (TREE_TYPE (arg)) == COMPLEX_TYPE)
1824 DECL_GIMPLE_REG_P (arg) = 1;
1826 if (!is_gimple_val (arg))
1828 tmp = create_tmp_reg (TYPE_MAIN_VARIANT
1829 (TREE_TYPE (arg)), "arg");
1830 gimple *stmt = gimple_build_assign (tmp, arg);
1831 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1833 vargs.quick_push (tmp);
1835 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
1836 callees->call_stmt = call;
1837 gimple_call_set_from_thunk (call, true);
1838 gimple_call_set_with_bounds (call, instrumentation_clone);
1840 /* Return slot optimization is always possible and in fact required to
1841 return values with DECL_BY_REFERENCE. */
1842 if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl))
1843 && (!is_gimple_reg_type (TREE_TYPE (resdecl))
1844 || DECL_BY_REFERENCE (resdecl)))
1845 gimple_call_set_return_slot_opt (call, true);
1847 if (restmp)
1849 gimple_call_set_lhs (call, restmp);
1850 gcc_assert (useless_type_conversion_p (TREE_TYPE (restmp),
1851 TREE_TYPE (TREE_TYPE (alias))));
1853 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
1854 if (!alias_is_noreturn)
1856 if (instrumentation_clone
1857 && !DECL_BY_REFERENCE (resdecl)
1858 && restmp
1859 && BOUNDED_P (restmp))
1861 resbnd = chkp_insert_retbnd_call (NULL, restmp, &bsi);
1862 create_edge (get_create (gimple_call_fndecl (gsi_stmt (bsi))),
1863 as_a <gcall *> (gsi_stmt (bsi)),
1864 callees->count, callees->frequency);
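/* Sketch of the GIMPLE built below for a return-adjusting (covariant
   return) thunk whose result is a pointer (illustrative only; names are
   made up):

       retval = ALIAS (args...);
       if (retval != 0)
         retval = retval + <adjustment>;     <- then_bb
       else
         retval = 0;                         <- else_bb
       return retval;                        <- return_bb

   The NULL check is needed because adjusting a null pointer must still
   yield a null pointer.  */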
1867 if (restmp && !this_adjusting
1868 && (fixed_offset || virtual_offset))
1870 tree true_label = NULL_TREE;
1872 if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
1874 gimple *stmt;
1875 edge e;
1876 /* If the return type is a pointer, we need to
1877 protect against NULL. We know there will be an
1878 adjustment, because that's why we're emitting a
1879 thunk. */
1880 then_bb = create_basic_block (NULL, bb);
1881 then_bb->count = count - count.apply_scale (1, 16);
1882 then_bb->frequency = BB_FREQ_MAX - BB_FREQ_MAX / 16;
1883 return_bb = create_basic_block (NULL, then_bb);
1884 return_bb->count = count;
1885 return_bb->frequency = BB_FREQ_MAX;
1886 else_bb = create_basic_block (NULL, else_bb);
1887 else_bb->count = count.apply_scale (1, 16);
1888 else_bb->frequency = BB_FREQ_MAX / 16;
1889 add_bb_to_loop (then_bb, bb->loop_father);
1890 add_bb_to_loop (return_bb, bb->loop_father);
1891 add_bb_to_loop (else_bb, bb->loop_father);
1892 remove_edge (single_succ_edge (bb));
1893 true_label = gimple_block_label (then_bb);
1894 stmt = gimple_build_cond (NE_EXPR, restmp,
1895 build_zero_cst (TREE_TYPE (restmp)),
1896 NULL_TREE, NULL_TREE);
1897 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1898 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1899 e->probability = profile_probability::guessed_always ()
1900 .apply_scale (1, 16);
1901 e->count = count - count.apply_scale (1, 16);
1902 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1903 e->probability = profile_probability::guessed_always ()
1904 .apply_scale (1, 16);
1905 e->count = count.apply_scale (1, 16);
1906 make_single_succ_edge (return_bb,
1907 EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
1908 make_single_succ_edge (then_bb, return_bb, EDGE_FALLTHRU);
1909 e = make_edge (else_bb, return_bb, EDGE_FALLTHRU);
1910 e->probability = profile_probability::always ();
1911 e->count = count.apply_scale (1, 16);
1912 bsi = gsi_last_bb (then_bb);
1915 restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
1916 fixed_offset, virtual_offset);
1917 if (true_label)
1919 gimple *stmt;
1920 bsi = gsi_last_bb (else_bb);
1921 stmt = gimple_build_assign (restmp,
1922 build_zero_cst (TREE_TYPE (restmp)));
1923 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1924 bsi = gsi_last_bb (return_bb);
1927 else
1928 gimple_call_set_tail (call, true);
1930 /* Build return value. */
1931 if (!DECL_BY_REFERENCE (resdecl))
1932 ret = gimple_build_return (restmp);
1933 else
1934 ret = gimple_build_return (resdecl);
1935 gimple_return_set_retbnd (ret, resbnd);
1937 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
1939 else
1941 gimple_call_set_tail (call, true);
1942 remove_edge (single_succ_edge (bb));
1945 cfun->gimple_df->in_ssa_p = true;
1946 profile_status_for_fn (cfun)
1947 = count.initialized_p () ? PROFILE_READ : PROFILE_GUESSED;
1948 /* FIXME: C++ FE should stop setting TREE_ASM_WRITTEN on thunks. */
1949 TREE_ASM_WRITTEN (thunk_fndecl) = false;
1950 delete_unreachable_blocks ();
1951 update_ssa (TODO_update_ssa);
1952 checking_verify_flow_info ();
1953 free_dominance_info (CDI_DOMINATORS);
1955 /* Since we want to emit the thunk, we explicitly mark its name as
1956 referenced. */
1957 thunk.thunk_p = false;
1958 lowered = true;
1959 bitmap_obstack_release (NULL);
1961 current_function_decl = NULL;
1962 set_cfun (NULL);
1963 return true;
1966 /* Assemble thunks and aliases associated with the node. */
1968 void
1969 cgraph_node::assemble_thunks_and_aliases (void)
1971 cgraph_edge *e;
1972 ipa_ref *ref;
1974 for (e = callers; e;)
1975 if (e->caller->thunk.thunk_p
1976 && !e->caller->global.inlined_to
1977 && !e->caller->thunk.add_pointer_bounds_args)
1979 cgraph_node *thunk = e->caller;
1981 e = e->next_caller;
1982 thunk->expand_thunk (true, false);
1983 thunk->assemble_thunks_and_aliases ();
1985 else
1986 e = e->next_caller;
1988 FOR_EACH_ALIAS (this, ref)
1990 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
1991 if (!alias->transparent_alias)
1993 bool saved_written = TREE_ASM_WRITTEN (decl);
1995 /* Force assemble_alias to really output the alias this time instead
1996 of buffering it in the alias pairs list. */
1997 TREE_ASM_WRITTEN (decl) = 1;
1998 do_assemble_alias (alias->decl,
1999 DECL_ASSEMBLER_NAME (decl));
2000 alias->assemble_thunks_and_aliases ();
2001 TREE_ASM_WRITTEN (decl) = saved_written;
2006 /* Expand function specified by node. */
2008 void
2009 cgraph_node::expand (void)
2011 location_t saved_loc;
2013 /* We ought not to compile any inline clones. */
2014 gcc_assert (!global.inlined_to);
2016 /* __RTL functions are compiled as soon as they are parsed, so don't
2017 do it again. */
2018 if (native_rtl_p ())
2019 return;
2021 announce_function (decl);
2022 process = 0;
2023 gcc_assert (lowered);
2024 get_untransformed_body ();
2026 /* Generate RTL for the body of DECL. */
2028 timevar_push (TV_REST_OF_COMPILATION);
2030 gcc_assert (symtab->global_info_ready);
2032 /* Initialize the default bitmap obstack. */
2033 bitmap_obstack_initialize (NULL);
2035 /* Initialize the RTL code for the function. */
2036 saved_loc = input_location;
2037 input_location = DECL_SOURCE_LOCATION (decl);
2039 gcc_assert (DECL_STRUCT_FUNCTION (decl));
2040 push_cfun (DECL_STRUCT_FUNCTION (decl));
2041 init_function_start (decl);
2043 gimple_register_cfg_hooks ();
2045 bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation*/
2047 execute_all_ipa_transforms ();
2049 /* Perform all tree transforms and optimizations. */
2051 /* Signal the start of passes. */
2052 invoke_plugin_callbacks (PLUGIN_ALL_PASSES_START, NULL);
2054 execute_pass_list (cfun, g->get_passes ()->all_passes);
2056 /* Signal the end of passes. */
2057 invoke_plugin_callbacks (PLUGIN_ALL_PASSES_END, NULL);
2059 bitmap_obstack_release (&reg_obstack);
2061 /* Release the default bitmap obstack. */
2062 bitmap_obstack_release (NULL);
2064 /* If requested, warn about function definitions where the function will
2065 return a value (usually of some struct or union type) which itself will
2066 take up a lot of stack space. */
2067 if (warn_larger_than && !DECL_EXTERNAL (decl) && TREE_TYPE (decl))
2069 tree ret_type = TREE_TYPE (TREE_TYPE (decl));
2071 if (ret_type && TYPE_SIZE_UNIT (ret_type)
2072 && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
2073 && 0 < compare_tree_int (TYPE_SIZE_UNIT (ret_type),
2074 larger_than_size))
2076 unsigned int size_as_int
2077 = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));
2079 if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
2080 warning (OPT_Wlarger_than_, "size of return value of %q+D is %u bytes",
2081 decl, size_as_int);
2082 else
2083 warning (OPT_Wlarger_than_, "size of return value of %q+D is larger than %wd bytes",
2084 decl, larger_than_size);
2088 gimple_set_body (decl, NULL);
2089 if (DECL_STRUCT_FUNCTION (decl) == 0
2090 && !cgraph_node::get (decl)->origin)
2092 /* Stop pointing to the local nodes about to be freed.
2093 But DECL_INITIAL must remain nonzero so we know this
2094 was an actual function definition.
2095 For a nested function, this is done in c_pop_function_context.
2096 If rest_of_compilation set this to 0, leave it 0. */
2097 if (DECL_INITIAL (decl) != 0)
2098 DECL_INITIAL (decl) = error_mark_node;
2101 input_location = saved_loc;
2103 ggc_collect ();
2104 timevar_pop (TV_REST_OF_COMPILATION);
2106 /* Make sure that BE didn't give up on compiling. */
2107 gcc_assert (TREE_ASM_WRITTEN (decl));
2108 if (cfun)
2109 pop_cfun ();
2111 /* It would make a lot more sense to output thunks before the function body to
2112 get more forward and fewer backward jumps. This however would require solving
2113 the problem with comdats. See PR48668. Also aliases must come after the
2114 function itself to make one-pass assemblers, like the one on AIX, happy.
2115 See PR 50689. FIXME: Perhaps thunks should be moved before the function IFF
2116 they are not in comdat groups. */
2117 assemble_thunks_and_aliases ();
2118 release_body ();
2119 /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
2120 points to the dead function body. */
2121 remove_callees ();
2122 remove_all_references ();
2125 /* Node comparator responsible for ordering functions by the time at
2126 which they were first executed (their time profile). */
2128 static int
2129 node_cmp (const void *pa, const void *pb)
2131 const cgraph_node *a = *(const cgraph_node * const *) pa;
2132 const cgraph_node *b = *(const cgraph_node * const *) pb;
2134 /* Functions with a time profile must come before those without one. */
2135 if (!a->tp_first_run || !b->tp_first_run)
2136 return a->tp_first_run - b->tp_first_run;
2138 return a->tp_first_run != b->tp_first_run
2139 ? b->tp_first_run - a->tp_first_run
2140 : b->order - a->order;
2143 /* Expand all functions that must be output.
2145 Attempt to topologically sort the nodes so that a function is output when
2146 all of its called functions are already assembled, allowing data to be
2147 propagated across the callgraph. Use a stack to get a smaller distance
2148 between a function and its callees (later we may choose to use a more
2149 sophisticated algorithm for function reordering; we will likely want
2150 to use subsections to make the output functions appear in top-down
2151 order). */
2153 static void
2154 expand_all_functions (void)
2156 cgraph_node *node;
2157 cgraph_node **order = XCNEWVEC (cgraph_node *,
2158 symtab->cgraph_count);
2159 unsigned int expanded_func_count = 0, profiled_func_count = 0;
2160 int order_pos, new_order_pos = 0;
2161 int i;
2163 order_pos = ipa_reverse_postorder (order);
2164 gcc_assert (order_pos == symtab->cgraph_count);
2166 /* The garbage collector may remove inline clones that we eliminate during
2167 optimization, so we must be sure not to reference them. */
2168 for (i = 0; i < order_pos; i++)
2169 if (order[i]->process)
2170 order[new_order_pos++] = order[i];
2172 if (flag_profile_reorder_functions)
2173 qsort (order, new_order_pos, sizeof (cgraph_node *), node_cmp);
2175 for (i = new_order_pos - 1; i >= 0; i--)
2177 node = order[i];
2179 if (node->process)
2181 expanded_func_count++;
2182 if(node->tp_first_run)
2183 profiled_func_count++;
2185 if (symtab->dump_file)
2186 fprintf (symtab->dump_file,
2187 "Time profile order in expand_all_functions:%s:%d\n",
2188 node->asm_name (), node->tp_first_run);
2189 node->process = 0;
2190 node->expand ();
2194 if (dump_file)
2195 fprintf (dump_file, "Expanded functions with time profile (%s):%u/%u\n",
2196 main_input_filename, profiled_func_count, expanded_func_count);
2198 if (symtab->dump_file && flag_profile_reorder_functions)
2199 fprintf (symtab->dump_file, "Expanded functions with time profile:%u/%u\n",
2200 profiled_func_count, expanded_func_count);
2202 symtab->process_new_functions ();
2203 free_gimplify_stack ();
2205 free (order);
2208 /* This is used to sort the node types by the cgraph order number. */
2210 enum cgraph_order_sort_kind
2212 ORDER_UNDEFINED = 0,
2213 ORDER_FUNCTION,
2214 ORDER_VAR,
2215 ORDER_VAR_UNDEF,
2216 ORDER_ASM
2219 struct cgraph_order_sort
2221 enum cgraph_order_sort_kind kind;
2222 union
2224 cgraph_node *f;
2225 varpool_node *v;
2226 asm_node *a;
2227 } u;
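/* In output_in_order below the nodes[] array is indexed by each symbol's
   order number, so slot i describes whichever function, variable or
   toplevel asm statement was assigned order i (or stays ORDER_UNDEFINED).  */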
2230 /* Output all functions, variables, and asm statements in the order
2231 according to their order fields, which is the order in which they
2232 appeared in the file. This implements -fno-toplevel-reorder. In
2233 this mode we may output functions and variables which don't really
2234 need to be output. */
2236 static void
2237 output_in_order (void)
2239 int max;
2240 cgraph_order_sort *nodes;
2241 int i;
2242 cgraph_node *pf;
2243 varpool_node *pv;
2244 asm_node *pa;
2245 max = symtab->order;
2246 nodes = XCNEWVEC (cgraph_order_sort, max);
2248 FOR_EACH_DEFINED_FUNCTION (pf)
2250 if (pf->process && !pf->thunk.thunk_p && !pf->alias)
2252 if (!pf->no_reorder)
2253 continue;
2254 i = pf->order;
2255 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
2256 nodes[i].kind = ORDER_FUNCTION;
2257 nodes[i].u.f = pf;
2261 /* There is a similar loop in symbol_table::output_variables.
2262 Please keep them in sync. */
2263 FOR_EACH_VARIABLE (pv)
2265 if (!pv->no_reorder)
2266 continue;
2267 if (DECL_HARD_REGISTER (pv->decl)
2268 || DECL_HAS_VALUE_EXPR_P (pv->decl))
2269 continue;
2270 i = pv->order;
2271 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
2272 nodes[i].kind = pv->definition ? ORDER_VAR : ORDER_VAR_UNDEF;
2273 nodes[i].u.v = pv;
2276 for (pa = symtab->first_asm_symbol (); pa; pa = pa->next)
2278 i = pa->order;
2279 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
2280 nodes[i].kind = ORDER_ASM;
2281 nodes[i].u.a = pa;
2284 /* In toplevel reorder mode we output all statics; mark them as needed. */
2286 for (i = 0; i < max; ++i)
2287 if (nodes[i].kind == ORDER_VAR)
2288 nodes[i].u.v->finalize_named_section_flags ();
2290 for (i = 0; i < max; ++i)
2292 switch (nodes[i].kind)
2294 case ORDER_FUNCTION:
2295 nodes[i].u.f->process = 0;
2296 nodes[i].u.f->expand ();
2297 break;
2299 case ORDER_VAR:
2300 nodes[i].u.v->assemble_decl ();
2301 break;
2303 case ORDER_VAR_UNDEF:
2304 assemble_undefined_decl (nodes[i].u.v->decl);
2305 break;
2307 case ORDER_ASM:
2308 assemble_asm (nodes[i].u.a->asm_str);
2309 break;
2311 case ORDER_UNDEFINED:
2312 break;
2314 default:
2315 gcc_unreachable ();
2319 symtab->clear_asm_symbols ();
2321 free (nodes);
2324 static void
2325 ipa_passes (void)
2327 gcc::pass_manager *passes = g->get_passes ();
2329 set_cfun (NULL);
2330 current_function_decl = NULL;
2331 gimple_register_cfg_hooks ();
2332 bitmap_obstack_initialize (NULL);
2334 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);
2336 if (!in_lto_p)
2338 execute_ipa_pass_list (passes->all_small_ipa_passes);
2339 if (seen_error ())
2340 return;
2343 /* This extra symtab_remove_unreachable_nodes pass tends to catch some
2344 devirtualization and other changes for which the removal needs to iterate. */
2345 symtab->remove_unreachable_nodes (symtab->dump_file);
2347 /* If pass_all_early_optimizations was not scheduled, the state of
2348 the cgraph will not be properly updated. Update it now. */
2349 if (symtab->state < IPA_SSA)
2350 symtab->state = IPA_SSA;
2352 if (!in_lto_p)
2354 /* Generate coverage variables and constructors. */
2355 coverage_finish ();
2357 /* Process new functions added. */
2358 set_cfun (NULL);
2359 current_function_decl = NULL;
2360 symtab->process_new_functions ();
2362 execute_ipa_summary_passes
2363 ((ipa_opt_pass_d *) passes->all_regular_ipa_passes);
2366 /* Some targets need to handle LTO assembler output specially. */
2367 if (flag_generate_lto || flag_generate_offload)
2368 targetm.asm_out.lto_start ();
2370 if (!in_lto_p)
2372 if (g->have_offload)
2374 section_name_prefix = OFFLOAD_SECTION_NAME_PREFIX;
2375 lto_stream_offload_p = true;
2376 ipa_write_summaries ();
2377 lto_stream_offload_p = false;
2379 if (flag_lto)
2381 section_name_prefix = LTO_SECTION_NAME_PREFIX;
2382 lto_stream_offload_p = false;
2383 ipa_write_summaries ();
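/* Note: when both offloading and LTO are enabled, the summaries above are
   streamed twice, first into the offload sections
   (OFFLOAD_SECTION_NAME_PREFIX) and then into the regular LTO sections
   (LTO_SECTION_NAME_PREFIX).  */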
2387 if (flag_generate_lto || flag_generate_offload)
2388 targetm.asm_out.lto_end ();
2390 if (!flag_ltrans && (in_lto_p || !flag_lto || flag_fat_lto_objects))
2391 execute_ipa_pass_list (passes->all_regular_ipa_passes);
2392 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
2394 bitmap_obstack_release (NULL);
2398 /* Return the identifier of the target that DECL's "alias" attribute refers to. */
2400 static tree
2401 get_alias_symbol (tree decl)
2403 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
2404 return get_identifier (TREE_STRING_POINTER
2405 (TREE_VALUE (TREE_VALUE (alias))));
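/* Hypothetical example (not from this file): given
       void f (void) __attribute__ ((weak, alias ("real_f")));
   the "alias" attribute's argument is the string "real_f" and the function
   above returns the corresponding identifier.  */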
2409 /* Weakrefs may be associated with external decls and thus not output
2410 at expansion time. Emit all necessary aliases. */
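/* Hypothetical example of a declaration handled here (not taken from this
   file):
       static void f (void) __attribute__ ((weakref ("ext_fn")));
   "ext_fn" need not be defined in this compilation unit, so the alias for f
   is emitted now rather than when a definition is expanded.  */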
2412 void
2413 symbol_table::output_weakrefs (void)
2415 symtab_node *node;
2416 cgraph_node *cnode;
2417 FOR_EACH_SYMBOL (node)
2418 if (node->alias
2419 && !TREE_ASM_WRITTEN (node->decl)
2420 && (!(cnode = dyn_cast <cgraph_node *> (node))
2421 || !cnode->instrumented_version
2422 || !TREE_ASM_WRITTEN (cnode->instrumented_version->decl))
2423 && node->weakref)
2425 tree target;
2427 /* Weakrefs are special in not requiring a target definition in the
2428 current compilation unit, so it is a bit hard to work out what we
2429 want to alias.
2430 When the alias target is defined, we need to fetch it from the symtab
2431 reference; otherwise it is pointed to by alias_target. */
2432 if (node->alias_target)
2433 target = (DECL_P (node->alias_target)
2434 ? DECL_ASSEMBLER_NAME (node->alias_target)
2435 : node->alias_target);
2436 else if (node->analyzed)
2437 target = DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl);
2438 else
2440 gcc_unreachable ();
2441 target = get_alias_symbol (node->decl);
2443 do_assemble_alias (node->decl, target);
2447 /* Perform simple optimizations based on callgraph. */
2449 void
2450 symbol_table::compile (void)
2452 if (seen_error ())
2453 return;
2455 symtab_node::checking_verify_symtab_nodes ();
2457 timevar_push (TV_CGRAPHOPT);
2458 if (pre_ipa_mem_report)
2460 fprintf (stderr, "Memory consumption before IPA\n");
2461 dump_memory_report (false);
2463 if (!quiet_flag)
2464 fprintf (stderr, "Performing interprocedural optimizations\n");
2465 state = IPA;
2467 /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE. */
2468 if (flag_generate_lto || flag_generate_offload)
2469 lto_streamer_hooks_init ();
2471 /* Don't run the IPA passes if there were any error or sorry messages. */
2472 if (!seen_error ())
2473 ipa_passes ();
2475 /* Do nothing else if any IPA pass found errors or if we are just streaming LTO. */
2476 if (seen_error ()
2477 || (!in_lto_p && flag_lto && !flag_fat_lto_objects))
2479 timevar_pop (TV_CGRAPHOPT);
2480 return;
2483 global_info_ready = true;
2484 if (dump_file)
2486 fprintf (dump_file, "Optimized ");
2487 symtab->dump (dump_file);
2489 if (post_ipa_mem_report)
2491 fprintf (stderr, "Memory consumption after IPA\n");
2492 dump_memory_report (false);
2494 timevar_pop (TV_CGRAPHOPT);
2496 /* Output everything. */
2497 (*debug_hooks->assembly_start) ();
2498 if (!quiet_flag)
2499 fprintf (stderr, "Assembling functions:\n");
2500 symtab_node::checking_verify_symtab_nodes ();
2502 bitmap_obstack_initialize (NULL);
2503 execute_ipa_pass_list (g->get_passes ()->all_late_ipa_passes);
2504 bitmap_obstack_release (NULL);
2505 mark_functions_to_output ();
2507 /* When weakref support is missing, we automatically translate all
2508 references to NODE to references to its ultimate alias target.
2509 The renaming mechanism uses the flag IDENTIFIER_TRANSPARENT_ALIAS and
2510 TREE_CHAIN.
2512 Set up this mapping before we output any assembler, but once we are sure
2513 that all symbol renaming is done.
2515 FIXME: All this ugliness can go away if we just do renaming at the GIMPLE
2516 level by physically rewriting the IL. At the moment we can only redirect
2517 calls, so we need infrastructure for renaming references as well. */
2518 #ifndef ASM_OUTPUT_WEAKREF
2519 symtab_node *node;
2521 FOR_EACH_SYMBOL (node)
2522 if (node->alias
2523 && lookup_attribute ("weakref", DECL_ATTRIBUTES (node->decl)))
2525 IDENTIFIER_TRANSPARENT_ALIAS
2526 (DECL_ASSEMBLER_NAME (node->decl)) = 1;
2527 TREE_CHAIN (DECL_ASSEMBLER_NAME (node->decl))
2528 = (node->alias_target ? node->alias_target
2529 : DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl));
2531 #endif
2533 state = EXPANSION;
2535 /* First output the asm statements and any ordered symbols. The process
2536 flag is cleared for these nodes, so we skip them later. */
2537 output_in_order ();
2538 expand_all_functions ();
2539 output_variables ();
2541 process_new_functions ();
2542 state = FINISHED;
2543 output_weakrefs ();
2545 if (dump_file)
2547 fprintf (dump_file, "\nFinal ");
2548 symtab->dump (dump_file);
2550 if (!flag_checking)
2551 return;
2552 symtab_node::verify_symtab_nodes ();
2553 /* Double check that all inline clones are gone and that all
2554 function bodies have been released from memory. */
2555 if (!seen_error ())
2557 cgraph_node *node;
2558 bool error_found = false;
2560 FOR_EACH_DEFINED_FUNCTION (node)
2561 if (node->global.inlined_to
2562 || gimple_has_body_p (node->decl))
2564 error_found = true;
2565 node->debug ();
2567 if (error_found)
2568 internal_error ("nodes with unreleased memory found");
2573 /* Analyze the whole compilation unit once it is parsed completely. */
2575 void
2576 symbol_table::finalize_compilation_unit (void)
2578 timevar_push (TV_CGRAPH);
2580 /* If we're here there's no current function anymore. Some frontends
2581 are lazy in clearing these. */
2582 current_function_decl = NULL;
2583 set_cfun (NULL);
2585 /* Do not skip analyzing the functions if there were errors; we would
2586 miss diagnostics for subsequent functions otherwise. */
2588 /* Emit size functions we didn't inline. */
2589 finalize_size_functions ();
2591 /* Mark alias targets necessary and emit diagnostics. */
2592 handle_alias_pairs ();
2594 if (!quiet_flag)
2596 fprintf (stderr, "\nAnalyzing compilation unit\n");
2597 fflush (stderr);
2600 if (flag_dump_passes)
2601 dump_passes ();
2603 /* Gimplify and lower all functions, compute reachability and
2604 remove unreachable nodes. */
2605 analyze_functions (/*first_time=*/true);
2607 /* Mark alias targets necessary and emit diagnostics. */
2608 handle_alias_pairs ();
2610 /* Gimplify and lower thunks. */
2611 analyze_functions (/*first_time=*/false);
2613 /* Offloading requires LTO infrastructure. */
2614 if (!in_lto_p && g->have_offload)
2615 flag_generate_offload = 1;
2617 if (!seen_error ())
2619 /* Emit early debug for reachable functions, and by consequence,
2620 locally scoped symbols. */
2621 struct cgraph_node *cnode;
2622 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (cnode)
2623 (*debug_hooks->early_global_decl) (cnode->decl);
2625 /* Clean up anything that needs cleaning up after initial debug
2626 generation. */
2627 (*debug_hooks->early_finish) (main_input_filename);
2630 /* Finally drive the pass manager. */
2631 compile ();
2633 timevar_pop (TV_CGRAPH);
2636 /* Reset all state within cgraphunit.c so that we can rerun the compiler
2637 within the same process. For use by toplev::finalize. */
2639 void
2640 cgraphunit_c_finalize (void)
2642 gcc_assert (cgraph_new_nodes.length () == 0);
2643 cgraph_new_nodes.truncate (0);
2645 vtable_entry_type = NULL;
2646 queued_nodes = &symtab_terminator;
2648 first_analyzed = NULL;
2649 first_analyzed_var = NULL;
2652 /* Create a wrapper from this cgraph_node to the TARGET node. A thunk is
2653 used for this kind of wrapper method. */
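/* For instance (an illustrative use, not asserted by this file), IPA
   identical code folding may call create_wrapper to replace the body of a
   function found equivalent to TARGET with a thunk-style call to TARGET.  */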
2655 void
2656 cgraph_node::create_wrapper (cgraph_node *target)
2658 /* Preserve DECL_RESULT so we get the right by-reference flag. */
2659 tree decl_result = DECL_RESULT (decl);
2661 /* Remove the function's body but keep the arguments to be reused
2662 for the thunk. */
2663 release_body (true);
2664 reset ();
2666 DECL_UNINLINABLE (decl) = false;
2667 DECL_RESULT (decl) = decl_result;
2668 DECL_INITIAL (decl) = NULL;
2669 allocate_struct_function (decl, false);
2670 set_cfun (NULL);
2672 /* Turn the alias into a thunk and expand it into GIMPLE representation. */
2673 definition = true;
2675 memset (&thunk, 0, sizeof (cgraph_thunk_info));
2676 thunk.thunk_p = true;
2677 create_edge (target, NULL, count, CGRAPH_FREQ_BASE);
2678 callees->can_throw_external = !TREE_NOTHROW (target->decl);
2680 tree arguments = DECL_ARGUMENTS (decl);
2682 while (arguments)
2684 TREE_ADDRESSABLE (arguments) = false;
2685 arguments = TREE_CHAIN (arguments);
2688 expand_thunk (false, true);
2690 /* Inline summary set-up. */
2691 analyze ();
2692 inline_analyze_function (this);
2695 #include "gt-cgraphunit.h"