1 /* Driver of optimization process
2 Copyright (C) 2003-2014 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* This module implements main driver of compilation process.
23      The main purpose of this file is to act as an interface between the
24      tree-based front ends and the back end.
26 The front-end is supposed to use following functionality:
28 - cgraph_finalize_function
30        This function is called once the front end has parsed the whole body of
31        the function and it is certain that neither the function body nor the declaration will change.
33 (There is one exception needed for implementing GCC extern inline
34 function.)
36 - varpool_finalize_decl
38        This function has the same behavior as the above but is used for static
39 variables.
41 - add_asm_node
43 Insert new toplevel ASM statement
45 - finalize_compilation_unit
47        This function is called once the (source level) compilation unit is finalized
48 and it will no longer change.
50 The symbol table is constructed starting from the trivially needed
51 symbols finalized by the frontend. Functions are lowered into
52 GIMPLE representation and callgraph/reference lists are constructed.
53 Those are used to discover other necessary functions and variables.
55 At the end the bodies of unreachable functions are removed.
57 The function can be called multiple times when multiple source level
58 compilation units are combined.
60 - compile
62 This passes control to the back-end. Optimizations are performed and
63 final assembler is generated. This is done in the following way. Note
64 that with link time optimization the process is split into three
65 stages (compile time, linktime analysis and parallel linktime as
66        indicated below).
68 Compile time:
70 1) Inter-procedural optimization.
71 (ipa_passes)
73 This part is further split into:
75 a) early optimizations. These are local passes executed in
76 the topological order on the callgraph.
78            The purpose of early optimizations is to optimize away simple
79 things that may otherwise confuse IP analysis. Very simple
80            propagation across the callgraph is done, i.e. to discover
81            functions without side effects, and simple inlining is performed.
83 b) early small interprocedural passes.
85 Those are interprocedural passes executed only at compilation
86            time. These include, for example, transactional memory lowering,
87 unreachable code removal and other simple transformations.
89 c) IP analysis stage. All interprocedural passes do their
90 analysis.
92 Interprocedural passes differ from small interprocedural
93 passes by their ability to operate across whole program
94 at linktime. Their analysis stage is performed early to
95 both reduce linking times and linktime memory usage by
96 not having to represent whole program in memory.
98          d) LTO streaming. When doing LTO, everything important gets
99 streamed into the object file.
101      Compile time and/or linktime analysis stage (WPA):
103           At linktime units get streamed back and the symbol table is
104           merged. Function bodies are not streamed in and are not
105           available.
106 e) IP propagation stage. All IP passes execute their
107 IP propagation. This is done based on the earlier analysis
108 without having function bodies at hand.
109 f) Ltrans streaming. When doing WHOPR LTO, the program
110           is partitioned and streamed into multiple object files.
112 Compile time and/or parallel linktime stage (ltrans)
114 Each of the object files is streamed back and compiled
115           separately. Now the function bodies become available
116 again.
118 2) Virtual clone materialization
119 (cgraph_materialize_clone)
121           IP passes can produce copies of existing functions (such
122 as versioned clones or inline clones) without actually
123 manipulating their bodies by creating virtual clones in
124 the callgraph. At this time the virtual clones are
125           turned into real functions.
126 3) IP transformation
128 All IP passes transform function bodies based on earlier
129 decision of the IP propagation.
131 4) late small IP passes
133 Simple IP passes working within single program partition.
135 5) Expansion
136 (expand_all_functions)
138           At this stage functions that need to be output into
139           assembler are identified and compiled in topological order.
140 6) Output of variables and aliases
141           Now it is known which variable references were not optimized
142           out and thus all variables are output to the file.
144 Note that with -fno-toplevel-reorder passes 5 and 6
145           are combined together in output_in_order.
147 Finally there are functions to manipulate the callgraph from
148        the back end.
149 - cgraph_add_new_function is used to add backend produced
150 functions introduced after the unit is finalized.
151        The functions are enqueued for later processing and inserted
152 into callgraph with cgraph_process_new_functions.
154 - cgraph_function_versioning
156        produces a copy of a function into a new one (a version)
157        and applies simple transformations. */
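/* A minimal usage sketch (illustration only, not part of the driver): the
   order in which a front end is expected to call the interface described
   above.  The function and variable names used here (example_finish_unit,
   parsed_fn, static_var, asm_str) are hypothetical; the calls themselves
   match the definitions in this file and in the symbol table code.  */
#if 0
static void
example_finish_unit (tree parsed_fn, tree static_var, tree asm_str)
{
  /* The body of PARSED_FN is complete and will no longer change.  */
  cgraph_finalize_function (parsed_fn, /*no_collect=*/false);

  /* Likewise for a static variable.  */
  varpool_node::finalize_decl (static_var);

  /* Register a toplevel asm ("...") statement.  */
  add_asm_node (asm_str);

  /* The whole translation unit is parsed; hand it over to the driver,
     which builds the symbol table, runs the IPA passes and expands
     everything as outlined above.  */
  finalize_compilation_unit ();
}
#endif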
160 #include "config.h"
161 #include "system.h"
162 #include "coretypes.h"
163 #include "tm.h"
164 #include "tree.h"
165 #include "varasm.h"
166 #include "stor-layout.h"
167 #include "stringpool.h"
168 #include "output.h"
169 #include "rtl.h"
170 #include "basic-block.h"
171 #include "tree-ssa-alias.h"
172 #include "internal-fn.h"
173 #include "gimple-fold.h"
174 #include "gimple-expr.h"
175 #include "is-a.h"
176 #include "gimple.h"
177 #include "gimplify.h"
178 #include "gimple-iterator.h"
179 #include "gimplify-me.h"
180 #include "gimple-ssa.h"
181 #include "tree-cfg.h"
182 #include "tree-into-ssa.h"
183 #include "tree-ssa.h"
184 #include "tree-inline.h"
185 #include "langhooks.h"
186 #include "toplev.h"
187 #include "flags.h"
188 #include "debug.h"
189 #include "target.h"
190 #include "diagnostic.h"
191 #include "params.h"
192 #include "fibheap.h"
193 #include "intl.h"
194 #include "function.h"
195 #include "ipa-prop.h"
196 #include "tree-iterator.h"
197 #include "tree-pass.h"
198 #include "tree-dump.h"
199 #include "gimple-pretty-print.h"
200 #include "output.h"
201 #include "coverage.h"
202 #include "plugin.h"
203 #include "ipa-inline.h"
204 #include "ipa-utils.h"
205 #include "lto-streamer.h"
206 #include "except.h"
207 #include "cfgloop.h"
208 #include "regset.h" /* FIXME: For reg_obstack. */
209 #include "context.h"
210 #include "pass_manager.h"
211 #include "tree-nested.h"
212 #include "gimplify.h"
213 #include "dbgcnt.h"
215 /* Queue of cgraph nodes scheduled to be added into cgraph. This is a
216 secondary queue used during optimization to accommodate passes that
217 may generate new functions that need to be optimized and expanded. */
218 vec<cgraph_node *> cgraph_new_nodes;
220 static void expand_all_functions (void);
221 static void mark_functions_to_output (void);
222 static void expand_function (struct cgraph_node *);
223 static void handle_alias_pairs (void);
225 FILE *cgraph_dump_file;
227 /* Linked list of cgraph asm nodes. */
228 struct asm_node *asm_nodes;
230 /* Last node in cgraph_asm_nodes. */
231 static GTY(()) struct asm_node *asm_last_node;
233 /* Used for vtable lookup in thunk adjusting. */
234 static GTY (()) tree vtable_entry_type;
236 /* Determine if symbol DECL is needed. That is, visible to something
237    either outside this translation unit or to something magic in the system
238    configury.  */
239 bool
240 decide_is_symbol_needed (symtab_node *node)
242 tree decl = node->decl;
244 /* Double check that no one output the function into assembly file
245 early. */
246 gcc_checking_assert (!DECL_ASSEMBLER_NAME_SET_P (decl)
247 || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)));
249 if (!node->definition)
250 return false;
252 if (DECL_EXTERNAL (decl))
253 return false;
255 /* If the user told us it is used, then it must be so. */
256 if (node->force_output)
257 return true;
259 /* ABI forced symbols are needed when they are external. */
260 if (node->forced_by_abi && TREE_PUBLIC (decl))
261 return true;
263 /* Keep constructors, destructors and virtual functions. */
264 if (TREE_CODE (decl) == FUNCTION_DECL
265 && (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl)))
266 return true;
268 /* Externally visible variables must be output. The exception is
269 COMDAT variables that must be output only when they are needed. */
270 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
271 return true;
273 return false;
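/* Summary of the checks above: a symbol is trivially needed only when it
   has a definition and is not DECL_EXTERNAL, and in addition is either
   forced by the user (force_output), ABI-forced and public, a static
   constructor/destructor, or a public non-COMDAT symbol.  */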
276 /* Head and terminator of the queue of nodes to be processed while building
277 callgraph. */
279 static symtab_node symtab_terminator;
280 static symtab_node *queued_nodes = &symtab_terminator;
282 /* Add NODE to queue starting at QUEUED_NODES.
283    The queue is linked via AUX pointers and terminated by a pointer to the SYMTAB_TERMINATOR node.  */
285 static void
286 enqueue_node (symtab_node *node)
288 if (node->aux)
289 return;
290 gcc_checking_assert (queued_nodes);
291 node->aux = queued_nodes;
292 queued_nodes = node;
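/* The queue built here is drained with the matching pattern (see
   analyze_functions below):

     while (queued_nodes != &symtab_terminator)
       {
         symtab_node *node = queued_nodes;
         queued_nodes = (symtab_node *) queued_nodes->aux;
         ... process NODE ...
       }  */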
295 /* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
296    functions into the callgraph in such a way that they look like ordinary
297    reachable functions inserted into the callgraph already at construction time. */
299 void
300 cgraph_process_new_functions (void)
302 tree fndecl;
304 if (!cgraph_new_nodes.exists ())
305 return;
307 handle_alias_pairs ();
308   /* Note that this queue may grow as it is being processed, as the new
309 functions may generate new ones. */
310 for (unsigned i = 0; i < cgraph_new_nodes.length (); i++)
312 cgraph_node *node = cgraph_new_nodes[i];
313 fndecl = node->decl;
314 switch (cgraph_state)
316 case CGRAPH_STATE_CONSTRUCTION:
317 /* At construction time we just need to finalize function and move
318 it into reachable functions list. */
320 cgraph_finalize_function (fndecl, false);
321 node->call_function_insertion_hooks ();
322 enqueue_node (node);
323 break;
325 case CGRAPH_STATE_IPA:
326 case CGRAPH_STATE_IPA_SSA:
327     /* When IPA optimization has already started, do all essential
328        transformations that have already been performed on the whole
329 cgraph but not on this function. */
331 gimple_register_cfg_hooks ();
332 if (!node->analyzed)
333 node->analyze ();
334 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
335 if (cgraph_state == CGRAPH_STATE_IPA_SSA
336 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
337 g->get_passes ()->execute_early_local_passes ();
338 else if (inline_summary_vec != NULL)
339 compute_inline_parameters (node, true);
340 free_dominance_info (CDI_POST_DOMINATORS);
341 free_dominance_info (CDI_DOMINATORS);
342 pop_cfun ();
343 node->call_function_insertion_hooks ();
344 break;
346 case CGRAPH_STATE_EXPANSION:
347 /* Functions created during expansion shall be compiled
348 directly. */
349 node->process = 0;
350 node->call_function_insertion_hooks ();
351 expand_function (node);
352 break;
354 default:
355 gcc_unreachable ();
356 break;
360 cgraph_new_nodes.release ();
363 /* As a GCC extension we allow redefinition of the function. The
364    semantics when the two bodies differ are not well defined.
365    We replace the old body with the new body, so in unit-at-a-time mode
366    we always use the new body, while in normal mode we may end up with
367    the old body inlined into some functions and the new body expanded and
368    inlined in others.
370    ??? It may make more sense to use one body for inlining and the other
371    body for expanding the function, but this is difficult to do. */
373 void
374 cgraph_node::reset (void)
376 /* If process is set, then we have already begun whole-unit analysis.
377 This is *not* testing for whether we've already emitted the function.
378 That case can be sort-of legitimately seen with real function redefinition
379 errors. I would argue that the front end should never present us with
380 such a case, but don't enforce that for now. */
381 gcc_assert (!process);
383 /* Reset our data structures so we can analyze the function again. */
384 memset (&local, 0, sizeof (local));
385 memset (&global, 0, sizeof (global));
386 memset (&rtl, 0, sizeof (rtl));
387 analyzed = false;
388 definition = false;
389 alias = false;
390 weakref = false;
391 cpp_implicit_alias = false;
393 remove_callees ();
394 remove_all_references ();
397 /* Return true when there are references to NODE. */
399 static bool
400 referred_to_p (symtab_node *node)
402 struct ipa_ref *ref = NULL;
404 /* See if there are any references at all. */
405 if (node->iterate_referring (0, ref))
406 return true;
407 /* For functions check also calls. */
408 cgraph_node *cn = dyn_cast <cgraph_node *> (node);
409 if (cn && cn->callers)
410 return true;
411 return false;
414 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
415 logic in effect. If NO_COLLECT is true, then our caller cannot stand to have
416 the garbage collector run at the moment. We would need to either create
417 a new GC context, or just not compile right now. */
419 void
420 cgraph_finalize_function (tree decl, bool no_collect)
422 struct cgraph_node *node = cgraph_node::get_create (decl);
424 if (node->definition)
426 /* Nested functions should only be defined once. */
427 gcc_assert (!DECL_CONTEXT (decl)
428 || TREE_CODE (DECL_CONTEXT (decl)) != FUNCTION_DECL);
429 node->reset ();
430 node->local.redefined_extern_inline = true;
433 notice_global_symbol (decl);
434 node->definition = true;
435 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
437 /* With -fkeep-inline-functions we are keeping all inline functions except
438 for extern inline ones. */
439 if (flag_keep_inline_functions
440 && DECL_DECLARED_INLINE_P (decl)
441 && !DECL_EXTERNAL (decl)
442 && !DECL_DISREGARD_INLINE_LIMITS (decl))
443 node->force_output = 1;
445 /* When not optimizing, also output the static functions. (see
446 PR24561), but don't do so for always_inline functions, functions
447 declared inline and nested functions. These were optimized out
448 in the original implementation and it is unclear whether we want
449 to change the behavior here. */
450 if ((!optimize
451 && !node->cpp_implicit_alias
452 && !DECL_DISREGARD_INLINE_LIMITS (decl)
453 && !DECL_DECLARED_INLINE_P (decl)
454 && !(DECL_CONTEXT (decl)
455 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL))
456 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
457 node->force_output = 1;
459 /* If we've not yet emitted decl, tell the debug info about it. */
460 if (!TREE_ASM_WRITTEN (decl))
461 (*debug_hooks->deferred_inline_function) (decl);
463 /* Possibly warn about unused parameters. */
464 if (warn_unused_parameter)
465 do_warn_unused_parameter (decl);
467 if (!no_collect)
468 ggc_collect ();
470 if (cgraph_state == CGRAPH_STATE_CONSTRUCTION
471 && (decide_is_symbol_needed (node)
472 || referred_to_p (node)))
473 enqueue_node (node);
476 /* Add the function FNDECL to the call graph.
477    Unlike cgraph_finalize_function, this function is intended to be used
478    by the middle end and allows insertion of a new function at an arbitrary
479    point of compilation. The function can be either in high, low or SSA form
480    GIMPLE.
482    The function is assumed to be reachable and to have its address taken (so no
483    API-breaking optimizations are performed on it).
485    The main work done by this function is to enqueue the function for later
486    processing, to avoid the need for the passes to be re-entrant. */
488 void
489 cgraph_node::add_new_function (tree fndecl, bool lowered)
491 gcc::pass_manager *passes = g->get_passes ();
492 struct cgraph_node *node;
493 switch (cgraph_state)
495 case CGRAPH_STATE_PARSING:
496 cgraph_finalize_function (fndecl, false);
497 break;
498 case CGRAPH_STATE_CONSTRUCTION:
499 /* Just enqueue function to be processed at nearest occurrence. */
500 node = cgraph_node::get_create (fndecl);
501 if (lowered)
502 node->lowered = true;
503 cgraph_new_nodes.safe_push (node);
504 break;
506 case CGRAPH_STATE_IPA:
507 case CGRAPH_STATE_IPA_SSA:
508 case CGRAPH_STATE_EXPANSION:
509 /* Bring the function into finalized state and enqueue for later
510 analyzing and compilation. */
511 node = cgraph_node::get_create (fndecl);
512 node->local.local = false;
513 node->definition = true;
514 node->force_output = true;
515 if (!lowered && cgraph_state == CGRAPH_STATE_EXPANSION)
517 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
518 gimple_register_cfg_hooks ();
519 bitmap_obstack_initialize (NULL);
520 execute_pass_list (cfun, passes->all_lowering_passes);
521 passes->execute_early_local_passes ();
522 bitmap_obstack_release (NULL);
523 pop_cfun ();
525 lowered = true;
527 if (lowered)
528 node->lowered = true;
529 cgraph_new_nodes.safe_push (node);
530 break;
532 case CGRAPH_STATE_FINISHED:
533 /* At the very end of compilation we have to do all the work up
534 to expansion. */
535 node = cgraph_node::create (fndecl);
536 if (lowered)
537 node->lowered = true;
538 node->definition = true;
539 node->analyze ();
540 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
541 gimple_register_cfg_hooks ();
542 bitmap_obstack_initialize (NULL);
543 if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
544 g->get_passes ()->execute_early_local_passes ();
545 bitmap_obstack_release (NULL);
546 pop_cfun ();
547 expand_function (node);
548 break;
550 default:
551 gcc_unreachable ();
554 /* Set a personality if required and we already passed EH lowering. */
555 if (lowered
556 && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
557 == eh_personality_lang))
558 DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
561 /* Add a top-level asm statement to the list. */
563 struct asm_node *
564 add_asm_node (tree asm_str)
566 struct asm_node *node;
568 node = ggc_cleared_alloc<asm_node> ();
569 node->asm_str = asm_str;
570 node->order = symtab_order++;
571 node->next = NULL;
572 if (asm_nodes == NULL)
573 asm_nodes = node;
574 else
575 asm_last_node->next = node;
576 asm_last_node = node;
577 return node;
580 /* Output all asm statements we have stored up to be output. */
582 static void
583 output_asm_statements (void)
585 struct asm_node *can;
587 if (seen_error ())
588 return;
590 for (can = asm_nodes; can; can = can->next)
591 assemble_asm (can->asm_str);
592 asm_nodes = NULL;
595 /* Analyze the function scheduled to be output. */
596 void
597 cgraph_node::analyze (void)
599 tree decl = this->decl;
600 location_t saved_loc = input_location;
601 input_location = DECL_SOURCE_LOCATION (decl);
603 if (thunk.thunk_p)
605 create_edge (cgraph_node::get (thunk.alias),
606 NULL, 0, CGRAPH_FREQ_BASE);
607 if (!expand_thunk (false, false))
609 thunk.alias = NULL;
610 analyzed = true;
611 return;
613 thunk.alias = NULL;
615 if (alias)
616 resolve_alias (cgraph_node::get (alias_target));
617 else if (dispatcher_function)
619 /* Generate the dispatcher body of multi-versioned functions. */
620 struct cgraph_function_version_info *dispatcher_version_info
621 = function_version ();
622 if (dispatcher_version_info != NULL
623 && (dispatcher_version_info->dispatcher_resolver
624 == NULL_TREE))
626 tree resolver = NULL_TREE;
627 gcc_assert (targetm.generate_version_dispatcher_body);
628 resolver = targetm.generate_version_dispatcher_body (this);
629 gcc_assert (resolver != NULL_TREE);
632 else
634 push_cfun (DECL_STRUCT_FUNCTION (decl));
636 assign_assembler_name_if_neeeded (decl);
638 /* Make sure to gimplify bodies only once. During analyzing a
639 function we lower it, which will require gimplified nested
640 functions, so we can end up here with an already gimplified
641 body. */
642 if (!gimple_has_body_p (decl))
643 gimplify_function_tree (decl);
644 dump_function (TDI_generic, decl);
646 /* Lower the function. */
647 if (!lowered)
649 if (nested)
650 lower_nested_functions (decl);
651 gcc_assert (!nested);
653 gimple_register_cfg_hooks ();
654 bitmap_obstack_initialize (NULL);
655 execute_pass_list (cfun, g->get_passes ()->all_lowering_passes);
656 free_dominance_info (CDI_POST_DOMINATORS);
657 free_dominance_info (CDI_DOMINATORS);
658 compact_blocks ();
659 bitmap_obstack_release (NULL);
660 lowered = true;
663 pop_cfun ();
665 analyzed = true;
667 input_location = saved_loc;
670 /* The C++ front end produces same-body aliases all over the place, even before
671    PCH gets streamed out. It relies on us linking the aliases with their
672    functions in order to do the fixups, but ipa-ref is not PCH safe. Consequently
673    we first produce aliases without links, but once the C++ FE is sure it will not
674    stream PCH we build the links via this function. */
676 void
677 cgraph_process_same_body_aliases (void)
679 symtab_node *node;
680 FOR_EACH_SYMBOL (node)
681 if (node->cpp_implicit_alias && !node->analyzed)
682 node->resolve_alias
683 (TREE_CODE (node->alias_target) == VAR_DECL
684 ? (symtab_node *)varpool_node::get_create (node->alias_target)
685 : (symtab_node *)cgraph_node::get_create (node->alias_target));
686 cpp_implicit_aliases_done = true;
689 /* Process attributes common for vars and functions. */
691 static void
692 process_common_attributes (tree decl)
694 tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
696 if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
698 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
699 "%<weakref%> attribute should be accompanied with"
700 " an %<alias%> attribute");
701 DECL_WEAK (decl) = 0;
702 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
703 DECL_ATTRIBUTES (decl));
707 /* Look for externally_visible and used attributes and mark cgraph nodes
708 accordingly.
710 We cannot mark the nodes at the point the attributes are processed (in
711 handle_*_attribute) because the copy of the declarations available at that
712 point may not be canonical. For example, in:
714 void f();
715 void f() __attribute__((used));
717 the declaration we see in handle_used_attribute will be the second
718 declaration -- but the front end will subsequently merge that declaration
719 with the original declaration and discard the second declaration.
721 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
723 void f() {}
724 void f() __attribute__((externally_visible));
726 is valid.
728 So, we walk the nodes at the end of the translation unit, applying the
729 attributes at that point. */
731 static void
732 process_function_and_variable_attributes (struct cgraph_node *first,
733 varpool_node *first_var)
735 struct cgraph_node *node;
736 varpool_node *vnode;
738 for (node = cgraph_first_function (); node != first;
739 node = cgraph_next_function (node))
741 tree decl = node->decl;
742 if (DECL_PRESERVE_P (decl))
743 node->mark_force_output ();
744 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
746 if (! TREE_PUBLIC (node->decl))
747 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
748 "%<externally_visible%>"
749 " attribute have effect only on public objects");
751 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
752 && (node->definition && !node->alias))
754 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
755 "%<weakref%> attribute ignored"
756 " because function is defined");
757 DECL_WEAK (decl) = 0;
758 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
759 DECL_ATTRIBUTES (decl));
762 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
763 && !DECL_DECLARED_INLINE_P (decl)
764 /* redefining extern inline function makes it DECL_UNINLINABLE. */
765 && !DECL_UNINLINABLE (decl))
766 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
767 "always_inline function might not be inlinable");
769 process_common_attributes (decl);
771 for (vnode = varpool_first_variable (); vnode != first_var;
772 vnode = varpool_next_variable (vnode))
774 tree decl = vnode->decl;
775 if (DECL_EXTERNAL (decl)
776 && DECL_INITIAL (decl))
777 varpool_node::finalize_decl (decl);
778 if (DECL_PRESERVE_P (decl))
779 vnode->force_output = true;
780 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
782 if (! TREE_PUBLIC (vnode->decl))
783 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
784 "%<externally_visible%>"
785 " attribute have effect only on public objects");
787 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
788 && vnode->definition
789 && DECL_INITIAL (decl))
791 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
792 "%<weakref%> attribute ignored"
793 " because variable is initialized");
794 DECL_WEAK (decl) = 0;
795 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
796 DECL_ATTRIBUTES (decl));
798 process_common_attributes (decl);
802 /* Mark DECL as finalized. By finalizing the declaration, the front end
803    instructs the middle end to output the variable to the asm file if it is
804    needed or externally visible. */
806 void
807 varpool_node::finalize_decl (tree decl)
809 varpool_node *node = varpool_node::get_create (decl);
811 gcc_assert (TREE_STATIC (decl) || DECL_EXTERNAL (decl));
813 if (node->definition)
814 return;
815 notice_global_symbol (decl);
816 node->definition = true;
817 if (TREE_THIS_VOLATILE (decl) || DECL_PRESERVE_P (decl)
818 /* Traditionally we do not eliminate static variables when not
819          optimizing and when not doing toplevel reorder.  */
820 || (!flag_toplevel_reorder && !DECL_COMDAT (node->decl)
821 && !DECL_ARTIFICIAL (node->decl)))
822 node->force_output = true;
824 if (cgraph_state == CGRAPH_STATE_CONSTRUCTION
825 && (decide_is_symbol_needed (node)
826 || referred_to_p (node)))
827 enqueue_node (node);
828 if (cgraph_state >= CGRAPH_STATE_IPA_SSA)
829 node->analyze ();
830 /* Some frontends produce various interface variables after compilation
831 finished. */
832 if (cgraph_state == CGRAPH_STATE_FINISHED
833 || (!flag_toplevel_reorder && cgraph_state == CGRAPH_STATE_EXPANSION))
834 node->assemble_decl ();
837 /* EDGE is a polymorphic call. Mark all possible targets as reachable
838    and, if there is only one target, perform trivial devirtualization.
839    REACHABLE_CALL_TARGETS collects target lists we already walked to
840    avoid duplicate work. */
842 static void
843 walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
844 struct cgraph_edge *edge)
846 unsigned int i;
847 void *cache_token;
848 bool final;
849 vec <cgraph_node *>targets
850 = possible_polymorphic_call_targets
851 (edge, &final, &cache_token);
853 if (!reachable_call_targets->add (cache_token))
855 if (cgraph_dump_file)
856 dump_possible_polymorphic_call_targets
857 (cgraph_dump_file, edge);
859 for (i = 0; i < targets.length (); i++)
861 /* Do not bother to mark virtual methods in anonymous namespace;
862 either we will find use of virtual table defining it, or it is
863 unused. */
864 if (targets[i]->definition
865 && TREE_CODE
866 (TREE_TYPE (targets[i]->decl))
867 == METHOD_TYPE
868 && !type_in_anonymous_namespace_p
869 (method_class_type
870 (TREE_TYPE (targets[i]->decl))))
871 enqueue_node (targets[i]);
875 /* Very trivial devirtualization; when the type is
876 final or anonymous (so we know all its derivation)
877 and there is only one possible virtual call target,
878 make the edge direct. */
879 if (final)
881 if (targets.length () <= 1 && dbg_cnt (devirt))
883 cgraph_node *target;
884 if (targets.length () == 1)
885 target = targets[0];
886 else
887 target = cgraph_node::create
888 (builtin_decl_implicit (BUILT_IN_UNREACHABLE));
890 if (cgraph_dump_file)
892 fprintf (cgraph_dump_file,
893 "Devirtualizing call: ");
894 print_gimple_stmt (cgraph_dump_file,
895 edge->call_stmt, 0,
896 TDF_SLIM);
898 if (dump_enabled_p ())
900 location_t locus = gimple_location_safe (edge->call_stmt);
901 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
902 "devirtualizing call in %s to %s\n",
903 edge->caller->name (), target->name ());
906 cgraph_make_edge_direct (edge, target);
907 cgraph_redirect_edge_call_stmt_to_callee (edge);
908 if (cgraph_dump_file)
910 fprintf (cgraph_dump_file,
911 "Devirtualized as: ");
912 print_gimple_stmt (cgraph_dump_file,
913 edge->call_stmt, 0,
914 TDF_SLIM);
921 /* Discover all functions and variables that are trivially needed, analyze
922    them as well as all functions and variables referred to by them.  */
924 static void
925 analyze_functions (void)
927 /* Keep track of already processed nodes when called multiple times for
928 intermodule optimization. */
929 static struct cgraph_node *first_analyzed;
930 struct cgraph_node *first_handled = first_analyzed;
931 static varpool_node *first_analyzed_var;
932 varpool_node *first_handled_var = first_analyzed_var;
933 hash_set<void *> reachable_call_targets;
935 symtab_node *node;
936 symtab_node *next;
937 int i;
938 struct ipa_ref *ref;
939 bool changed = true;
940 location_t saved_loc = input_location;
942 bitmap_obstack_initialize (NULL);
943 cgraph_state = CGRAPH_STATE_CONSTRUCTION;
944 input_location = UNKNOWN_LOCATION;
946   /* Ugly, but the fixup cannot happen at the time the same-body alias is created;
947      the C++ FE is confused about the COMDAT groups being right. */
948 if (cpp_implicit_aliases_done)
949 FOR_EACH_SYMBOL (node)
950 if (node->cpp_implicit_alias)
951 node->fixup_same_cpp_alias_visibility (node->get_alias_target ());
952 if (optimize && flag_devirtualize)
953 build_type_inheritance_graph ();
955   /* Analysis adds static variables that in turn add references to new functions,
956      so we need to iterate the process until it stabilizes. */
957 while (changed)
959 changed = false;
960 process_function_and_variable_attributes (first_analyzed,
961 first_analyzed_var);
963 /* First identify the trivially needed symbols. */
964 for (node = symtab_nodes;
965 node != first_analyzed
966 && node != first_analyzed_var; node = node->next)
968 /* Convert COMDAT group designators to IDENTIFIER_NODEs. */
969 node->get_comdat_group_id ();
970 if (decide_is_symbol_needed (node))
972 enqueue_node (node);
973 if (!changed && cgraph_dump_file)
974 fprintf (cgraph_dump_file, "Trivially needed symbols:");
975 changed = true;
976 if (cgraph_dump_file)
977 fprintf (cgraph_dump_file, " %s", node->asm_name ());
978 if (!changed && cgraph_dump_file)
979 fprintf (cgraph_dump_file, "\n");
981 if (node == first_analyzed
982 || node == first_analyzed_var)
983 break;
985 cgraph_process_new_functions ();
986 first_analyzed_var = varpool_first_variable ();
987 first_analyzed = cgraph_first_function ();
989 if (changed && cgraph_dump_file)
990 fprintf (cgraph_dump_file, "\n");
992 /* Lower representation, build callgraph edges and references for all trivially
993      needed symbols and all symbols referred to by them.  */
994 while (queued_nodes != &symtab_terminator)
996 changed = true;
997 node = queued_nodes;
998 queued_nodes = (symtab_node *)queued_nodes->aux;
999 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
1000 if (cnode && cnode->definition)
1002 struct cgraph_edge *edge;
1003 tree decl = cnode->decl;
1005 /* ??? It is possible to create extern inline function
1006 and later using weak alias attribute to kill its body.
1007 See gcc.c-torture/compile/20011119-1.c */
1008 if (!DECL_STRUCT_FUNCTION (decl)
1009 && !cnode->alias
1010 && !cnode->thunk.thunk_p
1011 && !cnode->dispatcher_function)
1013 cnode->reset ();
1014 cnode->local.redefined_extern_inline = true;
1015 continue;
1018 if (!cnode->analyzed)
1019 cnode->analyze ();
1021 for (edge = cnode->callees; edge; edge = edge->next_callee)
1022 if (edge->callee->definition)
1023 enqueue_node (edge->callee);
1024 if (optimize && flag_devirtualize)
1026 struct cgraph_edge *next;
1028 for (edge = cnode->indirect_calls; edge; edge = next)
1030 next = edge->next_callee;
1031 if (edge->indirect_info->polymorphic)
1032 walk_polymorphic_call_targets (&reachable_call_targets,
1033 edge);
1037 /* If decl is a clone of an abstract function,
1038 mark that abstract function so that we don't release its body.
1039 The DECL_INITIAL() of that abstract function declaration
1040 will be later needed to output debug info. */
1041 if (DECL_ABSTRACT_ORIGIN (decl))
1043 struct cgraph_node *origin_node
1044 = cgraph_node::get_create (DECL_ABSTRACT_ORIGIN (decl));
1045 origin_node->used_as_abstract_origin = true;
1048 else
1050 varpool_node *vnode = dyn_cast <varpool_node *> (node);
1051 if (vnode && vnode->definition && !vnode->analyzed)
1052 vnode->analyze ();
1055 if (node->same_comdat_group)
1057 symtab_node *next;
1058 for (next = node->same_comdat_group;
1059 next != node;
1060 next = next->same_comdat_group)
1061 enqueue_node (next);
1063 for (i = 0; node->iterate_reference (i, ref); i++)
1064 if (ref->referred->definition)
1065 enqueue_node (ref->referred);
1066 cgraph_process_new_functions ();
1069 if (optimize && flag_devirtualize)
1070 update_type_inheritance_graph ();
1072 /* Collect entry points to the unit. */
1073 if (cgraph_dump_file)
1075 fprintf (cgraph_dump_file, "\n\nInitial ");
1076 symtab_node::dump_table (cgraph_dump_file);
1079 if (cgraph_dump_file)
1080 fprintf (cgraph_dump_file, "\nRemoving unused symbols:");
1082 for (node = symtab_nodes;
1083 node != first_handled
1084 && node != first_handled_var; node = next)
1086 next = node->next;
1087 if (!node->aux && !referred_to_p (node))
1089 if (cgraph_dump_file)
1090 fprintf (cgraph_dump_file, " %s", node->name ());
1091 node->remove ();
1092 continue;
1094 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
1096 tree decl = node->decl;
1098 if (cnode->definition && !gimple_has_body_p (decl)
1099 && !cnode->alias
1100 && !cnode->thunk.thunk_p)
1101 cnode->reset ();
1103 gcc_assert (!cnode->definition || cnode->thunk.thunk_p
1104 || cnode->alias
1105 || gimple_has_body_p (decl));
1106 gcc_assert (cnode->analyzed == cnode->definition);
1108 node->aux = NULL;
1110 for (;node; node = node->next)
1111 node->aux = NULL;
1112 first_analyzed = cgraph_first_function ();
1113 first_analyzed_var = varpool_first_variable ();
1114 if (cgraph_dump_file)
1116 fprintf (cgraph_dump_file, "\n\nReclaimed ");
1117 symtab_node::dump_table (cgraph_dump_file);
1119 bitmap_obstack_release (NULL);
1120 ggc_collect ();
1121 /* Initialize assembler name hash, in particular we want to trigger C++
1122 mangling and same body alias creation before we free DECL_ARGUMENTS
1123 used by it. */
1124 if (!seen_error ())
1125 symtab_initialize_asm_name_hash ();
1127 input_location = saved_loc;
1130 /* Translate the ugly representation of aliases as alias pairs into a nice
1131    representation in the callgraph. We don't handle all cases yet,
1132 unfortunately. */
1134 static void
1135 handle_alias_pairs (void)
1137 alias_pair *p;
1138 unsigned i;
1140 for (i = 0; alias_pairs && alias_pairs->iterate (i, &p);)
1142 symtab_node *target_node = symtab_node_for_asm (p->target);
1144       /* Weakrefs with a target not defined in the current unit are easy to handle:
1145          they behave just like external variables except that we need to note the
1146          alias flag to later output the weakref pseudo-op into the asm file. */
1147 if (!target_node
1148 && lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL)
1150 symtab_node *node = symtab_node::get (p->decl);
1151 if (node)
1153 node->alias_target = p->target;
1154 node->weakref = true;
1155 node->alias = true;
1157 alias_pairs->unordered_remove (i);
1158 continue;
1160 else if (!target_node)
1162 error ("%q+D aliased to undefined symbol %qE", p->decl, p->target);
1163 symtab_node *node = symtab_node::get (p->decl);
1164 if (node)
1165 node->alias = false;
1166 alias_pairs->unordered_remove (i);
1167 continue;
1170 if (DECL_EXTERNAL (target_node->decl)
1171 /* We use local aliases for C++ thunks to force the tailcall
1172 to bind locally. This is a hack - to keep it working do
1173 the following (which is not strictly correct). */
1174          && (TREE_CODE (target_node->decl) != FUNCTION_DECL
1175 || ! DECL_VIRTUAL_P (target_node->decl))
1176 && ! lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)))
1178 error ("%q+D aliased to external symbol %qE",
1179 p->decl, p->target);
1182 if (TREE_CODE (p->decl) == FUNCTION_DECL
1183 && target_node && is_a <cgraph_node *> (target_node))
1185 struct cgraph_node *src_node = cgraph_node::get (p->decl);
1186 if (src_node && src_node->definition)
1187 src_node->reset ();
1188 cgraph_node::create_alias (p->decl, target_node->decl);
1189 alias_pairs->unordered_remove (i);
1191 else if (TREE_CODE (p->decl) == VAR_DECL
1192 && target_node && is_a <varpool_node *> (target_node))
1194 varpool_node::create_alias (p->decl, target_node->decl);
1195 alias_pairs->unordered_remove (i);
1197 else
1199 error ("%q+D alias in between function and variable is not supported",
1200 p->decl);
1201 warning (0, "%q+D aliased declaration",
1202 target_node->decl);
1203 alias_pairs->unordered_remove (i);
1206 vec_free (alias_pairs);
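/* For reference, a source-level declaration such as

     extern int foo (void);
     int bar (void) __attribute__ ((alias ("foo")));

   reaches this function as the alias pair (bar, "foo").  Pairs whose
   target is defined in this unit become cgraph/varpool aliases, weakrefs
   with an undefined target merely record the alias/weakref flags, and
   the remaining combinations are diagnosed above.  */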
1210 /* Figure out what functions we want to assemble. */
1212 static void
1213 mark_functions_to_output (void)
1215 struct cgraph_node *node;
1216 #ifdef ENABLE_CHECKING
1217 bool check_same_comdat_groups = false;
1219 FOR_EACH_FUNCTION (node)
1220 gcc_assert (!node->process);
1221 #endif
1223 FOR_EACH_FUNCTION (node)
1225 tree decl = node->decl;
1227 gcc_assert (!node->process || node->same_comdat_group);
1228 if (node->process)
1229 continue;
1231 /* We need to output all local functions that are used and not
1232 always inlined, as well as those that are reachable from
1233 outside the current compilation unit. */
1234 if (node->analyzed
1235 && !node->thunk.thunk_p
1236 && !node->alias
1237 && !node->global.inlined_to
1238 && !TREE_ASM_WRITTEN (decl)
1239 && !DECL_EXTERNAL (decl))
1241 node->process = 1;
1242 if (node->same_comdat_group)
1244 struct cgraph_node *next;
1245 for (next = dyn_cast<cgraph_node *> (node->same_comdat_group);
1246 next != node;
1247 next = dyn_cast<cgraph_node *> (next->same_comdat_group))
1248 if (!next->thunk.thunk_p && !next->alias
1249 && !next->comdat_local_p ())
1250 next->process = 1;
1253 else if (node->same_comdat_group)
1255 #ifdef ENABLE_CHECKING
1256 check_same_comdat_groups = true;
1257 #endif
1259 else
1261 /* We should've reclaimed all functions that are not needed. */
1262 #ifdef ENABLE_CHECKING
1263 if (!node->global.inlined_to
1264 && gimple_has_body_p (decl)
1265          /* FIXME: in an ltrans unit when the offline copy is outside a partition but
1266             inline copies are inside a partition, we can end up not removing the body
1267             since we no longer have an analyzed node pointing to it. */
1268 && !node->in_other_partition
1269 && !node->alias
1270 && !node->clones
1271 && !DECL_EXTERNAL (decl))
1273 node->debug ();
1274 internal_error ("failed to reclaim unneeded function");
1276 #endif
1277 gcc_assert (node->global.inlined_to
1278 || !gimple_has_body_p (decl)
1279 || node->in_other_partition
1280 || node->clones
1281 || DECL_ARTIFICIAL (decl)
1282 || DECL_EXTERNAL (decl));
1287 #ifdef ENABLE_CHECKING
1288 if (check_same_comdat_groups)
1289 FOR_EACH_FUNCTION (node)
1290 if (node->same_comdat_group && !node->process)
1292 tree decl = node->decl;
1293 if (!node->global.inlined_to
1294 && gimple_has_body_p (decl)
1295 /* FIXME: in an ltrans unit when the offline copy is outside a
1296 partition but inline copies are inside a partition, we can
1297 end up not removing the body since we no longer have an
1298 analyzed node pointing to it. */
1299 && !node->in_other_partition
1300 && !node->clones
1301 && !DECL_EXTERNAL (decl))
1303 node->debug ();
1304 internal_error ("failed to reclaim unneeded function in same "
1305 "comdat group");
1308 #endif
1311 /* DECL is a FUNCTION_DECL. Initialize data structures so DECL is a function
1312    in lowered GIMPLE form. IN_SSA is true if the GIMPLE is in SSA form.
1314    Set current_function_decl and cfun to the newly constructed empty function body.
1315    Return the basic block in the function body. */
1317 basic_block
1318 init_lowered_empty_function (tree decl, bool in_ssa)
1320 basic_block bb;
1322 current_function_decl = decl;
1323 allocate_struct_function (decl, false);
1324 gimple_register_cfg_hooks ();
1325 init_empty_tree_cfg ();
1327 if (in_ssa)
1329 init_tree_ssa (cfun);
1330 init_ssa_operands (cfun);
1331 cfun->gimple_df->in_ssa_p = true;
1332 cfun->curr_properties |= PROP_ssa;
1335 DECL_INITIAL (decl) = make_node (BLOCK);
1337 DECL_SAVED_TREE (decl) = error_mark_node;
1338 cfun->curr_properties |= (PROP_gimple_lcf | PROP_gimple_leh | PROP_gimple_any
1339 | PROP_cfg | PROP_loops);
1341 set_loops_for_fn (cfun, ggc_cleared_alloc<loops> ());
1342 init_loops_structure (cfun, loops_for_fn (cfun), 1);
1343 loops_for_fn (cfun)->state |= LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
1345 /* Create BB for body of the function and connect it properly. */
1346 bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR_FOR_FN (cfun));
1347 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, EDGE_FALLTHRU);
1348 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
1349 add_bb_to_loop (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
1351 return bb;
1354 /* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1355 offset indicated by VIRTUAL_OFFSET, if that is
1356 non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
1357 zero for a result adjusting thunk. */
1359 static tree
1360 thunk_adjust (gimple_stmt_iterator * bsi,
1361 tree ptr, bool this_adjusting,
1362 HOST_WIDE_INT fixed_offset, tree virtual_offset)
1364 gimple stmt;
1365 tree ret;
1367 if (this_adjusting
1368 && fixed_offset != 0)
1370 stmt = gimple_build_assign
1371 (ptr, fold_build_pointer_plus_hwi_loc (input_location,
1372 ptr,
1373 fixed_offset));
1374 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1377 /* If there's a virtual offset, look up that value in the vtable and
1378 adjust the pointer again. */
1379 if (virtual_offset)
1381 tree vtabletmp;
1382 tree vtabletmp2;
1383 tree vtabletmp3;
1385 if (!vtable_entry_type)
1387 tree vfunc_type = make_node (FUNCTION_TYPE);
1388 TREE_TYPE (vfunc_type) = integer_type_node;
1389 TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
1390 layout_type (vfunc_type);
1392 vtable_entry_type = build_pointer_type (vfunc_type);
1395 vtabletmp =
1396 create_tmp_reg (build_pointer_type
1397 (build_pointer_type (vtable_entry_type)), "vptr");
1399 /* The vptr is always at offset zero in the object. */
1400 stmt = gimple_build_assign (vtabletmp,
1401 build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
1402 ptr));
1403 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1405 /* Form the vtable address. */
1406 vtabletmp2 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp)),
1407 "vtableaddr");
1408 stmt = gimple_build_assign (vtabletmp2,
1409 build_simple_mem_ref (vtabletmp));
1410 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1412 /* Find the entry with the vcall offset. */
1413 stmt = gimple_build_assign (vtabletmp2,
1414 fold_build_pointer_plus_loc (input_location,
1415 vtabletmp2,
1416 virtual_offset));
1417 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1419 /* Get the offset itself. */
1420 vtabletmp3 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp2)),
1421 "vcalloffset");
1422 stmt = gimple_build_assign (vtabletmp3,
1423 build_simple_mem_ref (vtabletmp2));
1424 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1426 /* Adjust the `this' pointer. */
1427 ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
1428 ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
1429 GSI_CONTINUE_LINKING);
1432 if (!this_adjusting
1433 && fixed_offset != 0)
1434 /* Adjust the pointer by the constant. */
1436 tree ptrtmp;
1438 if (TREE_CODE (ptr) == VAR_DECL)
1439 ptrtmp = ptr;
1440 else
1442 ptrtmp = create_tmp_reg (TREE_TYPE (ptr), "ptr");
1443 stmt = gimple_build_assign (ptrtmp, ptr);
1444 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1446 ptr = fold_build_pointer_plus_hwi_loc (input_location,
1447 ptrtmp, fixed_offset);
1450 /* Emit the statement and gimplify the adjustment expression. */
1451 ret = create_tmp_reg (TREE_TYPE (ptr), "adjusted_this");
1452 stmt = gimple_build_assign (ret, ptr);
1453 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1455 return ret;
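/* In effect the statements built above compute, in pseudo-C:

     if (this_adjusting)
       ptr += fixed_offset;
     if (virtual_offset)
       ptr += *(vtable + virtual_offset);    where vtable = *(void **) ptr
     if (!this_adjusting)
       ptr += fixed_offset;

   i.e. a this-adjusting thunk applies the constant before the vtable
   lookup and a result-adjusting thunk applies it afterwards.  This is
   only a sketch of the pointer arithmetic; the real adjustment is built
   as GIMPLE statements above.  */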
1458 /* Expand thunk NODE to gimple if possible.
1459    When FORCE_GIMPLE_THUNK is true, a GIMPLE thunk is created and
1460    no assembler is produced.
1461    When OUTPUT_ASM_THUNKS is true, also produce assembler for
1462    thunks that are not lowered. */
1464 bool
1465 cgraph_node::expand_thunk (bool output_asm_thunks, bool force_gimple_thunk)
1467 bool this_adjusting = thunk.this_adjusting;
1468 HOST_WIDE_INT fixed_offset = thunk.fixed_offset;
1469 HOST_WIDE_INT virtual_value = thunk.virtual_value;
1470 tree virtual_offset = NULL;
1471 tree alias = callees->callee->decl;
1472 tree thunk_fndecl = decl;
1473 tree a;
1476 if (!force_gimple_thunk && this_adjusting
1477 && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1478 virtual_value, alias))
1480 const char *fnname;
1481 tree fn_block;
1482 tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1484 if (!output_asm_thunks)
1485 return false;
1487 if (in_lto_p)
1488 get_body ();
1489 a = DECL_ARGUMENTS (thunk_fndecl);
1491 current_function_decl = thunk_fndecl;
1493 /* Ensure thunks are emitted in their correct sections. */
1494 resolve_unique_section (thunk_fndecl, 0, flag_function_sections);
1496 DECL_RESULT (thunk_fndecl)
1497 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1498 RESULT_DECL, 0, restype);
1499 DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
1500 fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
1502 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1503 create one. */
1504 fn_block = make_node (BLOCK);
1505 BLOCK_VARS (fn_block) = a;
1506 DECL_INITIAL (thunk_fndecl) = fn_block;
1507 init_function_start (thunk_fndecl);
1508 cfun->is_thunk = 1;
1509 insn_locations_init ();
1510 set_curr_insn_location (DECL_SOURCE_LOCATION (thunk_fndecl));
1511 prologue_location = curr_insn_location ();
1512 assemble_start_function (thunk_fndecl, fnname);
1514 targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1515 fixed_offset, virtual_value, alias);
1517 assemble_end_function (thunk_fndecl, fnname);
1518 insn_locations_finalize ();
1519 init_insn_lengths ();
1520 free_after_compilation (cfun);
1521 set_cfun (NULL);
1522 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1523 thunk.thunk_p = false;
1524 analyzed = false;
1526 else
1528 tree restype;
1529 basic_block bb, then_bb, else_bb, return_bb;
1530 gimple_stmt_iterator bsi;
1531 int nargs = 0;
1532 tree arg;
1533 int i;
1534 tree resdecl;
1535 tree restmp = NULL;
1537 gimple call;
1538 gimple ret;
1540 if (in_lto_p)
1541 get_body ();
1542 a = DECL_ARGUMENTS (thunk_fndecl);
1544 current_function_decl = thunk_fndecl;
1546 /* Ensure thunks are emitted in their correct sections. */
1547 resolve_unique_section (thunk_fndecl, 0, flag_function_sections);
1549 DECL_IGNORED_P (thunk_fndecl) = 1;
1550 bitmap_obstack_initialize (NULL);
1552 if (thunk.virtual_offset_p)
1553 virtual_offset = size_int (virtual_value);
1555 /* Build the return declaration for the function. */
1556 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1557 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1559 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1560 DECL_ARTIFICIAL (resdecl) = 1;
1561 DECL_IGNORED_P (resdecl) = 1;
1562 DECL_RESULT (thunk_fndecl) = resdecl;
1563 DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
1565 else
1566 resdecl = DECL_RESULT (thunk_fndecl);
1568 bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl, true);
1570 bsi = gsi_start_bb (bb);
1572 /* Build call to the function being thunked. */
1573 if (!VOID_TYPE_P (restype))
1575 if (DECL_BY_REFERENCE (resdecl))
1576 restmp = gimple_fold_indirect_ref (resdecl);
1577 else if (!is_gimple_reg_type (restype))
1579 restmp = resdecl;
1580 add_local_decl (cfun, restmp);
1581 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
1583 else
1584 restmp = create_tmp_reg (restype, "retval");
1587 for (arg = a; arg; arg = DECL_CHAIN (arg))
1588 nargs++;
1589 auto_vec<tree> vargs (nargs);
1590 if (this_adjusting)
1591 vargs.quick_push (thunk_adjust (&bsi, a, 1, fixed_offset,
1592 virtual_offset));
1593 else if (nargs)
1594 vargs.quick_push (a);
1596 if (nargs)
1597 for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
1599 tree tmp = arg;
1600 if (!is_gimple_val (arg))
1602 tmp = create_tmp_reg (TYPE_MAIN_VARIANT
1603 (TREE_TYPE (arg)), "arg");
1604 gimple stmt = gimple_build_assign (tmp, arg);
1605 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1607 vargs.quick_push (tmp);
1609 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
1610 callees->call_stmt = call;
1611 gimple_call_set_from_thunk (call, true);
1612 if (restmp)
1614 gimple_call_set_lhs (call, restmp);
1615 gcc_assert (useless_type_conversion_p (TREE_TYPE (restmp),
1616 TREE_TYPE (TREE_TYPE (alias))));
1618 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
1619 if (!(gimple_call_flags (call) & ECF_NORETURN))
1621 if (restmp && !this_adjusting
1622 && (fixed_offset || virtual_offset))
1624 tree true_label = NULL_TREE;
1626 if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
1628 gimple stmt;
1629 /* If the return type is a pointer, we need to
1630 protect against NULL. We know there will be an
1631 adjustment, because that's why we're emitting a
1632 thunk. */
1633 then_bb = create_basic_block (NULL, (void *) 0, bb);
1634 return_bb = create_basic_block (NULL, (void *) 0, then_bb);
1635 else_bb = create_basic_block (NULL, (void *) 0, else_bb);
1636 add_bb_to_loop (then_bb, bb->loop_father);
1637 add_bb_to_loop (return_bb, bb->loop_father);
1638 add_bb_to_loop (else_bb, bb->loop_father);
1639 remove_edge (single_succ_edge (bb));
1640 true_label = gimple_block_label (then_bb);
1641 stmt = gimple_build_cond (NE_EXPR, restmp,
1642 build_zero_cst (TREE_TYPE (restmp)),
1643 NULL_TREE, NULL_TREE);
1644 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1645 make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1646 make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1647 make_edge (return_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
1648 make_edge (then_bb, return_bb, EDGE_FALLTHRU);
1649 make_edge (else_bb, return_bb, EDGE_FALLTHRU);
1650 bsi = gsi_last_bb (then_bb);
1653 restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
1654 fixed_offset, virtual_offset);
1655 if (true_label)
1657 gimple stmt;
1658 bsi = gsi_last_bb (else_bb);
1659 stmt = gimple_build_assign (restmp,
1660 build_zero_cst (TREE_TYPE (restmp)));
1661 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1662 bsi = gsi_last_bb (return_bb);
1665 else
1666 gimple_call_set_tail (call, true);
1668 /* Build return value. */
1669 ret = gimple_build_return (restmp);
1670 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
1672 else
1674 gimple_call_set_tail (call, true);
1675 remove_edge (single_succ_edge (bb));
1678 cfun->gimple_df->in_ssa_p = true;
1679 /* FIXME: C++ FE should stop setting TREE_ASM_WRITTEN on thunks. */
1680 TREE_ASM_WRITTEN (thunk_fndecl) = false;
1681 delete_unreachable_blocks ();
1682 update_ssa (TODO_update_ssa);
1683 #ifdef ENABLE_CHECKING
1684 verify_flow_info ();
1685 #endif
1686 free_dominance_info (CDI_DOMINATORS);
1688 /* Since we want to emit the thunk, we explicitly mark its name as
1689 referenced. */
1690 thunk.thunk_p = false;
1691 lowered = true;
1692 bitmap_obstack_release (NULL);
1694 current_function_decl = NULL;
1695 set_cfun (NULL);
1696 return true;
1699 /* Assemble thunks and aliases associated with NODE. */
1701 static void
1702 assemble_thunks_and_aliases (struct cgraph_node *node)
1704 struct cgraph_edge *e;
1705 struct ipa_ref *ref;
1707 for (e = node->callers; e;)
1708 if (e->caller->thunk.thunk_p)
1710 struct cgraph_node *thunk = e->caller;
1712 e = e->next_caller;
1713 thunk->expand_thunk (true, false);
1714 assemble_thunks_and_aliases (thunk);
1716 else
1717 e = e->next_caller;
1719 FOR_EACH_ALIAS (node, ref)
1721 struct cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
1722 bool saved_written = TREE_ASM_WRITTEN (node->decl);
1724 /* Force assemble_alias to really output the alias this time instead
1725 of buffering it in same alias pairs. */
1726 TREE_ASM_WRITTEN (node->decl) = 1;
1727 do_assemble_alias (alias->decl,
1728 DECL_ASSEMBLER_NAME (node->decl));
1729 assemble_thunks_and_aliases (alias);
1730 TREE_ASM_WRITTEN (node->decl) = saved_written;
1734 /* Expand function specified by NODE. */
1736 static void
1737 expand_function (struct cgraph_node *node)
1739 tree decl = node->decl;
1740 location_t saved_loc;
1742 /* We ought to not compile any inline clones. */
1743 gcc_assert (!node->global.inlined_to);
1745 announce_function (decl);
1746 node->process = 0;
1747 gcc_assert (node->lowered);
1748 node->get_body ();
1750 /* Generate RTL for the body of DECL. */
1752 timevar_push (TV_REST_OF_COMPILATION);
1754 gcc_assert (cgraph_global_info_ready);
1756 /* Initialize the default bitmap obstack. */
1757 bitmap_obstack_initialize (NULL);
1759 /* Initialize the RTL code for the function. */
1760 current_function_decl = decl;
1761 saved_loc = input_location;
1762 input_location = DECL_SOURCE_LOCATION (decl);
1763 init_function_start (decl);
1765 gimple_register_cfg_hooks ();
1767 bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation*/
1769 execute_all_ipa_transforms ();
1771 /* Perform all tree transforms and optimizations. */
1773 /* Signal the start of passes. */
1774 invoke_plugin_callbacks (PLUGIN_ALL_PASSES_START, NULL);
1776 execute_pass_list (cfun, g->get_passes ()->all_passes);
1778 /* Signal the end of passes. */
1779 invoke_plugin_callbacks (PLUGIN_ALL_PASSES_END, NULL);
1781 bitmap_obstack_release (&reg_obstack);
1783 /* Release the default bitmap obstack. */
1784 bitmap_obstack_release (NULL);
1786 /* If requested, warn about function definitions where the function will
1787 return a value (usually of some struct or union type) which itself will
1788 take up a lot of stack space. */
1789 if (warn_larger_than && !DECL_EXTERNAL (decl) && TREE_TYPE (decl))
1791 tree ret_type = TREE_TYPE (TREE_TYPE (decl));
1793 if (ret_type && TYPE_SIZE_UNIT (ret_type)
1794 && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
1795 && 0 < compare_tree_int (TYPE_SIZE_UNIT (ret_type),
1796 larger_than_size))
1798 unsigned int size_as_int
1799 = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));
1801 if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
1802 warning (OPT_Wlarger_than_, "size of return value of %q+D is %u bytes",
1803 decl, size_as_int);
1804 else
1805 warning (OPT_Wlarger_than_, "size of return value of %q+D is larger than %wd bytes",
1806 decl, larger_than_size);
1810 gimple_set_body (decl, NULL);
1811 if (DECL_STRUCT_FUNCTION (decl) == 0
1812 && !cgraph_node::get (decl)->origin)
1814 /* Stop pointing to the local nodes about to be freed.
1815 But DECL_INITIAL must remain nonzero so we know this
1816 was an actual function definition.
1817 For a nested function, this is done in c_pop_function_context.
1818 If rest_of_compilation set this to 0, leave it 0. */
1819 if (DECL_INITIAL (decl) != 0)
1820 DECL_INITIAL (decl) = error_mark_node;
1823 input_location = saved_loc;
1825 ggc_collect ();
1826 timevar_pop (TV_REST_OF_COMPILATION);
1828 /* Make sure that BE didn't give up on compiling. */
1829 gcc_assert (TREE_ASM_WRITTEN (decl));
1830 set_cfun (NULL);
1831 current_function_decl = NULL;
1833   /* It would make a lot more sense to output thunks before the function body to get
1834      more forward and fewer backward jumps. This however would require solving the
1835      problem with comdats. See PR48668. Also aliases must come after the function
1836      itself to make one-pass assemblers, like the one on AIX, happy. See PR 50689.
1837      FIXME: Perhaps thunks should be moved before the function IFF they are not in
1838      comdat groups. */
1839 assemble_thunks_and_aliases (node);
1840 node->release_body ();
1841 /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
1842 points to the dead function body. */
1843 node->remove_callees ();
1844 node->remove_all_references ();
1847 /* Node comparator that is responsible for the order that corresponds
1848    to the time when a function was first executed. */
1850 static int
1851 node_cmp (const void *pa, const void *pb)
1853 const struct cgraph_node *a = *(const struct cgraph_node * const *) pa;
1854 const struct cgraph_node *b = *(const struct cgraph_node * const *) pb;
1856   /* Functions with time profile must come before those without profile. */
1857 if (!a->tp_first_run || !b->tp_first_run)
1858 return a->tp_first_run - b->tp_first_run;
1860 return a->tp_first_run != b->tp_first_run
1861 ? b->tp_first_run - a->tp_first_run
1862 : b->order - a->order;
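/* Illustrative sketch, not part of GCC: the same comparison logic applied to a
   hypothetical plain struct, to show the resulting qsort order.  Unprofiled
   entries sort to the front and profiled ones sort by descending tp_first_run;
   expand_all_functions below walks the array backwards, so profiled functions
   are emitted in ascending first-run order and unprofiled ones last.  */
#if 0
#include <stdio.h>
#include <stdlib.h>

struct toy_node { int tp_first_run; int order; };

static int
toy_node_cmp (const void *pa, const void *pb)
{
  const struct toy_node *a = (const struct toy_node *) pa;
  const struct toy_node *b = (const struct toy_node *) pb;
  if (!a->tp_first_run || !b->tp_first_run)
    return a->tp_first_run - b->tp_first_run;
  return a->tp_first_run != b->tp_first_run
         ? b->tp_first_run - a->tp_first_run
         : b->order - a->order;
}

int
main (void)
{
  struct toy_node n[] = { { 5, 0 }, { 0, 1 }, { 2, 2 } };
  int i;
  qsort (n, 3, sizeof (n[0]), toy_node_cmp);
  for (i = 0; i < 3; i++)
    printf ("%d ", n[i].tp_first_run);  /* Prints: 0 5 2  */
  return 0;
}
#endif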
1865 /* Expand all functions that must be output.
1867 Attempt to topologically sort the nodes so that a function is output
1868 when all functions it calls are already assembled, allowing data to be
1869 propagated across the callgraph.  Use a stack to get smaller distance
1870 between a function and its callees (later we may choose to use a more
1871 sophisticated algorithm for function reordering; we will likely want
1872 to use subsections to make the output functions appear in top-down
1873 order). */
1875 static void
1876 expand_all_functions (void)
1878 struct cgraph_node *node;
1879 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1880 unsigned int expanded_func_count = 0, profiled_func_count = 0;
1881 int order_pos, new_order_pos = 0;
1882 int i;
1884 order_pos = ipa_reverse_postorder (order);
1885 gcc_assert (order_pos == cgraph_n_nodes);
1887 /* The garbage collector may remove inline clones that we eliminate during
1888 optimization, so we must be sure not to reference them.  */
1889 for (i = 0; i < order_pos; i++)
1890 if (order[i]->process)
1891 order[new_order_pos++] = order[i];
1893 if (flag_profile_reorder_functions)
1894 qsort (order, new_order_pos, sizeof (struct cgraph_node *), node_cmp);
1896 for (i = new_order_pos - 1; i >= 0; i--)
1898 node = order[i];
1900 if (node->process)
1902 expanded_func_count++;
1903 if (node->tp_first_run)
1904 profiled_func_count++;
1906 if (cgraph_dump_file)
1907 fprintf (cgraph_dump_file, "Time profile order in expand_all_functions:%s:%d\n", node->asm_name (), node->tp_first_run);
1909 node->process = 0;
1910 expand_function (node);
1914 if (dump_file)
1915 fprintf (dump_file, "Expanded functions with time profile (%s):%u/%u\n",
1916 main_input_filename, profiled_func_count, expanded_func_count);
1918 if (cgraph_dump_file && flag_profile_reorder_functions)
1919 fprintf (cgraph_dump_file, "Expanded functions with time profile:%u/%u\n",
1920 profiled_func_count, expanded_func_count);
1922 cgraph_process_new_functions ();
1923 free_gimplify_stack ();
1925 free (order);
1928 /* Kinds used to sort symbol table entries by their cgraph order number.  */
1930 enum cgraph_order_sort_kind
1932 ORDER_UNDEFINED = 0,
1933 ORDER_FUNCTION,
1934 ORDER_VAR,
1935 ORDER_ASM
1938 struct cgraph_order_sort
1940 enum cgraph_order_sort_kind kind;
1941 union
1943 struct cgraph_node *f;
1944 varpool_node *v;
1945 struct asm_node *a;
1946 } u;
1949 /* Output all functions, variables, and asm statements in the order
1950 according to their order fields, which is the order in which they
1951 appeared in the file. This implements -fno-toplevel-reorder. In
1952 this mode we may output functions and variables which don't really
1953 need to be output. */
1955 static void
1956 output_in_order (void)
1958 int max;
1959 struct cgraph_order_sort *nodes;
1960 int i;
1961 struct cgraph_node *pf;
1962 varpool_node *pv;
1963 struct asm_node *pa;
1965 max = symtab_order;
1966 nodes = XCNEWVEC (struct cgraph_order_sort, max);
1968 FOR_EACH_DEFINED_FUNCTION (pf)
1970 if (pf->process && !pf->thunk.thunk_p && !pf->alias)
1972 i = pf->order;
1973 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1974 nodes[i].kind = ORDER_FUNCTION;
1975 nodes[i].u.f = pf;
1979 FOR_EACH_DEFINED_VARIABLE (pv)
1980 if (!DECL_EXTERNAL (pv->decl))
1982 i = pv->order;
1983 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1984 nodes[i].kind = ORDER_VAR;
1985 nodes[i].u.v = pv;
1988 for (pa = asm_nodes; pa; pa = pa->next)
1990 i = pa->order;
1991 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1992 nodes[i].kind = ORDER_ASM;
1993 nodes[i].u.a = pa;
1996 /* In toplevel reorder mode we output all statics; mark them as needed. */
1998 for (i = 0; i < max; ++i)
1999 if (nodes[i].kind == ORDER_VAR)
2000 nodes[i].u.v->finalize_named_section_flags ();
2002 for (i = 0; i < max; ++i)
2004 switch (nodes[i].kind)
2006 case ORDER_FUNCTION:
2007 nodes[i].u.f->process = 0;
2008 expand_function (nodes[i].u.f);
2009 break;
2011 case ORDER_VAR:
2012 nodes[i].u.v->assemble_decl ();
2013 break;
2015 case ORDER_ASM:
2016 assemble_asm (nodes[i].u.a->asm_str);
2017 break;
2019 case ORDER_UNDEFINED:
2020 break;
2022 default:
2023 gcc_unreachable ();
2027 asm_nodes = NULL;
2028 free (nodes);
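/* Illustrative sketch, not part of GCC: hypothetical input for which the order
   preserved by output_in_order matters.  With -fno-toplevel-reorder the
   toplevel asm is emitted between foo and bar, exactly as written; with
   reordering enabled it could end up elsewhere in the output.  */
#if 0
int foo (void) { return 1; }

asm (".ident \"emitted between foo and bar\"");

int bar (void) { return 2; }
#endif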
2031 static void
2032 ipa_passes (void)
2034 gcc::pass_manager *passes = g->get_passes ();
2036 set_cfun (NULL);
2037 current_function_decl = NULL;
2038 gimple_register_cfg_hooks ();
2039 bitmap_obstack_initialize (NULL);
2041 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);
2043 if (!in_lto_p)
2045 execute_ipa_pass_list (passes->all_small_ipa_passes);
2046 if (seen_error ())
2047 return;
2050 /* We never run removal of unreachable nodes automatically after the early
2051 passes, because TODOs are run before the subpasses; do it here instead.
2052 It is important to remove the unreachable functions to save work at the
2053 IPA level and to get LTO symbol tables right.  */
2054 symtab_remove_unreachable_nodes (true, cgraph_dump_file);
2056 /* If pass_all_early_optimizations was not scheduled, the state of
2057 the cgraph will not be properly updated. Update it now. */
2058 if (cgraph_state < CGRAPH_STATE_IPA_SSA)
2059 cgraph_state = CGRAPH_STATE_IPA_SSA;
2061 if (!in_lto_p)
2063 /* Generate coverage variables and constructors. */
2064 coverage_finish ();
2066 /* Process new functions added. */
2067 set_cfun (NULL);
2068 current_function_decl = NULL;
2069 cgraph_process_new_functions ();
2071 execute_ipa_summary_passes
2072 ((ipa_opt_pass_d *) passes->all_regular_ipa_passes);
2075 /* Some targets need to handle LTO assembler output specially. */
2076 if (flag_generate_lto)
2077 targetm.asm_out.lto_start ();
2079 if (!in_lto_p)
2080 ipa_write_summaries ();
2082 if (flag_generate_lto)
2083 targetm.asm_out.lto_end ();
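/* A sketch of when the condition below lets the regular IPA passes execute
   here: a normal non-LTO compile (!flag_lto), a -ffat-lto-objects compile
   (which also needs final code), or the WPA stage of lto1 (in_lto_p).  They
   are skipped during a slim -flto compile, where only the summaries streamed
   above are needed, and during the ltrans stage, where the WPA decisions are
   merely applied.  */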
2085 if (!flag_ltrans && (in_lto_p || !flag_lto || flag_fat_lto_objects))
2086 execute_ipa_pass_list (passes->all_regular_ipa_passes);
2087 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
2089 bitmap_obstack_release (NULL);
2093 /* Return the identifier naming the symbol that DECL is an alias of, taken from its "alias" attribute.  */
2095 static tree
2096 get_alias_symbol (tree decl)
2098 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
2099 return get_identifier (TREE_STRING_POINTER
2100 (TREE_VALUE (TREE_VALUE (alias))));
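/* Illustrative sketch, not part of GCC: for a hypothetical declaration like
   the one below, the "alias" attribute's string argument, here "real_fn", is
   what get_alias_symbol returns as an identifier.  */
#if 0
void real_fn (void) { }
void alias_fn (void) __attribute__ ((alias ("real_fn")));
#endif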
2104 /* Weakrefs may be associated with external decls and thus not output
2105 at expansion time.  Emit all necessary aliases.  */
2107 static void
2108 output_weakrefs (void)
2110 symtab_node *node;
2111 FOR_EACH_SYMBOL (node)
2112 if (node->alias
2113 && !TREE_ASM_WRITTEN (node->decl)
2114 && node->weakref)
2116 tree target;
2118 /* Weakrefs are special in that they do not require the target to be
2119 defined in the current compilation unit, so it is a bit hard to work
2120 out what we want to alias.
2121 When the alias target is defined, we fetch it from the symtab reference;
2122 otherwise it is pointed to by alias_target.  */
2123 if (node->alias_target)
2124 target = (DECL_P (node->alias_target)
2125 ? DECL_ASSEMBLER_NAME (node->alias_target)
2126 : node->alias_target);
2127 else if (node->analyzed)
2128 target = DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl);
2129 else
2131 gcc_unreachable ();
2132 target = get_alias_symbol (node->decl);
2134 do_assemble_alias (node->decl, target);
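/* Illustrative sketch, not part of GCC: a hypothetical weakref.  The target
   need not be defined in this translation unit, which is why the code above
   may have to fall back on the attribute string (via get_alias_symbol)
   instead of a symtab node.  */
#if 0
static void weak_probe (void) __attribute__ ((weakref ("external_probe")));

void
maybe_probe (void)
{
  if (weak_probe)
    weak_probe ();
}
#endif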
2138 /* Initialize callgraph dump file. */
2140 void
2141 init_cgraph (void)
2143 if (!cgraph_dump_file)
2144 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
2148 /* Perform simple optimizations based on callgraph. */
2150 void
2151 compile (void)
2153 if (seen_error ())
2154 return;
2156 #ifdef ENABLE_CHECKING
2157 symtab_node::verify_symtab_nodes ();
2158 #endif
2160 timevar_push (TV_CGRAPHOPT);
2161 if (pre_ipa_mem_report)
2163 fprintf (stderr, "Memory consumption before IPA\n");
2164 dump_memory_report (false);
2166 if (!quiet_flag)
2167 fprintf (stderr, "Performing interprocedural optimizations\n");
2168 cgraph_state = CGRAPH_STATE_IPA;
2170 /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE. */
2171 if (flag_lto)
2172 lto_streamer_hooks_init ();
2174 /* Don't run the IPA passes if there were any errors or sorry messages.  */
2175 if (!seen_error ())
2176 ipa_passes ();
2178 /* Do nothing else if any IPA pass found errors or if we are just streaming LTO. */
2179 if (seen_error ()
2180 || (!in_lto_p && flag_lto && !flag_fat_lto_objects))
2182 timevar_pop (TV_CGRAPHOPT);
2183 return;
2186 /* This pass removes the bodies of extern inline functions we never inlined.
2187 Do this later so other IPA passes see what is really going on.  */
2188 symtab_remove_unreachable_nodes (false, dump_file);
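/* Illustrative sketch, not part of GCC: a hypothetical GNU89-style "extern
   inline" function.  Its body exists only as inlining material; if it was
   never inlined, the body discarded above is simply dropped and calls bind
   to the out-of-line definition provided elsewhere.  */
#if 0
extern inline int
fast_abs (int x)
{
  return x < 0 ? -x : x;
}
#endif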
2189 cgraph_global_info_ready = true;
2190 if (cgraph_dump_file)
2192 fprintf (cgraph_dump_file, "Optimized ");
2193 symtab_node::dump_table (cgraph_dump_file);
2195 if (post_ipa_mem_report)
2197 fprintf (stderr, "Memory consumption after IPA\n");
2198 dump_memory_report (false);
2200 timevar_pop (TV_CGRAPHOPT);
2202 /* Output everything. */
2203 (*debug_hooks->assembly_start) ();
2204 if (!quiet_flag)
2205 fprintf (stderr, "Assembling functions:\n");
2206 #ifdef ENABLE_CHECKING
2207 symtab_node::verify_symtab_nodes ();
2208 #endif
2210 cgraph_materialize_all_clones ();
2211 bitmap_obstack_initialize (NULL);
2212 execute_ipa_pass_list (g->get_passes ()->all_late_ipa_passes);
2213 symtab_remove_unreachable_nodes (true, dump_file);
2214 #ifdef ENABLE_CHECKING
2215 symtab_node::verify_symtab_nodes ();
2216 #endif
2217 bitmap_obstack_release (NULL);
2218 mark_functions_to_output ();
2220 /* When weakref support is missing, we automatically translate all
2221 references to NODE to references to its ultimate alias target.
2222 The renaming mechanism uses the flag IDENTIFIER_TRANSPARENT_ALIAS and
2223 TREE_CHAIN.
2225 Set up this mapping before we output any assembler, but once we are sure
2226 that all symbol renaming is done.
2228 FIXME: All this ugliness can go away if we just do renaming at the gimple
2229 level by physically rewriting the IL.  At the moment we can only redirect
2230 calls, so we need infrastructure for renaming references as well.  */
2231 #ifndef ASM_OUTPUT_WEAKREF
2232 symtab_node *node;
2234 FOR_EACH_SYMBOL (node)
2235 if (node->alias
2236 && lookup_attribute ("weakref", DECL_ATTRIBUTES (node->decl)))
2238 IDENTIFIER_TRANSPARENT_ALIAS
2239 (DECL_ASSEMBLER_NAME (node->decl)) = 1;
2240 TREE_CHAIN (DECL_ASSEMBLER_NAME (node->decl))
2241 = (node->alias_target ? node->alias_target
2242 : DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl));
2244 #endif
2246 cgraph_state = CGRAPH_STATE_EXPANSION;
2248 if (!flag_toplevel_reorder)
2249 output_in_order ();
2250 else
2252 output_asm_statements ();
2254 expand_all_functions ();
2255 varpool_node::output_variables ();
2258 cgraph_process_new_functions ();
2259 cgraph_state = CGRAPH_STATE_FINISHED;
2260 output_weakrefs ();
2262 if (cgraph_dump_file)
2264 fprintf (cgraph_dump_file, "\nFinal ");
2265 symtab_node::dump_table (cgraph_dump_file);
2267 #ifdef ENABLE_CHECKING
2268 symtab_node::verify_symtab_nodes ();
2269 /* Double check that all inline clones are gone and that all
2270 function bodies have been released from memory. */
2271 if (!seen_error ())
2273 struct cgraph_node *node;
2274 bool error_found = false;
2276 FOR_EACH_DEFINED_FUNCTION (node)
2277 if (node->global.inlined_to
2278 || gimple_has_body_p (node->decl))
2280 error_found = true;
2281 node->debug ();
2283 if (error_found)
2284 internal_error ("nodes with unreleased memory found");
2286 #endif
2290 /* Analyze the whole compilation unit once it is parsed completely. */
2292 void
2293 finalize_compilation_unit (void)
2295 timevar_push (TV_CGRAPH);
2297 /* If we're here there's no current function anymore. Some frontends
2298 are lazy in clearing these. */
2299 current_function_decl = NULL;
2300 set_cfun (NULL);
2302 /* Do not skip analyzing the functions if there were errors; otherwise we
2303 would miss diagnostics for the following functions.  */
2305 /* Emit size functions we didn't inline. */
2306 finalize_size_functions ();
2308 /* Mark alias targets necessary and emit diagnostics. */
2309 handle_alias_pairs ();
2311 if (!quiet_flag)
2313 fprintf (stderr, "\nAnalyzing compilation unit\n");
2314 fflush (stderr);
2317 if (flag_dump_passes)
2318 dump_passes ();
2320 /* Gimplify and lower all functions, compute reachability and
2321 remove unreachable nodes. */
2322 analyze_functions ();
2324 /* Mark alias targets necessary and emit diagnostics. */
2325 handle_alias_pairs ();
2327 /* Gimplify and lower thunks. */
2328 analyze_functions ();
2330 /* Finally drive the pass manager. */
2331 compile ();
2333 timevar_pop (TV_CGRAPH);
2336 /* Create a wrapper from this cgraph_node to the TARGET node.  A thunk is
2337 used for this kind of wrapper method.  */
2339 void
2340 cgraph_node::create_wrapper (struct cgraph_node *target)
2342 /* Preserve DECL_RESULT so we get the right by-reference flag.  */
2343 tree decl_result = DECL_RESULT (decl);
2345 /* Remove the function's body. */
2346 release_body ();
2347 reset ();
2349 DECL_RESULT (decl) = decl_result;
2350 DECL_INITIAL (decl) = NULL;
2351 allocate_struct_function (decl, false);
2352 set_cfun (NULL);
2354 /* Turn alias into thunk and expand it into GIMPLE representation. */
2355 definition = true;
2356 thunk.thunk_p = true;
2357 thunk.this_adjusting = false;
2359 struct cgraph_edge *e = create_edge (target, NULL, 0, CGRAPH_FREQ_BASE);
2361 if (!expand_thunk (false, true))
2362 analyzed = true;
2364 e->call_stmt_cannot_inline_p = true;
2366 /* Inline summary set-up. */
2367 analyze ();
2368 inline_analyze_function (this);
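/* Illustrative sketch, not part of GCC: conceptually, after create_wrapper the
   node behaves as if its body had been replaced by a plain forwarding call to
   the target, e.g. for a hypothetical pair of functions:  */
#if 0
int target_fn (int x);

int
wrapper_fn (int x)
{
  return target_fn (x);  /* What the non-this-adjusting thunk expands to.  */
}
#endif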
2371 #include "gt-cgraphunit.h"