/* gcc/cgraphunit.c */
/* Driver of optimization process
   Copyright (C) 2003-2013 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This module implements the main driver of the compilation process.

   The main scope of this file is to act as an interface in between
   tree based frontends and the backend.

   The front-end is supposed to use the following functionality (an
   illustrative usage sketch follows this comment):

    - cgraph_finalize_function

      This function is called once the front-end has parsed the whole body of
      a function and it is certain that neither the function body nor the
      declaration will change.

      (There is one exception needed for implementing the GCC extern inline
       function extension.)

    - varpool_finalize_decl

      This function has the same behavior as the above but is used for static
      variables.

    - add_asm_node

      Insert a new toplevel ASM statement.

    - finalize_compilation_unit

      This function is called once the (source level) compilation unit is
      finalized and it will no longer change.

      The symbol table is constructed starting from the trivially needed
      symbols finalized by the frontend.  Functions are lowered into
      GIMPLE representation and callgraph/reference lists are constructed.
      Those are used to discover other necessary functions and variables.

      At the end the bodies of unreachable functions are removed.

      The function can be called multiple times when multiple source level
      compilation units are combined.

    - compile

      This passes control to the back-end.  Optimizations are performed and
      the final assembler is generated.  This is done in the following way.
      Note that with link time optimization the process is split into three
      stages (compile time, link-time analysis and parallel link time as
      indicated below).

      Compile time:

	1) Inter-procedural optimization.
	   (ipa_passes)

	   This part is further split into:

	   a) early optimizations.  These are local passes executed in
	      the topological order on the callgraph.

	      The purpose of early optimizations is to optimize away simple
	      things that may otherwise confuse IP analysis.  Very simple
	      propagation across the callgraph is done, i.e. to discover
	      functions without side effects, and simple inlining is
	      performed.

	   b) early small interprocedural passes.

	      Those are interprocedural passes executed only at compilation
	      time.  These include, for example, transactional memory
	      lowering, unreachable code removal and other simple
	      transformations.

	   c) IP analysis stage.  All interprocedural passes do their
	      analysis.

	      Interprocedural passes differ from small interprocedural
	      passes by their ability to operate across the whole program
	      at link time.  Their analysis stage is performed early to
	      both reduce linking times and link-time memory usage by
	      not having to represent the whole program in memory.

	   d) LTO streaming.  When doing LTO, everything important gets
	      streamed into the object file.

      Compile time and/or link-time analysis stage (WPA):

	      At link time units get streamed back and the symbol table is
	      merged.  Function bodies are not streamed in and are not
	      available.

	   e) IP propagation stage.  All IP passes execute their
	      IP propagation.  This is done based on the earlier analysis
	      without having function bodies at hand.

	   f) Ltrans streaming.  When doing WHOPR LTO, the program
	      is partitioned and streamed into multiple object files.

      Compile time and/or parallel link-time stage (ltrans):

	      Each of the object files is streamed back and compiled
	      separately.  Now the function bodies become available
	      again.

	2) Virtual clone materialization
	   (cgraph_materialize_clone)

	   IP passes can produce copies of existing functions (such
	   as versioned clones or inline clones) without actually
	   manipulating their bodies by creating virtual clones in
	   the callgraph.  At this time the virtual clones are
	   turned into real functions.

	3) IP transformation

	   All IP passes transform function bodies based on the earlier
	   decisions of the IP propagation.

	4) late small IP passes

	   Simple IP passes working within a single program partition.

	5) Expansion
	   (expand_all_functions)

	   At this stage functions that need to be output into the
	   assembler are identified and compiled in topological order.

	6) Output of variables and aliases

	   Now it is known which variable references were not optimized
	   out, and thus all needed variables are output to the file.

	   Note that with -fno-toplevel-reorder passes 5 and 6
	   are combined together in cgraph_output_in_order.

   Finally there are functions to manipulate the callgraph from the
   backend.

    - cgraph_add_new_function is used to add backend produced
      functions introduced after the unit is finalized.
      The functions are enqueued for later processing and inserted
      into the callgraph with cgraph_process_new_functions.

    - cgraph_function_versioning

      produces a copy of a function into a new one (a version)
      and applies simple transformations.  */
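/* Illustrative usage sketch (not part of GCC): a front end with a
   hypothetical parser entry point parse_next_toplevel_decl would drive the
   interface described above roughly like this.  The parser function and the
   driver below are made up for illustration; the cgraph/varpool calls are
   the real interface.

     static void
     frontend_compile_file (void)
     {
       tree decl;

       while ((decl = parse_next_toplevel_decl ()) != NULL_TREE)
	 {
	   if (TREE_CODE (decl) == FUNCTION_DECL)
	     cgraph_finalize_function (decl, false);
	   else if (TREE_CODE (decl) == VAR_DECL)
	     varpool_finalize_decl (decl);
	 }

       finalize_compilation_unit ();
     }
   */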
160 #include "config.h"
161 #include "system.h"
162 #include "coretypes.h"
163 #include "tm.h"
164 #include "tree.h"
165 #include "output.h"
166 #include "rtl.h"
167 #include "gimple.h"
168 #include "gimplify.h"
169 #include "gimple-iterator.h"
170 #include "gimplify-me.h"
171 #include "gimple-ssa.h"
172 #include "tree-cfg.h"
173 #include "tree-into-ssa.h"
174 #include "tree-ssa.h"
175 #include "tree-inline.h"
176 #include "langhooks.h"
177 #include "pointer-set.h"
178 #include "toplev.h"
179 #include "flags.h"
180 #include "ggc.h"
181 #include "debug.h"
182 #include "target.h"
183 #include "diagnostic.h"
184 #include "params.h"
185 #include "fibheap.h"
186 #include "intl.h"
187 #include "function.h"
188 #include "ipa-prop.h"
189 #include "tree-iterator.h"
190 #include "tree-pass.h"
191 #include "tree-dump.h"
192 #include "gimple-pretty-print.h"
193 #include "output.h"
194 #include "coverage.h"
195 #include "plugin.h"
196 #include "ipa-inline.h"
197 #include "ipa-utils.h"
198 #include "lto-streamer.h"
199 #include "except.h"
200 #include "cfgloop.h"
201 #include "regset.h" /* FIXME: For reg_obstack. */
202 #include "context.h"
203 #include "pass_manager.h"
204 #include "tree-nested.h"
206 /* Queue of cgraph nodes scheduled to be added into cgraph. This is a
207 secondary queue used during optimization to accommodate passes that
208 may generate new functions that need to be optimized and expanded. */
209 cgraph_node_set cgraph_new_nodes;
211 static void expand_all_functions (void);
212 static void mark_functions_to_output (void);
213 static void expand_function (struct cgraph_node *);
214 static void analyze_function (struct cgraph_node *);
215 static void handle_alias_pairs (void);
217 FILE *cgraph_dump_file;
219 /* Linked list of cgraph asm nodes. */
220 struct asm_node *asm_nodes;
/* Last node in asm_nodes.  */
223 static GTY(()) struct asm_node *asm_last_node;
225 /* Used for vtable lookup in thunk adjusting. */
226 static GTY (()) tree vtable_entry_type;
/* Determine if symbol DECL is needed.  That is, visible to something
   either outside this translation unit or to something magic in the system
   configury.  */
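/* Examples (user-level C, for illustration only) of symbols the predicate
   below considers trivially needed:

     static void __attribute__ ((constructor)) init (void) { }
     int exported_var;

   (a static constructor, and a public non-COMDAT variable), whereas a plain
   "static int internal_var;" is kept only once something refers to it.  */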
231 bool
232 decide_is_symbol_needed (symtab_node *node)
234 tree decl = node->decl;
236 /* Double check that no one output the function into assembly file
237 early. */
238 gcc_checking_assert (!DECL_ASSEMBLER_NAME_SET_P (decl)
239 || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)));
241 if (!node->definition)
242 return false;
244 if (DECL_EXTERNAL (decl))
245 return false;
247 /* If the user told us it is used, then it must be so. */
248 if (node->force_output)
249 return true;
251 /* ABI forced symbols are needed when they are external. */
252 if (node->forced_by_abi && TREE_PUBLIC (decl))
253 return true;
255 /* Keep constructors, destructors and virtual functions. */
256 if (TREE_CODE (decl) == FUNCTION_DECL
257 && (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl)))
258 return true;
260 /* Externally visible variables must be output. The exception is
261 COMDAT variables that must be output only when they are needed. */
262 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
263 return true;
265 return false;
268 /* Head of the queue of nodes to be processed while building callgraph */
270 static symtab_node *first = (symtab_node *)(void *)1;
272 /* Add NODE to queue starting at FIRST.
273 The queue is linked via AUX pointers and terminated by pointer to 1. */
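/* The queue built by enqueue_node is drained with the idiom used in
   analyze_functions below:

     while (first != (symtab_node *)(void *)1)
       {
	 symtab_node *node = first;
	 first = (symtab_node *) first->aux;
	 ...
       }
   */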
275 static void
276 enqueue_node (symtab_node *node)
278 if (node->aux)
279 return;
280 gcc_checking_assert (first);
281 node->aux = first;
282 first = node;
285 /* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
286 functions into callgraph in a way so they look like ordinary reachable
287 functions inserted into callgraph already at construction time. */
289 bool
290 cgraph_process_new_functions (void)
292 bool output = false;
293 tree fndecl;
294 struct cgraph_node *node;
295 cgraph_node_set_iterator csi;
297 if (!cgraph_new_nodes)
298 return false;
299 handle_alias_pairs ();
  /* Note that this queue may grow as it is being processed, as the new
     functions may generate new ones.  */
302 for (csi = csi_start (cgraph_new_nodes); !csi_end_p (csi); csi_next (&csi))
304 node = csi_node (csi);
305 fndecl = node->decl;
306 switch (cgraph_state)
308 case CGRAPH_STATE_CONSTRUCTION:
309 /* At construction time we just need to finalize function and move
310 it into reachable functions list. */
312 cgraph_finalize_function (fndecl, false);
313 output = true;
314 cgraph_call_function_insertion_hooks (node);
315 enqueue_node (node);
316 break;
318 case CGRAPH_STATE_IPA:
319 case CGRAPH_STATE_IPA_SSA:
	  /* When IPA optimization has already started, do all essential
	     transformations that have already been performed on the whole
	     cgraph but not on this function.  */
324 gimple_register_cfg_hooks ();
325 if (!node->analyzed)
326 analyze_function (node);
327 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
328 if (cgraph_state == CGRAPH_STATE_IPA_SSA
329 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
330 g->get_passes ()->execute_early_local_passes ();
331 else if (inline_summary_vec != NULL)
332 compute_inline_parameters (node, true);
333 free_dominance_info (CDI_POST_DOMINATORS);
334 free_dominance_info (CDI_DOMINATORS);
335 pop_cfun ();
336 cgraph_call_function_insertion_hooks (node);
337 break;
339 case CGRAPH_STATE_EXPANSION:
340 /* Functions created during expansion shall be compiled
341 directly. */
342 node->process = 0;
343 cgraph_call_function_insertion_hooks (node);
344 expand_function (node);
345 break;
347 default:
348 gcc_unreachable ();
349 break;
352 free_cgraph_node_set (cgraph_new_nodes);
353 cgraph_new_nodes = NULL;
354 return output;
/* As a GCC extension we allow redefinition of the function.  The
   semantics when the two bodies differ are not well defined.
   We replace the old body with the new body so in unit-at-a-time mode
   we always use the new body, while in normal mode we may end up with
   the old body inlined into some functions and the new body expanded and
   inlined in others.

   ??? It may make more sense to use one body for inlining and the other
   body for expanding the function but this is difficult to do.  */
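/* Illustration (user-level C using the GNU extern inline extension, not part
   of this file):

     extern inline int f (void) { return 1; }
     int f (void) { return 2; }

   The second definition replaces the first; the node is reset by the
   function below and analyzed again with the new body.  */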
367 void
368 cgraph_reset_node (struct cgraph_node *node)
370 /* If node->process is set, then we have already begun whole-unit analysis.
371 This is *not* testing for whether we've already emitted the function.
372 That case can be sort-of legitimately seen with real function redefinition
373 errors. I would argue that the front end should never present us with
374 such a case, but don't enforce that for now. */
375 gcc_assert (!node->process);
377 /* Reset our data structures so we can analyze the function again. */
378 memset (&node->local, 0, sizeof (node->local));
379 memset (&node->global, 0, sizeof (node->global));
380 memset (&node->rtl, 0, sizeof (node->rtl));
381 node->analyzed = false;
382 node->definition = false;
383 node->alias = false;
384 node->weakref = false;
385 node->cpp_implicit_alias = false;
387 cgraph_node_remove_callees (node);
388 ipa_remove_all_references (&node->ref_list);
391 /* Return true when there are references to NODE. */
393 static bool
394 referred_to_p (symtab_node *node)
396 struct ipa_ref *ref;
398 /* See if there are any references at all. */
399 if (ipa_ref_list_referring_iterate (&node->ref_list, 0, ref))
400 return true;
401 /* For functions check also calls. */
402 cgraph_node *cn = dyn_cast <cgraph_node> (node);
403 if (cn && cn->callers)
404 return true;
405 return false;
408 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
409 logic in effect. If NO_COLLECT is true, then our caller cannot stand to have
410 the garbage collector run at the moment. We would need to either create
411 a new GC context, or just not compile right now. */
413 void
414 cgraph_finalize_function (tree decl, bool no_collect)
416 struct cgraph_node *node = cgraph_get_create_node (decl);
418 if (node->definition)
420 /* Nested functions should only be defined once. */
421 gcc_assert (!DECL_CONTEXT (decl)
422 || TREE_CODE (DECL_CONTEXT (decl)) != FUNCTION_DECL);
423 cgraph_reset_node (node);
424 node->local.redefined_extern_inline = true;
427 notice_global_symbol (decl);
428 node->definition = true;
429 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
431 /* With -fkeep-inline-functions we are keeping all inline functions except
432 for extern inline ones. */
433 if (flag_keep_inline_functions
434 && DECL_DECLARED_INLINE_P (decl)
435 && !DECL_EXTERNAL (decl)
436 && !DECL_DISREGARD_INLINE_LIMITS (decl))
437 node->force_output = 1;
  /* When not optimizing, also output the static functions (see
     PR24561), but don't do so for always_inline functions, functions
     declared inline and nested functions.  These were optimized out
     in the original implementation and it is unclear whether we want
     to change the behavior here.  */
444 if ((!optimize
445 && !node->cpp_implicit_alias
446 && !DECL_DISREGARD_INLINE_LIMITS (decl)
447 && !DECL_DECLARED_INLINE_P (decl)
448 && !(DECL_CONTEXT (decl)
449 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL))
450 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
451 node->force_output = 1;
453 /* If we've not yet emitted decl, tell the debug info about it. */
454 if (!TREE_ASM_WRITTEN (decl))
455 (*debug_hooks->deferred_inline_function) (decl);
457 /* Possibly warn about unused parameters. */
458 if (warn_unused_parameter)
459 do_warn_unused_parameter (decl);
461 if (!no_collect)
462 ggc_collect ();
464 if (cgraph_state == CGRAPH_STATE_CONSTRUCTION
465 && (decide_is_symbol_needed (node)
466 || referred_to_p (node)))
467 enqueue_node (node);
/* Add the function FNDECL to the call graph.
   Unlike cgraph_finalize_function, this function is intended to be used
   by the middle end and allows insertion of a new function at an arbitrary
   point of compilation.  The function can be either in high, low or SSA form
   GIMPLE.

   The function is assumed to be reachable and have its address taken (so no
   API breaking optimizations are performed on it).

   The main work done by this function is to enqueue the function for later
   processing, to avoid the need for the passes to be re-entrant.  */
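/* Minimal usage sketch (hypothetical pass code, not part of GCC): a pass
   that has just built NEW_FNDECL, a FUNCTION_DECL with a gimplified but not
   yet lowered body, would register it with

     cgraph_add_new_function (new_fndecl, false);

   passing true instead if the body is already in lowered GIMPLE or SSA
   form.  */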
482 void
483 cgraph_add_new_function (tree fndecl, bool lowered)
485 gcc::pass_manager *passes = g->get_passes ();
486 struct cgraph_node *node;
487 switch (cgraph_state)
489 case CGRAPH_STATE_PARSING:
490 cgraph_finalize_function (fndecl, false);
491 break;
492 case CGRAPH_STATE_CONSTRUCTION:
493 /* Just enqueue function to be processed at nearest occurrence. */
494 node = cgraph_create_node (fndecl);
495 if (lowered)
496 node->lowered = true;
497 if (!cgraph_new_nodes)
498 cgraph_new_nodes = cgraph_node_set_new ();
499 cgraph_node_set_add (cgraph_new_nodes, node);
500 break;
502 case CGRAPH_STATE_IPA:
503 case CGRAPH_STATE_IPA_SSA:
504 case CGRAPH_STATE_EXPANSION:
505 /* Bring the function into finalized state and enqueue for later
506 analyzing and compilation. */
507 node = cgraph_get_create_node (fndecl);
508 node->local.local = false;
509 node->definition = true;
510 node->force_output = true;
511 if (!lowered && cgraph_state == CGRAPH_STATE_EXPANSION)
513 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
514 gimple_register_cfg_hooks ();
515 bitmap_obstack_initialize (NULL);
516 execute_pass_list (passes->all_lowering_passes);
517 passes->execute_early_local_passes ();
518 bitmap_obstack_release (NULL);
519 pop_cfun ();
521 lowered = true;
523 if (lowered)
524 node->lowered = true;
525 if (!cgraph_new_nodes)
526 cgraph_new_nodes = cgraph_node_set_new ();
527 cgraph_node_set_add (cgraph_new_nodes, node);
528 break;
530 case CGRAPH_STATE_FINISHED:
531 /* At the very end of compilation we have to do all the work up
532 to expansion. */
533 node = cgraph_create_node (fndecl);
534 if (lowered)
535 node->lowered = true;
536 node->definition = true;
537 analyze_function (node);
538 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
539 gimple_register_cfg_hooks ();
540 bitmap_obstack_initialize (NULL);
541 if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
542 g->get_passes ()->execute_early_local_passes ();
543 bitmap_obstack_release (NULL);
544 pop_cfun ();
545 expand_function (node);
546 break;
548 default:
549 gcc_unreachable ();
552 /* Set a personality if required and we already passed EH lowering. */
553 if (lowered
554 && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
555 == eh_personality_lang))
556 DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
559 /* Add a top-level asm statement to the list. */
561 struct asm_node *
562 add_asm_node (tree asm_str)
564 struct asm_node *node;
566 node = ggc_alloc_cleared_asm_node ();
567 node->asm_str = asm_str;
568 node->order = symtab_order++;
569 node->next = NULL;
570 if (asm_nodes == NULL)
571 asm_nodes = node;
572 else
573 asm_last_node->next = node;
574 asm_last_node = node;
575 return node;
578 /* Output all asm statements we have stored up to be output. */
580 static void
581 output_asm_statements (void)
583 struct asm_node *can;
585 if (seen_error ())
586 return;
588 for (can = asm_nodes; can; can = can->next)
589 assemble_asm (can->asm_str);
590 asm_nodes = NULL;
593 /* Analyze the function scheduled to be output. */
594 static void
595 analyze_function (struct cgraph_node *node)
597 tree decl = node->decl;
598 location_t saved_loc = input_location;
599 input_location = DECL_SOURCE_LOCATION (decl);
601 if (node->thunk.thunk_p)
603 cgraph_create_edge (node, cgraph_get_node (node->thunk.alias),
604 NULL, 0, CGRAPH_FREQ_BASE);
605 if (!expand_thunk (node, false))
607 node->thunk.alias = NULL;
608 node->analyzed = true;
609 return;
611 node->thunk.alias = NULL;
613 if (node->alias)
614 symtab_resolve_alias
615 (node, cgraph_get_node (node->alias_target));
616 else if (node->dispatcher_function)
618 /* Generate the dispatcher body of multi-versioned functions. */
619 struct cgraph_function_version_info *dispatcher_version_info
620 = get_cgraph_node_version (node);
621 if (dispatcher_version_info != NULL
622 && (dispatcher_version_info->dispatcher_resolver
623 == NULL_TREE))
625 tree resolver = NULL_TREE;
626 gcc_assert (targetm.generate_version_dispatcher_body);
627 resolver = targetm.generate_version_dispatcher_body (node);
628 gcc_assert (resolver != NULL_TREE);
631 else
633 push_cfun (DECL_STRUCT_FUNCTION (decl));
635 assign_assembler_name_if_neeeded (node->decl);
      /* Make sure to gimplify bodies only once.  While analyzing a
	 function we lower it, which will require gimplified nested
	 functions, so we can end up here with an already gimplified
	 body.  */
641 if (!gimple_has_body_p (decl))
642 gimplify_function_tree (decl);
643 dump_function (TDI_generic, decl);
645 /* Lower the function. */
646 if (!node->lowered)
648 if (node->nested)
649 lower_nested_functions (node->decl);
650 gcc_assert (!node->nested);
652 gimple_register_cfg_hooks ();
653 bitmap_obstack_initialize (NULL);
654 execute_pass_list (g->get_passes ()->all_lowering_passes);
655 free_dominance_info (CDI_POST_DOMINATORS);
656 free_dominance_info (CDI_DOMINATORS);
657 compact_blocks ();
658 bitmap_obstack_release (NULL);
659 node->lowered = true;
662 pop_cfun ();
664 node->analyzed = true;
666 input_location = saved_loc;
/* The C++ frontend produces same-body aliases all over the place, even before
   PCH gets streamed out.  It relies on us linking the aliases with their
   functions in order to do the fixups, but ipa-ref is not PCH safe.
   Consequently we first produce aliases without links, and once the C++ FE
   is sure it won't stream PCH we build the links via this function.  */
675 void
676 cgraph_process_same_body_aliases (void)
678 symtab_node *node;
679 FOR_EACH_SYMBOL (node)
680 if (node->cpp_implicit_alias && !node->analyzed)
681 symtab_resolve_alias
682 (node,
683 TREE_CODE (node->alias_target) == VAR_DECL
684 ? (symtab_node *)varpool_node_for_decl (node->alias_target)
685 : (symtab_node *)cgraph_get_create_node (node->alias_target));
686 cpp_implicit_aliases_done = true;
689 /* Process attributes common for vars and functions. */
691 static void
692 process_common_attributes (tree decl)
694 tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
696 if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
698 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
699 "%<weakref%> attribute should be accompanied with"
700 " an %<alias%> attribute");
701 DECL_WEAK (decl) = 0;
702 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
703 DECL_ATTRIBUTES (decl));
707 /* Look for externally_visible and used attributes and mark cgraph nodes
708 accordingly.
710 We cannot mark the nodes at the point the attributes are processed (in
711 handle_*_attribute) because the copy of the declarations available at that
712 point may not be canonical. For example, in:
714 void f();
715 void f() __attribute__((used));
717 the declaration we see in handle_used_attribute will be the second
718 declaration -- but the front end will subsequently merge that declaration
719 with the original declaration and discard the second declaration.
721 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
723 void f() {}
724 void f() __attribute__((externally_visible));
726 is valid.
728 So, we walk the nodes at the end of the translation unit, applying the
729 attributes at that point. */
731 static void
732 process_function_and_variable_attributes (struct cgraph_node *first,
733 struct varpool_node *first_var)
735 struct cgraph_node *node;
736 struct varpool_node *vnode;
738 for (node = cgraph_first_function (); node != first;
739 node = cgraph_next_function (node))
741 tree decl = node->decl;
742 if (DECL_PRESERVE_P (decl))
743 cgraph_mark_force_output_node (node);
744 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
746 if (! TREE_PUBLIC (node->decl))
747 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
748 "%<externally_visible%>"
749 " attribute have effect only on public objects");
751 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
752 && (node->definition && !node->alias))
754 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
755 "%<weakref%> attribute ignored"
756 " because function is defined");
757 DECL_WEAK (decl) = 0;
758 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
759 DECL_ATTRIBUTES (decl));
762 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
763 && !DECL_DECLARED_INLINE_P (decl)
764 /* redefining extern inline function makes it DECL_UNINLINABLE. */
765 && !DECL_UNINLINABLE (decl))
766 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
767 "always_inline function might not be inlinable");
769 process_common_attributes (decl);
771 for (vnode = varpool_first_variable (); vnode != first_var;
772 vnode = varpool_next_variable (vnode))
774 tree decl = vnode->decl;
775 if (DECL_EXTERNAL (decl)
776 && DECL_INITIAL (decl))
777 varpool_finalize_decl (decl);
778 if (DECL_PRESERVE_P (decl))
779 vnode->force_output = true;
780 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
782 if (! TREE_PUBLIC (vnode->decl))
783 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
784 "%<externally_visible%>"
785 " attribute have effect only on public objects");
787 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
788 && vnode->definition
789 && DECL_INITIAL (decl))
791 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
792 "%<weakref%> attribute ignored"
793 " because variable is initialized");
794 DECL_WEAK (decl) = 0;
795 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
796 DECL_ATTRIBUTES (decl));
798 process_common_attributes (decl);
/* Mark DECL as finalized.  By finalizing the declaration, the frontend
   instructs the middle end to output the variable to the asm file, if needed
   or externally visible.  */
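/* Minimal usage sketch (hypothetical front-end code): once a file-scope
   variable VAR_DECL has its final DECL_INITIAL attached, the front end calls

     varpool_finalize_decl (var_decl);

   exactly as described for cgraph_finalize_function in the comment at the
   top of this file.  */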
806 void
807 varpool_finalize_decl (tree decl)
809 struct varpool_node *node = varpool_node_for_decl (decl);
811 gcc_assert (TREE_STATIC (decl) || DECL_EXTERNAL (decl));
813 if (node->definition)
814 return;
815 notice_global_symbol (decl);
816 node->definition = true;
817 if (TREE_THIS_VOLATILE (decl) || DECL_PRESERVE_P (decl)
      /* Traditionally we do not eliminate static variables when not
	 optimizing and when not doing toplevel reorder.  */
820 || (!flag_toplevel_reorder && !DECL_COMDAT (node->decl)
821 && !DECL_ARTIFICIAL (node->decl)))
822 node->force_output = true;
824 if (cgraph_state == CGRAPH_STATE_CONSTRUCTION
825 && (decide_is_symbol_needed (node)
826 || referred_to_p (node)))
827 enqueue_node (node);
828 if (cgraph_state >= CGRAPH_STATE_IPA_SSA)
829 varpool_analyze_node (node);
830 /* Some frontends produce various interface variables after compilation
831 finished. */
832 if (cgraph_state == CGRAPH_STATE_FINISHED)
833 varpool_assemble_decl (node);
/* EDGE is a polymorphic call.  Mark all possible targets as reachable
   and if there is only one target, perform trivial devirtualization.
   REACHABLE_CALL_TARGETS collects target lists we already walked to
   avoid duplicate work.  */
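/* Source-level illustration (C++, not part of this file) of the trivial
   devirtualization performed below:

     struct S { virtual void f (); };
     namespace { struct T : S { void f () { } }; }
     void g (T *t) { t->f (); }

   T lives in an anonymous namespace, so all of its derived types are known
   to be in this unit; if T::f is the only possible target, the indirect
   call in g is turned into a direct call to T::f.  */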
841 static void
842 walk_polymorphic_call_targets (pointer_set_t *reachable_call_targets,
843 struct cgraph_edge *edge)
845 unsigned int i;
846 void *cache_token;
847 bool final;
848 vec <cgraph_node *>targets
849 = possible_polymorphic_call_targets
850 (edge, &final, &cache_token);
852 if (!pointer_set_insert (reachable_call_targets,
853 cache_token))
855 if (cgraph_dump_file)
856 dump_possible_polymorphic_call_targets
857 (cgraph_dump_file, edge);
859 for (i = 0; i < targets.length (); i++)
861 /* Do not bother to mark virtual methods in anonymous namespace;
862 either we will find use of virtual table defining it, or it is
863 unused. */
864 if (targets[i]->definition
865 && TREE_CODE
866 (TREE_TYPE (targets[i]->decl))
867 == METHOD_TYPE
868 && !type_in_anonymous_namespace_p
869 (method_class_type
870 (TREE_TYPE (targets[i]->decl))))
871 enqueue_node (targets[i]);
  /* Very trivial devirtualization; when the type is
     final or anonymous (so we know all its derived types)
     and there is only one possible virtual call target,
     make the edge direct.  */
879 if (final)
881 if (targets.length () <= 1)
883 cgraph_node *target;
884 if (targets.length () == 1)
885 target = targets[0];
886 else
887 target = cgraph_get_create_node
888 (builtin_decl_implicit (BUILT_IN_UNREACHABLE));
890 if (cgraph_dump_file)
892 fprintf (cgraph_dump_file,
893 "Devirtualizing call: ");
894 print_gimple_stmt (cgraph_dump_file,
895 edge->call_stmt, 0,
896 TDF_SLIM);
898 cgraph_make_edge_direct (edge, target);
899 cgraph_redirect_edge_call_stmt_to_callee (edge);
900 if (cgraph_dump_file)
902 fprintf (cgraph_dump_file,
903 "Devirtualized as: ");
904 print_gimple_stmt (cgraph_dump_file,
905 edge->call_stmt, 0,
906 TDF_SLIM);
/* Discover all functions and variables that are trivially needed, analyze
   them as well as all functions and variables referred to by them.  */
916 static void
917 analyze_functions (void)
919 /* Keep track of already processed nodes when called multiple times for
920 intermodule optimization. */
921 static struct cgraph_node *first_analyzed;
922 struct cgraph_node *first_handled = first_analyzed;
923 static struct varpool_node *first_analyzed_var;
924 struct varpool_node *first_handled_var = first_analyzed_var;
925 struct pointer_set_t *reachable_call_targets = pointer_set_create ();
927 symtab_node *node;
928 symtab_node *next;
929 int i;
930 struct ipa_ref *ref;
931 bool changed = true;
932 location_t saved_loc = input_location;
934 bitmap_obstack_initialize (NULL);
935 cgraph_state = CGRAPH_STATE_CONSTRUCTION;
936 input_location = UNKNOWN_LOCATION;
  /* Ugly, but the fixup cannot happen at the time the same-body alias is
     created; the C++ FE is confused about the COMDAT groups being right.  */
940 if (cpp_implicit_aliases_done)
941 FOR_EACH_SYMBOL (node)
942 if (node->cpp_implicit_alias)
943 fixup_same_cpp_alias_visibility (node, symtab_alias_target (node));
944 if (optimize && flag_devirtualize)
945 build_type_inheritance_graph ();
  /* Analysis adds static variables that in turn add references to new
     functions.  So we need to iterate the process until it stabilizes.  */
949 while (changed)
951 changed = false;
952 process_function_and_variable_attributes (first_analyzed,
953 first_analyzed_var);
955 /* First identify the trivially needed symbols. */
956 for (node = symtab_nodes;
957 node != first_analyzed
958 && node != first_analyzed_var; node = node->next)
960 if (decide_is_symbol_needed (node))
962 enqueue_node (node);
963 if (!changed && cgraph_dump_file)
964 fprintf (cgraph_dump_file, "Trivially needed symbols:");
965 changed = true;
966 if (cgraph_dump_file)
967 fprintf (cgraph_dump_file, " %s", node->asm_name ());
968 if (!changed && cgraph_dump_file)
969 fprintf (cgraph_dump_file, "\n");
971 if (node == first_analyzed
972 || node == first_analyzed_var)
973 break;
975 cgraph_process_new_functions ();
976 first_analyzed_var = varpool_first_variable ();
977 first_analyzed = cgraph_first_function ();
979 if (changed && dump_file)
980 fprintf (cgraph_dump_file, "\n");
982 /* Lower representation, build callgraph edges and references for all trivially
983 needed symbols and all symbols referred by them. */
984 while (first != (symtab_node *)(void *)1)
986 changed = true;
987 node = first;
988 first = (symtab_node *)first->aux;
989 cgraph_node *cnode = dyn_cast <cgraph_node> (node);
990 if (cnode && cnode->definition)
992 struct cgraph_edge *edge;
993 tree decl = cnode->decl;
	  /* ??? It is possible to create an extern inline function
	     and later use the weak alias attribute to kill its body.
	     See gcc.c-torture/compile/20011119-1.c  */
998 if (!DECL_STRUCT_FUNCTION (decl)
999 && !cnode->alias
1000 && !cnode->thunk.thunk_p
1001 && !cnode->dispatcher_function)
1003 cgraph_reset_node (cnode);
1004 cnode->local.redefined_extern_inline = true;
1005 continue;
1008 if (!cnode->analyzed)
1009 analyze_function (cnode);
1011 for (edge = cnode->callees; edge; edge = edge->next_callee)
1012 if (edge->callee->definition)
1013 enqueue_node (edge->callee);
1014 if (optimize && flag_devirtualize)
1016 struct cgraph_edge *next;
1018 for (edge = cnode->indirect_calls; edge; edge = next)
1020 next = edge->next_callee;
1021 if (edge->indirect_info->polymorphic)
1022 walk_polymorphic_call_targets (reachable_call_targets,
1023 edge);
1027 /* If decl is a clone of an abstract function,
1028 mark that abstract function so that we don't release its body.
1029 The DECL_INITIAL() of that abstract function declaration
1030 will be later needed to output debug info. */
1031 if (DECL_ABSTRACT_ORIGIN (decl))
1033 struct cgraph_node *origin_node
1034 = cgraph_get_node (DECL_ABSTRACT_ORIGIN (decl));
1035 origin_node->used_as_abstract_origin = true;
1038 else
1040 varpool_node *vnode = dyn_cast <varpool_node> (node);
1041 if (vnode && vnode->definition && !vnode->analyzed)
1042 varpool_analyze_node (vnode);
1045 if (node->same_comdat_group)
1047 symtab_node *next;
1048 for (next = node->same_comdat_group;
1049 next != node;
1050 next = next->same_comdat_group)
1051 enqueue_node (next);
1053 for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
1054 if (ref->referred->definition)
1055 enqueue_node (ref->referred);
1056 cgraph_process_new_functions ();
1059 if (optimize && flag_devirtualize)
1060 update_type_inheritance_graph ();
1062 /* Collect entry points to the unit. */
1063 if (cgraph_dump_file)
1065 fprintf (cgraph_dump_file, "\n\nInitial ");
1066 dump_symtab (cgraph_dump_file);
1069 if (cgraph_dump_file)
1070 fprintf (cgraph_dump_file, "\nRemoving unused symbols:");
1072 for (node = symtab_nodes;
1073 node != first_handled
1074 && node != first_handled_var; node = next)
1076 next = node->next;
1077 if (!node->aux && !referred_to_p (node))
1079 if (cgraph_dump_file)
1080 fprintf (cgraph_dump_file, " %s", node->name ());
1081 symtab_remove_node (node);
1082 continue;
1084 if (cgraph_node *cnode = dyn_cast <cgraph_node> (node))
1086 tree decl = node->decl;
1088 if (cnode->definition && !gimple_has_body_p (decl)
1089 && !cnode->alias
1090 && !cnode->thunk.thunk_p)
1091 cgraph_reset_node (cnode);
1093 gcc_assert (!cnode->definition || cnode->thunk.thunk_p
1094 || cnode->alias
1095 || gimple_has_body_p (decl));
1096 gcc_assert (cnode->analyzed == cnode->definition);
1098 node->aux = NULL;
1100 for (;node; node = node->next)
1101 node->aux = NULL;
1102 first_analyzed = cgraph_first_function ();
1103 first_analyzed_var = varpool_first_variable ();
1104 if (cgraph_dump_file)
1106 fprintf (cgraph_dump_file, "\n\nReclaimed ");
1107 dump_symtab (cgraph_dump_file);
1109 bitmap_obstack_release (NULL);
1110 pointer_set_destroy (reachable_call_targets);
1111 ggc_collect ();
1112 /* Initialize assembler name hash, in particular we want to trigger C++
1113 mangling and same body alias creation before we free DECL_ARGUMENTS
1114 used by it. */
1115 if (!seen_error ())
1116 symtab_initialize_asm_name_hash ();
1118 input_location = saved_loc;
/* Translate the ugly representation of aliases as alias pairs into a nice
   representation in the callgraph.  We don't handle all cases yet,
   unfortunately.  */
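/* Source-level illustration (user C, not part of this file) of alias pairs
   this function resolves:

     int target_fn (void) { return 0; }
     int alias_fn (void) __attribute__ ((alias ("target_fn")));

   and of a weakref whose target is not defined in the current unit:

     static int wref (void) __attribute__ ((weakref ("undefined_fn")));

   The former becomes a regular callgraph alias; the latter only sets the
   alias/weakref flags so the weakref pseudo-op is emitted later.  */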
1125 static void
1126 handle_alias_pairs (void)
1128 alias_pair *p;
1129 unsigned i;
1131 for (i = 0; alias_pairs && alias_pairs->iterate (i, &p);)
1133 symtab_node *target_node = symtab_node_for_asm (p->target);
1135 /* Weakrefs with target not defined in current unit are easy to handle:
1136 they behave just as external variables except we need to note the
1137 alias flag to later output the weakref pseudo op into asm file. */
1138 if (!target_node
1139 && lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL)
1141 symtab_node *node = symtab_get_node (p->decl);
1142 if (node)
1144 node->alias_target = p->target;
1145 node->weakref = true;
1146 node->alias = true;
1148 alias_pairs->unordered_remove (i);
1149 continue;
1151 else if (!target_node)
1153 error ("%q+D aliased to undefined symbol %qE", p->decl, p->target);
1154 symtab_node *node = symtab_get_node (p->decl);
1155 if (node)
1156 node->alias = false;
1157 alias_pairs->unordered_remove (i);
1158 continue;
1161 if (DECL_EXTERNAL (target_node->decl)
1162 /* We use local aliases for C++ thunks to force the tailcall
1163 to bind locally. This is a hack - to keep it working do
1164 the following (which is not strictly correct). */
      && (TREE_CODE (target_node->decl) != FUNCTION_DECL
1166 || ! DECL_VIRTUAL_P (target_node->decl))
1167 && ! lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)))
1169 error ("%q+D aliased to external symbol %qE",
1170 p->decl, p->target);
1173 if (TREE_CODE (p->decl) == FUNCTION_DECL
1174 && target_node && is_a <cgraph_node> (target_node))
1176 struct cgraph_node *src_node = cgraph_get_node (p->decl);
1177 if (src_node && src_node->definition)
1178 cgraph_reset_node (src_node);
1179 cgraph_create_function_alias (p->decl, target_node->decl);
1180 alias_pairs->unordered_remove (i);
1182 else if (TREE_CODE (p->decl) == VAR_DECL
1183 && target_node && is_a <varpool_node> (target_node))
1185 varpool_create_variable_alias (p->decl, target_node->decl);
1186 alias_pairs->unordered_remove (i);
1188 else
1190 error ("%q+D alias in between function and variable is not supported",
1191 p->decl);
1192 warning (0, "%q+D aliased declaration",
1193 target_node->decl);
1194 alias_pairs->unordered_remove (i);
1197 vec_free (alias_pairs);
1201 /* Figure out what functions we want to assemble. */
1203 static void
1204 mark_functions_to_output (void)
1206 struct cgraph_node *node;
1207 #ifdef ENABLE_CHECKING
1208 bool check_same_comdat_groups = false;
1210 FOR_EACH_FUNCTION (node)
1211 gcc_assert (!node->process);
1212 #endif
1214 FOR_EACH_FUNCTION (node)
1216 tree decl = node->decl;
1218 gcc_assert (!node->process || node->same_comdat_group);
1219 if (node->process)
1220 continue;
1222 /* We need to output all local functions that are used and not
1223 always inlined, as well as those that are reachable from
1224 outside the current compilation unit. */
1225 if (node->analyzed
1226 && !node->thunk.thunk_p
1227 && !node->alias
1228 && !node->global.inlined_to
1229 && !TREE_ASM_WRITTEN (decl)
1230 && !DECL_EXTERNAL (decl))
1232 node->process = 1;
1233 if (node->same_comdat_group)
1235 struct cgraph_node *next;
1236 for (next = cgraph (node->same_comdat_group);
1237 next != node;
1238 next = cgraph (next->same_comdat_group))
1239 if (!next->thunk.thunk_p && !next->alias)
1240 next->process = 1;
1243 else if (node->same_comdat_group)
1245 #ifdef ENABLE_CHECKING
1246 check_same_comdat_groups = true;
1247 #endif
1249 else
1251 /* We should've reclaimed all functions that are not needed. */
1252 #ifdef ENABLE_CHECKING
1253 if (!node->global.inlined_to
1254 && gimple_has_body_p (decl)
1255 /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
1256 are inside partition, we can end up not removing the body since we no longer
1257 have analyzed node pointing to it. */
1258 && !node->in_other_partition
1259 && !node->alias
1260 && !node->clones
1261 && !DECL_EXTERNAL (decl))
1263 dump_cgraph_node (stderr, node);
1264 internal_error ("failed to reclaim unneeded function");
1266 #endif
1267 gcc_assert (node->global.inlined_to
1268 || !gimple_has_body_p (decl)
1269 || node->in_other_partition
1270 || node->clones
1271 || DECL_ARTIFICIAL (decl)
1272 || DECL_EXTERNAL (decl));
1277 #ifdef ENABLE_CHECKING
1278 if (check_same_comdat_groups)
1279 FOR_EACH_FUNCTION (node)
1280 if (node->same_comdat_group && !node->process)
1282 tree decl = node->decl;
1283 if (!node->global.inlined_to
1284 && gimple_has_body_p (decl)
1285 /* FIXME: in an ltrans unit when the offline copy is outside a
1286 partition but inline copies are inside a partition, we can
1287 end up not removing the body since we no longer have an
1288 analyzed node pointing to it. */
1289 && !node->in_other_partition
1290 && !node->clones
1291 && !DECL_EXTERNAL (decl))
1293 dump_cgraph_node (stderr, node);
1294 internal_error ("failed to reclaim unneeded function in same "
1295 "comdat group");
1298 #endif
/* DECL is a FUNCTION_DECL.  Initialize datastructures so DECL is a function
   in lowered gimple form.  IN_SSA is true if the gimple is in SSA.

   Set current_function_decl and cfun to the newly constructed empty function
   body.  Return the basic block in the function body.  */
1307 basic_block
1308 init_lowered_empty_function (tree decl, bool in_ssa)
1310 basic_block bb;
1312 current_function_decl = decl;
1313 allocate_struct_function (decl, false);
1314 gimple_register_cfg_hooks ();
1315 init_empty_tree_cfg ();
1317 if (in_ssa)
1319 init_tree_ssa (cfun);
1320 init_ssa_operands (cfun);
1321 cfun->gimple_df->in_ssa_p = true;
1322 cfun->curr_properties |= PROP_ssa;
1325 DECL_INITIAL (decl) = make_node (BLOCK);
1327 DECL_SAVED_TREE (decl) = error_mark_node;
1328 cfun->curr_properties |= (PROP_gimple_lcf | PROP_gimple_leh | PROP_gimple_any
1329 | PROP_cfg | PROP_loops);
1331 set_loops_for_fn (cfun, ggc_alloc_cleared_loops ());
1332 init_loops_structure (cfun, loops_for_fn (cfun), 1);
1333 loops_for_fn (cfun)->state |= LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
1335 /* Create BB for body of the function and connect it properly. */
1336 bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
1337 make_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
1338 make_edge (bb, EXIT_BLOCK_PTR, 0);
1339 add_bb_to_loop (bb, ENTRY_BLOCK_PTR->loop_father);
1341 return bb;
1344 /* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1345 offset indicated by VIRTUAL_OFFSET, if that is
1346 non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
1347 zero for a result adjusting thunk. */
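/* Conceptually (illustrative pseudo-C, not what is literally emitted), for a
   this-adjusting thunk the code built below computes

     ptr += fixed_offset;
     if (virtual_offset)
       ptr += *(ptrdiff_t *) (*(char **) ptr + virtual_offset);

   i.e. the vcall offset is loaded from the vtable that the already adjusted
   pointer points to.  For a result-adjusting thunk the fixed offset is added
   after the virtual adjustment instead.  */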
1349 static tree
1350 thunk_adjust (gimple_stmt_iterator * bsi,
1351 tree ptr, bool this_adjusting,
1352 HOST_WIDE_INT fixed_offset, tree virtual_offset)
1354 gimple stmt;
1355 tree ret;
1357 if (this_adjusting
1358 && fixed_offset != 0)
1360 stmt = gimple_build_assign
1361 (ptr, fold_build_pointer_plus_hwi_loc (input_location,
1362 ptr,
1363 fixed_offset));
1364 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1367 /* If there's a virtual offset, look up that value in the vtable and
1368 adjust the pointer again. */
1369 if (virtual_offset)
1371 tree vtabletmp;
1372 tree vtabletmp2;
1373 tree vtabletmp3;
1375 if (!vtable_entry_type)
1377 tree vfunc_type = make_node (FUNCTION_TYPE);
1378 TREE_TYPE (vfunc_type) = integer_type_node;
1379 TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
1380 layout_type (vfunc_type);
1382 vtable_entry_type = build_pointer_type (vfunc_type);
1385 vtabletmp =
1386 create_tmp_reg (build_pointer_type
1387 (build_pointer_type (vtable_entry_type)), "vptr");
1389 /* The vptr is always at offset zero in the object. */
1390 stmt = gimple_build_assign (vtabletmp,
1391 build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
1392 ptr));
1393 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1395 /* Form the vtable address. */
1396 vtabletmp2 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp)),
1397 "vtableaddr");
1398 stmt = gimple_build_assign (vtabletmp2,
1399 build_simple_mem_ref (vtabletmp));
1400 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1402 /* Find the entry with the vcall offset. */
1403 stmt = gimple_build_assign (vtabletmp2,
1404 fold_build_pointer_plus_loc (input_location,
1405 vtabletmp2,
1406 virtual_offset));
1407 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1409 /* Get the offset itself. */
1410 vtabletmp3 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp2)),
1411 "vcalloffset");
1412 stmt = gimple_build_assign (vtabletmp3,
1413 build_simple_mem_ref (vtabletmp2));
1414 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1416 /* Adjust the `this' pointer. */
1417 ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
1418 ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
1419 GSI_CONTINUE_LINKING);
1422 if (!this_adjusting
1423 && fixed_offset != 0)
1424 /* Adjust the pointer by the constant. */
1426 tree ptrtmp;
1428 if (TREE_CODE (ptr) == VAR_DECL)
1429 ptrtmp = ptr;
1430 else
1432 ptrtmp = create_tmp_reg (TREE_TYPE (ptr), "ptr");
1433 stmt = gimple_build_assign (ptrtmp, ptr);
1434 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1436 ptr = fold_build_pointer_plus_hwi_loc (input_location,
1437 ptrtmp, fixed_offset);
1440 /* Emit the statement and gimplify the adjustment expression. */
1441 ret = create_tmp_reg (TREE_TYPE (ptr), "adjusted_this");
1442 stmt = gimple_build_assign (ret, ptr);
1443 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1445 return ret;
/* Expand thunk NODE to gimple if possible.
   When OUTPUT_ASM_THUNKS is true, also produce assembler for
   thunks that are not lowered.  */
1452 bool
1453 expand_thunk (struct cgraph_node *node, bool output_asm_thunks)
1455 bool this_adjusting = node->thunk.this_adjusting;
1456 HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
1457 HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
1458 tree virtual_offset = NULL;
1459 tree alias = node->callees->callee->decl;
1460 tree thunk_fndecl = node->decl;
1461 tree a;
1464 if (this_adjusting
1465 && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1466 virtual_value, alias))
1468 const char *fnname;
1469 tree fn_block;
1470 tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1472 if (!output_asm_thunks)
1473 return false;
1475 if (in_lto_p)
1476 cgraph_get_body (node);
1477 a = DECL_ARGUMENTS (thunk_fndecl);
1479 current_function_decl = thunk_fndecl;
1481 /* Ensure thunks are emitted in their correct sections. */
1482 resolve_unique_section (thunk_fndecl, 0, flag_function_sections);
1484 DECL_RESULT (thunk_fndecl)
1485 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1486 RESULT_DECL, 0, restype);
1487 DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
1488 fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
1490 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1491 create one. */
1492 fn_block = make_node (BLOCK);
1493 BLOCK_VARS (fn_block) = a;
1494 DECL_INITIAL (thunk_fndecl) = fn_block;
1495 init_function_start (thunk_fndecl);
1496 cfun->is_thunk = 1;
1497 insn_locations_init ();
1498 set_curr_insn_location (DECL_SOURCE_LOCATION (thunk_fndecl));
1499 prologue_location = curr_insn_location ();
1500 assemble_start_function (thunk_fndecl, fnname);
1502 targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1503 fixed_offset, virtual_value, alias);
1505 assemble_end_function (thunk_fndecl, fnname);
1506 insn_locations_finalize ();
1507 init_insn_lengths ();
1508 free_after_compilation (cfun);
1509 set_cfun (NULL);
1510 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1511 node->thunk.thunk_p = false;
1512 node->analyzed = false;
1514 else
1516 tree restype;
1517 basic_block bb, then_bb, else_bb, return_bb;
1518 gimple_stmt_iterator bsi;
1519 int nargs = 0;
1520 tree arg;
1521 int i;
1522 tree resdecl;
1523 tree restmp = NULL;
1524 vec<tree> vargs;
1526 gimple call;
1527 gimple ret;
1529 if (in_lto_p)
1530 cgraph_get_body (node);
1531 a = DECL_ARGUMENTS (thunk_fndecl);
1533 current_function_decl = thunk_fndecl;
1535 /* Ensure thunks are emitted in their correct sections. */
1536 resolve_unique_section (thunk_fndecl, 0, flag_function_sections);
1538 DECL_IGNORED_P (thunk_fndecl) = 1;
1539 bitmap_obstack_initialize (NULL);
1541 if (node->thunk.virtual_offset_p)
1542 virtual_offset = size_int (virtual_value);
1544 /* Build the return declaration for the function. */
1545 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1546 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1548 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1549 DECL_ARTIFICIAL (resdecl) = 1;
1550 DECL_IGNORED_P (resdecl) = 1;
1551 DECL_RESULT (thunk_fndecl) = resdecl;
1552 DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
1554 else
1555 resdecl = DECL_RESULT (thunk_fndecl);
1557 bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl, true);
1559 bsi = gsi_start_bb (bb);
1561 /* Build call to the function being thunked. */
1562 if (!VOID_TYPE_P (restype))
1564 if (DECL_BY_REFERENCE (resdecl))
1565 restmp = gimple_fold_indirect_ref (resdecl);
1566 else if (!is_gimple_reg_type (restype))
1568 restmp = resdecl;
1569 add_local_decl (cfun, restmp);
1570 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
1572 else
1573 restmp = create_tmp_reg (restype, "retval");
1576 for (arg = a; arg; arg = DECL_CHAIN (arg))
1577 nargs++;
1578 vargs.create (nargs);
1579 if (this_adjusting)
1580 vargs.quick_push (thunk_adjust (&bsi, a, 1, fixed_offset,
1581 virtual_offset));
1582 else if (nargs)
1583 vargs.quick_push (a);
1585 if (nargs)
1586 for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
1587 vargs.quick_push (arg);
1588 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
1589 node->callees->call_stmt = call;
1590 vargs.release ();
1591 gimple_call_set_from_thunk (call, true);
1592 if (restmp)
1594 gimple_call_set_lhs (call, restmp);
1595 gcc_assert (useless_type_conversion_p (TREE_TYPE (restmp),
1596 TREE_TYPE (TREE_TYPE (alias))));
1598 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
1599 if (!(gimple_call_flags (call) & ECF_NORETURN))
1601 if (restmp && !this_adjusting
1602 && (fixed_offset || virtual_offset))
1604 tree true_label = NULL_TREE;
1606 if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
1608 gimple stmt;
1609 /* If the return type is a pointer, we need to
1610 protect against NULL. We know there will be an
1611 adjustment, because that's why we're emitting a
1612 thunk. */
1613 then_bb = create_basic_block (NULL, (void *) 0, bb);
1614 return_bb = create_basic_block (NULL, (void *) 0, then_bb);
1615 else_bb = create_basic_block (NULL, (void *) 0, else_bb);
1616 add_bb_to_loop (then_bb, bb->loop_father);
1617 add_bb_to_loop (return_bb, bb->loop_father);
1618 add_bb_to_loop (else_bb, bb->loop_father);
1619 remove_edge (single_succ_edge (bb));
1620 true_label = gimple_block_label (then_bb);
1621 stmt = gimple_build_cond (NE_EXPR, restmp,
1622 build_zero_cst (TREE_TYPE (restmp)),
1623 NULL_TREE, NULL_TREE);
1624 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1625 make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1626 make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1627 make_edge (return_bb, EXIT_BLOCK_PTR, 0);
1628 make_edge (then_bb, return_bb, EDGE_FALLTHRU);
1629 make_edge (else_bb, return_bb, EDGE_FALLTHRU);
1630 bsi = gsi_last_bb (then_bb);
1633 restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
1634 fixed_offset, virtual_offset);
1635 if (true_label)
1637 gimple stmt;
1638 bsi = gsi_last_bb (else_bb);
1639 stmt = gimple_build_assign (restmp,
1640 build_zero_cst (TREE_TYPE (restmp)));
1641 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1642 bsi = gsi_last_bb (return_bb);
1645 else
1646 gimple_call_set_tail (call, true);
1648 /* Build return value. */
1649 ret = gimple_build_return (restmp);
1650 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
1652 else
1654 gimple_call_set_tail (call, true);
1655 remove_edge (single_succ_edge (bb));
1658 cfun->gimple_df->in_ssa_p = true;
1659 /* FIXME: C++ FE should stop setting TREE_ASM_WRITTEN on thunks. */
1660 TREE_ASM_WRITTEN (thunk_fndecl) = false;
1661 delete_unreachable_blocks ();
1662 update_ssa (TODO_update_ssa);
1663 #ifdef ENABLE_CHECKING
1664 verify_flow_info ();
1665 #endif
1667 /* Since we want to emit the thunk, we explicitly mark its name as
1668 referenced. */
1669 node->thunk.thunk_p = false;
1670 node->lowered = true;
1671 bitmap_obstack_release (NULL);
1673 current_function_decl = NULL;
1674 set_cfun (NULL);
1675 return true;
/* Assemble thunks and aliases associated with NODE.  */
1680 static void
1681 assemble_thunks_and_aliases (struct cgraph_node *node)
1683 struct cgraph_edge *e;
1684 int i;
1685 struct ipa_ref *ref;
1687 for (e = node->callers; e;)
1688 if (e->caller->thunk.thunk_p)
1690 struct cgraph_node *thunk = e->caller;
1692 e = e->next_caller;
1693 assemble_thunks_and_aliases (thunk);
1694 expand_thunk (thunk, true);
1696 else
1697 e = e->next_caller;
1698 for (i = 0; ipa_ref_list_referring_iterate (&node->ref_list,
1699 i, ref); i++)
1700 if (ref->use == IPA_REF_ALIAS)
1702 struct cgraph_node *alias = ipa_ref_referring_node (ref);
1703 bool saved_written = TREE_ASM_WRITTEN (node->decl);
1705 /* Force assemble_alias to really output the alias this time instead
1706 of buffering it in same alias pairs. */
1707 TREE_ASM_WRITTEN (node->decl) = 1;
1708 do_assemble_alias (alias->decl,
1709 DECL_ASSEMBLER_NAME (node->decl));
1710 assemble_thunks_and_aliases (alias);
1711 TREE_ASM_WRITTEN (node->decl) = saved_written;
1715 /* Expand function specified by NODE. */
1717 static void
1718 expand_function (struct cgraph_node *node)
1720 tree decl = node->decl;
1721 location_t saved_loc;
1723 /* We ought to not compile any inline clones. */
1724 gcc_assert (!node->global.inlined_to);
1726 announce_function (decl);
1727 node->process = 0;
1728 gcc_assert (node->lowered);
1729 cgraph_get_body (node);
1731 /* Generate RTL for the body of DECL. */
1733 timevar_push (TV_REST_OF_COMPILATION);
1735 gcc_assert (cgraph_global_info_ready);
1737 /* Initialize the default bitmap obstack. */
1738 bitmap_obstack_initialize (NULL);
1740 /* Initialize the RTL code for the function. */
1741 current_function_decl = decl;
1742 saved_loc = input_location;
1743 input_location = DECL_SOURCE_LOCATION (decl);
1744 init_function_start (decl);
1746 gimple_register_cfg_hooks ();
1748 bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation*/
1750 execute_all_ipa_transforms ();
1752 /* Perform all tree transforms and optimizations. */
1754 /* Signal the start of passes. */
1755 invoke_plugin_callbacks (PLUGIN_ALL_PASSES_START, NULL);
1757 execute_pass_list (g->get_passes ()->all_passes);
1759 /* Signal the end of passes. */
1760 invoke_plugin_callbacks (PLUGIN_ALL_PASSES_END, NULL);
1762 bitmap_obstack_release (&reg_obstack);
1764 /* Release the default bitmap obstack. */
1765 bitmap_obstack_release (NULL);
1767 /* If requested, warn about function definitions where the function will
1768 return a value (usually of some struct or union type) which itself will
1769 take up a lot of stack space. */
1770 if (warn_larger_than && !DECL_EXTERNAL (decl) && TREE_TYPE (decl))
1772 tree ret_type = TREE_TYPE (TREE_TYPE (decl));
1774 if (ret_type && TYPE_SIZE_UNIT (ret_type)
1775 && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
1776 && 0 < compare_tree_int (TYPE_SIZE_UNIT (ret_type),
1777 larger_than_size))
1779 unsigned int size_as_int
1780 = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));
1782 if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
1783 warning (OPT_Wlarger_than_, "size of return value of %q+D is %u bytes",
1784 decl, size_as_int);
1785 else
1786 warning (OPT_Wlarger_than_, "size of return value of %q+D is larger than %wd bytes",
1787 decl, larger_than_size);
1791 gimple_set_body (decl, NULL);
1792 if (DECL_STRUCT_FUNCTION (decl) == 0
1793 && !cgraph_get_node (decl)->origin)
1795 /* Stop pointing to the local nodes about to be freed.
1796 But DECL_INITIAL must remain nonzero so we know this
1797 was an actual function definition.
1798 For a nested function, this is done in c_pop_function_context.
1799 If rest_of_compilation set this to 0, leave it 0. */
1800 if (DECL_INITIAL (decl) != 0)
1801 DECL_INITIAL (decl) = error_mark_node;
1804 input_location = saved_loc;
1806 ggc_collect ();
1807 timevar_pop (TV_REST_OF_COMPILATION);
1809 /* Make sure that BE didn't give up on compiling. */
1810 gcc_assert (TREE_ASM_WRITTEN (decl));
1811 set_cfun (NULL);
1812 current_function_decl = NULL;
  /* It would make a lot more sense to output thunks before the function body
     to get more forward and fewer backward jumps.  This however would need
     solving a problem with comdats.  See PR48668.  Also aliases must come
     after the function itself to make one-pass assemblers, like the one on
     AIX, happy.  See PR 50689.
     FIXME: Perhaps thunks should be moved before the function IFF they are
     not in comdat groups.  */
1820 assemble_thunks_and_aliases (node);
1821 cgraph_release_function_body (node);
1822 /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
1823 points to the dead function body. */
1824 cgraph_node_remove_callees (node);
1825 ipa_remove_all_references (&node->ref_list);
/* Expand all functions that must be output.

   Attempt to topologically sort the nodes so a function is output when
   all the functions it calls are already assembled, to allow data to be
   propagated across the callgraph.  Use a stack to get smaller distance
   between a function and its callees (later we may choose to use a more
   sophisticated algorithm for function reordering; we will likely want
   to use subsections to make the output functions appear in top-down
   order).  */
1839 static void
1840 expand_all_functions (void)
1842 struct cgraph_node *node;
1843 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1844 int order_pos, new_order_pos = 0;
1845 int i;
1847 order_pos = ipa_reverse_postorder (order);
1848 gcc_assert (order_pos == cgraph_n_nodes);
1850 /* The garbage collector may remove inline clones we eliminate during
1851 optimization, so we must be sure not to reference them. */
1852 for (i = 0; i < order_pos; i++)
1853 if (order[i]->process)
1854 order[new_order_pos++] = order[i];
1856 for (i = new_order_pos - 1; i >= 0; i--)
1858 node = order[i];
1859 if (node->process)
1861 node->process = 0;
1862 expand_function (node);
1865 cgraph_process_new_functions ();
1867 free (order);
1871 /* These types are used to sort symbol table entries by their cgraph order number. */
1873 enum cgraph_order_sort_kind
1875 ORDER_UNDEFINED = 0,
1876 ORDER_FUNCTION,
1877 ORDER_VAR,
1878 ORDER_ASM
1881 struct cgraph_order_sort
1883 enum cgraph_order_sort_kind kind;
1884 union
1886 struct cgraph_node *f;
1887 struct varpool_node *v;
1888 struct asm_node *a;
1889 } u;
1892 /* Output all functions, variables, and asm statements in the order
1893 according to their order fields, which is the order in which they
1894 appeared in the file. This implements -fno-toplevel-reorder. In
1895 this mode we may output functions and variables which don't really
1896 need to be output. */
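/* Illustrative example (not part of the original source): for a file
   containing
       int x = 1;
       asm ("# marker");
       int f (void) { return x; }
   compiling with -fno-toplevel-reorder emits the variable, the toplevel asm
   and the function in exactly this source order, using the order fields
   recorded in the symbol table.  */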
1898 static void
1899 output_in_order (void)
1901 int max;
1902 struct cgraph_order_sort *nodes;
1903 int i;
1904 struct cgraph_node *pf;
1905 struct varpool_node *pv;
1906 struct asm_node *pa;
1908 max = symtab_order;
1909 nodes = XCNEWVEC (struct cgraph_order_sort, max);
1911 FOR_EACH_DEFINED_FUNCTION (pf)
1913 if (pf->process && !pf->thunk.thunk_p && !pf->alias)
1915 i = pf->order;
1916 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1917 nodes[i].kind = ORDER_FUNCTION;
1918 nodes[i].u.f = pf;
1922 FOR_EACH_DEFINED_VARIABLE (pv)
1923 if (!DECL_EXTERNAL (pv->decl))
1925 i = pv->order;
1926 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1927 nodes[i].kind = ORDER_VAR;
1928 nodes[i].u.v = pv;
1931 for (pa = asm_nodes; pa; pa = pa->next)
1933 i = pa->order;
1934 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1935 nodes[i].kind = ORDER_ASM;
1936 nodes[i].u.a = pa;
1939 /* In toplevel reorder mode we output all statics; mark them as needed. */
1941 for (i = 0; i < max; ++i)
1942 if (nodes[i].kind == ORDER_VAR)
1943 varpool_finalize_named_section_flags (nodes[i].u.v);
1945 for (i = 0; i < max; ++i)
1947 switch (nodes[i].kind)
1949 case ORDER_FUNCTION:
1950 nodes[i].u.f->process = 0;
1951 expand_function (nodes[i].u.f);
1952 break;
1954 case ORDER_VAR:
1955 varpool_assemble_decl (nodes[i].u.v);
1956 break;
1958 case ORDER_ASM:
1959 assemble_asm (nodes[i].u.a->asm_str);
1960 break;
1962 case ORDER_UNDEFINED:
1963 break;
1965 default:
1966 gcc_unreachable ();
1970 asm_nodes = NULL;
1971 free (nodes);
1974 static void
1975 ipa_passes (void)
1977 gcc::pass_manager *passes = g->get_passes ();
1979 set_cfun (NULL);
1980 current_function_decl = NULL;
1981 gimple_register_cfg_hooks ();
1982 bitmap_obstack_initialize (NULL);
1984 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);
1986 if (!in_lto_p)
1988 execute_ipa_pass_list (passes->all_small_ipa_passes);
1989 if (seen_error ())
1990 return;
1993 /* We never run removal of unreachable nodes after the early passes. This is
1994 because the TODOs are run before the subpasses. It is important to remove
1995 the unreachable functions to save work at the IPA level and to get the LTO
1996 symbol tables right. */
1997 symtab_remove_unreachable_nodes (true, cgraph_dump_file);
1999 /* If pass_all_early_optimizations was not scheduled, the state of
2000 the cgraph will not be properly updated. Update it now. */
2001 if (cgraph_state < CGRAPH_STATE_IPA_SSA)
2002 cgraph_state = CGRAPH_STATE_IPA_SSA;
2004 if (!in_lto_p)
2006 /* Generate coverage variables and constructors. */
2007 coverage_finish ();
2009 /* Process new functions added. */
2010 set_cfun (NULL);
2011 current_function_decl = NULL;
2012 cgraph_process_new_functions ();
2014 execute_ipa_summary_passes
2015 ((struct ipa_opt_pass_d *) passes->all_regular_ipa_passes);
2018 /* Some targets need to handle LTO assembler output specially. */
2019 if (flag_generate_lto)
2020 targetm.asm_out.lto_start ();
2022 execute_ipa_summary_passes ((struct ipa_opt_pass_d *)
2023 passes->all_lto_gen_passes);
2025 if (!in_lto_p)
2026 ipa_write_summaries ();
2028 if (flag_generate_lto)
2029 targetm.asm_out.lto_end ();
2031 if (!flag_ltrans && (in_lto_p || !flag_lto || flag_fat_lto_objects))
2032 execute_ipa_pass_list (passes->all_regular_ipa_passes);
2033 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
2035 bitmap_obstack_release (NULL);
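/* Editorial note (a reading of the conditions above, not authoritative):
   with plain "gcc -c -flto foo.c" the regular IPA passes only run their
   summary/analysis stage here and their data is streamed into the LTO
   sections of the object file; their execute stage runs later at link time.
   With -ffat-lto-objects, or when LTO is not enabled at all, the final
   condition above also executes the regular IPA passes immediately.  */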
2039 /* Return, as an identifier, the target name given by DECL's "alias" attribute. */
2041 static tree
2042 get_alias_symbol (tree decl)
2044 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
2045 return get_identifier (TREE_STRING_POINTER
2046 (TREE_VALUE (TREE_VALUE (alias))));
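/* For example (illustrative only): given
       void foo (void) { }
       void bar (void) __attribute__ ((alias ("foo")));
   the "alias" attribute on bar carries the string "foo", so this function
   returns the identifier "foo".  */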
2050 /* Weakrefs may be associated with external decls and thus not output
2051 at expansion time. Emit all necessary aliases. */
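/* For reference (illustrative only): a weakref is declared as e.g.
       static void local_fn (void) __attribute__ ((weakref ("real_fn")));
   and, unlike an ordinary alias, its target need not be defined in the
   current compilation unit.  */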
2053 static void
2054 output_weakrefs (void)
2056 symtab_node *node;
2057 FOR_EACH_SYMBOL (node)
2058 if (node->alias
2059 && !TREE_ASM_WRITTEN (node->decl)
2060 && node->weakref)
2062 tree target;
2064 /* Weakrefs are special in not requiring the target to be defined in the
2065 current compilation unit, so it is a bit hard to work out what we want
2066 to alias.
2067 When the alias target is defined, we need to fetch it from the symtab
2068 reference; otherwise it is pointed to by alias_target. */
2069 if (node->alias_target)
2070 target = (DECL_P (node->alias_target)
2071 ? DECL_ASSEMBLER_NAME (node->alias_target)
2072 : node->alias_target);
2073 else if (node->analyzed)
2074 target = DECL_ASSEMBLER_NAME (symtab_alias_target (node)->decl);
2075 else
2077 gcc_unreachable ();
2078 target = get_alias_symbol (node->decl);
2080 do_assemble_alias (node->decl, target);
2084 /* Initialize callgraph dump file. */
2086 void
2087 init_cgraph (void)
2089 if (!cgraph_dump_file)
2090 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
2094 /* Perform simple optimizations based on callgraph. */
2096 void
2097 compile (void)
2099 if (seen_error ())
2100 return;
2102 #ifdef ENABLE_CHECKING
2103 verify_symtab ();
2104 #endif
2106 timevar_push (TV_CGRAPHOPT);
2107 if (pre_ipa_mem_report)
2109 fprintf (stderr, "Memory consumption before IPA\n");
2110 dump_memory_report (false);
2112 if (!quiet_flag)
2113 fprintf (stderr, "Performing interprocedural optimizations\n");
2114 cgraph_state = CGRAPH_STATE_IPA;
2116 /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE. */
2117 if (flag_lto)
2118 lto_streamer_hooks_init ();
2120 /* Don't run the IPA passes if there were any errors or sorry messages. */
2121 if (!seen_error ())
2122 ipa_passes ();
2124 /* Do nothing else if any IPA pass found errors or if we are just streaming LTO. */
2125 if (seen_error ()
2126 || (!in_lto_p && flag_lto && !flag_fat_lto_objects))
2128 timevar_pop (TV_CGRAPHOPT);
2129 return;
2132 /* This pass removes the bodies of extern inline functions we never inlined.
2133 Do this later so other IPA passes see what is really going on. */
2134 symtab_remove_unreachable_nodes (false, dump_file);
2135 cgraph_global_info_ready = true;
2136 if (cgraph_dump_file)
2138 fprintf (cgraph_dump_file, "Optimized ");
2139 dump_symtab (cgraph_dump_file);
2141 if (post_ipa_mem_report)
2143 fprintf (stderr, "Memory consumption after IPA\n");
2144 dump_memory_report (false);
2146 timevar_pop (TV_CGRAPHOPT);
2148 /* Output everything. */
2149 (*debug_hooks->assembly_start) ();
2150 if (!quiet_flag)
2151 fprintf (stderr, "Assembling functions:\n");
2152 #ifdef ENABLE_CHECKING
2153 verify_symtab ();
2154 #endif
2156 cgraph_materialize_all_clones ();
2157 bitmap_obstack_initialize (NULL);
2158 execute_ipa_pass_list (g->get_passes ()->all_late_ipa_passes);
2159 symtab_remove_unreachable_nodes (true, dump_file);
2160 #ifdef ENABLE_CHECKING
2161 verify_symtab ();
2162 #endif
2163 bitmap_obstack_release (NULL);
2164 mark_functions_to_output ();
2166 /* When weakref support is missing, we automatically translate all
2167 references to NODE to references to its ultimate alias target.
2168 The renaming mechanism uses the flag IDENTIFIER_TRANSPARENT_ALIAS and
2169 TREE_CHAIN.
2171 Set up this mapping before we output any assembler, but once we are sure
2172 that all symbol renaming is done.
2174 FIXME: All this ugliness can go away if we just do renaming at the GIMPLE
2175 level by physically rewriting the IL. At the moment we can only redirect
2176 calls, so we need infrastructure for renaming references as well. */
2177 #ifndef ASM_OUTPUT_WEAKREF
2178 symtab_node *node;
2180 FOR_EACH_SYMBOL (node)
2181 if (node->alias
2182 && lookup_attribute ("weakref", DECL_ATTRIBUTES (node->decl)))
2184 IDENTIFIER_TRANSPARENT_ALIAS
2185 (DECL_ASSEMBLER_NAME (node->decl)) = 1;
2186 TREE_CHAIN (DECL_ASSEMBLER_NAME (node->decl))
2187 = (node->alias_target ? node->alias_target
2188 : DECL_ASSEMBLER_NAME (symtab_alias_target (node)->decl));
2190 #endif
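/* Editorial note: with the mapping above in place, any assembler name marked
   IDENTIFIER_TRANSPARENT_ALIAS is replaced at output time by the target
   stored in its TREE_CHAIN, so e.g. references to a weakref symbol are
   emitted as references to its ultimate alias target even on targets that
   lack ASM_OUTPUT_WEAKREF.  */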
2192 cgraph_state = CGRAPH_STATE_EXPANSION;
2193 if (!flag_toplevel_reorder)
2194 output_in_order ();
2195 else
2197 output_asm_statements ();
2199 expand_all_functions ();
2200 varpool_output_variables ();
2203 cgraph_process_new_functions ();
2204 cgraph_state = CGRAPH_STATE_FINISHED;
2205 output_weakrefs ();
2207 if (cgraph_dump_file)
2209 fprintf (cgraph_dump_file, "\nFinal ");
2210 dump_symtab (cgraph_dump_file);
2212 #ifdef ENABLE_CHECKING
2213 verify_symtab ();
2214 /* Double check that all inline clones are gone and that all
2215 function bodies have been released from memory. */
2216 if (!seen_error ())
2218 struct cgraph_node *node;
2219 bool error_found = false;
2221 FOR_EACH_DEFINED_FUNCTION (node)
2222 if (node->global.inlined_to
2223 || gimple_has_body_p (node->decl))
2225 error_found = true;
2226 dump_cgraph_node (stderr, node);
2228 if (error_found)
2229 internal_error ("nodes with unreleased memory found");
2231 #endif
2235 /* Analyze the whole compilation unit once it is parsed completely. */
2237 void
2238 finalize_compilation_unit (void)
2240 timevar_push (TV_CGRAPH);
2242 /* If we're here there's no current function anymore. Some frontends
2243 are lazy in clearing these. */
2244 current_function_decl = NULL;
2245 set_cfun (NULL);
2247 /* Do not skip analyzing the functions if there were errors; otherwise we
2248 would miss diagnostics for the following functions. */
2250 /* Emit size functions we didn't inline. */
2251 finalize_size_functions ();
2253 /* Mark alias targets necessary and emit diagnostics. */
2254 handle_alias_pairs ();
2256 if (!quiet_flag)
2258 fprintf (stderr, "\nAnalyzing compilation unit\n");
2259 fflush (stderr);
2262 if (flag_dump_passes)
2263 dump_passes ();
2265 /* Gimplify and lower all functions, compute reachability and
2266 remove unreachable nodes. */
2267 analyze_functions ();
2269 /* Mark alias targets necessary and emit diagnostics. */
2270 handle_alias_pairs ();
2272 /* Gimplify and lower thunks. */
2273 analyze_functions ();
2275 /* Finally drive the pass manager. */
2276 compile ();
2278 timevar_pop (TV_CGRAPH);
2282 #include "gt-cgraphunit.h"