1 /* Basic IPA optimizations and utilities.
2 Copyright (C) 2003, 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "cgraph.h"
26 #include "tree-pass.h"
27 #include "timevar.h"
28 #include "gimple.h"
29 #include "ggc.h"
30 #include "flags.h"
31 #include "pointer-set.h"
32 #include "target.h"
33 #include "tree-iterator.h"
35 /* Fill the array ORDER with all cgraph nodes in reverse topological order.
36 Nodes that will not be output are included as well, so that dependencies through inline functions are handled correctly. Return the number of nodes stored in ORDER. */
38 int
39 cgraph_postorder (struct cgraph_node **order)
41 struct cgraph_node *node, *node2;
42 int stack_size = 0;
43 int order_pos = 0;
44 struct cgraph_edge *edge, last;
45 int pass;
47 struct cgraph_node **stack =
48 XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
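  /* The address of LAST is used as a sentinel value in the AUX pointers:
     once a node's AUX is set to &last, all of its caller edges have been
     visited and the node is ready to be placed into ORDER.  */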
50 /* We have to deal with cycles nicely, so use a depth-first traversal to
51 produce the output. Ignore the fact that some functions won't need
52 to be output and put them into order as well, so we get dependencies
53 right through inline functions. */
54 for (node = cgraph_nodes; node; node = node->next)
55 node->aux = NULL;
56 for (pass = 0; pass < 2; pass++)
57 for (node = cgraph_nodes; node; node = node->next)
58 if (!node->aux
59 && (pass
60 || (!node->address_taken
61 && !node->global.inlined_to
62 && !cgraph_only_called_directly_p (node))))
64 node2 = node;
65 if (!node->callers)
66 node->aux = &last;
67 else
68 node->aux = node->callers;
69 while (node2)
71 while (node2->aux != &last)
73 edge = (struct cgraph_edge *) node2->aux;
74 if (edge->next_caller)
75 node2->aux = edge->next_caller;
76 else
77 node2->aux = &last;
78 /* Break possible cycles involving always-inline
79 functions by ignoring edges from always-inline
80 functions to non-always-inline functions. */
81 if (edge->caller->local.disregard_inline_limits
82 && !edge->callee->local.disregard_inline_limits)
83 continue;
84 if (!edge->caller->aux)
86 if (!edge->caller->callers)
87 edge->caller->aux = &last;
88 else
89 edge->caller->aux = edge->caller->callers;
90 stack[stack_size++] = node2;
91 node2 = edge->caller;
92 break;
95 if (node2->aux == &last)
97 order[order_pos++] = node2;
98 if (stack_size)
99 node2 = stack[--stack_size];
100 else
101 node2 = NULL;
105 free (stack);
106 for (node = cgraph_nodes; node; node = node->next)
107 node->aux = NULL;
108 return order_pos;
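/* A typical caller (compare ipa_profile further below in this file; the
   fragment is only an illustrative sketch):

     struct cgraph_node **order = XCNEWVEC (struct cgraph_node *,
                                            cgraph_n_nodes);
     int order_pos = cgraph_postorder (order);
     int i;

     for (i = order_pos - 1; i >= 0; i--)
       visit (order[i]);
     free (order);

   where visit () stands for whatever per-node work the pass performs.  */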
111 /* Look for all functions inlined to NODE and update their inlined_to pointers
112 to INLINED_TO. */
114 static void
115 update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
117 struct cgraph_edge *e;
118 for (e = node->callees; e; e = e->next_callee)
119 if (e->callee->global.inlined_to)
121 e->callee->global.inlined_to = inlined_to;
122 update_inlined_to_pointer (e->callee, inlined_to);
126 /* Add cgraph NODE to queue starting at FIRST.
128 The queue is linked via AUX pointers and terminated by pointer to 1.
129 We enqueue nodes on two occasions: when we find them reachable or when we find
130 their bodies needed for further cloning. In the second case we mark them
131 by pointer to 2 after processing, so they are re-queued when they become
132 reachable. */
134 static void
135 enqueue_cgraph_node (struct cgraph_node *node, struct cgraph_node **first)
137 /* Node is still in queue; do nothing. */
138 if (node->aux && node->aux != (void *) 2)
139 return;
140 /* Node was already processed as unreachable, re-enqueue
141 only if it became reachable now. */
142 if (node->aux == (void *)2 && !node->reachable)
143 return;
144 node->aux = *first;
145 *first = node;
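/* The resulting worklist is drained from the head until the terminating
   pointer-to-1 is reached, as cgraph_remove_unreachable_nodes does below:

     while (first != (struct cgraph_node *) (void *) 1)
       {
         struct cgraph_node *node = first;
         first = (struct cgraph_node *) first->aux;
         ...
       }
*/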
148 /* Add varpool NODE to queue starting at FIRST. */
150 static void
151 enqueue_varpool_node (struct varpool_node *node, struct varpool_node **first)
153 node->aux = *first;
154 *first = node;
157 /* Process references. */
159 static void
160 process_references (struct ipa_ref_list *list,
161 struct cgraph_node **first,
162 struct varpool_node **first_varpool,
163 bool before_inlining_p)
165 int i;
166 struct ipa_ref *ref;
167 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
169 if (ref->refered_type == IPA_REF_CGRAPH)
171 struct cgraph_node *node = ipa_ref_node (ref);
172 if (!node->reachable
173 && node->analyzed
174 && (!DECL_EXTERNAL (node->decl)
175 || before_inlining_p))
176 node->reachable = true;
177 enqueue_cgraph_node (node, first);
179 else
181 struct varpool_node *node = ipa_ref_varpool_node (ref);
182 if (!node->needed)
184 varpool_mark_needed_node (node);
185 enqueue_varpool_node (node, first_varpool);
191 /* Return true when function can be marked local. */
193 static bool
194 cgraph_local_node_p (struct cgraph_node *node)
196 return (cgraph_only_called_directly_p (node)
197 && node->analyzed
198 && !DECL_EXTERNAL (node->decl)
199 && !node->local.externally_visible
200 && !node->reachable_from_other_partition
201 && !node->in_other_partition);
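/* For example, a function such as

     static int sq (int x) { return x * x; }

   that is called only directly, never has its address taken and is not
   shared with another partition satisfies this predicate, so later passes
   may freely change its calling convention.  */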
204 /* Perform reachability analysis and reclaim all unreachable nodes.
205 If BEFORE_INLINING_P is true this function is called before inlining
206 decisions have been made. If BEFORE_INLINING_P is false this function also
207 removes unneeded bodies of extern inline functions. */
209 bool
210 cgraph_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
212 struct cgraph_node *first = (struct cgraph_node *) (void *) 1;
213 struct varpool_node *first_varpool = (struct varpool_node *) (void *) 1;
214 struct cgraph_node *node, *next;
215 struct varpool_node *vnode, *vnext;
216 bool changed = false;
218 #ifdef ENABLE_CHECKING
219 verify_cgraph ();
220 #endif
221 if (file)
222 fprintf (file, "\nReclaiming functions:");
223 #ifdef ENABLE_CHECKING
224 for (node = cgraph_nodes; node; node = node->next)
225 gcc_assert (!node->aux);
226 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
227 gcc_assert (!vnode->aux);
228 #endif
229 varpool_reset_queue ();
230 /* Mark functions whose bodies are obviously needed.
231 This is mostly when they can be referenced externally. Inline clones
232 are special since their declarations are shared with the master clone and thus
233 cgraph_can_remove_if_no_direct_calls_and_refs_p should not be called on them. */
234 for (node = cgraph_nodes; node; node = node->next)
235 if (node->analyzed && !node->global.inlined_to
236 && (!cgraph_can_remove_if_no_direct_calls_and_refs_p (node)
237 /* Keep around virtual functions for possible devirtualization. */
238 || (before_inlining_p
239 && DECL_VIRTUAL_P (node->decl)
240 && (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl)))
241 /* External functions with their address taken are also better kept
242 around for indirect inlining. */
243 || (before_inlining_p
244 && DECL_EXTERNAL (node->decl)
245 && node->address_taken)))
247 gcc_assert (!node->global.inlined_to);
248 enqueue_cgraph_node (node, &first);
249 node->reachable = true;
251 else
253 gcc_assert (!node->aux);
254 node->reachable = false;
257 /* Mark variables that are obviously needed. */
258 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
260 vnode->next_needed = NULL;
261 vnode->prev_needed = NULL;
262 if (vnode->analyzed
263 && !varpool_can_remove_if_no_refs (vnode))
265 vnode->needed = false;
266 varpool_mark_needed_node (vnode);
267 enqueue_varpool_node (vnode, &first_varpool);
269 else
270 vnode->needed = false;
273 /* Perform reachability analysis. As a special case do not consider
274 extern inline functions not inlined as live because we won't output
275 them at all.
277 We maintain two worklists, one for cgraph nodes and one for varpool nodes,
278 and we are finished once both are empty. */
280 while (first != (struct cgraph_node *) (void *) 1
281 || first_varpool != (struct varpool_node *) (void *) 1)
283 if (first != (struct cgraph_node *) (void *) 1)
285 struct cgraph_edge *e;
286 node = first;
287 first = (struct cgraph_node *) first->aux;
288 if (!node->reachable)
289 node->aux = (void *)2;
291 /* If we found this node reachable, mark all its callees
292 reachable too, unless they are direct calls to extern inline functions
293 we decided not to inline. */
294 if (node->reachable)
296 for (e = node->callees; e; e = e->next_callee)
298 if (!e->callee->reachable
299 && node->analyzed
300 && (!e->inline_failed
301 || !DECL_EXTERNAL (e->callee->decl)
302 || before_inlining_p))
303 e->callee->reachable = true;
304 enqueue_cgraph_node (e->callee, &first);
306 process_references (&node->ref_list, &first, &first_varpool, before_inlining_p);
309 /* If any function in a comdat group is reachable, force
310 all other functions in the same comdat group to be
311 also reachable. */
312 if (node->same_comdat_group
313 && node->reachable
314 && !node->global.inlined_to)
316 for (next = node->same_comdat_group;
317 next != node;
318 next = next->same_comdat_group)
319 if (!next->reachable)
321 next->reachable = true;
322 enqueue_cgraph_node (next, &first);
326 /* We can freely remove inline clones even if they are cloned, however if
327 the function is a clone of a real clone, we must keep it around in order to
328 make materialize_clones produce a function body with the changes
329 applied. */
330 while (node->clone_of && !node->clone_of->aux
331 && !gimple_has_body_p (node->decl))
333 bool noninline = node->clone_of->decl != node->decl;
334 node = node->clone_of;
335 if (noninline && !node->reachable && !node->aux)
337 enqueue_cgraph_node (node, &first);
338 break;
342 if (first_varpool != (struct varpool_node *) (void *) 1)
344 vnode = first_varpool;
345 first_varpool = (struct varpool_node *)first_varpool->aux;
346 vnode->aux = NULL;
347 process_references (&vnode->ref_list, &first, &first_varpool, before_inlining_p);
348 /* If any variable in a comdat group is needed, force
349 all other variables in the same comdat group to be
350 needed as well. */
351 if (vnode->same_comdat_group)
353 struct varpool_node *next;
354 for (next = vnode->same_comdat_group;
355 next != vnode;
356 next = next->same_comdat_group)
357 if (!next->needed)
359 varpool_mark_needed_node (next);
360 enqueue_varpool_node (next, &first_varpool);
366 /* Remove unreachable nodes.
368 Completely unreachable functions can be fully removed from the callgraph.
369 Extern inline functions that we decided not to inline need to become unanalyzed nodes of
370 the callgraph (so we still have edges to them); we then remove their bodies.
372 We also need to take care of functions that are unreachable but that we must keep around
373 for later cloning. In this case we also turn them into unanalyzed nodes, but
374 keep the body around. */
375 for (node = cgraph_nodes; node; node = next)
377 next = node->next;
378 if (node->aux && !node->reachable)
380 cgraph_node_remove_callees (node);
381 ipa_remove_all_references (&node->ref_list);
382 node->analyzed = false;
383 node->local.inlinable = false;
385 if (!node->aux)
387 struct cgraph_edge *e;
388 bool found = false;
389 int i;
390 struct ipa_ref *ref;
392 node->global.inlined_to = NULL;
393 if (file)
394 fprintf (file, " %s", cgraph_node_name (node));
395 /* See if there is a reachable caller. */
396 for (e = node->callers; e && !found; e = e->next_caller)
397 if (e->caller->reachable)
398 found = true;
399 for (i = 0; (ipa_ref_list_refering_iterate (&node->ref_list, i, ref)
400 && !found); i++)
401 if (ref->refering_type == IPA_REF_CGRAPH
402 && ipa_ref_refering_node (ref)->reachable)
403 found = true;
404 else if (ref->refering_type == IPA_REF_VARPOOL
405 && ipa_ref_refering_varpool_node (ref)->needed)
406 found = true;
408 /* If so, we need to keep node in the callgraph. */
409 if (found)
411 if (node->analyzed)
413 struct cgraph_node *clone;
415 /* If any clone is still needed (its AUX is set), we must keep the body around.
416 Otherwise we can release the body and detach NODE from the clone tree. */
417 for (clone = node->clones; clone;
418 clone = clone->next_sibling_clone)
419 if (clone->aux)
420 break;
421 if (!clone)
423 cgraph_release_function_body (node);
424 node->local.inlinable = false;
425 if (node->prev_sibling_clone)
426 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
427 else if (node->clone_of)
428 node->clone_of->clones = node->next_sibling_clone;
429 if (node->next_sibling_clone)
430 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
431 #ifdef ENABLE_CHECKING
432 if (node->clone_of)
433 node->former_clone_of = node->clone_of->decl;
434 #endif
435 node->clone_of = NULL;
436 node->next_sibling_clone = NULL;
437 node->prev_sibling_clone = NULL;
439 else
440 gcc_assert (!clone->in_other_partition);
441 node->analyzed = false;
442 changed = true;
443 cgraph_node_remove_callees (node);
444 ipa_remove_all_references (&node->ref_list);
447 else
449 cgraph_remove_node (node);
450 changed = true;
454 for (node = cgraph_nodes; node; node = node->next)
456 /* Inline clones might be kept around so that materializing them allows further
457 cloning. If the function the clone was inlined into is removed, we need
458 to turn it into a normal clone. */
459 if (node->global.inlined_to
460 && !node->callers)
462 gcc_assert (node->clones);
463 node->global.inlined_to = NULL;
464 update_inlined_to_pointer (node, node);
466 node->aux = NULL;
469 if (file)
470 fprintf (file, "\n");
472 /* We must release unused extern inlines or sanity checking will fail. The rest of the transformations
473 are undesirable at -O0, since we do not want to remove anything. */
474 if (!optimize)
475 return changed;
477 if (file)
478 fprintf (file, "Reclaiming variables:");
479 for (vnode = varpool_nodes; vnode; vnode = vnext)
481 vnext = vnode->next;
482 if (!vnode->needed)
484 if (file)
485 fprintf (file, " %s", varpool_node_name (vnode));
486 varpool_remove_node (vnode);
487 changed = true;
491 /* Now update address_taken flags and try to promote functions to be local. */
493 if (file)
494 fprintf (file, "\nClearing address taken flags:");
495 for (node = cgraph_nodes; node; node = node->next)
496 if (node->address_taken
497 && !node->reachable_from_other_partition)
499 int i;
500 struct ipa_ref *ref;
501 bool found = false;
502 for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref)
503 && !found; i++)
505 gcc_assert (ref->use == IPA_REF_ADDR);
506 found = true;
508 if (!found)
510 if (file)
511 fprintf (file, " %s", cgraph_node_name (node));
512 node->address_taken = false;
513 changed = true;
514 if (cgraph_local_node_p (node))
516 node->local.local = true;
517 if (file)
518 fprintf (file, " (local)");
523 #ifdef ENABLE_CHECKING
524 verify_cgraph ();
525 #endif
527 /* Reclaim alias pairs for functions that have disappeared from the
528 call graph. */
529 remove_unreachable_alias_pairs ();
531 return changed;
534 /* Discover variables that no longer have their address taken or that are read-only
535 and update their flags.
537 FIXME: This cannot be done between gimplify and omp_expand since the
538 readonly flag plays a role in what is shared and what is not. Currently we do
539 this transformation as part of whole-program visibility and redo it in the
540 ipa-reference pass (to take cloning into account), but it would
541 make sense to do it before early optimizations. */
543 void
544 ipa_discover_readonly_nonaddressable_vars (void)
546 struct varpool_node *vnode;
547 if (dump_file)
548 fprintf (dump_file, "Clearing variable flags:");
549 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
550 if (vnode->finalized && varpool_all_refs_explicit_p (vnode)
551 && (TREE_ADDRESSABLE (vnode->decl) || !TREE_READONLY (vnode->decl)))
553 bool written = false;
554 bool address_taken = false;
555 int i;
556 struct ipa_ref *ref;
557 for (i = 0; ipa_ref_list_refering_iterate (&vnode->ref_list, i, ref)
558 && (!written || !address_taken); i++)
559 switch (ref->use)
561 case IPA_REF_ADDR:
562 address_taken = true;
563 break;
564 case IPA_REF_LOAD:
565 break;
566 case IPA_REF_STORE:
567 written = true;
568 break;
570 if (TREE_ADDRESSABLE (vnode->decl) && !address_taken)
572 if (dump_file)
573 fprintf (dump_file, " %s (addressable)", varpool_node_name (vnode));
574 TREE_ADDRESSABLE (vnode->decl) = 0;
576 if (!TREE_READONLY (vnode->decl) && !address_taken && !written
577 /* Making a variable in an explicit section read-only can cause a section
578 type conflict.
579 See e.g. gcc.c-torture/compile/pr23237.c */
580 && DECL_SECTION_NAME (vnode->decl) == NULL)
582 if (dump_file)
583 fprintf (dump_file, " %s (read-only)", varpool_node_name (vnode));
584 TREE_READONLY (vnode->decl) = 1;
587 if (dump_file)
588 fprintf (dump_file, "\n");
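/* As an example, given a unit containing

     static int counter;
     int next_id (void) { return ++counter; }

   COUNTER never has its address taken, so TREE_ADDRESSABLE can be cleared
   if it was set; because it is written, it is not marked TREE_READONLY.
   A static variable that is only ever read (and has no explicit section)
   would be marked TREE_READONLY instead.  */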
591 /* Return true when there is a reference to NODE and it is not from a vtable. */
592 static bool
593 cgraph_address_taken_from_non_vtable_p (struct cgraph_node *node)
595 int i;
596 struct ipa_ref *ref;
597 for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
599 struct varpool_node *node;
600 if (ref->refered_type == IPA_REF_CGRAPH)
601 return true;
602 node = ipa_ref_varpool_node (ref);
603 if (!DECL_VIRTUAL_P (node->decl))
604 return true;
606 return false;
609 /* COMDAT functions must be shared only if they have their address taken,
610 otherwise we can produce our own private implementation with
611 -fwhole-program.
612 Return true when turning a COMDAT function static cannot lead to wrong
613 code when the resulting object links with a library defining the same COMDAT.
615 Virtual functions do have their addresses taken from the vtables,
616 but in C++ there is no way to compare their addresses for equality. */
618 bool
619 cgraph_comdat_can_be_unshared_p (struct cgraph_node *node)
621 if ((cgraph_address_taken_from_non_vtable_p (node)
622 && !DECL_VIRTUAL_P (node->decl))
623 || !node->analyzed)
624 return false;
625 if (node->same_comdat_group)
627 struct cgraph_node *next;
629 /* If more than one function is in the same COMDAT group, it must
630 be shared even if just one function in the comdat group has its
631 address taken. */
632 for (next = node->same_comdat_group;
633 next != node; next = next->same_comdat_group)
634 if (cgraph_address_taken_from_non_vtable_p (next)
635 && !DECL_VIRTUAL_P (next->decl))
636 return false;
638 return true;
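/* For instance, a C++ inline function defined in a header and emitted as a
   COMDAT, e.g.

     inline int clamp (int v) { return v < 0 ? 0 : v; }

   whose address is never taken, can be unshared under -fwhole-program:
   this translation unit gets its own private copy, and since no address
   comparison is possible here, linking against a library that carries the
   same COMDAT cannot expose the difference.  */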
641 /* Return true when function NODE should be considered externally visible. */
643 static bool
644 cgraph_externally_visible_p (struct cgraph_node *node, bool whole_program, bool aliased)
646 struct cgraph_node *alias;
647 if (!node->local.finalized)
648 return false;
649 if (!DECL_COMDAT (node->decl)
650 && (!TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl)))
651 return false;
653 /* Do not even try to be smart about aliased nodes. Until we properly
654 represent everything by same body alias, these are just evil. */
655 if (aliased)
656 return true;
658 /* If the linker counts on us, we must preserve the function. */
659 if (cgraph_used_from_object_file_p (node))
660 return true;
661 if (DECL_PRESERVE_P (node->decl))
662 return true;
663 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (node->decl)))
664 return true;
666 /* When doing LTO or whole program, we can make COMDAT functions static.
667 This improves code quality and we know we will duplicate them at most twice
668 (in the case that we are not using a plugin and link with an object file
669 implementing the same COMDAT). */
670 if ((in_lto_p || whole_program)
671 && DECL_COMDAT (node->decl)
672 && cgraph_comdat_can_be_unshared_p (node))
673 return false;
675 /* See if we have linker information about the symbol not being used, or
676 if we need to make a guess based on the declaration.
678 Even if the linker claims the symbol is unused, never bring internal
679 symbols that are declared by the user as used or externally visible.
680 This is needed e.g. for references from asm statements. */
681 for (alias = node->same_body; alias; alias = alias->next)
682 if (alias->resolution != LDPR_PREVAILING_DEF_IRONLY)
683 break;
684 if (!alias && node->resolution == LDPR_PREVAILING_DEF_IRONLY)
685 return false;
687 /* When doing link time optimizations, hidden symbols become local. */
688 if (in_lto_p
689 && (DECL_VISIBILITY (node->decl) == VISIBILITY_HIDDEN
690 || DECL_VISIBILITY (node->decl) == VISIBILITY_INTERNAL)
691 /* Be sure that node is defined in IR file, not in other object
692 file. In that case we don't set used_from_other_object_file. */
693 && node->analyzed)
695 else if (!whole_program)
696 return true;
698 if (MAIN_NAME_P (DECL_NAME (node->decl)))
699 return true;
701 return false;
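/* E.g. a function marked with

     __attribute__ ((externally_visible)) void hook (void) { ... }

   remains externally visible even under -fwhole-program, thanks to the
   lookup_attribute check above; the same holds for DECL_PRESERVE_P
   ("used") declarations.  */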
704 /* Return true when variable VNODE should be considered externally visible. */
706 static bool
707 varpool_externally_visible_p (struct varpool_node *vnode, bool aliased)
709 struct varpool_node *alias;
710 if (!DECL_COMDAT (vnode->decl) && !TREE_PUBLIC (vnode->decl))
711 return false;
713 /* Do not even try to be smart about aliased nodes. Until we properly
714 represent everything by same body alias, these are just evil. */
715 if (aliased)
716 return true;
718 /* If the linker counts on us, we must preserve the variable. */
719 if (varpool_used_from_object_file_p (vnode))
720 return true;
722 if (DECL_PRESERVE_P (vnode->decl))
723 return true;
724 if (lookup_attribute ("externally_visible",
725 DECL_ATTRIBUTES (vnode->decl)))
726 return true;
728 /* See if we have linker information about the symbol not being used, or
729 if we need to make a guess based on the declaration.
731 Even if the linker claims the symbol is unused, never bring internal
732 symbols that are declared by the user as used or externally visible.
733 This is needed e.g. for references from asm statements. */
734 if (varpool_used_from_object_file_p (vnode))
735 return true;
736 for (alias = vnode->extra_name; alias; alias = alias->next)
737 if (alias->resolution != LDPR_PREVAILING_DEF_IRONLY)
738 break;
739 if (!alias && vnode->resolution == LDPR_PREVAILING_DEF_IRONLY)
740 return false;
742 /* As a special case, COMDAT virtual tables can be unshared.
743 In LTO mode turn vtables into static variables. The variable is read-only,
744 so this does not enable more optimization, but referring to a static variable
745 is faster for dynamic linking. This also matches the logic that hides vtables
746 from LTO symbol tables. */
747 if ((in_lto_p || flag_whole_program)
748 && !vnode->force_output
749 && DECL_COMDAT (vnode->decl) && DECL_VIRTUAL_P (vnode->decl))
750 return false;
752 /* When doing link time optimizations, hidden symbols become local. */
753 if (in_lto_p
754 && (DECL_VISIBILITY (vnode->decl) == VISIBILITY_HIDDEN
755 || DECL_VISIBILITY (vnode->decl) == VISIBILITY_INTERNAL)
756 /* Be sure that node is defined in IR file, not in other object
757 file. In that case we don't set used_from_other_object_file. */
758 && vnode->finalized)
760 else if (!flag_whole_program)
761 return true;
763 /* Do not attempt to privatize COMDATs by default.
764 This would break linking with C++ libraries sharing
765 inline definitions.
767 FIXME: We could do so for read-only vars whose address is never taken and
768 possibly also for vtables, since no direct pointer comparison is done.
769 It might be interesting to do so to reduce linking overhead. */
770 if (DECL_COMDAT (vnode->decl) || DECL_WEAK (vnode->decl))
771 return true;
772 return false;
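/* Concretely, under -flto or -fwhole-program a COMDAT vtable emitted for a
   class defined in a header (DECL_VIRTUAL_P and DECL_COMDAT both set,
   force_output clear) is reported as not externally visible here and can
   later be localized; as the comment above notes, this does not enable
   extra optimization, but referencing a static variable is cheaper under
   dynamic linking.  */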
775 /* Dissolve the same_comdat_group list in which NODE resides. */
777 static void
778 dissolve_same_comdat_group_list (struct cgraph_node *node)
780 struct cgraph_node *n = node, *next;
783 next = n->same_comdat_group;
784 n->same_comdat_group = NULL;
785 n = next;
787 while (n != node);
790 /* Mark visibility of all functions.
792 A local function is one whose calls can occur only in the current
793 compilation unit and all its calls are explicit, so we can change
794 its calling convention. We simply mark all static functions whose
795 address is not taken as local.
797 We also change the TREE_PUBLIC flag of all declarations that are public
798 from the language's point of view but that we want to override for the
799 backend via visibilities. */
801 static unsigned int
802 function_and_variable_visibility (bool whole_program)
804 struct cgraph_node *node;
805 struct varpool_node *vnode;
806 struct pointer_set_t *aliased_nodes = pointer_set_create ();
807 struct pointer_set_t *aliased_vnodes = pointer_set_create ();
808 unsigned i;
809 alias_pair *p;
811 /* Discover aliased nodes. */
812 FOR_EACH_VEC_ELT (alias_pair, alias_pairs, i, p)
814 if (dump_file)
815 fprintf (dump_file, "Alias %s->%s",
816 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (p->decl)),
817 IDENTIFIER_POINTER (p->target));
819 if ((node = cgraph_node_for_asm (p->target)) != NULL)
821 gcc_assert (node->needed);
822 pointer_set_insert (aliased_nodes, node);
823 if (dump_file)
824 fprintf (dump_file, " node %s/%i",
825 cgraph_node_name (node), node->uid);
827 else if ((vnode = varpool_node_for_asm (p->target)) != NULL)
829 gcc_assert (vnode->needed);
830 pointer_set_insert (aliased_vnodes, vnode);
831 if (dump_file)
832 fprintf (dump_file, " varpool node %s",
833 varpool_node_name (vnode));
835 if (dump_file)
836 fprintf (dump_file, "\n");
839 for (node = cgraph_nodes; node; node = node->next)
841 int flags = flags_from_decl_or_type (node->decl);
842 if (optimize
843 && (flags & (ECF_CONST | ECF_PURE))
844 && !(flags & ECF_LOOPING_CONST_OR_PURE))
846 DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
847 DECL_STATIC_DESTRUCTOR (node->decl) = 0;
850 /* In the absence of COMDAT support the C++ FE creates local COMDAT functions
851 (that ought to be shared but cannot be due to object format
852 limitations). It is necessary to keep the flag to make the rest of the C++ FE
853 happy. Clear the flag here to avoid confusion in the middle-end. */
854 if (DECL_COMDAT (node->decl) && !TREE_PUBLIC (node->decl))
855 DECL_COMDAT (node->decl) = 0;
856 /* For external decls stop tracking same_comdat_group; it doesn't matter
857 what comdat group they are in when they won't be emitted in this TU,
858 and dropping it simplifies later passes. */
859 if (node->same_comdat_group && DECL_EXTERNAL (node->decl))
861 #ifdef ENABLE_CHECKING
862 struct cgraph_node *n;
864 for (n = node->same_comdat_group;
865 n != node;
866 n = n->same_comdat_group)
867 /* If at least one function of the same comdat group is external,
868 all of them have to be; otherwise it is a front-end bug. */
869 gcc_assert (DECL_EXTERNAL (n->decl));
870 #endif
871 dissolve_same_comdat_group_list (node);
873 gcc_assert ((!DECL_WEAK (node->decl) && !DECL_COMDAT (node->decl))
874 || TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl));
875 if (cgraph_externally_visible_p (node, whole_program,
876 pointer_set_contains (aliased_nodes,
877 node)))
879 gcc_assert (!node->global.inlined_to);
880 node->local.externally_visible = true;
882 else
883 node->local.externally_visible = false;
884 if (!node->local.externally_visible && node->analyzed
885 && !DECL_EXTERNAL (node->decl))
887 struct cgraph_node *alias;
888 gcc_assert (whole_program || in_lto_p || !TREE_PUBLIC (node->decl));
889 cgraph_make_decl_local (node->decl);
890 node->resolution = LDPR_PREVAILING_DEF_IRONLY;
891 for (alias = node->same_body; alias; alias = alias->next)
892 cgraph_make_decl_local (alias->decl);
893 if (node->same_comdat_group)
894 /* cgraph_externally_visible_p has already checked all other nodes
895 in the group and they will all be made local. We need to
896 dissolve the group at once so that the predicate does not
897 segfault though. */
898 dissolve_same_comdat_group_list (node);
900 node->local.local = cgraph_local_node_p (node);
902 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
904 /* The weak flag makes no sense on local variables. */
905 gcc_assert (!DECL_WEAK (vnode->decl)
906 || TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl));
907 /* In several cases declarations cannot be common:
909 - when the declaration has an initializer
910 - when it is weak
911 - when it has a specific section
912 - when it resides in a non-generic address space
913 - when the declaration is local, as it will go into the .local common section,
914 so the common flag is not needed. Frontends still produce these in
915 certain cases, such as for:
917 static int a __attribute__ ((common))
919 Canonicalize things here and clear the redundant flag. */
920 if (DECL_COMMON (vnode->decl)
921 && (!(TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl))
922 || (DECL_INITIAL (vnode->decl)
923 && DECL_INITIAL (vnode->decl) != error_mark_node)
924 || DECL_WEAK (vnode->decl)
925 || DECL_SECTION_NAME (vnode->decl) != NULL
926 || ! (ADDR_SPACE_GENERIC_P
927 (TYPE_ADDR_SPACE (TREE_TYPE (vnode->decl))))))
928 DECL_COMMON (vnode->decl) = 0;
930 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
932 if (!vnode->finalized)
933 continue;
934 if (vnode->needed
935 && varpool_externally_visible_p
936 (vnode,
937 pointer_set_contains (aliased_vnodes, vnode)))
938 vnode->externally_visible = true;
939 else
940 vnode->externally_visible = false;
941 if (!vnode->externally_visible)
943 gcc_assert (in_lto_p || whole_program || !TREE_PUBLIC (vnode->decl));
944 cgraph_make_decl_local (vnode->decl);
945 vnode->resolution = LDPR_PREVAILING_DEF_IRONLY;
947 gcc_assert (TREE_STATIC (vnode->decl));
949 pointer_set_destroy (aliased_nodes);
950 pointer_set_destroy (aliased_vnodes);
952 if (dump_file)
954 fprintf (dump_file, "\nMarking local functions:");
955 for (node = cgraph_nodes; node; node = node->next)
956 if (node->local.local)
957 fprintf (dump_file, " %s", cgraph_node_name (node));
958 fprintf (dump_file, "\n\n");
959 fprintf (dump_file, "\nMarking externally visible functions:");
960 for (node = cgraph_nodes; node; node = node->next)
961 if (node->local.externally_visible)
962 fprintf (dump_file, " %s", cgraph_node_name (node));
963 fprintf (dump_file, "\n\n");
964 fprintf (dump_file, "\nMarking externally visible variables:");
965 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
966 if (vnode->externally_visible)
967 fprintf (dump_file, " %s", varpool_node_name (vnode));
968 fprintf (dump_file, "\n\n");
970 cgraph_function_flags_ready = true;
971 return 0;
974 /* Local function pass handling visibilities. This happens before LTO streaming
975 so in particular -fwhole-program should be ignored at this level. */
977 static unsigned int
978 local_function_and_variable_visibility (void)
980 return function_and_variable_visibility (flag_whole_program && !flag_lto);
983 struct simple_ipa_opt_pass pass_ipa_function_and_variable_visibility =
986 SIMPLE_IPA_PASS,
987 "visibility", /* name */
988 NULL, /* gate */
989 local_function_and_variable_visibility,/* execute */
990 NULL, /* sub */
991 NULL, /* next */
992 0, /* static_pass_number */
993 TV_CGRAPHOPT, /* tv_id */
994 0, /* properties_required */
995 0, /* properties_provided */
996 0, /* properties_destroyed */
997 0, /* todo_flags_start */
998 TODO_remove_functions | TODO_dump_cgraph
999 | TODO_ggc_collect /* todo_flags_finish */
1003 /* Do not re-run on ltrans stage. */
1005 static bool
1006 gate_whole_program_function_and_variable_visibility (void)
1008 return !flag_ltrans;
1011 /* Bring functions local at LTO time with -fwhole-program. */
1013 static unsigned int
1014 whole_program_function_and_variable_visibility (void)
1016 struct cgraph_node *node;
1017 struct varpool_node *vnode;
1019 function_and_variable_visibility (flag_whole_program);
1021 for (node = cgraph_nodes; node; node = node->next)
1022 if ((node->local.externally_visible && !DECL_COMDAT (node->decl))
1023 && node->local.finalized)
1024 cgraph_mark_needed_node (node);
1025 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
1026 if (vnode->externally_visible && !DECL_COMDAT (vnode->decl))
1027 varpool_mark_needed_node (vnode);
1028 if (dump_file)
1030 fprintf (dump_file, "\nNeeded variables:");
1031 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
1032 if (vnode->needed)
1033 fprintf (dump_file, " %s", varpool_node_name (vnode));
1034 fprintf (dump_file, "\n\n");
1036 if (optimize)
1037 ipa_discover_readonly_nonaddressable_vars ();
1038 return 0;
1041 struct ipa_opt_pass_d pass_ipa_whole_program_visibility =
1044 IPA_PASS,
1045 "whole-program", /* name */
1046 gate_whole_program_function_and_variable_visibility,/* gate */
1047 whole_program_function_and_variable_visibility,/* execute */
1048 NULL, /* sub */
1049 NULL, /* next */
1050 0, /* static_pass_number */
1051 TV_CGRAPHOPT, /* tv_id */
1052 0, /* properties_required */
1053 0, /* properties_provided */
1054 0, /* properties_destroyed */
1055 0, /* todo_flags_start */
1056 TODO_remove_functions | TODO_dump_cgraph
1057 | TODO_ggc_collect /* todo_flags_finish */
1059 NULL, /* generate_summary */
1060 NULL, /* write_summary */
1061 NULL, /* read_summary */
1062 NULL, /* write_optimization_summary */
1063 NULL, /* read_optimization_summary */
1064 NULL, /* stmt_fixup */
1065 0, /* TODOs */
1066 NULL, /* function_transform */
1067 NULL, /* variable_transform */
1070 /* Hash a cgraph node set element. */
1072 static hashval_t
1073 hash_cgraph_node_set_element (const void *p)
1075 const_cgraph_node_set_element element = (const_cgraph_node_set_element) p;
1076 return htab_hash_pointer (element->node);
1079 /* Compare two cgraph node set elements. */
1081 static int
1082 eq_cgraph_node_set_element (const void *p1, const void *p2)
1084 const_cgraph_node_set_element e1 = (const_cgraph_node_set_element) p1;
1085 const_cgraph_node_set_element e2 = (const_cgraph_node_set_element) p2;
1087 return e1->node == e2->node;
1090 /* Create a new cgraph node set. */
1092 cgraph_node_set
1093 cgraph_node_set_new (void)
1095 cgraph_node_set new_node_set;
1097 new_node_set = ggc_alloc_cgraph_node_set_def ();
1098 new_node_set->hashtab = htab_create_ggc (10,
1099 hash_cgraph_node_set_element,
1100 eq_cgraph_node_set_element,
1101 NULL);
1102 new_node_set->nodes = NULL;
1103 return new_node_set;
1106 /* Add cgraph_node NODE to cgraph_node_set SET. */
1108 void
1109 cgraph_node_set_add (cgraph_node_set set, struct cgraph_node *node)
1111 void **slot;
1112 cgraph_node_set_element element;
1113 struct cgraph_node_set_element_def dummy;
1115 dummy.node = node;
1116 slot = htab_find_slot (set->hashtab, &dummy, INSERT);
1118 if (*slot != HTAB_EMPTY_ENTRY)
1120 element = (cgraph_node_set_element) *slot;
1121 gcc_assert (node == element->node
1122 && (VEC_index (cgraph_node_ptr, set->nodes, element->index)
1123 == node));
1124 return;
1127 /* Insert node into hash table. */
1128 element = ggc_alloc_cgraph_node_set_element_def ();
1129 element->node = node;
1130 element->index = VEC_length (cgraph_node_ptr, set->nodes);
1131 *slot = element;
1133 /* Insert into node vector. */
1134 VEC_safe_push (cgraph_node_ptr, gc, set->nodes, node);
1137 /* Remove cgraph_node NODE from cgraph_node_set SET. */
1139 void
1140 cgraph_node_set_remove (cgraph_node_set set, struct cgraph_node *node)
1142 void **slot, **last_slot;
1143 cgraph_node_set_element element, last_element;
1144 struct cgraph_node *last_node;
1145 struct cgraph_node_set_element_def dummy;
1147 dummy.node = node;
1148 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1149 if (slot == NULL)
1150 return;
1152 element = (cgraph_node_set_element) *slot;
1153 gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
1154 == node);
1156 /* Remove from vector. We do this by swapping node with the last element
1157 of the vector. */
1158 last_node = VEC_pop (cgraph_node_ptr, set->nodes);
1159 if (last_node != node)
1161 dummy.node = last_node;
1162 last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1163 last_element = (cgraph_node_set_element) *last_slot;
1164 gcc_assert (last_element);
1166 /* Move the last element to the original spot of NODE. */
1167 last_element->index = element->index;
1168 VEC_replace (cgraph_node_ptr, set->nodes, last_element->index,
1169 last_node);
1172 /* Remove element from hash table. */
1173 htab_clear_slot (set->hashtab, slot);
1174 ggc_free (element);
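/* For instance, removing B from a set whose node vector is [A, B, C, D]
   pops D, stores D in B's former slot (index 1), updates D's hash table
   element to index 1 and finally clears B's hash table slot, leaving
   [A, D, C].  */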
1177 /* Find NODE in SET and return an iterator to it if found. A null iterator
1178 is returned if NODE is not in SET. */
1180 cgraph_node_set_iterator
1181 cgraph_node_set_find (cgraph_node_set set, struct cgraph_node *node)
1183 void **slot;
1184 struct cgraph_node_set_element_def dummy;
1185 cgraph_node_set_element element;
1186 cgraph_node_set_iterator csi;
1188 dummy.node = node;
1189 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1190 if (slot == NULL)
1191 csi.index = (unsigned) ~0;
1192 else
1194 element = (cgraph_node_set_element) *slot;
1195 gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
1196 == node);
1197 csi.index = element->index;
1199 csi.set = set;
1201 return csi;
1204 /* Dump content of SET to file F. */
1206 void
1207 dump_cgraph_node_set (FILE *f, cgraph_node_set set)
1209 cgraph_node_set_iterator iter;
1211 for (iter = csi_start (set); !csi_end_p (iter); csi_next (&iter))
1213 struct cgraph_node *node = csi_node (iter);
1214 fprintf (f, " %s/%i", cgraph_node_name (node), node->uid);
1216 fprintf (f, "\n");
1219 /* Dump content of SET to stderr. */
1221 DEBUG_FUNCTION void
1222 debug_cgraph_node_set (cgraph_node_set set)
1224 dump_cgraph_node_set (stderr, set);
1227 /* Hash a varpool node set element. */
1229 static hashval_t
1230 hash_varpool_node_set_element (const void *p)
1232 const_varpool_node_set_element element = (const_varpool_node_set_element) p;
1233 return htab_hash_pointer (element->node);
1236 /* Compare two varpool node set elements. */
1238 static int
1239 eq_varpool_node_set_element (const void *p1, const void *p2)
1241 const_varpool_node_set_element e1 = (const_varpool_node_set_element) p1;
1242 const_varpool_node_set_element e2 = (const_varpool_node_set_element) p2;
1244 return e1->node == e2->node;
1247 /* Create a new varpool node set. */
1249 varpool_node_set
1250 varpool_node_set_new (void)
1252 varpool_node_set new_node_set;
1254 new_node_set = ggc_alloc_varpool_node_set_def ();
1255 new_node_set->hashtab = htab_create_ggc (10,
1256 hash_varpool_node_set_element,
1257 eq_varpool_node_set_element,
1258 NULL);
1259 new_node_set->nodes = NULL;
1260 return new_node_set;
1263 /* Add varpool_node NODE to varpool_node_set SET. */
1265 void
1266 varpool_node_set_add (varpool_node_set set, struct varpool_node *node)
1268 void **slot;
1269 varpool_node_set_element element;
1270 struct varpool_node_set_element_def dummy;
1272 dummy.node = node;
1273 slot = htab_find_slot (set->hashtab, &dummy, INSERT);
1275 if (*slot != HTAB_EMPTY_ENTRY)
1277 element = (varpool_node_set_element) *slot;
1278 gcc_assert (node == element->node
1279 && (VEC_index (varpool_node_ptr, set->nodes, element->index)
1280 == node));
1281 return;
1284 /* Insert node into hash table. */
1285 element = ggc_alloc_varpool_node_set_element_def ();
1286 element->node = node;
1287 element->index = VEC_length (varpool_node_ptr, set->nodes);
1288 *slot = element;
1290 /* Insert into node vector. */
1291 VEC_safe_push (varpool_node_ptr, gc, set->nodes, node);
1294 /* Remove varpool_node NODE from varpool_node_set SET. */
1296 void
1297 varpool_node_set_remove (varpool_node_set set, struct varpool_node *node)
1299 void **slot, **last_slot;
1300 varpool_node_set_element element, last_element;
1301 struct varpool_node *last_node;
1302 struct varpool_node_set_element_def dummy;
1304 dummy.node = node;
1305 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1306 if (slot == NULL)
1307 return;
1309 element = (varpool_node_set_element) *slot;
1310 gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
1311 == node);
1313 /* Remove from vector. We do this by swapping node with the last element
1314 of the vector. */
1315 last_node = VEC_pop (varpool_node_ptr, set->nodes);
1316 if (last_node != node)
1318 dummy.node = last_node;
1319 last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1320 last_element = (varpool_node_set_element) *last_slot;
1321 gcc_assert (last_element);
1323 /* Move the last element to the original spot of NODE. */
1324 last_element->index = element->index;
1325 VEC_replace (varpool_node_ptr, set->nodes, last_element->index,
1326 last_node);
1329 /* Remove element from hash table. */
1330 htab_clear_slot (set->hashtab, slot);
1331 ggc_free (element);
1334 /* Find NODE in SET and return an iterator to it if found. A null iterator
1335 is returned if NODE is not in SET. */
1337 varpool_node_set_iterator
1338 varpool_node_set_find (varpool_node_set set, struct varpool_node *node)
1340 void **slot;
1341 struct varpool_node_set_element_def dummy;
1342 varpool_node_set_element element;
1343 varpool_node_set_iterator vsi;
1345 dummy.node = node;
1346 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1347 if (slot == NULL)
1348 vsi.index = (unsigned) ~0;
1349 else
1351 element = (varpool_node_set_element) *slot;
1352 gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
1353 == node);
1354 vsi.index = element->index;
1356 vsi.set = set;
1358 return vsi;
1361 /* Dump content of SET to file F. */
1363 void
1364 dump_varpool_node_set (FILE *f, varpool_node_set set)
1366 varpool_node_set_iterator iter;
1368 for (iter = vsi_start (set); !vsi_end_p (iter); vsi_next (&iter))
1370 struct varpool_node *node = vsi_node (iter);
1371 fprintf (f, " %s", varpool_node_name (node));
1373 fprintf (f, "\n");
1376 /* Dump content of SET to stderr. */
1378 DEBUG_FUNCTION void
1379 debug_varpool_node_set (varpool_node_set set)
1381 dump_varpool_node_set (stderr, set);
1385 /* Simple ipa profile pass propagating frequencies across the callgraph. */
1387 static unsigned int
1388 ipa_profile (void)
1390 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1391 struct cgraph_edge *e;
1392 int order_pos;
1393 bool something_changed = false;
1394 int i;
1396 order_pos = cgraph_postorder (order);
1397 for (i = order_pos - 1; i >= 0; i--)
1399 if (order[i]->local.local && cgraph_propagate_frequency (order[i]))
1401 for (e = order[i]->callees; e; e = e->next_callee)
1402 if (e->callee->local.local && !e->callee->aux)
1404 something_changed = true;
1405 e->callee->aux = (void *)1;
1408 order[i]->aux = NULL;
1411 while (something_changed)
1413 something_changed = false;
1414 for (i = order_pos - 1; i >= 0; i--)
1416 if (order[i]->aux && cgraph_propagate_frequency (order[i]))
1418 for (e = order[i]->callees; e; e = e->next_callee)
1419 if (e->callee->local.local && !e->callee->aux)
1421 something_changed = true;
1422 e->callee->aux = (void *)1;
1425 order[i]->aux = NULL;
1428 free (order);
1429 return 0;
1432 static bool
1433 gate_ipa_profile (void)
1435 return flag_ipa_profile;
1438 struct ipa_opt_pass_d pass_ipa_profile =
1441 IPA_PASS,
1442 "ipa-profile", /* name */
1443 gate_ipa_profile, /* gate */
1444 ipa_profile, /* execute */
1445 NULL, /* sub */
1446 NULL, /* next */
1447 0, /* static_pass_number */
1448 TV_IPA_PROFILE, /* tv_id */
1449 0, /* properties_required */
1450 0, /* properties_provided */
1451 0, /* properties_destroyed */
1452 0, /* todo_flags_start */
1453 0 /* todo_flags_finish */
1455 NULL, /* generate_summary */
1456 NULL, /* write_summary */
1457 NULL, /* read_summary */
1458 NULL, /* write_optimization_summary */
1459 NULL, /* read_optimization_summary */
1460 NULL, /* stmt_fixup */
1461 0, /* TODOs */
1462 NULL, /* function_transform */
1463 NULL /* variable_transform */
1466 /* Generate and emit a static constructor or destructor. WHICH must
1467 be one of 'I' (for a constructor) or 'D' (for a destructor). BODY
1468 is a STATEMENT_LIST containing GENERIC statements. PRIORITY is the
1469 initialization priority for this constructor or destructor. */
1471 void
1472 cgraph_build_static_cdtor (char which, tree body, int priority)
1474 static int counter = 0;
1475 char which_buf[16];
1476 tree decl, name, resdecl;
1478 /* The priority is encoded in the constructor or destructor name.
1479 collect2 will sort the names and arrange that they are called at
1480 program startup. */
1481 sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
1482 name = get_file_function_name (which_buf);
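  /* The assembler name produced here is expected to look something like
     _GLOBAL__I_00100_0_<file-specific suffix> for WHICH == 'I' and
     PRIORITY == 100; the exact spelling is determined by
     get_file_function_name and the target, so treat this as an
     illustration only.  */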
1484 decl = build_decl (input_location, FUNCTION_DECL, name,
1485 build_function_type_list (void_type_node, NULL_TREE));
1486 current_function_decl = decl;
1488 resdecl = build_decl (input_location,
1489 RESULT_DECL, NULL_TREE, void_type_node);
1490 DECL_ARTIFICIAL (resdecl) = 1;
1491 DECL_RESULT (decl) = resdecl;
1492 DECL_CONTEXT (resdecl) = decl;
1494 allocate_struct_function (decl, false);
1496 TREE_STATIC (decl) = 1;
1497 TREE_USED (decl) = 1;
1498 DECL_ARTIFICIAL (decl) = 1;
1499 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
1500 DECL_SAVED_TREE (decl) = body;
1501 if (!targetm.have_ctors_dtors)
1503 TREE_PUBLIC (decl) = 1;
1504 DECL_PRESERVE_P (decl) = 1;
1506 DECL_UNINLINABLE (decl) = 1;
1508 DECL_INITIAL (decl) = make_node (BLOCK);
1509 TREE_USED (DECL_INITIAL (decl)) = 1;
1511 DECL_SOURCE_LOCATION (decl) = input_location;
1512 cfun->function_end_locus = input_location;
1514 switch (which)
1516 case 'I':
1517 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1518 decl_init_priority_insert (decl, priority);
1519 break;
1520 case 'D':
1521 DECL_STATIC_DESTRUCTOR (decl) = 1;
1522 decl_fini_priority_insert (decl, priority);
1523 break;
1524 default:
1525 gcc_unreachable ();
1528 gimplify_function_tree (decl);
1530 cgraph_add_new_function (decl, false);
1532 set_cfun (NULL);
1533 current_function_decl = NULL;
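/* A minimal illustrative use (build_cdtor below does essentially this for
   whole batches of functions):

     tree body = NULL_TREE;
     tree call = build_call_expr (some_fn_decl, 0);
     TREE_SIDE_EFFECTS (call) = 1;
     append_to_statement_list (call, &body);
     cgraph_build_static_cdtor ('I', body, DEFAULT_INIT_PRIORITY);

   where some_fn_decl stands for an existing FUNCTION_DECL taking no
   arguments.  */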
1537 /* A vector of FUNCTION_DECLs declared as static constructors. */
1538 static VEC(tree, heap) *static_ctors;
1539 /* A vector of FUNCTION_DECLs declared as static destructors. */
1540 static VEC(tree, heap) *static_dtors;
1542 /* When the target does not have ctors and dtors, we call all constructors
1543 and destructors from special initialization/destruction functions
1544 recognized by collect2.
1546 When we are going to build such a function, collect all constructors and
1547 destructors and turn them into normal functions. */
1549 static void
1550 record_cdtor_fn (struct cgraph_node *node)
1552 if (DECL_STATIC_CONSTRUCTOR (node->decl))
1553 VEC_safe_push (tree, heap, static_ctors, node->decl);
1554 if (DECL_STATIC_DESTRUCTOR (node->decl))
1555 VEC_safe_push (tree, heap, static_dtors, node->decl);
1556 node = cgraph_node (node->decl);
1557 node->local.disregard_inline_limits = 1;
1560 /* Define global constructor/destructor functions for the vector CDTORS.
1561 The CDTORS are sorted by initialization
1562 priority. If CTOR_P is true, these are constructors; otherwise,
1563 they are destructors. */
1565 static void
1566 build_cdtor (bool ctor_p, VEC (tree, heap) *cdtors)
1568 size_t i,j;
1569 size_t len = VEC_length (tree, cdtors);
1571 i = 0;
1572 while (i < len)
1574 tree body;
1575 tree fn;
1576 priority_type priority;
1578 priority = 0;
1579 body = NULL_TREE;
1580 j = i;
1583 priority_type p;
1584 fn = VEC_index (tree, cdtors, j);
1585 p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
1586 if (j == i)
1587 priority = p;
1588 else if (p != priority)
1589 break;
1590 j++;
1592 while (j < len);
1594 /* When there is only one cdtor and the target supports them, do nothing. */
1595 if (j == i + 1
1596 && targetm.have_ctors_dtors)
1598 i++;
1599 continue;
1601 /* Emit calls for this batch of constructors/destructors, which
1602 all share the same initialization priority. */
1603 for (;i < j; i++)
1605 tree call;
1606 fn = VEC_index (tree, cdtors, i);
1607 call = build_call_expr (fn, 0);
1608 if (ctor_p)
1609 DECL_STATIC_CONSTRUCTOR (fn) = 0;
1610 else
1611 DECL_STATIC_DESTRUCTOR (fn) = 0;
1612 /* We do not want to optimize away pure/const calls here.
1613 When optimizing, these should already be removed; when not
1614 optimizing, we want the user to be able to breakpoint in them. */
1615 TREE_SIDE_EFFECTS (call) = 1;
1616 append_to_statement_list (call, &body);
1618 gcc_assert (body != NULL_TREE);
1619 /* Generate a function to call all the functions of like
1620 priority. */
1621 cgraph_build_static_cdtor (ctor_p ? 'I' : 'D', body, priority);
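/* For example (illustrative), on a target without .ctors/.dtors support,
   three constructors with priorities 100, 100 and 65535, already sorted by
   compare_ctor, form two batches and therefore two generated constructor
   functions (via cgraph_build_static_cdtor), one per distinct priority,
   each calling the constructors of its batch in order.  */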
1625 /* Comparison function for qsort. P1 and P2 are actually of type
1626 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
1627 used to determine the sort order. */
1629 static int
1630 compare_ctor (const void *p1, const void *p2)
1632 tree f1;
1633 tree f2;
1634 int priority1;
1635 int priority2;
1637 f1 = *(const tree *)p1;
1638 f2 = *(const tree *)p2;
1639 priority1 = DECL_INIT_PRIORITY (f1);
1640 priority2 = DECL_INIT_PRIORITY (f2);
1642 if (priority1 < priority2)
1643 return -1;
1644 else if (priority1 > priority2)
1645 return 1;
1646 else
1647 /* Ensure a stable sort. Constructors are executed in backward
1648 order to make LTO initialize libraries first. */
1649 return DECL_UID (f2) - DECL_UID (f1);
1652 /* Comparison function for qsort. P1 and P2 are actually of type
1653 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
1654 used to determine the sort order. */
1656 static int
1657 compare_dtor (const void *p1, const void *p2)
1659 tree f1;
1660 tree f2;
1661 int priority1;
1662 int priority2;
1664 f1 = *(const tree *)p1;
1665 f2 = *(const tree *)p2;
1666 priority1 = DECL_FINI_PRIORITY (f1);
1667 priority2 = DECL_FINI_PRIORITY (f2);
1669 if (priority1 < priority2)
1670 return -1;
1671 else if (priority1 > priority2)
1672 return 1;
1673 else
1674 /* Ensure a stable sort. */
1675 return DECL_UID (f1) - DECL_UID (f2);
1678 /* Generate functions to call static constructors and destructors
1679 for targets that do not support .ctors/.dtors sections. These
1680 functions have magic names which are detected by collect2. */
1682 static void
1683 build_cdtor_fns (void)
1685 if (!VEC_empty (tree, static_ctors))
1687 gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
1688 VEC_qsort (tree, static_ctors, compare_ctor);
1689 build_cdtor (/*ctor_p=*/true, static_ctors);
1692 if (!VEC_empty (tree, static_dtors))
1694 gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
1695 VEC_qsort (tree, static_dtors, compare_dtor);
1696 build_cdtor (/*ctor_p=*/false, static_dtors);
1700 /* Look for constructors and destructors and produce functions calling them.
1701 This is needed for targets not supporting ctors or dtors, but we perform the
1702 transformation also at link time to merge possibly numerous
1703 constructors/destructors into a single function to improve code locality and
1704 reduce size. */
1706 static unsigned int
1707 ipa_cdtor_merge (void)
1709 struct cgraph_node *node;
1710 for (node = cgraph_nodes; node; node = node->next)
1711 if (node->analyzed
1712 && (DECL_STATIC_CONSTRUCTOR (node->decl)
1713 || DECL_STATIC_DESTRUCTOR (node->decl)))
1714 record_cdtor_fn (node);
1715 build_cdtor_fns ();
1716 VEC_free (tree, heap, static_ctors);
1717 VEC_free (tree, heap, static_dtors);
1718 return 0;
1721 /* Perform the pass when we have no ctors/dtors support
1722 or at LTO time to merge multiple constructors into a single
1723 function. */
1725 static bool
1726 gate_ipa_cdtor_merge (void)
1728 return !targetm.have_ctors_dtors || (optimize && in_lto_p);
1731 struct ipa_opt_pass_d pass_ipa_cdtor_merge =
1734 IPA_PASS,
1735 "cdtor", /* name */
1736 gate_ipa_cdtor_merge, /* gate */
1737 ipa_cdtor_merge, /* execute */
1738 NULL, /* sub */
1739 NULL, /* next */
1740 0, /* static_pass_number */
1741 TV_CGRAPHOPT, /* tv_id */
1742 0, /* properties_required */
1743 0, /* properties_provided */
1744 0, /* properties_destroyed */
1745 0, /* todo_flags_start */
1746 0 /* todo_flags_finish */
1748 NULL, /* generate_summary */
1749 NULL, /* write_summary */
1750 NULL, /* read_summary */
1751 NULL, /* write_optimization_summary */
1752 NULL, /* read_optimization_summary */
1753 NULL, /* stmt_fixup */
1754 0, /* TODOs */
1755 NULL, /* function_transform */
1756 NULL /* variable_transform */