[official-gcc.git] / gcc / ipa.c
1 /* Basic IPA optimizations and utilities.
2 Copyright (C) 2003, 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "cgraph.h"
26 #include "tree-pass.h"
27 #include "timevar.h"
28 #include "gimple.h"
29 #include "ggc.h"
30 #include "flags.h"
31 #include "pointer-set.h"
32 #include "target.h"
33 #include "tree-iterator.h"
35 /* Fill array order with all nodes with output flag set in the reverse
36 topological order. */
38 int
39 cgraph_postorder (struct cgraph_node **order)
41 struct cgraph_node *node, *node2;
42 int stack_size = 0;
43 int order_pos = 0;
44 struct cgraph_edge *edge, last;
45 int pass;
47 struct cgraph_node **stack =
48 XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
50 /* We have to deal with cycles nicely, so use a depth first traversal
51 output algorithm. Ignore the fact that some functions won't need
52 to be output and put them into order as well, so we get dependencies
53 right through inline functions. */
54 for (node = cgraph_nodes; node; node = node->next)
55 node->aux = NULL;
56 for (pass = 0; pass < 2; pass++)
57 for (node = cgraph_nodes; node; node = node->next)
58 if (!node->aux
59 && (pass
60 || (!cgraph_only_called_directly_p (node)
61 && !node->address_taken)))
63 node2 = node;
64 if (!node->callers)
65 node->aux = &last;
66 else
67 node->aux = node->callers;
68 while (node2)
70 while (node2->aux != &last)
72 edge = (struct cgraph_edge *) node2->aux;
73 if (edge->next_caller)
74 node2->aux = edge->next_caller;
75 else
76 node2->aux = &last;
77 /* Break possible cycles involving always-inline
78 functions by ignoring edges from always-inline
79 functions to non-always-inline functions. */
80 if (edge->caller->local.disregard_inline_limits
81 && !edge->callee->local.disregard_inline_limits)
82 continue;
83 if (!edge->caller->aux)
85 if (!edge->caller->callers)
86 edge->caller->aux = &last;
87 else
88 edge->caller->aux = edge->caller->callers;
89 stack[stack_size++] = node2;
90 node2 = edge->caller;
91 break;
94 if (node2->aux == &last)
96 order[order_pos++] = node2;
97 if (stack_size)
98 node2 = stack[--stack_size];
99 else
100 node2 = NULL;
104 free (stack);
105 for (node = cgraph_nodes; node; node = node->next)
106 node->aux = NULL;
107 return order_pos;
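
/* Illustrative sketch, not part of gcc/ipa.c: the same explicit-stack,
   depth-first postorder idea used by cgraph_postorder above, shown on a
   plain adjacency-list graph.  All types and names below are hypothetical.  */
#include <stdlib.h>

struct toy_graph
{
  int n;                /* Number of nodes, numbered 0 .. n-1.  */
  int **succ;           /* succ[v] is the array of successors of v.  */
  int *nsucc;           /* nsucc[v] is the length of succ[v].  */
};

/* Write the nodes reachable from ROOT into ORDER in postorder and return
   how many were written.  The explicit node/edge stacks replace recursion,
   playing the role of the aux pointers and stack array above.  */
static int
toy_postorder (const struct toy_graph *g, int root, int *order)
{
  char *visited = calloc (g->n, 1);
  int *node_stack = calloc (g->n, sizeof (int));
  int *edge_stack = calloc (g->n, sizeof (int));
  int sp = 0, order_pos = 0;

  node_stack[0] = root;
  edge_stack[0] = 0;
  visited[root] = 1;
  while (sp >= 0)
    {
      int node = node_stack[sp];
      if (edge_stack[sp] < g->nsucc[node])
        {
          int next = g->succ[node][edge_stack[sp]++];
          if (!visited[next])
            {
              visited[next] = 1;
              sp++;
              node_stack[sp] = next;
              edge_stack[sp] = 0;
            }
        }
      else
        {
          /* All successors finished; the node leaves the stack in postorder.  */
          order[order_pos++] = node;
          sp--;
        }
    }
  free (visited);
  free (node_stack);
  free (edge_stack);
  return order_pos;
}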
110 /* Look for all functions inlined to NODE and update their inlined_to pointers
111 to INLINED_TO. */
113 static void
114 update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
116 struct cgraph_edge *e;
117 for (e = node->callees; e; e = e->next_callee)
118 if (e->callee->global.inlined_to)
120 e->callee->global.inlined_to = inlined_to;
121 update_inlined_to_pointer (e->callee, inlined_to);
125 /* Add cgraph NODE to queue starting at FIRST.
127 The queue is linked via AUX pointers and terminated by a pointer to 1.
128 We enqueue nodes on two occasions: when we find them reachable or when we find
129 their bodies needed for further cloning. In the second case we mark them
130 by a pointer to 2 after processing so they are re-queued when they become
131 reachable. */
133 static void
134 enqueue_cgraph_node (struct cgraph_node *node, struct cgraph_node **first)
136 /* Node is still in queue; do nothing. */
137 if (node->aux && node->aux != (void *) 2)
138 return;
139 /* Node was already processed as unreachable, re-enqueue
140 only if it became reachable now. */
141 if (node->aux == (void *)2 && !node->reachable)
142 return;
143 node->aux = *first;
144 *first = node;
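
/* Illustrative sketch, not part of gcc/ipa.c: an intrusive worklist chained
   through an aux pointer, using the same sentinels as above -- (void *) 1
   terminates the queue and (void *) 2 marks a node that was already
   processed and should only be re-queued if it becomes interesting again.
   The toy_* names are hypothetical.  */
#include <stddef.h>

#define TOY_QUEUE_END ((void *) 1)
#define TOY_PROCESSED ((void *) 2)

struct toy_item
{
  void *aux;
  int reachable;
};

static void
toy_enqueue (struct toy_item *item, struct toy_item **first)
{
  /* Still in the queue; do nothing.  */
  if (item->aux && item->aux != TOY_PROCESSED)
    return;
  /* Already processed as unreachable; re-queue only if reachable now.  */
  if (item->aux == TOY_PROCESSED && !item->reachable)
    return;
  item->aux = *first;
  *first = item;
}

static struct toy_item *
toy_dequeue (struct toy_item **first)
{
  struct toy_item *item = *first;
  if (item == (struct toy_item *) TOY_QUEUE_END)
    return NULL;
  *first = (struct toy_item *) item->aux;
  /* Mark as processed so a later toy_enqueue can tell "queued" apart
     from "seen before".  */
  item->aux = TOY_PROCESSED;
  return item;
}

/* Usage: start with first = (struct toy_item *) TOY_QUEUE_END, add items
   with toy_enqueue (&item, &first), drain with toy_dequeue (&first).  */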
147 /* Add varpool NODE to queue starting at FIRST. */
149 static void
150 enqueue_varpool_node (struct varpool_node *node, struct varpool_node **first)
152 node->aux = *first;
153 *first = node;
156 /* Process references. */
158 static void
159 process_references (struct ipa_ref_list *list,
160 struct cgraph_node **first,
161 struct varpool_node **first_varpool,
162 bool before_inlining_p)
164 int i;
165 struct ipa_ref *ref;
166 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
168 if (ref->refered_type == IPA_REF_CGRAPH)
170 struct cgraph_node *node = ipa_ref_node (ref);
171 if (!node->reachable
172 && (!DECL_EXTERNAL (node->decl)
173 || before_inlining_p))
175 node->reachable = true;
176 enqueue_cgraph_node (node, first);
179 else
181 struct varpool_node *node = ipa_ref_varpool_node (ref);
182 if (!node->needed)
184 varpool_mark_needed_node (node);
185 enqueue_varpool_node (node, first_varpool);
191 /* Return true when variable NODE can be removed from the varpool
192 if all direct references to it are eliminated. */
194 static inline bool
195 varpool_can_remove_if_no_refs (struct varpool_node *node)
197 return (!node->force_output && !node->used_from_other_partition
198 && (DECL_COMDAT (node->decl) || !node->externally_visible));
201 /* Return true when function can be marked local. */
203 static bool
204 cgraph_local_node_p (struct cgraph_node *node)
206 return (cgraph_only_called_directly_p (node)
207 && node->analyzed
208 && !DECL_EXTERNAL (node->decl)
209 && !node->local.externally_visible
210 && !node->reachable_from_other_partition
211 && !node->in_other_partition);
214 /* Perform reachability analysis and reclaim all unreachable nodes.
215 If BEFORE_INLINING_P is true this function is called before inlining
216 decisions have been made. If BEFORE_INLINING_P is false this function also
217 removes unneeded bodies of extern inline functions. */
219 bool
220 cgraph_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
222 struct cgraph_node *first = (struct cgraph_node *) (void *) 1;
223 struct varpool_node *first_varpool = (struct varpool_node *) (void *) 1;
224 struct cgraph_node *node, *next;
225 struct varpool_node *vnode, *vnext;
226 bool changed = false;
228 #ifdef ENABLE_CHECKING
229 verify_cgraph ();
230 #endif
231 if (file)
232 fprintf (file, "\nReclaiming functions:");
233 #ifdef ENABLE_CHECKING
234 for (node = cgraph_nodes; node; node = node->next)
235 gcc_assert (!node->aux);
236 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
237 gcc_assert (!vnode->aux);
238 #endif
239 varpool_reset_queue ();
240 for (node = cgraph_nodes; node; node = node->next)
241 if (!cgraph_can_remove_if_no_direct_calls_and_refs_p (node)
242 && ((!DECL_EXTERNAL (node->decl))
243 || before_inlining_p))
245 gcc_assert (!node->global.inlined_to);
246 enqueue_cgraph_node (node, &first);
247 node->reachable = true;
249 else
251 gcc_assert (!node->aux);
252 node->reachable = false;
254 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
256 vnode->next_needed = NULL;
257 vnode->prev_needed = NULL;
258 if (!varpool_can_remove_if_no_refs (vnode))
260 vnode->needed = false;
261 varpool_mark_needed_node (vnode);
262 enqueue_varpool_node (vnode, &first_varpool);
264 else
265 vnode->needed = false;
268 /* Perform reachability analysis. As a special case do not consider
269 extern inline functions not inlined as live because we won't output
270 them at all.
272 We maintain two worklists, one for cgraph nodes and one for varpool nodes, and
273 we are finished once both are empty. */
275 while (first != (struct cgraph_node *) (void *) 1
276 || first_varpool != (struct varpool_node *) (void *) 1)
278 if (first != (struct cgraph_node *) (void *) 1)
280 struct cgraph_edge *e;
281 node = first;
282 first = (struct cgraph_node *) first->aux;
283 if (!node->reachable)
284 node->aux = (void *)2;
286 /* If we found this node reachable, first mark its callees
287 reachable too, unless they are direct calls to extern inline functions
288 we decided not to inline. */
289 if (node->reachable)
291 for (e = node->callees; e; e = e->next_callee)
292 if (!e->callee->reachable
293 && node->analyzed
294 && (!e->inline_failed || !e->callee->analyzed
295 || (!DECL_EXTERNAL (e->callee->decl))
296 || before_inlining_p))
298 e->callee->reachable = true;
299 enqueue_cgraph_node (e->callee, &first);
301 process_references (&node->ref_list, &first, &first_varpool, before_inlining_p);
304 /* If any function in a comdat group is reachable, force
305 all other functions in the same comdat group to be
306 also reachable. */
307 if (node->same_comdat_group
308 && node->reachable
309 && !node->global.inlined_to)
311 for (next = node->same_comdat_group;
312 next != node;
313 next = next->same_comdat_group)
314 if (!next->reachable)
316 next->reachable = true;
317 enqueue_cgraph_node (next, &first);
321 /* We can freely remove inline clones even if they are cloned, however if
322 the function is a clone of a real clone, we must keep it around in order to
323 make materialize_clones produce a function body with the changes
324 applied. */
325 while (node->clone_of && !node->clone_of->aux
326 && !gimple_has_body_p (node->decl))
328 bool noninline = node->clone_of->decl != node->decl;
329 node = node->clone_of;
330 if (noninline && !node->reachable && !node->aux)
332 enqueue_cgraph_node (node, &first);
333 break;
337 if (first_varpool != (struct varpool_node *) (void *) 1)
339 vnode = first_varpool;
340 first_varpool = (struct varpool_node *)first_varpool->aux;
341 vnode->aux = NULL;
342 process_references (&vnode->ref_list, &first, &first_varpool, before_inlining_p);
343 /* If any function in a comdat group is reachable, force
344 all other functions in the same comdat group to be
345 also reachable. */
346 if (vnode->same_comdat_group)
348 struct varpool_node *next;
349 for (next = vnode->same_comdat_group;
350 next != vnode;
351 next = next->same_comdat_group)
352 if (!next->needed)
354 varpool_mark_needed_node (next);
355 enqueue_varpool_node (next, &first_varpool);
361 /* Remove unreachable nodes.
363 Completely unreachable functions can be fully removed from the callgraph.
364 Extern inline functions that we decided not to inline need to become unanalyzed nodes of
365 the callgraph (so we still have edges to them); we then remove their function bodies.
367 We also need to take care of functions that are unreachable but that we need to keep around
368 for later cloning. In this case we also turn them into unanalyzed nodes, but
369 keep the body around. */
370 for (node = cgraph_nodes; node; node = next)
372 next = node->next;
373 if (node->aux && !node->reachable)
375 cgraph_node_remove_callees (node);
376 ipa_remove_all_references (&node->ref_list);
377 node->analyzed = false;
378 node->local.inlinable = false;
380 if (!node->aux)
382 node->global.inlined_to = NULL;
383 if (file)
384 fprintf (file, " %s", cgraph_node_name (node));
385 if (!node->analyzed || !DECL_EXTERNAL (node->decl) || before_inlining_p)
386 cgraph_remove_node (node);
387 else
389 struct cgraph_edge *e;
391 /* See if there is a reachable caller. */
392 for (e = node->callers; e; e = e->next_caller)
393 if (e->caller->reachable)
394 break;
396 /* If so, we need to keep node in the callgraph. */
397 if (e || node->needed)
399 struct cgraph_node *clone;
401 /* If there are still clones, we must keep body around.
402 Otherwise we can just remove the body but keep the clone. */
403 for (clone = node->clones; clone;
404 clone = clone->next_sibling_clone)
405 if (clone->aux)
406 break;
407 if (!clone)
409 cgraph_release_function_body (node);
410 node->analyzed = false;
411 node->local.inlinable = false;
413 else
414 gcc_assert (!clone->in_other_partition);
415 cgraph_node_remove_callees (node);
416 ipa_remove_all_references (&node->ref_list);
417 if (node->prev_sibling_clone)
418 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
419 else if (node->clone_of)
420 node->clone_of->clones = node->next_sibling_clone;
421 if (node->next_sibling_clone)
422 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
423 node->clone_of = NULL;
424 node->next_sibling_clone = NULL;
425 node->prev_sibling_clone = NULL;
427 else
428 cgraph_remove_node (node);
430 changed = true;
433 for (node = cgraph_nodes; node; node = node->next)
435 /* Inline clones might be kept around so their materializing allows further
436 cloning. If the function the clone is inlined into is removed, we need
437 to turn it into a normal, non-inlined node. */
438 if (node->global.inlined_to
439 && !node->callers)
441 gcc_assert (node->clones);
442 node->global.inlined_to = NULL;
443 update_inlined_to_pointer (node, node);
445 node->aux = NULL;
448 if (file)
449 fprintf (file, "\n");
451 /* We must release unused extern inlines or sanity checking will fail. The rest of the transformations
452 are undesirable at -O0 since we do not want to remove anything. */
453 if (!optimize)
454 return changed;
456 if (file)
457 fprintf (file, "Reclaiming variables:");
458 for (vnode = varpool_nodes; vnode; vnode = vnext)
460 vnext = vnode->next;
461 if (!vnode->needed)
463 if (file)
464 fprintf (file, " %s", varpool_node_name (vnode));
465 varpool_remove_node (vnode);
466 changed = true;
470 /* Now update address_taken flags and try to promote functions to be local. */
472 if (file)
473 fprintf (file, "\nClearing address taken flags:");
474 for (node = cgraph_nodes; node; node = node->next)
475 if (node->address_taken
476 && !node->reachable_from_other_partition)
478 int i;
479 struct ipa_ref *ref;
480 bool found = false;
481 for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref)
482 && !found; i++)
484 gcc_assert (ref->use == IPA_REF_ADDR);
485 found = true;
487 if (!found)
489 if (file)
490 fprintf (file, " %s", cgraph_node_name (node));
491 node->address_taken = false;
492 changed = true;
493 if (cgraph_local_node_p (node))
495 node->local.local = true;
496 if (file)
497 fprintf (file, " (local)");
502 #ifdef ENABLE_CHECKING
503 verify_cgraph ();
504 #endif
506 /* Reclaim alias pairs for functions that have disappeared from the
507 call graph. */
508 remove_unreachable_alias_pairs ();
510 return changed;
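
/* Illustrative sketch, not part of gcc/ipa.c: the mark-and-sweep shape of
   cgraph_remove_unreachable_nodes above, on a toy function list.  "Roots"
   stand in for nodes that must be kept even with no callers (exported,
   address-taken, ...); everything left unmarked is unlinked.  All types
   and names are hypothetical.  */
#include <stddef.h>

struct toy_fn
{
  struct toy_fn *next;          /* Chain of all functions.  */
  struct toy_fn **callees;      /* Direct callees.  */
  int ncallees;
  int is_root;                  /* Must be kept regardless of callers.  */
  int reachable;                /* Set during the mark phase.  */
  struct toy_fn *worklist;      /* Worklist chaining, like the aux field.  */
};

static void
toy_reclaim_unreachable (struct toy_fn **all)
{
  struct toy_fn *work = NULL, *node, **slot;
  int i;

  /* Mark: seed the worklist with the roots, then propagate reachability
     along call edges until the worklist is empty.  */
  for (node = *all; node; node = node->next)
    if (node->is_root && !node->reachable)
      {
        node->reachable = 1;
        node->worklist = work;
        work = node;
      }
  while (work)
    {
      node = work;
      work = node->worklist;
      for (i = 0; i < node->ncallees; i++)
        if (!node->callees[i]->reachable)
          {
            node->callees[i]->reachable = 1;
            node->callees[i]->worklist = work;
            work = node->callees[i];
          }
    }

  /* Sweep: unlink every function that was never marked.  */
  for (slot = all; *slot; )
    if (!(*slot)->reachable)
      *slot = (*slot)->next;
    else
      slot = &(*slot)->next;
}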
513 /* Discover variables that no longer have their address taken or that are read-only
514 and update their flags.
516 FIXME: This cannot be done between gimplify and omp_expand since the
517 readonly flag plays a role in what is shared and what is not. Currently we do
518 this transformation as part of whole-program visibility and redo it at the
519 ipa-reference pass (to take cloning into account), but it would
520 make sense to do it before early optimizations. */
522 void
523 ipa_discover_readonly_nonaddressable_vars (void)
525 struct varpool_node *vnode;
526 if (dump_file)
527 fprintf (dump_file, "Clearing variable flags:");
528 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
529 if (vnode->finalized && varpool_all_refs_explicit_p (vnode)
530 && (TREE_ADDRESSABLE (vnode->decl) || !TREE_READONLY (vnode->decl)))
532 bool written = false;
533 bool address_taken = false;
534 int i;
535 struct ipa_ref *ref;
536 for (i = 0; ipa_ref_list_refering_iterate (&vnode->ref_list, i, ref)
537 && (!written || !address_taken); i++)
538 switch (ref->use)
540 case IPA_REF_ADDR:
541 address_taken = true;
542 break;
543 case IPA_REF_LOAD:
544 break;
545 case IPA_REF_STORE:
546 written = true;
547 break;
549 if (TREE_ADDRESSABLE (vnode->decl) && !address_taken)
551 if (dump_file)
552 fprintf (dump_file, " %s (addressable)", varpool_node_name (vnode));
553 TREE_ADDRESSABLE (vnode->decl) = 0;
555 if (!TREE_READONLY (vnode->decl) && !address_taken && !written
556 /* Making a variable in an explicit section read-only can cause a section
557 type conflict.
558 See e.g. gcc.c-torture/compile/pr23237.c. */
559 && DECL_SECTION_NAME (vnode->decl) == NULL)
561 if (dump_file)
562 fprintf (dump_file, " %s (read-only)", varpool_node_name (vnode));
563 TREE_READONLY (vnode->decl) = 1;
564 vnode->const_value_known |= varpool_decide_const_value_known (vnode);
567 if (dump_file)
568 fprintf (dump_file, "\n");
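
/* Illustrative sketch, not part of gcc/ipa.c: the reference classification
   done above, reduced to a standalone scan.  A variable may drop its
   "addressable" flag when no reference takes its address, and may become
   read-only when it is neither written nor address-taken.  The toy_* types
   are hypothetical.  */
enum toy_ref_use { TOY_REF_LOAD, TOY_REF_STORE, TOY_REF_ADDR };

struct toy_var_flags { int addressable; int read_only; };

static struct toy_var_flags
toy_classify_refs (const enum toy_ref_use *refs, int nrefs)
{
  int written = 0, address_taken = 0, i;
  struct toy_var_flags flags;

  /* Stop early once both facts are established, as the loop above does.  */
  for (i = 0; i < nrefs && (!written || !address_taken); i++)
    switch (refs[i])
      {
      case TOY_REF_ADDR:
        address_taken = 1;
        break;
      case TOY_REF_LOAD:
        break;
      case TOY_REF_STORE:
        written = 1;
        break;
      }
  flags.addressable = address_taken;
  flags.read_only = !written && !address_taken;
  return flags;
}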
571 /* Return true when function NODE should be considered externally visible. */
573 static bool
574 cgraph_externally_visible_p (struct cgraph_node *node, bool whole_program, bool aliased)
576 if (!node->local.finalized)
577 return false;
578 if (!DECL_COMDAT (node->decl)
579 && (!TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl)))
580 return false;
582 /* Do not even try to be smart about aliased nodes. Until we properly
583 represent everything by same-body aliases, these are just evil. */
584 if (aliased)
585 return true;
587 /* When doing link time optimizations, hidden symbols become local. */
588 if (in_lto_p && DECL_VISIBILITY (node->decl) == VISIBILITY_HIDDEN
589 /* Be sure that the node is defined in an IR file, not in another object
590 file. In that case we don't set used_from_other_object_file. */
591 && node->analyzed)
593 else if (!whole_program)
594 return true;
595 /* COMDAT functions must be shared only if they have their address taken,
596 otherwise we can produce our own private implementation with
597 -fwhole-program. */
598 else if (DECL_COMDAT (node->decl))
600 if (node->address_taken || !node->analyzed)
601 return true;
602 if (node->same_comdat_group)
604 struct cgraph_node *next;
606 /* If more than one function is in the same COMDAT group, it must
607 be shared even if just one function in the comdat group has its
608 address taken. */
609 for (next = node->same_comdat_group;
610 next != node;
611 next = next->same_comdat_group)
612 if (next->address_taken || !next->analyzed)
613 return true;
616 if (node->local.used_from_object_file)
617 return true;
618 if (DECL_PRESERVE_P (node->decl))
619 return true;
620 if (MAIN_NAME_P (DECL_NAME (node->decl)))
621 return true;
622 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (node->decl)))
623 return true;
624 return false;
627 /* Dissolve the same_comdat_group list in which NODE resides. */
629 static void
630 dissolve_same_comdat_group_list (struct cgraph_node *node)
632 struct cgraph_node *n = node, *next;
635 next = n->same_comdat_group;
636 n->same_comdat_group = NULL;
637 n = next;
639 while (n != node);
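
/* Illustrative sketch, not part of gcc/ipa.c: dissolving a circular,
   singly linked ring like same_comdat_group above.  Each member's link is
   cleared exactly once; the do/while stops when the walk returns to the
   starting node.  The toy_member type is hypothetical.  */
#include <stddef.h>

struct toy_member { struct toy_member *same_group; };

static void
toy_dissolve_ring (struct toy_member *node)
{
  struct toy_member *n = node, *next;
  do
    {
      next = n->same_group;
      n->same_group = NULL;
      n = next;
    }
  while (n != node);
}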
642 /* Mark visibility of all functions.
644 A local function is one whose calls can occur only in the current
645 compilation unit and all its calls are explicit, so we can change
646 its calling convention. We simply mark all static functions whose
647 address is not taken as local.
649 We also change the TREE_PUBLIC flag of all declarations that are public
650 from the language point of view but whose default we want to override
651 via visibilities from the back end's point of view. */
653 static unsigned int
654 function_and_variable_visibility (bool whole_program)
656 struct cgraph_node *node;
657 struct varpool_node *vnode;
658 struct pointer_set_t *aliased_nodes = pointer_set_create ();
659 struct pointer_set_t *aliased_vnodes = pointer_set_create ();
660 unsigned i;
661 alias_pair *p;
663 /* Discover aliased nodes. */
664 FOR_EACH_VEC_ELT (alias_pair, alias_pairs, i, p)
666 if (dump_file)
667 fprintf (dump_file, "Alias %s->%s",
668 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (p->decl)),
669 IDENTIFIER_POINTER (p->target));
671 if ((node = cgraph_node_for_asm (p->target)) != NULL)
673 gcc_assert (node->needed);
674 pointer_set_insert (aliased_nodes, node);
675 if (dump_file)
676 fprintf (dump_file, " node %s/%i",
677 cgraph_node_name (node), node->uid);
679 else if ((vnode = varpool_node_for_asm (p->target)) != NULL)
681 gcc_assert (vnode->needed);
682 pointer_set_insert (aliased_vnodes, vnode);
683 if (dump_file)
684 fprintf (dump_file, " varpool node %s",
685 varpool_node_name (vnode));
687 if (dump_file)
688 fprintf (dump_file, "\n");
691 for (node = cgraph_nodes; node; node = node->next)
693 /* The C++ FE, lacking COMDAT support, creates local COMDAT functions
694 (that ought to be shared but cannot be due to object format
695 limitations). It is necessary to keep the flag to keep the rest of the C++ FE
696 happy. Clear the flag here to avoid confusion in the middle end. */
697 if (DECL_COMDAT (node->decl) && !TREE_PUBLIC (node->decl))
698 DECL_COMDAT (node->decl) = 0;
699 /* For external decls stop tracking same_comdat_group: it doesn't matter
700 what comdat group they are in when they won't be emitted in this TU,
701 and dropping it simplifies later passes. */
702 if (node->same_comdat_group && DECL_EXTERNAL (node->decl))
704 #ifdef ENABLE_CHECKING
705 struct cgraph_node *n;
707 for (n = node->same_comdat_group;
708 n != node;
709 n = n->same_comdat_group)
710 /* If at least one function of the same comdat group is external,
711 all of them have to be; otherwise it is a front-end bug. */
712 gcc_assert (DECL_EXTERNAL (n->decl));
713 #endif
714 dissolve_same_comdat_group_list (node);
716 gcc_assert ((!DECL_WEAK (node->decl) && !DECL_COMDAT (node->decl))
717 || TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl));
718 if (cgraph_externally_visible_p (node, whole_program,
719 pointer_set_contains (aliased_nodes,
720 node)))
722 gcc_assert (!node->global.inlined_to);
723 node->local.externally_visible = true;
725 else
726 node->local.externally_visible = false;
727 if (!node->local.externally_visible && node->analyzed
728 && !DECL_EXTERNAL (node->decl))
730 struct cgraph_node *alias;
731 gcc_assert (whole_program || in_lto_p || !TREE_PUBLIC (node->decl));
732 cgraph_make_decl_local (node->decl);
733 for (alias = node->same_body; alias; alias = alias->next)
734 cgraph_make_decl_local (alias->decl);
735 if (node->same_comdat_group)
736 /* cgraph_externally_visible_p has already checked all other nodes
737 in the group and they will all be made local. We need to
738 dissolve the group at once so that the predicate does not
739 segfault though. */
740 dissolve_same_comdat_group_list (node);
742 node->local.local = cgraph_local_node_p (node);
744 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
746 /* weak flag makes no sense on local variables. */
747 gcc_assert (!DECL_WEAK (vnode->decl)
748 || TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl));
749 /* In several cases declarations can not be common:
751 - when the declaration has an initializer
752 - when it is weak
753 - when it has a specific section
754 - when it resides in a non-generic address space
755 - when the declaration is local, since it will go into a .local common section
756 and the common flag is not needed. Front ends still produce these in
757 certain cases, such as for:
759 static int a __attribute__ ((common))
761 Canonicalize things here and clear the redundant flag. */
762 if (DECL_COMMON (vnode->decl)
763 && (!(TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl))
764 || (DECL_INITIAL (vnode->decl)
765 && DECL_INITIAL (vnode->decl) != error_mark_node)
766 || DECL_WEAK (vnode->decl)
767 || DECL_SECTION_NAME (vnode->decl) != NULL
768 || ! (ADDR_SPACE_GENERIC_P
769 (TYPE_ADDR_SPACE (TREE_TYPE (vnode->decl))))))
770 DECL_COMMON (vnode->decl) = 0;
771 /* Even extern variables might have known initializers.
772 See, for example, testsuite/g++.dg/opt/static3.C. */
773 vnode->const_value_known |= varpool_decide_const_value_known (vnode);
775 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
777 if (!vnode->finalized)
778 continue;
779 if (vnode->needed
780 && (DECL_COMDAT (vnode->decl) || TREE_PUBLIC (vnode->decl))
781 && (((!whole_program
782 /* We can privatize comdat read-only variables whose address is
783 not taken, but doing so is not going to bring us
784 optimization opportunities until we start reordering
785 data structures. */
786 || DECL_COMDAT (vnode->decl)
787 || DECL_WEAK (vnode->decl))
788 /* When doing link-time optimizations, all hidden symbols will
789 become local. */
790 && (!in_lto_p
791 || DECL_VISIBILITY (vnode->decl) != VISIBILITY_HIDDEN
792 /* We can get a prevailing decision in another object file.
793 In this case we do not set used_from_object_file. */
794 || !vnode->finalized))
795 || DECL_PRESERVE_P (vnode->decl)
796 || vnode->used_from_object_file
797 || pointer_set_contains (aliased_vnodes, vnode)
798 || lookup_attribute ("externally_visible",
799 DECL_ATTRIBUTES (vnode->decl))))
800 vnode->externally_visible = true;
801 else
802 vnode->externally_visible = false;
803 if (!vnode->externally_visible)
805 gcc_assert (in_lto_p || whole_program || !TREE_PUBLIC (vnode->decl));
806 cgraph_make_decl_local (vnode->decl);
808 vnode->const_value_known |= varpool_decide_const_value_known (vnode);
809 gcc_assert (TREE_STATIC (vnode->decl));
811 pointer_set_destroy (aliased_nodes);
812 pointer_set_destroy (aliased_vnodes);
814 if (dump_file)
816 fprintf (dump_file, "\nMarking local functions:");
817 for (node = cgraph_nodes; node; node = node->next)
818 if (node->local.local)
819 fprintf (dump_file, " %s", cgraph_node_name (node));
820 fprintf (dump_file, "\n\n");
821 fprintf (dump_file, "\nMarking externally visible functions:");
822 for (node = cgraph_nodes; node; node = node->next)
823 if (node->local.externally_visible)
824 fprintf (dump_file, " %s", cgraph_node_name (node));
825 fprintf (dump_file, "\n\n");
826 fprintf (dump_file, "\nMarking externally visible variables:");
827 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
828 if (vnode->externally_visible)
829 fprintf (dump_file, " %s", varpool_node_name (vnode));
830 fprintf (dump_file, "\n\n");
832 cgraph_function_flags_ready = true;
833 return 0;
836 /* Local function pass handling visibilities. This happens before LTO streaming
837 so in particular -fwhole-program should be ignored at this level. */
839 static unsigned int
840 local_function_and_variable_visibility (void)
842 return function_and_variable_visibility (flag_whole_program && !flag_lto && !flag_whopr);
845 struct simple_ipa_opt_pass pass_ipa_function_and_variable_visibility =
848 SIMPLE_IPA_PASS,
849 "visibility", /* name */
850 NULL, /* gate */
851 local_function_and_variable_visibility,/* execute */
852 NULL, /* sub */
853 NULL, /* next */
854 0, /* static_pass_number */
855 TV_CGRAPHOPT, /* tv_id */
856 0, /* properties_required */
857 0, /* properties_provided */
858 0, /* properties_destroyed */
859 0, /* todo_flags_start */
860 TODO_remove_functions | TODO_dump_cgraph
861 | TODO_ggc_collect /* todo_flags_finish */
865 /* Do not re-run on ltrans stage. */
867 static bool
868 gate_whole_program_function_and_variable_visibility (void)
870 return !flag_ltrans;
873 /* Bring functions local at LTO time with -fwhole-program. */
875 static unsigned int
876 whole_program_function_and_variable_visibility (void)
878 struct cgraph_node *node;
879 struct varpool_node *vnode;
881 function_and_variable_visibility (flag_whole_program);
883 for (node = cgraph_nodes; node; node = node->next)
884 if ((node->local.externally_visible && !DECL_COMDAT (node->decl))
885 && node->local.finalized)
886 cgraph_mark_needed_node (node);
887 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
888 if (vnode->externally_visible && !DECL_COMDAT (vnode->decl))
889 varpool_mark_needed_node (vnode);
890 if (dump_file)
892 fprintf (dump_file, "\nNeeded variables:");
893 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
894 if (vnode->needed)
895 fprintf (dump_file, " %s", varpool_node_name (vnode));
896 fprintf (dump_file, "\n\n");
898 if (optimize)
899 ipa_discover_readonly_nonaddressable_vars ();
900 return 0;
903 struct ipa_opt_pass_d pass_ipa_whole_program_visibility =
906 IPA_PASS,
907 "whole-program", /* name */
908 gate_whole_program_function_and_variable_visibility,/* gate */
909 whole_program_function_and_variable_visibility,/* execute */
910 NULL, /* sub */
911 NULL, /* next */
912 0, /* static_pass_number */
913 TV_CGRAPHOPT, /* tv_id */
914 0, /* properties_required */
915 0, /* properties_provided */
916 0, /* properties_destroyed */
917 0, /* todo_flags_start */
918 TODO_remove_functions | TODO_dump_cgraph
919 | TODO_ggc_collect /* todo_flags_finish */
921 NULL, /* generate_summary */
922 NULL, /* write_summary */
923 NULL, /* read_summary */
924 NULL, /* write_optimization_summary */
925 NULL, /* read_optimization_summary */
926 NULL, /* stmt_fixup */
927 0, /* TODOs */
928 NULL, /* function_transform */
929 NULL, /* variable_transform */
932 /* Hash a cgraph node set element. */
934 static hashval_t
935 hash_cgraph_node_set_element (const void *p)
937 const_cgraph_node_set_element element = (const_cgraph_node_set_element) p;
938 return htab_hash_pointer (element->node);
941 /* Compare two cgraph node set elements. */
943 static int
944 eq_cgraph_node_set_element (const void *p1, const void *p2)
946 const_cgraph_node_set_element e1 = (const_cgraph_node_set_element) p1;
947 const_cgraph_node_set_element e2 = (const_cgraph_node_set_element) p2;
949 return e1->node == e2->node;
952 /* Create a new cgraph node set. */
954 cgraph_node_set
955 cgraph_node_set_new (void)
957 cgraph_node_set new_node_set;
959 new_node_set = ggc_alloc_cgraph_node_set_def ();
960 new_node_set->hashtab = htab_create_ggc (10,
961 hash_cgraph_node_set_element,
962 eq_cgraph_node_set_element,
963 NULL);
964 new_node_set->nodes = NULL;
965 return new_node_set;
968 /* Add cgraph_node NODE to cgraph_node_set SET. */
970 void
971 cgraph_node_set_add (cgraph_node_set set, struct cgraph_node *node)
973 void **slot;
974 cgraph_node_set_element element;
975 struct cgraph_node_set_element_def dummy;
977 dummy.node = node;
978 slot = htab_find_slot (set->hashtab, &dummy, INSERT);
980 if (*slot != HTAB_EMPTY_ENTRY)
982 element = (cgraph_node_set_element) *slot;
983 gcc_assert (node == element->node
984 && (VEC_index (cgraph_node_ptr, set->nodes, element->index)
985 == node));
986 return;
989 /* Insert node into hash table. */
990 element = ggc_alloc_cgraph_node_set_element_def ();
991 element->node = node;
992 element->index = VEC_length (cgraph_node_ptr, set->nodes);
993 *slot = element;
995 /* Insert into node vector. */
996 VEC_safe_push (cgraph_node_ptr, gc, set->nodes, node);
999 /* Remove cgraph_node NODE from cgraph_node_set SET. */
1001 void
1002 cgraph_node_set_remove (cgraph_node_set set, struct cgraph_node *node)
1004 void **slot, **last_slot;
1005 cgraph_node_set_element element, last_element;
1006 struct cgraph_node *last_node;
1007 struct cgraph_node_set_element_def dummy;
1009 dummy.node = node;
1010 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1011 if (slot == NULL)
1012 return;
1014 element = (cgraph_node_set_element) *slot;
1015 gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
1016 == node);
1018 /* Remove from vector. We do this by swapping node with the last element
1019 of the vector. */
1020 last_node = VEC_pop (cgraph_node_ptr, set->nodes);
1021 if (last_node != node)
1023 dummy.node = last_node;
1024 last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1025 last_element = (cgraph_node_set_element) *last_slot;
1026 gcc_assert (last_element);
1028 /* Move the last element to the original spot of NODE. */
1029 last_element->index = element->index;
1030 VEC_replace (cgraph_node_ptr, set->nodes, last_element->index,
1031 last_node);
1034 /* Remove element from hash table. */
1035 htab_clear_slot (set->hashtab, slot);
1036 ggc_free (element);
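
/* Illustrative sketch, not part of gcc/ipa.c: the set representation used
   by cgraph_node_set above -- a growable vector for cheap iteration plus a
   per-element index so removal is O(1) by swapping with the last slot.
   The real code finds the element through a hash table keyed by the node
   pointer; here the index lives in the element itself to keep the sketch
   short.  Error handling for realloc is omitted.  */
#include <stdlib.h>

struct toy_elt { int value; size_t index; };
struct toy_set { struct toy_elt **vec; size_t len, cap; };

static void
toy_set_add (struct toy_set *set, struct toy_elt *e)
{
  if (set->len == set->cap)
    {
      set->cap = set->cap ? 2 * set->cap : 8;
      set->vec = realloc (set->vec, set->cap * sizeof (*set->vec));
    }
  e->index = set->len;
  set->vec[set->len++] = e;
}

static void
toy_set_remove (struct toy_set *set, struct toy_elt *e)
{
  struct toy_elt *last = set->vec[--set->len];
  if (last != e)
    {
      /* Move the last element into the vacated slot and fix its index,
         the same move cgraph_node_set_remove performs via VEC_replace.  */
      last->index = e->index;
      set->vec[e->index] = last;
    }
}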
1039 /* Find NODE in SET and return an iterator to it if found. A null iterator
1040 is returned if NODE is not in SET. */
1042 cgraph_node_set_iterator
1043 cgraph_node_set_find (cgraph_node_set set, struct cgraph_node *node)
1045 void **slot;
1046 struct cgraph_node_set_element_def dummy;
1047 cgraph_node_set_element element;
1048 cgraph_node_set_iterator csi;
1050 dummy.node = node;
1051 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1052 if (slot == NULL)
1053 csi.index = (unsigned) ~0;
1054 else
1056 element = (cgraph_node_set_element) *slot;
1057 gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
1058 == node);
1059 csi.index = element->index;
1061 csi.set = set;
1063 return csi;
1066 /* Dump content of SET to file F. */
1068 void
1069 dump_cgraph_node_set (FILE *f, cgraph_node_set set)
1071 cgraph_node_set_iterator iter;
1073 for (iter = csi_start (set); !csi_end_p (iter); csi_next (&iter))
1075 struct cgraph_node *node = csi_node (iter);
1076 fprintf (f, " %s/%i", cgraph_node_name (node), node->uid);
1078 fprintf (f, "\n");
1081 /* Dump content of SET to stderr. */
1083 DEBUG_FUNCTION void
1084 debug_cgraph_node_set (cgraph_node_set set)
1086 dump_cgraph_node_set (stderr, set);
1089 /* Hash a varpool node set element. */
1091 static hashval_t
1092 hash_varpool_node_set_element (const void *p)
1094 const_varpool_node_set_element element = (const_varpool_node_set_element) p;
1095 return htab_hash_pointer (element->node);
1098 /* Compare two varpool node set elements. */
1100 static int
1101 eq_varpool_node_set_element (const void *p1, const void *p2)
1103 const_varpool_node_set_element e1 = (const_varpool_node_set_element) p1;
1104 const_varpool_node_set_element e2 = (const_varpool_node_set_element) p2;
1106 return e1->node == e2->node;
1109 /* Create a new varpool node set. */
1111 varpool_node_set
1112 varpool_node_set_new (void)
1114 varpool_node_set new_node_set;
1116 new_node_set = ggc_alloc_varpool_node_set_def ();
1117 new_node_set->hashtab = htab_create_ggc (10,
1118 hash_varpool_node_set_element,
1119 eq_varpool_node_set_element,
1120 NULL);
1121 new_node_set->nodes = NULL;
1122 return new_node_set;
1125 /* Add varpool_node NODE to varpool_node_set SET. */
1127 void
1128 varpool_node_set_add (varpool_node_set set, struct varpool_node *node)
1130 void **slot;
1131 varpool_node_set_element element;
1132 struct varpool_node_set_element_def dummy;
1134 dummy.node = node;
1135 slot = htab_find_slot (set->hashtab, &dummy, INSERT);
1137 if (*slot != HTAB_EMPTY_ENTRY)
1139 element = (varpool_node_set_element) *slot;
1140 gcc_assert (node == element->node
1141 && (VEC_index (varpool_node_ptr, set->nodes, element->index)
1142 == node));
1143 return;
1146 /* Insert node into hash table. */
1147 element = ggc_alloc_varpool_node_set_element_def ();
1148 element->node = node;
1149 element->index = VEC_length (varpool_node_ptr, set->nodes);
1150 *slot = element;
1152 /* Insert into node vector. */
1153 VEC_safe_push (varpool_node_ptr, gc, set->nodes, node);
1156 /* Remove varpool_node NODE from varpool_node_set SET. */
1158 void
1159 varpool_node_set_remove (varpool_node_set set, struct varpool_node *node)
1161 void **slot, **last_slot;
1162 varpool_node_set_element element, last_element;
1163 struct varpool_node *last_node;
1164 struct varpool_node_set_element_def dummy;
1166 dummy.node = node;
1167 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1168 if (slot == NULL)
1169 return;
1171 element = (varpool_node_set_element) *slot;
1172 gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
1173 == node);
1175 /* Remove from vector. We do this by swapping node with the last element
1176 of the vector. */
1177 last_node = VEC_pop (varpool_node_ptr, set->nodes);
1178 if (last_node != node)
1180 dummy.node = last_node;
1181 last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1182 last_element = (varpool_node_set_element) *last_slot;
1183 gcc_assert (last_element);
1185 /* Move the last element to the original spot of NODE. */
1186 last_element->index = element->index;
1187 VEC_replace (varpool_node_ptr, set->nodes, last_element->index,
1188 last_node);
1191 /* Remove element from hash table. */
1192 htab_clear_slot (set->hashtab, slot);
1193 ggc_free (element);
1196 /* Find NODE in SET and return an iterator to it if found. A null iterator
1197 is returned if NODE is not in SET. */
1199 varpool_node_set_iterator
1200 varpool_node_set_find (varpool_node_set set, struct varpool_node *node)
1202 void **slot;
1203 struct varpool_node_set_element_def dummy;
1204 varpool_node_set_element element;
1205 varpool_node_set_iterator vsi;
1207 dummy.node = node;
1208 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1209 if (slot == NULL)
1210 vsi.index = (unsigned) ~0;
1211 else
1213 element = (varpool_node_set_element) *slot;
1214 gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
1215 == node);
1216 vsi.index = element->index;
1218 vsi.set = set;
1220 return vsi;
1223 /* Dump content of SET to file F. */
1225 void
1226 dump_varpool_node_set (FILE *f, varpool_node_set set)
1228 varpool_node_set_iterator iter;
1230 for (iter = vsi_start (set); !vsi_end_p (iter); vsi_next (&iter))
1232 struct varpool_node *node = vsi_node (iter);
1233 fprintf (f, " %s", varpool_node_name (node));
1235 fprintf (f, "\n");
1238 /* Dump content of SET to stderr. */
1240 DEBUG_FUNCTION void
1241 debug_varpool_node_set (varpool_node_set set)
1243 dump_varpool_node_set (stderr, set);
1247 /* Simple ipa profile pass propagating frequencies across the callgraph. */
1249 static unsigned int
1250 ipa_profile (void)
1252 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1253 struct cgraph_edge *e;
1254 int order_pos;
1255 bool something_changed = false;
1256 int i;
1258 order_pos = cgraph_postorder (order);
1259 for (i = order_pos - 1; i >= 0; i--)
1261 if (order[i]->local.local && cgraph_propagate_frequency (order[i]))
1263 for (e = order[i]->callees; e; e = e->next_callee)
1264 if (e->callee->local.local && !e->callee->aux)
1266 something_changed = true;
1267 e->callee->aux = (void *)1;
1270 order[i]->aux = NULL;
1273 while (something_changed)
1275 something_changed = false;
1276 for (i = order_pos - 1; i >= 0; i--)
1278 if (order[i]->aux && cgraph_propagate_frequency (order[i]))
1280 for (e = order[i]->callees; e; e = e->next_callee)
1281 if (e->callee->local.local && !e->callee->aux)
1283 something_changed = true;
1284 e->callee->aux = (void *)1;
1287 order[i]->aux = NULL;
1290 free (order);
1291 return 0;
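
/* Illustrative sketch, not part of gcc/ipa.c: the shape of the fixpoint
   iteration in ipa_profile above -- sweep the nodes in reverse postorder,
   flag callees whose summary may have become stale, and repeat until a
   whole sweep changes nothing.  toy_propagate is a hypothetical stand-in
   for cgraph_propagate_frequency and must return nonzero only when the
   node's summary really changed, which is what guarantees termination.
   Callers seed the iteration by setting dirty on the interesting nodes.  */
struct toy_site
{
  struct toy_site **callees;
  int ncallees;
  int dirty;                    /* Needs (re)processing, like the aux flag.  */
};

static void
toy_propagate_to_fixpoint (struct toy_site **order, int order_pos,
                           int (*toy_propagate) (struct toy_site *))
{
  int i, j, something_changed;

  do
    {
      something_changed = 0;
      for (i = order_pos - 1; i >= 0; i--)
        {
          if (order[i]->dirty && toy_propagate (order[i]))
            for (j = 0; j < order[i]->ncallees; j++)
              if (!order[i]->callees[j]->dirty)
                {
                  /* The callee saw its callers change; revisit it.  */
                  order[i]->callees[j]->dirty = 1;
                  something_changed = 1;
                }
          order[i]->dirty = 0;
        }
    }
  while (something_changed);
}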
1294 static bool
1295 gate_ipa_profile (void)
1297 return flag_ipa_profile;
1300 struct ipa_opt_pass_d pass_ipa_profile =
1303 IPA_PASS,
1304 "ipa-profile", /* name */
1305 gate_ipa_profile, /* gate */
1306 ipa_profile, /* execute */
1307 NULL, /* sub */
1308 NULL, /* next */
1309 0, /* static_pass_number */
1310 TV_IPA_PROFILE, /* tv_id */
1311 0, /* properties_required */
1312 0, /* properties_provided */
1313 0, /* properties_destroyed */
1314 0, /* todo_flags_start */
1315 0 /* todo_flags_finish */
1317 NULL, /* generate_summary */
1318 NULL, /* write_summary */
1319 NULL, /* read_summary */
1320 NULL, /* write_optimization_summary */
1321 NULL, /* read_optimization_summary */
1322 NULL, /* stmt_fixup */
1323 0, /* TODOs */
1324 NULL, /* function_transform */
1325 NULL /* variable_transform */
1328 /* Generate and emit a static constructor or destructor. WHICH must
1329 be one of 'I' (for a constructor) or 'D' (for a destructor). BODY
1330 is a STATEMENT_LIST containing GENERIC statements. PRIORITY is the
1331 initialization priority for this constructor or destructor. */
1333 void
1334 cgraph_build_static_cdtor (char which, tree body, int priority)
1336 static int counter = 0;
1337 char which_buf[16];
1338 tree decl, name, resdecl;
1340 /* The priority is encoded in the constructor or destructor name.
1341 collect2 will sort the names and arrange that they are called at
1342 program startup. */
1343 sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
1344 name = get_file_function_name (which_buf);
1346 decl = build_decl (input_location, FUNCTION_DECL, name,
1347 build_function_type_list (void_type_node, NULL_TREE));
1348 current_function_decl = decl;
1350 resdecl = build_decl (input_location,
1351 RESULT_DECL, NULL_TREE, void_type_node);
1352 DECL_ARTIFICIAL (resdecl) = 1;
1353 DECL_RESULT (decl) = resdecl;
1354 DECL_CONTEXT (resdecl) = decl;
1356 allocate_struct_function (decl, false);
1358 TREE_STATIC (decl) = 1;
1359 TREE_USED (decl) = 1;
1360 DECL_ARTIFICIAL (decl) = 1;
1361 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
1362 DECL_SAVED_TREE (decl) = body;
1363 if (!targetm.have_ctors_dtors)
1365 TREE_PUBLIC (decl) = 1;
1366 DECL_PRESERVE_P (decl) = 1;
1368 DECL_UNINLINABLE (decl) = 1;
1370 DECL_INITIAL (decl) = make_node (BLOCK);
1371 TREE_USED (DECL_INITIAL (decl)) = 1;
1373 DECL_SOURCE_LOCATION (decl) = input_location;
1374 cfun->function_end_locus = input_location;
1376 switch (which)
1378 case 'I':
1379 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1380 decl_init_priority_insert (decl, priority);
1381 break;
1382 case 'D':
1383 DECL_STATIC_DESTRUCTOR (decl) = 1;
1384 decl_fini_priority_insert (decl, priority);
1385 break;
1386 default:
1387 gcc_unreachable ();
1390 gimplify_function_tree (decl);
1392 cgraph_add_new_function (decl, false);
1394 set_cfun (NULL);
1395 current_function_decl = NULL;
1399 /* A vector of FUNCTION_DECLs declared as static constructors. */
1400 static VEC(tree, heap) *static_ctors;
1401 /* A vector of FUNCTION_DECLs declared as static destructors. */
1402 static VEC(tree, heap) *static_dtors;
1404 /* When the target does not have ctors and dtors, we call all constructors
1405 and destructors from a special initialization/destruction function
1406 recognized by collect2.
1408 When we are going to build this function, collect all constructors and
1409 destructors and turn them into normal functions. */
1411 static void
1412 record_cdtor_fn (struct cgraph_node *node)
1414 if (DECL_STATIC_CONSTRUCTOR (node->decl))
1415 VEC_safe_push (tree, heap, static_ctors, node->decl);
1416 if (DECL_STATIC_DESTRUCTOR (node->decl))
1417 VEC_safe_push (tree, heap, static_dtors, node->decl);
1418 node = cgraph_node (node->decl);
1419 node->local.disregard_inline_limits = 1;
1422 /* Define global constructor/destructor functions for the vector of
1423 CDTORS. The CDTORS are sorted by initialization
1424 priority. If CTOR_P is true, these are constructors; otherwise,
1425 they are destructors. */
1427 static void
1428 build_cdtor (bool ctor_p, VEC (tree, heap) *cdtors)
1430 size_t i,j;
1431 size_t len = VEC_length (tree, cdtors);
1433 i = 0;
1434 while (i < len)
1436 tree body;
1437 tree fn;
1438 priority_type priority;
1440 priority = 0;
1441 body = NULL_TREE;
1442 j = i;
1445 priority_type p;
1446 fn = VEC_index (tree, cdtors, j);
1447 p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
1448 if (j == i)
1449 priority = p;
1450 else if (p != priority)
1451 break;
1452 j++;
1454 while (j < len);
1456 /* When there is only one cdtor and target supports them, do nothing. */
1457 if (j == i + 1
1458 && targetm.have_ctors_dtors)
1460 i++;
1461 continue;
1463 /* Find the next batch of constructors/destructors with the same
1464 initialization priority. */
1465 for (;i < j; i++)
1467 tree call;
1468 fn = VEC_index (tree, cdtors, i);
1469 call = build_call_expr (fn, 0);
1470 if (ctor_p)
1471 DECL_STATIC_CONSTRUCTOR (fn) = 0;
1472 else
1473 DECL_STATIC_DESTRUCTOR (fn) = 0;
1474 /* We do not want to optimize away pure/const calls here.
1475 When optimizing, these should already be removed; when not
1476 optimizing, we want the user to be able to set a breakpoint in them. */
1477 TREE_SIDE_EFFECTS (call) = 1;
1478 append_to_statement_list (call, &body);
1480 while (i < len);
1481 gcc_assert (body != NULL_TREE);
1482 /* Generate a function to call all the functions of like
1483 priority. */
1484 cgraph_build_static_cdtor (ctor_p ? 'I' : 'D', body, priority);
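
/* Illustrative sketch, not part of gcc/ipa.c: the batching loop used by
   build_cdtor above, on a plain sorted array -- walk runs of equal
   priority and emit one aggregate per run.  emit_batch is a hypothetical
   stand-in for building a body and calling cgraph_build_static_cdtor.  */
#include <stddef.h>

struct toy_cdtor { int priority; void (*fn) (void); };

static void
toy_build_batches (const struct toy_cdtor *cdtors, size_t len,
                   void (*emit_batch) (const struct toy_cdtor *, size_t, int))
{
  size_t i = 0;
  while (i < len)
    {
      int priority = cdtors[i].priority;
      size_t j = i + 1;
      /* Extend the run while the (already sorted) priorities stay equal.  */
      while (j < len && cdtors[j].priority == priority)
        j++;
      emit_batch (cdtors + i, j - i, priority);
      i = j;
    }
}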
1488 /* Comparison function for qsort. P1 and P2 are actually of type
1489 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
1490 used to determine the sort order. */
1492 static int
1493 compare_ctor (const void *p1, const void *p2)
1495 tree f1;
1496 tree f2;
1497 int priority1;
1498 int priority2;
1500 f1 = *(const tree *)p1;
1501 f2 = *(const tree *)p2;
1502 priority1 = DECL_INIT_PRIORITY (f1);
1503 priority2 = DECL_INIT_PRIORITY (f2);
1505 if (priority1 < priority2)
1506 return -1;
1507 else if (priority1 > priority2)
1508 return 1;
1509 else
1510 /* Ensure a stable sort. Constructors are executed in reverse
1511 order so that LTO initializes libraries first. */
1512 return DECL_UID (f2) - DECL_UID (f1);
1515 /* Comparison function for qsort. P1 and P2 are actually of type
1516 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
1517 used to determine the sort order. */
1519 static int
1520 compare_dtor (const void *p1, const void *p2)
1522 tree f1;
1523 tree f2;
1524 int priority1;
1525 int priority2;
1527 f1 = *(const tree *)p1;
1528 f2 = *(const tree *)p2;
1529 priority1 = DECL_FINI_PRIORITY (f1);
1530 priority2 = DECL_FINI_PRIORITY (f2);
1532 if (priority1 < priority2)
1533 return -1;
1534 else if (priority1 > priority2)
1535 return 1;
1536 else
1537 /* Ensure a stable sort. */
1538 return DECL_UID (f1) - DECL_UID (f2);
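
/* Illustrative sketch, not part of gcc/ipa.c: qsort is not guaranteed to be
   stable, which is why compare_ctor and compare_dtor above break priority
   ties with DECL_UID so the final order is deterministic.  The same pattern
   on a plain struct with a hypothetical uid field:  */
#include <stdlib.h>

struct toy_ctor_entry { int priority; int uid; };

static int
toy_compare_ctor (const void *p1, const void *p2)
{
  const struct toy_ctor_entry *c1 = p1;
  const struct toy_ctor_entry *c2 = p2;
  if (c1->priority != c2->priority)
    return c1->priority < c2->priority ? -1 : 1;
  /* Deterministic tie-break, analogous to the DECL_UID comparison.  */
  return c1->uid - c2->uid;
}

/* Usage: qsort (entries, n, sizeof (struct toy_ctor_entry), toy_compare_ctor);  */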
1541 /* Generate functions to call static constructors and destructors
1542 for targets that do not support .ctors/.dtors sections. These
1543 functions have magic names which are detected by collect2. */
1545 static void
1546 build_cdtor_fns (void)
1548 if (!VEC_empty (tree, static_ctors))
1550 gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
1551 qsort (VEC_address (tree, static_ctors),
1552 VEC_length (tree, static_ctors),
1553 sizeof (tree),
1554 compare_ctor);
1555 build_cdtor (/*ctor_p=*/true, static_ctors);
1558 if (!VEC_empty (tree, static_dtors))
1560 gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
1561 qsort (VEC_address (tree, static_dtors),
1562 VEC_length (tree, static_dtors),
1563 sizeof (tree),
1564 compare_dtor);
1565 build_cdtor (/*ctor_p=*/false, static_dtors);
1569 /* Look for constructors and destructors and produce functions calling them.
1570 This is needed for targets not supporting ctors or dtors, but we perform the
1571 transformation also at link time to merge the possibly numerous
1572 constructors/destructors into a single function to improve code locality and
1573 reduce size. */
1575 static unsigned int
1576 ipa_cdtor_merge (void)
1578 struct cgraph_node *node;
1579 for (node = cgraph_nodes; node; node = node->next)
1580 if (node->analyzed
1581 && (DECL_STATIC_CONSTRUCTOR (node->decl)
1582 || DECL_STATIC_DESTRUCTOR (node->decl)))
1583 record_cdtor_fn (node);
1584 build_cdtor_fns ();
1585 VEC_free (tree, heap, static_ctors);
1586 VEC_free (tree, heap, static_dtors);
1587 return 0;
1590 /* Perform the pass when we have no ctors/dtors support
1591 or at LTO time to merge multiple constructors into a single
1592 function. */
1594 static bool
1595 gate_ipa_cdtor_merge (void)
1597 return !targetm.have_ctors_dtors || (optimize && in_lto_p);
1600 struct ipa_opt_pass_d pass_ipa_cdtor_merge =
1603 IPA_PASS,
1604 "cdtor", /* name */
1605 gate_ipa_cdtor_merge, /* gate */
1606 ipa_cdtor_merge, /* execute */
1607 NULL, /* sub */
1608 NULL, /* next */
1609 0, /* static_pass_number */
1610 TV_CGRAPHOPT, /* tv_id */
1611 0, /* properties_required */
1612 0, /* properties_provided */
1613 0, /* properties_destroyed */
1614 0, /* todo_flags_start */
1615 0 /* todo_flags_finish */
1617 NULL, /* generate_summary */
1618 NULL, /* write_summary */
1619 NULL, /* read_summary */
1620 NULL, /* write_optimization_summary */
1621 NULL, /* read_optimization_summary */
1622 NULL, /* stmt_fixup */
1623 0, /* TODOs */
1624 NULL, /* function_transform */
1625 NULL /* variable_transform */