Retry rdrand if the carry flag isn't valid.
[official-gcc.git] / gcc / ipa.c
blob05e164271dbfb7f0690fa90f3045b4252867d3b0
1 /* Basic IPA optimizations and utilities.
2 Copyright (C) 2003, 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "cgraph.h"
26 #include "tree-pass.h"
27 #include "timevar.h"
28 #include "gimple.h"
29 #include "ggc.h"
30 #include "flags.h"
32 /* Fill array order with all nodes with output flag set in the reverse
33 topological order. */
35 int
36 cgraph_postorder (struct cgraph_node **order)
38 struct cgraph_node *node, *node2;
39 int stack_size = 0;
40 int order_pos = 0;
41 struct cgraph_edge *edge, last;
42 int pass;
44 struct cgraph_node **stack =
45 XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
47 /* We have to deal with cycles nicely, so use a depth first traversal
48 output algorithm. Ignore the fact that some functions won't need
49 to be output and put them into order as well, so we get dependencies
50 right through inline functions. */
51 for (node = cgraph_nodes; node; node = node->next)
52 node->aux = NULL;
53 for (pass = 0; pass < 2; pass++)
54 for (node = cgraph_nodes; node; node = node->next)
55 if (!node->aux
56 && (pass
57 || (!cgraph_only_called_directly_p (node)
58 && !node->address_taken)))
60 node2 = node;
61 if (!node->callers)
62 node->aux = &last;
63 else
64 node->aux = node->callers;
65 while (node2)
67 while (node2->aux != &last)
69 edge = (struct cgraph_edge *) node2->aux;
70 if (edge->next_caller)
71 node2->aux = edge->next_caller;
72 else
73 node2->aux = &last;
74 /* Break possible cycles involving always-inline
75 functions by ignoring edges from always-inline
76 functions to non-always-inline functions. */
77 if (edge->caller->local.disregard_inline_limits
78 && !edge->callee->local.disregard_inline_limits)
79 continue;
80 if (!edge->caller->aux)
82 if (!edge->caller->callers)
83 edge->caller->aux = &last;
84 else
85 edge->caller->aux = edge->caller->callers;
86 stack[stack_size++] = node2;
87 node2 = edge->caller;
88 break;
91 if (node2->aux == &last)
93 order[order_pos++] = node2;
94 if (stack_size)
95 node2 = stack[--stack_size];
96 else
97 node2 = NULL;
101 free (stack);
102 for (node = cgraph_nodes; node; node = node->next)
103 node->aux = NULL;
104 return order_pos;
107 /* Look for all functions inlined to NODE and update their inlined_to pointers
108 to INLINED_TO. */
110 static void
111 update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
113 struct cgraph_edge *e;
114 for (e = node->callees; e; e = e->next_callee)
115 if (e->callee->global.inlined_to)
117 e->callee->global.inlined_to = inlined_to;
118 update_inlined_to_pointer (e->callee, inlined_to);
122 /* Add cgraph NODE to queue starting at FIRST.
124 The queue is linked via AUX pointers and terminated by pointer to 1.
125 We enqueue nodes at two occasions: when we find them reachable or when we find
126 their bodies needed for further clonning. In the second case we mark them
127 by pointer to 2 after processing so they are re-queue when they become
128 reachable. */
130 static void
131 enqueue_cgraph_node (struct cgraph_node *node, struct cgraph_node **first)
133 /* Node is still in queue; do nothing. */
134 if (node->aux && node->aux != (void *) 2)
135 return;
136 /* Node was already processed as unreachable, re-enqueue
137 only if it became reachable now. */
138 if (node->aux == (void *)2 && !node->reachable)
139 return;
140 node->aux = *first;
141 *first = node;
144 /* Add varpool NODE to queue starting at FIRST. */
146 static void
147 enqueue_varpool_node (struct varpool_node *node, struct varpool_node **first)
149 node->aux = *first;
150 *first = node;
153 /* Process references. */
155 static void
156 process_references (struct ipa_ref_list *list,
157 struct cgraph_node **first,
158 struct varpool_node **first_varpool,
159 bool before_inlining_p)
161 int i;
162 struct ipa_ref *ref;
163 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
165 if (ref->refered_type == IPA_REF_CGRAPH)
167 struct cgraph_node *node = ipa_ref_node (ref);
168 if (!node->reachable
169 && (!DECL_EXTERNAL (node->decl)
170 || before_inlining_p))
172 node->reachable = true;
173 enqueue_cgraph_node (node, first);
176 else
178 struct varpool_node *node = ipa_ref_varpool_node (ref);
179 if (!node->needed)
181 varpool_mark_needed_node (node);
182 enqueue_varpool_node (node, first_varpool);
188 /* Return true when function NODE can be removed from callgraph
189 if all direct calls are eliminated. */
191 static inline bool
192 varpool_can_remove_if_no_refs (struct varpool_node *node)
194 return (!node->force_output && !node->used_from_other_partition
195 && (DECL_COMDAT (node->decl) || !node->externally_visible));
198 /* Return true when function can be marked local. */
200 static bool
201 cgraph_local_node_p (struct cgraph_node *node)
203 return (cgraph_only_called_directly_p (node)
204 && node->analyzed
205 && !DECL_EXTERNAL (node->decl)
206 && !node->local.externally_visible
207 && !node->reachable_from_other_partition
208 && !node->in_other_partition);
211 /* Perform reachability analysis and reclaim all unreachable nodes.
212 If BEFORE_INLINING_P is true this function is called before inlining
213 decisions has been made. If BEFORE_INLINING_P is false this function also
214 removes unneeded bodies of extern inline functions. */
/* Returns true when anything was removed or changed.
   NOTE(review): brace-only lines appear to have been lost when this
   listing was extracted; the leading numbers are original file line
   numbers.  The logic below is kept byte-identical.  */
216 bool
217 cgraph_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
219 struct cgraph_node *first = (struct cgraph_node *) (void *) 1;
220 struct varpool_node *first_varpool = (struct varpool_node *) (void *) 1;
221 struct cgraph_node *node, *next;
222 struct varpool_node *vnode, *vnext;
223 bool changed = false;
225 #ifdef ENABLE_CHECKING
226 verify_cgraph ();
227 #endif
228 if (file)
229 fprintf (file, "\nReclaiming functions:");
230 #ifdef ENABLE_CHECKING
231 for (node = cgraph_nodes; node; node = node->next)
232 gcc_assert (!node->aux)1;
233 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
234 gcc_assert (!vnode->aux);
235 #endif
/* Phase 1: seed the function and variable worklists with symbols that
   must be kept regardless of references.  */
236 varpool_reset_queue ();
237 for (node = cgraph_nodes; node; node = node->next)
238 if (!cgraph_can_remove_if_no_direct_calls_and_refs_p (node)
239 && ((!DECL_EXTERNAL (node->decl))
240 || before_inlining_p))
242 gcc_assert (!node->global.inlined_to);
243 enqueue_cgraph_node (node, &first);
244 node->reachable = true;
246 else
248 gcc_assert (!node->aux);
249 node->reachable = false;
251 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
253 vnode->next_needed = NULL;
254 vnode->prev_needed = NULL;
255 if (!varpool_can_remove_if_no_refs (vnode))
257 vnode->needed = false;
258 varpool_mark_needed_node (vnode);
259 enqueue_varpool_node (vnode, &first_varpool);
261 else
262 vnode->needed = false;
265 /* Perform reachability analysis. As a special case do not consider
266 extern inline functions not inlined as live because we won't output
267 them at all.
269 We maintain two worklist, one for cgraph nodes other for varpools and
270 are finished once both are empty. */
272 while (first != (struct cgraph_node *) (void *) 1
273 || first_varpool != (struct varpool_node *) (void *) 1)
275 if (first != (struct cgraph_node *) (void *) 1)
277 struct cgraph_edge *e;
278 node = first;
279 first = (struct cgraph_node *) first->aux;
/* AUX == 2 marks "processed while unreachable" so the node can be
   re-enqueued by enqueue_cgraph_node if it later becomes reachable.  */
280 if (!node->reachable)
281 node->aux = (void *)2;
283 /* If we found this node reachable, first mark on the callees
284 reachable too, unless they are direct calls to extern inline functions
285 we decided to not inline. */
286 if (node->reachable)
288 for (e = node->callees; e; e = e->next_callee)
289 if (!e->callee->reachable
290 && node->analyzed
291 && (!e->inline_failed || !e->callee->analyzed
292 || (!DECL_EXTERNAL (e->callee->decl))
293 || before_inlining_p))
295 e->callee->reachable = true;
296 enqueue_cgraph_node (e->callee, &first);
298 process_references (&node->ref_list, &first, &first_varpool, before_inlining_p);
301 /* If any function in a comdat group is reachable, force
302 all other functions in the same comdat group to be
303 also reachable. */
304 if (node->same_comdat_group
305 && node->reachable
306 && !node->global.inlined_to)
308 for (next = node->same_comdat_group;
309 next != node;
310 next = next->same_comdat_group)
311 if (!next->reachable)
313 next->reachable = true;
314 enqueue_cgraph_node (next, &first)1;
318 /* We can freely remove inline clones even if they are cloned, however if
319 function is clone of real clone, we must keep it around in order to
320 make materialize_clones produce function body with the changes
321 applied. */
322 while (node->clone_of && !node->clone_of->aux
323 && !gimple_has_body_p (node->decl))
325 bool noninline = node->clone_of->decl != node->decl;
326 node = node->clone_of;
327 if (noninline && !node->reachable && !node->aux)
329 enqueue_cgraph_node (node, &first);
330 break;
334 if (first_varpool != (struct varpool_node *) (void *) 1)
336 vnode = first_varpool;
337 first_varpool = (struct varpool_node *)first_varpool->aux;
338 vnode->aux = NULL;
339 process_references (&vnode->ref_list, &first, &first_varpool, before_inlining_p);
340 /* If any function in a comdat group is reachable, force
341 all other functions in the same comdat group to be
342 also reachable. */
343 if (vnode->same_comdat_group)
345 struct varpool_node *next;
346 for (next = vnode->same_comdat_group;
347 next != vnode;
348 next = next->same_comdat_group)
349 if (!next->needed)
351 varpool_mark_needed_node (next);
352 enqueue_varpool_node (next, &first_varpool);
358 /* Remove unreachable nodes.
360 Completely unreachable functions can be fully removed from the callgraph.
361 Extern inline functions that we decided to not inline need to become unanalyzed nodes of
362 callgraph (so we still have edges to them). We remove function body then.
364 Also we need to care functions that are unreachable but we need to keep them around
365 for later clonning. In this case we also turn them to unanalyzed nodes, but
366 keep the body around. */
367 for (node = cgraph_nodes; node; node = next)
369 next = node->next;
/* AUX set but not reachable: node was visited only through the clone
   machinery; strip its body-related state but keep the node.  */
370 if (node->aux && !node->reachable)
372 cgraph_node_remove_callees (node);
373 ipa_remove_all_references (&node->ref_list);
374 node->analyzed = false;
375 node->local.inlinable = false;
/* AUX clear: never reached at all — remove outright, or, for extern
   nodes still referenced after inlining, turn into an unanalyzed
   declaration and unlink it from the clone tree.  */
377 if (!node->aux)
379 node->global.inlined_to = NULL;
380 if (file)
381 fprintf (file, " %s", cgraph_node_name (node));
382 if (!node->analyzed || !DECL_EXTERNAL (node->decl) || before_inlining_p)
383 cgraph_remove_node (node);
384 else
386 struct cgraph_edge *e;
388 /* See if there is reachable caller. */
389 for (e = node->callers; e; e = e->next_caller)
390 if (e->caller->reachable)
391 break;
393 /* If so, we need to keep node in the callgraph. */
394 if (e || node->needed)
396 struct cgraph_node *clone;
398 /* If there are still clones, we must keep body around.
399 Otherwise we can just remove the body but keep the clone. */
400 for (clone = node->clones; clone;
401 clone = clone->next_sibling_clone)
402 if (clone->aux)
403 break;
404 if (!clone)
406 cgraph_release_function_body (node);
407 node->analyzed = false;
408 node->local.inlinable = false;
410 else
411 gcc_assert (!clone->in_other_partition);
412 cgraph_node_remove_callees (node);
413 ipa_remove_all_references (&node->ref_list);
/* Unlink NODE from the sibling-clone list of its clone_of parent.  */
414 if (node->prev_sibling_clone)
415 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
416 else if (node->clone_of)
417 node->clone_of->clones = node->next_sibling_clone;
418 if (node->next_sibling_clone)
419 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
420 node->clone_of = NULL;
421 node->next_sibling_clone = NULL;
422 node->prev_sibling_clone = NULL;
424 else
425 cgraph_remove_node (node);
427 changed = true;
/* Phase 3: clear the AUX walk state and repair inlined_to pointers of
   clones whose inline host was removed.  */
430 for (node = cgraph_nodes; node; node = node->next)
432 /* Inline clones might be kept around so their materializing allows further
433 cloning. If the function the clone is inlined into is removed, we need
434 to turn it into normal cone. */
435 if (node->global.inlined_to
436 && !node->callers)
438 gcc_assert (node->clones);
439 node->global.inlined_to = NULL;
440 update_inlined_to_pointer (node, node);
442 node->aux = NULL;
445 if (file)
446 fprintf (file, "\n");
448 /* We must release unused extern inlines or sanity checking will fail. Rest of transformations
449 are undesirable at -O0 since we do not want to remove anything. */
450 if (!optimize)
451 return changed;
453 if (file)
454 fprintf (file, "Reclaiming variables:");
455 for (vnode = varpool_nodes; vnode; vnode = vnext)
457 vnext = vnode->next;
458 if (!vnode->needed)
460 if (file)
461 fprintf (file, " %s", varpool_node_name (vnode));
462 varpool_remove_node (vnode);
463 changed = true;
467 /* Now update address_taken flags and try to promote functions to be local. */
469 if (file)
470 fprintf (file, "\nClearing address taken flags:");
471 for (node = cgraph_nodes; node; node = node->next)
472 if (node->address_taken
473 && !node->reachable_from_other_partition)
475 int i;
476 struct ipa_ref *ref;
477 bool found = false;
/* A single remaining IPA_REF_ADDR reference keeps the flag set.  */
478 for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref)
479 && !found; i++)
481 gcc_assert (ref->use == IPA_REF_ADDR);
482 found = true;
484 if (!found)
486 if (file)
487 fprintf (file, " %s", cgraph_node_name (node));
488 node->address_taken = false;
489 changed = true;
490 if (cgraph_local_node_p (node))
492 node->local.local = true;
493 if (file)
494 fprintf (file, " (local)");
499 #ifdef ENABLE_CHECKING
500 verify_cgraph ();
501 #endif
503 /* Reclaim alias pairs for functions that have disappeared from the
504 call graph. */
505 remove_unreachable_alias_pairs ();
507 return changed;
510 /* Discover variables that have no longer address taken or that are read only
511 and update their flags.
513 FIXME: This can not be done in between gimplify and omp_expand since
514 readonly flag plays role on what is shared and what is not. Currently we do
515 this transformation as part of whole program visibility and re-do at
516 ipa-reference pass (to take into account clonning), but it would
517 make sense to do it before early optimizations. */
519 void
520 ipa_discover_readonly_nonaddressable_vars (void)
522 struct varpool_node *vnode;
523 if (dump_file)
524 fprintf (dump_file, "Clearing variable flags:");
525 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
526 if (vnode->finalized && varpool_all_refs_explicit_p (vnode)
527 && (TREE_ADDRESSABLE (vnode->decl) || !TREE_READONLY (vnode->decl)))
529 bool written = false;
530 bool address_taken = false;
531 int i;
532 struct ipa_ref *ref;
533 for (i = 0; ipa_ref_list_refering_iterate (&vnode->ref_list, i, ref)
534 && (!written || !address_taken); i++)
535 switch (ref->use)
537 case IPA_REF_ADDR:
538 address_taken = true;
539 break;
540 case IPA_REF_LOAD:
541 break;
542 case IPA_REF_STORE:
543 written = true;
544 break;
546 if (TREE_ADDRESSABLE (vnode->decl) && !address_taken)
548 if (dump_file)
549 fprintf (dump_file, " %s (addressable)", varpool_node_name (vnode));
550 TREE_ADDRESSABLE (vnode->decl) = 0;
552 if (!TREE_READONLY (vnode->decl) && !address_taken && !written
553 /* Making variable in explicit section readonly can cause section
554 type conflict.
555 See e.g. gcc.c-torture/compile/pr23237.c */
556 && DECL_SECTION_NAME (vnode->decl) == NULL)
558 if (dump_file)
559 fprintf (dump_file, " %s (read-only)", varpool_node_name (vnode));
560 TREE_READONLY (vnode->decl) = 1;
563 if (dump_file)
564 fprintf (dump_file, "\n");
567 /* Return true when function NODE should be considered externally visible. */
569 static bool
570 cgraph_externally_visible_p (struct cgraph_node *node, bool whole_program)
572 if (!node->local.finalized)
573 return false;
574 if (!DECL_COMDAT (node->decl)
575 && (!TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl)))
576 return false;
577 if (!whole_program)
578 return true;
579 if (node->local.used_from_object_file)
580 return true;
581 if (DECL_PRESERVE_P (node->decl))
582 return true;
583 /* COMDAT functions must be shared only if they have address taken,
584 otherwise we can produce our own private implementation with
585 -fwhole-program. */
586 if (DECL_COMDAT (node->decl))
588 if (node->address_taken || !node->analyzed)
589 return true;
590 if (node->same_comdat_group)
592 struct cgraph_node *next;
594 /* If more than one function is in the same COMDAT group, it must
595 be shared even if just one function in the comdat group has
596 address taken. */
597 for (next = node->same_comdat_group;
598 next != node;
599 next = next->same_comdat_group)
600 if (next->address_taken || !next->analyzed)
601 return true;
604 if (MAIN_NAME_P (DECL_NAME (node->decl)))
605 return true;
606 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (node->decl)))
607 return true;
608 return false;
611 /* Dissolve the same_comdat_group list in which NODE resides. */
613 static void
614 dissolve_same_comdat_group_list (struct cgraph_node *node)
616 struct cgraph_node *n = node, *next;
619 next = n->same_comdat_group;
620 n->same_comdat_group = NULL;
621 n = next;
623 while (n != node);
626 /* Mark visibility of all functions.
628 A local function is one whose calls can occur only in the current
629 compilation unit and all its calls are explicit, so we can change
630 its calling convention. We simply mark all static functions whose
631 address is not taken as local.
633 We also change the TREE_PUBLIC flag of all declarations that are public
634 in language point of view but we want to overwrite this default
635 via visibilities for the backend point of view. */
/* WHOLE_PROGRAM enables -fwhole-program privatization.  Always returns 0
   (pass-manager TODO flags).
   NOTE(review): brace-only lines appear to have been lost when this
   listing was extracted; the leading numbers are original file line
   numbers.  The logic below is kept byte-identical.  */
637 static unsigned int
638 function_and_variable_visibility (bool whole_program)
640 struct cgraph_node *node;
641 struct varpool_node *vnode;
/* Pass over functions: canonicalize COMDAT state, then decide and
   apply external visibility.  */
643 for (node = cgraph_nodes; node; node = node->next)
645 /* C++ FE on lack of COMDAT support create local COMDAT functions
646 (that ought to be shared but can not due to object format
647 limitations). It is neccesary to keep the flag to make rest of C++ FE
648 happy. Clear the flag here to avoid confusion in middle-end. */
649 if (DECL_COMDAT (node->decl) && !TREE_PUBLIC (node->decl))
650 DECL_COMDAT (node->decl) = 0;
651 /* For external decls stop tracking same_comdat_group, it doesn't matter
652 what comdat group they are in when they won't be emitted in this TU,
653 and simplifies later passes. */
654 if (node->same_comdat_group && DECL_EXTERNAL (node->decl))
656 #ifdef ENABLE_CHECKING
657 struct cgraph_node *n;
659 for (n = node->same_comdat_group;
660 n != node;
661 n = n->same_comdat_group)
662 /* If at least one of same comdat group functions is external,
663 all of them have to be, otherwise it is a front-end bug. */
664 gcc_assert (DECL_EXTERNAL (n->decl));
665 #endif
666 dissolve_same_comdat_group_list (node);
668 gcc_assert ((!DECL_WEAK (node->decl) && !DECL_COMDAT (node->decl))
669 || TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl));
670 if (cgraph_externally_visible_p (node, whole_program))
672 gcc_assert (!node->global.inlined_to);
673 node->local.externally_visible = true;
675 else
676 node->local.externally_visible = false;
/* Privatize functions that ended up non-visible: make the decl and all
   its same-body aliases local.  */
677 if (!node->local.externally_visible && node->analyzed
678 && !DECL_EXTERNAL (node->decl))
680 struct cgraph_node *alias;
681 gcc_assert (whole_program || !TREE_PUBLIC (node->decl));
682 cgraph_make_decl_local (node->decl);
683 for (alias = node->same_body; alias; alias = alias->next)
684 cgraph_make_decl_local (alias->decl);
685 if (node->same_comdat_group)
686 /* cgraph_externally_visible_p has already checked all other nodes
687 in the group and they will all be made local. We need to
688 dissolve the group at once so that the predicate does not
689 segfault though. */
690 dissolve_same_comdat_group_list (node);
692 node->local.local = cgraph_local_node_p (node);
/* Pass over all variables: canonicalize the DECL_COMMON flag.  */
694 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
696 /* weak flag makes no sense on local variables. */
697 gcc_assert (!DECL_WEAK (vnode->decl)
698 || TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl));
699 /* In several cases declarations can not be common:
701 - when declaration has initializer
702 - when it is in weak
703 - when it has specific section
704 - when it resides in non-generic address space.
705 - if declaration is local, it will get into .local common section
706 so common flag is not needed. Frontends still produce these in
707 certain cases, such as for:
709 static int a __attribute__ ((common))
711 Canonicalize things here and clear the redundant flag. */
712 if (DECL_COMMON (vnode->decl)
713 && (!(TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl))
714 || (DECL_INITIAL (vnode->decl)
715 && DECL_INITIAL (vnode->decl) != error_mark_node)
716 || DECL_WEAK (vnode->decl)
717 || DECL_SECTION_NAME (vnode->decl) != NULL
718 || ! (ADDR_SPACE_GENERIC_P
719 (TYPE_ADDR_SPACE (TREE_TYPE (vnode->decl))))))
720 DECL_COMMON (vnode->decl) = 0;
/* Pass over queued (needed) variables: decide and apply visibility.  */
722 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
724 if (!vnode->finalized)
725 continue;
726 if (vnode->needed
727 && (DECL_COMDAT (vnode->decl) || TREE_PUBLIC (vnode->decl))
728 && (!whole_program
729 /* We can privatize comdat readonly variables whose address is not taken,
730 but doing so is not going to bring us optimization oppurtunities until
731 we start reordering datastructures. */
732 || DECL_COMDAT (vnode->decl)
733 || DECL_WEAK (vnode->decl)
734 || vnode->used_from_object_file
735 || lookup_attribute ("externally_visible",
736 DECL_ATTRIBUTES (vnode->decl))))
737 vnode->externally_visible = true;
738 else
739 vnode->externally_visible = false;
740 if (!vnode->externally_visible)
742 gcc_assert (whole_program || !TREE_PUBLIC (vnode->decl));
743 cgraph_make_decl_local (vnode->decl);
745 gcc_assert (TREE_STATIC (vnode->decl));
/* Finally dump the decisions made above.  */
748 if (dump_file)
750 fprintf (dump_file, "\nMarking local functions:");
751 for (node = cgraph_nodes; node; node = node->next)
752 if (node->local.local)
753 fprintf (dump_file, " %s", cgraph_node_name (node));
754 fprintf (dump_file, "\n\n");
755 fprintf (dump_file, "\nMarking externally visible functions:");
756 for (node = cgraph_nodes; node; node = node->next)
757 if (node->local.externally_visible)
758 fprintf (dump_file, " %s", cgraph_node_name (node));
759 fprintf (dump_file, "\n\n");
760 fprintf (dump_file, "\nMarking externally visible variables:");
761 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
762 if (vnode->externally_visible)
763 fprintf (dump_file, " %s", varpool_node_name (vnode));
764 fprintf (dump_file, "\n\n");
766 cgraph_function_flags_ready = true;
767 return 0;
770 /* Local function pass handling visibilities. This happens before LTO streaming
771 so in particular -fwhole-program should be ignored at this level. */
773 static unsigned int
774 local_function_and_variable_visibility (void)
776 return function_and_variable_visibility (flag_whole_program && !flag_lto && !flag_whopr);
/* Pass descriptor for the local "visibility" pass; always runs (no
   gate) under timevar TV_CGRAPHOPT and requests function removal, a
   cgraph dump and a GC collection afterwards.
   NOTE(review): initializer braces appear lost in this extract; the
   leading numbers are original file line numbers.  */
779 struct simple_ipa_opt_pass pass_ipa_function_and_variable_visibility =
782 SIMPLE_IPA_PASS,
783 "visibility", /* name */
784 NULL, /* gate */
785 local_function_and_variable_visibility,/* execute */
786 NULL, /* sub */
787 NULL, /* next */
788 0, /* static_pass_number */
789 TV_CGRAPHOPT, /* tv_id */
790 0, /* properties_required */
791 0, /* properties_provided */
792 0, /* properties_destroyed */
793 0, /* todo_flags_start */
794 TODO_remove_functions | TODO_dump_cgraph
795 | TODO_ggc_collect /* todo_flags_finish */
799 /* Do not re-run on ltrans stage. */
801 static bool
802 gate_whole_program_function_and_variable_visibility (void)
804 return !flag_ltrans;
807 /* Bring functionss local at LTO time whith -fwhole-program. */
809 static unsigned int
810 whole_program_function_and_variable_visibility (void)
812 struct cgraph_node *node;
813 struct varpool_node *vnode;
815 function_and_variable_visibility (flag_whole_program);
817 for (node = cgraph_nodes; node; node = node->next)
818 if ((node->local.externally_visible && !DECL_COMDAT (node->decl))
819 && node->local.finalized)
820 cgraph_mark_needed_node (node);
821 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
822 if (vnode->externally_visible && !DECL_COMDAT (vnode->decl))
823 varpool_mark_needed_node (vnode);
824 if (dump_file)
826 fprintf (dump_file, "\nNeeded variables:");
827 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
828 if (vnode->needed)
829 fprintf (dump_file, " %s", varpool_node_name (vnode));
830 fprintf (dump_file, "\n\n");
832 if (optimize)
833 ipa_discover_readonly_nonaddressable_vars ();
834 return 0;
/* Pass descriptor for the LTO "whole-program" visibility pass; gated to
   skip the ltrans stage, runs under TV_CGRAPHOPT and uses no summary
   streaming or transform hooks.
   NOTE(review): initializer braces appear lost in this extract; the
   leading numbers are original file line numbers.  */
837 struct ipa_opt_pass_d pass_ipa_whole_program_visibility =
840 IPA_PASS,
841 "whole-program", /* name */
842 gate_whole_program_function_and_variable_visibility,/* gate */
843 whole_program_function_and_variable_visibility,/* execute */
844 NULL, /* sub */
845 NULL, /* next */
846 0, /* static_pass_number */
847 TV_CGRAPHOPT, /* tv_id */
848 0, /* properties_required */
849 0, /* properties_provided */
850 0, /* properties_destroyed */
851 0, /* todo_flags_start */
852 TODO_remove_functions | TODO_dump_cgraph
853 | TODO_ggc_collect /* todo_flags_finish */
855 NULL, /* generate_summary */
856 NULL, /* write_summary */
857 NULL, /* read_summary */
858 NULL, /* write_optimization_summary */
859 NULL, /* read_optimization_summary */
860 NULL, /* stmt_fixup */
861 0, /* TODOs */
862 NULL, /* function_transform */
863 NULL, /* variable_transform */
866 /* Hash a cgraph node set element. */
868 static hashval_t
869 hash_cgraph_node_set_element (const void *p)
871 const_cgraph_node_set_element element = (const_cgraph_node_set_element) p;
872 return htab_hash_pointer (element->node);
875 /* Compare two cgraph node set elements. */
877 static int
878 eq_cgraph_node_set_element (const void *p1, const void *p2)
880 const_cgraph_node_set_element e1 = (const_cgraph_node_set_element) p1;
881 const_cgraph_node_set_element e2 = (const_cgraph_node_set_element) p2;
883 return e1->node == e2->node;
886 /* Create a new cgraph node set. */
888 cgraph_node_set
889 cgraph_node_set_new (void)
891 cgraph_node_set new_node_set;
893 new_node_set = ggc_alloc_cgraph_node_set_def ();
894 new_node_set->hashtab = htab_create_ggc (10,
895 hash_cgraph_node_set_element,
896 eq_cgraph_node_set_element,
897 NULL);
898 new_node_set->nodes = NULL;
899 return new_node_set;
902 /* Add cgraph_node NODE to cgraph_node_set SET. */
904 void
905 cgraph_node_set_add (cgraph_node_set set, struct cgraph_node *node)
907 void **slot;
908 cgraph_node_set_element element;
909 struct cgraph_node_set_element_def dummy;
911 dummy.node = node;
912 slot = htab_find_slot (set->hashtab, &dummy, INSERT);
914 if (*slot != HTAB_EMPTY_ENTRY)
916 element = (cgraph_node_set_element) *slot;
917 gcc_assert (node == element->node
918 && (VEC_index (cgraph_node_ptr, set->nodes, element->index)
919 == node));
920 return;
923 /* Insert node into hash table. */
924 element = ggc_alloc_cgraph_node_set_element_def ();
925 element->node = node;
926 element->index = VEC_length (cgraph_node_ptr, set->nodes);
927 *slot = element;
929 /* Insert into node vector. */
930 VEC_safe_push (cgraph_node_ptr, gc, set->nodes, node);
933 /* Remove cgraph_node NODE from cgraph_node_set SET. */
935 void
936 cgraph_node_set_remove (cgraph_node_set set, struct cgraph_node *node)
938 void **slot, **last_slot;
939 cgraph_node_set_element element, last_element;
940 struct cgraph_node *last_node;
941 struct cgraph_node_set_element_def dummy;
943 dummy.node = node;
944 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
945 if (slot == NULL)
946 return;
948 element = (cgraph_node_set_element) *slot;
949 gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
950 == node);
952 /* Remove from vector. We do this by swapping node with the last element
953 of the vector. */
954 last_node = VEC_pop (cgraph_node_ptr, set->nodes);
955 if (last_node != node)
957 dummy.node = last_node;
958 last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
959 last_element = (cgraph_node_set_element) *last_slot;
960 gcc_assert (last_element);
962 /* Move the last element to the original spot of NODE. */
963 last_element->index = element->index;
964 VEC_replace (cgraph_node_ptr, set->nodes, last_element->index,
965 last_node);
968 /* Remove element from hash table. */
969 htab_clear_slot (set->hashtab, slot);
970 ggc_free (element);
973 /* Find NODE in SET and return an iterator to it if found. A null iterator
974 is returned if NODE is not in SET. */
976 cgraph_node_set_iterator
977 cgraph_node_set_find (cgraph_node_set set, struct cgraph_node *node)
979 void **slot;
980 struct cgraph_node_set_element_def dummy;
981 cgraph_node_set_element element;
982 cgraph_node_set_iterator csi;
984 dummy.node = node;
985 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
986 if (slot == NULL)
987 csi.index = (unsigned) ~0;
988 else
990 element = (cgraph_node_set_element) *slot;
991 gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
992 == node);
993 csi.index = element->index;
995 csi.set = set;
997 return csi;
1000 /* Dump content of SET to file F. */
1002 void
1003 dump_cgraph_node_set (FILE *f, cgraph_node_set set)
1005 cgraph_node_set_iterator iter;
1007 for (iter = csi_start (set); !csi_end_p (iter); csi_next (&iter))
1009 struct cgraph_node *node = csi_node (iter);
1010 fprintf (f, " %s/%i", cgraph_node_name (node), node->uid);
1012 fprintf (f, "\n");
1015 /* Dump content of SET to stderr. */
1017 DEBUG_FUNCTION void
1018 debug_cgraph_node_set (cgraph_node_set set)
1020 dump_cgraph_node_set (stderr, set);
1023 /* Hash a varpool node set element. */
1025 static hashval_t
1026 hash_varpool_node_set_element (const void *p)
1028 const_varpool_node_set_element element = (const_varpool_node_set_element) p;
1029 return htab_hash_pointer (element->node);
1032 /* Compare two varpool node set elements. */
1034 static int
1035 eq_varpool_node_set_element (const void *p1, const void *p2)
1037 const_varpool_node_set_element e1 = (const_varpool_node_set_element) p1;
1038 const_varpool_node_set_element e2 = (const_varpool_node_set_element) p2;
1040 return e1->node == e2->node;
1043 /* Create a new varpool node set. */
1045 varpool_node_set
1046 varpool_node_set_new (void)
1048 varpool_node_set new_node_set;
1050 new_node_set = ggc_alloc_varpool_node_set_def ();
1051 new_node_set->hashtab = htab_create_ggc (10,
1052 hash_varpool_node_set_element,
1053 eq_varpool_node_set_element,
1054 NULL);
1055 new_node_set->nodes = NULL;
1056 return new_node_set;
1059 /* Add varpool_node NODE to varpool_node_set SET. */
1061 void
1062 varpool_node_set_add (varpool_node_set set, struct varpool_node *node)
1064 void **slot;
1065 varpool_node_set_element element;
1066 struct varpool_node_set_element_def dummy;
1068 dummy.node = node;
1069 slot = htab_find_slot (set->hashtab, &dummy, INSERT);
1071 if (*slot != HTAB_EMPTY_ENTRY)
1073 element = (varpool_node_set_element) *slot;
1074 gcc_assert (node == element->node
1075 && (VEC_index (varpool_node_ptr, set->nodes, element->index)
1076 == node));
1077 return;
1080 /* Insert node into hash table. */
1081 element = ggc_alloc_varpool_node_set_element_def ();
1082 element->node = node;
1083 element->index = VEC_length (varpool_node_ptr, set->nodes);
1084 *slot = element;
1086 /* Insert into node vector. */
1087 VEC_safe_push (varpool_node_ptr, gc, set->nodes, node);
1090 /* Remove varpool_node NODE from varpool_node_set SET. */
1092 void
1093 varpool_node_set_remove (varpool_node_set set, struct varpool_node *node)
1095 void **slot, **last_slot;
1096 varpool_node_set_element element, last_element;
1097 struct varpool_node *last_node;
1098 struct varpool_node_set_element_def dummy;
1100 dummy.node = node;
1101 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1102 if (slot == NULL)
1103 return;
1105 element = (varpool_node_set_element) *slot;
1106 gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
1107 == node);
1109 /* Remove from vector. We do this by swapping node with the last element
1110 of the vector. */
1111 last_node = VEC_pop (varpool_node_ptr, set->nodes);
1112 if (last_node != node)
1114 dummy.node = last_node;
1115 last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1116 last_element = (varpool_node_set_element) *last_slot;
1117 gcc_assert (last_element);
1119 /* Move the last element to the original spot of NODE. */
1120 last_element->index = element->index;
1121 VEC_replace (varpool_node_ptr, set->nodes, last_element->index,
1122 last_node);
1125 /* Remove element from hash table. */
1126 htab_clear_slot (set->hashtab, slot);
1127 ggc_free (element);
1130 /* Find NODE in SET and return an iterator to it if found. A null iterator
1131 is returned if NODE is not in SET. */
1133 varpool_node_set_iterator
1134 varpool_node_set_find (varpool_node_set set, struct varpool_node *node)
1136 void **slot;
1137 struct varpool_node_set_element_def dummy;
1138 varpool_node_set_element element;
1139 varpool_node_set_iterator vsi;
1141 dummy.node = node;
1142 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1143 if (slot == NULL)
1144 vsi.index = (unsigned) ~0;
1145 else
1147 element = (varpool_node_set_element) *slot;
1148 gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
1149 == node);
1150 vsi.index = element->index;
1152 vsi.set = set;
1154 return vsi;
1157 /* Dump content of SET to file F. */
1159 void
1160 dump_varpool_node_set (FILE *f, varpool_node_set set)
1162 varpool_node_set_iterator iter;
1164 for (iter = vsi_start (set); !vsi_end_p (iter); vsi_next (&iter))
1166 struct varpool_node *node = vsi_node (iter);
1167 fprintf (f, " %s", varpool_node_name (node));
1169 fprintf (f, "\n");
1172 /* Dump content of SET to stderr. */
1174 DEBUG_FUNCTION void
1175 debug_varpool_node_set (varpool_node_set set)
1177 dump_varpool_node_set (stderr, set);
1181 /* Simple ipa profile pass propagating frequencies across the callgraph. */
1183 static unsigned int
1184 ipa_profile (void)
1186 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1187 struct cgraph_edge *e;
1188 int order_pos;
1189 bool something_changed = false;
1190 int i;
1192 order_pos = cgraph_postorder (order);
1193 for (i = order_pos - 1; i >= 0; i--)
1195 if (order[i]->local.local && cgraph_propagate_frequency (order[i]))
1197 for (e = order[i]->callees; e; e = e->next_callee)
1198 if (e->callee->local.local && !e->callee->aux)
1200 something_changed = true;
1201 e->callee->aux = (void *)1;
1204 order[i]->aux = NULL;
1207 while (something_changed)
1209 something_changed = false;
1210 for (i = order_pos - 1; i >= 0; i--)
1212 if (order[i]->aux && cgraph_propagate_frequency (order[i]))
1214 for (e = order[i]->callees; e; e = e->next_callee)
1215 if (e->callee->local.local && !e->callee->aux)
1217 something_changed = true;
1218 e->callee->aux = (void *)1;
1221 order[i]->aux = NULL;
1224 free (order);
1225 return 0;
1228 static bool
1229 gate_ipa_profile (void)
1231 return flag_ipa_profile;
1234 struct ipa_opt_pass_d pass_ipa_profile =
1237 IPA_PASS,
1238 "ipa-profile", /* name */
1239 gate_ipa_profile, /* gate */
1240 ipa_profile, /* execute */
1241 NULL, /* sub */
1242 NULL, /* next */
1243 0, /* static_pass_number */
1244 TV_IPA_PROFILE, /* tv_id */
1245 0, /* properties_required */
1246 0, /* properties_provided */
1247 0, /* properties_destroyed */
1248 0, /* todo_flags_start */
1249 0 /* todo_flags_finish */
1251 NULL, /* generate_summary */
1252 NULL, /* write_summary */
1253 NULL, /* read_summary */
1254 NULL, /* write_optimization_summary */
1255 NULL, /* read_optimization_summary */
1256 NULL, /* stmt_fixup */
1257 0, /* TODOs */
1258 NULL, /* function_transform */
1259 NULL /* variable_transform */