Merge branch 'master' into python
[official-gcc.git] / gcc / ipa.c
blob 319a3f1b8bb9a921b822c2e63c69146211aebacf
1 /* Basic IPA optimizations and utilities.
2 Copyright (C) 2003, 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "cgraph.h"
26 #include "tree-pass.h"
27 #include "timevar.h"
28 #include "gimple.h"
29 #include "ggc.h"
30 #include "flags.h"
/* Fill the array ORDER with all cgraph nodes in reverse topological
   order of the callgraph.  ORDER must have room for cgraph_n_nodes
   entries; the number of entries actually stored is returned.
   Uses node->aux as DFS scratch space and clears it before returning.
   NOTE(review): this listing lost its brace-only lines during
   extraction; code text is otherwise preserved verbatim.  */
35 int
36 cgraph_postorder (struct cgraph_node **order)
38 struct cgraph_node *node, *node2;
39 int stack_size = 0;
40 int order_pos = 0;
/* LAST is never used as an edge; its address stored in node->aux is the
   sentinel meaning "all callers of this node have been visited".  */
41 struct cgraph_edge *edge, last;
42 int pass;
44 struct cgraph_node **stack =
45 XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
47 /* We have to deal with cycles nicely, so use a depth first traversal
48 output algorithm. Ignore the fact that some functions won't need
49 to be output and put them into order as well, so we get dependencies
50 right through inline functions. */
51 for (node = cgraph_nodes; node; node = node->next)
52 node->aux = NULL;
/* Pass 0 seeds the DFS only from nodes passing the visibility test
   below; pass 1 then sweeps up every node not yet visited.  */
53 for (pass = 0; pass < 2; pass++)
54 for (node = cgraph_nodes; node; node = node->next)
55 if (!node->aux
56 && (pass
57 || (!cgraph_only_called_directly_p (node)
58 && !node->address_taken)))
/* node->aux holds the next caller edge still to visit, or &last
   when the caller list is exhausted.  */
60 node2 = node;
61 if (!node->callers)
62 node->aux = &last;
63 else
64 node->aux = node->callers;
65 while (node2)
67 while (node2->aux != &last)
69 edge = (struct cgraph_edge *) node2->aux;
70 if (edge->next_caller)
71 node2->aux = edge->next_caller;
72 else
73 node2->aux = &last;
74 /* Break possible cycles involving always-inline
75 functions by ignoring edges from always-inline
76 functions to non-always-inline functions. */
77 if (edge->caller->local.disregard_inline_limits
78 && !edge->callee->local.disregard_inline_limits)
79 continue;
/* Unvisited caller: descend into it, remembering where to resume.  */
80 if (!edge->caller->aux)
82 if (!edge->caller->callers)
83 edge->caller->aux = &last;
84 else
85 edge->caller->aux = edge->caller->callers;
86 stack[stack_size++] = node2;
87 node2 = edge->caller;
88 break;
/* All callers of NODE2 handled: emit it and pop the DFS stack.  */
91 if (node2->aux == &last)
93 order[order_pos++] = node2;
94 if (stack_size)
95 node2 = stack[--stack_size];
96 else
97 node2 = NULL;
/* Release the explicit DFS stack and reset the aux scratch fields.  */
101 free (stack);
102 for (node = cgraph_nodes; node; node = node->next)
103 node->aux = NULL;
104 return order_pos;
107 /* Look for all functions inlined to NODE and update their inlined_to pointers
108 to INLINED_TO. */
110 static void
111 update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
113 struct cgraph_edge *e;
114 for (e = node->callees; e; e = e->next_callee)
115 if (e->callee->global.inlined_to)
117 e->callee->global.inlined_to = inlined_to;
118 update_inlined_to_pointer (e->callee, inlined_to);
122 /* Add cgraph NODE to queue starting at FIRST.
124 The queue is linked via AUX pointers and terminated by pointer to 1.
125 We enqueue nodes at two occasions: when we find them reachable or when we find
126 their bodies needed for further clonning. In the second case we mark them
127 by pointer to 2 after processing so they are re-queue when they become
128 reachable. */
130 static void
131 enqueue_cgraph_node (struct cgraph_node *node, struct cgraph_node **first)
133 /* Node is still in queue; do nothing. */
134 if (node->aux && node->aux != (void *) 2)
135 return;
136 /* Node was already processed as unreachable, re-enqueue
137 only if it became reachable now. */
138 if (node->aux == (void *)2 && !node->reachable)
139 return;
140 node->aux = *first;
141 *first = node;
144 /* Add varpool NODE to queue starting at FIRST. */
146 static void
147 enqueue_varpool_node (struct varpool_node *node, struct varpool_node **first)
149 node->aux = *first;
150 *first = node;
153 /* Process references. */
155 static void
156 process_references (struct ipa_ref_list *list,
157 struct cgraph_node **first,
158 struct varpool_node **first_varpool,
159 bool before_inlining_p)
161 int i;
162 struct ipa_ref *ref;
163 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
165 if (ref->refered_type == IPA_REF_CGRAPH)
167 struct cgraph_node *node = ipa_ref_node (ref);
168 if (!node->reachable
169 && (!DECL_EXTERNAL (node->decl)
170 || before_inlining_p))
172 node->reachable = true;
173 enqueue_cgraph_node (node, first);
176 else
178 struct varpool_node *node = ipa_ref_varpool_node (ref);
179 if (!node->needed)
181 varpool_mark_needed_node (node);
182 enqueue_varpool_node (node, first_varpool);
/* Return true when variable NODE can be removed from the varpool if
   all references to it are eliminated.  (The original comment said
   "function" -- this predicate operates on varpool variables.)
   Forced-output variables and variables used from another LTO
   partition must stay; otherwise a COMDAT or non-externally-visible
   variable is removable.  */
191 static inline bool
192 varpool_can_remove_if_no_refs (struct varpool_node *node)
194 return (!node->force_output && !node->used_from_other_partition
195 && (DECL_COMDAT (node->decl) || !node->externally_visible));
198 /* Return true when function can be marked local. */
200 static bool
201 cgraph_local_node_p (struct cgraph_node *node)
203 return (cgraph_only_called_directly_p (node)
204 && node->analyzed
205 && !DECL_EXTERNAL (node->decl)
206 && !node->local.externally_visible
207 && !node->reachable_from_other_partition
208 && !node->in_other_partition);
/* Perform reachability analysis and reclaim all unreachable nodes.
   If BEFORE_INLINING_P is true this function is called before inlining
   decisions have been made.  If BEFORE_INLINING_P is false this
   function also removes unneeded bodies of extern inline functions.
   Progress is dumped to FILE when non-NULL.  Returns true when
   anything was removed or changed.  */
216 bool
217 cgraph_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
/* Both worklists are threaded through aux fields and terminated by the
   pointer-to-1 sentinel.  */
219 struct cgraph_node *first = (struct cgraph_node *) (void *) 1;
220 struct varpool_node *first_varpool = (struct varpool_node *) (void *) 1;
221 struct cgraph_node *node, *next;
222 struct varpool_node *vnode, *vnext;
223 bool changed = false;
225 #ifdef ENABLE_CHECKING
226 verify_cgraph ();
227 #endif
228 if (file)
229 fprintf (file, "\nReclaiming functions:");
230 #ifdef ENABLE_CHECKING
231 for (node = cgraph_nodes; node; node = node->next)
232 gcc_assert (!node->aux);
233 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
234 gcc_assert (!vnode->aux);
235 #endif
236 varpool_reset_queue ();
/* Seed the function worklist with the roots: nodes that must be kept
   even if nothing calls or references them.  */
237 for (node = cgraph_nodes; node; node = node->next)
238 if (!cgraph_can_remove_if_no_direct_calls_and_refs_p (node)
239 && ((!DECL_EXTERNAL (node->decl))
240 || before_inlining_p))
242 gcc_assert (!node->global.inlined_to);
243 enqueue_cgraph_node (node, &first);
244 node->reachable = true;
246 else
248 gcc_assert (!node->aux);
249 node->reachable = false;
/* Rebuild the varpool needed list from scratch, seeding it with
   variables that can never be removed.  */
251 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
253 vnode->next_needed = NULL;
254 vnode->prev_needed = NULL;
255 if (!varpool_can_remove_if_no_refs (vnode))
257 vnode->needed = false;
258 varpool_mark_needed_node (vnode);
259 enqueue_varpool_node (vnode, &first_varpool);
261 else
262 vnode->needed = false;
265 /* Perform reachability analysis. As a special case do not consider
266 extern inline functions not inlined as live because we won't output
267 them at all.
269 We maintain two worklist, one for cgraph nodes other for varpools and
270 are finished once both are empty. */
272 while (first != (struct cgraph_node *) (void *) 1
273 || first_varpool != (struct varpool_node *) (void *) 1)
275 if (first != (struct cgraph_node *) (void *) 1)
277 struct cgraph_edge *e;
278 node = first;
279 first = (struct cgraph_node *) first->aux;
/* Pointer-to-2 marks a node processed while unreachable; it may be
   re-queued later should it become reachable (see
   enqueue_cgraph_node).  */
280 if (!node->reachable)
281 node->aux = (void *)2;
283 /* If we found this node reachable, first mark on the callees
284 reachable too, unless they are direct calls to extern inline functions
285 we decided to not inline. */
286 if (node->reachable)
287 for (e = node->callees; e; e = e->next_callee)
288 if (!e->callee->reachable
289 && node->analyzed
290 && (!e->inline_failed || !e->callee->analyzed
291 || (!DECL_EXTERNAL (e->callee->decl))
292 || before_inlining_p))
294 e->callee->reachable = true;
295 enqueue_cgraph_node (e->callee, &first);
298 /* If any function in a comdat group is reachable, force
299 all other functions in the same comdat group to be
300 also reachable. */
301 if (node->same_comdat_group
302 && node->reachable
303 && !node->global.inlined_to)
305 for (next = node->same_comdat_group;
306 next != node;
307 next = next->same_comdat_group)
308 if (!next->reachable)
310 next->reachable = true;
311 enqueue_cgraph_node (next, &first);
315 /* We can freely remove inline clones even if they are cloned, however if
316 function is clone of real clone, we must keep it around in order to
317 make materialize_clones produce function body with the changes
318 applied. */
319 while (node->clone_of && !node->clone_of->aux && !gimple_has_body_p (node->decl))
321 bool noninline = node->clone_of->decl != node->decl;
322 node = node->clone_of;
323 if (noninline && !node->reachable && !node->aux)
325 enqueue_cgraph_node (node, &first);
326 break;
329 process_references (&node->ref_list, &first, &first_varpool, before_inlining_p);
331 if (first_varpool != (struct varpool_node *) (void *) 1)
333 vnode = first_varpool;
334 first_varpool = (struct varpool_node *)first_varpool->aux;
335 vnode->aux = NULL;
336 process_references (&vnode->ref_list, &first, &first_varpool, before_inlining_p);
337 /* If any function in a comdat group is reachable, force
338 all other functions in the same comdat group to be
339 also reachable. */
340 if (vnode->same_comdat_group)
342 struct varpool_node *next;
343 for (next = vnode->same_comdat_group;
344 next != vnode;
345 next = next->same_comdat_group)
346 if (!next->needed)
348 varpool_mark_needed_node (next);
349 enqueue_varpool_node (next, &first_varpool);
355 /* Remove unreachable nodes.
357 Completely unreachable functions can be fully removed from the callgraph.
358 Extern inline functions that we decided to not inline need to become unanalyzed nodes of
359 callgraph (so we still have edges to them). We remove function body then.
361 Also we need to care functions that are unreachable but we need to keep them around
362 for later clonning. In this case we also turn them to unanalyzed nodes, but
363 keep the body around. */
364 for (node = cgraph_nodes; node; node = next)
366 next = node->next;
367 if (node->aux && !node->reachable)
369 cgraph_node_remove_callees (node);
370 node->analyzed = false;
371 node->local.inlinable = false;
/* Nodes never enqueued (aux still NULL) were not reached at all.  */
373 if (!node->aux)
375 node->global.inlined_to = NULL;
376 if (file)
377 fprintf (file, " %s", cgraph_node_name (node));
378 if (!node->analyzed || !DECL_EXTERNAL (node->decl) || before_inlining_p)
379 cgraph_remove_node (node);
380 else
382 struct cgraph_edge *e;
384 /* See if there is reachable caller. */
385 for (e = node->callers; e; e = e->next_caller)
386 if (e->caller->reachable)
387 break;
389 /* If so, we need to keep node in the callgraph. */
390 if (e || node->needed)
392 struct cgraph_node *clone;
394 /* If there are still clones, we must keep body around.
395 Otherwise we can just remove the body but keep the clone. */
396 for (clone = node->clones; clone;
397 clone = clone->next_sibling_clone)
398 if (clone->aux)
399 break;
400 if (!clone)
402 cgraph_release_function_body (node);
403 node->analyzed = false;
404 node->local.inlinable = false;
406 else
407 gcc_assert (!clone->in_other_partition);
408 cgraph_node_remove_callees (node);
409 ipa_remove_all_references (&node->ref_list);
/* Detach NODE from its clone tree; it no longer provides a body.  */
410 if (node->prev_sibling_clone)
411 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
412 else if (node->clone_of)
413 node->clone_of->clones = node->next_sibling_clone;
414 if (node->next_sibling_clone)
415 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
416 node->clone_of = NULL;
417 node->next_sibling_clone = NULL;
418 node->prev_sibling_clone = NULL;
420 else
421 cgraph_remove_node (node);
423 changed = true;
426 for (node = cgraph_nodes; node; node = node->next)
428 /* Inline clones might be kept around so their materializing allows further
429 cloning. If the function the clone is inlined into is removed, we need
430 to turn it into normal cone. */
431 if (node->global.inlined_to
432 && !node->callers)
434 gcc_assert (node->clones);
435 node->global.inlined_to = NULL;
436 update_inlined_to_pointer (node, node);
438 node->aux = NULL;
441 if (file)
442 fprintf (file, "\n");
444 /* We must release unused extern inlines or sanity checking will fail. Rest of transformations
445 are undesirable at -O0 since we do not want to remove anything. */
446 if (!optimize)
447 return changed;
449 if (file)
450 fprintf (file, "Reclaiming variables:");
451 for (vnode = varpool_nodes; vnode; vnode = vnext)
453 vnext = vnode->next;
454 if (!vnode->needed)
456 if (file)
457 fprintf (file, " %s", varpool_node_name (vnode));
458 varpool_remove_node (vnode);
459 changed = true;
463 /* Now update address_taken flags and try to promote functions to be local. */
465 if (file)
466 fprintf (file, "\nClearing address taken flags:");
467 for (node = cgraph_nodes; node; node = node->next)
468 if (node->address_taken
469 && !node->reachable_from_other_partition)
471 int i;
472 struct ipa_ref *ref;
473 bool found = false;
/* Any remaining referring ref must be an address-taking one; a single
   hit is enough to keep the flag.  */
474 for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref)
475 && !found; i++)
477 gcc_assert (ref->use == IPA_REF_ADDR);
478 found = true;
480 if (!found)
482 if (file)
483 fprintf (file, " %s", cgraph_node_name (node));
484 node->address_taken = false;
485 changed = true;
486 if (cgraph_local_node_p (node))
488 node->local.local = true;
489 if (file)
490 fprintf (file, " (local)");
495 #ifdef ENABLE_CHECKING
496 verify_cgraph ();
497 #endif
499 /* Reclaim alias pairs for functions that have disappeared from the
500 call graph. */
501 remove_unreachable_alias_pairs ();
503 return changed;
/* Discover variables that no longer have their address taken or that
   are only ever read, and update their TREE_ADDRESSABLE /
   TREE_READONLY flags accordingly.

   FIXME: This can not be done in between gimplify and omp_expand since
   the readonly flag plays a role in deciding what is shared and what
   is not.  Currently we do this transformation as part of the
   ipa-reference pass, but it would make sense to do it before early
   optimizations.  */
514 void
515 ipa_discover_readonly_nonaddressable_vars (void)
517 struct varpool_node *vnode;
518 if (dump_file)
519 fprintf (dump_file, "Clearing variable flags:");
/* Only consider finalized variables whose references are all visible
   and that still carry a flag we might be able to clear or set.  */
520 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
521 if (vnode->finalized && varpool_all_refs_explicit_p (vnode)
522 && (TREE_ADDRESSABLE (vnode->decl) || !TREE_READONLY (vnode->decl)))
524 bool written = false;
525 bool address_taken = false;
526 int i;
527 struct ipa_ref *ref;
/* Scan every reference to the variable; stop early once we know it
   is both written and address-taken.  */
528 for (i = 0; ipa_ref_list_refering_iterate (&vnode->ref_list, i, ref)
529 && (!written || !address_taken); i++)
530 switch (ref->use)
532 case IPA_REF_ADDR:
533 address_taken = true;
534 break;
535 case IPA_REF_LOAD:
536 break;
537 case IPA_REF_STORE:
538 written = true;
539 break;
541 if (TREE_ADDRESSABLE (vnode->decl) && !address_taken)
543 if (dump_file)
544 fprintf (dump_file, " %s (addressable)", varpool_node_name (vnode));
545 TREE_ADDRESSABLE (vnode->decl) = 0;
547 if (!TREE_READONLY (vnode->decl) && !address_taken && !written
548 /* Making variable in explicit section readonly can cause section
549 type conflict.
550 See e.g. gcc.c-torture/compile/pr23237.c */
551 && DECL_SECTION_NAME (vnode->decl) == NULL)
553 if (dump_file)
554 fprintf (dump_file, " %s (read-only)", varpool_node_name (vnode));
555 TREE_READONLY (vnode->decl) = 1;
558 if (dump_file)
559 fprintf (dump_file, "\n");
562 /* Return true when function NODE should be considered externally visible. */
564 static bool
565 cgraph_externally_visible_p (struct cgraph_node *node, bool whole_program)
567 if (!node->local.finalized)
568 return false;
569 if (!DECL_COMDAT (node->decl)
570 && (!TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl)))
571 return false;
572 if (!whole_program)
573 return true;
574 if (DECL_PRESERVE_P (node->decl))
575 return true;
576 /* COMDAT functions must be shared only if they have address taken,
577 otherwise we can produce our own private implementation with
578 -fwhole-program. */
579 if (DECL_COMDAT (node->decl))
581 if (node->address_taken || !node->analyzed)
582 return true;
583 if (node->same_comdat_group)
585 struct cgraph_node *next;
587 /* If more than one function is in the same COMDAT group, it must
588 be shared even if just one function in the comdat group has
589 address taken. */
590 for (next = node->same_comdat_group;
591 next != node;
592 next = next->same_comdat_group)
593 if (next->address_taken || !next->analyzed)
594 return true;
597 if (MAIN_NAME_P (DECL_NAME (node->decl)))
598 return true;
599 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (node->decl)))
600 return true;
601 return false;
604 /* Dissolve the same_comdat_group list in which NODE resides. */
606 static void
607 dissolve_same_comdat_group_list (struct cgraph_node *node)
609 struct cgraph_node *n = node, *next;
612 next = n->same_comdat_group;
613 n->same_comdat_group = NULL;
614 n = next;
616 while (n != node);
/* Mark visibility of all functions and variables.

   A local function is one whose calls can occur only in the current
   compilation unit and all its calls are explicit, so we can change
   its calling convention.  We simply mark all static functions whose
   address is not taken as local.

   We also change the TREE_PUBLIC flag of all declarations that are
   public from the language's point of view but that we want to
   override via visibilities for the backend's point of view.
   WHOLE_PROGRAM is the effective -fwhole-program setting.
   Always returns 0 (no extra TODO flags).  */
630 static unsigned int
631 function_and_variable_visibility (bool whole_program)
633 struct cgraph_node *node;
634 struct varpool_node *vnode;
636 for (node = cgraph_nodes; node; node = node->next)
638 /* C++ FE on lack of COMDAT support create local COMDAT functions
639 (that ought to be shared but can not due to object format
640 limitations). It is neccesary to keep the flag to make rest of C++ FE
641 happy. Clear the flag here to avoid confusion in middle-end. */
642 if (DECL_COMDAT (node->decl) && !TREE_PUBLIC (node->decl))
643 DECL_COMDAT (node->decl) = 0;
644 /* For external decls stop tracking same_comdat_group, it doesn't matter
645 what comdat group they are in when they won't be emitted in this TU,
646 and simplifies later passes. */
647 if (node->same_comdat_group && DECL_EXTERNAL (node->decl))
649 #ifdef ENABLE_CHECKING
650 struct cgraph_node *n;
652 for (n = node->same_comdat_group;
653 n != node;
654 n = n->same_comdat_group)
655 /* If at least one of same comdat group functions is external,
656 all of them have to be, otherwise it is a front-end bug. */
657 gcc_assert (DECL_EXTERNAL (n->decl));
658 #endif
659 dissolve_same_comdat_group_list (node);
661 gcc_assert ((!DECL_WEAK (node->decl) && !DECL_COMDAT (node->decl))
662 || TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl));
663 if (cgraph_externally_visible_p (node, whole_program))
665 gcc_assert (!node->global.inlined_to);
666 node->local.externally_visible = true;
668 else
669 node->local.externally_visible = false;
/* Functions that end up not externally visible get truly local
   linkage, including all their same-body aliases.  */
670 if (!node->local.externally_visible && node->analyzed
671 && !DECL_EXTERNAL (node->decl))
673 struct cgraph_node *alias;
674 gcc_assert (whole_program || !TREE_PUBLIC (node->decl));
675 cgraph_make_decl_local (node->decl);
676 for (alias = node->same_body; alias; alias = alias->next)
677 cgraph_make_decl_local (alias->decl);
678 if (node->same_comdat_group)
679 /* cgraph_externally_visible_p has already checked all other nodes
680 in the group and they will all be made local. We need to
681 dissolve the group at once so that the predicate does not
682 segfault though. */
683 dissolve_same_comdat_group_list (node);
685 node->local.local = cgraph_local_node_p (node);
687 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
689 /* weak flag makes no sense on local variables. */
690 gcc_assert (!DECL_WEAK (vnode->decl)
691 || TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl));
692 /* In several cases declarations can not be common:
694 - when declaration has initializer
695 - when it is in weak
696 - when it has specific section
697 - when it resides in non-generic address space.
698 - if declaration is local, it will get into .local common section
699 so common flag is not needed. Frontends still produce these in
700 certain cases, such as for:
702 static int a __attribute__ ((common))
704 Canonicalize things here and clear the redundant flag. */
705 if (DECL_COMMON (vnode->decl)
706 && (!(TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl))
707 || (DECL_INITIAL (vnode->decl)
708 && DECL_INITIAL (vnode->decl) != error_mark_node)
709 || DECL_WEAK (vnode->decl)
710 || DECL_SECTION_NAME (vnode->decl) != NULL
711 || ! (ADDR_SPACE_GENERIC_P
712 (TYPE_ADDR_SPACE (TREE_TYPE (vnode->decl))))))
713 DECL_COMMON (vnode->decl) = 0;
715 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
717 if (!vnode->finalized)
718 continue;
719 if (vnode->needed
720 && (DECL_COMDAT (vnode->decl) || TREE_PUBLIC (vnode->decl))
721 && (!whole_program
722 /* We can privatize comdat readonly variables whose address is not taken,
723 but doing so is not going to bring us optimization oppurtunities until
724 we start reordering datastructures. */
725 || DECL_COMDAT (vnode->decl)
726 || DECL_WEAK (vnode->decl)
727 || lookup_attribute ("externally_visible",
728 DECL_ATTRIBUTES (vnode->decl))))
729 vnode->externally_visible = true;
730 else
731 vnode->externally_visible = false;
732 if (!vnode->externally_visible)
734 gcc_assert (whole_program || !TREE_PUBLIC (vnode->decl));
735 cgraph_make_decl_local (vnode->decl);
737 gcc_assert (TREE_STATIC (vnode->decl));
740 if (dump_file)
742 fprintf (dump_file, "\nMarking local functions:");
743 for (node = cgraph_nodes; node; node = node->next)
744 if (node->local.local)
745 fprintf (dump_file, " %s", cgraph_node_name (node));
746 fprintf (dump_file, "\n\n");
747 fprintf (dump_file, "\nMarking externally visible functions:");
748 for (node = cgraph_nodes; node; node = node->next)
749 if (node->local.externally_visible)
750 fprintf (dump_file, " %s", cgraph_node_name (node));
751 fprintf (dump_file, "\n\n");
752 fprintf (dump_file, "\nMarking externally visible variables:");
753 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
754 if (vnode->externally_visible)
755 fprintf (dump_file, " %s", varpool_node_name (vnode));
756 fprintf (dump_file, "\n\n");
758 cgraph_function_flags_ready = true;
759 return 0;
762 /* Local function pass handling visibilities. This happens before LTO streaming
763 so in particular -fwhole-program should be ignored at this level. */
765 static unsigned int
766 local_function_and_variable_visibility (void)
768 return function_and_variable_visibility (flag_whole_program && !flag_lto && !flag_whopr);
/* Pass descriptor for the early (pre-LTO) visibility computation; the
   execute hook is local_function_and_variable_visibility.  */
771 struct simple_ipa_opt_pass pass_ipa_function_and_variable_visibility =
774 SIMPLE_IPA_PASS,
775 "visibility", /* name */
776 NULL, /* gate */
777 local_function_and_variable_visibility,/* execute */
778 NULL, /* sub */
779 NULL, /* next */
780 0, /* static_pass_number */
781 TV_CGRAPHOPT, /* tv_id */
782 0, /* properties_required */
783 0, /* properties_provided */
784 0, /* properties_destroyed */
785 0, /* todo_flags_start */
786 TODO_remove_functions | TODO_dump_cgraph
787 | TODO_ggc_collect /* todo_flags_finish */
791 /* Do not re-run on ltrans stage. */
793 static bool
794 gate_whole_program_function_and_variable_visibility (void)
796 return !flag_ltrans;
799 /* Bring functionss local at LTO time whith -fwhole-program. */
801 static unsigned int
802 whole_program_function_and_variable_visibility (void)
804 struct cgraph_node *node;
805 struct varpool_node *vnode;
807 function_and_variable_visibility (flag_whole_program);
809 for (node = cgraph_nodes; node; node = node->next)
810 if ((node->local.externally_visible && !DECL_COMDAT (node->decl))
811 && node->local.finalized)
812 cgraph_mark_needed_node (node);
813 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
814 if (vnode->externally_visible && !DECL_COMDAT (vnode->decl))
815 varpool_mark_needed_node (vnode);
816 if (dump_file)
818 fprintf (dump_file, "\nNeeded variables:");
819 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
820 if (vnode->needed)
821 fprintf (dump_file, " %s", varpool_node_name (vnode));
822 fprintf (dump_file, "\n\n");
824 return 0;
/* IPA pass descriptor: recompute visibility with -fwhole-program at
   LTO link time; the gate skips the ltrans stage of WHOPR.  */
827 struct ipa_opt_pass_d pass_ipa_whole_program_visibility =
830 IPA_PASS,
831 "whole-program", /* name */
832 gate_whole_program_function_and_variable_visibility,/* gate */
833 whole_program_function_and_variable_visibility,/* execute */
834 NULL, /* sub */
835 NULL, /* next */
836 0, /* static_pass_number */
837 TV_CGRAPHOPT, /* tv_id */
838 0, /* properties_required */
839 0, /* properties_provided */
840 0, /* properties_destroyed */
841 0, /* todo_flags_start */
842 TODO_remove_functions | TODO_dump_cgraph
843 | TODO_ggc_collect /* todo_flags_finish */
845 NULL, /* generate_summary */
846 NULL, /* write_summary */
847 NULL, /* read_summary */
848 NULL, /* write_optimization_summary */
849 NULL, /* read_optimization_summary */
850 NULL, /* stmt_fixup */
851 0, /* TODOs */
852 NULL, /* function_transform */
853 NULL, /* variable_transform */
856 /* Hash a cgraph node set element. */
858 static hashval_t
859 hash_cgraph_node_set_element (const void *p)
861 const_cgraph_node_set_element element = (const_cgraph_node_set_element) p;
862 return htab_hash_pointer (element->node);
865 /* Compare two cgraph node set elements. */
867 static int
868 eq_cgraph_node_set_element (const void *p1, const void *p2)
870 const_cgraph_node_set_element e1 = (const_cgraph_node_set_element) p1;
871 const_cgraph_node_set_element e2 = (const_cgraph_node_set_element) p2;
873 return e1->node == e2->node;
876 /* Create a new cgraph node set. */
878 cgraph_node_set
879 cgraph_node_set_new (void)
881 cgraph_node_set new_node_set;
883 new_node_set = GGC_NEW (struct cgraph_node_set_def);
884 new_node_set->hashtab = htab_create_ggc (10,
885 hash_cgraph_node_set_element,
886 eq_cgraph_node_set_element,
887 NULL);
888 new_node_set->nodes = NULL;
889 return new_node_set;
892 /* Add cgraph_node NODE to cgraph_node_set SET. */
894 void
895 cgraph_node_set_add (cgraph_node_set set, struct cgraph_node *node)
897 void **slot;
898 cgraph_node_set_element element;
899 struct cgraph_node_set_element_def dummy;
901 dummy.node = node;
902 slot = htab_find_slot (set->hashtab, &dummy, INSERT);
904 if (*slot != HTAB_EMPTY_ENTRY)
906 element = (cgraph_node_set_element) *slot;
907 gcc_assert (node == element->node
908 && (VEC_index (cgraph_node_ptr, set->nodes, element->index)
909 == node));
910 return;
913 /* Insert node into hash table. */
914 element =
915 (cgraph_node_set_element) GGC_NEW (struct cgraph_node_set_element_def);
916 element->node = node;
917 element->index = VEC_length (cgraph_node_ptr, set->nodes);
918 *slot = element;
920 /* Insert into node vector. */
921 VEC_safe_push (cgraph_node_ptr, gc, set->nodes, node);
/* Remove cgraph_node NODE from cgraph_node_set SET.  Removal is O(1):
   NODE's vector slot is refilled with the vector's last element, and
   NODE's hash-table entry is cleared and freed.  */
926 void
927 cgraph_node_set_remove (cgraph_node_set set, struct cgraph_node *node)
929 void **slot, **last_slot;
930 cgraph_node_set_element element, last_element;
931 struct cgraph_node *last_node;
932 struct cgraph_node_set_element_def dummy;
934 dummy.node = node;
935 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
/* NODE is not in the set; nothing to do.  */
936 if (slot == NULL)
937 return;
939 element = (cgraph_node_set_element) *slot;
940 gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
941 == node);
943 /* Remove from vector. We do this by swapping node with the last element
944 of the vector. */
945 last_node = VEC_pop (cgraph_node_ptr, set->nodes);
946 if (last_node != node)
948 dummy.node = last_node;
949 last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
950 last_element = (cgraph_node_set_element) *last_slot;
951 gcc_assert (last_element);
953 /* Move the last element to the original spot of NODE. */
954 last_element->index = element->index;
955 VEC_replace (cgraph_node_ptr, set->nodes, last_element->index,
956 last_node);
959 /* Remove element from hash table. */
960 htab_clear_slot (set->hashtab, slot);
961 ggc_free (element);
964 /* Find NODE in SET and return an iterator to it if found. A null iterator
965 is returned if NODE is not in SET. */
967 cgraph_node_set_iterator
968 cgraph_node_set_find (cgraph_node_set set, struct cgraph_node *node)
970 void **slot;
971 struct cgraph_node_set_element_def dummy;
972 cgraph_node_set_element element;
973 cgraph_node_set_iterator csi;
975 dummy.node = node;
976 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
977 if (slot == NULL)
978 csi.index = (unsigned) ~0;
979 else
981 element = (cgraph_node_set_element) *slot;
982 gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
983 == node);
984 csi.index = element->index;
986 csi.set = set;
988 return csi;
991 /* Dump content of SET to file F. */
993 void
994 dump_cgraph_node_set (FILE *f, cgraph_node_set set)
996 cgraph_node_set_iterator iter;
998 for (iter = csi_start (set); !csi_end_p (iter); csi_next (&iter))
1000 struct cgraph_node *node = csi_node (iter);
1001 dump_cgraph_node (f, node);
1005 /* Dump content of SET to stderr. */
1007 void
1008 debug_cgraph_node_set (cgraph_node_set set)
1010 dump_cgraph_node_set (stderr, set);
1013 /* Hash a varpool node set element. */
1015 static hashval_t
1016 hash_varpool_node_set_element (const void *p)
1018 const_varpool_node_set_element element = (const_varpool_node_set_element) p;
1019 return htab_hash_pointer (element->node);
1022 /* Compare two varpool node set elements. */
1024 static int
1025 eq_varpool_node_set_element (const void *p1, const void *p2)
1027 const_varpool_node_set_element e1 = (const_varpool_node_set_element) p1;
1028 const_varpool_node_set_element e2 = (const_varpool_node_set_element) p2;
1030 return e1->node == e2->node;
1033 /* Create a new varpool node set. */
1035 varpool_node_set
1036 varpool_node_set_new (void)
1038 varpool_node_set new_node_set;
1040 new_node_set = GGC_NEW (struct varpool_node_set_def);
1041 new_node_set->hashtab = htab_create_ggc (10,
1042 hash_varpool_node_set_element,
1043 eq_varpool_node_set_element,
1044 NULL);
1045 new_node_set->nodes = NULL;
1046 return new_node_set;
1049 /* Add varpool_node NODE to varpool_node_set SET. */
1051 void
1052 varpool_node_set_add (varpool_node_set set, struct varpool_node *node)
1054 void **slot;
1055 varpool_node_set_element element;
1056 struct varpool_node_set_element_def dummy;
1058 dummy.node = node;
1059 slot = htab_find_slot (set->hashtab, &dummy, INSERT);
1061 if (*slot != HTAB_EMPTY_ENTRY)
1063 element = (varpool_node_set_element) *slot;
1064 gcc_assert (node == element->node
1065 && (VEC_index (varpool_node_ptr, set->nodes, element->index)
1066 == node));
1067 return;
1070 /* Insert node into hash table. */
1071 element =
1072 (varpool_node_set_element) GGC_NEW (struct varpool_node_set_element_def);
1073 element->node = node;
1074 element->index = VEC_length (varpool_node_ptr, set->nodes);
1075 *slot = element;
1077 /* Insert into node vector. */
1078 VEC_safe_push (varpool_node_ptr, gc, set->nodes, node);
/* Remove varpool_node NODE from varpool_node_set SET.  Removal is
   O(1): NODE's vector slot is refilled with the vector's last element,
   and NODE's hash-table entry is cleared and freed.  */
1083 void
1084 varpool_node_set_remove (varpool_node_set set, struct varpool_node *node)
1086 void **slot, **last_slot;
1087 varpool_node_set_element element, last_element;
1088 struct varpool_node *last_node;
1089 struct varpool_node_set_element_def dummy;
1091 dummy.node = node;
1092 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
/* NODE is not in the set; nothing to do.  */
1093 if (slot == NULL)
1094 return;
1096 element = (varpool_node_set_element) *slot;
1097 gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
1098 == node);
1100 /* Remove from vector. We do this by swapping node with the last element
1101 of the vector. */
1102 last_node = VEC_pop (varpool_node_ptr, set->nodes);
1103 if (last_node != node)
1105 dummy.node = last_node;
1106 last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1107 last_element = (varpool_node_set_element) *last_slot;
1108 gcc_assert (last_element);
1110 /* Move the last element to the original spot of NODE. */
1111 last_element->index = element->index;
1112 VEC_replace (varpool_node_ptr, set->nodes, last_element->index,
1113 last_node);
1116 /* Remove element from hash table. */
1117 htab_clear_slot (set->hashtab, slot);
1118 ggc_free (element);
1121 /* Find NODE in SET and return an iterator to it if found. A null iterator
1122 is returned if NODE is not in SET. */
1124 varpool_node_set_iterator
1125 varpool_node_set_find (varpool_node_set set, struct varpool_node *node)
1127 void **slot;
1128 struct varpool_node_set_element_def dummy;
1129 varpool_node_set_element element;
1130 varpool_node_set_iterator vsi;
1132 dummy.node = node;
1133 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1134 if (slot == NULL)
1135 vsi.index = (unsigned) ~0;
1136 else
1138 element = (varpool_node_set_element) *slot;
1139 gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
1140 == node);
1141 vsi.index = element->index;
1143 vsi.set = set;
1145 return vsi;
1148 /* Dump content of SET to file F. */
1150 void
1151 dump_varpool_node_set (FILE *f, varpool_node_set set)
1153 varpool_node_set_iterator iter;
1155 for (iter = vsi_start (set); !vsi_end_p (iter); vsi_next (&iter))
1157 struct varpool_node *node = vsi_node (iter);
1158 dump_varpool_node (f, node);
1162 /* Dump content of SET to stderr. */
1164 void
1165 debug_varpool_node_set (varpool_node_set set)
1167 dump_varpool_node_set (stderr, set);
1171 /* Simple ipa profile pass propagating frequencies across the callgraph. */
1173 static unsigned int
1174 ipa_profile (void)
1176 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1177 struct cgraph_edge *e;
1178 int order_pos;
1179 bool something_changed = false;
1180 int i;
1182 order_pos = cgraph_postorder (order);
1183 for (i = order_pos - 1; i >= 0; i--)
1185 if (order[i]->local.local && cgraph_propagate_frequency (order[i]))
1187 for (e = order[i]->callees; e; e = e->next_callee)
1188 if (e->callee->local.local && !e->callee->aux)
1190 something_changed = true;
1191 e->callee->aux = (void *)1;
1194 order[i]->aux = NULL;
1197 while (something_changed)
1199 something_changed = false;
1200 for (i = order_pos - 1; i >= 0; i--)
1202 if (order[i]->aux && cgraph_propagate_frequency (order[i]))
1204 for (e = order[i]->callees; e; e = e->next_callee)
1205 if (e->callee->local.local && !e->callee->aux)
1207 something_changed = true;
1208 e->callee->aux = (void *)1;
1211 order[i]->aux = NULL;
1214 free (order);
1215 return 0;
1218 static bool
1219 gate_ipa_profile (void)
1221 return flag_ipa_profile;
1224 struct ipa_opt_pass_d pass_ipa_profile =
1227 IPA_PASS,
1228 "ipa-profile", /* name */
1229 gate_ipa_profile, /* gate */
1230 ipa_profile, /* execute */
1231 NULL, /* sub */
1232 NULL, /* next */
1233 0, /* static_pass_number */
1234 TV_IPA_PROFILE, /* tv_id */
1235 0, /* properties_required */
1236 0, /* properties_provided */
1237 0, /* properties_destroyed */
1238 0, /* todo_flags_start */
1239 0 /* todo_flags_finish */
1241 NULL, /* generate_summary */
1242 NULL, /* write_summary */
1243 NULL, /* read_summary */
1244 NULL, /* write_optimization_summary */
1245 NULL, /* read_optimization_summary */
1246 NULL, /* stmt_fixup */
1247 0, /* TODOs */
1248 NULL, /* function_transform */
1249 NULL /* variable_transform */