2010-07-27 Paolo Carlini <paolo.carlini@oracle.com>
[official-gcc/alias-decl.git] / gcc / ipa.c
blobbac3406be396a8b573d5314475c73d277fb7e569
1 /* Basic IPA optimizations and utilities.
2 Copyright (C) 2003, 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "cgraph.h"
26 #include "tree-pass.h"
27 #include "timevar.h"
28 #include "gimple.h"
29 #include "ggc.h"
30 #include "flags.h"
31 #include "pointer-set.h"
33 /* Fill array order with all nodes with output flag set in the reverse
34 topological order. */
/* Returns the number of entries written to ORDER.  ORDER must have room
   for cgraph_n_nodes entries.  Uses each node's AUX field as DFS scratch
   state and clears it again before returning.  */
36 int
37 cgraph_postorder (struct cgraph_node **order)
39 struct cgraph_node *node, *node2;
40 int stack_size = 0;
41 int order_pos = 0;
/* The address of LAST is a sentinel AUX value meaning "every caller edge
   of this node has already been walked".  */
42 struct cgraph_edge *edge, last;
43 int pass;
45 struct cgraph_node **stack =
46 XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
48 /* We have to deal with cycles nicely, so use a depth first traversal
49 output algorithm. Ignore the fact that some functions won't need
50 to be output and put them into order as well, so we get dependencies
51 right through inline functions. */
52 for (node = cgraph_nodes; node; node = node->next)
53 node->aux = NULL;
/* Pass 0 seeds DFS walks only from roots (nodes not merely reachable
   through direct calls and not address-taken); pass 1 sweeps up whatever
   remains, e.g. nodes that lie on call cycles.  */
54 for (pass = 0; pass < 2; pass++)
55 for (node = cgraph_nodes; node; node = node->next)
56 if (!node->aux
57 && (pass
58 || (!cgraph_only_called_directly_p (node)
59 && !node->address_taken)))
61 node2 = node;
62 if (!node->callers)
63 node->aux = &last;
64 else
65 node->aux = node->callers;
/* Iterative DFS over caller edges; AUX holds the next caller edge
   still to be visited for each node currently on the walk.  */
66 while (node2)
68 while (node2->aux != &last)
70 edge = (struct cgraph_edge *) node2->aux;
71 if (edge->next_caller)
72 node2->aux = edge->next_caller;
73 else
74 node2->aux = &last;
75 /* Break possible cycles involving always-inline
76 functions by ignoring edges from always-inline
77 functions to non-always-inline functions. */
78 if (edge->caller->local.disregard_inline_limits
79 && !edge->callee->local.disregard_inline_limits)
80 continue;
81 if (!edge->caller->aux)
83 if (!edge->caller->callers)
84 edge->caller->aux = &last;
85 else
86 edge->caller->aux = edge->caller->callers;
/* Descend into the unvisited caller; remember where to resume.  */
87 stack[stack_size++] = node2;
88 node2 = edge->caller;
89 break;
/* All callers of NODE2 were processed: emit it and pop the stack.  */
92 if (node2->aux == &last)
94 order[order_pos++] = node2;
95 if (stack_size)
96 node2 = stack[--stack_size];
97 else
98 node2 = NULL;
102 free (stack);
/* Leave AUX NULL so later passes can reuse the scratch field.  */
103 for (node = cgraph_nodes; node; node = node->next)
104 node->aux = NULL;
105 return order_pos;
108 /* Look for all functions inlined to NODE and update their inlined_to pointers
109 to INLINED_TO. */
/* Recurses through NODE's callees, rewriting global.inlined_to for every
   callee that is itself an inline clone (inlined_to non-NULL).  */
111 static void
112 update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
114 struct cgraph_edge *e;
115 for (e = node->callees; e; e = e->next_callee)
116 if (e->callee->global.inlined_to)
118 e->callee->global.inlined_to = inlined_to;
119 update_inlined_to_pointer (e->callee, inlined_to);
123 /* Add cgraph NODE to queue starting at FIRST.
125 The queue is linked via AUX pointers and terminated by pointer to 1.
126 We enqueue nodes at two occasions: when we find them reachable or when we find
127 their bodies needed for further clonning. In the second case we mark them
128 by pointer to 2 after processing so they are re-queue when they become
129 reachable. */
131 static void
132 enqueue_cgraph_node (struct cgraph_node *node, struct cgraph_node **first)
/* AUX encodes the node's queue state: NULL = never queued, (void *)2 =
   already processed as unreachable, anything else = currently in queue.  */
134 /* Node is still in queue; do nothing. */
135 if (node->aux && node->aux != (void *) 2)
136 return;
137 /* Node was already processed as unreachable, re-enqueue
138 only if it became reachable now. */
139 if (node->aux == (void *)2 && !node->reachable)
140 return;
/* Push onto the front of the singly-linked worklist.  */
141 node->aux = *first;
142 *first = node;
145 /* Add varpool NODE to queue starting at FIRST. */
/* Unconditional push onto the front of the AUX-linked worklist; callers
   are responsible for not enqueueing a node twice.  */
147 static void
148 enqueue_varpool_node (struct varpool_node *node, struct varpool_node **first)
150 node->aux = *first;
151 *first = node;
154 /* Process references. */
/* Walk LIST and propagate reachability: referenced cgraph nodes become
   reachable (and are queued on FIRST) unless they are external decls past
   the inlining point; referenced varpool nodes become needed (and are
   queued on FIRST_VARPOOL).  */
156 static void
157 process_references (struct ipa_ref_list *list,
158 struct cgraph_node **first,
159 struct varpool_node **first_varpool,
160 bool before_inlining_p)
162 int i;
163 struct ipa_ref *ref;
164 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
166 if (ref->refered_type == IPA_REF_CGRAPH)
168 struct cgraph_node *node = ipa_ref_node (ref);
/* After inlining, references to external functions do not keep
   them live — we will never output their bodies anyway.  */
169 if (!node->reachable
170 && (!DECL_EXTERNAL (node->decl)
171 || before_inlining_p))
173 node->reachable = true;
174 enqueue_cgraph_node (node, first);
177 else
179 struct varpool_node *node = ipa_ref_varpool_node (ref);
180 if (!node->needed)
182 varpool_mark_needed_node (node);
183 enqueue_varpool_node (node, first_varpool);
189 /* Return true when variable NODE can be removed from the varpool
190 if all references to it are eliminated. */
192 static inline bool
193 varpool_can_remove_if_no_refs (struct varpool_node *node)
195 return (!node->force_output && !node->used_from_other_partition
196 && (DECL_COMDAT (node->decl) || !node->externally_visible));
199 /* Return true when function can be marked local. */
/* A local function is only called directly within this translation unit,
   has an analyzed body, is not an external decl, and does not cross LTO
   partition boundaries — so its calling convention may be changed.  */
201 static bool
202 cgraph_local_node_p (struct cgraph_node *node)
204 return (cgraph_only_called_directly_p (node)
205 && node->analyzed
206 && !DECL_EXTERNAL (node->decl)
207 && !node->local.externally_visible
208 && !node->reachable_from_other_partition
209 && !node->in_other_partition);
212 /* Perform reachability analysis and reclaim all unreachable nodes.
213 If BEFORE_INLINING_P is true this function is called before inlining
214 decisions has been made. If BEFORE_INLINING_P is false this function also
215 removes unneeded bodies of extern inline functions. */
/* Returns true if anything was removed or changed.  Structure:
   1) seed worklists with nodes/variables that must be kept,
   2) propagate reachability over call edges, IPA references and
      comdat groups until both worklists drain,
   3) remove or strip the nodes that stayed unreachable,
   4) (when optimizing) reclaim unneeded variables and clear
      address_taken flags, promoting functions to local.  */
217 bool
218 cgraph_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
/* Both worklists use (void *)1 as their end-of-list sentinel.  */
220 struct cgraph_node *first = (struct cgraph_node *) (void *) 1;
221 struct varpool_node *first_varpool = (struct varpool_node *) (void *) 1;
222 struct cgraph_node *node, *next;
223 struct varpool_node *vnode, *vnext;
224 bool changed = false;
226 #ifdef ENABLE_CHECKING
227 verify_cgraph ();
228 #endif
229 if (file)
230 fprintf (file, "\nReclaiming functions:");
231 #ifdef ENABLE_CHECKING
232 for (node = cgraph_nodes; node; node = node->next)
233 gcc_assert (!node->aux);
234 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
235 gcc_assert (!vnode->aux);
236 #endif
237 varpool_reset_queue ();
/* Phase 1: seed the function worklist with roots — nodes that cannot be
   removed even with no direct calls/refs (except external decls once
   inlining decisions were made).  */
238 for (node = cgraph_nodes; node; node = node->next)
239 if (!cgraph_can_remove_if_no_direct_calls_and_refs_p (node)
240 && ((!DECL_EXTERNAL (node->decl))
241 || before_inlining_p))
243 gcc_assert (!node->global.inlined_to);
244 enqueue_cgraph_node (node, &first);
245 node->reachable = true;
247 else
249 gcc_assert (!node->aux);
250 node->reachable = false;
/* Seed the variable worklist likewise; the needed-list links are rebuilt
   from scratch by varpool_mark_needed_node.  */
252 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
254 vnode->next_needed = NULL;
255 vnode->prev_needed = NULL;
256 if (!varpool_can_remove_if_no_refs (vnode))
258 vnode->needed = false;
259 varpool_mark_needed_node (vnode);
260 enqueue_varpool_node (vnode, &first_varpool);
262 else
263 vnode->needed = false;
266 /* Perform reachability analysis. As a special case do not consider
267 extern inline functions not inlined as live because we won't output
268 them at all.
270 We maintain two worklist, one for cgraph nodes other for varpools and
271 are finished once both are empty. */
273 while (first != (struct cgraph_node *) (void *) 1
274 || first_varpool != (struct varpool_node *) (void *) 1)
276 if (first != (struct cgraph_node *) (void *) 1)
278 struct cgraph_edge *e;
279 node = first;
280 first = (struct cgraph_node *) first->aux;
/* Mark a node processed-while-unreachable so enqueue_cgraph_node can
   re-queue it if it later becomes reachable.  */
281 if (!node->reachable)
282 node->aux = (void *)2;
284 /* If we found this node reachable, first mark on the callees
285 reachable too, unless they are direct calls to extern inline functions
286 we decided to not inline. */
287 if (node->reachable)
289 for (e = node->callees; e; e = e->next_callee)
290 if (!e->callee->reachable
291 && node->analyzed
292 && (!e->inline_failed || !e->callee->analyzed
293 || (!DECL_EXTERNAL (e->callee->decl))
294 || before_inlining_p))
296 e->callee->reachable = true;
297 enqueue_cgraph_node (e->callee, &first);
299 process_references (&node->ref_list, &first, &first_varpool, before_inlining_p);
302 /* If any function in a comdat group is reachable, force
303 all other functions in the same comdat group to be
304 also reachable. */
305 if (node->same_comdat_group
306 && node->reachable
307 && !node->global.inlined_to)
309 for (next = node->same_comdat_group;
310 next != node;
311 next = next->same_comdat_group)
312 if (!next->reachable)
314 next->reachable = true;
315 enqueue_cgraph_node (next, &first);
319 /* We can freely remove inline clones even if they are cloned, however if
320 function is clone of real clone, we must keep it around in order to
321 make materialize_clones produce function body with the changes
322 applied. */
323 while (node->clone_of && !node->clone_of->aux
324 && !gimple_has_body_p (node->decl))
326 bool noninline = node->clone_of->decl != node->decl;
327 node = node->clone_of;
328 if (noninline && !node->reachable && !node->aux)
330 enqueue_cgraph_node (node, &first);
331 break;
/* Drain one varpool worklist entry; its references may in turn queue
   new function nodes, hence the outer loop over both lists.  */
335 if (first_varpool != (struct varpool_node *) (void *) 1)
337 vnode = first_varpool;
338 first_varpool = (struct varpool_node *)first_varpool->aux;
339 vnode->aux = NULL;
340 process_references (&vnode->ref_list, &first, &first_varpool, before_inlining_p);
341 /* If any function in a comdat group is reachable, force
342 all other functions in the same comdat group to be
343 also reachable. */
344 if (vnode->same_comdat_group)
346 struct varpool_node *next;
347 for (next = vnode->same_comdat_group;
348 next != vnode;
349 next = next->same_comdat_group)
350 if (!next->needed)
352 varpool_mark_needed_node (next);
353 enqueue_varpool_node (next, &first_varpool);
359 /* Remove unreachable nodes.
361 Completely unreachable functions can be fully removed from the callgraph.
362 Extern inline functions that we decided to not inline need to become unanalyzed nodes of
363 callgraph (so we still have edges to them). We remove function body then.
365 Also we need to care functions that are unreachable but we need to keep them around
366 for later clonning. In this case we also turn them to unanalyzed nodes, but
367 keep the body around. */
368 for (node = cgraph_nodes; node; node = next)
370 next = node->next;
/* AUX set but not reachable: node was kept only for cloning; strip its
   edges/refs and demote it to an unanalyzed node, keeping the body.  */
371 if (node->aux && !node->reachable)
373 cgraph_node_remove_callees (node);
374 ipa_remove_all_references (&node->ref_list);
375 node->analyzed = false;
376 node->local.inlinable = false;
/* AUX clear: node was never queued at all — it is fully unreachable.  */
378 if (!node->aux)
380 node->global.inlined_to = NULL;
381 if (file)
382 fprintf (file, " %s", cgraph_node_name (node));
383 if (!node->analyzed || !DECL_EXTERNAL (node->decl) || before_inlining_p)
384 cgraph_remove_node (node);
385 else
387 struct cgraph_edge *e;
389 /* See if there is reachable caller. */
390 for (e = node->callers; e; e = e->next_caller)
391 if (e->caller->reachable)
392 break;
394 /* If so, we need to keep node in the callgraph. */
395 if (e || node->needed)
397 struct cgraph_node *clone;
399 /* If there are still clones, we must keep body around.
400 Otherwise we can just remove the body but keep the clone. */
401 for (clone = node->clones; clone;
402 clone = clone->next_sibling_clone)
403 if (clone->aux)
404 break;
405 if (!clone)
407 cgraph_release_function_body (node);
408 node->analyzed = false;
409 node->local.inlinable = false;
411 else
412 gcc_assert (!clone->in_other_partition);
413 cgraph_node_remove_callees (node);
414 ipa_remove_all_references (&node->ref_list);
/* Unlink NODE from its clone-sibling list before orphaning it.  */
415 if (node->prev_sibling_clone)
416 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
417 else if (node->clone_of)
418 node->clone_of->clones = node->next_sibling_clone;
419 if (node->next_sibling_clone)
420 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
421 node->clone_of = NULL;
422 node->next_sibling_clone = NULL;
423 node->prev_sibling_clone = NULL;
425 else
426 cgraph_remove_node (node);
428 changed = true;
431 for (node = cgraph_nodes; node; node = node->next)
433 /* Inline clones might be kept around so their materializing allows further
434 cloning. If the function the clone is inlined into is removed, we need
435 to turn it into normal cone. */
436 if (node->global.inlined_to
437 && !node->callers)
439 gcc_assert (node->clones);
440 node->global.inlined_to = NULL;
441 update_inlined_to_pointer (node, node);
443 node->aux = NULL;
446 if (file)
447 fprintf (file, "\n");
449 /* We must release unused extern inlines or sanity checking will fail. Rest of transformations
450 are undesirable at -O0 since we do not want to remove anything. */
451 if (!optimize)
452 return changed;
454 if (file)
455 fprintf (file, "Reclaiming variables:");
456 for (vnode = varpool_nodes; vnode; vnode = vnext)
458 vnext = vnode->next;
459 if (!vnode->needed)
461 if (file)
462 fprintf (file, " %s", varpool_node_name (vnode));
463 varpool_remove_node (vnode);
464 changed = true;
468 /* Now update address_taken flags and try to promote functions to be local. */
470 if (file)
471 fprintf (file, "\nClearing address taken flags:");
472 for (node = cgraph_nodes; node; node = node->next)
473 if (node->address_taken
474 && !node->reachable_from_other_partition)
476 int i;
477 struct ipa_ref *ref;
478 bool found = false;
/* FOUND becomes true if any remaining incoming reference still takes
   the node's address; only address references are expected here.  */
479 for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref)
480 && !found; i++)
482 gcc_assert (ref->use == IPA_REF_ADDR);
483 found = true;
485 if (!found)
487 if (file)
488 fprintf (file, " %s", cgraph_node_name (node));
489 node->address_taken = false;
490 changed = true;
491 if (cgraph_local_node_p (node))
493 node->local.local = true;
494 if (file)
495 fprintf (file, " (local)");
500 #ifdef ENABLE_CHECKING
501 verify_cgraph ();
502 #endif
504 /* Reclaim alias pairs for functions that have disappeared from the
505 call graph. */
506 remove_unreachable_alias_pairs ();
508 return changed;
511 /* Discover variables that have no longer address taken or that are read only
512 and update their flags.
514 FIXME: This can not be done in between gimplify and omp_expand since
515 readonly flag plays role on what is shared and what is not. Currently we do
516 this transformation as part of whole program visibility and re-do at
517 ipa-reference pass (to take into account clonning), but it would
518 make sense to do it before early optimizations. */
520 void
521 ipa_discover_readonly_nonaddressable_vars (void)
523 struct varpool_node *vnode;
524 if (dump_file)
525 fprintf (dump_file, "Clearing variable flags:");
/* Only variables whose references are all visible to IPA analysis can be
   safely re-flagged; skip the ones already non-addressable and readonly.  */
526 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
527 if (vnode->finalized && varpool_all_refs_explicit_p (vnode)
528 && (TREE_ADDRESSABLE (vnode->decl) || !TREE_READONLY (vnode->decl)))
530 bool written = false;
531 bool address_taken = false;
532 int i;
533 struct ipa_ref *ref;
/* Classify every incoming reference; stop early once we know the
   variable is both written and address-taken.  */
534 for (i = 0; ipa_ref_list_refering_iterate (&vnode->ref_list, i, ref)
535 && (!written || !address_taken); i++)
536 switch (ref->use)
538 case IPA_REF_ADDR:
539 address_taken = true;
540 break;
541 case IPA_REF_LOAD:
542 break;
543 case IPA_REF_STORE:
544 written = true;
545 break;
547 if (TREE_ADDRESSABLE (vnode->decl) && !address_taken)
549 if (dump_file)
550 fprintf (dump_file, " %s (addressable)", varpool_node_name (vnode));
551 TREE_ADDRESSABLE (vnode->decl) = 0;
553 if (!TREE_READONLY (vnode->decl) && !address_taken && !written
554 /* Making variable in explicit section readonly can cause section
555 type conflict.
556 See e.g. gcc.c-torture/compile/pr23237.c */
557 && DECL_SECTION_NAME (vnode->decl) == NULL)
559 if (dump_file)
560 fprintf (dump_file, " %s (read-only)", varpool_node_name (vnode));
561 TREE_READONLY (vnode->decl) = 1;
564 if (dump_file)
565 fprintf (dump_file, "\n");
568 /* Return true when function NODE should be considered externally visible. */
/* WHOLE_PROGRAM is the -fwhole-program setting in effect for this run;
   ALIASED means NODE is the target of an alias pair, which forces
   conservative treatment.  */
570 static bool
571 cgraph_externally_visible_p (struct cgraph_node *node, bool whole_program, bool aliased)
573 if (!node->local.finalized)
574 return false;
/* Non-public (or external, non-comdat) decls are never externally
   visible.  */
575 if (!DECL_COMDAT (node->decl)
576 && (!TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl)))
577 return false;
579 /* Do not even try to be smart about aliased nodes. Until we properly
580 represent everything by same body alias, these are just evil. */
581 if (aliased)
582 return true;
584 /* When doing link time optimizations, hidden symbols become local. */
585 if (in_lto_p && DECL_VISIBILITY (node->decl) == VISIBILITY_HIDDEN
586 /* Be sure that node is defined in IR file, not in other object
587 file. In that case we don't set used_from_other_object_file. */
588 && node->analyzed)
/* Fall through to the remaining checks below (preserve/main/attribute).  */
590 else if (!whole_program)
591 return true;
592 /* COMDAT functions must be shared only if they have address taken,
593 otherwise we can produce our own private implementation with
594 -fwhole-program. */
595 else if (DECL_COMDAT (node->decl))
597 if (node->address_taken || !node->analyzed)
598 return true;
599 if (node->same_comdat_group)
601 struct cgraph_node *next;
603 /* If more than one function is in the same COMDAT group, it must
604 be shared even if just one function in the comdat group has
605 address taken. */
606 for (next = node->same_comdat_group;
607 next != node;
608 next = next->same_comdat_group)
609 if (next->address_taken || !next->analyzed)
610 return true;
613 if (node->local.used_from_object_file)
614 return true;
615 if (DECL_PRESERVE_P (node->decl))
616 return true;
617 if (MAIN_NAME_P (DECL_NAME (node->decl)))
618 return true;
619 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (node->decl)))
620 return true;
621 return false;
624 /* Dissolve the same_comdat_group list in which NODE resides. */
/* The list is circular; walk it once, clearing each node's link.  */
626 static void
627 dissolve_same_comdat_group_list (struct cgraph_node *node)
629 struct cgraph_node *n = node, *next;
632 next = n->same_comdat_group;
633 n->same_comdat_group = NULL;
634 n = next;
636 while (n != node);
639 /* Mark visibility of all functions.
641 A local function is one whose calls can occur only in the current
642 compilation unit and all its calls are explicit, so we can change
643 its calling convention. We simply mark all static functions whose
644 address is not taken as local.
646 We also change the TREE_PUBLIC flag of all declarations that are public
647 in language point of view but we want to overwrite this default
648 via visibilities for the backend point of view. */
/* WHOLE_PROGRAM reflects -fwhole-program for this invocation.  Always
   returns 0 (pass-execute convention).  Sets
   cgraph_function_flags_ready on completion.  */
650 static unsigned int
651 function_and_variable_visibility (bool whole_program)
653 struct cgraph_node *node;
654 struct varpool_node *vnode;
655 struct pointer_set_t *aliased_nodes = pointer_set_create ();
656 struct pointer_set_t *aliased_vnodes = pointer_set_create ();
657 unsigned i;
658 alias_pair *p;
660 /* Discover aliased nodes. */
661 for (i = 0; VEC_iterate (alias_pair, alias_pairs, i, p); i++)
663 if (dump_file)
664 fprintf (dump_file, "Alias %s->%s",
665 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (p->decl)),
666 IDENTIFIER_POINTER (p->target));
668 if ((node = cgraph_node_for_asm (p->target)) != NULL)
670 gcc_assert (node->needed);
671 pointer_set_insert (aliased_nodes, node);
672 if (dump_file)
673 fprintf (dump_file, " node %s/%i",
674 cgraph_node_name (node), node->uid);
676 else if ((vnode = varpool_node_for_asm (p->target)) != NULL)
678 gcc_assert (vnode->needed);
679 pointer_set_insert (aliased_vnodes, vnode);
680 if (dump_file)
681 fprintf (dump_file, " varpool node %s",
682 varpool_node_name (vnode));
684 if (dump_file)
685 fprintf (dump_file, "\n");
/* Decide visibility of every function and localize its decl (and
   same-body aliases) when it is not externally visible.  */
688 for (node = cgraph_nodes; node; node = node->next)
690 /* C++ FE on lack of COMDAT support create local COMDAT functions
691 (that ought to be shared but can not due to object format
692 limitations). It is neccesary to keep the flag to make rest of C++ FE
693 happy. Clear the flag here to avoid confusion in middle-end. */
694 if (DECL_COMDAT (node->decl) && !TREE_PUBLIC (node->decl))
695 DECL_COMDAT (node->decl) = 0;
696 /* For external decls stop tracking same_comdat_group, it doesn't matter
697 what comdat group they are in when they won't be emitted in this TU,
698 and simplifies later passes. */
699 if (node->same_comdat_group && DECL_EXTERNAL (node->decl))
701 #ifdef ENABLE_CHECKING
702 struct cgraph_node *n;
704 for (n = node->same_comdat_group;
705 n != node;
706 n = n->same_comdat_group)
707 /* If at least one of same comdat group functions is external,
708 all of them have to be, otherwise it is a front-end bug. */
709 gcc_assert (DECL_EXTERNAL (n->decl));
710 #endif
711 dissolve_same_comdat_group_list (node);
713 gcc_assert ((!DECL_WEAK (node->decl) && !DECL_COMDAT (node->decl))
714 || TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl));
715 if (cgraph_externally_visible_p (node, whole_program,
716 pointer_set_contains (aliased_nodes,
717 node)))
719 gcc_assert (!node->global.inlined_to);
720 node->local.externally_visible = true;
722 else
723 node->local.externally_visible = false;
724 if (!node->local.externally_visible && node->analyzed
725 && !DECL_EXTERNAL (node->decl))
727 struct cgraph_node *alias;
728 gcc_assert (whole_program || in_lto_p || !TREE_PUBLIC (node->decl));
729 cgraph_make_decl_local (node->decl);
730 for (alias = node->same_body; alias; alias = alias->next)
731 cgraph_make_decl_local (alias->decl);
732 if (node->same_comdat_group)
733 /* cgraph_externally_visible_p has already checked all other nodes
734 in the group and they will all be made local. We need to
735 dissolve the group at once so that the predicate does not
736 segfault though. */
737 dissolve_same_comdat_group_list (node);
739 node->local.local = cgraph_local_node_p (node);
/* Canonicalize variable decls before deciding their visibility.  */
741 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
743 /* weak flag makes no sense on local variables. */
744 gcc_assert (!DECL_WEAK (vnode->decl)
745 || TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl));
746 /* In several cases declarations can not be common:
748 - when declaration has initializer
749 - when it is in weak
750 - when it has specific section
751 - when it resides in non-generic address space.
752 - if declaration is local, it will get into .local common section
753 so common flag is not needed. Frontends still produce these in
754 certain cases, such as for:
756 static int a __attribute__ ((common))
758 Canonicalize things here and clear the redundant flag. */
759 if (DECL_COMMON (vnode->decl)
760 && (!(TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl))
761 || (DECL_INITIAL (vnode->decl)
762 && DECL_INITIAL (vnode->decl) != error_mark_node)
763 || DECL_WEAK (vnode->decl)
764 || DECL_SECTION_NAME (vnode->decl) != NULL
765 || ! (ADDR_SPACE_GENERIC_P
766 (TYPE_ADDR_SPACE (TREE_TYPE (vnode->decl))))))
767 DECL_COMMON (vnode->decl) = 0;
/* Decide visibility of needed variables and localize the rest.  */
769 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
771 if (!vnode->finalized)
772 continue;
773 if (vnode->needed
774 && (DECL_COMDAT (vnode->decl) || TREE_PUBLIC (vnode->decl))
775 && (((!whole_program
776 /* We can privatize comdat readonly variables whose address is
777 not taken, but doing so is not going to bring us
778 optimization oppurtunities until we start reordering
779 datastructures. */
780 || DECL_COMDAT (vnode->decl)
781 || DECL_WEAK (vnode->decl))
782 /* When doing linktime optimizations, all hidden symbols will
783 become local. */
784 && (!in_lto_p
785 || DECL_VISIBILITY (vnode->decl) != VISIBILITY_HIDDEN
786 /* We can get prevailing decision in other object file.
787 In this case we do not sed used_from_object_file. */
788 || !vnode->finalized))
789 || DECL_PRESERVE_P (vnode->decl)
790 || vnode->used_from_object_file
791 || pointer_set_contains (aliased_vnodes, vnode)
792 || lookup_attribute ("externally_visible",
793 DECL_ATTRIBUTES (vnode->decl))))
794 vnode->externally_visible = true;
795 else
796 vnode->externally_visible = false;
797 if (!vnode->externally_visible)
799 gcc_assert (in_lto_p || whole_program || !TREE_PUBLIC (vnode->decl));
800 cgraph_make_decl_local (vnode->decl);
802 gcc_assert (TREE_STATIC (vnode->decl));
804 pointer_set_destroy (aliased_nodes);
805 pointer_set_destroy (aliased_vnodes);
807 if (dump_file)
809 fprintf (dump_file, "\nMarking local functions:");
810 for (node = cgraph_nodes; node; node = node->next)
811 if (node->local.local)
812 fprintf (dump_file, " %s", cgraph_node_name (node));
813 fprintf (dump_file, "\n\n");
814 fprintf (dump_file, "\nMarking externally visible functions:");
815 for (node = cgraph_nodes; node; node = node->next)
816 if (node->local.externally_visible)
817 fprintf (dump_file, " %s", cgraph_node_name (node));
818 fprintf (dump_file, "\n\n");
819 fprintf (dump_file, "\nMarking externally visible variables:");
820 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
821 if (vnode->externally_visible)
822 fprintf (dump_file, " %s", varpool_node_name (vnode));
823 fprintf (dump_file, "\n\n");
825 cgraph_function_flags_ready = true;
826 return 0;
829 /* Local function pass handling visibilities. This happens before LTO streaming
830 so in particular -fwhole-program should be ignored at this level. */
/* -fwhole-program is only honored here when neither LTO nor WHOPR
   streaming is enabled, since localization would otherwise be decided
   prematurely.  */
832 static unsigned int
833 local_function_and_variable_visibility (void)
835 return function_and_variable_visibility (flag_whole_program && !flag_lto && !flag_whopr);
/* Pass descriptor for the early (pre-LTO-streaming) visibility pass.  */
838 struct simple_ipa_opt_pass pass_ipa_function_and_variable_visibility =
841 SIMPLE_IPA_PASS,
842 "visibility", /* name */
843 NULL, /* gate */
844 local_function_and_variable_visibility,/* execute */
845 NULL, /* sub */
846 NULL, /* next */
847 0, /* static_pass_number */
848 TV_CGRAPHOPT, /* tv_id */
849 0, /* properties_required */
850 0, /* properties_provided */
851 0, /* properties_destroyed */
852 0, /* todo_flags_start */
853 TODO_remove_functions | TODO_dump_cgraph
854 | TODO_ggc_collect /* todo_flags_finish */
858 /* Do not re-run on ltrans stage. */
860 static bool
861 gate_whole_program_function_and_variable_visibility (void)
863 return !flag_ltrans;
866 /* Bring functions local at LTO time with -fwhole-program. */
/* Runs the shared visibility logic, then marks everything that stayed
   externally visible (and non-COMDAT) as needed so it is not reclaimed.
   Always returns 0 (pass-execute convention).  */
868 static unsigned int
869 whole_program_function_and_variable_visibility (void)
871 struct cgraph_node *node;
872 struct varpool_node *vnode;
874 function_and_variable_visibility (flag_whole_program);
876 for (node = cgraph_nodes; node; node = node->next)
877 if ((node->local.externally_visible && !DECL_COMDAT (node->decl))
878 && node->local.finalized)
879 cgraph_mark_needed_node (node);
880 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
881 if (vnode->externally_visible && !DECL_COMDAT (vnode->decl))
882 varpool_mark_needed_node (vnode);
883 if (dump_file)
885 fprintf (dump_file, "\nNeeded variables:");
886 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
887 if (vnode->needed)
888 fprintf (dump_file, " %s", varpool_node_name (vnode));
889 fprintf (dump_file, "\n\n");
891 if (optimize)
892 ipa_discover_readonly_nonaddressable_vars ();
893 return 0;
/* Pass descriptor for the whole-program (LTO-time) visibility pass.  */
896 struct ipa_opt_pass_d pass_ipa_whole_program_visibility =
899 IPA_PASS,
900 "whole-program", /* name */
901 gate_whole_program_function_and_variable_visibility,/* gate */
902 whole_program_function_and_variable_visibility,/* execute */
903 NULL, /* sub */
904 NULL, /* next */
905 0, /* static_pass_number */
906 TV_CGRAPHOPT, /* tv_id */
907 0, /* properties_required */
908 0, /* properties_provided */
909 0, /* properties_destroyed */
910 0, /* todo_flags_start */
911 TODO_remove_functions | TODO_dump_cgraph
912 | TODO_ggc_collect /* todo_flags_finish */
914 NULL, /* generate_summary */
915 NULL, /* write_summary */
916 NULL, /* read_summary */
917 NULL, /* write_optimization_summary */
918 NULL, /* read_optimization_summary */
919 NULL, /* stmt_fixup */
920 0, /* TODOs */
921 NULL, /* function_transform */
922 NULL, /* variable_transform */
925 /* Hash a cgraph node set element. */
/* htab hash callback; elements hash by their node pointer identity.  */
927 static hashval_t
928 hash_cgraph_node_set_element (const void *p)
930 const_cgraph_node_set_element element = (const_cgraph_node_set_element) p;
931 return htab_hash_pointer (element->node);
934 /* Compare two cgraph node set elements. */
/* htab equality callback; two elements are equal iff they wrap the same
   cgraph node pointer.  */
936 static int
937 eq_cgraph_node_set_element (const void *p1, const void *p2)
939 const_cgraph_node_set_element e1 = (const_cgraph_node_set_element) p1;
940 const_cgraph_node_set_element e2 = (const_cgraph_node_set_element) p2;
942 return e1->node == e2->node;
945 /* Create a new cgraph node set. */
/* Returns a GC-allocated set backed by a pointer hash table (for O(1)
   membership) plus a vector (for stable iteration order).  */
947 cgraph_node_set
948 cgraph_node_set_new (void)
950 cgraph_node_set new_node_set;
952 new_node_set = ggc_alloc_cgraph_node_set_def ();
953 new_node_set->hashtab = htab_create_ggc (10,
954 hash_cgraph_node_set_element,
955 eq_cgraph_node_set_element,
956 NULL);
957 new_node_set->nodes = NULL;
958 return new_node_set;
961 /* Add cgraph_node NODE to cgraph_node_set SET. */
/* No-op if NODE is already present; otherwise appends NODE to the vector
   and records its index in the hash table element.  */
963 void
964 cgraph_node_set_add (cgraph_node_set set, struct cgraph_node *node)
966 void **slot;
967 cgraph_node_set_element element;
968 struct cgraph_node_set_element_def dummy;
970 dummy.node = node;
971 slot = htab_find_slot (set->hashtab, &dummy, INSERT);
973 if (*slot != HTAB_EMPTY_ENTRY)
975 element = (cgraph_node_set_element) *slot;
976 gcc_assert (node == element->node
977 && (VEC_index (cgraph_node_ptr, set->nodes, element->index)
978 == node));
979 return;
982 /* Insert node into hash table. */
983 element = ggc_alloc_cgraph_node_set_element_def ()
984 element->node = node;
985 element->index = VEC_length (cgraph_node_ptr, set->nodes);
986 *slot = element;
988 /* Insert into node vector. */
989 VEC_safe_push (cgraph_node_ptr, gc, set->nodes, node);
992 /* Remove cgraph_node NODE from cgraph_node_set SET. */
/* Silently returns if NODE is not in SET.  Removal from the vector is
   O(1): the last vector element is moved into NODE's slot and its hash
   table entry's index is updated accordingly.  */
994 void
995 cgraph_node_set_remove (cgraph_node_set set, struct cgraph_node *node)
997 void **slot, **last_slot;
998 cgraph_node_set_element element, last_element;
999 struct cgraph_node *last_node;
1000 struct cgraph_node_set_element_def dummy;
1002 dummy.node = node;
1003 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1004 if (slot == NULL)
1005 return;
1007 element = (cgraph_node_set_element) *slot;
1008 gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
1009 == node);
1011 /* Remove from vector. We do this by swapping node with the last element
1012 of the vector. */
1013 last_node = VEC_pop (cgraph_node_ptr, set->nodes);
1014 if (last_node != node)
1016 dummy.node = last_node;
1017 last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1018 last_element = (cgraph_node_set_element) *last_slot;
1019 gcc_assert (last_element);
1021 /* Move the last element to the original spot of NODE. */
1022 last_element->index = element->index;
1023 VEC_replace (cgraph_node_ptr, set->nodes, last_element->index,
1024 last_node);
1027 /* Remove element from hash table. */
1028 htab_clear_slot (set->hashtab, slot);
1029 ggc_free (element);
1032 /* Find NODE in SET and return an iterator to it if found. A null iterator
1033 is returned if NODE is not in SET. */
/* A "null" iterator has index (unsigned) ~0; check with csi_end_p.  */
1035 cgraph_node_set_iterator
1036 cgraph_node_set_find (cgraph_node_set set, struct cgraph_node *node)
1038 void **slot;
1039 struct cgraph_node_set_element_def dummy;
1040 cgraph_node_set_element element;
1041 cgraph_node_set_iterator csi;
1043 dummy.node = node;
1044 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1045 if (slot == NULL)
1046 csi.index = (unsigned) ~0;
1047 else
1049 element = (cgraph_node_set_element) *slot;
1050 gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
1051 == node);
1052 csi.index = element->index;
1054 csi.set = set;
1056 return csi;
1059 /* Dump content of SET to file F. */
/* One line of " name/uid" entries, newline-terminated.  */
1061 void
1062 dump_cgraph_node_set (FILE *f, cgraph_node_set set)
1064 cgraph_node_set_iterator iter;
1066 for (iter = csi_start (set); !csi_end_p (iter); csi_next (&iter))
1068 struct cgraph_node *node = csi_node (iter);
1069 fprintf (f, " %s/%i", cgraph_node_name (node), node->uid);
1071 fprintf (f, "\n");
1074 /* Dump content of SET to stderr. */
1076 DEBUG_FUNCTION void
1077 debug_cgraph_node_set (cgraph_node_set set)
1079 dump_cgraph_node_set (stderr, set);
1082 /* Hash a varpool node set element. */
1084 static hashval_t
1085 hash_varpool_node_set_element (const void *p)
1087 const_varpool_node_set_element element = (const_varpool_node_set_element) p;
1088 return htab_hash_pointer (element->node);
1091 /* Compare two varpool node set elements. */
1093 static int
1094 eq_varpool_node_set_element (const void *p1, const void *p2)
1096 const_varpool_node_set_element e1 = (const_varpool_node_set_element) p1;
1097 const_varpool_node_set_element e2 = (const_varpool_node_set_element) p2;
1099 return e1->node == e2->node;
1102 /* Create a new varpool node set. */
1104 varpool_node_set
1105 varpool_node_set_new (void)
1107 varpool_node_set new_node_set;
1109 new_node_set = ggc_alloc_varpool_node_set_def ();
1110 new_node_set->hashtab = htab_create_ggc (10,
1111 hash_varpool_node_set_element,
1112 eq_varpool_node_set_element,
1113 NULL);
1114 new_node_set->nodes = NULL;
1115 return new_node_set;
1118 /* Add varpool_node NODE to varpool_node_set SET. */
1120 void
1121 varpool_node_set_add (varpool_node_set set, struct varpool_node *node)
1123 void **slot;
1124 varpool_node_set_element element;
1125 struct varpool_node_set_element_def dummy;
1127 dummy.node = node;
1128 slot = htab_find_slot (set->hashtab, &dummy, INSERT);
1130 if (*slot != HTAB_EMPTY_ENTRY)
1132 element = (varpool_node_set_element) *slot;
1133 gcc_assert (node == element->node
1134 && (VEC_index (varpool_node_ptr, set->nodes, element->index)
1135 == node));
1136 return;
1139 /* Insert node into hash table. */
1140 element = ggc_alloc_varpool_node_set_element_def ();
1141 element->node = node;
1142 element->index = VEC_length (varpool_node_ptr, set->nodes);
1143 *slot = element;
1145 /* Insert into node vector. */
1146 VEC_safe_push (varpool_node_ptr, gc, set->nodes, node);
1149 /* Remove varpool_node NODE from varpool_node_set SET. */
1151 void
1152 varpool_node_set_remove (varpool_node_set set, struct varpool_node *node)
1154 void **slot, **last_slot;
1155 varpool_node_set_element element, last_element;
1156 struct varpool_node *last_node;
1157 struct varpool_node_set_element_def dummy;
1159 dummy.node = node;
1160 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1161 if (slot == NULL)
1162 return;
1164 element = (varpool_node_set_element) *slot;
1165 gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
1166 == node);
1168 /* Remove from vector. We do this by swapping node with the last element
1169 of the vector. */
1170 last_node = VEC_pop (varpool_node_ptr, set->nodes);
1171 if (last_node != node)
1173 dummy.node = last_node;
1174 last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1175 last_element = (varpool_node_set_element) *last_slot;
1176 gcc_assert (last_element);
1178 /* Move the last element to the original spot of NODE. */
1179 last_element->index = element->index;
1180 VEC_replace (varpool_node_ptr, set->nodes, last_element->index,
1181 last_node);
1184 /* Remove element from hash table. */
1185 htab_clear_slot (set->hashtab, slot);
1186 ggc_free (element);
1189 /* Find NODE in SET and return an iterator to it if found. A null iterator
1190 is returned if NODE is not in SET. */
1192 varpool_node_set_iterator
1193 varpool_node_set_find (varpool_node_set set, struct varpool_node *node)
1195 void **slot;
1196 struct varpool_node_set_element_def dummy;
1197 varpool_node_set_element element;
1198 varpool_node_set_iterator vsi;
1200 dummy.node = node;
1201 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1202 if (slot == NULL)
1203 vsi.index = (unsigned) ~0;
1204 else
1206 element = (varpool_node_set_element) *slot;
1207 gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
1208 == node);
1209 vsi.index = element->index;
1211 vsi.set = set;
1213 return vsi;
1216 /* Dump content of SET to file F. */
1218 void
1219 dump_varpool_node_set (FILE *f, varpool_node_set set)
1221 varpool_node_set_iterator iter;
1223 for (iter = vsi_start (set); !vsi_end_p (iter); vsi_next (&iter))
1225 struct varpool_node *node = vsi_node (iter);
1226 fprintf (f, " %s", varpool_node_name (node));
1228 fprintf (f, "\n");
1231 /* Dump content of SET to stderr. */
1233 DEBUG_FUNCTION void
1234 debug_varpool_node_set (varpool_node_set set)
1236 dump_varpool_node_set (stderr, set);
1240 /* Simple ipa profile pass propagating frequencies across the callgraph. */
1242 static unsigned int
1243 ipa_profile (void)
1245 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1246 struct cgraph_edge *e;
1247 int order_pos;
1248 bool something_changed = false;
1249 int i;
1251 order_pos = cgraph_postorder (order);
1252 for (i = order_pos - 1; i >= 0; i--)
1254 if (order[i]->local.local && cgraph_propagate_frequency (order[i]))
1256 for (e = order[i]->callees; e; e = e->next_callee)
1257 if (e->callee->local.local && !e->callee->aux)
1259 something_changed = true;
1260 e->callee->aux = (void *)1;
1263 order[i]->aux = NULL;
1266 while (something_changed)
1268 something_changed = false;
1269 for (i = order_pos - 1; i >= 0; i--)
1271 if (order[i]->aux && cgraph_propagate_frequency (order[i]))
1273 for (e = order[i]->callees; e; e = e->next_callee)
1274 if (e->callee->local.local && !e->callee->aux)
1276 something_changed = true;
1277 e->callee->aux = (void *)1;
1280 order[i]->aux = NULL;
1283 free (order);
1284 return 0;
1287 static bool
1288 gate_ipa_profile (void)
1290 return flag_ipa_profile;
1293 struct ipa_opt_pass_d pass_ipa_profile =
1296 IPA_PASS,
1297 "ipa-profile", /* name */
1298 gate_ipa_profile, /* gate */
1299 ipa_profile, /* execute */
1300 NULL, /* sub */
1301 NULL, /* next */
1302 0, /* static_pass_number */
1303 TV_IPA_PROFILE, /* tv_id */
1304 0, /* properties_required */
1305 0, /* properties_provided */
1306 0, /* properties_destroyed */
1307 0, /* todo_flags_start */
1308 0 /* todo_flags_finish */
1310 NULL, /* generate_summary */
1311 NULL, /* write_summary */
1312 NULL, /* read_summary */
1313 NULL, /* write_optimization_summary */
1314 NULL, /* read_optimization_summary */
1315 NULL, /* stmt_fixup */
1316 0, /* TODOs */
1317 NULL, /* function_transform */
1318 NULL /* variable_transform */