Daily bump.
[official-gcc.git] / gcc / ipa.c
blob5afacd87b7a548983e02e057c465b5dbae5aaa54
1 /* Basic IPA optimizations and utilities.
2 Copyright (C) 2003-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "tree.h"
25 #include "calls.h"
26 #include "stringpool.h"
27 #include "cgraph.h"
28 #include "tree-pass.h"
29 #include "hash-map.h"
30 #include "hash-set.h"
31 #include "gimple-expr.h"
32 #include "gimplify.h"
33 #include "flags.h"
34 #include "target.h"
35 #include "tree-iterator.h"
36 #include "ipa-utils.h"
37 #include "ipa-inline.h"
38 #include "tree-inline.h"
39 #include "profile.h"
40 #include "params.h"
41 #include "internal-fn.h"
42 #include "tree-ssa-alias.h"
43 #include "gimple.h"
44 #include "dbgcnt.h"
47 /* Return true when NODE has ADDR reference. */
49 static bool
50 has_addr_references_p (struct cgraph_node *node,
51 void *data ATTRIBUTE_UNUSED)
53 int i;
54 struct ipa_ref *ref = NULL;
56 for (i = 0; node->iterate_referring (i, ref); i++)
57 if (ref->use == IPA_REF_ADDR)
58 return true;
59 return false;
62 /* Look for all functions inlined to NODE and update their inlined_to pointers
63 to INLINED_TO. */
65 static void
66 update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
68 struct cgraph_edge *e;
69 for (e = node->callees; e; e = e->next_callee)
70 if (e->callee->global.inlined_to)
72 e->callee->global.inlined_to = inlined_to;
73 update_inlined_to_pointer (e->callee, inlined_to);
77 /* Add symtab NODE to queue starting at FIRST.
79 The queue is linked via AUX pointers and terminated by pointer to 1.
80 We enqueue nodes at two occasions: when we find them reachable or when we find
81 their bodies needed for further clonning. In the second case we mark them
82 by pointer to 2 after processing so they are re-queue when they become
83 reachable. */
85 static void
86 enqueue_node (symtab_node *node, symtab_node **first,
87 hash_set<symtab_node *> *reachable)
89 /* Node is still in queue; do nothing. */
90 if (node->aux && node->aux != (void *) 2)
91 return;
92 /* Node was already processed as unreachable, re-enqueue
93 only if it became reachable now. */
94 if (node->aux == (void *)2 && !reachable->contains (node))
95 return;
96 node->aux = *first;
97 *first = node;
100 /* Process references. */
102 static void
103 process_references (symtab_node *snode,
104 symtab_node **first,
105 bool before_inlining_p,
106 hash_set<symtab_node *> *reachable)
108 int i;
109 struct ipa_ref *ref = NULL;
110 for (i = 0; snode->iterate_reference (i, ref); i++)
112 symtab_node *node = ref->referred;
114 if (node->definition && !node->in_other_partition
115 && ((!DECL_EXTERNAL (node->decl) || node->alias)
116 || (((before_inlining_p
117 && (symtab->state < IPA_SSA
118 || !lookup_attribute ("always_inline",
119 DECL_ATTRIBUTES (node->decl)))))
120 /* We use variable constructors during late complation for
121 constant folding. Keep references alive so partitioning
122 knows about potential references. */
123 || (TREE_CODE (node->decl) == VAR_DECL
124 && flag_wpa
125 && ctor_for_folding (node->decl)
126 != error_mark_node))))
127 reachable->add (node);
128 enqueue_node (node, first, reachable);
132 /* EDGE is a polymorphic call. If BEFORE_INLINING_P is set, mark
133 all its potential targets as reachable to permit later inlining if
134 devirtualization happens. After inlining still keep their declarations
135 around, so we can devirtualize to a direct call.
137 Also try to make trivial devirtualization when no or only one target is
138 possible. */
140 static void
141 walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
142 struct cgraph_edge *edge,
143 symtab_node **first,
144 hash_set<symtab_node *> *reachable,
145 bool before_inlining_p)
147 unsigned int i;
148 void *cache_token;
149 bool final;
150 vec <cgraph_node *>targets
151 = possible_polymorphic_call_targets
152 (edge, &final, &cache_token);
/* Walk the target list only the first time we see this cache token;
   add returns whether the token was already present (same idiom as the
   reachable sets elsewhere in this file).  */
154 if (!reachable_call_targets->add (cache_token))
156 for (i = 0; i < targets.length (); i++)
158 struct cgraph_node *n = targets[i];
160 /* Do not bother to mark virtual methods in anonymous namespace;
161 either we will find use of virtual table defining it, or it is
162 unused. */
163 if (TREE_CODE (TREE_TYPE (n->decl)) == METHOD_TYPE
164 && type_in_anonymous_namespace_p
165 (method_class_type (TREE_TYPE (n->decl))))
166 continue;
168 /* Prior to inlining, keep alive bodies of possible targets for
169 devirtualization. */
170 if (n->definition
171 && (before_inlining_p
172 && (symtab->state < IPA_SSA
173 || !lookup_attribute ("always_inline",
174 DECL_ATTRIBUTES (n->decl)))))
175 reachable->add (n);
177 /* Even after inlining we want to keep the possible targets in the
178 boundary, so late passes can still produce direct call even if
179 the chance for inlining is lost. */
180 enqueue_node (n, first, reachable);
184 /* Very trivial devirtualization; when the type is
185 final or anonymous (so we know all its derivation)
186 and there is only one possible virtual call target,
187 make the edge direct. */
188 if (final)
190 if (targets.length () <= 1 && dbg_cnt (devirt))
/* Zero targets means the call is provably unreachable; redirect to
   __builtin_unreachable in that case.  */
192 cgraph_node *target, *node = edge->caller;
193 if (targets.length () == 1)
194 target = targets[0];
195 else
196 target = cgraph_node::get_create
197 (builtin_decl_implicit (BUILT_IN_UNREACHABLE));
199 if (dump_enabled_p ())
201 location_t locus = gimple_location (edge->call_stmt);
202 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
203 "devirtualizing call in %s/%i to %s/%i\n",
204 edge->caller->name (), edge->caller->order,
205 target->name (),
206 target->order);
208 edge = edge->make_direct (target);
209 if (inline_summary_vec)
210 inline_update_overall_summary (node);
211 else if (edge->call_stmt)
212 edge->redirect_call_stmt_to_callee ();
217 /* Perform reachability analysis and reclaim all unreachable nodes.
219 The algorithm is basically mark&sweep but with some extra refinements:
221 - reachable extern inline functions needs special handling; the bodies needs
222 to stay in memory until inlining in hope that they will be inlined.
223 After inlining we release their bodies and turn them into unanalyzed
224 nodes even when they are reachable.
226 BEFORE_INLINING_P specify whether we are before or after inlining.
228 - virtual functions are kept in callgraph even if they seem unreachable in
229 hope calls to them will be devirtualized.
231 Again we remove them after inlining. In late optimization some
232 devirtualization may happen, but it is not important since we won't inline
233 the call. In theory early opts and IPA should work out all important cases.
235 - virtual clones needs bodies of their origins for later materialization;
236 this means that we want to keep the body even if the origin is unreachable
237 otherwise. To avoid origin from sitting in the callgraph and being
238 walked by IPA passes, we turn them into unanalyzed nodes with body
239 defined.
241 We maintain set of function declaration where body needs to stay in
242 body_needed_for_clonning
244 Inline clones represent special case: their declaration match the
245 declaration of origin and cgraph_remove_node already knows how to
246 reshape callgraph and preserve body when offline copy of function or
247 inline clone is being removed.
249 - C++ virtual tables keyed to other unit are represented as DECL_EXTERNAL
250 variables with DECL_INITIAL set. We finalize these and keep reachable
251 ones around for constant folding purposes. After inlining we however
252 stop walking their references to let everything static referenced by them
253 to be removed when it is otherwise unreachable.
255 We maintain queue of both reachable symbols (i.e. defined symbols that needs
256 to stay) and symbols that are in boundary (i.e. external symbols referenced
257 by reachable symbols or origins of clones). The queue is represented
258 as linked list by AUX pointer terminated by 1.
260 At the end we keep all reachable symbols. For symbols in boundary we always
261 turn definition into a declaration, but we may keep function body around
262 based on body_needed_for_clonning
264 All symbols that enter the queue have AUX pointer non-zero and are in the
265 boundary. Pointer set REACHABLE is used to track reachable symbols.
267 Every symbol can be visited twice - once as part of boundary and once
268 as real reachable symbol. enqueue_node needs to decide whether the
269 node needs to be re-queued for second processing. For this purpose
270 we set AUX pointer of processed symbols in the boundary to constant 2. */
272 bool
273 symbol_table::remove_unreachable_nodes (bool before_inlining_p, FILE *file)
/* Queue sentinel: pointer to 1 terminates the AUX-threaded work list.  */
275 symtab_node *first = (symtab_node *) (void *) 1;
276 struct cgraph_node *node, *next;
277 varpool_node *vnode, *vnext;
278 bool changed = false;
279 hash_set<symtab_node *> reachable;
280 hash_set<tree> body_needed_for_clonning;
281 hash_set<void *> reachable_call_targets;
283 timevar_push (TV_IPA_UNREACHABLE);
284 if (optimize && flag_devirtualize)
285 build_type_inheritance_graph ();
286 if (file)
287 fprintf (file, "\nReclaiming functions:");
288 #ifdef ENABLE_CHECKING
/* No node may carry a stale AUX pointer on entry.  */
289 FOR_EACH_FUNCTION (node)
290 gcc_assert (!node->aux);
291 FOR_EACH_VARIABLE (vnode)
292 gcc_assert (!vnode->aux);
293 #endif
294 /* Mark functions whose bodies are obviously needed.
295 This is mostly when they can be referenced externally. Inline clones
296 are special since their declarations are shared with master clone and thus
297 cgraph_can_remove_if_no_direct_calls_and_refs_p should not be called on them. */
298 FOR_EACH_FUNCTION (node)
300 node->used_as_abstract_origin = false;
301 if (node->definition
302 && !node->global.inlined_to
303 && !node->in_other_partition
304 && !node->can_remove_if_no_direct_calls_and_refs_p ())
306 gcc_assert (!node->global.inlined_to);
307 reachable.add (node);
308 enqueue_node (node, &first, &reachable);
310 else
311 gcc_assert (!node->aux);
314 /* Mark variables that are obviously needed. */
315 FOR_EACH_DEFINED_VARIABLE (vnode)
316 if (!vnode->can_remove_if_no_refs_p()
317 && !vnode->in_other_partition)
319 reachable.add (vnode);
320 enqueue_node (vnode, &first, &reachable);
323 /* Perform reachability analysis. */
324 while (first != (symtab_node *) (void *) 1)
326 bool in_boundary_p = !reachable.contains (first);
327 symtab_node *node = first;
/* Pop the head of the work list.  */
329 first = (symtab_node *)first->aux;
331 /* If we are processing symbol in boundary, mark its AUX pointer for
332 possible later re-processing in enqueue_node. */
333 if (in_boundary_p)
334 node->aux = (void *)2;
335 else
/* Abstract origins of reachable functions must stay around for
   debug info; pull them into the boundary.  */
337 if (TREE_CODE (node->decl) == FUNCTION_DECL
338 && DECL_ABSTRACT_ORIGIN (node->decl))
340 struct cgraph_node *origin_node
341 = cgraph_node::get_create (DECL_ABSTRACT_ORIGIN (node->decl));
342 origin_node->used_as_abstract_origin = true;
343 enqueue_node (origin_node, &first, &reachable);
345 /* If any symbol in a comdat group is reachable, force
346 all externally visible symbols in the same comdat
347 group to be reachable as well. Comdat-local symbols
348 can be discarded if all uses were inlined. */
349 if (node->same_comdat_group)
351 symtab_node *next;
352 for (next = node->same_comdat_group;
353 next != node;
354 next = next->same_comdat_group)
355 if (!next->comdat_local_p ()
356 && !reachable.add (next))
357 enqueue_node (next, &first, &reachable);
359 /* Mark references as reachable. */
360 process_references (node, &first, before_inlining_p, &reachable);
363 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
365 /* Mark the callees reachable unless they are direct calls to extern
366 inline functions we decided to not inline. */
367 if (!in_boundary_p)
369 struct cgraph_edge *e;
370 /* Keep alive possible targets for devirtualization. */
371 if (optimize && flag_devirtualize)
373 struct cgraph_edge *next;
374 for (e = cnode->indirect_calls; e; e = next)
376 next = e->next_callee;
377 if (e->indirect_info->polymorphic)
378 walk_polymorphic_call_targets (&reachable_call_targets,
379 e, &first, &reachable,
380 before_inlining_p);
383 for (e = cnode->callees; e; e = e->next_callee)
385 if (e->callee->definition
386 && !e->callee->in_other_partition
387 && (!e->inline_failed
388 || !DECL_EXTERNAL (e->callee->decl)
389 || e->callee->alias
390 || before_inlining_p))
392 /* Be sure that we will not optimize out alias target
393 body. */
394 if (DECL_EXTERNAL (e->callee->decl)
395 && e->callee->alias
396 && before_inlining_p)
397 reachable.add (e->callee->function_symbol ());
398 reachable.add (e->callee);
400 enqueue_node (e->callee, &first, &reachable);
403 /* When inline clone exists, mark body to be preserved so when removing
404 offline copy of the function we don't kill it. */
405 if (cnode->global.inlined_to)
406 body_needed_for_clonning.add (cnode->decl);
408 /* For non-inline clones, force their origins to the boundary and ensure
409 that body is not removed. */
410 while (cnode->clone_of)
412 bool noninline = cnode->clone_of->decl != cnode->decl;
413 cnode = cnode->clone_of;
414 if (noninline)
416 body_needed_for_clonning.add (cnode->decl);
417 enqueue_node (cnode, &first, &reachable);
422 /* If any reachable function has simd clones, mark them as
423 reachable as well. */
424 if (cnode->simd_clones)
426 cgraph_node *next;
427 for (next = cnode->simd_clones;
428 next;
429 next = next->simdclone->next_clone)
430 if (in_boundary_p
431 || !reachable.add (next))
432 enqueue_node (next, &first, &reachable);
435 /* When we see constructor of external variable, keep referred nodes in the
436 boundary. This will also hold initializers of the external vars NODE
437 refers to. */
438 varpool_node *vnode = dyn_cast <varpool_node *> (node);
439 if (vnode
440 && DECL_EXTERNAL (node->decl)
441 && !vnode->alias
442 && in_boundary_p)
444 struct ipa_ref *ref = NULL;
445 for (int i = 0; node->iterate_reference (i, ref); i++)
446 enqueue_node (ref->referred, &first, &reachable);
450 /* Remove unreachable functions. */
451 for (node = first_function (); node; node = next)
453 next = next_function (node);
455 /* If node is not needed at all, remove it. */
456 if (!node->aux)
458 if (file)
459 fprintf (file, " %s/%i", node->name (), node->order);
460 node->remove ();
461 changed = true;
463 /* If node is unreachable, remove its body. */
464 else if (!reachable.contains (node))
466 if (!body_needed_for_clonning.contains (node->decl))
467 node->release_body ();
468 else if (!node->clone_of)
469 gcc_assert (in_lto_p || DECL_RESULT (node->decl));
470 if (node->definition)
472 if (file)
473 fprintf (file, " %s/%i", node->name (), node->order);
/* Turn the definition into a declaration kept in the boundary.  */
474 node->body_removed = true;
475 node->analyzed = false;
476 node->definition = false;
477 node->cpp_implicit_alias = false;
478 node->alias = false;
479 node->thunk.thunk_p = false;
480 node->weakref = false;
481 /* After early inlining we drop always_inline attributes on
482 bodies of functions that are still referenced (have their
483 address taken). */
484 DECL_ATTRIBUTES (node->decl)
485 = remove_attribute ("always_inline",
486 DECL_ATTRIBUTES (node->decl));
487 if (!node->in_other_partition)
488 node->local.local = false;
489 node->remove_callees ();
490 node->remove_from_same_comdat_group ();
491 node->remove_all_references ();
492 changed = true;
495 else
496 gcc_assert (node->clone_of || !node->has_gimple_body_p ()
497 || in_lto_p || DECL_RESULT (node->decl));
500 /* Inline clones might be kept around so their materializing allows further
501 cloning. If the function the clone is inlined into is removed, we need
502 to turn it into normal clone. */
503 FOR_EACH_FUNCTION (node)
505 if (node->global.inlined_to
506 && !node->callers)
508 gcc_assert (node->clones);
509 node->global.inlined_to = NULL;
510 update_inlined_to_pointer (node, node);
512 node->aux = NULL;
515 /* Remove unreachable variables. */
516 if (file)
517 fprintf (file, "\nReclaiming variables:");
518 for (vnode = first_variable (); vnode; vnode = vnext)
520 vnext = next_variable (vnode);
521 if (!vnode->aux
522 /* For can_refer_decl_in_current_unit_p we want to track for
523 all external variables if they are defined in other partition
524 or not. */
525 && (!flag_ltrans || !DECL_EXTERNAL (vnode->decl)))
527 if (file)
528 fprintf (file, " %s/%i", vnode->name (), vnode->order);
529 vnode->remove ();
530 changed = true;
532 else if (!reachable.contains (vnode))
534 tree init;
535 if (vnode->definition)
537 if (file)
538 fprintf (file, " %s", vnode->name ());
539 changed = true;
541 vnode->body_removed = true;
542 vnode->definition = false;
543 vnode->analyzed = false;
544 vnode->aux = NULL;
546 vnode->remove_from_same_comdat_group ();
548 /* Keep body if it may be useful for constant folding. */
549 if ((init = ctor_for_folding (vnode->decl)) == error_mark_node)
550 vnode->remove_initializer ();
551 else
552 DECL_INITIAL (vnode->decl) = init;
553 vnode->remove_all_references ();
555 else
556 vnode->aux = NULL;
559 /* Now update address_taken flags and try to promote functions to be local. */
560 if (file)
561 fprintf (file, "\nClearing address taken flags:");
562 FOR_EACH_DEFINED_FUNCTION (node)
563 if (node->address_taken
564 && !node->used_from_other_partition)
566 if (!node->call_for_symbol_thunks_and_aliases
567 (has_addr_references_p, NULL, true))
569 if (file)
570 fprintf (file, " %s", node->name ());
571 node->address_taken = false;
572 changed = true;
573 if (node->local_p ())
575 node->local.local = true;
576 if (file)
577 fprintf (file, " (local)");
581 if (file)
582 fprintf (file, "\n");
584 #ifdef ENABLE_CHECKING
585 symtab_node::verify_symtab_nodes ();
586 #endif
588 /* If we removed something, perhaps profile could be improved. */
589 if (changed && optimize && inline_edge_summary_vec.exists ())
590 FOR_EACH_DEFINED_FUNCTION (node)
591 ipa_propagate_frequency (node);
593 timevar_pop (TV_IPA_UNREACHABLE);
594 return changed;
597 /* Process references to VNODE and set flags WRITTEN, ADDRESS_TAKEN, READ
598 as needed, also clear EXPLICIT_REFS if the references to given variable
599 do not need to be explicit. */
601 void
602 process_references (varpool_node *vnode,
603 bool *written, bool *address_taken,
604 bool *read, bool *explicit_refs)
606 int i;
607 struct ipa_ref *ref;
609 if (!vnode->all_refs_explicit_p ()
610 || TREE_THIS_VOLATILE (vnode->decl))
611 *explicit_refs = false;
613 for (i = 0; vnode->iterate_referring (i, ref)
614 && *explicit_refs && (!*written || !*address_taken || !*read); i++)
615 switch (ref->use)
617 case IPA_REF_ADDR:
618 *address_taken = true;
619 break;
620 case IPA_REF_LOAD:
621 *read = true;
622 break;
623 case IPA_REF_STORE:
624 *written = true;
625 break;
626 case IPA_REF_ALIAS:
627 process_references (dyn_cast<varpool_node *> (ref->referring), written,
628 address_taken, read, explicit_refs);
629 break;
633 /* Set TREE_READONLY bit. */
635 bool
636 set_readonly_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
638 TREE_READONLY (vnode->decl) = true;
639 return false;
642 /* Set writeonly bit and clear the initalizer, since it will not be needed. */
644 bool
645 set_writeonly_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
647 vnode->writeonly = true;
648 if (optimize)
650 DECL_INITIAL (vnode->decl) = NULL;
651 if (!vnode->alias)
652 vnode->remove_all_references ();
654 return false;
657 /* Clear addressale bit of VNODE. */
659 bool
660 clear_addressable_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
662 vnode->address_taken = false;
663 TREE_ADDRESSABLE (vnode->decl) = 0;
664 return false;
667 /* Discover variables that have no longer address taken or that are read only
668 and update their flags.
670 FIXME: This can not be done in between gimplify and omp_expand since
671 readonly flag plays role on what is shared and what is not. Currently we do
672 this transformation as part of whole program visibility and re-do at
673 ipa-reference pass (to take into account clonning), but it would
674 make sense to do it before early optimizations. */
676 void
677 ipa_discover_readonly_nonaddressable_vars (void)
679 varpool_node *vnode;
680 if (dump_file)
681 fprintf (dump_file, "Clearing variable flags:");
/* Only consider non-alias variables where at least one flag could still
   be improved.  */
682 FOR_EACH_VARIABLE (vnode)
683 if (!vnode->alias
684 && (TREE_ADDRESSABLE (vnode->decl)
685 || !vnode->writeonly
686 || !TREE_READONLY (vnode->decl)))
688 bool written = false;
689 bool address_taken = false;
690 bool read = false;
691 bool explicit_refs = true;
/* Collect how the variable is used; bail out when references need not
   be explicit, since then the flags would be unreliable.  */
693 process_references (vnode, &written, &address_taken, &read, &explicit_refs);
694 if (!explicit_refs)
695 continue;
696 if (!address_taken)
698 if (TREE_ADDRESSABLE (vnode->decl) && dump_file)
699 fprintf (dump_file, " %s (non-addressable)", vnode->name ());
700 vnode->call_for_node_and_aliases (clear_addressable_bit, NULL, true);
702 if (!address_taken && !written
703 /* Making variable in explicit section readonly can cause section
704 type conflict.
705 See e.g. gcc.c-torture/compile/pr23237.c */
706 && vnode->get_section () == NULL)
708 if (!TREE_READONLY (vnode->decl) && dump_file)
709 fprintf (dump_file, " %s (read-only)", vnode->name ());
710 vnode->call_for_node_and_aliases (set_readonly_bit, NULL, true);
712 if (!vnode->writeonly && !read && !address_taken && written)
714 if (dump_file)
715 fprintf (dump_file, " %s (write-only)", vnode->name ());
716 vnode->call_for_node_and_aliases (set_writeonly_bit, NULL, true);
719 if (dump_file)
720 fprintf (dump_file, "\n");
723 /* Free inline summary.  A trivial simple-IPA pass whose only job is to
release the inliner's per-function summaries once they are no longer
needed.  */
725 namespace {
727 const pass_data pass_data_ipa_free_inline_summary =
729 SIMPLE_IPA_PASS, /* type */
730 "free-inline-summary", /* name */
731 OPTGROUP_NONE, /* optinfo_flags */
732 TV_IPA_FREE_INLINE_SUMMARY, /* tv_id */
733 0, /* properties_required */
734 0, /* properties_provided */
735 0, /* properties_destroyed */
736 0, /* todo_flags_start */
737 /* Early optimizations may make function unreachable. We can not
738 remove unreachable functions as part of the early opts pass because
739 TODOs are run before subpasses. Do it here. */
740 ( TODO_remove_functions | TODO_dump_symtab ), /* todo_flags_finish */
743 class pass_ipa_free_inline_summary : public simple_ipa_opt_pass
745 public:
746 pass_ipa_free_inline_summary (gcc::context *ctxt)
747 : simple_ipa_opt_pass (pass_data_ipa_free_inline_summary, ctxt)
750 /* opt_pass methods: */
/* Unconditionally drop the inline summaries; return no extra TODOs.  */
751 virtual unsigned int execute (function *)
753 inline_free_summary ();
754 return 0;
757 }; // class pass_ipa_free_inline_summary
759 } // anon namespace
761 simple_ipa_opt_pass *
762 make_pass_ipa_free_inline_summary (gcc::context *ctxt)
764 return new pass_ipa_free_inline_summary (ctxt);
767 /* Generate and emit a static constructor or destructor. WHICH must
768 be one of 'I' (for a constructor) or 'D' (for a destructor). BODY
769 is a STATEMENT_LIST containing GENERIC statements. PRIORITY is the
770 initialization priority for this constructor or destructor.
772 FINAL specifies whether the externally visible name for collect2 should
773 be produced. */
775 static void
776 cgraph_build_static_cdtor_1 (char which, tree body, int priority, bool final)
/* COUNTER makes each generated function name unique within this
   translation unit.  */
778 static int counter = 0;
779 char which_buf[16];
780 tree decl, name, resdecl;
782 /* The priority is encoded in the constructor or destructor name.
783 collect2 will sort the names and arrange that they are called at
784 program startup. */
785 if (final)
786 sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
787 else
788 /* Produce sane name but one not recognizable by collect2, just for the
789 case we fail to inline the function. */
790 sprintf (which_buf, "sub_%c_%.5d_%d", which, priority, counter++);
791 name = get_file_function_name (which_buf);
/* Build a "void f(void)" FUNCTION_DECL holding BODY.  */
793 decl = build_decl (input_location, FUNCTION_DECL, name,
794 build_function_type_list (void_type_node, NULL_TREE));
795 current_function_decl = decl;
797 resdecl = build_decl (input_location,
798 RESULT_DECL, NULL_TREE, void_type_node);
799 DECL_ARTIFICIAL (resdecl) = 1;
800 DECL_RESULT (decl) = resdecl;
801 DECL_CONTEXT (resdecl) = decl;
803 allocate_struct_function (decl, false);
805 TREE_STATIC (decl) = 1;
806 TREE_USED (decl) = 1;
807 DECL_ARTIFICIAL (decl) = 1;
808 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
809 DECL_SAVED_TREE (decl) = body;
/* When collect2 must find the symbol, it has to be public and must not
   be optimized away.  */
810 if (!targetm.have_ctors_dtors && final)
812 TREE_PUBLIC (decl) = 1;
813 DECL_PRESERVE_P (decl) = 1;
815 DECL_UNINLINABLE (decl) = 1;
817 DECL_INITIAL (decl) = make_node (BLOCK);
818 TREE_USED (DECL_INITIAL (decl)) = 1;
820 DECL_SOURCE_LOCATION (decl) = input_location;
821 cfun->function_end_locus = input_location;
823 switch (which)
825 case 'I':
826 DECL_STATIC_CONSTRUCTOR (decl) = 1;
827 decl_init_priority_insert (decl, priority);
828 break;
829 case 'D':
830 DECL_STATIC_DESTRUCTOR (decl) = 1;
831 decl_fini_priority_insert (decl, priority);
832 break;
833 default:
834 gcc_unreachable ();
837 gimplify_function_tree (decl);
839 cgraph_node::add_new_function (decl, false);
/* Reset the per-function global state we set above.  */
841 set_cfun (NULL);
842 current_function_decl = NULL;
845 /* Generate and emit a static constructor or destructor. WHICH must
846 be one of 'I' (for a constructor) or 'D' (for a destructor). BODY
847 is a STATEMENT_LIST containing GENERIC statements. PRIORITY is the
848 initialization priority for this constructor or destructor. */
850 void
851 cgraph_build_static_cdtor (char which, tree body, int priority)
853 cgraph_build_static_cdtor_1 (which, body, priority, false);
856 /* A vector of FUNCTION_DECLs declared as static constructors.
Populated by record_cdtor_fn and released in ipa_cdtor_merge. */
857 static vec<tree> static_ctors;
858 /* A vector of FUNCTION_DECLs declared as static destructors.
Populated by record_cdtor_fn and released in ipa_cdtor_merge. */
859 static vec<tree> static_dtors;
861 /* When target does not have ctors and dtors, we call all constructor
862 and destructor by special initialization/destruction function
863 recognized by collect2.
865 When we are going to build this function, collect all constructors and
866 destructors and turn them into normal functions. */
868 static void
869 record_cdtor_fn (struct cgraph_node *node)
871 if (DECL_STATIC_CONSTRUCTOR (node->decl))
872 static_ctors.safe_push (node->decl);
873 if (DECL_STATIC_DESTRUCTOR (node->decl))
874 static_dtors.safe_push (node->decl);
875 node = cgraph_node::get (node->decl);
876 DECL_DISREGARD_INLINE_LIMITS (node->decl) = 1;
879 /* Define global constructors/destructor functions for the CDTORS, of
880 which they are LEN. The CDTORS are sorted by initialization
881 priority. If CTOR_P is true, these are constructors; otherwise,
882 they are destructors.
NOTE(review): the `do` keyword of the inner do/while scan appears to
have been lost in extraction of this file — confirm against upstream. */
884 static void
885 build_cdtor (bool ctor_p, vec<tree> cdtors)
887 size_t i,j;
888 size_t len = cdtors.length ();
890 i = 0;
891 while (i < len)
893 tree body;
894 tree fn;
895 priority_type priority;
897 priority = 0;
898 body = NULL_TREE;
899 j = i;
/* Scan forward from I to find the end J of the run of cdtors sharing
   one priority.  */
902 priority_type p;
903 fn = cdtors[j];
904 p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
905 if (j == i)
906 priority = p;
907 else if (p != priority)
908 break;
909 j++;
911 while (j < len);
913 /* When there is only one cdtor and target supports them, do nothing. */
914 if (j == i + 1
915 && targetm.have_ctors_dtors)
917 i++;
918 continue;
920 /* Find the next batch of constructors/destructors with the same
921 initialization priority. */
922 for (;i < j; i++)
924 tree call;
925 fn = cdtors[i];
926 call = build_call_expr (fn, 0);
927 if (ctor_p)
928 DECL_STATIC_CONSTRUCTOR (fn) = 0;
929 else
930 DECL_STATIC_DESTRUCTOR (fn) = 0;
931 /* We do not want to optimize away pure/const calls here.
932 When optimizing, these should be already removed, when not
933 optimizing, we want user to be able to breakpoint in them. */
934 TREE_SIDE_EFFECTS (call) = 1;
935 append_to_statement_list (call, &body);
937 gcc_assert (body != NULL_TREE);
938 /* Generate a function to call all the function of like
939 priority. */
940 cgraph_build_static_cdtor_1 (ctor_p ? 'I' : 'D', body, priority, true);
944 /* Comparison function for qsort. P1 and P2 are actually of type
945 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
946 used to determine the sort order. */
948 static int
949 compare_ctor (const void *p1, const void *p2)
951 tree f1;
952 tree f2;
953 int priority1;
954 int priority2;
956 f1 = *(const tree *)p1;
957 f2 = *(const tree *)p2;
958 priority1 = DECL_INIT_PRIORITY (f1);
959 priority2 = DECL_INIT_PRIORITY (f2);
961 if (priority1 < priority2)
962 return -1;
963 else if (priority1 > priority2)
964 return 1;
965 else
966 /* Ensure a stable sort. Constructors are executed in backwarding
967 order to make LTO initialize braries first. */
968 return DECL_UID (f2) - DECL_UID (f1);
971 /* Comparison function for qsort. P1 and P2 are actually of type
972 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
973 used to determine the sort order. */
975 static int
976 compare_dtor (const void *p1, const void *p2)
978 tree f1;
979 tree f2;
980 int priority1;
981 int priority2;
983 f1 = *(const tree *)p1;
984 f2 = *(const tree *)p2;
985 priority1 = DECL_FINI_PRIORITY (f1);
986 priority2 = DECL_FINI_PRIORITY (f2);
988 if (priority1 < priority2)
989 return -1;
990 else if (priority1 > priority2)
991 return 1;
992 else
993 /* Ensure a stable sort. */
994 return DECL_UID (f1) - DECL_UID (f2);
997 /* Generate functions to call static constructors and destructors
998 for targets that do not support .ctors/.dtors sections. These
999 functions have magic names which are detected by collect2. */
1001 static void
1002 build_cdtor_fns (void)
1004 if (!static_ctors.is_empty ())
1006 gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
1007 static_ctors.qsort (compare_ctor);
1008 build_cdtor (/*ctor_p=*/true, static_ctors);
1011 if (!static_dtors.is_empty ())
1013 gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
1014 static_dtors.qsort (compare_dtor);
1015 build_cdtor (/*ctor_p=*/false, static_dtors);
1019 /* Look for constructors and destructors and produce function calling them.
1020 This is needed for targets not supporting ctors or dtors, but we perform the
1021 transformation also at linktime to merge possibly numerous
1022 constructors/destructors into single function to improve code locality and
1023 reduce size. */
1025 static unsigned int
1026 ipa_cdtor_merge (void)
1028 struct cgraph_node *node;
1029 FOR_EACH_DEFINED_FUNCTION (node)
1030 if (DECL_STATIC_CONSTRUCTOR (node->decl)
1031 || DECL_STATIC_DESTRUCTOR (node->decl))
1032 record_cdtor_fn (node);
1033 build_cdtor_fns ();
1034 static_ctors.release ();
1035 static_dtors.release ();
1036 return 0;
1039 namespace {
/* Pass descriptor for the cdtor-merging IPA pass; no summaries or
   transforms are needed, all work happens in execute.  */
1041 const pass_data pass_data_ipa_cdtor_merge =
1043 IPA_PASS, /* type */
1044 "cdtor", /* name */
1045 OPTGROUP_NONE, /* optinfo_flags */
1046 TV_CGRAPHOPT, /* tv_id */
1047 0, /* properties_required */
1048 0, /* properties_provided */
1049 0, /* properties_destroyed */
1050 0, /* todo_flags_start */
1051 0, /* todo_flags_finish */
1054 class pass_ipa_cdtor_merge : public ipa_opt_pass_d
1056 public:
1057 pass_ipa_cdtor_merge (gcc::context *ctxt)
1058 : ipa_opt_pass_d (pass_data_ipa_cdtor_merge, ctxt,
1059 NULL, /* generate_summary */
1060 NULL, /* write_summary */
1061 NULL, /* read_summary */
1062 NULL, /* write_optimization_summary */
1063 NULL, /* read_optimization_summary */
1064 NULL, /* stmt_fixup */
1065 0, /* function_transform_todo_flags_start */
1066 NULL, /* function_transform */
1067 NULL) /* variable_transform */
1070 /* opt_pass methods: */
1071 virtual bool gate (function *);
1072 virtual unsigned int execute (function *) { return ipa_cdtor_merge (); }
1074 }; // class pass_ipa_cdtor_merge
1076 bool
1077 pass_ipa_cdtor_merge::gate (function *)
1079 /* Perform the pass when we have no ctors/dtors support
1080 or at LTO time to merge multiple constructors into single
1081 function. */
1082 return !targetm.have_ctors_dtors || (optimize && in_lto_p);
1085 } // anon namespace
1087 ipa_opt_pass_d *
1088 make_pass_ipa_cdtor_merge (gcc::context *ctxt)
1090 return new pass_ipa_cdtor_merge (ctxt);
1093 /* Invalid pointer representing BOTTOM for single user dataflow. */
1094 #define BOTTOM ((cgraph_node *)(size_t) 2)
1096 /* Meet operation for single user dataflow.
1097 Here we want to associate variables with sigle function that may access it.
1099 FUNCTION is current single user of a variable, VAR is variable that uses it.
1100 Latttice is stored in SINGLE_USER_MAP.
1102 We represent:
1103 - TOP by no entry in SIGNLE_USER_MAP
1104 - BOTTOM by BOTTOM in AUX pointer (to save lookups)
1105 - known single user by cgraph pointer in SINGLE_USER_MAP. */
1107 cgraph_node *
1108 meet (cgraph_node *function, varpool_node *var,
1109 hash_map<varpool_node *, cgraph_node *> &single_user_map)
1111 struct cgraph_node *user, **f;
1113 if (var->aux == BOTTOM)
1114 return BOTTOM;
1116 f = single_user_map.get (var);
1117 if (!f)
1118 return function;
1119 user = *f;
1120 if (!function)
1121 return user;
1122 else if (function != user)
1123 return BOTTOM;
1124 else
1125 return function;
1128 /* Propagation step of single-use dataflow.
1130 Check all uses of VNODE and see if they are used by single function FUNCTION.
1131 SINGLE_USER_MAP represents the dataflow lattice. */
1133 cgraph_node *
1134 propagate_single_user (varpool_node *vnode, cgraph_node *function,
1135 hash_map<varpool_node *, cgraph_node *> &single_user_map)
1137 int i;
1138 struct ipa_ref *ref;
1140 gcc_assert (!vnode->externally_visible);
1142 /* If node is an alias, first meet with its target. */
1143 if (vnode->alias)
1144 function = meet (function, vnode->get_alias_target (), single_user_map);
1146 /* Check all users and see if they correspond to a single function. */
1147 for (i = 0; vnode->iterate_referring (i, ref) && function != BOTTOM; i++)
1149 struct cgraph_node *cnode = dyn_cast <cgraph_node *> (ref->referring);
1150 if (cnode)
1152 if (cnode->global.inlined_to)
1153 cnode = cnode->global.inlined_to;
1154 if (!function)
1155 function = cnode;
1156 else if (function != cnode)
1157 function = BOTTOM;
1159 else
1160 function = meet (function, dyn_cast <varpool_node *> (ref->referring), single_user_map);
1162 return function;
1165 /* Pass setting used_by_single_function flag.
1166 This flag is set on variable when there is only one function that may possibly
1167 referr to it. */
1169 static unsigned int
1170 ipa_single_use (void)
1172 varpool_node *first = (varpool_node *) (void *) 1;
1173 varpool_node *var;
1174 hash_map<varpool_node *, cgraph_node *> single_user_map;
1176 FOR_EACH_DEFINED_VARIABLE (var)
1177 if (!var->all_refs_explicit_p ())
1178 var->aux = BOTTOM;
1179 else
1181 /* Enqueue symbol for dataflow. */
1182 var->aux = first;
1183 first = var;
1186 /* The actual dataflow. */
1188 while (first != (void *) 1)
1190 cgraph_node *user, *orig_user, **f;
1192 var = first;
1193 first = (varpool_node *)first->aux;
1195 f = single_user_map.get (var);
1196 if (f)
1197 orig_user = *f;
1198 else
1199 orig_user = NULL;
1200 user = propagate_single_user (var, orig_user, single_user_map);
1202 gcc_checking_assert (var->aux != BOTTOM);
1204 /* If user differs, enqueue all references. */
1205 if (user != orig_user)
1207 unsigned int i;
1208 ipa_ref *ref;
1210 single_user_map.put (var, user);
1212 /* Enqueue all aliases for re-processing. */
1213 for (i = 0; var->iterate_referring (i, ref); i++)
1214 if (ref->use == IPA_REF_ALIAS
1215 && !ref->referring->aux)
1217 ref->referring->aux = first;
1218 first = dyn_cast <varpool_node *> (ref->referring);
1220 /* Enqueue all users for re-processing. */
1221 for (i = 0; var->iterate_reference (i, ref); i++)
1222 if (!ref->referred->aux
1223 && ref->referred->definition
1224 && is_a <varpool_node *> (ref->referred))
1226 ref->referred->aux = first;
1227 first = dyn_cast <varpool_node *> (ref->referred);
1230 /* If user is BOTTOM, just punt on this var. */
1231 if (user == BOTTOM)
1232 var->aux = BOTTOM;
1233 else
1234 var->aux = NULL;
1236 else
1237 var->aux = NULL;
1240 FOR_EACH_DEFINED_VARIABLE (var)
1242 if (var->aux != BOTTOM)
1244 #ifdef ENABLE_CHECKING
1245 if (!single_user_map.get (var))
1246 gcc_assert (single_user_map.get (var));
1247 #endif
1248 if (dump_file)
1250 fprintf (dump_file, "Variable %s/%i is used by single function\n",
1251 var->name (), var->order);
1253 var->used_by_single_function = true;
1255 var->aux = NULL;
1257 return 0;
1260 namespace {
1262 const pass_data pass_data_ipa_single_use =
1264 IPA_PASS, /* type */
1265 "single-use", /* name */
1266 OPTGROUP_NONE, /* optinfo_flags */
1267 TV_CGRAPHOPT, /* tv_id */
1268 0, /* properties_required */
1269 0, /* properties_provided */
1270 0, /* properties_destroyed */
1271 0, /* todo_flags_start */
1272 0, /* todo_flags_finish */
1275 class pass_ipa_single_use : public ipa_opt_pass_d
1277 public:
1278 pass_ipa_single_use (gcc::context *ctxt)
1279 : ipa_opt_pass_d (pass_data_ipa_single_use, ctxt,
1280 NULL, /* generate_summary */
1281 NULL, /* write_summary */
1282 NULL, /* read_summary */
1283 NULL, /* write_optimization_summary */
1284 NULL, /* read_optimization_summary */
1285 NULL, /* stmt_fixup */
1286 0, /* function_transform_todo_flags_start */
1287 NULL, /* function_transform */
1288 NULL) /* variable_transform */
1291 /* opt_pass methods: */
1292 virtual bool gate (function *);
1293 virtual unsigned int execute (function *) { return ipa_single_use (); }
1295 }; // class pass_ipa_single_use
1297 bool
1298 pass_ipa_single_use::gate (function *)
1300 return optimize;
1303 } // anon namespace
1305 ipa_opt_pass_d *
1306 make_pass_ipa_single_use (gcc::context *ctxt)
1308 return new pass_ipa_single_use (ctxt);