* config/i386/gnu-user.h (TARGET_CAN_SPLIT_STACK): Move from here ...
[official-gcc.git] / gcc / ipa.c
blobd7ec4978533e4da7359e79236eef90ebc4594309
1 /* Basic IPA optimizations and utilities.
2 Copyright (C) 2003-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "tree.h"
25 #include "calls.h"
26 #include "stringpool.h"
27 #include "predict.h"
28 #include "basic-block.h"
29 #include "hash-map.h"
30 #include "is-a.h"
31 #include "plugin-api.h"
32 #include "vec.h"
33 #include "hashtab.h"
34 #include "hash-set.h"
35 #include "machmode.h"
36 #include "hard-reg-set.h"
37 #include "input.h"
38 #include "function.h"
39 #include "ipa-ref.h"
40 #include "cgraph.h"
41 #include "tree-pass.h"
42 #include "gimple-expr.h"
43 #include "gimplify.h"
44 #include "flags.h"
45 #include "target.h"
46 #include "tree-iterator.h"
47 #include "ipa-utils.h"
48 #include "alloc-pool.h"
49 #include "ipa-prop.h"
50 #include "ipa-inline.h"
51 #include "tree-inline.h"
52 #include "profile.h"
53 #include "params.h"
54 #include "internal-fn.h"
55 #include "tree-ssa-alias.h"
56 #include "gimple.h"
57 #include "dbgcnt.h"
60 /* Return true when NODE has ADDR reference. */
62 static bool
63 has_addr_references_p (struct cgraph_node *node,
64 void *data ATTRIBUTE_UNUSED)
66 int i;
67 struct ipa_ref *ref = NULL;
69 for (i = 0; node->iterate_referring (i, ref); i++)
70 if (ref->use == IPA_REF_ADDR)
71 return true;
72 return false;
75 /* Look for all functions inlined to NODE and update their inlined_to pointers
76 to INLINED_TO. */
78 static void
79 update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
81 struct cgraph_edge *e;
82 for (e = node->callees; e; e = e->next_callee)
83 if (e->callee->global.inlined_to)
85 e->callee->global.inlined_to = inlined_to;
86 update_inlined_to_pointer (e->callee, inlined_to);
90 /* Add symtab NODE to queue starting at FIRST.
92 The queue is linked via AUX pointers and terminated by pointer to 1.
93 We enqueue nodes at two occasions: when we find them reachable or when we find
94 their bodies needed for further clonning. In the second case we mark them
95 by pointer to 2 after processing so they are re-queue when they become
96 reachable. */
98 static void
99 enqueue_node (symtab_node *node, symtab_node **first,
100 hash_set<symtab_node *> *reachable)
102 /* Node is still in queue; do nothing. */
103 if (node->aux && node->aux != (void *) 2)
104 return;
105 /* Node was already processed as unreachable, re-enqueue
106 only if it became reachable now. */
107 if (node->aux == (void *)2 && !reachable->contains (node))
108 return;
109 node->aux = *first;
110 *first = node;
/* Process all references of SNODE: enqueue every referred symbol and add
   to REACHABLE those whose definitions must be preserved.  FIRST is the
   head of the work queue.  BEFORE_INLINING_P is true while inlining may
   still happen, in which case bodies of external inline functions (and
   always_inline functions before IPA_SSA) are kept alive.  */

static void
process_references (symtab_node *snode,
		    symtab_node **first,
		    bool before_inlining_p,
		    hash_set<symtab_node *> *reachable)
{
  int i;
  struct ipa_ref *ref = NULL;
  for (i = 0; snode->iterate_reference (i, ref); i++)
    {
      symtab_node *node = ref->referred;
      symtab_node *body = node->ultimate_alias_target ();

      /* Keep the definition reachable when it is local to this unit
	 (or an alias), or when an external body is still useful for
	 inlining / constant folding purposes.  */
      if (node->definition && !node->in_other_partition
	  && ((!DECL_EXTERNAL (node->decl) || node->alias)
	      || (((before_inlining_p
		    && ((TREE_CODE (node->decl) != FUNCTION_DECL
			 && optimize)
			|| (TREE_CODE (node->decl) == FUNCTION_DECL
			    && opt_for_fn (body->decl, optimize))
			|| (symtab->state < IPA_SSA
			    && lookup_attribute
				 ("always_inline",
				  DECL_ATTRIBUTES (body->decl))))))
		  /* We use variable constructors during late compilation for
		     constant folding.  Keep references alive so partitioning
		     knows about potential references.  */
		  || (TREE_CODE (node->decl) == VAR_DECL
		      && flag_wpa
		      && ctor_for_folding (node->decl)
			 != error_mark_node))))
	{
	  /* Be sure that we will not optimize out alias target
	     body.  */
	  if (DECL_EXTERNAL (node->decl)
	      && node->alias
	      && before_inlining_p)
	    reachable->add (body);
	  reachable->add (node);
	}
      /* Even symbols kept only in the boundary must be queued so their
	 own references get walked.  */
      enqueue_node (node, first, reachable);
    }
}
/* EDGE is a polymorphic call.  If BEFORE_INLINING_P is set, mark
   all its potential targets as reachable to permit later inlining if
   devirtualization happens.  After inlining still keep their declarations
   around, so we can devirtualize to a direct call.

   Also try trivial devirtualization when no target or only one target is
   possible.  REACHABLE_CALL_TARGETS caches the polymorphic-call cache
   tokens already walked so each target list is processed only once.  */

static void
walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
			       struct cgraph_edge *edge,
			       symtab_node **first,
			       hash_set<symtab_node *> *reachable,
			       bool before_inlining_p)
{
  unsigned int i;
  void *cache_token;
  bool final;
  vec <cgraph_node *>targets
    = possible_polymorphic_call_targets
	(edge, &final, &cache_token);

  /* add returns false when the token was not yet in the set, i.e. this
     target list has not been walked before.  */
  if (!reachable_call_targets->add (cache_token))
    {
      for (i = 0; i < targets.length (); i++)
	{
	  struct cgraph_node *n = targets[i];

	  /* Do not bother to mark virtual methods in anonymous namespace;
	     either we will find use of virtual table defining it, or it is
	     unused.  */
	  if (TREE_CODE (TREE_TYPE (n->decl)) == METHOD_TYPE
	      && type_in_anonymous_namespace_p
		   (method_class_type (TREE_TYPE (n->decl))))
	    continue;

	  symtab_node *body = n->function_symbol ();

	  /* Prior inlining, keep alive bodies of possible targets for
	     devirtualization.  */
	  if (n->definition
	      && (before_inlining_p
		  && opt_for_fn (body->decl, optimize)
		  && opt_for_fn (body->decl, flag_devirtualize)))
	    {
	      /* Be sure that we will not optimize out alias target
		 body.  */
	      if (DECL_EXTERNAL (n->decl)
		  && n->alias
		  && before_inlining_p)
		reachable->add (body);
	      reachable->add (n);
	    }
	  /* Even after inlining we want to keep the possible targets in the
	     boundary, so late passes can still produce direct call even if
	     the chance for inlining is lost.  */
	  enqueue_node (n, first, reachable);
	}
    }

  /* Very trivial devirtualization; when the type is
     final or anonymous (so we know all its derivation)
     and there is only one possible virtual call target,
     make the edge direct.  */
  if (final)
    {
      if (targets.length () <= 1 && dbg_cnt (devirt))
	{
	  cgraph_node *target, *node = edge->caller;
	  if (targets.length () == 1)
	    target = targets[0];
	  else
	    /* No target at all: redirect to __builtin_unreachable.  */
	    target = cgraph_node::get_create
		       (builtin_decl_implicit (BUILT_IN_UNREACHABLE));

	  if (dump_enabled_p ())
	    {
	      location_t locus;
	      if (edge->call_stmt)
		locus = gimple_location (edge->call_stmt);
	      else
		locus = UNKNOWN_LOCATION;
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
			       "devirtualizing call in %s/%i to %s/%i\n",
			       edge->caller->name (), edge->caller->order,
			       target->name (),
			       target->order);
	    }
	  edge = edge->make_direct (target);
	  if (inline_summary_vec)
	    inline_update_overall_summary (node);
	  else if (edge->call_stmt)
	    {
	      edge->redirect_call_stmt_to_callee ();

	      /* Call to __builtin_unreachable shouldn't be instrumented.  */
	      if (!targets.length ())
		gimple_call_set_with_bounds (edge->call_stmt, false);
	    }
	}
    }
}
/* Perform reachability analysis and reclaim all unreachable nodes.

   The algorithm is basically mark&sweep but with some extra refinements:

   - reachable extern inline functions needs special handling; the bodies needs
     to stay in memory until inlining in hope that they will be inlined.
     After inlining we release their bodies and turn them into unanalyzed
     nodes even when they are reachable.

   - virtual functions are kept in callgraph even if they seem unreachable in
     hope calls to them will be devirtualized.

     Again we remove them after inlining.  In late optimization some
     devirtualization may happen, but it is not important since we won't inline
     the call.  In theory early opts and IPA should work out all important cases.

   - virtual clones needs bodies of their origins for later materialization;
     this means that we want to keep the body even if the origin is unreachable
     otherwise.  To avoid origin from sitting in the callgraph and being
     walked by IPA passes, we turn them into unanalyzed nodes with body
     defined.

     We maintain set of function declaration where body needs to stay in
     body_needed_for_clonning

     Inline clones represent special case: their declaration match the
     declaration of origin and cgraph_remove_node already knows how to
     reshape callgraph and preserve body when offline copy of function or
     inline clone is being removed.

   - C++ virtual tables keyed to other unit are represented as DECL_EXTERNAL
     variables with DECL_INITIAL set.  We finalize these and keep reachable
     ones around for constant folding purposes.  After inlining we however
     stop walking their references to let everything static referenced by them
     to be removed when it is otherwise unreachable.

   We maintain queue of both reachable symbols (i.e. defined symbols that needs
   to stay) and symbols that are in boundary (i.e. external symbols referenced
   by reachable symbols or origins of clones).  The queue is represented
   as linked list by AUX pointer terminated by 1.

   At the end we keep all reachable symbols.  For symbols in boundary we always
   turn definition into a declaration, but we may keep function body around
   based on body_needed_for_clonning

   All symbols that enter the queue have AUX pointer non-zero and are in the
   boundary.  Pointer set REACHABLE is used to track reachable symbols.

   Every symbol can be visited twice - once as part of boundary and once
   as real reachable symbol.  enqueue_node needs to decide whether the
   node needs to be re-queued for second processing.  For this purpose
   we set AUX pointer of processed symbols in the boundary to constant 2.  */

bool
symbol_table::remove_unreachable_nodes (FILE *file)
{
  symtab_node *first = (symtab_node *) (void *) 1;
  struct cgraph_node *node, *next;
  varpool_node *vnode, *vnext;
  bool changed = false;
  hash_set<symtab_node *> reachable;
  hash_set<tree> body_needed_for_clonning;
  hash_set<void *> reachable_call_targets;
  bool before_inlining_p = symtab->state < (!optimize ? IPA_SSA
					    : IPA_SSA_AFTER_INLINING);

  timevar_push (TV_IPA_UNREACHABLE);
  build_type_inheritance_graph ();
  if (file)
    fprintf (file, "\nReclaiming functions:");
#ifdef ENABLE_CHECKING
  /* AUX pointers must be clean on entry; they are used as queue links.  */
  FOR_EACH_FUNCTION (node)
    gcc_assert (!node->aux);
  FOR_EACH_VARIABLE (vnode)
    gcc_assert (!vnode->aux);
#endif
  /* Mark functions whose bodies are obviously needed.
     This is mostly when they can be referenced externally.  Inline clones
     are special since their declarations are shared with master clone and thus
     cgraph_can_remove_if_no_direct_calls_and_refs_p should not be called on them.  */
  FOR_EACH_FUNCTION (node)
    {
      node->used_as_abstract_origin = false;
      if (node->definition
	  && !node->global.inlined_to
	  && !node->in_other_partition
	  && !node->can_remove_if_no_direct_calls_and_refs_p ())
	{
	  gcc_assert (!node->global.inlined_to);
	  reachable.add (node);
	  enqueue_node (node, &first, &reachable);
	}
      else
	gcc_assert (!node->aux);
    }

  /* Mark variables that are obviously needed.  */
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (!vnode->can_remove_if_no_refs_p()
	&& !vnode->in_other_partition)
      {
	reachable.add (vnode);
	enqueue_node (vnode, &first, &reachable);
      }

  /* Perform reachability analysis.  */
  while (first != (symtab_node *) (void *) 1)
    {
      bool in_boundary_p = !reachable.contains (first);
      symtab_node *node = first;

      first = (symtab_node *)first->aux;

      /* If we are processing symbol in boundary, mark its AUX pointer for
	 possible later re-processing in enqueue_node.  */
      if (in_boundary_p)
	node->aux = (void *)2;
      else
	{
	  /* A reachable function pulls its abstract origin (and the
	     origin's matching clones) into the boundary so debug info
	     stays consistent.  */
	  if (TREE_CODE (node->decl) == FUNCTION_DECL
	      && DECL_ABSTRACT_ORIGIN (node->decl))
	    {
	      struct cgraph_node *origin_node
		= cgraph_node::get (DECL_ABSTRACT_ORIGIN (node->decl));
	      if (origin_node && !origin_node->used_as_abstract_origin)
		{
		  origin_node->used_as_abstract_origin = true;
		  gcc_assert (!origin_node->prev_sibling_clone);
		  gcc_assert (!origin_node->next_sibling_clone);
		  for (cgraph_node *n = origin_node->clones; n;
		       n = n->next_sibling_clone)
		    if (n->decl == DECL_ABSTRACT_ORIGIN (node->decl))
		      n->used_as_abstract_origin = true;
		  enqueue_node (origin_node, &first, &reachable);
		}
	    }
	  /* If any symbol in a comdat group is reachable, force
	     all externally visible symbols in the same comdat
	     group to be reachable as well.  Comdat-local symbols
	     can be discarded if all uses were inlined.  */
	  if (node->same_comdat_group)
	    {
	      symtab_node *next;
	      for (next = node->same_comdat_group;
		   next != node;
		   next = next->same_comdat_group)
		if (!next->comdat_local_p ()
		    && !reachable.add (next))
		  enqueue_node (next, &first, &reachable);
	    }
	  /* Mark references as reachable.  */
	  process_references (node, &first, before_inlining_p, &reachable);
	}

      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
	{
	  /* Mark the callees reachable unless they are direct calls to extern
	     inline functions we decided to not inline.  */
	  if (!in_boundary_p)
	    {
	      struct cgraph_edge *e;
	      /* Keep alive possible targets for devirtualization.  */
	      if (opt_for_fn (cnode->decl, optimize)
		  && opt_for_fn (cnode->decl, flag_devirtualize))
		{
		  struct cgraph_edge *next;
		  for (e = cnode->indirect_calls; e; e = next)
		    {
		      next = e->next_callee;
		      if (e->indirect_info->polymorphic)
			walk_polymorphic_call_targets (&reachable_call_targets,
						       e, &first, &reachable,
						       before_inlining_p);
		    }
		}
	      for (e = cnode->callees; e; e = e->next_callee)
		{
		  symtab_node *body = e->callee->function_symbol ();
		  if (e->callee->definition
		      && !e->callee->in_other_partition
		      && (!e->inline_failed
			  || !DECL_EXTERNAL (e->callee->decl)
			  || e->callee->alias
			  || (before_inlining_p
			      && (opt_for_fn (body->decl, optimize)
				  || (symtab->state < IPA_SSA
				      && lookup_attribute
					   ("always_inline",
					    DECL_ATTRIBUTES (body->decl)))))))
		    {
		      /* Be sure that we will not optimize out alias target
			 body.  */
		      if (DECL_EXTERNAL (e->callee->decl)
			  && e->callee->alias
			  && before_inlining_p)
			reachable.add (body);
		      reachable.add (e->callee);
		    }
		  enqueue_node (e->callee, &first, &reachable);
		}

	      /* When inline clone exists, mark body to be preserved so when removing
		 offline copy of the function we don't kill it.  */
	      if (cnode->global.inlined_to)
		body_needed_for_clonning.add (cnode->decl);

	      /* For non-inline clones, force their origins to the boundary and ensure
		 that body is not removed.  */
	      while (cnode->clone_of)
		{
		  bool noninline = cnode->clone_of->decl != cnode->decl;
		  cnode = cnode->clone_of;
		  if (noninline)
		    {
		      body_needed_for_clonning.add (cnode->decl);
		      enqueue_node (cnode, &first, &reachable);
		    }
		}
	    }
	  /* If any reachable function has simd clones, mark them as
	     reachable as well.  */
	  if (cnode->simd_clones)
	    {
	      cgraph_node *next;
	      for (next = cnode->simd_clones;
		   next;
		   next = next->simdclone->next_clone)
		if (in_boundary_p
		    || !reachable.add (next))
		  enqueue_node (next, &first, &reachable);
	    }
	}
      /* When we see constructor of external variable, keep referred nodes in the
	 boundary.  This will also hold initializers of the external vars NODE
	 refers to.  */
      varpool_node *vnode = dyn_cast <varpool_node *> (node);
      if (vnode
	  && DECL_EXTERNAL (node->decl)
	  && !vnode->alias
	  && in_boundary_p)
	{
	  struct ipa_ref *ref = NULL;
	  for (int i = 0; node->iterate_reference (i, ref); i++)
	    enqueue_node (ref->referred, &first, &reachable);
	}
    }

  /* Remove unreachable functions.  */
  for (node = first_function (); node; node = next)
    {
      next = next_function (node);

      /* If node is not needed at all, remove it.  */
      if (!node->aux)
	{
	  if (file)
	    fprintf (file, " %s/%i", node->name (), node->order);
	  node->remove ();
	  changed = true;
	}
      /* If node is unreachable, remove its body.  */
      else if (!reachable.contains (node))
	{
	  if (!body_needed_for_clonning.contains (node->decl))
	    node->release_body ();
	  else if (!node->clone_of)
	    gcc_assert (in_lto_p || DECL_RESULT (node->decl));
	  if (node->definition)
	    {
	      if (file)
		fprintf (file, " %s/%i", node->name (), node->order);
	      /* Turn the definition into a plain declaration kept in the
		 boundary.  */
	      node->body_removed = true;
	      node->analyzed = false;
	      node->definition = false;
	      node->cpp_implicit_alias = false;
	      node->alias = false;
	      node->thunk.thunk_p = false;
	      node->weakref = false;
	      /* After early inlining we drop always_inline attributes on
		 bodies of functions that are still referenced (have their
		 address taken).  */
	      DECL_ATTRIBUTES (node->decl)
		= remove_attribute ("always_inline",
				    DECL_ATTRIBUTES (node->decl));
	      if (!node->in_other_partition)
		node->local.local = false;
	      node->remove_callees ();
	      node->remove_from_same_comdat_group ();
	      node->remove_all_references ();
	      changed = true;
	      if (node->thunk.thunk_p
		  && node->thunk.add_pointer_bounds_args)
		{
		  node->thunk.thunk_p = false;
		  node->thunk.add_pointer_bounds_args = false;
		}
	    }
	}
      else
	gcc_assert (node->clone_of || !node->has_gimple_body_p ()
		    || in_lto_p || DECL_RESULT (node->decl));
    }

  /* Inline clones might be kept around so their materializing allows further
     cloning.  If the function the clone is inlined into is removed, we need
     to turn it into normal clone.  */
  FOR_EACH_FUNCTION (node)
    {
      if (node->global.inlined_to
	  && !node->callers)
	{
	  gcc_assert (node->clones);
	  node->global.inlined_to = NULL;
	  update_inlined_to_pointer (node, node);
	}
      node->aux = NULL;
    }

  /* Remove unreachable variables.  */
  if (file)
    fprintf (file, "\nReclaiming variables:");
  for (vnode = first_variable (); vnode; vnode = vnext)
    {
      vnext = next_variable (vnode);
      if (!vnode->aux
	  /* For can_refer_decl_in_current_unit_p we want to track for
	     all external variables if they are defined in other partition
	     or not.  */
	  && (!flag_ltrans || !DECL_EXTERNAL (vnode->decl)))
	{
	  if (file)
	    fprintf (file, " %s/%i", vnode->name (), vnode->order);
	  vnode->remove ();
	  changed = true;
	}
      else if (!reachable.contains (vnode))
	{
	  tree init;
	  if (vnode->definition)
	    {
	      if (file)
		fprintf (file, " %s", vnode->name ());
	      changed = true;
	    }
	  /* Keep body if it may be useful for constant folding.  */
	  if ((init = ctor_for_folding (vnode->decl)) == error_mark_node
	      && !POINTER_BOUNDS_P (vnode->decl))
	    vnode->remove_initializer ();
	  else
	    DECL_INITIAL (vnode->decl) = init;
	  vnode->body_removed = true;
	  vnode->definition = false;
	  vnode->analyzed = false;
	  vnode->aux = NULL;

	  vnode->remove_from_same_comdat_group ();

	  vnode->remove_all_references ();
	}
      else
	vnode->aux = NULL;
    }

  /* Now update address_taken flags and try to promote functions to be local.  */
  if (file)
    fprintf (file, "\nClearing address taken flags:");
  FOR_EACH_DEFINED_FUNCTION (node)
    if (node->address_taken
	&& !node->used_from_other_partition)
      {
	if (!node->call_for_symbol_thunks_and_aliases
	      (has_addr_references_p, NULL, true)
	    && (!node->instrumentation_clone
		|| !node->instrumented_version
		|| !node->instrumented_version->address_taken))
	  {
	    if (file)
	      fprintf (file, " %s", node->name ());
	    node->address_taken = false;
	    changed = true;
	    if (node->local_p ())
	      {
		node->local.local = true;
		if (file)
		  fprintf (file, " (local)");
	      }
	  }
      }
  if (file)
    fprintf (file, "\n");

#ifdef ENABLE_CHECKING
  symtab_node::verify_symtab_nodes ();
#endif

  /* If we removed something, perhaps profile could be improved.  */
  if (changed && optimize && inline_edge_summary_vec.exists ())
    FOR_EACH_DEFINED_FUNCTION (node)
      ipa_propagate_frequency (node);

  timevar_pop (TV_IPA_UNREACHABLE);
  return changed;
}
/* Process references to VNODE and set flags WRITTEN, ADDRESS_TAKEN, READ
   as needed, also clear EXPLICIT_REFS if the references to given variable
   do not need to be explicit (the variable is volatile or not all of its
   references are visible).  Recurses through aliases so the flags cover
   uses of the whole alias set.  */

void
process_references (varpool_node *vnode,
		    bool *written, bool *address_taken,
		    bool *read, bool *explicit_refs)
{
  int i;
  struct ipa_ref *ref;

  if (!vnode->all_refs_explicit_p ()
      || TREE_THIS_VOLATILE (vnode->decl))
    *explicit_refs = false;

  /* Stop early once explicit tracking failed or every flag is already
     set; nothing more can be learned.  */
  for (i = 0; vnode->iterate_referring (i, ref)
	      && *explicit_refs && (!*written || !*address_taken || !*read); i++)
    switch (ref->use)
      {
      case IPA_REF_ADDR:
	*address_taken = true;
	break;
      case IPA_REF_LOAD:
	*read = true;
	break;
      case IPA_REF_STORE:
	*written = true;
	break;
      case IPA_REF_ALIAS:
	/* Uses reaching VNODE through an alias: recurse on the aliasing
	   node so its referrers are accounted too.  */
	process_references (dyn_cast<varpool_node *> (ref->referring), written,
			    address_taken, read, explicit_refs);
	break;
      case IPA_REF_CHKP:
	gcc_unreachable ();
      }
}
705 /* Set TREE_READONLY bit. */
707 bool
708 set_readonly_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
710 TREE_READONLY (vnode->decl) = true;
711 return false;
714 /* Set writeonly bit and clear the initalizer, since it will not be needed. */
716 bool
717 set_writeonly_bit (varpool_node *vnode, void *data)
719 vnode->writeonly = true;
720 if (optimize)
722 DECL_INITIAL (vnode->decl) = NULL;
723 if (!vnode->alias)
725 if (vnode->num_references ())
726 *(bool *)data = true;
727 vnode->remove_all_references ();
730 return false;
733 /* Clear addressale bit of VNODE. */
735 bool
736 clear_addressable_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
738 vnode->address_taken = false;
739 TREE_ADDRESSABLE (vnode->decl) = 0;
740 return false;
/* Discover variables that have no longer address taken or that are read only
   and update their flags.

   Return true when unreachable symbol removal should be done afterwards.

   FIXME: This can not be done in between gimplify and omp_expand since
   readonly flag plays role on what is shared and what is not.  Currently we do
   this transformation as part of whole program visibility and re-do at
   ipa-reference pass (to take into account cloning), but it would
   make sense to do it before early optimizations.  */

bool
ipa_discover_readonly_nonaddressable_vars (void)
{
  bool remove_p = false;
  varpool_node *vnode;
  if (dump_file)
    fprintf (dump_file, "Clearing variable flags:");
  FOR_EACH_VARIABLE (vnode)
    /* Only consider non-alias variables where some flag could still be
       improved.  */
    if (!vnode->alias
	&& (TREE_ADDRESSABLE (vnode->decl)
	    || !vnode->writeonly
	    || !TREE_READONLY (vnode->decl)))
      {
	bool written = false;
	bool address_taken = false;
	bool read = false;
	bool explicit_refs = true;

	process_references (vnode, &written, &address_taken, &read,
			    &explicit_refs);
	/* Bail out unless every reference is visible.  */
	if (!explicit_refs)
	  continue;
	if (!address_taken)
	  {
	    if (TREE_ADDRESSABLE (vnode->decl) && dump_file)
	      fprintf (dump_file, " %s (non-addressable)", vnode->name ());
	    vnode->call_for_node_and_aliases (clear_addressable_bit, NULL,
					      true);
	  }
	if (!address_taken && !written
	    /* Making variable in explicit section readonly can cause section
	       type conflict.
	       See e.g. gcc.c-torture/compile/pr23237.c */
	    && vnode->get_section () == NULL)
	  {
	    if (!TREE_READONLY (vnode->decl) && dump_file)
	      fprintf (dump_file, " %s (read-only)", vnode->name ());
	    vnode->call_for_node_and_aliases (set_readonly_bit, NULL, true);
	  }
	if (!vnode->writeonly && !read && !address_taken && written)
	  {
	    if (dump_file)
	      fprintf (dump_file, " %s (write-only)", vnode->name ());
	    vnode->call_for_node_and_aliases (set_writeonly_bit, &remove_p,
					      true);
	  }
      }
  if (dump_file)
    fprintf (dump_file, "\n");
  return remove_p;
}
/* Simple IPA pass that frees the inline summary once it is no longer
   needed.  */

namespace {

const pass_data pass_data_ipa_free_inline_summary =
{
  SIMPLE_IPA_PASS, /* type */
  "free-inline-summary", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_FREE_INLINE_SUMMARY, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  /* Early optimizations may make function unreachable.  We can not
     remove unreachable functions as part of the early opts pass because
     TODOs are run before subpasses.  Do it here.  */
  ( TODO_remove_functions | TODO_dump_symtab ), /* todo_flags_finish */
};

class pass_ipa_free_inline_summary : public simple_ipa_opt_pass
{
public:
  pass_ipa_free_inline_summary (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_ipa_free_inline_summary, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      inline_free_summary ();
      return 0;
    }

}; // class pass_ipa_free_inline_summary

} // anon namespace
/* Factory for the free-inline-summary pass; the pass manager owns the
   returned object.  */

simple_ipa_opt_pass *
make_pass_ipa_free_inline_summary (gcc::context *ctxt)
{
  return new pass_ipa_free_inline_summary (ctxt);
}
/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor), 'D' (for a destructor), 'P'
   (for chkp static vars constructor) or 'B' (for chkp static bounds
   constructor).  BODY is a STATEMENT_LIST containing GENERIC
   statements.  PRIORITY is the initialization priority for this
   constructor or destructor.

   FINAL specify whether the externally visible name for collect2 should
   be produced.  */

static void
cgraph_build_static_cdtor_1 (char which, tree body, int priority, bool final)
{
  static int counter = 0;
  char which_buf[16];
  tree decl, name, resdecl;

  /* The priority is encoded in the constructor or destructor name.
     collect2 will sort the names and arrange that they are called at
     program startup.  */
  if (final)
    sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
  else
    /* Produce sane name but one not recognizable by collect2, just for the
       case we fail to inline the function.  */
    sprintf (which_buf, "sub_%c_%.5d_%d", which, priority, counter++);
  name = get_file_function_name (which_buf);

  /* Build a void(void) FUNCTION_DECL holding BODY.  */
  decl = build_decl (input_location, FUNCTION_DECL, name,
		     build_function_type_list (void_type_node, NULL_TREE));
  current_function_decl = decl;

  resdecl = build_decl (input_location,
			RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;
  DECL_CONTEXT (resdecl) = decl;

  allocate_struct_function (decl, false);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  if (!targetm.have_ctors_dtors && final)
    {
      /* Without native ctor/dtor support, collect2 must be able to see
	 and keep this symbol.  */
      TREE_PUBLIC (decl) = 1;
      DECL_PRESERVE_P (decl) = 1;
    }
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      decl_init_priority_insert (decl, priority);
      break;
    case 'P':
      /* chkp static vars constructor: tagged "chkp ctor".  */
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("chkp ctor"),
					  NULL,
					  NULL_TREE);
      decl_init_priority_insert (decl, priority);
      break;
    case 'B':
      /* chkp static bounds constructor: tagged "bnd_legacy".  */
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("bnd_legacy"),
					  NULL,
					  NULL_TREE);
      decl_init_priority_insert (decl, priority);
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      decl_fini_priority_insert (decl, priority);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  cgraph_node::add_new_function (decl, false);

  set_cfun (NULL);
  current_function_decl = NULL;
}
/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor), 'D' (for a destructor), 'P'
   (for chkp static vars constructor) or 'B' (for chkp static bounds
   constructor).  BODY is a STATEMENT_LIST containing GENERIC
   statements.  PRIORITY is the initialization priority for this
   constructor or destructor.  The collect2-visible name is not
   produced (FINAL is false).  */

void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  cgraph_build_static_cdtor_1 (which, body, priority, false);
}
/* A vector of FUNCTION_DECLs declared as static constructors.  */
static vec<tree> static_ctors;
/* A vector of FUNCTION_DECLs declared as static destructors.  */
static vec<tree> static_dtors;

/* When target does not have ctors and dtors, we call all constructor
   and destructor by special initialization/destruction function
   recognized by collect2.

   When we are going to build this function, collect all constructors and
   destructors and turn them into normal functions.  */

static void
record_cdtor_fn (struct cgraph_node *node)
{
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
    static_ctors.safe_push (node->decl);
  if (DECL_STATIC_DESTRUCTOR (node->decl))
    static_dtors.safe_push (node->decl);
  node = cgraph_node::get (node->decl);
  /* NOTE(review): presumably disregarding inline limits lets the cdtor
     be inlined into the generated wrapper function -- confirm.  */
  DECL_DISREGARD_INLINE_LIMITS (node->decl) = 1;
}
/* Define global constructors/destructor functions for the CDTORS, of
   which they are LEN.  The CDTORS are sorted by initialization
   priority.  If CTOR_P is true, these are constructors; otherwise,
   they are destructors.  */

static void
build_cdtor (bool ctor_p, vec<tree> cdtors)
{
  size_t i,j;
  size_t len = cdtors.length ();

  i = 0;
  while (i < len)
    {
      tree body;
      tree fn;
      priority_type priority;

      priority = 0;
      body = NULL_TREE;
      j = i;
      /* Scan forward to find the run [i, j) of cdtors sharing the same
	 priority as cdtors[i].  */
      do
	{
	  priority_type p;
	  fn = cdtors[j];
	  p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
	  if (j == i)
	    priority = p;
	  else if (p != priority)
	    break;
	  j++;
	}
      while (j < len);

      /* When there is only one cdtor and target supports them, do nothing.  */
      if (j == i + 1
	  && targetm.have_ctors_dtors)
	{
	  i++;
	  continue;
	}
      /* Find the next batch of constructors/destructors with the same
	 initialization priority.  */
      for (;i < j; i++)
	{
	  tree call;
	  fn = cdtors[i];
	  call = build_call_expr (fn, 0);
	  if (ctor_p)
	    DECL_STATIC_CONSTRUCTOR (fn) = 0;
	  else
	    DECL_STATIC_DESTRUCTOR (fn) = 0;
	  /* We do not want to optimize away pure/const calls here.
	     When optimizing, these should be already removed, when not
	     optimizing, we want user to be able to breakpoint in them.  */
	  TREE_SIDE_EFFECTS (call) = 1;
	  append_to_statement_list (call, &body);
	}
      gcc_assert (body != NULL_TREE);
      /* Generate a function to call all the function of like
	 priority.  */
      cgraph_build_static_cdtor_1 (ctor_p ? 'I' : 'D', body, priority, true);
    }
}
1045 /* Comparison function for qsort. P1 and P2 are actually of type
1046 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
1047 used to determine the sort order. */
1049 static int
1050 compare_ctor (const void *p1, const void *p2)
1052 tree f1;
1053 tree f2;
1054 int priority1;
1055 int priority2;
1057 f1 = *(const tree *)p1;
1058 f2 = *(const tree *)p2;
1059 priority1 = DECL_INIT_PRIORITY (f1);
1060 priority2 = DECL_INIT_PRIORITY (f2);
1062 if (priority1 < priority2)
1063 return -1;
1064 else if (priority1 > priority2)
1065 return 1;
1066 else
1067 /* Ensure a stable sort. Constructors are executed in backwarding
1068 order to make LTO initialize braries first. */
1069 return DECL_UID (f2) - DECL_UID (f1);
1072 /* Comparison function for qsort. P1 and P2 are actually of type
1073 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
1074 used to determine the sort order. */
1076 static int
1077 compare_dtor (const void *p1, const void *p2)
1079 tree f1;
1080 tree f2;
1081 int priority1;
1082 int priority2;
1084 f1 = *(const tree *)p1;
1085 f2 = *(const tree *)p2;
1086 priority1 = DECL_FINI_PRIORITY (f1);
1087 priority2 = DECL_FINI_PRIORITY (f2);
1089 if (priority1 < priority2)
1090 return -1;
1091 else if (priority1 > priority2)
1092 return 1;
1093 else
1094 /* Ensure a stable sort. */
1095 return DECL_UID (f1) - DECL_UID (f2);
1098 /* Generate functions to call static constructors and destructors
1099 for targets that do not support .ctors/.dtors sections. These
1100 functions have magic names which are detected by collect2. */
1102 static void
1103 build_cdtor_fns (void)
1105 if (!static_ctors.is_empty ())
1107 gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
1108 static_ctors.qsort (compare_ctor);
1109 build_cdtor (/*ctor_p=*/true, static_ctors);
1112 if (!static_dtors.is_empty ())
1114 gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
1115 static_dtors.qsort (compare_dtor);
1116 build_cdtor (/*ctor_p=*/false, static_dtors);
1120 /* Look for constructors and destructors and produce function calling them.
1121 This is needed for targets not supporting ctors or dtors, but we perform the
1122 transformation also at linktime to merge possibly numerous
1123 constructors/destructors into single function to improve code locality and
1124 reduce size. */
1126 static unsigned int
1127 ipa_cdtor_merge (void)
1129 struct cgraph_node *node;
1130 FOR_EACH_DEFINED_FUNCTION (node)
1131 if (DECL_STATIC_CONSTRUCTOR (node->decl)
1132 || DECL_STATIC_DESTRUCTOR (node->decl))
1133 record_cdtor_fn (node);
1134 build_cdtor_fns ();
1135 static_ctors.release ();
1136 static_dtors.release ();
1137 return 0;
1140 namespace {
1142 const pass_data pass_data_ipa_cdtor_merge =
1144 IPA_PASS, /* type */
1145 "cdtor", /* name */
1146 OPTGROUP_NONE, /* optinfo_flags */
1147 TV_CGRAPHOPT, /* tv_id */
1148 0, /* properties_required */
1149 0, /* properties_provided */
1150 0, /* properties_destroyed */
1151 0, /* todo_flags_start */
1152 0, /* todo_flags_finish */
1155 class pass_ipa_cdtor_merge : public ipa_opt_pass_d
1157 public:
1158 pass_ipa_cdtor_merge (gcc::context *ctxt)
1159 : ipa_opt_pass_d (pass_data_ipa_cdtor_merge, ctxt,
1160 NULL, /* generate_summary */
1161 NULL, /* write_summary */
1162 NULL, /* read_summary */
1163 NULL, /* write_optimization_summary */
1164 NULL, /* read_optimization_summary */
1165 NULL, /* stmt_fixup */
1166 0, /* function_transform_todo_flags_start */
1167 NULL, /* function_transform */
1168 NULL) /* variable_transform */
1171 /* opt_pass methods: */
1172 virtual bool gate (function *);
1173 virtual unsigned int execute (function *) { return ipa_cdtor_merge (); }
1175 }; // class pass_ipa_cdtor_merge
1177 bool
1178 pass_ipa_cdtor_merge::gate (function *)
1180 /* Perform the pass when we have no ctors/dtors support
1181 or at LTO time to merge multiple constructors into single
1182 function. */
1183 return !targetm.have_ctors_dtors || (optimize && in_lto_p);
1186 } // anon namespace
1188 ipa_opt_pass_d *
1189 make_pass_ipa_cdtor_merge (gcc::context *ctxt)
1191 return new pass_ipa_cdtor_merge (ctxt);
1194 /* Invalid pointer representing BOTTOM for single user dataflow. */
1195 #define BOTTOM ((cgraph_node *)(size_t) 2)
1197 /* Meet operation for single user dataflow.
1198 Here we want to associate variables with sigle function that may access it.
1200 FUNCTION is current single user of a variable, VAR is variable that uses it.
1201 Latttice is stored in SINGLE_USER_MAP.
1203 We represent:
1204 - TOP by no entry in SIGNLE_USER_MAP
1205 - BOTTOM by BOTTOM in AUX pointer (to save lookups)
1206 - known single user by cgraph pointer in SINGLE_USER_MAP. */
1208 cgraph_node *
1209 meet (cgraph_node *function, varpool_node *var,
1210 hash_map<varpool_node *, cgraph_node *> &single_user_map)
1212 struct cgraph_node *user, **f;
1214 if (var->aux == BOTTOM)
1215 return BOTTOM;
1217 f = single_user_map.get (var);
1218 if (!f)
1219 return function;
1220 user = *f;
1221 if (!function)
1222 return user;
1223 else if (function != user)
1224 return BOTTOM;
1225 else
1226 return function;
1229 /* Propagation step of single-use dataflow.
1231 Check all uses of VNODE and see if they are used by single function FUNCTION.
1232 SINGLE_USER_MAP represents the dataflow lattice. */
1234 cgraph_node *
1235 propagate_single_user (varpool_node *vnode, cgraph_node *function,
1236 hash_map<varpool_node *, cgraph_node *> &single_user_map)
1238 int i;
1239 struct ipa_ref *ref;
1241 gcc_assert (!vnode->externally_visible);
1243 /* If node is an alias, first meet with its target. */
1244 if (vnode->alias)
1245 function = meet (function, vnode->get_alias_target (), single_user_map);
1247 /* Check all users and see if they correspond to a single function. */
1248 for (i = 0; vnode->iterate_referring (i, ref) && function != BOTTOM; i++)
1250 struct cgraph_node *cnode = dyn_cast <cgraph_node *> (ref->referring);
1251 if (cnode)
1253 if (cnode->global.inlined_to)
1254 cnode = cnode->global.inlined_to;
1255 if (!function)
1256 function = cnode;
1257 else if (function != cnode)
1258 function = BOTTOM;
1260 else
1261 function = meet (function, dyn_cast <varpool_node *> (ref->referring),
1262 single_user_map);
1264 return function;
1267 /* Pass setting used_by_single_function flag.
1268 This flag is set on variable when there is only one function that may
1269 possibly referr to it. */
1271 static unsigned int
1272 ipa_single_use (void)
1274 varpool_node *first = (varpool_node *) (void *) 1;
1275 varpool_node *var;
1276 hash_map<varpool_node *, cgraph_node *> single_user_map;
1278 FOR_EACH_DEFINED_VARIABLE (var)
1279 if (!var->all_refs_explicit_p ())
1280 var->aux = BOTTOM;
1281 else
1283 /* Enqueue symbol for dataflow. */
1284 var->aux = first;
1285 first = var;
1288 /* The actual dataflow. */
1290 while (first != (void *) 1)
1292 cgraph_node *user, *orig_user, **f;
1294 var = first;
1295 first = (varpool_node *)first->aux;
1297 f = single_user_map.get (var);
1298 if (f)
1299 orig_user = *f;
1300 else
1301 orig_user = NULL;
1302 user = propagate_single_user (var, orig_user, single_user_map);
1304 gcc_checking_assert (var->aux != BOTTOM);
1306 /* If user differs, enqueue all references. */
1307 if (user != orig_user)
1309 unsigned int i;
1310 ipa_ref *ref;
1312 single_user_map.put (var, user);
1314 /* Enqueue all aliases for re-processing. */
1315 for (i = 0; var->iterate_referring (i, ref); i++)
1316 if (ref->use == IPA_REF_ALIAS
1317 && !ref->referring->aux)
1319 ref->referring->aux = first;
1320 first = dyn_cast <varpool_node *> (ref->referring);
1322 /* Enqueue all users for re-processing. */
1323 for (i = 0; var->iterate_reference (i, ref); i++)
1324 if (!ref->referred->aux
1325 && ref->referred->definition
1326 && is_a <varpool_node *> (ref->referred))
1328 ref->referred->aux = first;
1329 first = dyn_cast <varpool_node *> (ref->referred);
1332 /* If user is BOTTOM, just punt on this var. */
1333 if (user == BOTTOM)
1334 var->aux = BOTTOM;
1335 else
1336 var->aux = NULL;
1338 else
1339 var->aux = NULL;
1342 FOR_EACH_DEFINED_VARIABLE (var)
1344 if (var->aux != BOTTOM)
1346 #ifdef ENABLE_CHECKING
1347 /* Not having the single user known means that the VAR is
1348 unreachable. Either someone forgot to remove unreachable
1349 variables or the reachability here is wrong. */
1351 gcc_assert (single_user_map.get (var));
1352 #endif
1353 if (dump_file)
1355 fprintf (dump_file, "Variable %s/%i is used by single function\n",
1356 var->name (), var->order);
1358 var->used_by_single_function = true;
1360 var->aux = NULL;
1362 return 0;
1365 namespace {
1367 const pass_data pass_data_ipa_single_use =
1369 IPA_PASS, /* type */
1370 "single-use", /* name */
1371 OPTGROUP_NONE, /* optinfo_flags */
1372 TV_CGRAPHOPT, /* tv_id */
1373 0, /* properties_required */
1374 0, /* properties_provided */
1375 0, /* properties_destroyed */
1376 0, /* todo_flags_start */
1377 0, /* todo_flags_finish */
1380 class pass_ipa_single_use : public ipa_opt_pass_d
1382 public:
1383 pass_ipa_single_use (gcc::context *ctxt)
1384 : ipa_opt_pass_d (pass_data_ipa_single_use, ctxt,
1385 NULL, /* generate_summary */
1386 NULL, /* write_summary */
1387 NULL, /* read_summary */
1388 NULL, /* write_optimization_summary */
1389 NULL, /* read_optimization_summary */
1390 NULL, /* stmt_fixup */
1391 0, /* function_transform_todo_flags_start */
1392 NULL, /* function_transform */
1393 NULL) /* variable_transform */
1396 /* opt_pass methods: */
1397 virtual bool gate (function *);
1398 virtual unsigned int execute (function *) { return ipa_single_use (); }
1400 }; // class pass_ipa_single_use
1402 bool
1403 pass_ipa_single_use::gate (function *)
1405 return optimize;
1408 } // anon namespace
1410 ipa_opt_pass_d *
1411 make_pass_ipa_single_use (gcc::context *ctxt)
1413 return new pass_ipa_single_use (ctxt);