[AArch64] PR target/65491: Classify V1TF vectors as AAPCS64 short vectors rather...
[official-gcc.git] / gcc / ipa.c
blobb3752de5e1b9c537a309e5ad43ef27d775d1e729
1 /* Basic IPA optimizations and utilities.
2 Copyright (C) 2003-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "hash-set.h"
25 #include "machmode.h"
26 #include "vec.h"
27 #include "double-int.h"
28 #include "input.h"
29 #include "alias.h"
30 #include "symtab.h"
31 #include "options.h"
32 #include "wide-int.h"
33 #include "inchash.h"
34 #include "tree.h"
35 #include "fold-const.h"
36 #include "calls.h"
37 #include "stringpool.h"
38 #include "predict.h"
39 #include "basic-block.h"
40 #include "hash-map.h"
41 #include "is-a.h"
42 #include "plugin-api.h"
43 #include "hard-reg-set.h"
44 #include "input.h"
45 #include "function.h"
46 #include "ipa-ref.h"
47 #include "cgraph.h"
48 #include "tree-pass.h"
49 #include "gimple-expr.h"
50 #include "gimplify.h"
51 #include "flags.h"
52 #include "target.h"
53 #include "tree-iterator.h"
54 #include "ipa-utils.h"
55 #include "alloc-pool.h"
56 #include "symbol-summary.h"
57 #include "ipa-prop.h"
58 #include "ipa-inline.h"
59 #include "tree-inline.h"
60 #include "profile.h"
61 #include "params.h"
62 #include "internal-fn.h"
63 #include "tree-ssa-alias.h"
64 #include "gimple.h"
65 #include "dbgcnt.h"
68 /* Return true when NODE has ADDR reference. */
70 static bool
71 has_addr_references_p (struct cgraph_node *node,
72 void *data ATTRIBUTE_UNUSED)
74 int i;
75 struct ipa_ref *ref = NULL;
77 for (i = 0; node->iterate_referring (i, ref); i++)
78 if (ref->use == IPA_REF_ADDR)
79 return true;
80 return false;
83 /* Look for all functions inlined to NODE and update their inlined_to pointers
84 to INLINED_TO. */
86 static void
87 update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
89 struct cgraph_edge *e;
90 for (e = node->callees; e; e = e->next_callee)
91 if (e->callee->global.inlined_to)
93 e->callee->global.inlined_to = inlined_to;
94 update_inlined_to_pointer (e->callee, inlined_to);
98 /* Add symtab NODE to queue starting at FIRST.
100 The queue is linked via AUX pointers and terminated by pointer to 1.
101 We enqueue nodes at two occasions: when we find them reachable or when we find
102 their bodies needed for further clonning. In the second case we mark them
103 by pointer to 2 after processing so they are re-queue when they become
104 reachable. */
106 static void
107 enqueue_node (symtab_node *node, symtab_node **first,
108 hash_set<symtab_node *> *reachable)
110 /* Node is still in queue; do nothing. */
111 if (node->aux && node->aux != (void *) 2)
112 return;
113 /* Node was already processed as unreachable, re-enqueue
114 only if it became reachable now. */
115 if (node->aux == (void *)2 && !reachable->contains (node))
116 return;
117 node->aux = *first;
118 *first = node;
121 /* Walk all references going out of SNODE; mark referred symbols
   reachable when their definitions must be preserved and enqueue them
   on the FIRST work list for further processing.  */
123 static void
124 process_references (symtab_node *snode,
125 symtab_node **first,
126 bool before_inlining_p,
127 hash_set<symtab_node *> *reachable)
129 int i;
130 struct ipa_ref *ref = NULL;
131 for (i = 0; snode->iterate_reference (i, ref); i++)
133 symtab_node *node = ref->referred;
134 symtab_node *body = node->ultimate_alias_target ();
/* Keep the referred symbol when it is defined in this unit and either
   is not external (or is an alias), may still be useful for inlining
   (before inlining: optimized functions, or always_inline ones before
   IPA_SSA), or is a variable whose constructor can be used for
   constant folding under WPA.  */
136 if (node->definition && !node->in_other_partition
137 && ((!DECL_EXTERNAL (node->decl) || node->alias)
138 || (((before_inlining_p
139 && ((TREE_CODE (node->decl) != FUNCTION_DECL
140 && optimize)
141 || (TREE_CODE (node->decl) == FUNCTION_DECL
142 && opt_for_fn (body->decl, optimize))
143 || (symtab->state < IPA_SSA
144 && lookup_attribute
145 ("always_inline",
146 DECL_ATTRIBUTES (body->decl))))))
147 /* We use variable constructors during late compilation for
148 constant folding. Keep references alive so partitioning
149 knows about potential references. */
150 || (TREE_CODE (node->decl) == VAR_DECL
151 && flag_wpa
152 && ctor_for_folding (node->decl)
153 != error_mark_node))))
155 /* Be sure that we will not optimize out alias target
156 body. */
157 if (DECL_EXTERNAL (node->decl)
158 && node->alias
159 && before_inlining_p)
160 reachable->add (body);
161 reachable->add (node);
/* Even symbols not marked reachable go to the queue so they end up in
   the boundary.  */
163 enqueue_node (node, first, reachable);
167 /* EDGE is an polymorphic call. If BEFORE_INLINING_P is set, mark
168 all its potential targets as reachable to permit later inlining if
169 devirtualization happens. After inlining still keep their declarations
170 around, so we can devirtualize to a direct call.
172 Also try to make trivial devirtualization when no or only one target is
173 possible. */
175 static void
176 walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
177 struct cgraph_edge *edge,
178 symtab_node **first,
179 hash_set<symtab_node *> *reachable,
180 bool before_inlining_p)
182 unsigned int i;
183 void *cache_token;
184 bool final;
185 vec <cgraph_node *>targets
186 = possible_polymorphic_call_targets
187 (edge, &final, &cache_token);
/* Process each distinct target list only once: hash_set::add returns
   false on first insertion of CACHE_TOKEN.  */
189 if (!reachable_call_targets->add (cache_token))
191 for (i = 0; i < targets.length (); i++)
193 struct cgraph_node *n = targets[i];
195 /* Do not bother to mark virtual methods in anonymous namespace;
196 either we will find use of virtual table defining it, or it is
197 unused. */
198 if (TREE_CODE (TREE_TYPE (n->decl)) == METHOD_TYPE
199 && type_in_anonymous_namespace_p
200 (method_class_type (TREE_TYPE (n->decl))))
201 continue;
203 symtab_node *body = n->function_symbol ();
205 /* Prior inlining, keep alive bodies of possible targets for
206 devirtualization. */
207 if (n->definition
208 && (before_inlining_p
209 && opt_for_fn (body->decl, optimize)
210 && opt_for_fn (body->decl, flag_devirtualize)))
212 /* Be sure that we will not optimize out alias target
213 body. */
214 if (DECL_EXTERNAL (n->decl)
215 && n->alias
216 && before_inlining_p)
217 reachable->add (body);
218 reachable->add (n);
220 /* Even after inlining we want to keep the possible targets in the
221 boundary, so late passes can still produce direct call even if
222 the chance for inlining is lost. */
223 enqueue_node (n, first, reachable);
227 /* Very trivial devirtualization; when the type is
228 final or anonymous (so we know all its derivation)
229 and there is only one possible virtual call target,
230 make the edge direct. */
231 if (final)
233 if (targets.length () <= 1 && dbg_cnt (devirt))
235 cgraph_node *target, *node = edge->caller;
236 if (targets.length () == 1)
237 target = targets[0];
238 else
/* No targets at all: redirect to __builtin_unreachable.  */
239 target = cgraph_node::get_create
240 (builtin_decl_implicit (BUILT_IN_UNREACHABLE));
242 if (dump_enabled_p ())
244 location_t locus;
245 if (edge->call_stmt)
246 locus = gimple_location (edge->call_stmt);
247 else
248 locus = UNKNOWN_LOCATION;
249 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
250 "devirtualizing call in %s/%i to %s/%i\n",
251 edge->caller->name (), edge->caller->order,
252 target->name (),
253 target->order);
255 edge = edge->make_direct (target);
256 if (inline_summaries)
257 inline_update_overall_summary (node);
258 else if (edge->call_stmt)
260 edge->redirect_call_stmt_to_callee ();
262 /* Call to __builtin_unreachable shouldn't be instrumented. */
263 if (!targets.length ())
264 gimple_call_set_with_bounds (edge->call_stmt, false);
270 /* Perform reachability analysis and reclaim all unreachable nodes.
272 The algorithm is basically mark&sweep but with some extra refinements:
274 - reachable extern inline functions needs special handling; the bodies needs
275 to stay in memory until inlining in hope that they will be inlined.
276 After inlining we release their bodies and turn them into unanalyzed
277 nodes even when they are reachable.
279 - virtual functions are kept in callgraph even if they seem unreachable in
280 hope calls to them will be devirtualized.
282 Again we remove them after inlining. In late optimization some
283 devirtualization may happen, but it is not important since we won't inline
284 the call. In theory early opts and IPA should work out all important cases.
286 - virtual clones needs bodies of their origins for later materialization;
287 this means that we want to keep the body even if the origin is unreachable
288 otherwise. To avoid origin from sitting in the callgraph and being
289 walked by IPA passes, we turn them into unanalyzed nodes with body
290 defined.
292 We maintain set of function declaration where body needs to stay in
293 body_needed_for_clonning
295 Inline clones represent special case: their declaration match the
296 declaration of origin and cgraph_remove_node already knows how to
297 reshape callgraph and preserve body when offline copy of function or
298 inline clone is being removed.
300 - C++ virtual tables keyed to other unit are represented as DECL_EXTERNAL
301 variables with DECL_INITIAL set. We finalize these and keep reachable
302 ones around for constant folding purposes. After inlining we however
303 stop walking their references to let everything static referenced by them
304 to be removed when it is otherwise unreachable.
306 We maintain queue of both reachable symbols (i.e. defined symbols that needs
307 to stay) and symbols that are in boundary (i.e. external symbols referenced
308 by reachable symbols or origins of clones). The queue is represented
309 as linked list by AUX pointer terminated by 1.
311 At the end we keep all reachable symbols. For symbols in boundary we always
312 turn definition into a declaration, but we may keep function body around
313 based on body_needed_for_clonning
315 All symbols that enter the queue have AUX pointer non-zero and are in the
316 boundary. Pointer set REACHABLE is used to track reachable symbols.
318 Every symbol can be visited twice - once as part of boundary and once
319 as real reachable symbol. enqueue_node needs to decide whether the
320 node needs to be re-queued for second processing. For this purpose
321 we set AUX pointer of processed symbols in the boundary to constant 2. */
/* Returns true when the symbol table changed, i.e. at least one symbol
   or body was removed.  */
323 bool
324 symbol_table::remove_unreachable_nodes (FILE *file)
326 symtab_node *first = (symtab_node *) (void *) 1;
327 struct cgraph_node *node, *next;
328 varpool_node *vnode, *vnext;
329 bool changed = false;
330 hash_set<symtab_node *> reachable;
331 hash_set<tree> body_needed_for_clonning;
332 hash_set<void *> reachable_call_targets;
/* Inlining is still to come when we are before IPA_SSA (at -O0) or
   before IPA_SSA_AFTER_INLINING (when optimizing).  */
333 bool before_inlining_p = symtab->state < (!optimize ? IPA_SSA
334 : IPA_SSA_AFTER_INLINING);
336 timevar_push (TV_IPA_UNREACHABLE);
337 build_type_inheritance_graph ();
338 if (file)
339 fprintf (file, "\nReclaiming functions:");
340 #ifdef ENABLE_CHECKING
341 FOR_EACH_FUNCTION (node)
342 gcc_assert (!node->aux);
343 FOR_EACH_VARIABLE (vnode)
344 gcc_assert (!vnode->aux);
345 #endif
346 /* Mark functions whose bodies are obviously needed.
347 This is mostly when they can be referenced externally. Inline clones
348 are special since their declarations are shared with master clone and thus
349 cgraph_can_remove_if_no_direct_calls_and_refs_p should not be called on them. */
350 FOR_EACH_FUNCTION (node)
352 node->used_as_abstract_origin = false;
353 if (node->definition
354 && !node->global.inlined_to
355 && !node->in_other_partition
356 && !node->can_remove_if_no_direct_calls_and_refs_p ())
358 gcc_assert (!node->global.inlined_to);
359 reachable.add (node);
360 enqueue_node (node, &first, &reachable);
362 else
363 gcc_assert (!node->aux);
366 /* Mark variables that are obviously needed. */
367 FOR_EACH_DEFINED_VARIABLE (vnode)
368 if (!vnode->can_remove_if_no_refs_p()
369 && !vnode->in_other_partition)
371 reachable.add (vnode);
372 enqueue_node (vnode, &first, &reachable);
375 /* Perform reachability analysis. */
376 while (first != (symtab_node *) (void *) 1)
378 bool in_boundary_p = !reachable.contains (first);
379 symtab_node *node = first;
381 first = (symtab_node *)first->aux;
383 /* If we are processing symbol in boundary, mark its AUX pointer for
384 possible later re-processing in enqueue_node. */
385 if (in_boundary_p)
387 node->aux = (void *)2;
388 if (node->alias && node->analyzed)
389 enqueue_node (node->get_alias_target (), &first, &reachable);
391 else
/* Node is reachable: pull in everything it needs.  */
393 if (TREE_CODE (node->decl) == FUNCTION_DECL
394 && DECL_ABSTRACT_ORIGIN (node->decl))
396 struct cgraph_node *origin_node
397 = cgraph_node::get (DECL_ABSTRACT_ORIGIN (node->decl));
398 if (origin_node && !origin_node->used_as_abstract_origin)
400 origin_node->used_as_abstract_origin = true;
401 gcc_assert (!origin_node->prev_sibling_clone);
402 gcc_assert (!origin_node->next_sibling_clone);
403 for (cgraph_node *n = origin_node->clones; n;
404 n = n->next_sibling_clone)
405 if (n->decl == DECL_ABSTRACT_ORIGIN (node->decl))
406 n->used_as_abstract_origin = true;
409 /* If any symbol in a comdat group is reachable, force
410 all externally visible symbols in the same comdat
411 group to be reachable as well. Comdat-local symbols
412 can be discarded if all uses were inlined. */
413 if (node->same_comdat_group)
415 symtab_node *next;
416 for (next = node->same_comdat_group;
417 next != node;
418 next = next->same_comdat_group)
419 if (!next->comdat_local_p ()
420 && !reachable.add (next))
421 enqueue_node (next, &first, &reachable);
423 /* Mark references as reachable. */
424 process_references (node, &first, before_inlining_p, &reachable);
427 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
429 /* Mark the callees reachable unless they are direct calls to extern
430 inline functions we decided to not inline. */
431 if (!in_boundary_p)
433 struct cgraph_edge *e;
434 /* Keep alive possible targets for devirtualization. */
435 if (opt_for_fn (cnode->decl, optimize)
436 && opt_for_fn (cnode->decl, flag_devirtualize))
438 struct cgraph_edge *next;
439 for (e = cnode->indirect_calls; e; e = next)
441 next = e->next_callee;
442 if (e->indirect_info->polymorphic)
443 walk_polymorphic_call_targets (&reachable_call_targets,
444 e, &first, &reachable,
445 before_inlining_p);
448 for (e = cnode->callees; e; e = e->next_callee)
450 symtab_node *body = e->callee->function_symbol ();
451 if (e->callee->definition
452 && !e->callee->in_other_partition
453 && (!e->inline_failed
454 || !DECL_EXTERNAL (e->callee->decl)
455 || e->callee->alias
456 || (before_inlining_p
457 && (opt_for_fn (body->decl, optimize)
458 || (symtab->state < IPA_SSA
459 && lookup_attribute
460 ("always_inline",
461 DECL_ATTRIBUTES (body->decl)))))))
463 /* Be sure that we will not optimize out alias target
464 body. */
465 if (DECL_EXTERNAL (e->callee->decl)
466 && e->callee->alias
467 && before_inlining_p)
468 reachable.add (body);
469 reachable.add (e->callee);
471 enqueue_node (e->callee, &first, &reachable);
474 /* When inline clone exists, mark body to be preserved so when removing
475 offline copy of the function we don't kill it. */
476 if (cnode->global.inlined_to)
477 body_needed_for_clonning.add (cnode->decl);
479 /* For non-inline clones, force their origins to the boundary and ensure
480 that body is not removed. */
481 while (cnode->clone_of)
483 bool noninline = cnode->clone_of->decl != cnode->decl;
484 cnode = cnode->clone_of;
485 if (noninline)
487 body_needed_for_clonning.add (cnode->decl);
488 enqueue_node (cnode, &first, &reachable);
493 else if (cnode->thunk.thunk_p)
494 enqueue_node (cnode->callees->callee, &first, &reachable);
496 /* If any reachable function has simd clones, mark them as
497 reachable as well. */
498 if (cnode->simd_clones)
500 cgraph_node *next;
501 for (next = cnode->simd_clones;
502 next;
503 next = next->simdclone->next_clone)
504 if (in_boundary_p
505 || !reachable.add (next))
506 enqueue_node (next, &first, &reachable);
509 /* When we see constructor of external variable, keep referred nodes in the
510 boundary. This will also hold initializers of the external vars NODE
511 refers to. */
512 varpool_node *vnode = dyn_cast <varpool_node *> (node);
513 if (vnode
514 && DECL_EXTERNAL (node->decl)
515 && !vnode->alias
516 && in_boundary_p)
518 struct ipa_ref *ref = NULL;
519 for (int i = 0; node->iterate_reference (i, ref); i++)
520 enqueue_node (ref->referred, &first, &reachable);
524 /* Remove unreachable functions. */
525 for (node = first_function (); node; node = next)
527 next = next_function (node);
529 /* If node is not needed at all, remove it. */
530 if (!node->aux)
532 if (file)
533 fprintf (file, " %s/%i", node->name (), node->order);
534 node->remove ();
535 changed = true;
537 /* If node is unreachable, remove its body. */
538 else if (!reachable.contains (node))
540 /* We keep definitions of thunks and aliases in the boundary so
541 we can walk to the ultimate alias targets and function symbols
542 reliably. */
543 if (node->alias || node->thunk.thunk_p)
545 else if (!body_needed_for_clonning.contains (node->decl)
546 && !node->alias && !node->thunk.thunk_p)
547 node->release_body ();
548 else if (!node->clone_of)
549 gcc_assert (in_lto_p || DECL_RESULT (node->decl));
550 if (node->definition && !node->alias && !node->thunk.thunk_p)
552 if (file)
553 fprintf (file, " %s/%i", node->name (), node->order);
/* Turn the boundary definition into a mere declaration.  */
554 node->body_removed = true;
555 node->analyzed = false;
556 node->definition = false;
557 node->cpp_implicit_alias = false;
558 node->alias = false;
559 node->thunk.thunk_p = false;
560 node->weakref = false;
561 /* After early inlining we drop always_inline attributes on
562 bodies of functions that are still referenced (have their
563 address taken). */
564 DECL_ATTRIBUTES (node->decl)
565 = remove_attribute ("always_inline",
566 DECL_ATTRIBUTES (node->decl));
567 if (!node->in_other_partition)
568 node->local.local = false;
569 node->remove_callees ();
570 node->remove_all_references ();
571 changed = true;
572 if (node->thunk.thunk_p
573 && node->thunk.add_pointer_bounds_args)
575 node->thunk.thunk_p = false;
576 node->thunk.add_pointer_bounds_args = false;
580 else
581 gcc_assert (node->clone_of || !node->has_gimple_body_p ()
582 || in_lto_p || DECL_RESULT (node->decl));
585 /* Inline clones might be kept around so their materializing allows further
586 cloning. If the function the clone is inlined into is removed, we need
587 to turn it into normal clone. */
588 FOR_EACH_FUNCTION (node)
590 if (node->global.inlined_to
591 && !node->callers)
593 gcc_assert (node->clones);
594 node->global.inlined_to = NULL;
595 update_inlined_to_pointer (node, node);
597 node->aux = NULL;
600 /* Remove unreachable variables. */
601 if (file)
602 fprintf (file, "\nReclaiming variables:");
603 for (vnode = first_variable (); vnode; vnode = vnext)
605 vnext = next_variable (vnode);
606 if (!vnode->aux
607 /* For can_refer_decl_in_current_unit_p we want to track for
608 all external variables if they are defined in other partition
609 or not. */
610 && (!flag_ltrans || !DECL_EXTERNAL (vnode->decl)))
612 struct ipa_ref *ref = NULL;
614 /* First remove the aliases, so varpool::remove can possibly lookup
615 the constructor and save it for future use. */
616 while (vnode->iterate_direct_aliases (0, ref))
618 if (file)
619 fprintf (file, " %s/%i", ref->referred->name (),
620 ref->referred->order);
621 ref->referring->remove ();
623 if (file)
624 fprintf (file, " %s/%i", vnode->name (), vnode->order);
625 vnext = next_variable (vnode);
626 vnode->remove ();
627 changed = true;
629 else if (!reachable.contains (vnode) && !vnode->alias)
631 tree init;
632 if (vnode->definition)
634 if (file)
635 fprintf (file, " %s", vnode->name ());
636 changed = true;
638 /* Keep body if it may be useful for constant folding. */
639 if ((init = ctor_for_folding (vnode->decl)) == error_mark_node
640 && !POINTER_BOUNDS_P (vnode->decl))
641 vnode->remove_initializer ();
642 else
643 DECL_INITIAL (vnode->decl) = init;
644 vnode->body_removed = true;
645 vnode->definition = false;
646 vnode->analyzed = false;
647 vnode->aux = NULL;
649 vnode->remove_from_same_comdat_group ();
651 vnode->remove_all_references ();
653 else
654 vnode->aux = NULL;
657 /* Now update address_taken flags and try to promote functions to be local. */
658 if (file)
659 fprintf (file, "\nClearing address taken flags:");
660 FOR_EACH_DEFINED_FUNCTION (node)
661 if (node->address_taken
662 && !node->used_from_other_partition)
664 if (!node->call_for_symbol_and_aliases
665 (has_addr_references_p, NULL, true)
666 && (!node->instrumentation_clone
667 || !node->instrumented_version
668 || !node->instrumented_version->address_taken))
670 if (file)
671 fprintf (file, " %s", node->name ());
672 node->address_taken = false;
673 changed = true;
674 if (node->local_p ())
676 node->local.local = true;
677 if (file)
678 fprintf (file, " (local)");
682 if (file)
683 fprintf (file, "\n");
685 #ifdef ENABLE_CHECKING
686 symtab_node::verify_symtab_nodes ();
687 #endif
689 /* If we removed something, perhaps profile could be improved. */
690 if (changed && optimize && inline_edge_summary_vec.exists ())
691 FOR_EACH_DEFINED_FUNCTION (node)
692 ipa_propagate_frequency (node);
694 timevar_pop (TV_IPA_UNREACHABLE);
695 return changed;
698 /* Process references to VNODE and set flags WRITTEN, ADDRESS_TAKEN, READ
699 as needed, also clear EXPLICIT_REFS if the references to given variable
700 do not need to be explicit. */
702 void
703 process_references (varpool_node *vnode,
704 bool *written, bool *address_taken,
705 bool *read, bool *explicit_refs)
707 int i;
708 struct ipa_ref *ref;
710 if (!vnode->all_refs_explicit_p ()
711 || TREE_THIS_VOLATILE (vnode->decl))
712 *explicit_refs = false;
/* Stop early once all three flags are already known to be set, or the
   references turned out not to be explicit.  */
714 for (i = 0; vnode->iterate_referring (i, ref)
715 && *explicit_refs && (!*written || !*address_taken || !*read); i++)
716 switch (ref->use)
718 case IPA_REF_ADDR:
719 *address_taken = true;
720 break;
721 case IPA_REF_LOAD:
722 *read = true;
723 break;
724 case IPA_REF_STORE:
725 *written = true;
726 break;
/* References through an alias count against the variable itself:
   recurse into the referring alias' own referrers.  */
727 case IPA_REF_ALIAS:
728 process_references (dyn_cast<varpool_node *> (ref->referring), written,
729 address_taken, read, explicit_refs);
730 break;
731 case IPA_REF_CHKP:
732 gcc_unreachable ();
736 /* Set TREE_READONLY bit. */
738 bool
739 set_readonly_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
741 TREE_READONLY (vnode->decl) = true;
742 return false;
745 /* Set writeonly bit and clear the initalizer, since it will not be needed. */
747 bool
748 set_writeonly_bit (varpool_node *vnode, void *data)
750 vnode->writeonly = true;
751 if (optimize)
753 DECL_INITIAL (vnode->decl) = NULL;
754 if (!vnode->alias)
756 if (vnode->num_references ())
757 *(bool *)data = true;
758 vnode->remove_all_references ();
761 return false;
764 /* Clear addressale bit of VNODE. */
766 bool
767 clear_addressable_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
769 vnode->address_taken = false;
770 TREE_ADDRESSABLE (vnode->decl) = 0;
771 return false;
774 /* Discover variables that have no longer address taken or that are read only
775 and update their flags.
777 Return true when unreachable symbol removal should be done.
779 FIXME: This can not be done in between gimplify and omp_expand since
780 readonly flag plays role on what is shared and what is not. Currently we do
781 this transformation as part of whole program visibility and re-do at
782 ipa-reference pass (to take into account cloning), but it would
783 make sense to do it before early optimizations. */
785 bool
786 ipa_discover_readonly_nonaddressable_vars (void)
788 bool remove_p = false;
789 varpool_node *vnode;
790 if (dump_file)
791 fprintf (dump_file, "Clearing variable flags:");
/* Only look at non-alias variables that are not already fully
   classified (addressable, write-only and read-only).  */
792 FOR_EACH_VARIABLE (vnode)
793 if (!vnode->alias
794 && (TREE_ADDRESSABLE (vnode->decl)
795 || !vnode->writeonly
796 || !TREE_READONLY (vnode->decl)))
798 bool written = false;
799 bool address_taken = false;
800 bool read = false;
801 bool explicit_refs = true;
803 process_references (vnode, &written, &address_taken, &read,
804 &explicit_refs);
/* With implicit (or volatile) references we cannot reason about the
   variable's uses; leave its flags alone.  */
805 if (!explicit_refs)
806 continue;
807 if (!address_taken)
809 if (TREE_ADDRESSABLE (vnode->decl) && dump_file)
810 fprintf (dump_file, " %s (non-addressable)", vnode->name ());
811 vnode->call_for_symbol_and_aliases (clear_addressable_bit, NULL,
812 true);
814 if (!address_taken && !written
815 /* Making variable in explicit section readonly can cause section
816 type conflict.
817 See e.g. gcc.c-torture/compile/pr23237.c */
818 && vnode->get_section () == NULL)
820 if (!TREE_READONLY (vnode->decl) && dump_file)
821 fprintf (dump_file, " %s (read-only)", vnode->name ());
822 vnode->call_for_symbol_and_aliases (set_readonly_bit, NULL, true);
824 if (!vnode->writeonly && !read && !address_taken && written)
826 if (dump_file)
827 fprintf (dump_file, " %s (write-only)", vnode->name ());
828 vnode->call_for_symbol_and_aliases (set_writeonly_bit, &remove_p,
829 true);
832 if (dump_file)
833 fprintf (dump_file, "\n");
834 return remove_p;
837 /* Simple IPA pass that frees the inline summaries once they are no
   longer needed, and reclaims functions made unreachable by early
   optimizations through its finish TODOs. */
839 namespace {
841 const pass_data pass_data_ipa_free_inline_summary =
843 SIMPLE_IPA_PASS, /* type */
844 "free-inline-summary", /* name */
845 OPTGROUP_NONE, /* optinfo_flags */
846 TV_IPA_FREE_INLINE_SUMMARY, /* tv_id */
847 0, /* properties_required */
848 0, /* properties_provided */
849 0, /* properties_destroyed */
850 0, /* todo_flags_start */
851 /* Early optimizations may make function unreachable. We can not
852 remove unreachable functions as part of the early opts pass because
853 TODOs are run before subpasses. Do it here. */
854 ( TODO_remove_functions | TODO_dump_symtab ), /* todo_flags_finish */
857 class pass_ipa_free_inline_summary : public simple_ipa_opt_pass
859 public:
860 pass_ipa_free_inline_summary (gcc::context *ctxt)
861 : simple_ipa_opt_pass (pass_data_ipa_free_inline_summary, ctxt)
864 /* opt_pass methods: */
/* Free the inline summaries; the pass itself performs no IL change.  */
865 virtual unsigned int execute (function *)
867 inline_free_summary ();
868 return 0;
871 }; // class pass_ipa_free_inline_summary
873 } // anon namespace
875 simple_ipa_opt_pass *
876 make_pass_ipa_free_inline_summary (gcc::context *ctxt)
878 return new pass_ipa_free_inline_summary (ctxt);
881 /* Generate and emit a static constructor or destructor. WHICH must
882 be one of 'I' (for a constructor), 'D' (for a destructor), 'P'
883 (for chkp static vars constructor) or 'B' (for chkp static bounds
884 constructor). BODY is a STATEMENT_LIST containing GENERIC
885 statements. PRIORITY is the initialization priority for this
886 constructor or destructor.
888 FINAL specify whether the externally visible name for collect2 should
889 be produced. */
891 static void
892 cgraph_build_static_cdtor_1 (char which, tree body, int priority, bool final)
894 static int counter = 0;
895 char which_buf[16];
896 tree decl, name, resdecl;
898 /* The priority is encoded in the constructor or destructor name.
899 collect2 will sort the names and arrange that they are called at
900 program startup. */
901 if (final)
902 sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
903 else
904 /* Produce sane name but one not recognizable by collect2, just for the
905 case we fail to inline the function. */
906 sprintf (which_buf, "sub_%c_%.5d_%d", which, priority, counter++);
907 name = get_file_function_name (which_buf);
/* Build a `void f(void)' FUNCTION_DECL holding BODY.  */
909 decl = build_decl (input_location, FUNCTION_DECL, name,
910 build_function_type_list (void_type_node, NULL_TREE));
911 current_function_decl = decl;
913 resdecl = build_decl (input_location,
914 RESULT_DECL, NULL_TREE, void_type_node);
915 DECL_ARTIFICIAL (resdecl) = 1;
916 DECL_RESULT (decl) = resdecl;
917 DECL_CONTEXT (resdecl) = decl;
919 allocate_struct_function (decl, false);
921 TREE_STATIC (decl) = 1;
922 TREE_USED (decl) = 1;
923 DECL_ARTIFICIAL (decl) = 1;
924 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
925 DECL_SAVED_TREE (decl) = body;
/* Without target .ctors/.dtors support, collect2 must see the symbol:
   make it public and prevent its removal.  */
926 if (!targetm.have_ctors_dtors && final)
928 TREE_PUBLIC (decl) = 1;
929 DECL_PRESERVE_P (decl) = 1;
931 DECL_UNINLINABLE (decl) = 1;
933 DECL_INITIAL (decl) = make_node (BLOCK);
934 TREE_USED (DECL_INITIAL (decl)) = 1;
936 DECL_SOURCE_LOCATION (decl) = input_location;
937 cfun->function_end_locus = input_location;
/* Mark the decl per the requested kind and record its priority.  */
939 switch (which)
941 case 'I':
942 DECL_STATIC_CONSTRUCTOR (decl) = 1;
943 decl_init_priority_insert (decl, priority);
944 break;
945 case 'P':
946 DECL_STATIC_CONSTRUCTOR (decl) = 1;
947 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("chkp ctor"),
948 NULL,
949 NULL_TREE);
950 decl_init_priority_insert (decl, priority);
951 break;
952 case 'B':
953 DECL_STATIC_CONSTRUCTOR (decl) = 1;
954 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("bnd_legacy"),
955 NULL,
956 NULL_TREE);
957 decl_init_priority_insert (decl, priority);
958 break;
959 case 'D':
960 DECL_STATIC_DESTRUCTOR (decl) = 1;
961 decl_fini_priority_insert (decl, priority);
962 break;
963 default:
964 gcc_unreachable ();
967 gimplify_function_tree (decl);
969 cgraph_node::add_new_function (decl, false);
971 set_cfun (NULL);
972 current_function_decl = NULL;
975 /* Generate and emit a static constructor or destructor. WHICH must
976 be one of 'I' (for a constructor), 'D' (for a destructor), 'P'
977 (for chkp static vars constructor) or 'B' (for chkp static bounds
978 constructor). BODY is a STATEMENT_LIST containing GENERIC
979 statements. PRIORITY is the initialization priority for this
980 constructor or destructor. */
982 void
983 cgraph_build_static_cdtor (char which, tree body, int priority)
985 cgraph_build_static_cdtor_1 (which, body, priority, false);
988 /* A vector of FUNCTION_DECLs declared as static constructors. */
989 static vec<tree> static_ctors;
990 /* A vector of FUNCTION_DECLs declared as static destructors. */
991 static vec<tree> static_dtors;
993 /* When target does not have ctors and dtors, we call all constructor
994 and destructor by special initialization/destruction function
995 recognized by collect2.
997 When we are going to build this function, collect all constructors and
998 destructors and turn them into normal functions. */
/* The vectors above are filled by record_cdtor_fn and consumed by
   build_cdtor below.  */
1000 static void
1001 record_cdtor_fn (struct cgraph_node *node)
1003 if (DECL_STATIC_CONSTRUCTOR (node->decl))
1004 static_ctors.safe_push (node->decl);
1005 if (DECL_STATIC_DESTRUCTOR (node->decl))
1006 static_dtors.safe_push (node->decl);
1007 node = cgraph_node::get (node->decl);
1008 DECL_DISREGARD_INLINE_LIMITS (node->decl) = 1;
1011 /* Define global constructors/destructor functions for the CDTORS, of
1012 which they are LEN. The CDTORS are sorted by initialization
1013 priority. If CTOR_P is true, these are constructors; otherwise,
1014 they are destructors. */
1016 static void
1017 build_cdtor (bool ctor_p, vec<tree> cdtors)
1019 size_t i,j;
1020 size_t len = cdtors.length ();
1022 i = 0;
1024 while (i < len)
1025 tree body;
1026 tree fn;
1027 priority_type priority;
1029 priority = 0;
1030 body = NULL_TREE;
1031 j = i;
/* Advance J past the run [I, J) of entries sharing the same
   initialization priority; PRIORITY records the run's priority.  */
1034 priority_type p;
1035 fn = cdtors[j];
1036 p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
1037 if (j == i)
1038 priority = p;
1039 else if (p != priority)
1040 break;
1041 j++;
1043 while (j < len);
1045 /* When there is only one cdtor and target supports them, do nothing. */
1046 if (j == i + 1
1047 && targetm.have_ctors_dtors)
1049 i++;
1050 continue;
1052 /* Emit calls to the batch [I, J) of constructors/destructors
1053 sharing the same initialization priority. */
1054 for (;i < j; i++)
1056 tree call;
1057 fn = cdtors[i];
1058 call = build_call_expr (fn, 0);
1059 if (ctor_p)
1060 DECL_STATIC_CONSTRUCTOR (fn) = 0;
1061 else
1062 DECL_STATIC_DESTRUCTOR (fn) = 0;
1063 /* We do not want to optimize away pure/const calls here.
1064 When optimizing, these should be already removed, when not
1065 optimizing, we want user to be able to breakpoint in them. */
1066 TREE_SIDE_EFFECTS (call) = 1;
1067 append_to_statement_list (call, &body);
1069 gcc_assert (body != NULL_TREE);
1070 /* Generate a function to call all the function of like
1071 priority. */
1072 cgraph_build_static_cdtor_1 (ctor_p ? 'I' : 'D', body, priority, true);
1076 /* Comparison function for qsort. P1 and P2 are actually of type
1077 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
1078 used to determine the sort order. */
1080 static int
1081 compare_ctor (const void *p1, const void *p2)
1083 tree f1;
1084 tree f2;
1085 int priority1;
1086 int priority2;
1088 f1 = *(const tree *)p1;
1089 f2 = *(const tree *)p2;
1090 priority1 = DECL_INIT_PRIORITY (f1);
1091 priority2 = DECL_INIT_PRIORITY (f2);
1093 if (priority1 < priority2)
1094 return -1;
1095 else if (priority1 > priority2)
1096 return 1;
1097 else
1098 /* Ensure a stable sort. Constructors are executed in backwarding
1099 order to make LTO initialize braries first. */
1100 return DECL_UID (f2) - DECL_UID (f1);
1103 /* Comparison function for qsort. P1 and P2 are actually of type
1104 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
1105 used to determine the sort order. */
1107 static int
1108 compare_dtor (const void *p1, const void *p2)
1110 tree f1;
1111 tree f2;
1112 int priority1;
1113 int priority2;
1115 f1 = *(const tree *)p1;
1116 f2 = *(const tree *)p2;
1117 priority1 = DECL_FINI_PRIORITY (f1);
1118 priority2 = DECL_FINI_PRIORITY (f2);
1120 if (priority1 < priority2)
1121 return -1;
1122 else if (priority1 > priority2)
1123 return 1;
1124 else
1125 /* Ensure a stable sort. */
1126 return DECL_UID (f1) - DECL_UID (f2);
1129 /* Generate functions to call static constructors and destructors
1130 for targets that do not support .ctors/.dtors sections. These
1131 functions have magic names which are detected by collect2. */
1133 static void
1134 build_cdtor_fns (void)
1136 if (!static_ctors.is_empty ())
1138 gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
1139 static_ctors.qsort (compare_ctor);
1140 build_cdtor (/*ctor_p=*/true, static_ctors);
1143 if (!static_dtors.is_empty ())
1145 gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
1146 static_dtors.qsort (compare_dtor);
1147 build_cdtor (/*ctor_p=*/false, static_dtors);
1151 /* Look for constructors and destructors and produce function calling them.
1152 This is needed for targets not supporting ctors or dtors, but we perform the
1153 transformation also at linktime to merge possibly numerous
1154 constructors/destructors into single function to improve code locality and
1155 reduce size. */
1157 static unsigned int
1158 ipa_cdtor_merge (void)
1160 struct cgraph_node *node;
1161 FOR_EACH_DEFINED_FUNCTION (node)
1162 if (DECL_STATIC_CONSTRUCTOR (node->decl)
1163 || DECL_STATIC_DESTRUCTOR (node->decl))
1164 record_cdtor_fn (node);
1165 build_cdtor_fns ();
1166 static_ctors.release ();
1167 static_dtors.release ();
1168 return 0;
1171 namespace {
1173 const pass_data pass_data_ipa_cdtor_merge =
1175 IPA_PASS, /* type */
1176 "cdtor", /* name */
1177 OPTGROUP_NONE, /* optinfo_flags */
1178 TV_CGRAPHOPT, /* tv_id */
1179 0, /* properties_required */
1180 0, /* properties_provided */
1181 0, /* properties_destroyed */
1182 0, /* todo_flags_start */
1183 0, /* todo_flags_finish */
1186 class pass_ipa_cdtor_merge : public ipa_opt_pass_d
1188 public:
1189 pass_ipa_cdtor_merge (gcc::context *ctxt)
1190 : ipa_opt_pass_d (pass_data_ipa_cdtor_merge, ctxt,
1191 NULL, /* generate_summary */
1192 NULL, /* write_summary */
1193 NULL, /* read_summary */
1194 NULL, /* write_optimization_summary */
1195 NULL, /* read_optimization_summary */
1196 NULL, /* stmt_fixup */
1197 0, /* function_transform_todo_flags_start */
1198 NULL, /* function_transform */
1199 NULL) /* variable_transform */
1202 /* opt_pass methods: */
1203 virtual bool gate (function *);
1204 virtual unsigned int execute (function *) { return ipa_cdtor_merge (); }
1206 }; // class pass_ipa_cdtor_merge
1208 bool
1209 pass_ipa_cdtor_merge::gate (function *)
1211 /* Perform the pass when we have no ctors/dtors support
1212 or at LTO time to merge multiple constructors into single
1213 function. */
1214 return !targetm.have_ctors_dtors || (optimize && in_lto_p);
1217 } // anon namespace
1219 ipa_opt_pass_d *
1220 make_pass_ipa_cdtor_merge (gcc::context *ctxt)
1222 return new pass_ipa_cdtor_merge (ctxt);
/* Invalid pointer representing BOTTOM for single user dataflow.
   The value 2 is an address no real cgraph_node can have; 1 is not used
   because it already serves as the worklist terminator in
   ipa_single_use.  */
#define BOTTOM ((cgraph_node *)(size_t) 2)
1228 /* Meet operation for single user dataflow.
1229 Here we want to associate variables with sigle function that may access it.
1231 FUNCTION is current single user of a variable, VAR is variable that uses it.
1232 Latttice is stored in SINGLE_USER_MAP.
1234 We represent:
1235 - TOP by no entry in SIGNLE_USER_MAP
1236 - BOTTOM by BOTTOM in AUX pointer (to save lookups)
1237 - known single user by cgraph pointer in SINGLE_USER_MAP. */
1239 cgraph_node *
1240 meet (cgraph_node *function, varpool_node *var,
1241 hash_map<varpool_node *, cgraph_node *> &single_user_map)
1243 struct cgraph_node *user, **f;
1245 if (var->aux == BOTTOM)
1246 return BOTTOM;
1248 f = single_user_map.get (var);
1249 if (!f)
1250 return function;
1251 user = *f;
1252 if (!function)
1253 return user;
1254 else if (function != user)
1255 return BOTTOM;
1256 else
1257 return function;
1260 /* Propagation step of single-use dataflow.
1262 Check all uses of VNODE and see if they are used by single function FUNCTION.
1263 SINGLE_USER_MAP represents the dataflow lattice. */
1265 cgraph_node *
1266 propagate_single_user (varpool_node *vnode, cgraph_node *function,
1267 hash_map<varpool_node *, cgraph_node *> &single_user_map)
1269 int i;
1270 struct ipa_ref *ref;
1272 gcc_assert (!vnode->externally_visible);
1274 /* If node is an alias, first meet with its target. */
1275 if (vnode->alias)
1276 function = meet (function, vnode->get_alias_target (), single_user_map);
1278 /* Check all users and see if they correspond to a single function. */
1279 for (i = 0; vnode->iterate_referring (i, ref) && function != BOTTOM; i++)
1281 struct cgraph_node *cnode = dyn_cast <cgraph_node *> (ref->referring);
1282 if (cnode)
1284 if (cnode->global.inlined_to)
1285 cnode = cnode->global.inlined_to;
1286 if (!function)
1287 function = cnode;
1288 else if (function != cnode)
1289 function = BOTTOM;
1291 else
1292 function = meet (function, dyn_cast <varpool_node *> (ref->referring),
1293 single_user_map);
1295 return function;
1298 /* Pass setting used_by_single_function flag.
1299 This flag is set on variable when there is only one function that may
1300 possibly referr to it. */
1302 static unsigned int
1303 ipa_single_use (void)
1305 varpool_node *first = (varpool_node *) (void *) 1;
1306 varpool_node *var;
1307 hash_map<varpool_node *, cgraph_node *> single_user_map;
1309 FOR_EACH_DEFINED_VARIABLE (var)
1310 if (!var->all_refs_explicit_p ())
1311 var->aux = BOTTOM;
1312 else
1314 /* Enqueue symbol for dataflow. */
1315 var->aux = first;
1316 first = var;
1319 /* The actual dataflow. */
1321 while (first != (void *) 1)
1323 cgraph_node *user, *orig_user, **f;
1325 var = first;
1326 first = (varpool_node *)first->aux;
1328 f = single_user_map.get (var);
1329 if (f)
1330 orig_user = *f;
1331 else
1332 orig_user = NULL;
1333 user = propagate_single_user (var, orig_user, single_user_map);
1335 gcc_checking_assert (var->aux != BOTTOM);
1337 /* If user differs, enqueue all references. */
1338 if (user != orig_user)
1340 unsigned int i;
1341 ipa_ref *ref;
1343 single_user_map.put (var, user);
1345 /* Enqueue all aliases for re-processing. */
1346 for (i = 0; var->iterate_direct_aliases (i, ref); i++)
1347 if (!ref->referring->aux)
1349 ref->referring->aux = first;
1350 first = dyn_cast <varpool_node *> (ref->referring);
1352 /* Enqueue all users for re-processing. */
1353 for (i = 0; var->iterate_reference (i, ref); i++)
1354 if (!ref->referred->aux
1355 && ref->referred->definition
1356 && is_a <varpool_node *> (ref->referred))
1358 ref->referred->aux = first;
1359 first = dyn_cast <varpool_node *> (ref->referred);
1362 /* If user is BOTTOM, just punt on this var. */
1363 if (user == BOTTOM)
1364 var->aux = BOTTOM;
1365 else
1366 var->aux = NULL;
1368 else
1369 var->aux = NULL;
1372 FOR_EACH_DEFINED_VARIABLE (var)
1374 if (var->aux != BOTTOM)
1376 #ifdef ENABLE_CHECKING
1377 /* Not having the single user known means that the VAR is
1378 unreachable. Either someone forgot to remove unreachable
1379 variables or the reachability here is wrong. */
1381 gcc_assert (single_user_map.get (var));
1382 #endif
1383 if (dump_file)
1385 fprintf (dump_file, "Variable %s/%i is used by single function\n",
1386 var->name (), var->order);
1388 var->used_by_single_function = true;
1390 var->aux = NULL;
1392 return 0;
1395 namespace {
1397 const pass_data pass_data_ipa_single_use =
1399 IPA_PASS, /* type */
1400 "single-use", /* name */
1401 OPTGROUP_NONE, /* optinfo_flags */
1402 TV_CGRAPHOPT, /* tv_id */
1403 0, /* properties_required */
1404 0, /* properties_provided */
1405 0, /* properties_destroyed */
1406 0, /* todo_flags_start */
1407 0, /* todo_flags_finish */
1410 class pass_ipa_single_use : public ipa_opt_pass_d
1412 public:
1413 pass_ipa_single_use (gcc::context *ctxt)
1414 : ipa_opt_pass_d (pass_data_ipa_single_use, ctxt,
1415 NULL, /* generate_summary */
1416 NULL, /* write_summary */
1417 NULL, /* read_summary */
1418 NULL, /* write_optimization_summary */
1419 NULL, /* read_optimization_summary */
1420 NULL, /* stmt_fixup */
1421 0, /* function_transform_todo_flags_start */
1422 NULL, /* function_transform */
1423 NULL) /* variable_transform */
1426 /* opt_pass methods: */
1427 virtual bool gate (function *);
1428 virtual unsigned int execute (function *) { return ipa_single_use (); }
1430 }; // class pass_ipa_single_use
1432 bool
1433 pass_ipa_single_use::gate (function *)
1435 return optimize;
1438 } // anon namespace
1440 ipa_opt_pass_d *
1441 make_pass_ipa_single_use (gcc::context *ctxt)
1443 return new pass_ipa_single_use (ctxt);