/* Basic IPA optimizations and utilities.
   Copyright (C) 2003-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "alloc-pool.h"
28 #include "tree-pass.h"
29 #include "stringpool.h"
30 #include "cgraph.h"
31 #include "gimplify.h"
32 #include "tree-iterator.h"
33 #include "ipa-utils.h"
34 #include "symbol-summary.h"
35 #include "tree-vrp.h"
36 #include "ipa-prop.h"
37 #include "ipa-fnsummary.h"
38 #include "dbgcnt.h"
39 #include "debug.h"
40 #include "stringpool.h"
41 #include "attribs.h"
/* Return true when NODE has ADDR reference.  */

static bool
has_addr_references_p (struct cgraph_node *node,
		       void *)
{
  int i;
  struct ipa_ref *ref = NULL;

  for (i = 0; node->iterate_referring (i, ref); i++)
    if (ref->use == IPA_REF_ADDR)
      return true;
  return false;
}
/* Return true when NODE can be target of an indirect call.  */

static bool
is_indirect_call_target_p (struct cgraph_node *node, void *)
{
  return node->indirect_call_target;
}
/* Look for all functions inlined to NODE and update their inlined_to pointers
   to INLINED_TO.  */

static void
update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
{
  struct cgraph_edge *e;
  for (e = node->callees; e; e = e->next_callee)
    if (e->callee->global.inlined_to)
      {
	e->callee->global.inlined_to = inlined_to;
	update_inlined_to_pointer (e->callee, inlined_to);
      }
}
/* Add symtab NODE to queue starting at FIRST.

   The queue is linked via AUX pointers and terminated by pointer to 1.
   We enqueue nodes at two occasions: when we find them reachable or when we
   find their bodies needed for further cloning.  In the second case we mark
   them by pointer to 2 after processing so they are re-queued once they
   become reachable.  */

static void
enqueue_node (symtab_node *node, symtab_node **first,
	      hash_set<symtab_node *> *reachable)
{
  /* Node is still in queue; do nothing.  */
  if (node->aux && node->aux != (void *) 2)
    return;
  /* Node was already processed as unreachable, re-enqueue
     only if it became reachable now.  */
  if (node->aux == (void *) 2 && !reachable->contains (node))
    return;
  node->aux = *first;
  *first = node;
}
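
/* To summarize the AUX encoding used by this queue (as consumed by
   symbol_table::remove_unreachable_nodes below):

     aux == NULL   the node was never enqueued;
     aux == 2      the node was already processed as part of the boundary
		   and may be re-enqueued once it is proven reachable;
     otherwise     the node is in the queue or was already dequeued; AUX
		   links to the next queued node and the value 1 terminates
		   the list.  */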
/* Process references.  */

static void
process_references (symtab_node *snode,
		    symtab_node **first,
		    bool before_inlining_p,
		    hash_set<symtab_node *> *reachable)
{
  int i;
  struct ipa_ref *ref = NULL;
  for (i = 0; snode->iterate_reference (i, ref); i++)
    {
      symtab_node *node = ref->referred;
      symtab_node *body = node->ultimate_alias_target ();

      if (node->definition && !node->in_other_partition
	  && ((!DECL_EXTERNAL (node->decl) || node->alias)
	      || (((before_inlining_p
		    && (TREE_CODE (node->decl) != FUNCTION_DECL
			|| (TREE_CODE (node->decl) == FUNCTION_DECL
			    && opt_for_fn (body->decl, optimize))
			|| (symtab->state < IPA_SSA
			    && lookup_attribute
				 ("always_inline",
				  DECL_ATTRIBUTES (body->decl)))))
		   /* We use variable constructors during late compilation for
		      constant folding.  Keep references alive so partitioning
		      knows about potential references.  */
		   || (VAR_P (node->decl)
		       && (flag_wpa
			   || flag_incremental_link
				== INCREMENTAL_LINK_LTO)
		       && dyn_cast <varpool_node *> (node)
			    ->ctor_useable_for_folding_p ()))))
	{
	  /* Be sure that we will not optimize out alias target
	     body.  */
	  if (DECL_EXTERNAL (node->decl)
	      && node->alias
	      && before_inlining_p)
	    reachable->add (body);
	  reachable->add (node);
	}
      enqueue_node (node, first, reachable);
    }
}
/* EDGE is a polymorphic call.  If BEFORE_INLINING_P is set, mark
   all its potential targets as reachable to permit later inlining if
   devirtualization happens.  After inlining still keep their declarations
   around, so we can devirtualize to a direct call.

   Also try to perform trivial devirtualization when no or only one target is
   possible.  */

static void
walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
			       struct cgraph_edge *edge,
			       symtab_node **first,
			       hash_set<symtab_node *> *reachable,
			       bool before_inlining_p)
{
  unsigned int i;
  void *cache_token;
  bool final;
  vec <cgraph_node *>targets
    = possible_polymorphic_call_targets
	(edge, &final, &cache_token);

  if (!reachable_call_targets->add (cache_token))
    {
      for (i = 0; i < targets.length (); i++)
	{
	  struct cgraph_node *n = targets[i];

	  /* Do not bother to mark virtual methods in anonymous namespace;
	     either we will find use of virtual table defining it, or it is
	     unused.  */
	  if (TREE_CODE (TREE_TYPE (n->decl)) == METHOD_TYPE
	      && type_in_anonymous_namespace_p
		   (TYPE_METHOD_BASETYPE (TREE_TYPE (n->decl))))
	    continue;

	  n->indirect_call_target = true;
	  symtab_node *body = n->function_symbol ();

	  /* Prior to inlining, keep alive bodies of possible targets for
	     devirtualization.  */
	  if (n->definition
	      && (before_inlining_p
		  && opt_for_fn (body->decl, optimize)
		  && opt_for_fn (body->decl, flag_devirtualize)))
	    {
	      /* Be sure that we will not optimize out alias target
		 body.  */
	      if (DECL_EXTERNAL (n->decl)
		  && n->alias
		  && before_inlining_p)
		reachable->add (body);
	      reachable->add (n);
	    }
	  /* Even after inlining we want to keep the possible targets in the
	     boundary, so late passes can still produce direct call even if
	     the chance for inlining is lost.  */
	  enqueue_node (n, first, reachable);
	}
    }

  /* Very trivial devirtualization; when the type is
     final or anonymous (so we know all its derivation)
     and there is only one possible virtual call target,
     make the edge direct.  */
  if (final)
    {
      if (targets.length () <= 1 && dbg_cnt (devirt))
	{
	  cgraph_node *target, *node = edge->caller;
	  if (targets.length () == 1)
	    target = targets[0];
	  else
	    target = cgraph_node::get_create
		       (builtin_decl_implicit (BUILT_IN_UNREACHABLE));

	  if (dump_enabled_p ())
	    {
	      location_t locus;
	      if (edge->call_stmt)
		locus = gimple_location (edge->call_stmt);
	      else
		locus = UNKNOWN_LOCATION;
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
			       "devirtualizing call in %s to %s\n",
			       edge->caller->dump_name (),
			       target->dump_name ());
	    }
	  edge = edge->make_direct (target);
	  if (ipa_fn_summaries)
	    ipa_update_overall_fn_summary (node);
	  else if (edge->call_stmt)
	    {
	      edge->redirect_call_stmt_to_callee ();

	      /* Call to __builtin_unreachable shouldn't be instrumented.  */
	      if (!targets.length ())
		gimple_call_set_with_bounds (edge->call_stmt, false);
	    }
	}
    }
}
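
/* As an illustration of the trivial devirtualization above, consider this
   hypothetical user code (not part of GCC):

     struct S final { virtual int f () { return 1; } };
     int call (S *s) { return s->f (); }

   Because S is final, the type inheritance graph proves that
   targets.length () is 1, so the polymorphic call s->f () can be turned
   into a direct call to S::f; when no target is possible at all, the call
   is redirected to __builtin_unreachable instead.  */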
/* Perform reachability analysis and reclaim all unreachable nodes.

   The algorithm is basically mark&sweep but with some extra refinements:

   - reachable extern inline functions need special handling; the bodies need
     to stay in memory until inlining in hope that they will be inlined.
     After inlining we release their bodies and turn them into unanalyzed
     nodes even when they are reachable.

   - virtual functions are kept in the callgraph even if they seem unreachable
     in hope calls to them will be devirtualized.

     Again we remove them after inlining.  In late optimization some
     devirtualization may happen, but it is not important since we won't inline
     the call.  In theory early opts and IPA should work out all important
     cases.

   - virtual clones need bodies of their origins for later materialization;
     this means that we want to keep the body even if the origin is unreachable
     otherwise.  To avoid the origin from sitting in the callgraph and being
     walked by IPA passes, we turn them into unanalyzed nodes with body
     defined.

     We maintain a set of function declarations whose bodies need to stay in
     body_needed_for_clonning.

     Inline clones represent a special case: their declaration matches the
     declaration of the origin and cgraph_remove_node already knows how to
     reshape the callgraph and preserve the body when an offline copy of the
     function or an inline clone is being removed.

   - C++ virtual tables keyed to another unit are represented as DECL_EXTERNAL
     variables with DECL_INITIAL set.  We finalize these and keep reachable
     ones around for constant folding purposes.  After inlining we however
     stop walking their references to let everything static referenced by them
     be removed when it is otherwise unreachable.

   We maintain a queue of both reachable symbols (i.e. defined symbols that
   need to stay) and symbols that are in the boundary (i.e. external symbols
   referenced by reachable symbols or origins of clones).  The queue is
   represented as a linked list via AUX pointers terminated by 1.

   At the end we keep all reachable symbols.  For symbols in the boundary we
   always turn the definition into a declaration, but we may keep the function
   body around based on body_needed_for_clonning.

   All symbols that enter the queue have AUX pointer non-zero and are in the
   boundary.  Pointer set REACHABLE is used to track reachable symbols.

   Every symbol can be visited twice - once as part of the boundary and once
   as a real reachable symbol.  enqueue_node needs to decide whether the
   node needs to be re-queued for second processing.  For this purpose
   we set the AUX pointer of processed symbols in the boundary to constant
   2.  */
bool
symbol_table::remove_unreachable_nodes (FILE *file)
{
  symtab_node *first = (symtab_node *) (void *) 1;
  struct cgraph_node *node, *next;
  varpool_node *vnode, *vnext;
  bool changed = false;
  hash_set<symtab_node *> reachable;
  hash_set<tree> body_needed_for_clonning;
  hash_set<void *> reachable_call_targets;
  bool before_inlining_p = symtab->state < (!optimize && !in_lto_p ? IPA_SSA
					    : IPA_SSA_AFTER_INLINING);

  timevar_push (TV_IPA_UNREACHABLE);
  build_type_inheritance_graph ();
  if (file)
    fprintf (file, "\nReclaiming functions:");
  if (flag_checking)
    {
      FOR_EACH_FUNCTION (node)
	gcc_assert (!node->aux);
      FOR_EACH_VARIABLE (vnode)
	gcc_assert (!vnode->aux);
    }
  /* Mark functions whose bodies are obviously needed.
     This is mostly when they can be referenced externally.  Inline clones
     are special since their declarations are shared with the master clone
     and thus cgraph_can_remove_if_no_direct_calls_and_refs_p should not be
     called on them.  */
  FOR_EACH_FUNCTION (node)
    {
      node->used_as_abstract_origin = false;
      node->indirect_call_target = false;
      if (node->definition
	  && !node->global.inlined_to
	  && !node->in_other_partition
	  && !node->can_remove_if_no_direct_calls_and_refs_p ())
	{
	  gcc_assert (!node->global.inlined_to);
	  reachable.add (node);
	  enqueue_node (node, &first, &reachable);
	}
      else
	gcc_assert (!node->aux);
    }

  /* Mark variables that are obviously needed.  */
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (!vnode->can_remove_if_no_refs_p ()
	&& !vnode->in_other_partition)
      {
	reachable.add (vnode);
	enqueue_node (vnode, &first, &reachable);
      }

  /* Perform reachability analysis.  */
  while (first != (symtab_node *) (void *) 1)
    {
      bool in_boundary_p = !reachable.contains (first);
      symtab_node *node = first;

      first = (symtab_node *) first->aux;

      /* If we are processing a symbol in the boundary, mark its AUX pointer
	 for possible later re-processing in enqueue_node.  */
      if (in_boundary_p)
	{
	  node->aux = (void *) 2;
	  if (node->alias && node->analyzed)
	    enqueue_node (node->get_alias_target (), &first, &reachable);
	}
      else
	{
	  if (TREE_CODE (node->decl) == FUNCTION_DECL
	      && DECL_ABSTRACT_ORIGIN (node->decl))
	    {
	      struct cgraph_node *origin_node
		= cgraph_node::get (DECL_ABSTRACT_ORIGIN (node->decl));
	      if (origin_node && !origin_node->used_as_abstract_origin)
		{
		  origin_node->used_as_abstract_origin = true;
		  gcc_assert (!origin_node->prev_sibling_clone);
		  gcc_assert (!origin_node->next_sibling_clone);
		  for (cgraph_node *n = origin_node->clones; n;
		       n = n->next_sibling_clone)
		    if (n->decl == DECL_ABSTRACT_ORIGIN (node->decl))
		      n->used_as_abstract_origin = true;
		}
	    }
	  /* If any symbol in a comdat group is reachable, force
	     all externally visible symbols in the same comdat
	     group to be reachable as well.  Comdat-local symbols
	     can be discarded if all uses were inlined.  */
	  if (node->same_comdat_group)
	    {
	      symtab_node *next;
	      for (next = node->same_comdat_group;
		   next != node;
		   next = next->same_comdat_group)
		if (!next->comdat_local_p ()
		    && !reachable.add (next))
		  enqueue_node (next, &first, &reachable);
	    }
	  /* Mark references as reachable.  */
	  process_references (node, &first, before_inlining_p, &reachable);
	}

      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
	{
	  /* Mark the callees reachable unless they are direct calls to extern
	     inline functions we decided to not inline.  */
	  if (!in_boundary_p)
	    {
	      struct cgraph_edge *e;
	      /* Keep alive possible targets for devirtualization.  */
	      if (opt_for_fn (cnode->decl, optimize)
		  && opt_for_fn (cnode->decl, flag_devirtualize))
		{
		  struct cgraph_edge *next;
		  for (e = cnode->indirect_calls; e; e = next)
		    {
		      next = e->next_callee;
		      if (e->indirect_info->polymorphic)
			walk_polymorphic_call_targets (&reachable_call_targets,
						       e, &first, &reachable,
						       before_inlining_p);
		    }
		}
	      for (e = cnode->callees; e; e = e->next_callee)
		{
		  symtab_node *body = e->callee->function_symbol ();
		  if (e->callee->definition
		      && !e->callee->in_other_partition
		      && (!e->inline_failed
			  || !DECL_EXTERNAL (e->callee->decl)
			  || e->callee->alias
			  || (before_inlining_p
			      && (opt_for_fn (body->decl, optimize)
				  || (symtab->state < IPA_SSA
				      && lookup_attribute
					   ("always_inline",
					    DECL_ATTRIBUTES (body->decl)))))))
		    {
		      /* Be sure that we will not optimize out alias target
			 body.  */
		      if (DECL_EXTERNAL (e->callee->decl)
			  && e->callee->alias
			  && before_inlining_p)
			reachable.add (body);
		      reachable.add (e->callee);
		    }
		  enqueue_node (e->callee, &first, &reachable);
		}

	      /* When an inline clone exists, mark the body to be preserved
		 so that when removing the offline copy of the function we
		 don't kill it.  */
	      if (cnode->global.inlined_to)
		body_needed_for_clonning.add (cnode->decl);

	      /* For instrumentation clones we always need the original
		 function node for proper LTO privatization.  */
	      if (cnode->instrumentation_clone
		  && cnode->definition)
		{
		  gcc_assert (cnode->instrumented_version || in_lto_p);
		  if (cnode->instrumented_version)
		    {
		      enqueue_node (cnode->instrumented_version, &first,
				    &reachable);
		      reachable.add (cnode->instrumented_version);
		    }
		}

	      /* For non-inline clones, force their origins to the boundary
		 and ensure that the body is not removed.  */
	      while (cnode->clone_of)
		{
		  bool noninline = cnode->clone_of->decl != cnode->decl;
		  cnode = cnode->clone_of;
		  if (noninline)
		    {
		      body_needed_for_clonning.add (cnode->decl);
		      enqueue_node (cnode, &first, &reachable);
		    }
		}

	    }
	  else if (cnode->thunk.thunk_p)
	    enqueue_node (cnode->callees->callee, &first, &reachable);

	  /* If any reachable function has simd clones, mark them as
	     reachable as well.  */
	  if (cnode->simd_clones)
	    {
	      cgraph_node *next;
	      for (next = cnode->simd_clones;
		   next;
		   next = next->simdclone->next_clone)
		if (in_boundary_p
		    || !reachable.add (next))
		  enqueue_node (next, &first, &reachable);
	    }
	}
      /* When we see a constructor of an external variable, keep referred
	 nodes in the boundary.  This will also hold initializers of the
	 external vars NODE refers to.  */
      varpool_node *vnode = dyn_cast <varpool_node *> (node);
      if (vnode
	  && DECL_EXTERNAL (node->decl)
	  && !vnode->alias
	  && in_boundary_p)
	{
	  struct ipa_ref *ref = NULL;
	  for (int i = 0; node->iterate_reference (i, ref); i++)
	    enqueue_node (ref->referred, &first, &reachable);
	}
    }
  /* Remove unreachable functions.  */
  for (node = first_function (); node; node = next)
    {
      next = next_function (node);

      /* If node is not needed at all, remove it.  */
      if (!node->aux)
	{
	  if (file)
	    fprintf (file, " %s", node->dump_name ());
	  node->remove ();
	  changed = true;
	}
      /* If node is unreachable, remove its body.  */
      else if (!reachable.contains (node))
	{
	  /* We keep definitions of thunks and aliases in the boundary so
	     we can walk to the ultimate alias targets and function symbols
	     reliably.  */
	  if (node->alias || node->thunk.thunk_p)
	    ;
	  else if (!body_needed_for_clonning.contains (node->decl)
		   && !node->alias && !node->thunk.thunk_p)
	    node->release_body ();
	  else if (!node->clone_of)
	    gcc_assert (in_lto_p || DECL_RESULT (node->decl));
	  if (node->definition && !node->alias && !node->thunk.thunk_p)
	    {
	      if (file)
		fprintf (file, " %s", node->dump_name ());
	      node->body_removed = true;
	      node->analyzed = false;
	      node->definition = false;
	      node->cpp_implicit_alias = false;
	      node->alias = false;
	      node->transparent_alias = false;
	      node->thunk.thunk_p = false;
	      node->weakref = false;
	      /* After early inlining we drop always_inline attributes on
		 bodies of functions that are still referenced (have their
		 address taken).  */
	      DECL_ATTRIBUTES (node->decl)
		= remove_attribute ("always_inline",
				    DECL_ATTRIBUTES (node->decl));
	      if (!node->in_other_partition)
		node->local.local = false;
	      node->remove_callees ();
	      node->remove_all_references ();
	      changed = true;
	      if (node->thunk.thunk_p
		  && node->thunk.add_pointer_bounds_args)
		{
		  node->thunk.thunk_p = false;
		  node->thunk.add_pointer_bounds_args = false;
		}
	    }
	}
      else
	gcc_assert (node->clone_of || !node->has_gimple_body_p ()
		    || in_lto_p || DECL_RESULT (node->decl));
    }

  /* Inline clones might be kept around so their materializing allows further
     cloning.  If the function the clone is inlined into is removed, we need
     to turn it into a normal function.  */
  FOR_EACH_FUNCTION (node)
    {
      if (node->global.inlined_to
	  && !node->callers)
	{
	  gcc_assert (node->clones);
	  node->global.inlined_to = NULL;
	  update_inlined_to_pointer (node, node);
	}
      node->aux = NULL;
    }

  /* Remove unreachable variables.  */
  if (file)
    fprintf (file, "\nReclaiming variables:");
  for (vnode = first_variable (); vnode; vnode = vnext)
    {
      vnext = next_variable (vnode);
      if (!vnode->aux
	  /* For can_refer_decl_in_current_unit_p we want to track for
	     all external variables if they are defined in other partition
	     or not.  */
	  && (!flag_ltrans || !DECL_EXTERNAL (vnode->decl)))
	{
	  struct ipa_ref *ref = NULL;

	  /* First remove the aliases, so varpool::remove can possibly lookup
	     the constructor and save it for future use.  */
	  while (vnode->iterate_direct_aliases (0, ref))
	    {
	      if (file)
		fprintf (file, " %s", ref->referred->dump_name ());
	      ref->referring->remove ();
	    }
	  if (file)
	    fprintf (file, " %s", vnode->dump_name ());
	  vnext = next_variable (vnode);
	  /* Signal removal to the debug machinery.  */
	  if (! flag_wpa || flag_incremental_link == INCREMENTAL_LINK_LTO)
	    {
	      vnode->definition = false;
	      (*debug_hooks->late_global_decl) (vnode->decl);
	    }
	  vnode->remove ();
	  changed = true;
	}
      else if (!reachable.contains (vnode) && !vnode->alias)
	{
	  tree init;
	  if (vnode->definition)
	    {
	      if (file)
		fprintf (file, " %s", vnode->name ());
	      changed = true;
	    }
	  /* Keep body if it may be useful for constant folding.  */
	  if ((flag_wpa || flag_incremental_link == INCREMENTAL_LINK_LTO)
	      || ((init = ctor_for_folding (vnode->decl)) == error_mark_node
		  && !POINTER_BOUNDS_P (vnode->decl)))
	    vnode->remove_initializer ();
	  else
	    DECL_INITIAL (vnode->decl) = init;
	  vnode->body_removed = true;
	  vnode->definition = false;
	  vnode->analyzed = false;
	  vnode->aux = NULL;

	  vnode->remove_from_same_comdat_group ();

	  vnode->remove_all_references ();
	}
      else
	vnode->aux = NULL;
    }

  /* Now update address_taken flags and try to promote functions to be
     local.  */
  if (file)
    fprintf (file, "\nClearing address taken flags:");
  FOR_EACH_DEFINED_FUNCTION (node)
    if (node->address_taken
	&& !node->used_from_other_partition)
      {
	if (!node->call_for_symbol_and_aliases
	      (has_addr_references_p, NULL, true)
	    && (!node->instrumentation_clone
		|| !node->instrumented_version
		|| !node->instrumented_version->address_taken))
	  {
	    if (file)
	      fprintf (file, " %s", node->name ());
	    node->address_taken = false;
	    changed = true;
	    if (node->local_p ()
		/* Virtual functions may be kept in cgraph just because
		   of possible later devirtualization.  Do not mark them as
		   local too early so we won't optimize them out before
		   we are done with polymorphic call analysis.  */
		&& (!before_inlining_p
		    || !node->call_for_symbol_and_aliases
			  (is_indirect_call_target_p, NULL, true)))
	      {
		node->local.local = true;
		if (file)
		  fprintf (file, " (local)");
	      }
	  }
      }
  if (file)
    fprintf (file, "\n");

  symtab_node::checking_verify_symtab_nodes ();

  /* If we removed something, perhaps profile could be improved.  */
  if (changed && (optimize || in_lto_p) && ipa_call_summaries)
    FOR_EACH_DEFINED_FUNCTION (node)
      ipa_propagate_frequency (node);

  timevar_pop (TV_IPA_UNREACHABLE);
  return changed;
}
/* Process references to VNODE and set flags WRITTEN, ADDRESS_TAKEN, READ
   as needed, also clear EXPLICIT_REFS if the references to given variable
   do not need to be explicit.  */

void
process_references (varpool_node *vnode,
		    bool *written, bool *address_taken,
		    bool *read, bool *explicit_refs)
{
  int i;
  struct ipa_ref *ref;

  if (!vnode->all_refs_explicit_p ()
      || TREE_THIS_VOLATILE (vnode->decl))
    *explicit_refs = false;

  for (i = 0; vnode->iterate_referring (i, ref)
	      && *explicit_refs && (!*written || !*address_taken || !*read); i++)
    switch (ref->use)
      {
      case IPA_REF_ADDR:
	*address_taken = true;
	break;
      case IPA_REF_LOAD:
	*read = true;
	break;
      case IPA_REF_STORE:
	*written = true;
	break;
      case IPA_REF_ALIAS:
	process_references (dyn_cast<varpool_node *> (ref->referring), written,
			    address_taken, read, explicit_refs);
	break;
      case IPA_REF_CHKP:
	gcc_unreachable ();
      }
}
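
/* Note that the IPA_REF_ALIAS case above recurses through the referring
   alias, so the flags accumulate across the whole alias chain: a store
   through any alias of VNODE also sets *WRITTEN for VNODE itself.  */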
/* Set TREE_READONLY bit.  */
bool
set_readonly_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
{
  TREE_READONLY (vnode->decl) = true;
  return false;
}
/* Set writeonly bit and clear the initializer, since it will not be needed.  */
bool
set_writeonly_bit (varpool_node *vnode, void *data)
{
  vnode->writeonly = true;
  if (optimize || in_lto_p)
    {
      DECL_INITIAL (vnode->decl) = NULL;
      if (!vnode->alias)
	{
	  if (vnode->num_references ())
	    *(bool *) data = true;
	  vnode->remove_all_references ();
	}
    }
  return false;
}
/* Clear the addressable bit of VNODE.  */
bool
clear_addressable_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
{
  vnode->address_taken = false;
  TREE_ADDRESSABLE (vnode->decl) = 0;
  return false;
}
/* Discover variables that no longer have their address taken or that are
   read only and update their flags.

   Return true when unreachable symbol removal should be done.

   FIXME: This cannot be done between gimplify and omp_expand since the
   readonly flag plays a role in what is shared and what is not.  Currently we
   do this transformation as part of whole program visibility and re-do it at
   the ipa-reference pass (to take cloning into account), but it would make
   sense to do it before early optimizations.  */

bool
ipa_discover_readonly_nonaddressable_vars (void)
{
  bool remove_p = false;
  varpool_node *vnode;
  if (dump_file)
    fprintf (dump_file, "Clearing variable flags:");
  FOR_EACH_VARIABLE (vnode)
    if (!vnode->alias
	&& (TREE_ADDRESSABLE (vnode->decl)
	    || !vnode->writeonly
	    || !TREE_READONLY (vnode->decl)))
      {
	bool written = false;
	bool address_taken = false;
	bool read = false;
	bool explicit_refs = true;

	process_references (vnode, &written, &address_taken, &read,
			    &explicit_refs);
	if (!explicit_refs)
	  continue;
	if (!address_taken)
	  {
	    if (TREE_ADDRESSABLE (vnode->decl) && dump_file)
	      fprintf (dump_file, " %s (non-addressable)", vnode->name ());
	    vnode->call_for_symbol_and_aliases (clear_addressable_bit, NULL,
						true);
	  }
	if (!address_taken && !written
	    /* Making variable in explicit section readonly can cause section
	       type conflict.
	       See e.g. gcc.c-torture/compile/pr23237.c */
	    && vnode->get_section () == NULL)
	  {
	    if (!TREE_READONLY (vnode->decl) && dump_file)
	      fprintf (dump_file, " %s (read-only)", vnode->name ());
	    vnode->call_for_symbol_and_aliases (set_readonly_bit, NULL, true);
	  }
	if (!vnode->writeonly && !read && !address_taken && written)
	  {
	    if (dump_file)
	      fprintf (dump_file, " %s (write-only)", vnode->name ());
	    vnode->call_for_symbol_and_aliases (set_writeonly_bit, &remove_p,
						true);
	  }
      }
  if (dump_file)
    fprintf (dump_file, "\n");
  return remove_p;
}
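
/* For illustration, with hypothetical user code (not part of GCC):

     static int flag_seen;                // only ever stored to
     static int limit = 100;              // only ever read
     void mark (void) { flag_seen = 1; }
     int room (int n) { return n < limit; }

   LIMIT can have TREE_READONLY set and FLAG_SEEN can be marked writeonly;
   later passes may then drop the dead stores and the variables
   themselves.  */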
/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor), 'D' (for a destructor), 'P'
   (for chkp static vars constructor) or 'B' (for chkp static bounds
   constructor).  BODY is a STATEMENT_LIST containing GENERIC
   statements.  PRIORITY is the initialization priority for this
   constructor or destructor.

   FINAL specifies whether the externally visible name for collect2 should
   be produced.  */

static void
cgraph_build_static_cdtor_1 (char which, tree body, int priority, bool final)
{
  static int counter = 0;
  char which_buf[16];
  tree decl, name, resdecl;

  /* The priority is encoded in the constructor or destructor name.
     collect2 will sort the names and arrange that they are called at
     program startup.  */
  if (final)
    sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
  else
    /* Produce a sane name, but one not recognizable by collect2, just in
       case we fail to inline the function.  */
    sprintf (which_buf, "sub_%c_%.5d_%d", which, priority, counter++);
  name = get_file_function_name (which_buf);

  decl = build_decl (input_location, FUNCTION_DECL, name,
		     build_function_type_list (void_type_node, NULL_TREE));
  current_function_decl = decl;

  resdecl = build_decl (input_location,
			RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;
  DECL_CONTEXT (resdecl) = decl;

  allocate_struct_function (decl, false);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  if (!targetm.have_ctors_dtors && final)
    {
      TREE_PUBLIC (decl) = 1;
      DECL_PRESERVE_P (decl) = 1;
    }
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      decl_init_priority_insert (decl, priority);
      break;
    case 'P':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("chkp ctor"),
					  NULL,
					  NULL_TREE);
      decl_init_priority_insert (decl, priority);
      break;
    case 'B':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("bnd_legacy"),
					  NULL,
					  NULL_TREE);
      decl_init_priority_insert (decl, priority);
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      decl_fini_priority_insert (decl, priority);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  cgraph_node::add_new_function (decl, false);

  set_cfun (NULL);
  current_function_decl = NULL;
}
/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor), 'D' (for a destructor), 'P'
   (for chkp static vars constructor) or 'B' (for chkp static bounds
   constructor).  BODY is a STATEMENT_LIST containing GENERIC
   statements.  PRIORITY is the initialization priority for this
   constructor or destructor.  */

void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  cgraph_build_static_cdtor_1 (which, body, priority, false);
}
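
/* A minimal usage sketch (hypothetical caller, not part of this file):
   build a STATEMENT_LIST of GENERIC calls and hand it over together with
   a priority, e.g.

     tree body = NULL_TREE;
     append_to_statement_list (build_call_expr (some_fn_decl, 0), &body);
     cgraph_build_static_cdtor ('I', body, DEFAULT_INIT_PRIORITY);

   where some_fn_decl stands for a FUNCTION_DECL taking no arguments.  */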
/* When the target does not have ctors and dtors, we call all constructors
   and destructors from a special initialization/destruction function
   recognized by collect2.

   When we are going to build this function, collect all constructors and
   destructors and turn them into normal functions.  */

static void
record_cdtor_fn (struct cgraph_node *node, vec<tree> *ctors, vec<tree> *dtors)
{
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
    ctors->safe_push (node->decl);
  if (DECL_STATIC_DESTRUCTOR (node->decl))
    dtors->safe_push (node->decl);
  node = cgraph_node::get (node->decl);
  DECL_DISREGARD_INLINE_LIMITS (node->decl) = 1;
}
/* Define global constructor/destructor functions for the CDTORS, of
   which there are LEN.  The CDTORS are sorted by initialization
   priority.  If CTOR_P is true, these are constructors; otherwise,
   they are destructors.  */

static void
build_cdtor (bool ctor_p, const vec<tree> &cdtors)
{
  size_t i,j;
  size_t len = cdtors.length ();

  i = 0;
  while (i < len)
    {
      tree body;
      tree fn;
      priority_type priority;

      /* Find the next batch of constructors/destructors with the same
	 initialization priority.  */
      priority = 0;
      body = NULL_TREE;
      j = i;
      do
	{
	  priority_type p;
	  fn = cdtors[j];
	  p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
	  if (j == i)
	    priority = p;
	  else if (p != priority)
	    break;
	  j++;
	}
      while (j < len);

      /* When there is only one cdtor and target supports them, do nothing.  */
      if (j == i + 1
	  && targetm.have_ctors_dtors)
	{
	  i++;
	  continue;
	}
      /* Otherwise, emit a call to each constructor/destructor in the
	 batch.  */
      for (;i < j; i++)
	{
	  tree call;
	  fn = cdtors[i];
	  call = build_call_expr (fn, 0);
	  if (ctor_p)
	    DECL_STATIC_CONSTRUCTOR (fn) = 0;
	  else
	    DECL_STATIC_DESTRUCTOR (fn) = 0;
	  /* We do not want to optimize away pure/const calls here.
	     When optimizing, these should be already removed, when not
	     optimizing, we want user to be able to breakpoint in them.  */
	  TREE_SIDE_EFFECTS (call) = 1;
	  append_to_statement_list (call, &body);
	}
      gcc_assert (body != NULL_TREE);
      /* Generate a function to call all the functions of like
	 priority.  */
      cgraph_build_static_cdtor_1 (ctor_p ? 'I' : 'D', body, priority, true);
    }
}
/* Comparison function for qsort.  P1 and P2 are actually of type
   "tree *" and point to static constructors.  DECL_INIT_PRIORITY is
   used to determine the sort order.  */

static int
compare_ctor (const void *p1, const void *p2)
{
  tree f1;
  tree f2;
  int priority1;
  int priority2;

  f1 = *(const tree *) p1;
  f2 = *(const tree *) p2;
  priority1 = DECL_INIT_PRIORITY (f1);
  priority2 = DECL_INIT_PRIORITY (f2);

  if (priority1 < priority2)
    return -1;
  else if (priority1 > priority2)
    return 1;
  else
    /* Ensure a stable sort.  Constructors are executed in reverse
       order to make LTO initialize libraries first.  */
    return DECL_UID (f2) - DECL_UID (f1);
}
/* Comparison function for qsort.  P1 and P2 are actually of type
   "tree *" and point to static destructors.  DECL_FINI_PRIORITY is
   used to determine the sort order.  */

static int
compare_dtor (const void *p1, const void *p2)
{
  tree f1;
  tree f2;
  int priority1;
  int priority2;

  f1 = *(const tree *) p1;
  f2 = *(const tree *) p2;
  priority1 = DECL_FINI_PRIORITY (f1);
  priority2 = DECL_FINI_PRIORITY (f2);

  if (priority1 < priority2)
    return -1;
  else if (priority1 > priority2)
    return 1;
  else
    /* Ensure a stable sort.  */
    return DECL_UID (f1) - DECL_UID (f2);
}
/* Generate functions to call static constructors and destructors
   for targets that do not support .ctors/.dtors sections.  These
   functions have magic names which are detected by collect2.  */

static void
build_cdtor_fns (vec<tree> *ctors, vec<tree> *dtors)
{
  if (!ctors->is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      ctors->qsort (compare_ctor);
      build_cdtor (/*ctor_p=*/true, *ctors);
    }

  if (!dtors->is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      dtors->qsort (compare_dtor);
      build_cdtor (/*ctor_p=*/false, *dtors);
    }
}

/* Look for constructors and destructors and produce functions calling them.
   This is needed for targets not supporting ctors or dtors, but we perform
   the transformation also at linktime to merge possibly numerous
   constructors/destructors into a single function to improve code locality
   and reduce size.  */

static unsigned int
ipa_cdtor_merge (void)
{
  /* A vector of FUNCTION_DECLs declared as static constructors.  */
  auto_vec<tree, 20> ctors;
  /* A vector of FUNCTION_DECLs declared as static destructors.  */
  auto_vec<tree, 20> dtors;
  struct cgraph_node *node;
  FOR_EACH_DEFINED_FUNCTION (node)
    if (DECL_STATIC_CONSTRUCTOR (node->decl)
	|| DECL_STATIC_DESTRUCTOR (node->decl))
      record_cdtor_fn (node, &ctors, &dtors);
  build_cdtor_fns (&ctors, &dtors);
  return 0;
}
namespace {

const pass_data pass_data_ipa_cdtor_merge =
{
  IPA_PASS, /* type */
  "cdtor", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_cdtor_merge : public ipa_opt_pass_d
{
public:
  pass_ipa_cdtor_merge (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_cdtor_merge, ctxt,
		      NULL, /* generate_summary */
		      NULL, /* write_summary */
		      NULL, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *) { return ipa_cdtor_merge (); }

}; // class pass_ipa_cdtor_merge

bool
pass_ipa_cdtor_merge::gate (function *)
{
  /* Perform the pass when we have no ctors/dtors support
     or at LTO time to merge multiple constructors into single
     function.  */
  return !targetm.have_ctors_dtors || in_lto_p;
}

} // anon namespace

ipa_opt_pass_d *
make_pass_ipa_cdtor_merge (gcc::context *ctxt)
{
  return new pass_ipa_cdtor_merge (ctxt);
}
/* Invalid pointer representing BOTTOM for single user dataflow.  */
#define BOTTOM ((cgraph_node *)(size_t) 2)

/* Meet operation for single user dataflow.
   Here we want to associate variables with a single function that may access
   them.

   FUNCTION is the current single user of a variable; VAR is another variable
   whose single-user information is merged into it.  The lattice is stored in
   SINGLE_USER_MAP.

   We represent:
    - TOP by no entry in SINGLE_USER_MAP
    - BOTTOM by BOTTOM in AUX pointer (to save lookups)
    - known single user by cgraph pointer in SINGLE_USER_MAP.  */

cgraph_node *
meet (cgraph_node *function, varpool_node *var,
      hash_map<varpool_node *, cgraph_node *> &single_user_map)
{
  struct cgraph_node *user, **f;

  if (var->aux == BOTTOM)
    return BOTTOM;

  f = single_user_map.get (var);
  if (!f)
    return function;
  user = *f;
  if (!function)
    return user;
  else if (function != user)
    return BOTTOM;
  else
    return function;
}
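
/* The code above thus implements the usual three-level lattice:

     meet (TOP, x)     = x
     meet (x, x)       = x
     meet (x, y)       = BOTTOM   (for x != y)
     meet (BOTTOM, _)  = BOTTOM

   with TOP encoded as a missing SINGLE_USER_MAP entry or a NULL
   FUNCTION.  */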
/* Propagation step of single-use dataflow.

   Check all uses of VNODE and see if they are all made by the single
   function FUNCTION.  SINGLE_USER_MAP represents the dataflow lattice.  */

cgraph_node *
propagate_single_user (varpool_node *vnode, cgraph_node *function,
		       hash_map<varpool_node *, cgraph_node *> &single_user_map)
{
  int i;
  struct ipa_ref *ref;

  gcc_assert (!vnode->externally_visible);

  /* If node is an alias, first meet with its target.  */
  if (vnode->alias)
    function = meet (function, vnode->get_alias_target (), single_user_map);

  /* Check all users and see if they correspond to a single function.  */
  for (i = 0; vnode->iterate_referring (i, ref) && function != BOTTOM; i++)
    {
      struct cgraph_node *cnode = dyn_cast <cgraph_node *> (ref->referring);
      if (cnode)
	{
	  if (cnode->global.inlined_to)
	    cnode = cnode->global.inlined_to;
	  if (!function)
	    function = cnode;
	  else if (function != cnode)
	    function = BOTTOM;
	}
      else
	function = meet (function, dyn_cast <varpool_node *> (ref->referring),
			 single_user_map);
    }
  return function;
}
/* Pass setting used_by_single_function flag.
   This flag is set on a variable when there is only one function that may
   possibly refer to it.  */

static unsigned int
ipa_single_use (void)
{
  varpool_node *first = (varpool_node *) (void *) 1;
  varpool_node *var;
  hash_map<varpool_node *, cgraph_node *> single_user_map;

  FOR_EACH_DEFINED_VARIABLE (var)
    if (!var->all_refs_explicit_p ())
      var->aux = BOTTOM;
    else
      {
	/* Enqueue symbol for dataflow.  */
	var->aux = first;
	first = var;
      }

  /* The actual dataflow.  */

  while (first != (void *) 1)
    {
      cgraph_node *user, *orig_user, **f;

      var = first;
      first = (varpool_node *) first->aux;

      f = single_user_map.get (var);
      if (f)
	orig_user = *f;
      else
	orig_user = NULL;
      user = propagate_single_user (var, orig_user, single_user_map);

      gcc_checking_assert (var->aux != BOTTOM);

      /* If user differs, enqueue all references.  */
      if (user != orig_user)
	{
	  unsigned int i;
	  ipa_ref *ref;

	  single_user_map.put (var, user);

	  /* Enqueue all aliases for re-processing.  */
	  for (i = 0; var->iterate_direct_aliases (i, ref); i++)
	    if (!ref->referring->aux)
	      {
		ref->referring->aux = first;
		first = dyn_cast <varpool_node *> (ref->referring);
	      }
	  /* Enqueue all users for re-processing.  */
	  for (i = 0; var->iterate_reference (i, ref); i++)
	    if (!ref->referred->aux
		&& ref->referred->definition
		&& is_a <varpool_node *> (ref->referred))
	      {
		ref->referred->aux = first;
		first = dyn_cast <varpool_node *> (ref->referred);
	      }

	  /* If user is BOTTOM, just punt on this var.  */
	  if (user == BOTTOM)
	    var->aux = BOTTOM;
	  else
	    var->aux = NULL;
	}
      else
	var->aux = NULL;
    }

  FOR_EACH_DEFINED_VARIABLE (var)
    {
      if (var->aux != BOTTOM)
	{
	  /* Not having the single user known means that the VAR is
	     unreachable.  Either someone forgot to remove unreachable
	     variables or the reachability here is wrong.  */

	  gcc_checking_assert (single_user_map.get (var));

	  if (dump_file)
	    {
	      fprintf (dump_file, "Variable %s is used by single function\n",
		       var->dump_name ());
	    }
	  var->used_by_single_function = true;
	}
      var->aux = NULL;
    }
  return 0;
}
namespace {

const pass_data pass_data_ipa_single_use =
{
  IPA_PASS, /* type */
  "single-use", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_single_use : public ipa_opt_pass_d
{
public:
  pass_ipa_single_use (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_single_use, ctxt,
		      NULL, /* generate_summary */
		      NULL, /* write_summary */
		      NULL, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return ipa_single_use (); }

}; // class pass_ipa_single_use

} // anon namespace

ipa_opt_pass_d *
make_pass_ipa_single_use (gcc::context *ctxt)
{
  return new pass_ipa_single_use (ctxt);
}
/* Materialize all clones.  */

namespace {

const pass_data pass_data_materialize_all_clones =
{
  SIMPLE_IPA_PASS, /* type */
  "materialize-all-clones", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_OPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_materialize_all_clones : public simple_ipa_opt_pass
{
public:
  pass_materialize_all_clones (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_materialize_all_clones, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      symtab->materialize_all_clones ();
      return 0;
    }

}; // class pass_materialize_all_clones

} // anon namespace

simple_ipa_opt_pass *
make_pass_materialize_all_clones (gcc::context *ctxt)
{
  return new pass_materialize_all_clones (ctxt);
}