/* Basic IPA optimizations and utilities.
   Copyright (C) 2003-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "stringpool.h"
#include "basic-block.h"
#include "plugin-api.h"
#include "hard-reg-set.h"
#include "tree-pass.h"
#include "gimple-expr.h"
#include "tree-iterator.h"
#include "ipa-utils.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "ipa-inline.h"
#include "tree-inline.h"
#include "internal-fn.h"
#include "tree-ssa-alias.h"
/* Return true when NODE has ADDR reference.  */

static bool
has_addr_references_p (struct cgraph_node *node,
		       void *data ATTRIBUTE_UNUSED)
{
  int i;
  struct ipa_ref *ref = NULL;

  for (i = 0; node->iterate_referring (i, ref); i++)
    if (ref->use == IPA_REF_ADDR)
      return true;
  return false;
}
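
/* For example (see remove_unreachable_nodes below), this is used as a
   callback:

     node->call_for_symbol_thunks_and_aliases (has_addr_references_p,
					       NULL, true)

   which returns true iff the node or any of its thunks and aliases has
   its address taken.  */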
/* Look for all functions inlined to NODE and update their inlined_to pointers
   to INLINED_TO.  */

static void
update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
{
  struct cgraph_edge *e;
  for (e = node->callees; e; e = e->next_callee)
    if (e->callee->global.inlined_to)
      {
	e->callee->global.inlined_to = inlined_to;
	update_inlined_to_pointer (e->callee, inlined_to);
      }
}
/* Add symtab NODE to queue starting at FIRST.

   The queue is linked via AUX pointers and terminated by pointer to 1.
   We enqueue nodes at two occasions: when we find them reachable or when we
   find their bodies needed for further cloning.  In the second case we mark
   them by pointer to 2 after processing so they are re-queued when they
   become reachable.  */

static void
enqueue_node (symtab_node *node, symtab_node **first,
	      hash_set<symtab_node *> *reachable)
{
  /* Node is still in queue; do nothing.  */
  if (node->aux && node->aux != (void *) 2)
    return;

  /* Node was already processed as unreachable, re-enqueue
     only if it became reachable now.  */
  if (node->aux == (void *)2 && !reachable->contains (node))
    return;

  node->aux = *first;
  *first = node;
}
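
/* Illustration of the AUX encoding used by the work list (a sketch, not
   executed code):

     node->aux == NULL         never enqueued
     node->aux == (void *) 2   processed as part of the boundary
     anything else             in the queue; AUX points to the next node

   E.g. enqueueing A and then B onto an empty queue yields
   first == B, B->aux == A, A->aux == (void *) 1 (the terminator).  */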
/* Process references.  */

static void
process_references (symtab_node *snode,
		    symtab_node **first,
		    bool before_inlining_p,
		    hash_set<symtab_node *> *reachable)
{
  int i;
  struct ipa_ref *ref = NULL;
  for (i = 0; snode->iterate_reference (i, ref); i++)
    {
      symtab_node *node = ref->referred;
      symtab_node *body = node->ultimate_alias_target ();

      if (node->definition && !node->in_other_partition
	  && ((!DECL_EXTERNAL (node->decl) || node->alias)
	      || (((before_inlining_p
		    && ((TREE_CODE (node->decl) != FUNCTION_DECL
			 && optimize)
			|| (TREE_CODE (node->decl) == FUNCTION_DECL
			    && opt_for_fn (body->decl, optimize))
			|| (symtab->state < IPA_SSA
			    && lookup_attribute
				 ("always_inline",
				  DECL_ATTRIBUTES (body->decl))))))
		  /* We use variable constructors during late compilation for
		     constant folding.  Keep references alive so partitioning
		     knows about potential references.  */
		  || (TREE_CODE (node->decl) == VAR_DECL
		      && flag_wpa
		      && ctor_for_folding (node->decl)
			 != error_mark_node))))
	{
	  /* Be sure that we will not optimize out alias target
	     body.  */
	  if (DECL_EXTERNAL (node->decl)
	      && node->alias
	      && before_inlining_p)
	    reachable->add (body);
	  reachable->add (node);
	}
      enqueue_node (node, first, reachable);
    }
}
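
/* A hedged example of the case the code above cares about (hypothetical
   source, not from GCC):

     extern inline int f (void) { return 0; }   // DECL_EXTERNAL with body
     int (*p) (void) = f;                       // reference to f

   Before inlining, the body of f is added to REACHABLE so the inliner can
   still use it; after inlining, only the declaration stays in the boundary
   via enqueue_node.  */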
/* EDGE is a polymorphic call.  If BEFORE_INLINING_P is set, mark
   all its potential targets as reachable to permit later inlining if
   devirtualization happens.  After inlining still keep their declarations
   around, so we can devirtualize to a direct call.

   Also try to make trivial devirtualization when no or only one target is
   possible.  */

static void
walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
			       struct cgraph_edge *edge,
			       symtab_node **first,
			       hash_set<symtab_node *> *reachable,
			       bool before_inlining_p)
{
  unsigned int i;
  void *cache_token;
  bool final;
  vec <cgraph_node *>targets
    = possible_polymorphic_call_targets
	(edge, &final, &cache_token);
  if (!reachable_call_targets->add (cache_token))
    {
      for (i = 0; i < targets.length (); i++)
	{
	  struct cgraph_node *n = targets[i];

	  /* Do not bother to mark virtual methods in anonymous namespace;
	     either we will find use of virtual table defining it, or it is
	     unused.  */
	  if (TREE_CODE (TREE_TYPE (n->decl)) == METHOD_TYPE
	      && type_in_anonymous_namespace_p
		   (method_class_type (TREE_TYPE (n->decl))))
	    continue;

	  symtab_node *body = n->function_symbol ();

	  /* Prior inlining, keep alive bodies of possible targets for
	     devirtualization.  */
	  if (n->definition
	      && (before_inlining_p
		  && opt_for_fn (body->decl, optimize)
		  && opt_for_fn (body->decl, flag_devirtualize)))
	    {
	      /* Be sure that we will not optimize out alias target
		 body.  */
	      if (DECL_EXTERNAL (n->decl)
		  && n->alias
		  && before_inlining_p)
		reachable->add (body);
	      reachable->add (n);
	    }
	  /* Even after inlining we want to keep the possible targets in the
	     boundary, so late passes can still produce direct call even if
	     the chance for inlining is lost.  */
	  enqueue_node (n, first, reachable);
	}
    }

  /* Very trivial devirtualization; when the type is
     final or anonymous (so we know all its derivation)
     and there is only one possible virtual call target,
     make the edge direct.  */
  if (final)
    {
      if (targets.length () <= 1 && dbg_cnt (devirt))
	{
	  cgraph_node *target, *node = edge->caller;
	  if (targets.length () == 1)
	    target = targets[0];
	  else
	    target = cgraph_node::get_create
		       (builtin_decl_implicit (BUILT_IN_UNREACHABLE));

	  if (dump_enabled_p ())
	    {
	      location_t locus;
	      if (edge->call_stmt)
		locus = gimple_location (edge->call_stmt);
	      else
		locus = UNKNOWN_LOCATION;
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
			       "devirtualizing call in %s/%i to %s/%i\n",
			       edge->caller->name (), edge->caller->order,
			       target->name (), target->order);
	    }
	  edge = edge->make_direct (target);
	  if (inline_summaries)
	    inline_update_overall_summary (node);
	  else if (edge->call_stmt)
	    {
	      edge->redirect_call_stmt_to_callee ();

	      /* Call to __builtin_unreachable shouldn't be instrumented.  */
	      if (!targets.length ())
		gimple_call_set_with_bounds (edge->call_stmt, false);
	    }
	}
    }
}
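
/* A small illustration of the trivial devirtualization above (hypothetical
   C++ source):

     struct S final { virtual int f () { return 1; } };
     int g (S *s) { return s->f (); }   // exactly one possible target

   Because S is final, the possible-target set has length 1 and the
   indirect edge becomes a direct call to S::f; with zero targets, a call
   to __builtin_unreachable is substituted instead.  */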
/* Perform reachability analysis and reclaim all unreachable nodes.

   The algorithm is basically mark&sweep but with some extra refinements:

   - reachable extern inline functions need special handling; the bodies need
     to stay in memory until inlining in the hope that they will be inlined.
     After inlining we release their bodies and turn them into unanalyzed
     nodes even when they are reachable.

   - virtual functions are kept in the callgraph even if they seem unreachable
     in the hope that calls to them will be devirtualized.

     Again we remove them after inlining.  In late optimization some
     devirtualization may happen, but it is not important since we won't inline
     the call.  In theory early opts and IPA should work out all important
     cases.

   - virtual clones need bodies of their origins for later materialization;
     this means that we want to keep the body even if the origin is unreachable
     otherwise.  To avoid origins from sitting in the callgraph and being
     walked by IPA passes, we turn them into unanalyzed nodes with body
     defined.

     We maintain the set of function declarations where the body needs to
     stay in body_needed_for_clonning.

     Inline clones represent a special case: their declaration matches the
     declaration of the origin, and cgraph_remove_node already knows how to
     reshape the callgraph and preserve the body when an offline copy of a
     function or an inline clone is being removed.

   - C++ virtual tables keyed to other units are represented as DECL_EXTERNAL
     variables with DECL_INITIAL set.  We finalize these and keep reachable
     ones around for constant folding purposes.  After inlining we however
     stop walking their references to let everything static referenced by them
     be removed when it is otherwise unreachable.

   We maintain a queue of both reachable symbols (i.e. defined symbols that
   need to stay) and symbols that are in the boundary (i.e. external symbols
   referenced by reachable symbols or origins of clones).  The queue is
   represented as a linked list via AUX pointers terminated by 1.

   At the end we keep all reachable symbols.  For symbols in the boundary we
   always turn the definition into a declaration, but we may keep the function
   body around based on body_needed_for_clonning.

   All symbols that enter the queue have AUX pointer non-zero and are in the
   boundary.  Pointer set REACHABLE is used to track reachable symbols.

   Every symbol can be visited twice - once as part of the boundary and once
   as a real reachable symbol.  enqueue_node needs to decide whether the
   node needs to be re-queued for second processing.  For this purpose
   we set the AUX pointer of processed symbols in the boundary to constant 2.  */
bool
symbol_table::remove_unreachable_nodes (FILE *file)
{
  symtab_node *first = (symtab_node *) (void *) 1;
  struct cgraph_node *node, *next;
  varpool_node *vnode, *vnext;
  bool changed = false;
  hash_set<symtab_node *> reachable;
  hash_set<tree> body_needed_for_clonning;
  hash_set<void *> reachable_call_targets;
  bool before_inlining_p = symtab->state < (!optimize ? IPA_SSA
					    : IPA_SSA_AFTER_INLINING);

  timevar_push (TV_IPA_UNREACHABLE);
  build_type_inheritance_graph ();
  if (file)
    fprintf (file, "\nReclaiming functions:");
#ifdef ENABLE_CHECKING
  FOR_EACH_FUNCTION (node)
    gcc_assert (!node->aux);
  FOR_EACH_VARIABLE (vnode)
    gcc_assert (!vnode->aux);
#endif
  /* Mark functions whose bodies are obviously needed.
     This is mostly when they can be referenced externally.  Inline clones
     are special since their declarations are shared with master clone and thus
     cgraph_can_remove_if_no_direct_calls_and_refs_p should not be called on them.  */
  FOR_EACH_FUNCTION (node)
    {
      node->used_as_abstract_origin = false;
      if (node->definition
	  && !node->global.inlined_to
	  && !node->in_other_partition
	  && !node->can_remove_if_no_direct_calls_and_refs_p ())
	{
	  gcc_assert (!node->global.inlined_to);
	  reachable.add (node);
	  enqueue_node (node, &first, &reachable);
	}
      else
	gcc_assert (!node->aux);
    }

  /* Mark variables that are obviously needed.  */
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (!vnode->can_remove_if_no_refs_p()
	&& !vnode->in_other_partition)
      {
	reachable.add (vnode);
	enqueue_node (vnode, &first, &reachable);
      }
  /* Perform reachability analysis.  */
  while (first != (symtab_node *) (void *) 1)
    {
      bool in_boundary_p = !reachable.contains (first);
      symtab_node *node = first;

      first = (symtab_node *)first->aux;

      /* If we are processing symbol in boundary, mark its AUX pointer for
	 possible later re-processing in enqueue_node.  */
      if (in_boundary_p)
	node->aux = (void *)2;

      if (TREE_CODE (node->decl) == FUNCTION_DECL
	  && DECL_ABSTRACT_ORIGIN (node->decl))
	{
	  struct cgraph_node *origin_node
	    = cgraph_node::get (DECL_ABSTRACT_ORIGIN (node->decl));
	  if (origin_node && !origin_node->used_as_abstract_origin)
	    {
	      origin_node->used_as_abstract_origin = true;
	      gcc_assert (!origin_node->prev_sibling_clone);
	      gcc_assert (!origin_node->next_sibling_clone);
	      for (cgraph_node *n = origin_node->clones; n;
		   n = n->next_sibling_clone)
		if (n->decl == DECL_ABSTRACT_ORIGIN (node->decl))
		  n->used_as_abstract_origin = true;
	      enqueue_node (origin_node, &first, &reachable);
	    }
	}

      /* If any symbol in a comdat group is reachable, force
	 all externally visible symbols in the same comdat
	 group to be reachable as well.  Comdat-local symbols
	 can be discarded if all uses were inlined.  */
      if (node->same_comdat_group)
	{
	  symtab_node *next;
	  for (next = node->same_comdat_group;
	       next != node;
	       next = next->same_comdat_group)
	    if (!next->comdat_local_p ()
		&& !reachable.add (next))
	      enqueue_node (next, &first, &reachable);
	}

      /* Mark references as reachable.  */
      process_references (node, &first, before_inlining_p, &reachable);
      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
	{
	  /* Mark the callees reachable unless they are direct calls to extern
	     inline functions we decided to not inline.  */
	  if (!in_boundary_p)
	    {
	      struct cgraph_edge *e;
	      /* Keep alive possible targets for devirtualization.  */
	      if (opt_for_fn (cnode->decl, optimize)
		  && opt_for_fn (cnode->decl, flag_devirtualize))
		{
		  struct cgraph_edge *next;
		  for (e = cnode->indirect_calls; e; e = next)
		    {
		      next = e->next_callee;
		      if (e->indirect_info->polymorphic)
			walk_polymorphic_call_targets (&reachable_call_targets,
						       e, &first, &reachable,
						       before_inlining_p);
		    }
		}
	      for (e = cnode->callees; e; e = e->next_callee)
		{
		  symtab_node *body = e->callee->function_symbol ();
		  if (e->callee->definition
		      && !e->callee->in_other_partition
		      && (!e->inline_failed
			  || !DECL_EXTERNAL (e->callee->decl)
			  || e->callee->alias
			  || (before_inlining_p
			      && (opt_for_fn (body->decl, optimize)
				  || (symtab->state < IPA_SSA
				      && lookup_attribute
					   ("always_inline",
					    DECL_ATTRIBUTES (body->decl)))))))
		    {
		      /* Be sure that we will not optimize out alias target
			 body.  */
		      if (DECL_EXTERNAL (e->callee->decl)
			  && e->callee->alias
			  && before_inlining_p)
			reachable.add (body);
		      reachable.add (e->callee);
		    }
		  enqueue_node (e->callee, &first, &reachable);
		}

	      /* When inline clone exists, mark body to be preserved so when removing
		 offline copy of the function we don't kill it.  */
	      if (cnode->global.inlined_to)
		body_needed_for_clonning.add (cnode->decl);

	      /* For non-inline clones, force their origins to the boundary and ensure
		 that body is not removed.  */
	      while (cnode->clone_of)
		{
		  bool noninline = cnode->clone_of->decl != cnode->decl;
		  cnode = cnode->clone_of;
		  if (noninline)
		    {
		      body_needed_for_clonning.add (cnode->decl);
		      enqueue_node (cnode, &first, &reachable);
		    }
		}
	    }

	  /* If any reachable function has simd clones, mark them as
	     reachable as well.  */
	  if (cnode->simd_clones)
	    {
	      cgraph_node *next;
	      for (next = cnode->simd_clones;
		   next;
		   next = next->simdclone->next_clone)
		if (in_boundary_p
		    || !reachable.add (next))
		  enqueue_node (next, &first, &reachable);
	    }
	}
      /* When we see constructor of external variable, keep referred nodes in the
	 boundary.  This will also hold initializers of the external vars NODE
	 refers to.  */
      varpool_node *vnode = dyn_cast <varpool_node *> (node);
      if (vnode
	  && DECL_EXTERNAL (node->decl)
	  && !vnode->alias
	  && in_boundary_p)
	{
	  struct ipa_ref *ref = NULL;
	  for (int i = 0; node->iterate_reference (i, ref); i++)
	    enqueue_node (ref->referred, &first, &reachable);
	}
    }
  /* Remove unreachable functions.  */
  for (node = first_function (); node; node = next)
    {
      next = next_function (node);

      /* If node is not needed at all, remove it.  */
      if (!node->aux)
	{
	  if (file)
	    fprintf (file, " %s/%i", node->name (), node->order);
	  node->remove ();
	  changed = true;
	}
      /* If node is unreachable, remove its body.  */
      else if (!reachable.contains (node))
	{
	  if (!body_needed_for_clonning.contains (node->decl))
	    node->release_body ();
	  else if (!node->clone_of)
	    gcc_assert (in_lto_p || DECL_RESULT (node->decl));
	  if (node->definition)
	    {
	      if (file)
		fprintf (file, " %s/%i", node->name (), node->order);
	      node->body_removed = true;
	      node->analyzed = false;
	      node->definition = false;
	      node->cpp_implicit_alias = false;
	      node->alias = false;
	      node->thunk.thunk_p = false;
	      node->weakref = false;
	      /* After early inlining we drop always_inline attributes on
		 bodies of functions that are still referenced (have their
		 address taken).  */
	      DECL_ATTRIBUTES (node->decl)
		= remove_attribute ("always_inline",
				    DECL_ATTRIBUTES (node->decl));
	      if (!node->in_other_partition)
		node->local.local = false;
	      node->remove_callees ();
	      node->remove_from_same_comdat_group ();
	      node->remove_all_references ();
	      changed = true;
	      if (node->thunk.thunk_p
		  && node->thunk.add_pointer_bounds_args)
		{
		  node->thunk.thunk_p = false;
		  node->thunk.add_pointer_bounds_args = false;
		}
	    }
	}
      else
	gcc_assert (node->clone_of || !node->has_gimple_body_p ()
		    || in_lto_p || DECL_RESULT (node->decl));
    }
  /* Inline clones might be kept around so their materializing allows further
     cloning.  If the function the clone is inlined into is removed, we need
     to turn it into a normal clone.  */
  FOR_EACH_FUNCTION (node)
    {
      if (node->global.inlined_to
	  && !node->callers)
	{
	  gcc_assert (node->clones);
	  node->global.inlined_to = NULL;
	  update_inlined_to_pointer (node, node);
	}
      node->aux = NULL;
    }
  /* Remove unreachable variables.  */
  if (file)
    fprintf (file, "\nReclaiming variables:");
  for (vnode = first_variable (); vnode; vnode = vnext)
    {
      vnext = next_variable (vnode);
      if (!vnode->aux
	  /* For can_refer_decl_in_current_unit_p we want to track for
	     all external variables if they are defined in other partition.  */
	  && (!flag_ltrans || !DECL_EXTERNAL (vnode->decl)))
	{
	  if (file)
	    fprintf (file, " %s/%i", vnode->name (), vnode->order);
	  vnode->remove ();
	  changed = true;
	}
      else if (!reachable.contains (vnode))
	{
	  tree init;
	  if (vnode->definition)
	    {
	      if (file)
		fprintf (file, " %s", vnode->name ());
	      changed = true;
	    }
	  /* Keep body if it may be useful for constant folding.  */
	  if ((init = ctor_for_folding (vnode->decl)) == error_mark_node
	      && !POINTER_BOUNDS_P (vnode->decl))
	    vnode->remove_initializer ();
	  else
	    DECL_INITIAL (vnode->decl) = init;
	  vnode->body_removed = true;
	  vnode->definition = false;
	  vnode->analyzed = false;
	  vnode->aux = NULL;

	  vnode->remove_from_same_comdat_group ();

	  vnode->remove_all_references ();
	}
      else
	vnode->aux = NULL;
    }
  /* Now update address_taken flags and try to promote functions to be local.  */
  if (file)
    fprintf (file, "\nClearing address taken flags:");
  FOR_EACH_DEFINED_FUNCTION (node)
    if (node->address_taken
	&& !node->used_from_other_partition)
      {
	if (!node->call_for_symbol_thunks_and_aliases
	      (has_addr_references_p, NULL, true)
	    && (!node->instrumentation_clone
		|| !node->instrumented_version
		|| !node->instrumented_version->address_taken))
	  {
	    if (file)
	      fprintf (file, " %s", node->name ());
	    node->address_taken = false;
	    changed = true;
	    if (node->local_p ())
	      {
		node->local.local = true;
		if (file)
		  fprintf (file, " (local)");
	      }
	  }
      }
  if (file)
    fprintf (file, "\n");

#ifdef ENABLE_CHECKING
  symtab_node::verify_symtab_nodes ();
#endif

  /* If we removed something, perhaps profile could be improved.  */
  if (changed && optimize && inline_edge_summary_vec.exists ())
    FOR_EACH_DEFINED_FUNCTION (node)
      ipa_propagate_frequency (node);

  timevar_pop (TV_IPA_UNREACHABLE);
  return changed;
}
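
/* Usage sketch (an assumption about the pass manager, for illustration
   only): passes request this cleanup via the TODO_remove_functions flag,
   which ends up doing roughly

     changed = symtab->remove_unreachable_nodes (dump_file);

   so the return value reports whether anything was actually removed.  */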
/* Process references to VNODE and set flags WRITTEN, ADDRESS_TAKEN, READ
   as needed, also clear EXPLICIT_REFS if the references to given variable
   do not need to be explicit.  */

static void
process_references (varpool_node *vnode,
		    bool *written, bool *address_taken,
		    bool *read, bool *explicit_refs)
{
  int i;
  struct ipa_ref *ref;

  if (!vnode->all_refs_explicit_p ()
      || TREE_THIS_VOLATILE (vnode->decl))
    *explicit_refs = false;

  for (i = 0; vnode->iterate_referring (i, ref)
	      && *explicit_refs && (!*written || !*address_taken || !*read); i++)
    switch (ref->use)
      {
      case IPA_REF_ADDR:
	*address_taken = true;
	break;
      case IPA_REF_LOAD:
	*read = true;
	break;
      case IPA_REF_STORE:
	*written = true;
	break;
      case IPA_REF_ALIAS:
	process_references (dyn_cast<varpool_node *> (ref->referring), written,
			    address_taken, read, explicit_refs);
	break;
      }
}
/* Set TREE_READONLY bit.  */

static bool
set_readonly_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
{
  TREE_READONLY (vnode->decl) = true;
  return false;
}
/* Set writeonly bit and clear the initializer, since it will not be needed.  */

static bool
set_writeonly_bit (varpool_node *vnode, void *data)
{
  vnode->writeonly = true;
  if (optimize)
    {
      DECL_INITIAL (vnode->decl) = NULL;
      if (!vnode->alias)
	{
	  if (vnode->num_references ())
	    *(bool *)data = true;
	  vnode->remove_all_references ();
	}
    }
  return false;
}
/* Clear addressable bit of VNODE.  */

static bool
clear_addressable_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
{
  vnode->address_taken = false;
  TREE_ADDRESSABLE (vnode->decl) = 0;
  return false;
}
/* Discover variables that have no longer address taken or that are read only
   and update their flags.

   Return true when unreachable symbol removal should be done.

   FIXME: This can not be done in between gimplify and omp_expand since
   readonly flag plays role on what is shared and what is not.  Currently we do
   this transformation as part of whole program visibility and re-do at
   ipa-reference pass (to take into account cloning), but it would
   make sense to do it before early optimizations.  */

bool
ipa_discover_readonly_nonaddressable_vars (void)
{
  bool remove_p = false;
  varpool_node *vnode;
  if (dump_file)
    fprintf (dump_file, "Clearing variable flags:");
  FOR_EACH_VARIABLE (vnode)
    if (!vnode->alias
	&& (TREE_ADDRESSABLE (vnode->decl)
	    || !vnode->writeonly
	    || !TREE_READONLY (vnode->decl)))
      {
	bool written = false;
	bool address_taken = false;
	bool read = false;
	bool explicit_refs = true;

	process_references (vnode, &written, &address_taken, &read,
			    &explicit_refs);
	if (!explicit_refs)
	  continue;
	if (!address_taken)
	  {
	    if (TREE_ADDRESSABLE (vnode->decl) && dump_file)
	      fprintf (dump_file, " %s (non-addressable)", vnode->name ());
	    vnode->call_for_node_and_aliases (clear_addressable_bit, NULL,
					      true);
	  }
	if (!address_taken && !written
	    /* Making variable in explicit section readonly can cause section
	       type conflict.
	       See e.g. gcc.c-torture/compile/pr23237.c */
	    && vnode->get_section () == NULL)
	  {
	    if (!TREE_READONLY (vnode->decl) && dump_file)
	      fprintf (dump_file, " %s (read-only)", vnode->name ());
	    vnode->call_for_node_and_aliases (set_readonly_bit, NULL, true);
	  }
	if (!vnode->writeonly && !read && !address_taken && written)
	  {
	    if (dump_file)
	      fprintf (dump_file, " %s (write-only)", vnode->name ());
	    vnode->call_for_node_and_aliases (set_writeonly_bit, &remove_p,
					      true);
	  }
      }
  if (dump_file)
    fprintf (dump_file, "\n");
  return remove_p;
}
/* Free inline summary.  */

namespace {

const pass_data pass_data_ipa_free_inline_summary =
{
  SIMPLE_IPA_PASS, /* type */
  "free-inline-summary", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_FREE_INLINE_SUMMARY, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  /* Early optimizations may make function unreachable.  We can not
     remove unreachable functions as part of the early opts pass because
     TODOs are run before subpasses.  Do it here.  */
  ( TODO_remove_functions | TODO_dump_symtab ), /* todo_flags_finish */
};
class pass_ipa_free_inline_summary : public simple_ipa_opt_pass
{
public:
  pass_ipa_free_inline_summary (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_ipa_free_inline_summary, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      inline_free_summary ();
      return 0;
    }

}; // class pass_ipa_free_inline_summary

} // anon namespace
simple_ipa_opt_pass *
make_pass_ipa_free_inline_summary (gcc::context *ctxt)
{
  return new pass_ipa_free_inline_summary (ctxt);
}
/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor), 'D' (for a destructor), 'P'
   (for chkp static vars constructor) or 'B' (for chkp static bounds
   constructor).  BODY is a STATEMENT_LIST containing GENERIC
   statements.  PRIORITY is the initialization priority for this
   constructor or destructor.

   FINAL specifies whether the externally visible name for collect2
   should be produced.  */
static void
cgraph_build_static_cdtor_1 (char which, tree body, int priority, bool final)
{
  static int counter = 0;
  char which_buf[16];
  tree decl, name, resdecl;

  /* The priority is encoded in the constructor or destructor name.
     collect2 will sort the names and arrange that they are called at
     program startup.  */
  if (final)
    sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
  else
    /* Produce sane name but one not recognizable by collect2, just for the
       case we fail to inline the function.  */
    sprintf (which_buf, "sub_%c_%.5d_%d", which, priority, counter++);
  name = get_file_function_name (which_buf);

  decl = build_decl (input_location, FUNCTION_DECL, name,
		     build_function_type_list (void_type_node, NULL_TREE));
  current_function_decl = decl;

  resdecl = build_decl (input_location,
			RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;
  DECL_CONTEXT (resdecl) = decl;

  allocate_struct_function (decl, false);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  if (!targetm.have_ctors_dtors && final)
    {
      TREE_PUBLIC (decl) = 1;
      DECL_PRESERVE_P (decl) = 1;
    }
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      decl_init_priority_insert (decl, priority);
      break;
    case 'P':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("chkp ctor"),
					  NULL,
					  NULL_TREE);
      decl_init_priority_insert (decl, priority);
      break;
    case 'B':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("bnd_legacy"),
					  NULL,
					  NULL_TREE);
      decl_init_priority_insert (decl, priority);
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      decl_fini_priority_insert (decl, priority);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  cgraph_node::add_new_function (decl, false);

  set_cfun (NULL);
  current_function_decl = NULL;
}
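
/* For illustration (the exact mangling is up to get_file_function_name):
   a FINAL constructor of priority 65 gets a name along the lines of
   "_GLOBAL__I_00065_0", which collect2 recognizes and sorts by the
   embedded priority, while the non-final variant gets a "sub_I_00065_0"
   style name that collect2 deliberately does not match.  */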
/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor), 'D' (for a destructor), 'P'
   (for chkp static vars constructor) or 'B' (for chkp static bounds
   constructor).  BODY is a STATEMENT_LIST containing GENERIC
   statements.  PRIORITY is the initialization priority for this
   constructor or destructor.  */

void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  cgraph_build_static_cdtor_1 (which, body, priority, false);
}
/* A vector of FUNCTION_DECLs declared as static constructors.  */
static vec<tree> static_ctors;
/* A vector of FUNCTION_DECLs declared as static destructors.  */
static vec<tree> static_dtors;
/* When target does not have ctors and dtors, we call all constructors
   and destructors by a special initialization/destruction function
   recognized by collect2.

   When we are going to build this function, collect all constructors and
   destructors and turn them into normal functions.  */

static void
record_cdtor_fn (struct cgraph_node *node)
{
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
    static_ctors.safe_push (node->decl);
  if (DECL_STATIC_DESTRUCTOR (node->decl))
    static_dtors.safe_push (node->decl);
  node = cgraph_node::get (node->decl);
  DECL_DISREGARD_INLINE_LIMITS (node->decl) = 1;
}
/* Define global constructor/destructor functions for the CDTORS, of
   which there are LEN.  The CDTORS are sorted by initialization
   priority.  If CTOR_P is true, these are constructors; otherwise,
   they are destructors.  */

static void
build_cdtor (bool ctor_p, vec<tree> cdtors)
{
  size_t i, start;
  size_t len = cdtors.length ();

  i = 0;
  while (i < len)
    {
      tree body;
      tree fn;
      priority_type priority;

      priority = 0;
      body = NULL_TREE;
      /* Find the next batch of constructors/destructors with the same
	 initialization priority.  */
      start = i;
      do
	{
	  priority_type p;
	  fn = cdtors[i];
	  p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
	  if (!body)
	    priority = p;
	  else if (p != priority)
	    break;
	  i++;
	}
      while (i < len);

      /* When there is only one cdtor and target supports them, do nothing.  */
      if (i == start + 1
	  && targetm.have_ctors_dtors)
	continue;

      /* Output calls to all constructors/destructors in the batch just
	 found; they all share the same initialization priority.  */
      for (; start < i; start++)
	{
	  tree call;
	  fn = cdtors[start];
	  call = build_call_expr (fn, 0);
	  if (ctor_p)
	    DECL_STATIC_CONSTRUCTOR (fn) = 0;
	  else
	    DECL_STATIC_DESTRUCTOR (fn) = 0;
	  /* We do not want to optimize away pure/const calls here.
	     When optimizing, these should be already removed, when not
	     optimizing, we want user to be able to breakpoint in them.  */
	  TREE_SIDE_EFFECTS (call) = 1;
	  append_to_statement_list (call, &body);
	}
      gcc_assert (body != NULL_TREE);
      /* Generate a function to call all the functions of like
	 priority.  */
      cgraph_build_static_cdtor_1 (ctor_p ? 'I' : 'D', body, priority, true);
    }
}
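
/* A worked example with hypothetical priorities: if static_ctors sorts to
   {f (priority 65), g (65), h (100)}, the loop above emits one 'I'
   function of priority 65 calling f then g, and (unless the target has
   native ctor support and the batch is a single function) another of
   priority 100 calling h.  */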
/* Comparison function for qsort.  P1 and P2 are actually of type
   "tree *" and point to static constructors.  DECL_INIT_PRIORITY is
   used to determine the sort order.  */

static int
compare_ctor (const void *p1, const void *p2)
{
  tree f1;
  tree f2;
  int priority1;
  int priority2;

  f1 = *(const tree *)p1;
  f2 = *(const tree *)p2;
  priority1 = DECL_INIT_PRIORITY (f1);
  priority2 = DECL_INIT_PRIORITY (f2);

  if (priority1 < priority2)
    return -1;
  else if (priority1 > priority2)
    return 1;
  else
    /* Ensure a stable sort.  Constructors are executed in reverse
       order to make LTO initialize libraries first.  */
    return DECL_UID (f2) - DECL_UID (f1);
}
/* Comparison function for qsort.  P1 and P2 are actually of type
   "tree *" and point to static destructors.  DECL_FINI_PRIORITY is
   used to determine the sort order.  */

static int
compare_dtor (const void *p1, const void *p2)
{
  tree f1;
  tree f2;
  int priority1;
  int priority2;

  f1 = *(const tree *)p1;
  f2 = *(const tree *)p2;
  priority1 = DECL_FINI_PRIORITY (f1);
  priority2 = DECL_FINI_PRIORITY (f2);

  if (priority1 < priority2)
    return -1;
  else if (priority1 > priority2)
    return 1;
  else
    /* Ensure a stable sort.  */
    return DECL_UID (f1) - DECL_UID (f2);
}
/* Generate functions to call static constructors and destructors
   for targets that do not support .ctors/.dtors sections.  These
   functions have magic names which are detected by collect2.  */

static void
build_cdtor_fns (void)
{
  if (!static_ctors.is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      static_ctors.qsort (compare_ctor);
      build_cdtor (/*ctor_p=*/true, static_ctors);
    }

  if (!static_dtors.is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      static_dtors.qsort (compare_dtor);
      build_cdtor (/*ctor_p=*/false, static_dtors);
    }
}
/* Look for constructors and destructors and produce function calling them.
   This is needed for targets not supporting ctors or dtors, but we perform the
   transformation also at linktime to merge possibly numerous
   constructors/destructors into single function to improve code locality and
   reduce size.  */

static unsigned int
ipa_cdtor_merge (void)
{
  struct cgraph_node *node;
  FOR_EACH_DEFINED_FUNCTION (node)
    if (DECL_STATIC_CONSTRUCTOR (node->decl)
	|| DECL_STATIC_DESTRUCTOR (node->decl))
      record_cdtor_fn (node);
  build_cdtor_fns ();
  static_ctors.release ();
  static_dtors.release ();
  return 0;
}
namespace {

const pass_data pass_data_ipa_cdtor_merge =
{
  IPA_PASS, /* type */
  "cdtor", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
class pass_ipa_cdtor_merge : public ipa_opt_pass_d
{
public:
  pass_ipa_cdtor_merge (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_cdtor_merge, ctxt,
		      NULL, /* generate_summary */
		      NULL, /* write_summary */
		      NULL, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *) { return ipa_cdtor_merge (); }

}; // class pass_ipa_cdtor_merge
bool
pass_ipa_cdtor_merge::gate (function *)
{
  /* Perform the pass when we have no ctors/dtors support
     or at LTO time to merge multiple constructors into single
     function.  */
  return !targetm.have_ctors_dtors || (optimize && in_lto_p);
}

} // anon namespace

ipa_opt_pass_d *
make_pass_ipa_cdtor_merge (gcc::context *ctxt)
{
  return new pass_ipa_cdtor_merge (ctxt);
}
/* Invalid pointer representing BOTTOM for single user dataflow.  */
#define BOTTOM ((cgraph_node *)(size_t) 2)

/* Meet operation for single user dataflow.
   Here we want to associate each variable with the single function that
   may access it.

   FUNCTION is the current single user of the variable; VAR is the variable
   being accessed.  The lattice is stored in SINGLE_USER_MAP.

   We represent:
    - TOP by no entry in SINGLE_USER_MAP
    - BOTTOM by BOTTOM in AUX pointer (to save lookups)
    - known single user by cgraph pointer in SINGLE_USER_MAP.  */
static cgraph_node *
meet (cgraph_node *function, varpool_node *var,
      hash_map<varpool_node *, cgraph_node *> &single_user_map)
{
  struct cgraph_node *user, **f;

  if (var->aux == BOTTOM)
    return BOTTOM;

  f = single_user_map.get (var);
  if (!f)
    return function;
  user = *f;
  if (!function)
    return user;
  else if (function != user)
    return BOTTOM;
  else
    return function;
}
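
/* A worked example of the lattice (hypothetical symbols): if variable V
   has so far only been accessed from function F, SINGLE_USER_MAP holds
   V -> F and meet (F, V, map) stays F; a later meet against a different
   user G yields BOTTOM, i.e. V provably has no single user.  */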
/* Propagation step of single-use dataflow.

   Check all uses of VNODE and see if they are used by single function FUNCTION.
   SINGLE_USER_MAP represents the dataflow lattice.  */

static cgraph_node *
propagate_single_user (varpool_node *vnode, cgraph_node *function,
		       hash_map<varpool_node *, cgraph_node *> &single_user_map)
{
  int i;
  struct ipa_ref *ref;

  gcc_assert (!vnode->externally_visible);

  /* If node is an alias, first meet with its target.  */
  if (vnode->alias)
    function = meet (function, vnode->get_alias_target (), single_user_map);

  /* Check all users and see if they correspond to a single function.  */
  for (i = 0; vnode->iterate_referring (i, ref) && function != BOTTOM; i++)
    {
      struct cgraph_node *cnode = dyn_cast <cgraph_node *> (ref->referring);
      if (cnode)
	{
	  if (cnode->global.inlined_to)
	    cnode = cnode->global.inlined_to;
	  if (!function)
	    function = cnode;
	  else if (function != cnode)
	    function = BOTTOM;
	}
      else
	function = meet (function, dyn_cast <varpool_node *> (ref->referring),
			 single_user_map);
    }
  return function;
}
/* Pass setting used_by_single_function flag.
   This flag is set on a variable when there is only one function that may
   possibly refer to it.  */

static unsigned int
ipa_single_use (void)
{
  varpool_node *first = (varpool_node *) (void *) 1;
  varpool_node *var;
  hash_map<varpool_node *, cgraph_node *> single_user_map;

  FOR_EACH_DEFINED_VARIABLE (var)
    if (!var->all_refs_explicit_p ())
      var->aux = BOTTOM;
    else
      {
	/* Enqueue symbol for dataflow.  */
	var->aux = first;
	first = var;
      }

  /* The actual dataflow.  */
  while (first != (void *) 1)
    {
      cgraph_node *user, *orig_user, **f;

      var = first;
      first = (varpool_node *)first->aux;

      f = single_user_map.get (var);
      if (f)
	orig_user = *f;
      else
	orig_user = NULL;
      user = propagate_single_user (var, orig_user, single_user_map);

      gcc_checking_assert (var->aux != BOTTOM);

      /* If user differs, enqueue all references.  */
      if (user != orig_user)
	{
	  unsigned int i;
	  ipa_ref *ref;

	  single_user_map.put (var, user);

	  /* Enqueue all aliases for re-processing.  */
	  for (i = 0; var->iterate_referring (i, ref); i++)
	    if (ref->use == IPA_REF_ALIAS
		&& !ref->referring->aux)
	      {
		ref->referring->aux = first;
		first = dyn_cast <varpool_node *> (ref->referring);
	      }
	  /* Enqueue all users for re-processing.  */
	  for (i = 0; var->iterate_reference (i, ref); i++)
	    if (!ref->referred->aux
		&& ref->referred->definition
		&& is_a <varpool_node *> (ref->referred))
	      {
		ref->referred->aux = first;
		first = dyn_cast <varpool_node *> (ref->referred);
	      }

	  /* If user is BOTTOM, just punt on this var.  */
	  if (user == BOTTOM)
	    var->aux = BOTTOM;
	  else
	    var->aux = NULL;
	}
      else
	var->aux = NULL;
    }

  FOR_EACH_DEFINED_VARIABLE (var)
    {
      if (var->aux != BOTTOM)
	{
#ifdef ENABLE_CHECKING
	  /* Not having the single user known means that the VAR is
	     unreachable.  Either someone forgot to remove unreachable
	     variables or the reachability here is wrong.  */
	  gcc_assert (single_user_map.get (var));
#endif
	  if (dump_file)
	    fprintf (dump_file, "Variable %s/%i is used by single function\n",
		     var->name (), var->order);
	  var->used_by_single_function = true;
	}
      var->aux = NULL;
    }
  return 0;
}
namespace {

const pass_data pass_data_ipa_single_use =
{
  IPA_PASS, /* type */
  "single-use", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
class pass_ipa_single_use : public ipa_opt_pass_d
{
public:
  pass_ipa_single_use (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_single_use, ctxt,
		      NULL, /* generate_summary */
		      NULL, /* write_summary */
		      NULL, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *) { return ipa_single_use (); }

}; // class pass_ipa_single_use
bool
pass_ipa_single_use::gate (function *)
{
  return optimize;
}

} // anon namespace

ipa_opt_pass_d *
make_pass_ipa_single_use (gcc::context *ctxt)
{
  return new pass_ipa_single_use (ctxt);
}