1 /* Basic IPA optimizations and utilities.
2 Copyright (C) 2003-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
25 #include "tree-pass.h"
29 #include "pointer-set.h"
31 #include "tree-iterator.h"
32 #include "ipa-utils.h"
33 #include "pointer-set.h"
34 #include "ipa-inline.h"
35 #include "hash-table.h"
36 #include "tree-inline.h"
39 #include "lto-streamer.h"
40 #include "data-streamer.h"
42 /* Look for all functions inlined to NODE and update their inlined_to pointers
46 update_inlined_to_pointer (struct cgraph_node
*node
, struct cgraph_node
*inlined_to
)
48 struct cgraph_edge
*e
;
49 for (e
= node
->callees
; e
; e
= e
->next_callee
)
50 if (e
->callee
->global
.inlined_to
)
52 e
->callee
->global
.inlined_to
= inlined_to
;
53 update_inlined_to_pointer (e
->callee
, inlined_to
);
57 /* Add symtab NODE to queue starting at FIRST.
59 The queue is linked via AUX pointers and terminated by pointer to 1.
60 We enqueue nodes at two occasions: when we find them reachable or when we find
61 their bodies needed for further clonning. In the second case we mark them
62 by pointer to 2 after processing so they are re-queue when they become
66 enqueue_node (symtab_node node
, symtab_node
*first
,
67 struct pointer_set_t
*reachable
)
69 /* Node is still in queue; do nothing. */
70 if (node
->symbol
.aux
&& node
->symbol
.aux
!= (void *) 2)
72 /* Node was already processed as unreachable, re-enqueue
73 only if it became reachable now. */
74 if (node
->symbol
.aux
== (void *)2 && !pointer_set_contains (reachable
, node
))
76 node
->symbol
.aux
= *first
;
80 /* Process references. */
83 process_references (struct ipa_ref_list
*list
,
85 bool before_inlining_p
,
86 struct pointer_set_t
*reachable
)
90 for (i
= 0; ipa_ref_list_reference_iterate (list
, i
, ref
); i
++)
92 if (is_a
<cgraph_node
> (ref
->referred
))
94 struct cgraph_node
*node
= ipa_ref_node (ref
);
97 && (!DECL_EXTERNAL (node
->symbol
.decl
)
99 || before_inlining_p
))
100 pointer_set_insert (reachable
, node
);
101 enqueue_node ((symtab_node
) node
, first
, reachable
);
105 struct varpool_node
*node
= ipa_ref_varpool_node (ref
);
108 && (!DECL_EXTERNAL (node
->symbol
.decl
)
110 || before_inlining_p
))
111 pointer_set_insert (reachable
, node
);
112 enqueue_node ((symtab_node
) node
, first
, reachable
);
118 /* Return true when NODE can not be local. Worker for cgraph_local_node_p. */
121 cgraph_non_local_node_p_1 (struct cgraph_node
*node
, void *data ATTRIBUTE_UNUSED
)
123 /* FIXME: Aliases can be local, but i386 gets thunks wrong then. */
124 return !(cgraph_only_called_directly_or_aliased_p (node
)
125 && !ipa_ref_has_aliases_p (&node
->symbol
.ref_list
)
127 && !DECL_EXTERNAL (node
->symbol
.decl
)
128 && !node
->symbol
.externally_visible
129 && !node
->symbol
.used_from_other_partition
130 && !node
->symbol
.in_other_partition
);
133 /* Return true when function can be marked local. */
136 cgraph_local_node_p (struct cgraph_node
*node
)
138 struct cgraph_node
*n
= cgraph_function_or_thunk_node (node
, NULL
);
140 /* FIXME: thunks can be considered local, but we need prevent i386
141 from attempting to change calling convention of them. */
142 if (n
->thunk
.thunk_p
)
144 return !cgraph_for_node_and_aliases (n
,
145 cgraph_non_local_node_p_1
, NULL
, true);
149 /* Return true when NODE has ADDR reference. */
152 has_addr_references_p (struct cgraph_node
*node
,
153 void *data ATTRIBUTE_UNUSED
)
158 for (i
= 0; ipa_ref_list_referring_iterate (&node
->symbol
.ref_list
,
160 if (ref
->use
== IPA_REF_ADDR
)
165 /* Perform reachability analysis and reclaim all unreachable nodes.
167 The algorithm is basically mark&sweep but with some extra refinements:
169 - reachable extern inline functions needs special handling; the bodies needs
170 to stay in memory until inlining in hope that they will be inlined.
171 After inlining we release their bodies and turn them into unanalyzed
172 nodes even when they are reachable.
174 BEFORE_INLINING_P specify whether we are before or after inlining.
176 - virtual functions are kept in callgraph even if they seem unreachable in
177 hope calls to them will be devirtualized.
179 Again we remove them after inlining. In late optimization some
180 devirtualization may happen, but it is not important since we won't inline
181 the call. In theory early opts and IPA should work out all important cases.
183 - virtual clones needs bodies of their origins for later materialization;
184 this means that we want to keep the body even if the origin is unreachable
185 otherwise. To avoid origin from sitting in the callgraph and being
186 walked by IPA passes, we turn them into unanalyzed nodes with body
189 We maintain set of function declaration where body needs to stay in
190 body_needed_for_clonning
192 Inline clones represent special case: their declaration match the
193 declaration of origin and cgraph_remove_node already knows how to
194 reshape callgraph and preserve body when offline copy of function or
195 inline clone is being removed.
197 - C++ virtual tables keyed to other unit are represented as DECL_EXTERNAL
198 variables with DECL_INITIAL set. We finalize these and keep reachable
199 ones around for constant folding purposes. After inlining we however
200 stop walking their references to let everything static referenced by them
201 to be removed when it is otherwise unreachable.
203 We maintain queue of both reachable symbols (i.e. defined symbols that needs
204 to stay) and symbols that are in boundary (i.e. external symbols referenced
205 by reachable symbols or origins of clones). The queue is represented
206 as linked list by AUX pointer terminated by 1.
208 At the end we keep all reachable symbols. For symbols in boundary we always
209 turn definition into a declaration, but we may keep function body around
210 based on body_needed_for_clonning
212 All symbols that enter the queue have AUX pointer non-zero and are in the
213 boundary. Pointer set REACHABLE is used to track reachable symbols.
215 Every symbol can be visited twice - once as part of boundary and once
216 as real reachable symbol. enqueue_node needs to decide whether the
217 node needs to be re-queued for second processing. For this purpose
218 we set AUX pointer of processed symbols in the boundary to constant 2. */
221 symtab_remove_unreachable_nodes (bool before_inlining_p
, FILE *file
)
223 symtab_node first
= (symtab_node
) (void *) 1;
224 struct cgraph_node
*node
, *next
;
225 struct varpool_node
*vnode
, *vnext
;
226 bool changed
= false;
227 struct pointer_set_t
*reachable
= pointer_set_create ();
228 struct pointer_set_t
*body_needed_for_clonning
= pointer_set_create ();
230 #ifdef ENABLE_CHECKING
234 fprintf (file
, "\nReclaiming functions:");
235 #ifdef ENABLE_CHECKING
236 FOR_EACH_FUNCTION (node
)
237 gcc_assert (!node
->symbol
.aux
);
238 FOR_EACH_VARIABLE (vnode
)
239 gcc_assert (!vnode
->symbol
.aux
);
241 /* Mark functions whose bodies are obviously needed.
242 This is mostly when they can be referenced externally. Inline clones
243 are special since their declarations are shared with master clone and thus
244 cgraph_can_remove_if_no_direct_calls_and_refs_p should not be called on them. */
245 FOR_EACH_DEFINED_FUNCTION (node
)
246 if (!node
->global
.inlined_to
247 && (!cgraph_can_remove_if_no_direct_calls_and_refs_p (node
)
248 /* Keep around virtual functions for possible devirtualization. */
249 || (before_inlining_p
250 && DECL_VIRTUAL_P (node
->symbol
.decl
))))
252 gcc_assert (!node
->global
.inlined_to
);
253 pointer_set_insert (reachable
, node
);
254 enqueue_node ((symtab_node
)node
, &first
, reachable
);
257 gcc_assert (!node
->symbol
.aux
);
259 /* Mark variables that are obviously needed. */
260 FOR_EACH_DEFINED_VARIABLE (vnode
)
261 if (!varpool_can_remove_if_no_refs (vnode
))
263 pointer_set_insert (reachable
, vnode
);
264 enqueue_node ((symtab_node
)vnode
, &first
, reachable
);
267 /* Perform reachability analysis. */
268 while (first
!= (symtab_node
) (void *) 1)
270 bool in_boundary_p
= !pointer_set_contains (reachable
, first
);
271 symtab_node node
= first
;
273 first
= (symtab_node
)first
->symbol
.aux
;
275 /* If we are processing symbol in boundary, mark its AUX pointer for
276 possible later re-processing in enqueue_node. */
278 node
->symbol
.aux
= (void *)2;
281 /* If any symbol in a comdat group is reachable, force
282 all other in the same comdat group to be also reachable. */
283 if (node
->symbol
.same_comdat_group
)
286 for (next
= node
->symbol
.same_comdat_group
;
288 next
= next
->symbol
.same_comdat_group
)
289 if (!pointer_set_insert (reachable
, next
))
290 enqueue_node ((symtab_node
) next
, &first
, reachable
);
292 /* Mark references as reachable. */
293 process_references (&node
->symbol
.ref_list
, &first
,
294 before_inlining_p
, reachable
);
297 if (cgraph_node
*cnode
= dyn_cast
<cgraph_node
> (node
))
299 /* Mark the callees reachable unless they are direct calls to extern
300 inline functions we decided to not inline. */
303 struct cgraph_edge
*e
;
304 for (e
= cnode
->callees
; e
; e
= e
->next_callee
)
306 if (e
->callee
->analyzed
307 && (!e
->inline_failed
308 || !DECL_EXTERNAL (e
->callee
->symbol
.decl
)
310 || before_inlining_p
))
311 pointer_set_insert (reachable
, e
->callee
);
312 enqueue_node ((symtab_node
) e
->callee
, &first
, reachable
);
315 /* When inline clone exists, mark body to be preserved so when removing
316 offline copy of the function we don't kill it. */
317 if (!cnode
->alias
&& cnode
->global
.inlined_to
)
318 pointer_set_insert (body_needed_for_clonning
, cnode
->symbol
.decl
);
321 /* For non-inline clones, force their origins to the boundary and ensure
322 that body is not removed. */
323 while (cnode
->clone_of
324 && !gimple_has_body_p (cnode
->symbol
.decl
))
326 bool noninline
= cnode
->clone_of
->symbol
.decl
!= cnode
->symbol
.decl
;
327 cnode
= cnode
->clone_of
;
330 pointer_set_insert (body_needed_for_clonning
, cnode
->symbol
.decl
);
331 enqueue_node ((symtab_node
)cnode
, &first
, reachable
);
336 /* When we see constructor of external variable, keep referred nodes in the
337 boundary. This will also hold initializers of the external vars NODE
339 varpool_node
*vnode
= dyn_cast
<varpool_node
> (node
);
341 && DECL_EXTERNAL (node
->symbol
.decl
)
346 for (int i
= 0; ipa_ref_list_reference_iterate (&node
->symbol
.ref_list
, i
, ref
); i
++)
347 enqueue_node (ref
->referred
, &first
, reachable
);
351 /* Remove unreachable functions. */
352 for (node
= cgraph_first_function (); node
; node
= next
)
354 next
= cgraph_next_function (node
);
355 if (!node
->symbol
.aux
)
358 fprintf (file
, " %s", cgraph_node_name (node
));
359 cgraph_remove_node (node
);
362 else if (!pointer_set_contains (reachable
, node
))
367 fprintf (file
, " %s", cgraph_node_name (node
));
368 cgraph_node_remove_callees (node
);
369 ipa_remove_all_references (&node
->symbol
.ref_list
);
372 if (!pointer_set_contains (body_needed_for_clonning
, node
->symbol
.decl
)
373 && (node
->local
.finalized
|| !DECL_ARTIFICIAL (node
->symbol
.decl
)))
374 cgraph_release_function_body (node
);
375 node
->analyzed
= false;
379 /* Inline clones might be kept around so their materializing allows further
380 cloning. If the function the clone is inlined into is removed, we need
381 to turn it into normal cone. */
382 FOR_EACH_FUNCTION (node
)
384 if (node
->global
.inlined_to
387 gcc_assert (node
->clones
);
388 node
->global
.inlined_to
= NULL
;
389 update_inlined_to_pointer (node
, node
);
391 node
->symbol
.aux
= NULL
;
394 /* Remove unreachable variables. */
396 fprintf (file
, "\nReclaiming variables:");
397 for (vnode
= varpool_first_variable (); vnode
; vnode
= vnext
)
399 vnext
= varpool_next_variable (vnode
);
400 if (!vnode
->symbol
.aux
)
403 fprintf (file
, " %s", varpool_node_name (vnode
));
404 varpool_remove_node (vnode
);
407 else if (!pointer_set_contains (reachable
, vnode
))
412 fprintf (file
, " %s", varpool_node_name (vnode
));
415 vnode
->analyzed
= false;
416 vnode
->symbol
.aux
= NULL
;
419 vnode
->symbol
.aux
= NULL
;
422 pointer_set_destroy (reachable
);
423 pointer_set_destroy (body_needed_for_clonning
);
425 /* Now update address_taken flags and try to promote functions to be local. */
427 fprintf (file
, "\nClearing address taken flags:");
428 FOR_EACH_DEFINED_FUNCTION (node
)
429 if (node
->symbol
.address_taken
430 && !node
->symbol
.used_from_other_partition
)
432 if (!cgraph_for_node_and_aliases (node
, has_addr_references_p
, NULL
, true))
435 fprintf (file
, " %s", cgraph_node_name (node
));
436 node
->symbol
.address_taken
= false;
438 if (cgraph_local_node_p (node
))
440 node
->local
.local
= true;
442 fprintf (file
, " (local)");
447 fprintf (file
, "\n");
449 #ifdef ENABLE_CHECKING
453 /* If we removed something, perhaps profile could be improved. */
454 if (changed
&& optimize
&& inline_edge_summary_vec
.exists ())
455 FOR_EACH_DEFINED_FUNCTION (node
)
456 cgraph_propagate_frequency (node
);
461 /* Discover variables that have no longer address taken or that are read only
462 and update their flags.
464 FIXME: This can not be done in between gimplify and omp_expand since
465 readonly flag plays role on what is shared and what is not. Currently we do
466 this transformation as part of whole program visibility and re-do at
467 ipa-reference pass (to take into account clonning), but it would
468 make sense to do it before early optimizations. */
471 ipa_discover_readonly_nonaddressable_vars (void)
473 struct varpool_node
*vnode
;
475 fprintf (dump_file
, "Clearing variable flags:");
476 FOR_EACH_VARIABLE (vnode
)
477 if (vnode
->finalized
&& varpool_all_refs_explicit_p (vnode
)
478 && (TREE_ADDRESSABLE (vnode
->symbol
.decl
)
479 || !TREE_READONLY (vnode
->symbol
.decl
)))
481 bool written
= false;
482 bool address_taken
= false;
485 for (i
= 0; ipa_ref_list_referring_iterate (&vnode
->symbol
.ref_list
,
487 && (!written
|| !address_taken
); i
++)
491 address_taken
= true;
499 if (TREE_ADDRESSABLE (vnode
->symbol
.decl
) && !address_taken
)
502 fprintf (dump_file
, " %s (addressable)", varpool_node_name (vnode
));
503 TREE_ADDRESSABLE (vnode
->symbol
.decl
) = 0;
505 if (!TREE_READONLY (vnode
->symbol
.decl
) && !address_taken
&& !written
506 /* Making variable in explicit section readonly can cause section
508 See e.g. gcc.c-torture/compile/pr23237.c */
509 && DECL_SECTION_NAME (vnode
->symbol
.decl
) == NULL
)
512 fprintf (dump_file
, " %s (read-only)", varpool_node_name (vnode
));
513 TREE_READONLY (vnode
->symbol
.decl
) = 1;
517 fprintf (dump_file
, "\n");
520 /* Return true when there is a reference to node and it is not vtable. */
522 cgraph_address_taken_from_non_vtable_p (struct cgraph_node
*node
)
526 for (i
= 0; ipa_ref_list_referring_iterate (&node
->symbol
.ref_list
,
528 if (ref
->use
== IPA_REF_ADDR
)
530 struct varpool_node
*node
;
531 if (is_a
<cgraph_node
> (ref
->referring
))
533 node
= ipa_ref_referring_varpool_node (ref
);
534 if (!DECL_VIRTUAL_P (node
->symbol
.decl
))
540 /* COMDAT functions must be shared only if they have address taken,
541 otherwise we can produce our own private implementation with
543 Return true when turning COMDAT functoin static can not lead to wrong
544 code when the resulting object links with a library defining same COMDAT.
546 Virtual functions do have their addresses taken from the vtables,
547 but in C++ there is no way to compare their addresses for equality. */
550 cgraph_comdat_can_be_unshared_p (struct cgraph_node
*node
)
552 if ((cgraph_address_taken_from_non_vtable_p (node
)
553 && !DECL_VIRTUAL_P (node
->symbol
.decl
))
556 if (node
->symbol
.same_comdat_group
)
558 struct cgraph_node
*next
;
560 /* If more than one function is in the same COMDAT group, it must
561 be shared even if just one function in the comdat group has
563 for (next
= cgraph (node
->symbol
.same_comdat_group
);
564 next
!= node
; next
= cgraph (next
->symbol
.same_comdat_group
))
565 if (cgraph_address_taken_from_non_vtable_p (next
)
566 && !DECL_VIRTUAL_P (next
->symbol
.decl
))
572 /* Return true when function NODE should be considered externally visible. */
575 cgraph_externally_visible_p (struct cgraph_node
*node
,
578 if (!node
->local
.finalized
)
580 if (!DECL_COMDAT (node
->symbol
.decl
)
581 && (!TREE_PUBLIC (node
->symbol
.decl
)
582 || DECL_EXTERNAL (node
->symbol
.decl
)))
585 /* Do not try to localize built-in functions yet. One of problems is that we
586 end up mangling their asm for WHOPR that makes it impossible to call them
587 using the implicit built-in declarations anymore. Similarly this enables
588 us to remove them as unreachable before actual calls may appear during
589 expansion or folding. */
590 if (DECL_BUILT_IN (node
->symbol
.decl
))
593 /* If linker counts on us, we must preserve the function. */
594 if (symtab_used_from_object_file_p ((symtab_node
) node
))
596 if (DECL_PRESERVE_P (node
->symbol
.decl
))
598 if (lookup_attribute ("externally_visible",
599 DECL_ATTRIBUTES (node
->symbol
.decl
)))
601 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
602 && lookup_attribute ("dllexport",
603 DECL_ATTRIBUTES (node
->symbol
.decl
)))
605 if (node
->symbol
.resolution
== LDPR_PREVAILING_DEF_IRONLY
)
607 /* When doing LTO or whole program, we can bring COMDAT functoins static.
608 This improves code quality and we know we will duplicate them at most twice
609 (in the case that we are not using plugin and link with object file
610 implementing same COMDAT) */
611 if ((in_lto_p
|| whole_program
)
612 && DECL_COMDAT (node
->symbol
.decl
)
613 && cgraph_comdat_can_be_unshared_p (node
))
616 /* When doing link time optimizations, hidden symbols become local. */
618 && (DECL_VISIBILITY (node
->symbol
.decl
) == VISIBILITY_HIDDEN
619 || DECL_VISIBILITY (node
->symbol
.decl
) == VISIBILITY_INTERNAL
)
620 /* Be sure that node is defined in IR file, not in other object
621 file. In that case we don't set used_from_other_object_file. */
624 else if (!whole_program
)
627 if (MAIN_NAME_P (DECL_NAME (node
->symbol
.decl
)))
633 /* Return true when variable VNODE should be considered externally visible. */
636 varpool_externally_visible_p (struct varpool_node
*vnode
)
638 /* Do not touch weakrefs; while they are not externally visible,
639 dropping their DECL_EXTERNAL flags confuse most
640 of code handling them. */
641 if (vnode
->alias
&& DECL_EXTERNAL (vnode
->symbol
.decl
))
644 if (DECL_EXTERNAL (vnode
->symbol
.decl
))
647 if (!DECL_COMDAT (vnode
->symbol
.decl
) && !TREE_PUBLIC (vnode
->symbol
.decl
))
650 /* If linker counts on us, we must preserve the function. */
651 if (symtab_used_from_object_file_p ((symtab_node
) vnode
))
654 if (DECL_HARD_REGISTER (vnode
->symbol
.decl
))
656 if (DECL_PRESERVE_P (vnode
->symbol
.decl
))
658 if (lookup_attribute ("externally_visible",
659 DECL_ATTRIBUTES (vnode
->symbol
.decl
)))
661 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
662 && lookup_attribute ("dllexport",
663 DECL_ATTRIBUTES (vnode
->symbol
.decl
)))
666 /* See if we have linker information about symbol not being used or
667 if we need to make guess based on the declaration.
669 Even if the linker clams the symbol is unused, never bring internal
670 symbols that are declared by user as used or externally visible.
671 This is needed for i.e. references from asm statements. */
672 if (symtab_used_from_object_file_p ((symtab_node
) vnode
))
674 if (vnode
->symbol
.resolution
== LDPR_PREVAILING_DEF_IRONLY
)
677 /* As a special case, the COMDAT virtual tables can be unshared.
678 In LTO mode turn vtables into static variables. The variable is readonly,
679 so this does not enable more optimization, but referring static var
680 is faster for dynamic linking. Also this match logic hidding vtables
681 from LTO symbol tables. */
682 if ((in_lto_p
|| flag_whole_program
)
683 && !vnode
->symbol
.force_output
684 && DECL_COMDAT (vnode
->symbol
.decl
) && DECL_VIRTUAL_P (vnode
->symbol
.decl
))
687 /* When doing link time optimizations, hidden symbols become local. */
689 && (DECL_VISIBILITY (vnode
->symbol
.decl
) == VISIBILITY_HIDDEN
690 || DECL_VISIBILITY (vnode
->symbol
.decl
) == VISIBILITY_INTERNAL
)
691 /* Be sure that node is defined in IR file, not in other object
692 file. In that case we don't set used_from_other_object_file. */
695 else if (!flag_whole_program
)
698 /* Do not attempt to privatize COMDATS by default.
699 This would break linking with C++ libraries sharing
702 FIXME: We can do so for readonly vars with no address taken and
703 possibly also for vtables since no direct pointer comparsion is done.
704 It might be interesting to do so to reduce linking overhead. */
705 if (DECL_COMDAT (vnode
->symbol
.decl
) || DECL_WEAK (vnode
->symbol
.decl
))
710 /* Mark visibility of all functions.
712 A local function is one whose calls can occur only in the current
713 compilation unit and all its calls are explicit, so we can change
714 its calling convention. We simply mark all static functions whose
715 address is not taken as local.
717 We also change the TREE_PUBLIC flag of all declarations that are public
718 in language point of view but we want to overwrite this default
719 via visibilities for the backend point of view. */
722 function_and_variable_visibility (bool whole_program
)
724 struct cgraph_node
*node
;
725 struct varpool_node
*vnode
;
727 /* All aliases should be procssed at this point. */
728 gcc_checking_assert (!alias_pairs
|| !alias_pairs
->length());
730 FOR_EACH_FUNCTION (node
)
732 int flags
= flags_from_decl_or_type (node
->symbol
.decl
);
734 /* Optimize away PURE and CONST constructors and destructors. */
736 && (flags
& (ECF_CONST
| ECF_PURE
))
737 && !(flags
& ECF_LOOPING_CONST_OR_PURE
))
739 DECL_STATIC_CONSTRUCTOR (node
->symbol
.decl
) = 0;
740 DECL_STATIC_DESTRUCTOR (node
->symbol
.decl
) = 0;
743 /* Frontends and alias code marks nodes as needed before parsing is finished.
744 We may end up marking as node external nodes where this flag is meaningless
746 if (node
->symbol
.force_output
747 && (DECL_EXTERNAL (node
->symbol
.decl
) || !node
->analyzed
))
748 node
->symbol
.force_output
= 0;
750 /* C++ FE on lack of COMDAT support create local COMDAT functions
751 (that ought to be shared but can not due to object format
752 limitations). It is necessary to keep the flag to make rest of C++ FE
753 happy. Clear the flag here to avoid confusion in middle-end. */
754 if (DECL_COMDAT (node
->symbol
.decl
) && !TREE_PUBLIC (node
->symbol
.decl
))
755 DECL_COMDAT (node
->symbol
.decl
) = 0;
756 /* For external decls stop tracking same_comdat_group, it doesn't matter
757 what comdat group they are in when they won't be emitted in this TU,
758 and simplifies later passes. */
759 if (node
->symbol
.same_comdat_group
&& DECL_EXTERNAL (node
->symbol
.decl
))
761 #ifdef ENABLE_CHECKING
764 for (n
= node
->symbol
.same_comdat_group
;
765 n
!= (symtab_node
)node
;
766 n
= n
->symbol
.same_comdat_group
)
767 /* If at least one of same comdat group functions is external,
768 all of them have to be, otherwise it is a front-end bug. */
769 gcc_assert (DECL_EXTERNAL (n
->symbol
.decl
));
771 symtab_dissolve_same_comdat_group_list ((symtab_node
) node
);
773 gcc_assert ((!DECL_WEAK (node
->symbol
.decl
)
774 && !DECL_COMDAT (node
->symbol
.decl
))
775 || TREE_PUBLIC (node
->symbol
.decl
)
776 || DECL_EXTERNAL (node
->symbol
.decl
));
777 if (cgraph_externally_visible_p (node
, whole_program
))
779 gcc_assert (!node
->global
.inlined_to
);
780 node
->symbol
.externally_visible
= true;
783 node
->symbol
.externally_visible
= false;
784 if (!node
->symbol
.externally_visible
&& node
->analyzed
785 && !DECL_EXTERNAL (node
->symbol
.decl
))
787 gcc_assert (whole_program
|| in_lto_p
788 || !TREE_PUBLIC (node
->symbol
.decl
));
789 symtab_make_decl_local (node
->symbol
.decl
);
790 node
->symbol
.resolution
= LDPR_PREVAILING_DEF_IRONLY
;
791 if (node
->symbol
.same_comdat_group
)
792 /* cgraph_externally_visible_p has already checked all other nodes
793 in the group and they will all be made local. We need to
794 dissolve the group at once so that the predicate does not
796 symtab_dissolve_same_comdat_group_list ((symtab_node
) node
);
799 if (node
->thunk
.thunk_p
800 && TREE_PUBLIC (node
->symbol
.decl
))
802 struct cgraph_node
*decl_node
= node
;
804 decl_node
= cgraph_function_node (decl_node
->callees
->callee
, NULL
);
806 /* Thunks have the same visibility as function they are attached to.
807 Make sure the C++ front end set this up properly. */
808 if (DECL_ONE_ONLY (decl_node
->symbol
.decl
))
810 gcc_checking_assert (DECL_COMDAT (node
->symbol
.decl
)
811 == DECL_COMDAT (decl_node
->symbol
.decl
));
812 gcc_checking_assert (DECL_COMDAT_GROUP (node
->symbol
.decl
)
813 == DECL_COMDAT_GROUP (decl_node
->symbol
.decl
));
814 gcc_checking_assert (node
->symbol
.same_comdat_group
);
816 if (DECL_EXTERNAL (decl_node
->symbol
.decl
))
817 DECL_EXTERNAL (node
->symbol
.decl
) = 1;
820 FOR_EACH_DEFINED_FUNCTION (node
)
821 node
->local
.local
= cgraph_local_node_p (node
);
822 FOR_EACH_VARIABLE (vnode
)
824 /* weak flag makes no sense on local variables. */
825 gcc_assert (!DECL_WEAK (vnode
->symbol
.decl
)
826 || TREE_PUBLIC (vnode
->symbol
.decl
)
827 || DECL_EXTERNAL (vnode
->symbol
.decl
));
828 /* In several cases declarations can not be common:
830 - when declaration has initializer
832 - when it has specific section
833 - when it resides in non-generic address space.
834 - if declaration is local, it will get into .local common section
835 so common flag is not needed. Frontends still produce these in
836 certain cases, such as for:
838 static int a __attribute__ ((common))
840 Canonicalize things here and clear the redundant flag. */
841 if (DECL_COMMON (vnode
->symbol
.decl
)
842 && (!(TREE_PUBLIC (vnode
->symbol
.decl
)
843 || DECL_EXTERNAL (vnode
->symbol
.decl
))
844 || (DECL_INITIAL (vnode
->symbol
.decl
)
845 && DECL_INITIAL (vnode
->symbol
.decl
) != error_mark_node
)
846 || DECL_WEAK (vnode
->symbol
.decl
)
847 || DECL_SECTION_NAME (vnode
->symbol
.decl
) != NULL
848 || ! (ADDR_SPACE_GENERIC_P
849 (TYPE_ADDR_SPACE (TREE_TYPE (vnode
->symbol
.decl
))))))
850 DECL_COMMON (vnode
->symbol
.decl
) = 0;
852 FOR_EACH_DEFINED_VARIABLE (vnode
)
854 if (!vnode
->finalized
)
856 if (varpool_externally_visible_p (vnode
))
857 vnode
->symbol
.externally_visible
= true;
859 vnode
->symbol
.externally_visible
= false;
860 if (!vnode
->symbol
.externally_visible
)
862 gcc_assert (in_lto_p
|| whole_program
|| !TREE_PUBLIC (vnode
->symbol
.decl
));
863 symtab_make_decl_local (vnode
->symbol
.decl
);
864 if (vnode
->symbol
.same_comdat_group
)
865 symtab_dissolve_same_comdat_group_list ((symtab_node
) vnode
);
866 vnode
->symbol
.resolution
= LDPR_PREVAILING_DEF_IRONLY
;
872 fprintf (dump_file
, "\nMarking local functions:");
873 FOR_EACH_DEFINED_FUNCTION (node
)
874 if (node
->local
.local
)
875 fprintf (dump_file
, " %s", cgraph_node_name (node
));
876 fprintf (dump_file
, "\n\n");
877 fprintf (dump_file
, "\nMarking externally visible functions:");
878 FOR_EACH_DEFINED_FUNCTION (node
)
879 if (node
->symbol
.externally_visible
)
880 fprintf (dump_file
, " %s", cgraph_node_name (node
));
881 fprintf (dump_file
, "\n\n");
882 fprintf (dump_file
, "\nMarking externally visible variables:");
883 FOR_EACH_DEFINED_VARIABLE (vnode
)
884 if (vnode
->symbol
.externally_visible
)
885 fprintf (dump_file
, " %s", varpool_node_name (vnode
));
886 fprintf (dump_file
, "\n\n");
888 cgraph_function_flags_ready
= true;
892 /* Local function pass handling visibilities. This happens before LTO streaming
893 so in particular -fwhole-program should be ignored at this level. */
896 local_function_and_variable_visibility (void)
898 return function_and_variable_visibility (flag_whole_program
&& !flag_lto
);
901 struct simple_ipa_opt_pass pass_ipa_function_and_variable_visibility
=
905 "visibility", /* name */
906 OPTGROUP_NONE
, /* optinfo_flags */
908 local_function_and_variable_visibility
,/* execute */
911 0, /* static_pass_number */
912 TV_CGRAPHOPT
, /* tv_id */
913 0, /* properties_required */
914 0, /* properties_provided */
915 0, /* properties_destroyed */
916 0, /* todo_flags_start */
917 TODO_remove_functions
| TODO_dump_symtab
/* todo_flags_finish */
921 /* Free inline summary. */
924 free_inline_summary (void)
926 inline_free_summary ();
930 struct simple_ipa_opt_pass pass_ipa_free_inline_summary
=
934 "*free_inline_summary", /* name */
935 OPTGROUP_NONE
, /* optinfo_flags */
937 free_inline_summary
, /* execute */
940 0, /* static_pass_number */
941 TV_IPA_FREE_INLINE_SUMMARY
, /* tv_id */
942 0, /* properties_required */
943 0, /* properties_provided */
944 0, /* properties_destroyed */
945 0, /* todo_flags_start */
946 0 /* todo_flags_finish */
950 /* Do not re-run on ltrans stage. */
953 gate_whole_program_function_and_variable_visibility (void)
958 /* Bring functionss local at LTO time with -fwhole-program. */
961 whole_program_function_and_variable_visibility (void)
963 function_and_variable_visibility (flag_whole_program
);
965 ipa_discover_readonly_nonaddressable_vars ();
969 struct ipa_opt_pass_d pass_ipa_whole_program_visibility
=
973 "whole-program", /* name */
974 OPTGROUP_NONE
, /* optinfo_flags */
975 gate_whole_program_function_and_variable_visibility
,/* gate */
976 whole_program_function_and_variable_visibility
,/* execute */
979 0, /* static_pass_number */
980 TV_CGRAPHOPT
, /* tv_id */
981 0, /* properties_required */
982 0, /* properties_provided */
983 0, /* properties_destroyed */
984 0, /* todo_flags_start */
985 TODO_remove_functions
| TODO_dump_symtab
/* todo_flags_finish */
987 NULL
, /* generate_summary */
988 NULL
, /* write_summary */
989 NULL
, /* read_summary */
990 NULL
, /* write_optimization_summary */
991 NULL
, /* read_optimization_summary */
992 NULL
, /* stmt_fixup */
994 NULL
, /* function_transform */
995 NULL
, /* variable_transform */
998 /* Entry in the histogram. */
1000 struct histogram_entry
1007 /* Histogram of profile values.
1008 The histogram is represented as an ordered vector of entries allocated via
1009 histogram_pool. During construction a separate hashtable is kept to lookup
1010 duplicate entries. */
1012 vec
<histogram_entry
*> histogram
;
1013 static alloc_pool histogram_pool
;
1015 /* Hashtable support for storing SSA names hashed by their SSA_NAME_VAR. */
1017 struct histogram_hash
: typed_noop_remove
<histogram_entry
>
1019 typedef histogram_entry value_type
;
1020 typedef histogram_entry compare_type
;
1021 static inline hashval_t
hash (const value_type
*);
1022 static inline int equal (const value_type
*, const compare_type
*);
1026 histogram_hash::hash (const histogram_entry
*val
)
1032 histogram_hash::equal (const histogram_entry
*val
, const histogram_entry
*val2
)
1034 return val
->count
== val2
->count
;
1037 /* Account TIME and SIZE executed COUNT times into HISTOGRAM.
1038 HASHTABLE is the on-side hash kept to avoid duplicates. */
1041 account_time_size (hash_table
<histogram_hash
> hashtable
,
1042 vec
<histogram_entry
*> &histogram
,
1043 gcov_type count
, int time
, int size
)
1045 histogram_entry key
= {count
, 0, 0};
1046 histogram_entry
**val
= hashtable
.find_slot (&key
, INSERT
);
1050 *val
= (histogram_entry
*) pool_alloc (histogram_pool
);
1052 histogram
.safe_push (*val
);
1054 (*val
)->time
+= time
;
1055 (*val
)->size
+= size
;
1059 cmp_counts (const void *v1
, const void *v2
)
1061 const histogram_entry
*h1
= *(const histogram_entry
* const *)v1
;
1062 const histogram_entry
*h2
= *(const histogram_entry
* const *)v2
;
1063 if (h1
->count
< h2
->count
)
1065 if (h1
->count
> h2
->count
)
1070 /* Dump HISTOGRAM to FILE. */
1073 dump_histogram (FILE *file
, vec
<histogram_entry
*> histogram
)
1076 gcov_type overall_time
= 0, cumulated_time
= 0, cumulated_size
= 0, overall_size
= 0;
1078 fprintf (dump_file
, "Histogram:\n");
1079 for (i
= 0; i
< histogram
.length (); i
++)
1081 overall_time
+= histogram
[i
]->count
* histogram
[i
]->time
;
1082 overall_size
+= histogram
[i
]->size
;
1088 for (i
= 0; i
< histogram
.length (); i
++)
1090 cumulated_time
+= histogram
[i
]->count
* histogram
[i
]->time
;
1091 cumulated_size
+= histogram
[i
]->size
;
1092 fprintf (file
, " "HOST_WIDEST_INT_PRINT_DEC
": time:%i (%2.2f) size:%i (%2.2f)\n",
1093 (HOST_WIDEST_INT
) histogram
[i
]->count
,
1095 cumulated_time
* 100.0 / overall_time
,
1097 cumulated_size
* 100.0 / overall_size
);
1101 /* Collect histogram from CFG profiles. */
1104 ipa_profile_generate_summary (void)
1106 struct cgraph_node
*node
;
1107 gimple_stmt_iterator gsi
;
1108 hash_table
<histogram_hash
> hashtable
;
1111 hashtable
.create (10);
1112 histogram_pool
= create_alloc_pool ("IPA histogram", sizeof (struct histogram_entry
),
1115 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node
)
1116 FOR_EACH_BB_FN (bb
, DECL_STRUCT_FUNCTION (node
->symbol
.decl
))
1120 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1122 time
+= estimate_num_insns (gsi_stmt (gsi
), &eni_time_weights
);
1123 size
+= estimate_num_insns (gsi_stmt (gsi
), &eni_size_weights
);
1125 account_time_size (hashtable
, histogram
, bb
->count
, time
, size
);
1127 hashtable
.dispose ();
1128 histogram
.qsort (cmp_counts
);
1131 /* Serialize the ipa info for lto. */
1134 ipa_profile_write_summary (void)
1136 struct lto_simple_output_block
*ob
1137 = lto_create_simple_output_block (LTO_section_ipa_profile
);
1140 streamer_write_uhwi_stream (ob
->main_stream
, histogram
.length());
1141 for (i
= 0; i
< histogram
.length (); i
++)
1143 streamer_write_gcov_count_stream (ob
->main_stream
, histogram
[i
]->count
);
1144 streamer_write_uhwi_stream (ob
->main_stream
, histogram
[i
]->time
);
1145 streamer_write_uhwi_stream (ob
->main_stream
, histogram
[i
]->size
);
1147 lto_destroy_simple_output_block (ob
);
1150 /* Deserialize the ipa info for lto. */
1153 ipa_profile_read_summary (void)
1155 struct lto_file_decl_data
** file_data_vec
1156 = lto_get_file_decl_data ();
1157 struct lto_file_decl_data
* file_data
;
1158 hash_table
<histogram_hash
> hashtable
;
1161 hashtable
.create (10);
1162 histogram_pool
= create_alloc_pool ("IPA histogram", sizeof (struct histogram_entry
),
1165 while ((file_data
= file_data_vec
[j
++]))
1169 struct lto_input_block
*ib
1170 = lto_create_simple_input_block (file_data
,
1171 LTO_section_ipa_profile
,
1175 unsigned int num
= streamer_read_uhwi (ib
);
1177 for (n
= 0; n
< num
; n
++)
1179 gcov_type count
= streamer_read_gcov_count (ib
);
1180 int time
= streamer_read_uhwi (ib
);
1181 int size
= streamer_read_uhwi (ib
);
1182 account_time_size (hashtable
, histogram
,
1185 lto_destroy_simple_input_block (file_data
,
1186 LTO_section_ipa_profile
,
1190 hashtable
.dispose ();
1191 histogram
.qsort (cmp_counts
);
1194 /* Simple ipa profile pass propagating frequencies across the callgraph. */
1199 struct cgraph_node
**order
= XCNEWVEC (struct cgraph_node
*, cgraph_n_nodes
);
1200 struct cgraph_edge
*e
;
1202 bool something_changed
= false;
1204 gcov_type overall_time
= 0, cutoff
= 0, cumulated
= 0, overall_size
= 0;
1207 dump_histogram (dump_file
, histogram
);
1208 for (i
= 0; i
< (int)histogram
.length (); i
++)
1210 overall_time
+= histogram
[i
]->count
* histogram
[i
]->time
;
1211 overall_size
+= histogram
[i
]->size
;
1215 gcov_type threshold
;
1217 gcc_assert (overall_size
);
1220 gcov_type min
, cumulated_time
= 0, cumulated_size
= 0;
1222 fprintf (dump_file
, "Overall time: "HOST_WIDEST_INT_PRINT_DEC
"\n",
1223 (HOST_WIDEST_INT
)overall_time
);
1224 min
= get_hot_bb_threshold ();
1225 for (i
= 0; i
< (int)histogram
.length () && histogram
[i
]->count
>= min
;
1228 cumulated_time
+= histogram
[i
]->count
* histogram
[i
]->time
;
1229 cumulated_size
+= histogram
[i
]->size
;
1231 fprintf (dump_file
, "GCOV min count: "HOST_WIDEST_INT_PRINT_DEC
1232 " Time:%3.2f%% Size:%3.2f%%\n",
1233 (HOST_WIDEST_INT
)min
,
1234 cumulated_time
* 100.0 / overall_time
,
1235 cumulated_size
* 100.0 / overall_size
);
1237 cutoff
= (overall_time
* PARAM_VALUE (HOT_BB_COUNT_WS_PERMILLE
) + 500) / 1000;
1239 for (i
= 0; cumulated
< cutoff
; i
++)
1241 cumulated
+= histogram
[i
]->count
* histogram
[i
]->time
;
1242 threshold
= histogram
[i
]->count
;
1248 gcov_type cumulated_time
= 0, cumulated_size
= 0;
1251 i
< (int)histogram
.length () && histogram
[i
]->count
>= threshold
;
1254 cumulated_time
+= histogram
[i
]->count
* histogram
[i
]->time
;
1255 cumulated_size
+= histogram
[i
]->size
;
1257 fprintf (dump_file
, "Determined min count: "HOST_WIDEST_INT_PRINT_DEC
1258 " Time:%3.2f%% Size:%3.2f%%\n",
1259 (HOST_WIDEST_INT
)threshold
,
1260 cumulated_time
* 100.0 / overall_time
,
1261 cumulated_size
* 100.0 / overall_size
);
1263 if (threshold
> get_hot_bb_threshold ()
1267 fprintf (dump_file
, "Threshold updated.\n");
1268 set_hot_bb_threshold (threshold
);
1271 histogram
.release();
1272 free_alloc_pool (histogram_pool
);
1274 order_pos
= ipa_reverse_postorder (order
);
1275 for (i
= order_pos
- 1; i
>= 0; i
--)
1277 if (order
[i
]->local
.local
&& cgraph_propagate_frequency (order
[i
]))
1279 for (e
= order
[i
]->callees
; e
; e
= e
->next_callee
)
1280 if (e
->callee
->local
.local
&& !e
->callee
->symbol
.aux
)
1282 something_changed
= true;
1283 e
->callee
->symbol
.aux
= (void *)1;
1286 order
[i
]->symbol
.aux
= NULL
;
1289 while (something_changed
)
1291 something_changed
= false;
1292 for (i
= order_pos
- 1; i
>= 0; i
--)
1294 if (order
[i
]->symbol
.aux
&& cgraph_propagate_frequency (order
[i
]))
1296 for (e
= order
[i
]->callees
; e
; e
= e
->next_callee
)
1297 if (e
->callee
->local
.local
&& !e
->callee
->symbol
.aux
)
1299 something_changed
= true;
1300 e
->callee
->symbol
.aux
= (void *)1;
1303 order
[i
]->symbol
.aux
= NULL
;
1311 gate_ipa_profile (void)
1313 return flag_ipa_profile
;
1316 struct ipa_opt_pass_d pass_ipa_profile
=
1320 "profile_estimate", /* name */
1321 OPTGROUP_NONE
, /* optinfo_flags */
1322 gate_ipa_profile
, /* gate */
1323 ipa_profile
, /* execute */
1326 0, /* static_pass_number */
1327 TV_IPA_PROFILE
, /* tv_id */
1328 0, /* properties_required */
1329 0, /* properties_provided */
1330 0, /* properties_destroyed */
1331 0, /* todo_flags_start */
1332 0 /* todo_flags_finish */
1334 ipa_profile_generate_summary
, /* generate_summary */
1335 ipa_profile_write_summary
, /* write_summary */
1336 ipa_profile_read_summary
, /* read_summary */
1337 NULL
, /* write_optimization_summary */
1338 NULL
, /* read_optimization_summary */
1339 NULL
, /* stmt_fixup */
1341 NULL
, /* function_transform */
1342 NULL
/* variable_transform */
1345 /* Generate and emit a static constructor or destructor. WHICH must
1346 be one of 'I' (for a constructor) or 'D' (for a destructor). BODY
1347 is a STATEMENT_LIST containing GENERIC statements. PRIORITY is the
1348 initialization priority for this constructor or destructor.
1350 FINAL specify whether the externally visible name for collect2 should
1354 cgraph_build_static_cdtor_1 (char which
, tree body
, int priority
, bool final
)
1356 static int counter
= 0;
1358 tree decl
, name
, resdecl
;
1360 /* The priority is encoded in the constructor or destructor name.
1361 collect2 will sort the names and arrange that they are called at
1364 sprintf (which_buf
, "%c_%.5d_%d", which
, priority
, counter
++);
1366 /* Proudce sane name but one not recognizable by collect2, just for the
1367 case we fail to inline the function. */
1368 sprintf (which_buf
, "sub_%c_%.5d_%d", which
, priority
, counter
++);
1369 name
= get_file_function_name (which_buf
);
1371 decl
= build_decl (input_location
, FUNCTION_DECL
, name
,
1372 build_function_type_list (void_type_node
, NULL_TREE
));
1373 current_function_decl
= decl
;
1375 resdecl
= build_decl (input_location
,
1376 RESULT_DECL
, NULL_TREE
, void_type_node
);
1377 DECL_ARTIFICIAL (resdecl
) = 1;
1378 DECL_RESULT (decl
) = resdecl
;
1379 DECL_CONTEXT (resdecl
) = decl
;
1381 allocate_struct_function (decl
, false);
1383 TREE_STATIC (decl
) = 1;
1384 TREE_USED (decl
) = 1;
1385 DECL_ARTIFICIAL (decl
) = 1;
1386 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl
) = 1;
1387 DECL_SAVED_TREE (decl
) = body
;
1388 if (!targetm
.have_ctors_dtors
&& final
)
1390 TREE_PUBLIC (decl
) = 1;
1391 DECL_PRESERVE_P (decl
) = 1;
1393 DECL_UNINLINABLE (decl
) = 1;
1395 DECL_INITIAL (decl
) = make_node (BLOCK
);
1396 TREE_USED (DECL_INITIAL (decl
)) = 1;
1398 DECL_SOURCE_LOCATION (decl
) = input_location
;
1399 cfun
->function_end_locus
= input_location
;
1404 DECL_STATIC_CONSTRUCTOR (decl
) = 1;
1405 decl_init_priority_insert (decl
, priority
);
1408 DECL_STATIC_DESTRUCTOR (decl
) = 1;
1409 decl_fini_priority_insert (decl
, priority
);
1415 gimplify_function_tree (decl
);
1417 cgraph_add_new_function (decl
, false);
1420 current_function_decl
= NULL
;
1423 /* Generate and emit a static constructor or destructor. WHICH must
1424 be one of 'I' (for a constructor) or 'D' (for a destructor). BODY
1425 is a STATEMENT_LIST containing GENERIC statements. PRIORITY is the
1426 initialization priority for this constructor or destructor. */
1429 cgraph_build_static_cdtor (char which
, tree body
, int priority
)
1431 cgraph_build_static_cdtor_1 (which
, body
, priority
, false);
1434 /* A vector of FUNCTION_DECLs declared as static constructors. */
1435 static vec
<tree
> static_ctors
;
1436 /* A vector of FUNCTION_DECLs declared as static destructors. */
1437 static vec
<tree
> static_dtors
;
1439 /* When target does not have ctors and dtors, we call all constructor
1440 and destructor by special initialization/destruction function
1441 recognized by collect2.
1443 When we are going to build this function, collect all constructors and
1444 destructors and turn them into normal functions. */
1447 record_cdtor_fn (struct cgraph_node
*node
)
1449 if (DECL_STATIC_CONSTRUCTOR (node
->symbol
.decl
))
1450 static_ctors
.safe_push (node
->symbol
.decl
);
1451 if (DECL_STATIC_DESTRUCTOR (node
->symbol
.decl
))
1452 static_dtors
.safe_push (node
->symbol
.decl
);
1453 node
= cgraph_get_node (node
->symbol
.decl
);
1454 DECL_DISREGARD_INLINE_LIMITS (node
->symbol
.decl
) = 1;
1457 /* Define global constructors/destructor functions for the CDTORS, of
1458 which they are LEN. The CDTORS are sorted by initialization
1459 priority. If CTOR_P is true, these are constructors; otherwise,
1460 they are destructors. */
1463 build_cdtor (bool ctor_p
, vec
<tree
> cdtors
)
1466 size_t len
= cdtors
.length ();
1473 priority_type priority
;
1482 p
= ctor_p
? DECL_INIT_PRIORITY (fn
) : DECL_FINI_PRIORITY (fn
);
1485 else if (p
!= priority
)
1491 /* When there is only one cdtor and target supports them, do nothing. */
1493 && targetm
.have_ctors_dtors
)
1498 /* Find the next batch of constructors/destructors with the same
1499 initialization priority. */
1504 call
= build_call_expr (fn
, 0);
1506 DECL_STATIC_CONSTRUCTOR (fn
) = 0;
1508 DECL_STATIC_DESTRUCTOR (fn
) = 0;
1509 /* We do not want to optimize away pure/const calls here.
1510 When optimizing, these should be already removed, when not
1511 optimizing, we want user to be able to breakpoint in them. */
1512 TREE_SIDE_EFFECTS (call
) = 1;
1513 append_to_statement_list (call
, &body
);
1515 gcc_assert (body
!= NULL_TREE
);
1516 /* Generate a function to call all the function of like
1518 cgraph_build_static_cdtor_1 (ctor_p
? 'I' : 'D', body
, priority
, true);
1522 /* Comparison function for qsort. P1 and P2 are actually of type
1523 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
1524 used to determine the sort order. */
1527 compare_ctor (const void *p1
, const void *p2
)
1534 f1
= *(const tree
*)p1
;
1535 f2
= *(const tree
*)p2
;
1536 priority1
= DECL_INIT_PRIORITY (f1
);
1537 priority2
= DECL_INIT_PRIORITY (f2
);
1539 if (priority1
< priority2
)
1541 else if (priority1
> priority2
)
1544 /* Ensure a stable sort. Constructors are executed in backwarding
1545 order to make LTO initialize braries first. */
1546 return DECL_UID (f2
) - DECL_UID (f1
);
1549 /* Comparison function for qsort. P1 and P2 are actually of type
1550 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
1551 used to determine the sort order. */
1554 compare_dtor (const void *p1
, const void *p2
)
1561 f1
= *(const tree
*)p1
;
1562 f2
= *(const tree
*)p2
;
1563 priority1
= DECL_FINI_PRIORITY (f1
);
1564 priority2
= DECL_FINI_PRIORITY (f2
);
1566 if (priority1
< priority2
)
1568 else if (priority1
> priority2
)
1571 /* Ensure a stable sort. */
1572 return DECL_UID (f1
) - DECL_UID (f2
);
1575 /* Generate functions to call static constructors and destructors
1576 for targets that do not support .ctors/.dtors sections. These
1577 functions have magic names which are detected by collect2. */
1580 build_cdtor_fns (void)
1582 if (!static_ctors
.is_empty ())
1584 gcc_assert (!targetm
.have_ctors_dtors
|| in_lto_p
);
1585 static_ctors
.qsort (compare_ctor
);
1586 build_cdtor (/*ctor_p=*/true, static_ctors
);
1589 if (!static_dtors
.is_empty ())
1591 gcc_assert (!targetm
.have_ctors_dtors
|| in_lto_p
);
1592 static_dtors
.qsort (compare_dtor
);
1593 build_cdtor (/*ctor_p=*/false, static_dtors
);
1597 /* Look for constructors and destructors and produce function calling them.
1598 This is needed for targets not supporting ctors or dtors, but we perform the
1599 transformation also at linktime to merge possibly numerous
1600 constructors/destructors into single function to improve code locality and
1604 ipa_cdtor_merge (void)
1606 struct cgraph_node
*node
;
1607 FOR_EACH_DEFINED_FUNCTION (node
)
1608 if (DECL_STATIC_CONSTRUCTOR (node
->symbol
.decl
)
1609 || DECL_STATIC_DESTRUCTOR (node
->symbol
.decl
))
1610 record_cdtor_fn (node
);
1612 static_ctors
.release ();
1613 static_dtors
.release ();
1617 /* Perform the pass when we have no ctors/dtors support
1618 or at LTO time to merge multiple constructors into single
1622 gate_ipa_cdtor_merge (void)
1624 return !targetm
.have_ctors_dtors
|| (optimize
&& in_lto_p
);
1627 struct ipa_opt_pass_d pass_ipa_cdtor_merge
=
1632 OPTGROUP_NONE
, /* optinfo_flags */
1633 gate_ipa_cdtor_merge
, /* gate */
1634 ipa_cdtor_merge
, /* execute */
1637 0, /* static_pass_number */
1638 TV_CGRAPHOPT
, /* tv_id */
1639 0, /* properties_required */
1640 0, /* properties_provided */
1641 0, /* properties_destroyed */
1642 0, /* todo_flags_start */
1643 0 /* todo_flags_finish */
1645 NULL
, /* generate_summary */
1646 NULL
, /* write_summary */
1647 NULL
, /* read_summary */
1648 NULL
, /* write_optimization_summary */
1649 NULL
, /* read_optimization_summary */
1650 NULL
, /* stmt_fixup */
1652 NULL
, /* function_transform */
1653 NULL
/* variable_transform */