/* LTO partitioning logic routines.
   Copyright (C) 2009-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "gcc-symtab.h"
#include "lto-streamer.h"
#include "ipa-inline.h"
#include "ipa-utils.h"
#include "lto-partition.h"
/* Classification of symbols into classes WRT partitioning.  */

enum symbol_class
{
  /* External declarations are ignored by partitioning algorithms and they are
     added into the boundary later via compute_ltrans_boundary.  */
  SYMBOL_EXTERNAL,
  /* Partitioned symbols are put into one of the partitions.  */
  SYMBOL_PARTITION,
  /* Duplicated symbols (such as comdat or constant pool references) are
     copied into every node needing them via add_symbol_to_partition.  */
  SYMBOL_DUPLICATE
};

vec<ltrans_partition> ltrans_partitions;
static void add_symbol_to_partition (ltrans_partition part, symtab_node *node);
/* Classify symbol NODE.  */

static enum symbol_class
get_symbol_class (symtab_node *node)
{
  /* Inline clones are always duplicated.
     This includes external declarations.  */
  cgraph_node *cnode = dyn_cast <cgraph_node> (node);

  if (DECL_ABSTRACT (node->decl))
    return SYMBOL_EXTERNAL;

  if (cnode && cnode->global.inlined_to)
    return SYMBOL_DUPLICATE;

  /* Weakref aliases are always duplicated.  */
  if (node->weakref)
    return SYMBOL_DUPLICATE;

  /* External declarations are external.  */
  if (DECL_EXTERNAL (node->decl))
    return SYMBOL_EXTERNAL;

  if (varpool_node *vnode = dyn_cast <varpool_node> (node))
    {
      /* Constant pool references use local symbol names that cannot
	 be promoted global.  We should never put objects that cannot
	 be duplicated across partitions into a constant pool.  */
      if (DECL_IN_CONSTANT_POOL (node->decl))
	return SYMBOL_DUPLICATE;
      gcc_checking_assert (vnode->definition);
    }
  /* Functions that are cloned may stay in the callgraph even if they are
     unused.  Handle them as external; compute_ltrans_boundary takes care
     to make the right things happen (i.e. to make them appear in the
     boundary but with the body streamed, so the clone can be
     materialized).  */
  else if (!cgraph (node)->definition)
    return SYMBOL_EXTERNAL;

  /* Comdats are duplicated to every use unless they are keyed.
     Those do not need duplication.  */
  if (DECL_COMDAT (node->decl)
      && !node->force_output
      && !symtab_used_from_object_file_p (node))
    return SYMBOL_DUPLICATE;

  return SYMBOL_PARTITION;
}
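
/* In short: abstract origins and external declarations (including cloned but
   undefined functions) end up as SYMBOL_EXTERNAL; inline clones, weakref
   aliases, constant pool entries and unkeyed comdats as SYMBOL_DUPLICATE;
   everything else that is actually defined as SYMBOL_PARTITION.  */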
/* Create new partition with name NAME.  */

static ltrans_partition
new_partition (const char *name)
{
  ltrans_partition part = XCNEW (struct ltrans_partition_def);
  part->encoder = lto_symtab_encoder_new (false);
  part->name = name;
  part->insns = 0;
  ltrans_partitions.safe_push (part);
  return part;
}
/* Free memory used by ltrans data structures.  */

void
free_ltrans_partitions (void)
{
  unsigned int idx;
  ltrans_partition part;
  for (idx = 0; ltrans_partitions.iterate (idx, &part); idx++)
    {
      if (part->initializers_visited)
	pointer_set_destroy (part->initializers_visited);
      /* Symtab encoder is freed after streaming.  */
      free (part);
    }
  ltrans_partitions.release ();
}
/* Return true if symbol is already in some partition.  */

static inline bool
symbol_partitioned_p (symtab_node *node)
{
  return node->aux;
}
/* Add references into the partition.  */
static void
add_references_to_partition (ltrans_partition part, symtab_node *node)
{
  struct ipa_ref *ref;
  int i;

  /* Add all duplicated references to the partition.  */
  for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
    if (get_symbol_class (ref->referred) == SYMBOL_DUPLICATE)
      add_symbol_to_partition (part, ref->referred);
    /* References to a readonly variable may be constant folded into its
       value.  Recursively look into the initializers of the constant
       variable and add references, too.  */
    else if (is_a <varpool_node> (ref->referred)
	     && ctor_for_folding (ref->referred->decl) != error_mark_node
	     && !lto_symtab_encoder_in_partition_p (part->encoder, ref->referred))
      {
	if (!part->initializers_visited)
	  part->initializers_visited = pointer_set_create ();
	if (!pointer_set_insert (part->initializers_visited, ref->referred))
	  add_references_to_partition (part, ref->referred);
      }
}
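
/* For example: if a partitioned function reads a readonly variable whose
   initializer refers to a constant pool entry or an unkeyed comdat, a load
   from that variable may later be folded to its value, leaving the partition
   referring to those symbols directly.  The recursion above walks such
   initializers so that every duplicated symbol reachable through them is
   brought into the partition as well.  */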
/* Helper function for add_symbol_to_partition doing the actual dirty work
   of adding NODE to PART.  */

static bool
add_symbol_to_partition_1 (ltrans_partition part, symtab_node *node)
{
  enum symbol_class c = get_symbol_class (node);
  struct ipa_ref *ref;
  symtab_node *node1;
  int i;

  /* If NODE is already there, we have nothing to do.  */
  if (lto_symtab_encoder_in_partition_p (part->encoder, node))
    return true;

  /* Non-duplicated aliases or thunks of a duplicated symbol need to be output
     just once.

     Be lax about comdats; they may or may not be duplicated and we may
     end up needing to duplicate a keyed comdat because it has an unkeyed
     alias.  */
  if (c == SYMBOL_PARTITION && !DECL_COMDAT (node->decl)
      && symbol_partitioned_p (node))
    return false;

  /* Be sure that we never try to duplicate a partitioned symbol
     or add an external symbol.  */
  gcc_assert (c != SYMBOL_EXTERNAL
	      && (c == SYMBOL_DUPLICATE || !symbol_partitioned_p (node)));

  lto_set_symtab_encoder_in_partition (part->encoder, node);

  if (symbol_partitioned_p (node))
    {
      node->in_other_partition = 1;
      if (cgraph_dump_file)
	fprintf (cgraph_dump_file,
		 "Symbol node %s now used in multiple partitions\n",
		 node->name ());
    }
  node->aux = (void *)((size_t)node->aux + 1);
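
  /* NODE->aux is used as a count of the partitions that contain NODE: it is
     incremented just above, decremented by undo_partition and tested by
     symbol_partitioned_p.  */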
  if (cgraph_node *cnode = dyn_cast <cgraph_node> (node))
    {
      struct cgraph_edge *e;
      part->insns += inline_summary (cnode)->self_size;

      /* Add all inline clones and callees that are duplicated.  */
      for (e = cnode->callees; e; e = e->next_callee)
	if (!e->inline_failed)
	  add_symbol_to_partition_1 (part, e->callee);
	else if (get_symbol_class (e->callee) == SYMBOL_DUPLICATE)
	  add_symbol_to_partition (part, e->callee);

      /* Add all thunks associated with the function.  */
      for (e = cnode->callers; e; e = e->next_caller)
	if (e->caller->thunk.thunk_p)
	  add_symbol_to_partition_1 (part, e->caller);
    }

  add_references_to_partition (part, node);

  /* Add all aliases associated with the symbol.  */
  for (i = 0; ipa_ref_list_referring_iterate (&node->ref_list, i, ref); i++)
    if (ref->use == IPA_REF_ALIAS && !node->weakref)
      add_symbol_to_partition_1 (part, ref->referring);

  /* Ensure that members of SAME_COMDAT_GROUP lists are always added as
     a group.  */
  if (node->same_comdat_group)
    for (node1 = node->same_comdat_group;
	 node1 != node; node1 = node1->same_comdat_group)
      {
	bool added = add_symbol_to_partition_1 (part, node1);
	gcc_assert (added);
      }

  return true;
}
/* If symbol NODE is really part of some other symbol's definition (i.e. it is
   an internal label, thunk, alias and so on), return the outer symbol.
   When add_symbol_to_partition_1 is called on the outer symbol it must
   eventually add NODE, too.  */

static symtab_node *
contained_in_symbol (symtab_node *node)
{
  /* Weakrefs are never contained in anything.  */
  if (node->weakref)
    return node;

  if (cgraph_node *cnode = dyn_cast <cgraph_node> (node))
    {
      cnode = cgraph_function_node (cnode, NULL);
      if (cnode->global.inlined_to)
	cnode = cnode->global.inlined_to;
      return cnode;
    }
  else if (varpool_node *vnode = dyn_cast <varpool_node> (node))
    return varpool_variable_node (vnode, NULL);
  return node;
}
/* Add symbol NODE to partition.  When the definition of NODE is part
   of another symbol's definition, add the other symbol, too.  */

static void
add_symbol_to_partition (ltrans_partition part, symtab_node *node)
{
  symtab_node *node1;

  /* Verify that we do not try to duplicate something that cannot be.  */
  gcc_checking_assert (get_symbol_class (node) == SYMBOL_DUPLICATE
		       || !symbol_partitioned_p (node));

  while ((node1 = contained_in_symbol (node)) != node)
    node = node1;

  /* If we have a duplicated symbol contained in something we cannot
     duplicate, we are very badly screwed.  The other way is possible,
     so we do not assert this in add_symbol_to_partition_1.

     Be lax about comdats; they may or may not be duplicated and we may
     end up needing to duplicate a keyed comdat because it has an unkeyed
     alias.  */
  gcc_assert (get_symbol_class (node) == SYMBOL_DUPLICATE
	      || DECL_COMDAT (node->decl)
	      || !symbol_partitioned_p (node));
  add_symbol_to_partition_1 (part, node);
}
/* Undo all additions until the number of symbols in PARTITION is N_NODES.  */

static void
undo_partition (ltrans_partition partition, unsigned int n_nodes)
{
  while (lto_symtab_encoder_size (partition->encoder) > (int)n_nodes)
    {
      symtab_node *node = lto_symtab_encoder_deref (partition->encoder,
						    n_nodes);

      /* After UNDO we no longer know what was visited.  */
      if (partition->initializers_visited)
	pointer_set_destroy (partition->initializers_visited);
      partition->initializers_visited = NULL;

      if (cgraph_node *cnode = dyn_cast <cgraph_node> (node))
	partition->insns -= inline_summary (cnode)->self_size;
      lto_symtab_encoder_delete_node (partition->encoder, node);
      node->aux = (void *)((size_t)node->aux - 1);
    }
}
/* Group cgraph nodes by input files.  This is used mainly for testing
   right now.  */

void
lto_1_to_1_map (void)
{
  symtab_node *node;
  struct lto_file_decl_data *file_data;
  struct pointer_map_t *pmap;
  ltrans_partition partition;
  void **slot;
  int npartitions = 0;

  pmap = pointer_map_create ();

  FOR_EACH_SYMBOL (node)
    {
      if (get_symbol_class (node) != SYMBOL_PARTITION
	  || symbol_partitioned_p (node))
	continue;

      file_data = node->lto_file_data;

      if (file_data)
	{
	  slot = pointer_map_contains (pmap, file_data);
	  if (slot)
	    partition = (ltrans_partition) *slot;
	  else
	    {
	      partition = new_partition (file_data->file_name);
	      slot = pointer_map_insert (pmap, file_data);
	      *slot = partition;
	      npartitions++;
	    }
	}
      else if (!file_data && ltrans_partitions.length ())
	partition = ltrans_partitions[0];
      else
	{
	  partition = new_partition ("");
	  slot = pointer_map_insert (pmap, NULL);
	  *slot = partition;
	  npartitions++;
	}

      add_symbol_to_partition (partition, node);
    }

  /* If the cgraph is empty, create one cgraph node set so that there is still
     an output file for any variables that need to be exported in a DSO.  */
  if (!npartitions)
    new_partition ("empty");

  pointer_map_destroy (pmap);
}
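
/* The mapping above is what -flto-partition=1to1 selects; lto_max_map and
   lto_balanced_map below correspond to -flto-partition=max and to the
   default -flto-partition=balanced, respectively.  */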
/* Maximal partitioning.  Put every new symbol into a new partition if
   possible.  */

void
lto_max_map (void)
{
  symtab_node *node;
  ltrans_partition partition;
  int npartitions = 0;

  FOR_EACH_SYMBOL (node)
    {
      if (get_symbol_class (node) != SYMBOL_PARTITION
	  || symbol_partitioned_p (node))
	continue;
      partition = new_partition (node->asm_name ());
      add_symbol_to_partition (partition, node);
      npartitions++;
    }
  if (!npartitions)
    new_partition ("empty");
}
/* Helper function for qsort; sort nodes by order.  */
static int
node_cmp (const void *pa, const void *pb)
{
  const struct cgraph_node *a = *(const struct cgraph_node * const *) pa;
  const struct cgraph_node *b = *(const struct cgraph_node * const *) pb;
  return b->order - a->order;
}

/* Helper function for qsort; sort nodes by order.  */
static int
varpool_node_cmp (const void *pa, const void *pb)
{
  const struct varpool_node *a = *(const struct varpool_node * const *) pa;
  const struct varpool_node *b = *(const struct varpool_node * const *) pb;
  return b->order - a->order;
}
/* Group cgraph nodes into equally-sized partitions.

   The partitioning algorithm is simple: nodes are taken in predefined order.
   The order corresponds to the order we want functions to have in the final
   output.  In the future this will be given by the function reordering pass,
   but at the moment we use the topological order, which is a good
   approximation.

   The goal is to partition this linear order into intervals (partitions) so
   that all the partitions have approximately the same size and the number of
   callgraph or IPA reference edges crossing boundaries is minimal.

   This is a lot faster (O(n) in size of callgraph) than algorithms doing
   priority-based graph clustering that are generally O(n^2) and, since
   WHOPR is designed to make things go well across partitions, it leads
   to better results.

   We compute the expected size of a partition as:

     max (total_size / lto_partitions, min_partition_size)

   We use a dynamic expected size of partition so small programs are
   partitioned into enough partitions to allow use of multiple CPUs, while
   large programs are not partitioned too much.  Creating too many partitions
   significantly increases the streaming overhead.

   In the future, we would like to bound the maximal size of partitions so as
   to prevent the LTRANS stage from consuming too much memory.  At the moment,
   however, the WPA stage is the most memory intensive for large benchmarks,
   since too many types and declarations are read into memory.

   The function implements a simple greedy algorithm.  Nodes are being added
   to the current partition until after 3/4 of the expected partition size is
   reached.  Past this threshold, we keep track of boundary size (number of
   edges going to other partitions) and continue adding functions until after
   the current partition has grown to twice the expected partition size.  Then
   the process is undone to the point where the minimal ratio of boundary size
   and in-partition calls was reached.  */
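
/* For illustration only (a sketch, not used by anything in this file): the
   expected-partition-size formula above amounts to the helper below; the
   real code in lto_balanced_map reads the two limits from
   PARAM_VALUE (PARAM_LTO_PARTITIONS) and PARAM_VALUE (MIN_PARTITION_SIZE).
   For instance, total_size = 64000 split into 32 partitions with a minimal
   size of 10000 yields an expected size of 10000 rather than 2000.  */

static int ATTRIBUTE_UNUSED
expected_partition_size_sketch (int total_size, int n_partitions,
				int min_partition_size)
{
  int size = total_size / n_partitions;
  if (size < min_partition_size)
    size = min_partition_size;
  return size;
}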
void
lto_balanced_map (void)
{
  int n_nodes = 0;
  int n_varpool_nodes = 0, varpool_pos = 0, best_varpool_pos = 0;
  struct cgraph_node **order = XNEWVEC (struct cgraph_node *, cgraph_max_uid);
  struct varpool_node **varpool_order = NULL;
  int i;
  struct cgraph_node *node;
  int total_size = 0, best_total_size = 0;
  int partition_size;
  ltrans_partition partition;
  int last_visited_node = 0;
  struct varpool_node *vnode;
  int cost = 0, internal = 0;
  int best_n_nodes = 0, best_i = 0, best_cost = INT_MAX, best_internal = 0;
  int npartitions;
  int current_order = -1;

  FOR_EACH_VARIABLE (vnode)
    gcc_assert (!vnode->aux);

  FOR_EACH_DEFINED_FUNCTION (node)
    if (get_symbol_class (node) == SYMBOL_PARTITION)
      {
	order[n_nodes++] = node;
	total_size += inline_summary (node)->size;
      }

  /* Streaming works best when the source units do not cross partition
     boundaries much.  This is because importing a function from a source
     unit tends to import a lot of global trees defined there.  We should
     get better about minimizing the function boundary, but until then
     things work smoother if we order in source order.  */
  qsort (order, n_nodes, sizeof (struct cgraph_node *), node_cmp);
  if (!flag_toplevel_reorder)
    {
      qsort (order, n_nodes, sizeof (struct cgraph_node *), node_cmp);

      FOR_EACH_VARIABLE (vnode)
	if (get_symbol_class (vnode) == SYMBOL_PARTITION)
	  n_varpool_nodes++;
      varpool_order = XNEWVEC (struct varpool_node *, n_varpool_nodes);

      n_varpool_nodes = 0;
      FOR_EACH_VARIABLE (vnode)
	if (get_symbol_class (vnode) == SYMBOL_PARTITION)
	  varpool_order[n_varpool_nodes++] = vnode;
      qsort (varpool_order, n_varpool_nodes, sizeof (struct varpool_node *),
	     varpool_node_cmp);
    }

  /* Compute partition size and create the first partition.  */
  partition_size = total_size / PARAM_VALUE (PARAM_LTO_PARTITIONS);
  if (partition_size < PARAM_VALUE (MIN_PARTITION_SIZE))
    partition_size = PARAM_VALUE (MIN_PARTITION_SIZE);
  npartitions = 1;
  partition = new_partition ("");
  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "Total unit size: %i, partition size: %i\n",
	     total_size, partition_size);

  for (i = 0; i < n_nodes; i++)
    {
      if (symbol_partitioned_p (order[i]))
	continue;

      current_order = order[i]->order;

      if (!flag_toplevel_reorder)
	while (varpool_pos < n_varpool_nodes
	       && varpool_order[varpool_pos]->order < current_order)
	  {
	    if (!symbol_partitioned_p (varpool_order[varpool_pos]))
	      add_symbol_to_partition (partition, varpool_order[varpool_pos]);
	    varpool_pos++;
	  }

      add_symbol_to_partition (partition, order[i]);
      total_size -= inline_summary (order[i])->size;

      /* Once we added a new node to the partition, we also want to add
	 all referenced variables unless they were already added into some
	 earlier partition.
	 add_symbol_to_partition adds possibly multiple nodes and
	 variables that are needed to satisfy the needs of ORDER[i].
	 We remember the last visited cgraph and varpool node from the last
	 iteration of the outer loop so that we can process every new
	 addition.

	 At the same time we compute the size of the boundary into COST.
	 Every callgraph or IPA reference edge leaving the partition
	 contributes to COST.  Every edge inside the partition was earlier
	 counted as one leaving it and thus we need to subtract it from
	 COST.  */
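      /* For instance, consider a call edge A->B with frequency 10 where B was
	 added to the partition before A.  When B is visited below, A is still
	 outside, so the edge adds 10 to COST.  Once A is added and visited in
	 turn, the same edge is seen again among A's callees: B is already in
	 the partition, so 10 is subtracted from COST and counted as INTERNAL
	 instead.  */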
      while (last_visited_node < lto_symtab_encoder_size (partition->encoder))
	{
	  struct ipa_ref_list *refs;
	  int j;
	  struct ipa_ref *ref;
	  symtab_node *snode = lto_symtab_encoder_deref (partition->encoder,
							 last_visited_node);

	  if (cgraph_node *node = dyn_cast <cgraph_node> (snode))
	    {
	      struct cgraph_edge *edge;

	      refs = &node->ref_list;

	      last_visited_node++;

	      gcc_assert (node->definition || node->weakref);

	      /* Compute boundary cost of callgraph edges.  */
	      for (edge = node->callees; edge; edge = edge->next_callee)
		if (edge->callee->definition)
		  {
		    int edge_cost = edge->frequency;
		    int index;

		    if (!edge_cost)
		      edge_cost = 1;
		    gcc_assert (edge_cost > 0);
		    index = lto_symtab_encoder_lookup (partition->encoder,
						       edge->callee);
		    if (index != LCC_NOT_FOUND
			&& index < last_visited_node - 1)
		      cost -= edge_cost, internal += edge_cost;
		    else
		      cost += edge_cost;
		  }
	      for (edge = node->callers; edge; edge = edge->next_caller)
		{
		  int edge_cost = edge->frequency;
		  int index;

		  gcc_assert (edge->caller->definition);
		  if (!edge_cost)
		    edge_cost = 1;
		  gcc_assert (edge_cost > 0);
		  index = lto_symtab_encoder_lookup (partition->encoder,
						     edge->caller);
		  if (index != LCC_NOT_FOUND
		      && index < last_visited_node - 1)
		    cost -= edge_cost, internal += edge_cost;
		  else
		    cost += edge_cost;
		}
	    }
	  else
	    {
	      refs = &snode->ref_list;

	      last_visited_node++;
	    }

	  /* Compute boundary cost of IPA REF edges and at the same time look
	     into variables referenced from the current partition and try to
	     add them.  */
	  for (j = 0; ipa_ref_list_reference_iterate (refs, j, ref); j++)
	    if (is_a <varpool_node> (ref->referred))
	      {
		int index;

		vnode = ipa_ref_varpool_node (ref);
		if (!vnode->definition)
		  continue;
		if (!symbol_partitioned_p (vnode) && flag_toplevel_reorder
		    && get_symbol_class (vnode) == SYMBOL_PARTITION)
		  add_symbol_to_partition (partition, vnode);
		index = lto_symtab_encoder_lookup (partition->encoder,
						   vnode);
		if (index != LCC_NOT_FOUND
		    && index < last_visited_node - 1)
		  cost--, internal++;
		else
		  cost++;
	      }
	    else
	      {
		int index;

		node = ipa_ref_node (ref);
		if (!node->definition)
		  continue;
		index = lto_symtab_encoder_lookup (partition->encoder,
						   node);
		if (index != LCC_NOT_FOUND
		    && index < last_visited_node - 1)
		  cost--, internal++;
		else
		  cost++;
	      }
	  for (j = 0; ipa_ref_list_referring_iterate (refs, j, ref); j++)
	    if (is_a <varpool_node> (ref->referring))
	      {
		int index;

		vnode = ipa_ref_referring_varpool_node (ref);
		gcc_assert (vnode->definition);
		if (!symbol_partitioned_p (vnode) && flag_toplevel_reorder
		    && get_symbol_class (vnode) == SYMBOL_PARTITION)
		  add_symbol_to_partition (partition, vnode);
		index = lto_symtab_encoder_lookup (partition->encoder,
						   vnode);
		if (index != LCC_NOT_FOUND
		    && index < last_visited_node - 1)
		  cost--, internal++;
		else
		  cost++;
	      }
	    else
	      {
		int index;

		node = ipa_ref_referring_node (ref);
		gcc_assert (node->definition);
		index = lto_symtab_encoder_lookup (partition->encoder,
						   node);
		if (index != LCC_NOT_FOUND
		    && index < last_visited_node - 1)
		  cost--, internal++;
		else
		  cost++;
	      }
	}
      /* If the partition is large enough, start looking for the smallest
	 boundary cost.  */
      if (partition->insns < partition_size * 3 / 4
	  || best_cost == INT_MAX
	  || ((!cost
	       || (best_internal * (HOST_WIDE_INT) cost
		   > (internal * (HOST_WIDE_INT) best_cost)))
	      && partition->insns < partition_size * 5 / 4))
	{
	  best_cost = cost;
	  best_internal = internal;
	  best_i = i;
	  best_n_nodes = lto_symtab_encoder_size (partition->encoder);
	  best_total_size = total_size;
	  best_varpool_pos = varpool_pos;
	}
      if (cgraph_dump_file)
	fprintf (cgraph_dump_file, "Step %i: added %s/%i, size %i, cost %i/%i "
		 "best %i/%i, step %i\n", i,
		 order[i]->name (), order[i]->order,
		 partition->insns, cost, internal,
		 best_cost, best_internal, best_i);
      /* Partition is too large, unwind to the step when the best cost was
	 reached and start a new partition.  */
      if (partition->insns > 2 * partition_size)
	{
	  if (best_i != i)
	    {
	      if (cgraph_dump_file)
		fprintf (cgraph_dump_file, "Unwinding %i insertions to step %i\n",
			 i - best_i, best_i);
	      undo_partition (partition, best_n_nodes);
	      varpool_pos = best_varpool_pos;
	    }
	  i = best_i;
	  /* When we are finished, avoid creating an empty partition.  */
	  while (i < n_nodes - 1 && symbol_partitioned_p (order[i + 1]))
	    i++;
	  if (i == n_nodes - 1)
	    break;
	  partition = new_partition ("");
	  last_visited_node = 0;
	  total_size = best_total_size;
	  cost = 0;

	  if (cgraph_dump_file)
	    fprintf (cgraph_dump_file, "New partition\n");
	  best_n_nodes = 0;
	  best_cost = INT_MAX;

	  /* Since the size of partitions is just approximate, update the size
	     after we finished the current one.  */
	  if (npartitions < PARAM_VALUE (PARAM_LTO_PARTITIONS))
	    partition_size = total_size
	      / (PARAM_VALUE (PARAM_LTO_PARTITIONS) - npartitions);
	  else
	    partition_size = INT_MAX;

	  if (partition_size < PARAM_VALUE (MIN_PARTITION_SIZE))
	    partition_size = PARAM_VALUE (MIN_PARTITION_SIZE);
	  npartitions++;
	}
    }

  /* Variables that are not reachable from the code go into the last
     partition.  */
  if (flag_toplevel_reorder)
    {
      FOR_EACH_VARIABLE (vnode)
	if (get_symbol_class (vnode) == SYMBOL_PARTITION
	    && !symbol_partitioned_p (vnode))
	  add_symbol_to_partition (partition, vnode);
    }
  else
    {
      while (varpool_pos < n_varpool_nodes)
	{
	  if (!symbol_partitioned_p (varpool_order[varpool_pos]))
	    add_symbol_to_partition (partition, varpool_order[varpool_pos]);
	  varpool_pos++;
	}
      free (varpool_order);
    }
}
/* Mangle NODE symbol name into a local name.
   This is necessary to do
   1) if two or more static vars of same assembler name
      are merged into a single ltrans unit.
   2) if a previously static var was promoted hidden to avoid possible
      conflict with symbols defined out of the LTO world.  */

static bool
privatize_symbol_name (symtab_node *node)
{
  tree decl = node->decl;
  const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));

  /* Our renaming machinery does not handle more than one change of assembler
     name.  We should not need more than one anyway.  */
  if (node->lto_file_data
      && lto_get_decl_name_mapping (node->lto_file_data, name) != name)
    {
      if (cgraph_dump_file)
	fprintf (cgraph_dump_file,
		 "Not privatizing symbol name: %s. It privatized already.\n",
		 name);
      return false;
    }
  /* Avoid mangling of already mangled clones.
     ??? should have a flag whether a symbol has a 'private' name already,
     since we produce some symbols like that i.e. for global constructors
     that are not really clones.  */
  if (node->unique_name)
    {
      if (cgraph_dump_file)
	fprintf (cgraph_dump_file,
		 "Not privatizing symbol name: %s. Has unique name.\n",
		 name);
      return false;
    }
  change_decl_assembler_name (decl, clone_function_name (decl, "lto_priv"));
  if (node->lto_file_data)
    lto_record_renamed_decl (node->lto_file_data, name,
			     IDENTIFIER_POINTER
			     (DECL_ASSEMBLER_NAME (decl)));
  if (cgraph_dump_file)
    fprintf (cgraph_dump_file,
	     "Privatizing symbol name: %s -> %s\n",
	     name, IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));
  return true;
}
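
/* Illustration: privatization renames the decl with a "lto_priv" clone
   suffix, so a file-local `foo' typically ends up with an assembler name
   along the lines of `foo.lto_priv.0' (the exact separator and number
   depend on the target and on how many clone names were generated
   before).  */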
/* Promote symbol NODE to be public (with hidden visibility).  */

static void
promote_symbol (symtab_node *node)
{
  /* We already promoted ... */
  if (DECL_VISIBILITY (node->decl) == VISIBILITY_HIDDEN
      && DECL_VISIBILITY_SPECIFIED (node->decl)
      && TREE_PUBLIC (node->decl))
    return;

  gcc_checking_assert (!TREE_PUBLIC (node->decl)
		       && !DECL_EXTERNAL (node->decl));
  /* Be sure that the newly public symbol does not conflict with anything
     already defined by the non-LTO part.  */
  privatize_symbol_name (node);
  TREE_PUBLIC (node->decl) = 1;
  DECL_VISIBILITY (node->decl) = VISIBILITY_HIDDEN;
  DECL_VISIBILITY_SPECIFIED (node->decl) = true;
  if (cgraph_dump_file)
    fprintf (cgraph_dump_file,
	     "Promoting as hidden: %s\n", node->name ());
}
/* Return true if NODE needs a named section even if it won't land in
   the partition symbol table.
   FIXME: we should really not use named sections for inline clones
   and master clones.  */

static bool
may_need_named_section_p (lto_symtab_encoder_t encoder, symtab_node *node)
{
  struct cgraph_node *cnode = dyn_cast <cgraph_node> (node);
  if (!cnode)
    return false;
  if (symtab_real_symbol_p (node))
    return false;
  return (!encoder
	  || (lto_symtab_encoder_lookup (encoder, node) != LCC_NOT_FOUND
	      && lto_symtab_encoder_encode_body_p (encoder,
						   cnode)));
}
/* If NODE represents a static variable, see if there are other variables
   of the same name in partition ENCODER (or in the whole compilation unit
   if ENCODER is NULL) and if so, mangle the statics.  Always mangle all
   conflicting statics, so we reduce chances of silently miscompiling
   asm statements referring to them by symbol name.  */

static void
rename_statics (lto_symtab_encoder_t encoder, symtab_node *node)
{
  tree decl = node->decl;
  symtab_node *s;
  tree name = DECL_ASSEMBLER_NAME (decl);

  /* See if this is a static symbol.  */
  if ((node->externally_visible
      /* FIXME: externally_visible is somewhat illogically not set for
	 external symbols (i.e. those not defined).  Remove this test
	 once this is fixed.  */
       || DECL_EXTERNAL (node->decl)
       || !symtab_real_symbol_p (node))
      && !may_need_named_section_p (encoder, node))
    return;

  /* Now walk symbols sharing the same name and see if there are any
     conflicts.  (All types of symbols count here, since we cannot have
     a static of the same name as an external or public symbol.)  */
  for (s = symtab_node_for_asm (name);
       s; s = s->next_sharing_asm_name)
    if ((symtab_real_symbol_p (s) || may_need_named_section_p (encoder, s))
	&& s->decl != node->decl
	&& (!encoder
	    || lto_symtab_encoder_lookup (encoder, s) != LCC_NOT_FOUND))
       break;

  /* OK, no conflict, so we have nothing to do.  */
  if (!s)
    return;

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file,
	     "Renaming statics with asm name: %s\n", node->name ());

  /* Assign every symbol in the set that shares the same ASM name a unique
     mangled name.  */
  for (s = symtab_node_for_asm (name); s;)
    if (!s->externally_visible
	&& ((symtab_real_symbol_p (s)
	     && !DECL_EXTERNAL (node->decl)
	     && !TREE_PUBLIC (node->decl))
	    || may_need_named_section_p (encoder, s))
	&& (!encoder
	    || lto_symtab_encoder_lookup (encoder, s) != LCC_NOT_FOUND))
      {
	if (privatize_symbol_name (s))
	  /* Re-start from the beginning since we do not know how many
	     symbols changed their name.  */
	  s = symtab_node_for_asm (name);
	else s = s->next_sharing_asm_name;
      }
    else s = s->next_sharing_asm_name;
}
/* Find out all static decls that need to be promoted to global because
   of cross file sharing.  This function must be run in the WPA mode after
   all inlinees are added.  */

void
lto_promote_cross_file_statics (void)
{
  unsigned i, n_sets;

  gcc_assert (flag_wpa);

  /* First compute boundaries.  */
  n_sets = ltrans_partitions.length ();
  for (i = 0; i < n_sets; i++)
    {
      ltrans_partition part
	= ltrans_partitions[i];
      part->encoder = compute_ltrans_boundary (part->encoder);
    }

  /* Look at boundaries and promote symbols as needed.  */
  for (i = 0; i < n_sets; i++)
    {
      lto_symtab_encoder_iterator lsei;
      lto_symtab_encoder_t encoder = ltrans_partitions[i]->encoder;

      for (lsei = lsei_start (encoder); !lsei_end_p (lsei);
	   lsei_next (&lsei))
	{
	  symtab_node *node = lsei_node (lsei);

	  /* If symbol is static, rename it if its assembler name clashes
	     with anything else in this unit.  */
	  rename_statics (encoder, node);

	  /* No need to promote if symbol already is externally visible ... */
	  if (node->externally_visible
	      /* ... or if it is part of current partition ... */
	      || lto_symtab_encoder_in_partition_p (encoder, node)
	      /* ... or if we do not partition it.  This means that it will
		 appear in every partition referencing it.  */
	      || get_symbol_class (node) != SYMBOL_PARTITION)
	    continue;

	  promote_symbol (node);
	}
    }
}
/* Rename statics in the whole unit in the case that
   we do -flto-partition=none.  */

void
lto_promote_statics_nonwpa (void)
{
  symtab_node *node;
  FOR_EACH_SYMBOL (node)
    rename_statics (NULL, node);
}