/* LTO partitioning logic routines.
   Copyright (C) 2009-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "toplev.h"
#include "tree.h"
#include "gimple.h"
#include "tm.h"
#include "cgraph.h"
#include "lto-streamer.h"
#include "timevar.h"
#include "params.h"
#include "ipa-inline.h"
#include "ipa-utils.h"
#include "lto-partition.h"
/* Classification of symbols into classes WRT partitioning.  */
enum symbol_class
{
  /* External declarations are ignored by partitioning algorithms and they are
     added into the boundary later via compute_ltrans_boundary.  */
  SYMBOL_EXTERNAL,
  /* Partitioned symbols are put into one of the partitions.  */
  SYMBOL_PARTITION,
  /* Duplicated symbols (such as comdat or constant pool references) are
     copied into every node needing them via add_symbol_to_partition.  */
  SYMBOL_DUPLICATE
};
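/* Illustration (hypothetical examples): an ordinary function or variable
   defined in some translation unit is SYMBOL_PARTITION and lands in exactly
   one partition; an inline clone, a weakref alias or a constant pool entry is
   SYMBOL_DUPLICATE and is copied into every partition that needs it; a mere
   declaration without a body or initializer is SYMBOL_EXTERNAL and only
   enters partitions later as part of the boundary.  */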
vec<ltrans_partition> ltrans_partitions;

static void add_symbol_to_partition (ltrans_partition part, symtab_node *node);

/* Classify symbol NODE.  */
enum symbol_class
get_symbol_class (symtab_node *node)
{
  /* Inline clones are always duplicated.
     This includes external declarations.  */
  cgraph_node *cnode = dyn_cast <cgraph_node> (node);

  if (DECL_ABSTRACT (node->decl))
    return SYMBOL_EXTERNAL;

  if (cnode && cnode->global.inlined_to)
    return SYMBOL_DUPLICATE;

  /* Weakref aliases are always duplicated.  */
  if (node->weakref)
    return SYMBOL_DUPLICATE;

  /* External declarations are external.  */
  if (DECL_EXTERNAL (node->decl))
    return SYMBOL_EXTERNAL;

  if (varpool_node *vnode = dyn_cast <varpool_node> (node))
    {
      /* Constant pool references use local symbol names that can not
	 be promoted global.  We should never put into a constant pool
	 objects that can not be duplicated across partitions.  */
      if (DECL_IN_CONSTANT_POOL (node->decl))
	return SYMBOL_DUPLICATE;
      gcc_checking_assert (vnode->definition);
    }
  /* Functions that are cloned may stay in callgraph even if they are unused.
     Handle them as external; compute_ltrans_boundary takes care to make
     the right things happen (i.e. to make them appear in the boundary but
     with the body streamed, so the clone can be materialized).  */
  else if (!cgraph (node)->definition)
    return SYMBOL_EXTERNAL;

  /* Comdats are duplicated to every use unless they are keyed.
     Those do not need duplication.  */
  if (DECL_COMDAT (node->decl)
      && !node->force_output
      && !symtab_used_from_object_file_p (node))
    return SYMBOL_DUPLICATE;

  return SYMBOL_PARTITION;
}
/* Create new partition with name NAME.  */

static ltrans_partition
new_partition (const char *name)
{
  ltrans_partition part = XCNEW (struct ltrans_partition_def);
  part->encoder = lto_symtab_encoder_new (false);
  part->name = name;
  part->insns = 0;
  ltrans_partitions.safe_push (part);
  return part;
}
/* Free memory used by ltrans data structures.  */

void
free_ltrans_partitions (void)
{
  unsigned int idx;
  ltrans_partition part;
  for (idx = 0; ltrans_partitions.iterate (idx, &part); idx++)
    {
      if (part->initializers_visited)
	pointer_set_destroy (part->initializers_visited);
      /* Symtab encoder is freed after streaming.  */
      free (part);
    }
  ltrans_partitions.release ();
}
/* Return true if symbol is already in some partition.  */

static inline bool
symbol_partitioned_p (symtab_node *node)
{
  return node->aux;
}
/* Add references into the partition.  */

static void
add_references_to_partition (ltrans_partition part, symtab_node *node)
{
  int i;
  struct ipa_ref *ref;

  /* Add all duplicated references to the partition.  */
  for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
    if (get_symbol_class (ref->referred) == SYMBOL_DUPLICATE)
      add_symbol_to_partition (part, ref->referred);
    /* References to a readonly variable may be constant folded into its value.
       Recursively look into the initializers of the constant variable and add
       references, too.  */
    else if (is_a <varpool_node> (ref->referred)
	     && ctor_for_folding (ref->referred->decl) != error_mark_node
	     && !lto_symtab_encoder_in_partition_p (part->encoder, ref->referred))
      {
	if (!part->initializers_visited)
	  part->initializers_visited = pointer_set_create ();
	if (!pointer_set_insert (part->initializers_visited, ref->referred))
	  add_references_to_partition (part, ref->referred);
      }
}
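/* Illustration (hypothetical case): a partitioned function reads a readonly
   variable whose initializer mentions a constant pool entry.  That entry is
   SYMBOL_DUPLICATE and is reachable only through the initializer, so the
   recursion in add_references_to_partition is what copies it into the
   partition and keeps folding of the variable's value possible in LTRANS.  */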
/* Helper function for add_symbol_to_partition doing the actual dirty work
   of adding NODE to PART.  */

static bool
add_symbol_to_partition_1 (ltrans_partition part, symtab_node *node)
{
  enum symbol_class c = get_symbol_class (node);
  int i;
  struct ipa_ref *ref;
  symtab_node *node1;

  /* If NODE is already there, we have nothing to do.  */
  if (lto_symtab_encoder_in_partition_p (part->encoder, node))
    return true;

  /* Non-duplicated aliases or thunks of a duplicated symbol need to be output
     just once.

     Be lax about comdats; they may or may not be duplicated and we may
     end up needing to duplicate a keyed comdat because it has an unkeyed
     alias.  */
  if (c == SYMBOL_PARTITION && !DECL_COMDAT (node->decl)
      && symbol_partitioned_p (node))
    return false;

  /* Be sure that we never try to duplicate a partitioned symbol
     or add an external symbol.  */
  gcc_assert (c != SYMBOL_EXTERNAL
	      && (c == SYMBOL_DUPLICATE || !symbol_partitioned_p (node)));

  lto_set_symtab_encoder_in_partition (part->encoder, node);

  if (symbol_partitioned_p (node))
    {
      node->in_other_partition = 1;
      if (cgraph_dump_file)
	fprintf (cgraph_dump_file, "Symbol node %s now used in multiple partitions\n",
		 symtab_node_name (node));
    }
  node->aux = (void *)((size_t)node->aux + 1);

  if (cgraph_node *cnode = dyn_cast <cgraph_node> (node))
    {
      struct cgraph_edge *e;
      part->insns += inline_summary (cnode)->self_size;

      /* Add all inline clones and callees that are duplicated.  */
      for (e = cnode->callees; e; e = e->next_callee)
	if (!e->inline_failed)
	  add_symbol_to_partition_1 (part, e->callee);
	else if (get_symbol_class (e->callee) == SYMBOL_DUPLICATE)
	  add_symbol_to_partition (part, e->callee);

      /* Add all thunks associated with the function.  */
      for (e = cnode->callers; e; e = e->next_caller)
	if (e->caller->thunk.thunk_p)
	  add_symbol_to_partition_1 (part, e->caller);
    }

  add_references_to_partition (part, node);

  /* Add all aliases associated with the symbol.  */
  for (i = 0; ipa_ref_list_referring_iterate (&node->ref_list, i, ref); i++)
    if (ref->use == IPA_REF_ALIAS && !node->weakref)
      add_symbol_to_partition_1 (part, ref->referring);

  /* Ensure that all members of SAME_COMDAT_GROUP are always added as a group.  */
  if (node->same_comdat_group)
    for (node1 = node->same_comdat_group;
	 node1 != node; node1 = node1->same_comdat_group)
      {
	bool added = add_symbol_to_partition_1 (part, node1);
	gcc_assert (added);
      }

  return true;
}
/* If symbol NODE is really part of another symbol's definition (i.e. it is
   an internal label, thunk, alias or the like), return the outer symbol.
   When add_symbol_to_partition_1 is called on the outer symbol it must
   eventually add NODE, too.  */

static symtab_node *
contained_in_symbol (symtab_node *node)
{
  /* Weakrefs are never contained in anything.  */
  if (node->weakref)
    return node;
  if (cgraph_node *cnode = dyn_cast <cgraph_node> (node))
    {
      cnode = cgraph_function_node (cnode, NULL);
      if (cnode->global.inlined_to)
	cnode = cnode->global.inlined_to;
      return cnode;
    }
  else if (varpool_node *vnode = dyn_cast <varpool_node> (node))
    return varpool_variable_node (vnode, NULL);
  return node;
}
/* Add symbol NODE to partition.  When the definition of NODE is part
   of another symbol's definition, add the other symbol, too.  */

static void
add_symbol_to_partition (ltrans_partition part, symtab_node *node)
{
  symtab_node *node1;

  /* Verify that we do not try to duplicate something that can not be.  */
  gcc_checking_assert (get_symbol_class (node) == SYMBOL_DUPLICATE
		       || !symbol_partitioned_p (node));

  while ((node1 = contained_in_symbol (node)) != node)
    node = node1;

  /* If we have a duplicated symbol contained in something we can not duplicate,
     we are very badly screwed.  The other way is possible, so we do not
     assert this in add_symbol_to_partition_1.

     Be lax about comdats; they may or may not be duplicated and we may
     end up needing to duplicate a keyed comdat because it has an unkeyed
     alias.  */
  gcc_assert (get_symbol_class (node) == SYMBOL_DUPLICATE
	      || DECL_COMDAT (node->decl)
	      || !symbol_partitioned_p (node));

  add_symbol_to_partition_1 (part, node);
}
/* Undo all additions until the number of nodes in PARTITION is N_NODES.  */

static void
undo_partition (ltrans_partition partition, unsigned int n_nodes)
{
  while (lto_symtab_encoder_size (partition->encoder) > (int)n_nodes)
    {
      symtab_node *node = lto_symtab_encoder_deref (partition->encoder,
						    n_nodes);

      /* After UNDO we no longer know what was visited.  */
      if (partition->initializers_visited)
	pointer_set_destroy (partition->initializers_visited);
      partition->initializers_visited = NULL;

      if (cgraph_node *cnode = dyn_cast <cgraph_node> (node))
	partition->insns -= inline_summary (cnode)->self_size;
      lto_symtab_encoder_delete_node (partition->encoder, node);
      node->aux = (void *)((size_t)node->aux - 1);
    }
}
/* Group cgraph nodes by input files.  This is used mainly for testing
   right now.  */
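/* Usage note: this is the scheme selected by -flto-partition=1to1; every
   partition mirrors one of the original input files.  */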
void
lto_1_to_1_map (void)
{
  symtab_node *node;
  struct lto_file_decl_data *file_data;
  struct pointer_map_t *pmap;
  ltrans_partition partition;
  void **slot;
  int npartitions = 0;

  pmap = pointer_map_create ();

  FOR_EACH_SYMBOL (node)
    {
      if (get_symbol_class (node) != SYMBOL_PARTITION
	  || symbol_partitioned_p (node))
	continue;

      file_data = node->lto_file_data;

      if (file_data)
	{
	  slot = pointer_map_contains (pmap, file_data);
	  if (slot)
	    partition = (ltrans_partition) *slot;
	  else
	    {
	      partition = new_partition (file_data->file_name);
	      slot = pointer_map_insert (pmap, file_data);
	      *slot = partition;
	      npartitions++;
	    }
	}
      else if (!file_data && ltrans_partitions.length ())
	partition = ltrans_partitions[0];
      else
	{
	  partition = new_partition ("");
	  slot = pointer_map_insert (pmap, NULL);
	  *slot = partition;
	  npartitions++;
	}

      add_symbol_to_partition (partition, node);
    }

  /* If the cgraph is empty, create one cgraph node set so that there is still
     an output file for any variables that need to be exported in a DSO.  */
  if (!npartitions)
    new_partition ("empty");

  pointer_map_destroy (pmap);
}
/* Maximal partitioning.  Put every new symbol into a new partition if possible.  */
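/* Usage note: this corresponds to -flto-partition=max, which creates one
   partition per top-level symbol where possible.  */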
void
lto_max_map (void)
{
  symtab_node *node;
  ltrans_partition partition;
  int npartitions = 0;

  FOR_EACH_SYMBOL (node)
    {
      if (get_symbol_class (node) != SYMBOL_PARTITION
	  || symbol_partitioned_p (node))
	continue;
      partition = new_partition (symtab_node_asm_name (node));
      add_symbol_to_partition (partition, node);
      npartitions++;
    }
  if (!npartitions)
    new_partition ("empty");
}
/* Helper function for qsort; sort nodes by order.  */
static int
node_cmp (const void *pa, const void *pb)
{
  const struct cgraph_node *a = *(const struct cgraph_node * const *) pa;
  const struct cgraph_node *b = *(const struct cgraph_node * const *) pb;
  return b->order - a->order;
}

/* Helper function for qsort; sort nodes by order.  */
static int
varpool_node_cmp (const void *pa, const void *pb)
{
  const struct varpool_node *a = *(const struct varpool_node * const *) pa;
  const struct varpool_node *b = *(const struct varpool_node * const *) pb;
  return b->order - a->order;
}
/* Group cgraph nodes into equally-sized partitions.

   The partitioning algorithm is simple: nodes are taken in predefined order.
   The order corresponds to the order we want functions to have in the final
   output.  In the future this will be given by the function reordering pass,
   but at the moment we use the topological order, which is a good
   approximation.

   The goal is to partition this linear order into intervals (partitions) so
   that all the partitions have approximately the same size and the number of
   callgraph or IPA reference edges crossing boundaries is minimal.

   This is a lot faster (O(n) in size of callgraph) than algorithms doing
   priority-based graph clustering that are generally O(n^2) and, since
   WHOPR is designed to make things go well across partitions, it leads
   to good results.

   We compute the expected size of a partition as:

     max (total_size / lto_partitions, min_partition_size)

   We use dynamic expected size of partition so small programs are partitioned
   into enough partitions to allow use of multiple CPUs, while large programs
   are not partitioned too much.  Creating too many partitions significantly
   increases the streaming overhead.

   In the future, we would like to bound the maximal size of partitions so as
   to prevent the LTRANS stage from consuming too much memory.  At the moment,
   however, the WPA stage is the most memory intensive for large benchmarks,
   since too many types and declarations are read into memory.

   The function implements a simple greedy algorithm.  Nodes are being added
   to the current partition until after 3/4 of the expected partition size is
   reached.  Past this threshold, we keep track of boundary size (number of
   edges going to other partitions) and continue adding functions until after
   the current partition has grown to twice the expected partition size.  Then
   the process is undone to the point where the minimal ratio of boundary size
   and in-partition calls was reached.  */
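/* A worked example with made-up numbers (chosen only for illustration): for a
   program whose total_size is 100000 estimated instructions compiled with
   --param lto-partitions=32, the expected partition size is
   max (100000 / 32, min partition size) = 3125 instructions, assuming the
   minimal-size parameter is smaller than that.  The greedy loop below then
   starts looking for a good cut once a partition exceeds 3/4 * 3125 ~ 2344
   instructions, and forces a cut, unwinding to the best split point seen,
   once the partition grows past 2 * 3125 = 6250 instructions.  */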
void
lto_balanced_map (void)
{
  int n_nodes = 0;
  int n_varpool_nodes = 0, varpool_pos = 0, best_varpool_pos = 0;
  struct cgraph_node **order = XNEWVEC (struct cgraph_node *, cgraph_max_uid);
  struct varpool_node **varpool_order = NULL;
  int i;
  struct cgraph_node *node;
  int total_size = 0, best_total_size = 0;
  int partition_size;
  ltrans_partition partition;
  int last_visited_node = 0;
  struct varpool_node *vnode;
  int cost = 0, internal = 0;
  int best_n_nodes = 0, best_i = 0, best_cost = INT_MAX, best_internal = 0;
  int npartitions;
  int current_order = -1;

  FOR_EACH_VARIABLE (vnode)
    gcc_assert (!vnode->aux);

  FOR_EACH_DEFINED_FUNCTION (node)
    if (get_symbol_class (node) == SYMBOL_PARTITION)
      {
	order[n_nodes++] = node;
	total_size += inline_summary (node)->size;
      }

  /* Streaming works best when the source units do not cross partition
     boundaries much.  This is because importing a function from a source
     unit tends to import a lot of global trees defined there.  We should
     get better about minimizing the function boundary, but until then
     things work more smoothly if we order in source order.  */
  qsort (order, n_nodes, sizeof (struct cgraph_node *), node_cmp);

  if (!flag_toplevel_reorder)
    {
      qsort (order, n_nodes, sizeof (struct cgraph_node *), node_cmp);

      FOR_EACH_VARIABLE (vnode)
	if (get_symbol_class (vnode) == SYMBOL_PARTITION)
	  n_varpool_nodes++;
      varpool_order = XNEWVEC (struct varpool_node *, n_varpool_nodes);

      n_varpool_nodes = 0;
      FOR_EACH_VARIABLE (vnode)
	if (get_symbol_class (vnode) == SYMBOL_PARTITION)
	  varpool_order[n_varpool_nodes++] = vnode;
      qsort (varpool_order, n_varpool_nodes, sizeof (struct varpool_node *),
	     varpool_node_cmp);
    }

  /* Compute partition size and create the first partition.  */
  partition_size = total_size / PARAM_VALUE (PARAM_LTO_PARTITIONS);
  if (partition_size < PARAM_VALUE (MIN_PARTITION_SIZE))
    partition_size = PARAM_VALUE (MIN_PARTITION_SIZE);
  npartitions = 1;
  partition = new_partition ("");
  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "Total unit size: %i, partition size: %i\n",
	     total_size, partition_size);

  for (i = 0; i < n_nodes; i++)
    {
      if (symbol_partitioned_p (order[i]))
	continue;

      current_order = order[i]->order;

      if (!flag_toplevel_reorder)
	while (varpool_pos < n_varpool_nodes
	       && varpool_order[varpool_pos]->order < current_order)
	  {
	    if (!symbol_partitioned_p (varpool_order[varpool_pos]))
	      add_symbol_to_partition (partition, varpool_order[varpool_pos]);
	    varpool_pos++;
	  }

      add_symbol_to_partition (partition, order[i]);
      total_size -= inline_summary (order[i])->size;

      /* Once we added a new node to the partition, we also want to add
	 all referenced variables unless they were already added into some
	 earlier partition.
	 add_symbol_to_partition adds possibly multiple nodes and
	 variables that are needed to satisfy needs of ORDER[i].
	 We remember the last visited cgraph and varpool node from the last
	 iteration of the outer loop, which allows us to process every new
	 addition.

	 At the same time we compute the size of the boundary into COST.
	 Every callgraph or IPA reference edge leaving the partition
	 contributes to COST.  Every edge inside the partition was earlier
	 computed as one leaving it and thus we need to subtract it from
	 COST.  */
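      /* Accounting illustration (made-up numbers): a call edge with frequency
	 4 between functions A and B first contributes +4 to COST when one
	 endpoint is added, since at that point the edge crosses the partition
	 boundary.  Once the other endpoint joins the same partition, the walk
	 over its edges finds the first endpoint already inside and subtracts
	 the 4 again, so edges that end up fully inside the partition have no
	 net effect on COST.  */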
      while (last_visited_node < lto_symtab_encoder_size (partition->encoder))
	{
	  struct ipa_ref_list *refs;
	  int j;
	  struct ipa_ref *ref;
	  symtab_node *snode = lto_symtab_encoder_deref (partition->encoder,
							 last_visited_node);

	  if (cgraph_node *node = dyn_cast <cgraph_node> (snode))
	    {
	      struct cgraph_edge *edge;

	      refs = &node->ref_list;

	      last_visited_node++;

	      gcc_assert (node->definition || node->weakref);

	      /* Compute boundary cost of callgraph edges.  */
	      for (edge = node->callees; edge; edge = edge->next_callee)
		if (edge->callee->definition)
		  {
		    int edge_cost = edge->frequency;
		    int index;

		    if (!edge_cost)
		      edge_cost = 1;
		    gcc_assert (edge_cost > 0);
		    index = lto_symtab_encoder_lookup (partition->encoder,
						       edge->callee);
		    if (index != LCC_NOT_FOUND
			&& index < last_visited_node - 1)
		      cost -= edge_cost, internal += edge_cost;
		    else
		      cost += edge_cost;
		  }
	      for (edge = node->callers; edge; edge = edge->next_caller)
		{
		  int edge_cost = edge->frequency;
		  int index;

		  gcc_assert (edge->caller->definition);
		  if (!edge_cost)
		    edge_cost = 1;
		  gcc_assert (edge_cost > 0);
		  index = lto_symtab_encoder_lookup (partition->encoder,
						     edge->caller);
		  if (index != LCC_NOT_FOUND
		      && index < last_visited_node - 1)
		    cost -= edge_cost;
		  else
		    cost += edge_cost;
		}
	    }
	  else
	    {
	      refs = &snode->ref_list;
	      last_visited_node++;
	    }

	  /* Compute boundary cost of IPA REF edges and at the same time look
	     into variables referenced from the current partition and try to
	     add them.  */
	  for (j = 0; ipa_ref_list_reference_iterate (refs, j, ref); j++)
	    if (is_a <varpool_node> (ref->referred))
	      {
		int index;

		vnode = ipa_ref_varpool_node (ref);
		if (!vnode->definition)
		  continue;
		if (!symbol_partitioned_p (vnode) && flag_toplevel_reorder
		    && get_symbol_class (vnode) == SYMBOL_PARTITION)
		  add_symbol_to_partition (partition, vnode);
		index = lto_symtab_encoder_lookup (partition->encoder,
						   vnode);
		if (index != LCC_NOT_FOUND
		    && index < last_visited_node - 1)
		  cost--, internal++;
		else
		  cost++;
	      }
	    else
	      {
		int index;

		node = ipa_ref_node (ref);
		if (!node->definition)
		  continue;
		index = lto_symtab_encoder_lookup (partition->encoder,
						   node);
		if (index != LCC_NOT_FOUND
		    && index < last_visited_node - 1)
		  cost--, internal++;
		else
		  cost++;
	      }
	  for (j = 0; ipa_ref_list_referring_iterate (refs, j, ref); j++)
	    if (is_a <varpool_node> (ref->referring))
	      {
		int index;

		vnode = ipa_ref_referring_varpool_node (ref);
		gcc_assert (vnode->definition);
		if (!symbol_partitioned_p (vnode) && flag_toplevel_reorder
		    && get_symbol_class (vnode) == SYMBOL_PARTITION)
		  add_symbol_to_partition (partition, vnode);
		index = lto_symtab_encoder_lookup (partition->encoder,
						   vnode);
		if (index != LCC_NOT_FOUND
		    && index < last_visited_node - 1)
		  cost--;
		else
		  cost++;
	      }
	    else
	      {
		int index;

		node = ipa_ref_referring_node (ref);
		gcc_assert (node->definition);
		index = lto_symtab_encoder_lookup (partition->encoder,
						   node);
		if (index != LCC_NOT_FOUND
		    && index < last_visited_node - 1)
		  cost--;
		else
		  cost++;
	      }
	}
      /* If the partition is large enough, start looking for the smallest
	 boundary cost.  */
      if (partition->insns < partition_size * 3 / 4
	  || best_cost == INT_MAX
	  || ((!cost
	       || (best_internal * (HOST_WIDE_INT) cost
		   > (internal * (HOST_WIDE_INT) best_cost)))
	      && partition->insns < partition_size * 5 / 4))
	{
	  best_cost = cost;
	  best_internal = internal;
	  best_i = i;
	  best_n_nodes = lto_symtab_encoder_size (partition->encoder);
	  best_total_size = total_size;
	  best_varpool_pos = varpool_pos;
	}
      if (cgraph_dump_file)
	fprintf (cgraph_dump_file, "Step %i: added %s/%i, size %i, cost %i/%i "
		 "best %i/%i, step %i\n", i,
		 cgraph_node_name (order[i]), order[i]->order,
		 partition->insns, cost, internal,
		 best_cost, best_internal, best_i);
      /* The partition is too large: unwind to the step where the best cost
	 was reached and start a new partition.  */
      if (partition->insns > 2 * partition_size)
	{
	  if (best_i != i)
	    {
	      if (cgraph_dump_file)
		fprintf (cgraph_dump_file, "Unwinding %i insertions to step %i\n",
			 i - best_i, best_i);
	      undo_partition (partition, best_n_nodes);
	      varpool_pos = best_varpool_pos;
	    }
	  i = best_i;
	  /* When we are finished, avoid creating an empty partition.  */
	  while (i < n_nodes - 1 && symbol_partitioned_p (order[i + 1]))
	    i++;
	  if (i == n_nodes - 1)
	    break;
	  partition = new_partition ("");
	  last_visited_node = 0;
	  total_size = best_total_size;
	  cost = 0;

	  if (cgraph_dump_file)
	    fprintf (cgraph_dump_file, "New partition\n");
	  best_n_nodes = 0;
	  best_cost = INT_MAX;

	  /* Since the size of partitions is just approximate, update the size
	     after we finish the current one.  */
	  if (npartitions < PARAM_VALUE (PARAM_LTO_PARTITIONS))
	    partition_size = total_size
	      / (PARAM_VALUE (PARAM_LTO_PARTITIONS) - npartitions);
	  else
	    partition_size = INT_MAX;

	  if (partition_size < PARAM_VALUE (MIN_PARTITION_SIZE))
	    partition_size = PARAM_VALUE (MIN_PARTITION_SIZE);
	  npartitions++;
	}
    }

  /* Variables that are not reachable from the code go into the last
     partition.  */
  if (flag_toplevel_reorder)
    {
      FOR_EACH_VARIABLE (vnode)
	if (get_symbol_class (vnode) == SYMBOL_PARTITION
	    && !symbol_partitioned_p (vnode))
	  add_symbol_to_partition (partition, vnode);
    }
  else
    {
      while (varpool_pos < n_varpool_nodes)
	{
	  if (!symbol_partitioned_p (varpool_order[varpool_pos]))
	    add_symbol_to_partition (partition, varpool_order[varpool_pos]);
	  varpool_pos++;
	}
      free (varpool_order);
    }
  free (order);
}
/* Mangle the symbol name of NODE into a local name.
   This is necessary
   1) if two or more static vars with the same assembler name
      are merged into a single ltrans unit, or
   2) if a previously static var was promoted hidden to avoid a possible
      conflict with symbols defined outside of the LTO world.  */
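/* Illustration (names are hypothetical): two translation units each define
   "static int counter;" and both end up in the same ltrans partition.  One of
   them keeps the name, while the other is renamed via clone_function_name to
   something of the form counter.lto_priv.<N>, and the mapping is recorded so
   later streaming refers to the new name.  */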
static bool
privatize_symbol_name (symtab_node *node)
{
  tree decl = node->decl;
  const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));

  /* Our renaming machinery does not handle more than one change of assembler
     name.  We should not need more than one anyway.  */
  if (node->lto_file_data
      && lto_get_decl_name_mapping (node->lto_file_data, name) != name)
    {
      if (cgraph_dump_file)
	fprintf (cgraph_dump_file,
		 "Not privatizing symbol name: %s.  It is privatized already.\n",
		 name);
      return false;
    }
  /* Avoid mangling of already mangled clones.
     ??? We should have a flag saying whether a symbol has a 'private' name
     already, since we produce some symbols like that (i.e. for global
     constructors) that are not really clones.  */
  if (node->unique_name)
    {
      if (cgraph_dump_file)
	fprintf (cgraph_dump_file,
		 "Not privatizing symbol name: %s.  Has unique name.\n",
		 name);
      return false;
    }
  change_decl_assembler_name (decl, clone_function_name (decl, "lto_priv"));
  if (node->lto_file_data)
    lto_record_renamed_decl (node->lto_file_data, name,
			     IDENTIFIER_POINTER
			     (DECL_ASSEMBLER_NAME (decl)));
  if (cgraph_dump_file)
    fprintf (cgraph_dump_file,
	     "Privatizing symbol name: %s -> %s\n",
	     name, IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));
  return true;
}
/* Promote symbol NODE to be public, i.e. externally visible with hidden
   visibility, so it can be referenced from other partitions.  */

static void
promote_symbol (symtab_node *node)
{
  /* We already promoted ... */
  if (DECL_VISIBILITY (node->decl) == VISIBILITY_HIDDEN
      && DECL_VISIBILITY_SPECIFIED (node->decl)
      && TREE_PUBLIC (node->decl))
    return;

  gcc_checking_assert (!TREE_PUBLIC (node->decl)
		       && !DECL_EXTERNAL (node->decl));
  /* Be sure that the newly public symbol does not conflict with anything
     already defined by the non-LTO part.  */
  privatize_symbol_name (node);
  TREE_PUBLIC (node->decl) = 1;
  DECL_VISIBILITY (node->decl) = VISIBILITY_HIDDEN;
  DECL_VISIBILITY_SPECIFIED (node->decl) = true;
  if (cgraph_dump_file)
    fprintf (cgraph_dump_file,
	     "Promoting as hidden: %s\n", symtab_node_name (node));
}
/* Return true if NODE needs a named section even if it won't land in the
   partition symbol table.
   FIXME: we should really not use named sections for inline clones and master
   clones.  */

static bool
may_need_named_section_p (lto_symtab_encoder_t encoder, symtab_node *node)
{
  struct cgraph_node *cnode = dyn_cast <cgraph_node> (node);
  if (!cnode)
    return false;
  if (symtab_real_symbol_p (node))
    return false;
  return (!encoder
	  || (lto_symtab_encoder_lookup (encoder, node) != LCC_NOT_FOUND
	      && lto_symtab_encoder_encode_body_p (encoder,
						   cnode)));
}
/* If NODE represents a static variable, see if there are other variables
   of the same name in partition ENCODER (or in the whole compilation unit if
   ENCODER is NULL) and if so, mangle the statics.  Always mangle all
   conflicting statics, so we reduce the chances of silently miscompiling
   asm statements referring to them by symbol name.  */
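/* Illustration (hypothetical): two files each define "static int lock;" and
   both definitions land in the partition described by ENCODER, while an asm
   statement somewhere refers to "lock" by name.  The idea behind renaming
   every conflicting static, not just one of them, is that the asm reference
   then fails visibly instead of silently binding to the wrong object.  */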
static void
rename_statics (lto_symtab_encoder_t encoder, symtab_node *node)
{
  tree decl = node->decl;
  symtab_node *s;
  tree name = DECL_ASSEMBLER_NAME (decl);

  /* See if this is a static symbol.  */
  if ((node->externally_visible
       /* FIXME: externally_visible is somewhat illogically not set for
	  external symbols (i.e. those not defined).  Remove this test
	  once this is fixed.  */
       || DECL_EXTERNAL (node->decl)
       || !symtab_real_symbol_p (node))
      && !may_need_named_section_p (encoder, node))
    return;

  /* Now walk symbols sharing the same name and see if there are any conflicts.
     (All types of symbols count here, since we can not have a static with the
     same name as an external or public symbol.)  */
  for (s = symtab_node_for_asm (name);
       s; s = s->next_sharing_asm_name)
    if ((symtab_real_symbol_p (s) || may_need_named_section_p (encoder, s))
	&& s->decl != node->decl
	&& (!encoder
	    || lto_symtab_encoder_lookup (encoder, s) != LCC_NOT_FOUND))
      break;

  /* OK, no conflict, so we have nothing to do.  */
  if (!s)
    return;

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file,
	     "Renaming statics with asm name: %s\n", symtab_node_name (node));

  /* Assign every symbol in the set that shares the same ASM name a unique
     mangled name.  */
  for (s = symtab_node_for_asm (name); s;)
    if (!s->externally_visible
	&& ((symtab_real_symbol_p (s)
	     && !DECL_EXTERNAL (node->decl)
	     && !TREE_PUBLIC (node->decl))
	    || may_need_named_section_p (encoder, s))
	&& (!encoder
	    || lto_symtab_encoder_lookup (encoder, s) != LCC_NOT_FOUND))
      {
	if (privatize_symbol_name (s))
	  /* Re-start from the beginning since we do not know how many symbols
	     changed their names.  */
	  s = symtab_node_for_asm (name);
	else
	  s = s->next_sharing_asm_name;
      }
    else
      s = s->next_sharing_asm_name;
}
/* Find out all static decls that need to be promoted to global because
   of cross file sharing.  This function must be run in the WPA mode after
   all inlinees are added.  */
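/* Illustration (hypothetical): "static void helper (void)" ends up in a
   different partition than one of its callers.  The helper first gets a
   unique private name (privatize_symbol_name) and is then promoted to a
   public symbol with hidden visibility (promote_symbol), so the
   cross-partition call resolves at link time without exporting the symbol
   from the resulting DSO.  */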
void
lto_promote_cross_file_statics (void)
{
  unsigned i, n_sets;

  gcc_assert (flag_wpa);

  /* First compute boundaries.  */
  n_sets = ltrans_partitions.length ();
  for (i = 0; i < n_sets; i++)
    {
      ltrans_partition part
	= ltrans_partitions[i];
      part->encoder = compute_ltrans_boundary (part->encoder);
    }

  /* Look at boundaries and promote symbols as needed.  */
  for (i = 0; i < n_sets; i++)
    {
      lto_symtab_encoder_iterator lsei;
      lto_symtab_encoder_t encoder = ltrans_partitions[i]->encoder;

      for (lsei = lsei_start (encoder); !lsei_end_p (lsei);
	   lsei_next (&lsei))
	{
	  symtab_node *node = lsei_node (lsei);

	  /* If the symbol is static, rename it if its assembler name clashes
	     with anything else in this unit.  */
	  rename_statics (encoder, node);

	  /* No need to promote if the symbol already is externally visible ... */
	  if (node->externally_visible
	      /* ... or if it is part of the current partition ... */
	      || lto_symtab_encoder_in_partition_p (encoder, node)
	      /* ... or if we do not partition it.  This means that it will
		 appear in every partition referencing it.  */
	      || get_symbol_class (node) != SYMBOL_PARTITION)
	    continue;

	  promote_symbol (node);
	}
    }
}
/* Rename statics in the whole unit when -flto-partition=none is used.  */

void
lto_promote_statics_nonwpa (void)
{
  symtab_node *node;
  FOR_EACH_SYMBOL (node)
    rename_statics (NULL, node);
}