PR preprocessor/63831
[official-gcc.git] / gcc / lto / lto-partition.c
blob809a4937b663d205f05c8c31cb87c0ca8659028b
1 /* LTO partitioning logic routines.
2 Copyright (C) 2009-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "toplev.h"
24 #include "tree.h"
25 #include "predict.h"
26 #include "vec.h"
27 #include "hashtab.h"
28 #include "hash-set.h"
29 #include "machmode.h"
30 #include "tm.h"
31 #include "hard-reg-set.h"
32 #include "input.h"
33 #include "function.h"
34 #include "basic-block.h"
35 #include "tree-ssa-alias.h"
36 #include "internal-fn.h"
37 #include "gimple-expr.h"
38 #include "is-a.h"
39 #include "gimple.h"
40 #include "hash-map.h"
41 #include "plugin-api.h"
42 #include "ipa-ref.h"
43 #include "cgraph.h"
44 #include "lto-streamer.h"
45 #include "timevar.h"
46 #include "params.h"
47 #include "alloc-pool.h"
48 #include "ipa-prop.h"
49 #include "ipa-inline.h"
50 #include "ipa-utils.h"
51 #include "lto-partition.h"
/* All LTRANS partitions produced by the chosen partitioning scheme.  */
53 vec<ltrans_partition> ltrans_partitions;

/* Forward declaration; mutually recursive with add_references_to_partition.  */
55 static void add_symbol_to_partition (ltrans_partition part, symtab_node *node);
58 /* Create new partition with name NAME. */
60 static ltrans_partition
61 new_partition (const char *name)
63 ltrans_partition part = XCNEW (struct ltrans_partition_def);
64 part->encoder = lto_symtab_encoder_new (false);
65 part->name = name;
66 part->insns = 0;
67 ltrans_partitions.safe_push (part);
68 return part;
71 /* Free memory used by ltrans datastructures. */
73 void
74 free_ltrans_partitions (void)
76 unsigned int idx;
77 ltrans_partition part;
78 for (idx = 0; ltrans_partitions.iterate (idx, &part); idx++)
80 if (part->initializers_visited)
81 delete part->initializers_visited;
82 /* Symtab encoder is freed after streaming. */
83 free (part);
85 ltrans_partitions.release ();
88 /* Return true if symbol is already in some partition. */
90 static inline bool
91 symbol_partitioned_p (symtab_node *node)
93 return node->aux;
96 /* Add references into the partition. */
97 static void
98 add_references_to_partition (ltrans_partition part, symtab_node *node)
100 int i;
101 struct ipa_ref *ref = NULL;
103 /* Add all duplicated references to the partition. */
104 for (i = 0; node->iterate_reference (i, ref); i++)
105 if (ref->referred->get_partitioning_class () == SYMBOL_DUPLICATE)
106 add_symbol_to_partition (part, ref->referred);
107 /* References to a readonly variable may be constant foled into its value.
108 Recursively look into the initializers of the constant variable and add
109 references, too. */
110 else if (is_a <varpool_node *> (ref->referred)
111 && (dyn_cast <varpool_node *> (ref->referred)
112 ->ctor_useable_for_folding_p ()
113 || POINTER_BOUNDS_P (ref->referred->decl))
114 && !lto_symtab_encoder_in_partition_p (part->encoder, ref->referred))
116 if (!part->initializers_visited)
117 part->initializers_visited = new hash_set<symtab_node *>;
118 if (!part->initializers_visited->add (ref->referred))
119 add_references_to_partition (part, ref->referred);
123 /* Helper function for add_symbol_to_partition doing the actual dirty work
124 of adding NODE to PART. */
126 static bool
127 add_symbol_to_partition_1 (ltrans_partition part, symtab_node *node)
129 enum symbol_partitioning_class c = node->get_partitioning_class ();
130 struct ipa_ref *ref;
131 symtab_node *node1;
133 /* If NODE is already there, we have nothing to do. */
134 if (lto_symtab_encoder_in_partition_p (part->encoder, node))
135 return true;
137 /* non-duplicated aliases or thunks of a duplicated symbol need to be output
138 just once.
140 Be lax about comdats; they may or may not be duplicated and we may
141 end up in need to duplicate keyed comdat because it has unkeyed alias. */
142 if (c == SYMBOL_PARTITION && !DECL_COMDAT (node->decl)
143 && symbol_partitioned_p (node))
144 return false;
146 /* Be sure that we never try to duplicate partitioned symbol
147 or add external symbol. */
148 gcc_assert (c != SYMBOL_EXTERNAL
149 && (c == SYMBOL_DUPLICATE || !symbol_partitioned_p (node)));
151 lto_set_symtab_encoder_in_partition (part->encoder, node);
153 if (symbol_partitioned_p (node))
155 node->in_other_partition = 1;
156 if (symtab->dump_file)
157 fprintf (symtab->dump_file,
158 "Symbol node %s now used in multiple partitions\n",
159 node->name ());
/* AUX serves as a reference count of partitions the symbol was added to.  */
161 node->aux = (void *)((size_t)node->aux + 1);
163 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
165 struct cgraph_edge *e;
/* Only non-alias function bodies contribute to the partition size.  */
166 if (!node->alias)
167 part->insns += inline_summary (cnode)->self_size;
169 /* Add all inline clones and callees that are duplicated. */
170 for (e = cnode->callees; e; e = e->next_callee)
171 if (!e->inline_failed)
172 add_symbol_to_partition_1 (part, e->callee)
173 else if (e->callee->get_partitioning_class () == SYMBOL_DUPLICATE)
174 add_symbol_to_partition (part, e->callee);
176 /* Add all thunks associated with the function. */
177 for (e = cnode->callers; e; e = e->next_caller)
178 if (e->caller->thunk.thunk_p)
179 add_symbol_to_partition_1 (part, e->caller);
181 /* Instrumented version is actually the same function.
182 Therefore put it into the same partition. */
183 if (cnode->instrumented_version)
184 add_symbol_to_partition_1 (part, cnode->instrumented_version);
187 add_references_to_partition (part, node);
189 /* Add all aliases associated with the symbol. */
191 FOR_EACH_ALIAS (node, ref)
192 if (!node->weakref)
193 add_symbol_to_partition_1 (part, ref->referring);
195 /* Ensure that SAME_COMDAT_GROUP lists all always added in a group. */
196 if (node->same_comdat_group)
197 for (node1 = node->same_comdat_group;
198 node1 != node; node1 = node1->same_comdat_group)
/* NOTE(review): tests NODE, not NODE1 — looks intentional (aliases pull
   their group via their target) but worth confirming.  */
199 if (!node->alias)
201 bool added = add_symbol_to_partition_1 (part, node1);
202 gcc_assert (added);
204 return true;
207 /* If symbol NODE is really part of other symbol's definition (i.e. it is
208 internal label, thunk, alias or so), return the outer symbol.
209 When add_symbol_to_partition_1 is called on the outer symbol it must
210 eventually add NODE, too. */
211 static symtab_node *
212 contained_in_symbol (symtab_node *node)
214 /* Weakrefs are never contained in anything. */
215 if (node->weakref)
216 return node;
217 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
219 cnode = cnode->function_symbol ();
220 if (cnode->global.inlined_to)
221 cnode = cnode->global.inlined_to;
222 return cnode;
224 else if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
225 return vnode->ultimate_alias_target ();
226 return node;
229 /* Add symbol NODE to partition. When definition of NODE is part
230 of other symbol definition, add the other symbol, too. */
232 static void
233 add_symbol_to_partition (ltrans_partition part, symtab_node *node)
235 symtab_node *node1;
237 /* Verify that we do not try to duplicate something that can not be. */
238 gcc_checking_assert (node->get_partitioning_class () == SYMBOL_DUPLICATE
239 || !symbol_partitioned_p (node));
241 while ((node1 = contained_in_symbol (node)) != node)
242 node = node1;
244 /* If we have duplicated symbol contained in something we can not duplicate,
245 we are very badly screwed. The other way is possible, so we do not
246 assert this in add_symbol_to_partition_1.
248 Be lax about comdats; they may or may not be duplicated and we may
249 end up in need to duplicate keyed comdat because it has unkeyed alias. */
251 gcc_assert (node->get_partitioning_class () == SYMBOL_DUPLICATE
252 || DECL_COMDAT (node->decl)
253 || !symbol_partitioned_p (node));
255 add_symbol_to_partition_1 (part, node);
258 /* Undo all additions until number of cgraph nodes in PARITION is N_CGRAPH_NODES
259 and number of varpool nodes is N_VARPOOL_NODES. */
261 static void
262 undo_partition (ltrans_partition partition, unsigned int n_nodes)
264 while (lto_symtab_encoder_size (partition->encoder) > (int)n_nodes)
266 symtab_node *node = lto_symtab_encoder_deref (partition->encoder,
267 n_nodes);
268 cgraph_node *cnode;
270 /* After UNDO we no longer know what was visited. */
271 if (partition->initializers_visited)
272 delete partition->initializers_visited;
273 partition->initializers_visited = NULL;
275 if (!node->alias && (cnode = dyn_cast <cgraph_node *> (node)))
276 partition->insns -= inline_summary (cnode)->self_size;
277 lto_symtab_encoder_delete_node (partition->encoder, node);
278 node->aux = (void *)((size_t)node->aux - 1);
282 /* Group cgrah nodes by input files. This is used mainly for testing
283 right now. */
285 void
286 lto_1_to_1_map (void)
288 symtab_node *node;
289 struct lto_file_decl_data *file_data;
290 hash_map<lto_file_decl_data *, ltrans_partition> pmap;
291 ltrans_partition partition;
292 int npartitions = 0;
294 FOR_EACH_SYMBOL (node)
296 if (node->get_partitioning_class () != SYMBOL_PARTITION
297 || symbol_partitioned_p (node))
298 continue;
300 file_data = node->lto_file_data;
302 if (file_data)
304 ltrans_partition *slot = &pmap.get_or_insert (file_data);
305 if (*slot)
306 partition = *slot;
307 else
309 partition = new_partition (file_data->file_name);
310 *slot = partition;
311 npartitions++;
314 else if (!file_data && ltrans_partitions.length ())
315 partition = ltrans_partitions[0];
316 else
318 partition = new_partition ("");
319 pmap.put (NULL, partition);
320 npartitions++;
323 add_symbol_to_partition (partition, node);
326 /* If the cgraph is empty, create one cgraph node set so that there is still
327 an output file for any variables that need to be exported in a DSO. */
328 if (!npartitions)
329 new_partition ("empty");
333 /* Maximal partitioning. Put every new symbol into new partition if possible. */
335 void
336 lto_max_map (void)
338 symtab_node *node;
339 ltrans_partition partition;
340 int npartitions = 0;
342 FOR_EACH_SYMBOL (node)
344 if (node->get_partitioning_class () != SYMBOL_PARTITION
345 || symbol_partitioned_p (node))
346 continue;
347 partition = new_partition (node->asm_name ());
348 add_symbol_to_partition (partition, node);
349 npartitions++;
351 if (!npartitions)
352 new_partition ("empty");
355 /* Helper function for qsort; sort nodes by order. noreorder functions must have
356 been removed earlier. */
357 static int
358 node_cmp (const void *pa, const void *pb)
360 const struct cgraph_node *a = *(const struct cgraph_node * const *) pa;
361 const struct cgraph_node *b = *(const struct cgraph_node * const *) pb;
363 /* Profile reorder flag enables function reordering based on first execution
364 of a function. All functions with profile are placed in ascending
365 order at the beginning. */
367 if (flag_profile_reorder_functions)
369 /* Functions with time profile are sorted in ascending order. */
370 if (a->tp_first_run && b->tp_first_run)
371 return a->tp_first_run != b->tp_first_run
372 ? a->tp_first_run - b->tp_first_run
373 : a->order - b->order;
375 /* Functions with time profile are sorted before the functions
376 that do not have the profile. */
377 if (a->tp_first_run || b->tp_first_run)
378 return b->tp_first_run - a->tp_first_run;
381 return b->order - a->order;
384 /* Helper function for qsort; sort nodes by order. */
385 static int
386 varpool_node_cmp (const void *pa, const void *pb)
388 const symtab_node *a = *static_cast<const symtab_node * const *> (pa);
389 const symtab_node *b = *static_cast<const symtab_node * const *> (pb);
390 return b->order - a->order;
393 /* Add all symtab nodes from NEXT_NODE to PARTITION in order. */
395 static void
396 add_sorted_nodes (vec<symtab_node *> &next_nodes, ltrans_partition partition)
398 unsigned i;
399 symtab_node *node;
401 next_nodes.qsort (varpool_node_cmp);
402 FOR_EACH_VEC_ELT (next_nodes, i, node)
403 if (!symbol_partitioned_p (node))
404 add_symbol_to_partition (partition, node);
408 /* Group cgraph nodes into equally-sized partitions.
410 The partitioning algorithm is simple: nodes are taken in predefined order.
411 The order corresponds to the order we want functions to have in the final
412 output. In the future this will be given by function reordering pass, but
413 at the moment we use the topological order, which is a good approximation.
415 The goal is to partition this linear order into intervals (partitions) so
416 that all the partitions have approximately the same size and the number of
417 callgraph or IPA reference edges crossing boundaries is minimal.
419 This is a lot faster (O(n) in size of callgraph) than algorithms doing
420 priority-based graph clustering that are generally O(n^2) and, since
421 WHOPR is designed to make things go well across partitions, it leads
422 to good results.
424 We compute the expected size of a partition as:
426 max (total_size / lto_partitions, min_partition_size)
428 We use dynamic expected size of partition so small programs are partitioned
429 into enough partitions to allow use of multiple CPUs, while large programs
430 are not partitioned too much. Creating too many partitions significantly
431 increases the streaming overhead.
433 In the future, we would like to bound the maximal size of partitions so as
434 to prevent the LTRANS stage from consuming too much memory. At the moment,
435 however, the WPA stage is the most memory intensive for large benchmarks,
436 since too many types and declarations are read into memory.
438 The function implements a simple greedy algorithm. Nodes are being added
439 to the current partition until after 3/4 of the expected partition size is
440 reached. Past this threshold, we keep track of boundary size (number of
441 edges going to other partitions) and continue adding functions until after
442 the current partition has grown to twice the expected partition size. Then
443 the process is undone to the point where the minimal ratio of boundary size
444 and in-partition calls was reached. */
446 void
447 lto_balanced_map (int n_lto_partitions)
449 int n_nodes = 0;
450 int n_varpool_nodes = 0, varpool_pos = 0, best_varpool_pos = 0;
451 struct cgraph_node **order = XNEWVEC (cgraph_node *, symtab->cgraph_max_uid);
452 auto_vec<cgraph_node *> noreorder;
453 auto_vec<varpool_node *> varpool_order;
454 int i;
455 struct cgraph_node *node;
456 int total_size = 0, best_total_size = 0;
457 int partition_size;
458 ltrans_partition partition;
459 int last_visited_node = 0;
460 varpool_node *vnode;
461 int cost = 0, internal = 0;
462 int best_n_nodes = 0, best_i = 0, best_cost =
463 INT_MAX, best_internal = 0;
464 int npartitions;
465 int current_order = -1;
466 int noreorder_pos = 0;
/* AUX of every variable must be unused; it will hold partition counts.  */
468 FOR_EACH_VARIABLE (vnode)
469 gcc_assert (!vnode->aux);
471 FOR_EACH_DEFINED_FUNCTION (node)
472 if (node->get_partitioning_class () == SYMBOL_PARTITION)
474 if (node->no_reorder)
475 noreorder.safe_push (node);
476 else
477 order[n_nodes++] = node;
478 if (!node->alias)
479 total_size += inline_summary (node)->size;
482 /* Streaming works best when the source units do not cross partition
483 boundaries much. This is because importing function from a source
484 unit tends to import a lot of global trees defined there. We should
485 get better about minimizing the function boundary, but until that
486 things work smoother if we order in source order. */
487 qsort (order, n_nodes, sizeof (struct cgraph_node *), node_cmp);
488 noreorder.qsort (node_cmp);
490 if (symtab->dump_file)
492 for(i = 0; i < n_nodes; i++)
493 fprintf (symtab->dump_file, "Balanced map symbol order:%s:%u\n",
494 order[i]->name (), order[i]->tp_first_run);
495 for(i = 0; i < (int)noreorder.length(); i++)
496 fprintf (symtab->dump_file, "Balanced map symbol no_reorder:%s:%u\n",
497 noreorder[i]->name (), noreorder[i]->tp_first_run);
500 /* Collect all variables that should not be reordered. */
501 FOR_EACH_VARIABLE (vnode)
502 if (vnode->get_partitioning_class () == SYMBOL_PARTITION
503 && (!flag_toplevel_reorder || vnode->no_reorder))
504 varpool_order.safe_push (vnode);
505 n_varpool_nodes = varpool_order.length ();
506 varpool_order.qsort (varpool_node_cmp);
508 /* Compute partition size and create the first partition. */
509 partition_size = total_size / n_lto_partitions;
510 if (partition_size < PARAM_VALUE (MIN_PARTITION_SIZE))
511 partition_size = PARAM_VALUE (MIN_PARTITION_SIZE);
512 npartitions = 1;
513 partition = new_partition ("");
514 if (symtab->dump_file)
515 fprintf (symtab->dump_file, "Total unit size: %i, partition size: %i\n",
516 total_size, partition_size);
518 auto_vec<symtab_node *> next_nodes;
520 for (i = 0; i < n_nodes; i++)
522 if (symbol_partitioned_p (order[i]))
523 continue;
525 current_order = order[i]->order;
527 /* Output noreorder and varpool in program order first. */
528 next_nodes.truncate (0);
529 while (varpool_pos < n_varpool_nodes
530 && varpool_order[varpool_pos]->order < current_order)
531 next_nodes.safe_push (varpool_order[varpool_pos++]);
532 while (noreorder_pos < (int)noreorder.length ()
533 && noreorder[noreorder_pos]->order < current_order)
535 if (!noreorder[noreorder_pos]->alias)
536 total_size -= inline_summary (noreorder[noreorder_pos])->size;
537 next_nodes.safe_push (noreorder[noreorder_pos++]);
539 add_sorted_nodes (next_nodes, partition);
541 add_symbol_to_partition (partition, order[i]);
542 if (!order[i]->alias)
543 total_size -= inline_summary (order[i])->size;
546 /* Once we added a new node to the partition, we also want to add
547 all referenced variables unless they were already added into some
548 earlier partition.
549 add_symbol_to_partition adds possibly multiple nodes and
550 variables that are needed to satisfy needs of ORDER[i].
551 We remember last visited cgraph and varpool node from last iteration
552 of outer loop that allows us to process every new addition.
554 At the same time we compute size of the boundary into COST. Every
555 callgraph or IPA reference edge leaving the partition contributes into
556 COST. Every edge inside partition was earlier computed as one leaving
557 it and thus we need to subtract it from COST. */
558 while (last_visited_node < lto_symtab_encoder_size (partition->encoder))
560 symtab_node *refs_node;
561 int j;
562 struct ipa_ref *ref = NULL;
563 symtab_node *snode = lto_symtab_encoder_deref (partition->encoder,
564 last_visited_node);
566 if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
568 struct cgraph_edge *edge;
570 refs_node = node;
572 last_visited_node++;
574 gcc_assert (node->definition || node->weakref);
576 /* Compute boundary cost of callgraph edges. */
577 for (edge = node->callees; edge; edge = edge->next_callee)
578 if (edge->callee->definition)
580 int edge_cost = edge->frequency;
581 int index;
583 if (!edge_cost)
584 edge_cost = 1;
585 gcc_assert (edge_cost > 0);
586 index = lto_symtab_encoder_lookup (partition->encoder,
587 edge->callee);
588 if (index != LCC_NOT_FOUND
589 && index < last_visited_node - 1)
590 cost -= edge_cost, internal += edge_cost;
591 else
592 cost += edge_cost;
594 for (edge = node->callers; edge; edge = edge->next_caller)
596 int edge_cost = edge->frequency;
597 int index;
599 gcc_assert (edge->caller->definition);
600 if (!edge_cost)
601 edge_cost = 1;
602 gcc_assert (edge_cost > 0);
603 index = lto_symtab_encoder_lookup (partition->encoder,
604 edge->caller);
605 if (index != LCC_NOT_FOUND
606 && index < last_visited_node - 1)
607 cost -= edge_cost;
608 else
609 cost += edge_cost;
612 else
614 refs_node = snode;
615 last_visited_node++;
618 /* Compute boundary cost of IPA REF edges and at the same time look into
619 variables referenced from current partition and try to add them. */
620 for (j = 0; refs_node->iterate_reference (j, ref); j++)
621 if (is_a <varpool_node *> (ref->referred))
623 int index;
625 vnode = dyn_cast <varpool_node *> (ref->referred);
626 if (!vnode->definition)
627 continue;
628 if (!symbol_partitioned_p (vnode) && flag_toplevel_reorder
629 && !vnode->no_reorder
630 && vnode->get_partitioning_class () == SYMBOL_PARTITION)
631 add_symbol_to_partition (partition, vnode);
632 index = lto_symtab_encoder_lookup (partition->encoder,
633 vnode);
634 if (index != LCC_NOT_FOUND
635 && index < last_visited_node - 1)
636 cost--, internal++;
637 else
638 cost++;
640 else
642 int index;
644 node = dyn_cast <cgraph_node *> (ref->referred);
645 if (!node->definition)
646 continue;
647 index = lto_symtab_encoder_lookup (partition->encoder,
648 node);
649 if (index != LCC_NOT_FOUND
650 && index < last_visited_node - 1)
651 cost--, internal++;
652 else
653 cost++;
655 for (j = 0; refs_node->iterate_referring (j, ref); j++)
656 if (is_a <varpool_node *> (ref->referring))
658 int index;
660 vnode = dyn_cast <varpool_node *> (ref->referring);
661 gcc_assert (vnode->definition);
662 /* It is better to couple variables with their users, because it allows them
663 to be removed. Coupling with objects they refer to only helps to reduce
664 number of symbols promoted to hidden. */
665 if (!symbol_partitioned_p (vnode) && flag_toplevel_reorder
666 && !vnode->no_reorder
667 && !vnode->can_remove_if_no_refs_p ()
668 && vnode->get_partitioning_class () == SYMBOL_PARTITION)
669 add_symbol_to_partition (partition, vnode);
670 index = lto_symtab_encoder_lookup (partition->encoder,
671 vnode);
672 if (index != LCC_NOT_FOUND
673 && index < last_visited_node - 1)
674 cost--;
675 else
676 cost++;
678 else
680 int index;
682 node = dyn_cast <cgraph_node *> (ref->referring);
683 gcc_assert (node->definition);
684 index = lto_symtab_encoder_lookup (partition->encoder,
685 node);
686 if (index != LCC_NOT_FOUND
687 && index < last_visited_node - 1)
688 cost--;
689 else
690 cost++;
694 /* If the partition is large enough, start looking for smallest boundary cost. */
695 if (partition->insns < partition_size * 3 / 4
696 || best_cost == INT_MAX
697 || ((!cost
698 || (best_internal * (HOST_WIDE_INT) cost
699 > (internal * (HOST_WIDE_INT)best_cost)))
700 && partition->insns < partition_size * 5 / 4))
702 best_cost = cost;
703 best_internal = internal;
704 best_i = i;
705 best_n_nodes = lto_symtab_encoder_size (partition->encoder);
706 best_total_size = total_size;
707 best_varpool_pos = varpool_pos;
709 if (symtab->dump_file)
710 fprintf (symtab->dump_file, "Step %i: added %s/%i, size %i, cost %i/%i "
711 "best %i/%i, step %i\n", i,
712 order[i]->name (), order[i]->order,
713 partition->insns, cost, internal,
714 best_cost, best_internal, best_i);
715 /* Partition is too large, unwind into step when best cost was reached and
716 start new partition. */
717 if (partition->insns > 2 * partition_size)
719 if (best_i != i)
721 if (symtab->dump_file)
722 fprintf (symtab->dump_file, "Unwinding %i insertions to step %i\n",
723 i - best_i, best_i);
724 undo_partition (partition, best_n_nodes);
725 varpool_pos = best_varpool_pos;
727 i = best_i;
728 /* When we are finished, avoid creating empty partition. */
729 while (i < n_nodes - 1 && symbol_partitioned_p (order[i + 1]))
730 i++;
731 if (i == n_nodes - 1)
732 break;
733 partition = new_partition ("");
734 last_visited_node = 0;
735 total_size = best_total_size;
736 cost = 0;
738 if (symtab->dump_file)
739 fprintf (symtab->dump_file, "New partition\n");
740 best_n_nodes = 0;
741 best_cost = INT_MAX;
743 /* Since the size of partitions is just approximate, update the size after
744 we finished current one. */
745 if (npartitions < n_lto_partitions)
746 partition_size = total_size / (n_lto_partitions - npartitions);
747 else
748 partition_size = INT_MAX;
750 if (partition_size < PARAM_VALUE (MIN_PARTITION_SIZE))
751 partition_size = PARAM_VALUE (MIN_PARTITION_SIZE);
752 npartitions ++;
756 next_nodes.truncate (0);
758 /* Variables that are not reachable from the code go into last partition. */
759 if (flag_toplevel_reorder)
761 FOR_EACH_VARIABLE (vnode)
762 if (vnode->get_partitioning_class () == SYMBOL_PARTITION
763 && !symbol_partitioned_p (vnode)
764 && !vnode->no_reorder)
765 next_nodes.safe_push (vnode);
768 /* Output remaining ordered symbols. */
769 while (varpool_pos < n_varpool_nodes)
770 next_nodes.safe_push (varpool_order[varpool_pos++]);
771 while (noreorder_pos < (int)noreorder.length ())
772 next_nodes.safe_push (noreorder[noreorder_pos++]);
773 add_sorted_nodes (next_nodes, partition);
775 free (order);
778 /* Mangle NODE symbol name into a local name.
779 This is necessary to do
780 1) if two or more static vars of same assembler name
781 are merged into single ltrans unit.
782 2) if previously static var was promoted hidden to avoid possible conflict
783 with symbols defined out of the LTO world.
   Return true if the name was changed.  */
786 static bool
787 privatize_symbol_name (symtab_node *node)
789 tree decl = node->decl;
790 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
791 const char *name;
793 /* If we want to privatize instrumentation clone
794 then we need to change original function name
795 which is used via transparent alias chain. */
796 if (cnode && cnode->instrumentation_clone)
797 decl = cnode->orig_decl;
799 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
801 /* Our renaming machinery does not handle more than one change of assembler name.
802 We should not need more than one anyway. */
803 if (node->lto_file_data
804 && lto_get_decl_name_mapping (node->lto_file_data, name) != name)
806 if (symtab->dump_file)
807 fprintf (symtab->dump_file,
808 "Not privatizing symbol name: %s. It privatized already.\n",
809 name);
810 return false;
812 /* Avoid mangling of already mangled clones.
813 ??? should have a flag whether a symbol has a 'private' name already,
814 since we produce some symbols like that i.e. for global constructors
815 that are not really clones. */
816 if (node->unique_name)
818 if (symtab->dump_file)
819 fprintf (symtab->dump_file,
820 "Not privatizing symbol name: %s. Has unique name.\n",
821 name);
822 return false;
824 symtab->change_decl_assembler_name (decl,
825 clone_function_name (decl, "lto_priv"));
/* Record the rename so that later streaming can map the old name back.  */
826 if (node->lto_file_data)
827 lto_record_renamed_decl (node->lto_file_data, name,
828 IDENTIFIER_POINTER
829 (DECL_ASSEMBLER_NAME (decl)));
830 /* We could change name which is a target of transparent alias
831 chain of instrumented function name. Fix alias chain if so.  */
832 if (cnode)
834 tree iname = NULL_TREE;
835 if (cnode->instrumentation_clone)
836 iname = DECL_ASSEMBLER_NAME (cnode->decl);
837 else if (cnode->instrumented_version
838 && cnode->instrumented_version->orig_decl == decl)
839 iname = DECL_ASSEMBLER_NAME (cnode->instrumented_version->decl);
841 if (iname)
843 gcc_assert (IDENTIFIER_TRANSPARENT_ALIAS (iname));
844 TREE_CHAIN (iname) = DECL_ASSEMBLER_NAME (decl);
847 if (symtab->dump_file)
848 fprintf (symtab->dump_file,
849 "Privatizing symbol name: %s -> %s\n",
850 name, IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));
851 return true;
854 /* Promote variable VNODE to be static. */
856 static void
857 promote_symbol (symtab_node *node)
859 /* We already promoted ... */
860 if (DECL_VISIBILITY (node->decl) == VISIBILITY_HIDDEN
861 && DECL_VISIBILITY_SPECIFIED (node->decl)
862 && TREE_PUBLIC (node->decl))
863 return;
865 gcc_checking_assert (!TREE_PUBLIC (node->decl)
866 && !DECL_EXTERNAL (node->decl));
867 /* Be sure that newly public symbol does not conflict with anything already
868 defined by the non-LTO part. */
869 privatize_symbol_name (node);
870 TREE_PUBLIC (node->decl) = 1;
871 DECL_VISIBILITY (node->decl) = VISIBILITY_HIDDEN;
872 DECL_VISIBILITY_SPECIFIED (node->decl) = true;
873 if (symtab->dump_file)
874 fprintf (symtab->dump_file,
875 "Promoting as hidden: %s\n", node->name ());
878 /* Return true if NODE needs named section even if it won't land in the partition
879 symbol table.
880 FIXME: we should really not use named sections for inline clones and master clones. */
882 static bool
883 may_need_named_section_p (lto_symtab_encoder_t encoder, symtab_node *node)
885 struct cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
886 if (!cnode)
887 return false;
888 if (node->real_symbol_p ())
889 return false;
890 return (!encoder
891 || (lto_symtab_encoder_lookup (encoder, node) != LCC_NOT_FOUND
892 && lto_symtab_encoder_encode_body_p (encoder,
893 cnode)));
896 /* If NODE represents a static variable, see if there are other variables
897 of the same name in partition ENCODER (or in whole compilation unit if
898 ENCODER is NULL) and if so, mangle the statics. Always mangle all
899 conflicting statics, so we reduce chances of silently miscompiling
900 asm statements referring to them by symbol name. */
902 static void
903 rename_statics (lto_symtab_encoder_t encoder, symtab_node *node)
905 tree decl = node->decl;
906 symtab_node *s;
907 tree name = DECL_ASSEMBLER_NAME (decl);
909 /* See if this is static symbol. */
910 if ((node->externally_visible
911 /* FIXME: externally_visible is somewhat illogically not set for
912 external symbols (i.e. those not defined). Remove this test
913 once this is fixed. */
914 || DECL_EXTERNAL (node->decl)
915 || !node->real_symbol_p ())
916 && !may_need_named_section_p (encoder, node))
917 return;
919 /* Now walk symbols sharing the same name and see if there are any conflicts.
920 (all types of symbols counts here, since we can not have static of the
921 same name as external or public symbol.) */
922 for (s = symtab_node::get_for_asmname (name);
923 s; s = s->next_sharing_asm_name)
924 if ((s->real_symbol_p () || may_need_named_section_p (encoder, s))
925 && s->decl != node->decl
926 && (!encoder
927 || lto_symtab_encoder_lookup (encoder, s) != LCC_NOT_FOUND))
928 break;
930 /* OK, no conflict, so we have nothing to do. */
931 if (!s)
932 return;
934 if (symtab->dump_file)
935 fprintf (symtab->dump_file,
936 "Renaming statics with asm name: %s\n", node->name ());
938 /* Assign every symbol in the set that shares the same ASM name an unique
939 mangled name. */
940 for (s = symtab_node::get_for_asmname (name); s;)
941 if (!s->externally_visible
942 && ((s->real_symbol_p ()
/* NOTE(review): these two tests read NODE->decl rather than S->decl —
   confirm this is intended before changing anything here.  */
943 && !DECL_EXTERNAL (node->decl)
944 && !TREE_PUBLIC (node->decl))
945 || may_need_named_section_p (encoder, s))
946 && (!encoder
947 || lto_symtab_encoder_lookup (encoder, s) != LCC_NOT_FOUND))
949 if (privatize_symbol_name (s))
950 /* Re-start from beginning since we do not know how many symbols changed a name. */
951 s = symtab_node::get_for_asmname (name);
952 else s = s->next_sharing_asm_name;
954 else s = s->next_sharing_asm_name;
957 /* Find out all static decls that need to be promoted to global because
958 of cross file sharing. This function must be run in the WPA mode after
959 all inlinees are added. */
961 void
962 lto_promote_cross_file_statics (void)
964 unsigned i, n_sets;
966 gcc_assert (flag_wpa);
968 select_what_to_stream (false);
970 /* First compute boundaries. */
971 n_sets = ltrans_partitions.length ();
972 for (i = 0; i < n_sets; i++)
974 ltrans_partition part
975 = ltrans_partitions[i];
976 part->encoder = compute_ltrans_boundary (part->encoder);
979 /* Look at boundaries and promote symbols as needed. */
980 for (i = 0; i < n_sets; i++)
982 lto_symtab_encoder_iterator lsei;
983 lto_symtab_encoder_t encoder = ltrans_partitions[i]->encoder;
985 for (lsei = lsei_start (encoder); !lsei_end_p (lsei);
986 lsei_next (&lsei))
988 symtab_node *node = lsei_node (lsei);
990 /* If symbol is static, rename it if its assembler name clash with
991 anything else in this unit. */
992 rename_statics (encoder, node);
994 /* No need to promote if symbol already is externally visible ... */
995 if (node->externally_visible
996 /* ... or if it is part of current partition ... */
997 || lto_symtab_encoder_in_partition_p (encoder, node)
998 /* ... or if we do not partition it. This mean that it will
999 appear in every partition refernecing it. */
1000 || node->get_partitioning_class () != SYMBOL_PARTITION)
1001 continue;
1003 promote_symbol (node);
1008 /* Rename statics in the whole unit in the case that
1009 we do -flto-partition=none. */
1011 void
1012 lto_promote_statics_nonwpa (void)
1014 symtab_node *node;
1015 FOR_EACH_SYMBOL (node)
1016 rename_statics (NULL, node);