svn merge -r 216846:217483 svn+ssh://gcc.gnu.org/svn/gcc/trunk
[official-gcc.git] / gcc / lto / lto-partition.c
blob14b2375226ceba1626e758c21a11cc6794a4f6e5
1 /* LTO partitioning logic routines.
2 Copyright (C) 2009-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "toplev.h"
24 #include "tree.h"
25 #include "predict.h"
26 #include "vec.h"
27 #include "hashtab.h"
28 #include "hash-set.h"
29 #include "machmode.h"
30 #include "tm.h"
31 #include "hard-reg-set.h"
32 #include "input.h"
33 #include "function.h"
34 #include "basic-block.h"
35 #include "tree-ssa-alias.h"
36 #include "internal-fn.h"
37 #include "gimple-expr.h"
38 #include "is-a.h"
39 #include "gimple.h"
40 #include "hash-map.h"
41 #include "plugin-api.h"
42 #include "ipa-ref.h"
43 #include "cgraph.h"
44 #include "lto-streamer.h"
45 #include "timevar.h"
46 #include "params.h"
47 #include "alloc-pool.h"
48 #include "ipa-prop.h"
49 #include "ipa-inline.h"
50 #include "ipa-utils.h"
51 #include "lto-partition.h"
/* All ltrans partitions created so far, in creation order.  */
vec<ltrans_partition> ltrans_partitions;

/* Forward declaration; defined later in this file.  */
static void add_symbol_to_partition (ltrans_partition part, symtab_node *node);
58 /* Create new partition with name NAME. */
60 static ltrans_partition
61 new_partition (const char *name)
63 ltrans_partition part = XCNEW (struct ltrans_partition_def);
64 part->encoder = lto_symtab_encoder_new (false);
65 part->name = name;
66 part->insns = 0;
67 ltrans_partitions.safe_push (part);
68 return part;
71 /* Free memory used by ltrans datastructures. */
73 void
74 free_ltrans_partitions (void)
76 unsigned int idx;
77 ltrans_partition part;
78 for (idx = 0; ltrans_partitions.iterate (idx, &part); idx++)
80 if (part->initializers_visited)
81 delete part->initializers_visited;
82 /* Symtab encoder is freed after streaming. */
83 free (part);
85 ltrans_partitions.release ();
88 /* Return true if symbol is already in some partition. */
90 static inline bool
91 symbol_partitioned_p (symtab_node *node)
93 return node->aux;
96 /* Add references into the partition. */
97 static void
98 add_references_to_partition (ltrans_partition part, symtab_node *node)
100 int i;
101 struct ipa_ref *ref = NULL;
103 /* Add all duplicated references to the partition. */
104 for (i = 0; node->iterate_reference (i, ref); i++)
105 if (ref->referred->get_partitioning_class () == SYMBOL_DUPLICATE)
106 add_symbol_to_partition (part, ref->referred);
107 /* References to a readonly variable may be constant foled into its value.
108 Recursively look into the initializers of the constant variable and add
109 references, too. */
110 else if (is_a <varpool_node *> (ref->referred)
111 && (dyn_cast <varpool_node *> (ref->referred)
112 ->ctor_useable_for_folding_p ()
113 || POINTER_BOUNDS_P (ref->referred->decl))
114 && !lto_symtab_encoder_in_partition_p (part->encoder, ref->referred))
116 if (!part->initializers_visited)
117 part->initializers_visited = new hash_set<symtab_node *>;
118 if (!part->initializers_visited->add (ref->referred))
119 add_references_to_partition (part, ref->referred);
/* Helper function for add_symbol_to_partition doing the actual dirty work
   of adding NODE to PART.  Returns true if NODE ended up in PART.  */

static bool
add_symbol_to_partition_1 (ltrans_partition part, symtab_node *node)
{
  enum symbol_partitioning_class c = node->get_partitioning_class ();
  struct ipa_ref *ref;
  symtab_node *node1;

  /* If NODE is already there, we have nothing to do.  */
  if (lto_symtab_encoder_in_partition_p (part->encoder, node))
    return true;

  /* Non-duplicated aliases or thunks of a duplicated symbol need to be output
     just once.

     Be lax about comdats; they may or may not be duplicated and we may
     end up in need to duplicate keyed comdat because it has unkeyed alias.  */
  if (c == SYMBOL_PARTITION && !DECL_COMDAT (node->decl)
      && symbol_partitioned_p (node))
    return false;

  /* Be sure that we never try to duplicate partitioned symbol
     or add external symbol.  */
  gcc_assert (c != SYMBOL_EXTERNAL
	      && (c == SYMBOL_DUPLICATE || !symbol_partitioned_p (node)));

  node->need_dump = true;
  lto_set_symtab_encoder_in_partition (part->encoder, node);

  /* A second (or later) placement means the symbol now lives in more than
     one partition; mark it so the streamer knows.  */
  if (symbol_partitioned_p (node))
    {
      node->in_other_partition = 1;
      if (symtab->dump_file)
	fprintf (symtab->dump_file,
		 "Symbol node %s now used in multiple partitions\n",
		 node->name ());
    }
  /* AUX holds the number of partitions containing NODE (see
     symbol_partitioned_p and undo_partition).  */
  node->aux = (void *)((size_t)node->aux + 1);

  if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
    {
      struct cgraph_edge *e;
      /* Aliases carry no body of their own, so only count real functions.  */
      if (!node->alias)
	part->insns += inline_summary (cnode)->self_size;

      /* Add all inline clones and callees that are duplicated.  */
      for (e = cnode->callees; e; e = e->next_callee)
	if (!e->inline_failed)
	  add_symbol_to_partition_1 (part, e->callee);
	else if (e->callee->get_partitioning_class () == SYMBOL_DUPLICATE)
	  add_symbol_to_partition (part, e->callee);

      /* Add all thunks associated with the function.  */
      for (e = cnode->callers; e; e = e->next_caller)
	if (e->caller->thunk.thunk_p)
	  add_symbol_to_partition_1 (part, e->caller);

      /* Instrumented version is actually the same function.
	 Therefore put it into the same partition.  */
      if (cnode->instrumented_version)
	add_symbol_to_partition_1 (part, cnode->instrumented_version);
    }

  add_references_to_partition (part, node);

  /* Add all aliases associated with the symbol.  */
  FOR_EACH_ALIAS (node, ref)
    if (!node->weakref)
      add_symbol_to_partition_1 (part, ref->referring);

  /* Ensure that SAME_COMDAT_GROUP lists are always added in a group.  */
  if (node->same_comdat_group)
    for (node1 = node->same_comdat_group;
	 node1 != node; node1 = node1->same_comdat_group)
      if (!node->alias)
	{
	  bool added = add_symbol_to_partition_1 (part, node1);
	  gcc_assert (added);
	}
  return true;
}
208 /* If symbol NODE is really part of other symbol's definition (i.e. it is
209 internal label, thunk, alias or so), return the outer symbol.
210 When add_symbol_to_partition_1 is called on the outer symbol it must
211 eventually add NODE, too. */
212 static symtab_node *
213 contained_in_symbol (symtab_node *node)
215 /* Weakrefs are never contained in anything. */
216 if (node->weakref)
217 return node;
218 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
220 cnode = cnode->function_symbol ();
221 if (cnode->global.inlined_to)
222 cnode = cnode->global.inlined_to;
223 return cnode;
225 else if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
226 return vnode->ultimate_alias_target ();
227 return node;
230 /* Add symbol NODE to partition. When definition of NODE is part
231 of other symbol definition, add the other symbol, too. */
233 static void
234 add_symbol_to_partition (ltrans_partition part, symtab_node *node)
236 symtab_node *node1;
238 /* Verify that we do not try to duplicate something that can not be. */
239 gcc_checking_assert (node->get_partitioning_class () == SYMBOL_DUPLICATE
240 || !symbol_partitioned_p (node));
242 while ((node1 = contained_in_symbol (node)) != node)
243 node = node1;
245 /* If we have duplicated symbol contained in something we can not duplicate,
246 we are very badly screwed. The other way is possible, so we do not
247 assert this in add_symbol_to_partition_1.
249 Be lax about comdats; they may or may not be duplicated and we may
250 end up in need to duplicate keyed comdat because it has unkeyed alias. */
252 gcc_assert (node->get_partitioning_class () == SYMBOL_DUPLICATE
253 || DECL_COMDAT (node->decl)
254 || !symbol_partitioned_p (node));
256 add_symbol_to_partition_1 (part, node);
259 /* Undo all additions until number of cgraph nodes in PARITION is N_CGRAPH_NODES
260 and number of varpool nodes is N_VARPOOL_NODES. */
262 static void
263 undo_partition (ltrans_partition partition, unsigned int n_nodes)
265 while (lto_symtab_encoder_size (partition->encoder) > (int)n_nodes)
267 symtab_node *node = lto_symtab_encoder_deref (partition->encoder,
268 n_nodes);
269 cgraph_node *cnode;
271 /* After UNDO we no longer know what was visited. */
272 if (partition->initializers_visited)
273 delete partition->initializers_visited;
274 partition->initializers_visited = NULL;
276 if (!node->alias && (cnode = dyn_cast <cgraph_node *> (node)))
277 partition->insns -= inline_summary (cnode)->self_size;
278 lto_symtab_encoder_delete_node (partition->encoder, node);
279 node->aux = (void *)((size_t)node->aux - 1);
283 /* Group cgrah nodes by input files. This is used mainly for testing
284 right now. */
286 void
287 lto_1_to_1_map (void)
289 symtab_node *node;
290 struct lto_file_decl_data *file_data;
291 hash_map<lto_file_decl_data *, ltrans_partition> pmap;
292 ltrans_partition partition;
293 int npartitions = 0;
295 FOR_EACH_SYMBOL (node)
297 if (node->get_partitioning_class () != SYMBOL_PARTITION
298 || symbol_partitioned_p (node))
299 continue;
301 file_data = node->lto_file_data;
303 if (file_data)
305 ltrans_partition *slot = &pmap.get_or_insert (file_data);
306 if (*slot)
307 partition = *slot;
308 else
310 partition = new_partition (file_data->file_name);
311 *slot = partition;
312 npartitions++;
315 else if (!file_data && ltrans_partitions.length ())
316 partition = ltrans_partitions[0];
317 else
319 partition = new_partition ("");
320 pmap.put (NULL, partition);
321 npartitions++;
324 add_symbol_to_partition (partition, node);
327 /* If the cgraph is empty, create one cgraph node set so that there is still
328 an output file for any variables that need to be exported in a DSO. */
329 if (!npartitions)
330 new_partition ("empty");
334 /* Maximal partitioning. Put every new symbol into new partition if possible. */
336 void
337 lto_max_map (void)
339 symtab_node *node;
340 ltrans_partition partition;
341 int npartitions = 0;
343 FOR_EACH_SYMBOL (node)
345 if (node->get_partitioning_class () != SYMBOL_PARTITION
346 || symbol_partitioned_p (node))
347 continue;
348 partition = new_partition (node->asm_name ());
349 add_symbol_to_partition (partition, node);
350 npartitions++;
352 if (!npartitions)
353 new_partition ("empty");
356 /* Helper function for qsort; sort nodes by order. noreorder functions must have
357 been removed earlier. */
358 static int
359 node_cmp (const void *pa, const void *pb)
361 const struct cgraph_node *a = *(const struct cgraph_node * const *) pa;
362 const struct cgraph_node *b = *(const struct cgraph_node * const *) pb;
364 /* Profile reorder flag enables function reordering based on first execution
365 of a function. All functions with profile are placed in ascending
366 order at the beginning. */
368 if (flag_profile_reorder_functions)
370 /* Functions with time profile are sorted in ascending order. */
371 if (a->tp_first_run && b->tp_first_run)
372 return a->tp_first_run != b->tp_first_run
373 ? a->tp_first_run - b->tp_first_run
374 : a->order - b->order;
376 /* Functions with time profile are sorted before the functions
377 that do not have the profile. */
378 if (a->tp_first_run || b->tp_first_run)
379 return b->tp_first_run - a->tp_first_run;
382 return b->order - a->order;
385 /* Helper function for qsort; sort nodes by order. */
386 static int
387 varpool_node_cmp (const void *pa, const void *pb)
389 const symtab_node *a = *static_cast<const symtab_node * const *> (pa);
390 const symtab_node *b = *static_cast<const symtab_node * const *> (pb);
391 return b->order - a->order;
394 /* Add all symtab nodes from NEXT_NODE to PARTITION in order. */
396 static void
397 add_sorted_nodes (vec<symtab_node *> &next_nodes, ltrans_partition partition)
399 unsigned i;
400 symtab_node *node;
402 next_nodes.qsort (varpool_node_cmp);
403 FOR_EACH_VEC_ELT (next_nodes, i, node)
404 if (!symbol_partitioned_p (node))
405 add_symbol_to_partition (partition, node);
409 /* Group cgraph nodes into equally-sized partitions.
411 The partitioning algorithm is simple: nodes are taken in predefined order.
412 The order corresponds to the order we want functions to have in the final
413 output. In the future this will be given by function reordering pass, but
414 at the moment we use the topological order, which is a good approximation.
416 The goal is to partition this linear order into intervals (partitions) so
417 that all the partitions have approximately the same size and the number of
418 callgraph or IPA reference edges crossing boundaries is minimal.
420 This is a lot faster (O(n) in size of callgraph) than algorithms doing
421 priority-based graph clustering that are generally O(n^2) and, since
422 WHOPR is designed to make things go well across partitions, it leads
423 to good results.
425 We compute the expected size of a partition as:
427 max (total_size / lto_partitions, min_partition_size)
429 We use dynamic expected size of partition so small programs are partitioned
430 into enough partitions to allow use of multiple CPUs, while large programs
431 are not partitioned too much. Creating too many partitions significantly
432 increases the streaming overhead.
434 In the future, we would like to bound the maximal size of partitions so as
435 to prevent the LTRANS stage from consuming too much memory. At the moment,
436 however, the WPA stage is the most memory intensive for large benchmarks,
437 since too many types and declarations are read into memory.
439 The function implements a simple greedy algorithm. Nodes are being added
440 to the current partition until after 3/4 of the expected partition size is
441 reached. Past this threshold, we keep track of boundary size (number of
442 edges going to other partitions) and continue adding functions until after
443 the current partition has grown to twice the expected partition size. Then
444 the process is undone to the point where the minimal ratio of boundary size
445 and in-partition calls was reached. */
void
lto_balanced_map (int n_lto_partitions)
{
  int n_nodes = 0;
  int n_varpool_nodes = 0, varpool_pos = 0, best_varpool_pos = 0;
  struct cgraph_node **order = XNEWVEC (cgraph_node *, symtab->cgraph_max_uid);
  auto_vec<cgraph_node *> noreorder;
  auto_vec<varpool_node *> varpool_order;
  int i;
  struct cgraph_node *node;
  int total_size = 0, best_total_size = 0;
  int partition_size;
  ltrans_partition partition;
  int last_visited_node = 0;
  varpool_node *vnode;
  /* COST is the boundary cost of the current partition; INTERNAL counts
     edges fully inside it.  The BEST_* variables snapshot the state at the
     best boundary/internal ratio seen so far so we can unwind to it.  */
  int cost = 0, internal = 0;
  int best_n_nodes = 0, best_i = 0, best_cost =
    INT_MAX, best_internal = 0;
  int npartitions;
  int current_order = -1;
  int noreorder_pos = 0;

  FOR_EACH_VARIABLE (vnode)
    gcc_assert (!vnode->aux);

  /* Split functions into the reorderable ORDER array and the NOREORDER
     vector; TOTAL_SIZE accumulates the whole unit's size estimate.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    if (node->get_partitioning_class () == SYMBOL_PARTITION)
      {
	if (node->no_reorder)
	  noreorder.safe_push (node);
	else
	  order[n_nodes++] = node;
	if (!node->alias)
	  total_size += inline_summary (node)->size;
      }

  /* Streaming works best when the source units do not cross partition
     boundaries much.  This is because importing function from a source
     unit tends to import a lot of global trees defined there.  We should
     get better about minimizing the function boundary, but until that
     things works smoother if we order in source order.  */
  qsort (order, n_nodes, sizeof (struct cgraph_node *), node_cmp);
  noreorder.qsort (node_cmp);

  if (symtab->dump_file)
    {
      for(i = 0; i < n_nodes; i++)
	fprintf (symtab->dump_file, "Balanced map symbol order:%s:%u\n",
		 order[i]->name (), order[i]->tp_first_run);
      for(i = 0; i < (int)noreorder.length(); i++)
	fprintf (symtab->dump_file, "Balanced map symbol no_reorder:%s:%u\n",
		 noreorder[i]->name (), noreorder[i]->tp_first_run);
    }

  /* Collect all variables that should not be reordered.  */
  FOR_EACH_VARIABLE (vnode)
    if (vnode->get_partitioning_class () == SYMBOL_PARTITION
	&& (!flag_toplevel_reorder || vnode->no_reorder))
      varpool_order.safe_push (vnode);
  n_varpool_nodes = varpool_order.length ();
  varpool_order.qsort (varpool_node_cmp);

  /* Compute partition size and create the first partition.  */
  partition_size = total_size / n_lto_partitions;
  if (partition_size < PARAM_VALUE (MIN_PARTITION_SIZE))
    partition_size = PARAM_VALUE (MIN_PARTITION_SIZE);
  npartitions = 1;
  partition = new_partition ("");
  if (symtab->dump_file)
    fprintf (symtab->dump_file, "Total unit size: %i, partition size: %i\n",
	     total_size, partition_size);

  auto_vec<symtab_node *> next_nodes;

  for (i = 0; i < n_nodes; i++)
    {
      if (symbol_partitioned_p (order[i]))
	continue;

      current_order = order[i]->order;

      /* Output noreorder and varpool in program order first.  */
      next_nodes.truncate (0);
      while (varpool_pos < n_varpool_nodes
	     && varpool_order[varpool_pos]->order < current_order)
	next_nodes.safe_push (varpool_order[varpool_pos++]);
      while (noreorder_pos < (int)noreorder.length ()
	     && noreorder[noreorder_pos]->order < current_order)
	{
	  if (!noreorder[noreorder_pos]->alias)
	    total_size -= inline_summary (noreorder[noreorder_pos])->size;
	  next_nodes.safe_push (noreorder[noreorder_pos++]);
	}
      add_sorted_nodes (next_nodes, partition);

      add_symbol_to_partition (partition, order[i]);
      if (!order[i]->alias)
	total_size -= inline_summary (order[i])->size;

      /* Once we added a new node to the partition, we also want to add
	 all referenced variables unless they were already added into some
	 earlier partition.
	 add_symbol_to_partition adds possibly multiple nodes and
	 variables that are needed to satisfy needs of ORDER[i].
	 We remember last visited cgraph and varpool node from last iteration
	 of outer loop that allows us to process every new addition.

	 At the same time we compute size of the boundary into COST.  Every
	 callgraph or IPA reference edge leaving the partition contributes into
	 COST.  Every edge inside partition was earlier computed as one leaving
	 it and thus we need to subtract it from COST.  */
      while (last_visited_node < lto_symtab_encoder_size (partition->encoder))
	{
	  symtab_node *refs_node;
	  int j;
	  struct ipa_ref *ref = NULL;
	  symtab_node *snode = lto_symtab_encoder_deref (partition->encoder,
							 last_visited_node);

	  if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
	    {
	      struct cgraph_edge *edge;

	      refs_node = node;

	      last_visited_node++;

	      gcc_assert (node->definition || node->weakref);

	      /* Compute boundary cost of callgraph edges.  */
	      for (edge = node->callees; edge; edge = edge->next_callee)
		if (edge->callee->definition)
		  {
		    int edge_cost = edge->frequency;
		    int index;

		    if (!edge_cost)
		      edge_cost = 1;
		    gcc_assert (edge_cost > 0);
		    index = lto_symtab_encoder_lookup (partition->encoder,
						       edge->callee);
		    /* An index below LAST_VISITED_NODE - 1 means the callee
		       was already counted as leaving the partition; turn the
		       edge into an internal one.  */
		    if (index != LCC_NOT_FOUND
			&& index < last_visited_node - 1)
		      cost -= edge_cost, internal += edge_cost;
		    else
		      cost += edge_cost;
		  }
	      for (edge = node->callers; edge; edge = edge->next_caller)
		{
		  int edge_cost = edge->frequency;
		  int index;

		  gcc_assert (edge->caller->definition);
		  if (!edge_cost)
		    edge_cost = 1;
		  gcc_assert (edge_cost > 0);
		  index = lto_symtab_encoder_lookup (partition->encoder,
						     edge->caller);
		  if (index != LCC_NOT_FOUND
		      && index < last_visited_node - 1)
		    cost -= edge_cost;
		  else
		    cost += edge_cost;
		}
	    }
	  else
	    {
	      refs_node = snode;
	      last_visited_node++;
	    }

	  /* Compute boundary cost of IPA REF edges and at the same time look into
	     variables referenced from current partition and try to add them.  */
	  for (j = 0; refs_node->iterate_reference (j, ref); j++)
	    if (is_a <varpool_node *> (ref->referred))
	      {
		int index;

		vnode = dyn_cast <varpool_node *> (ref->referred);
		if (!vnode->definition)
		  continue;
		if (!symbol_partitioned_p (vnode) && flag_toplevel_reorder
		    && !vnode->no_reorder
		    && vnode->get_partitioning_class () == SYMBOL_PARTITION)
		  add_symbol_to_partition (partition, vnode);
		index = lto_symtab_encoder_lookup (partition->encoder,
						   vnode);
		if (index != LCC_NOT_FOUND
		    && index < last_visited_node - 1)
		  cost--, internal++;
		else
		  cost++;
	      }
	    else
	      {
		int index;

		node = dyn_cast <cgraph_node *> (ref->referred);
		if (!node->definition)
		  continue;
		index = lto_symtab_encoder_lookup (partition->encoder,
						   node);
		if (index != LCC_NOT_FOUND
		    && index < last_visited_node - 1)
		  cost--, internal++;
		else
		  cost++;
	      }
	  for (j = 0; refs_node->iterate_referring (j, ref); j++)
	    if (is_a <varpool_node *> (ref->referring))
	      {
		int index;

		vnode = dyn_cast <varpool_node *> (ref->referring);
		gcc_assert (vnode->definition);
		/* It is better to couple variables with their users, because it allows them
		   to be removed.  Coupling with objects they refer to only helps to reduce
		   number of symbols promoted to hidden.  */
		if (!symbol_partitioned_p (vnode) && flag_toplevel_reorder
		    && !vnode->no_reorder
		    && !vnode->can_remove_if_no_refs_p ()
		    && vnode->get_partitioning_class () == SYMBOL_PARTITION)
		  add_symbol_to_partition (partition, vnode);
		index = lto_symtab_encoder_lookup (partition->encoder,
						   vnode);
		if (index != LCC_NOT_FOUND
		    && index < last_visited_node - 1)
		  cost--;
		else
		  cost++;
	      }
	    else
	      {
		int index;

		node = dyn_cast <cgraph_node *> (ref->referring);
		gcc_assert (node->definition);
		index = lto_symtab_encoder_lookup (partition->encoder,
						   node);
		if (index != LCC_NOT_FOUND
		    && index < last_visited_node - 1)
		  cost--;
		else
		  cost++;
	      }
	}

      /* If the partition is large enough, start looking for smallest boundary cost.  */
      if (partition->insns < partition_size * 3 / 4
	  || best_cost == INT_MAX
	  || ((!cost
	       || (best_internal * (HOST_WIDE_INT) cost
		   > (internal * (HOST_WIDE_INT)best_cost)))
	      && partition->insns < partition_size * 5 / 4))
	{
	  best_cost = cost;
	  best_internal = internal;
	  best_i = i;
	  best_n_nodes = lto_symtab_encoder_size (partition->encoder);
	  best_total_size = total_size;
	  best_varpool_pos = varpool_pos;
	}
      if (symtab->dump_file)
	fprintf (symtab->dump_file, "Step %i: added %s/%i, size %i, cost %i/%i "
		 "best %i/%i, step %i\n", i,
		 order[i]->name (), order[i]->order,
		 partition->insns, cost, internal,
		 best_cost, best_internal, best_i);
      /* Partition is too large, unwind into step when best cost was reached and
	 start new partition.  */
      if (partition->insns > 2 * partition_size)
	{
	  if (best_i != i)
	    {
	      if (symtab->dump_file)
		fprintf (symtab->dump_file, "Unwinding %i insertions to step %i\n",
			 i - best_i, best_i);
	      undo_partition (partition, best_n_nodes);
	      varpool_pos = best_varpool_pos;
	    }
	  i = best_i;
	  /* When we are finished, avoid creating empty partition.  */
	  while (i < n_nodes - 1 && symbol_partitioned_p (order[i + 1]))
	    i++;
	  if (i == n_nodes - 1)
	    break;
	  partition = new_partition ("");
	  last_visited_node = 0;
	  total_size = best_total_size;
	  cost = 0;

	  if (symtab->dump_file)
	    fprintf (symtab->dump_file, "New partition\n");
	  best_n_nodes = 0;
	  best_cost = INT_MAX;

	  /* Since the size of partitions is just approximate, update the size after
	     we finished current one.  */
	  if (npartitions < n_lto_partitions)
	    partition_size = total_size / (n_lto_partitions - npartitions);
	  else
	    partition_size = INT_MAX;

	  if (partition_size < PARAM_VALUE (MIN_PARTITION_SIZE))
	    partition_size = PARAM_VALUE (MIN_PARTITION_SIZE);
	  npartitions ++;
	}
    }

  next_nodes.truncate (0);

  /* Variables that are not reachable from the code go into last partition.  */
  if (flag_toplevel_reorder)
    {
      FOR_EACH_VARIABLE (vnode)
	if (vnode->get_partitioning_class () == SYMBOL_PARTITION
	    && !symbol_partitioned_p (vnode)
	    && !vnode->no_reorder)
	  next_nodes.safe_push (vnode);
    }

  /* Output remaining ordered symbols.  */
  while (varpool_pos < n_varpool_nodes)
    next_nodes.safe_push (varpool_order[varpool_pos++]);
  while (noreorder_pos < (int)noreorder.length ())
    next_nodes.safe_push (noreorder[noreorder_pos++]);
  add_sorted_nodes (next_nodes, partition);

  free (order);
}
/* Mangle NODE symbol name into a local name.
   This is necessary to do
   1) if two or more static vars of same assembler name
      are merged into single ltrans unit.
   2) if previously static var was promoted hidden to avoid possible conflict
      with symbols defined out of the LTO world.

   Returns true when the assembler name was actually changed.  */

static bool
privatize_symbol_name (symtab_node *node)
{
  tree decl = node->decl;
  const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
  cgraph_node *cnode;

  /* Our renaming machinery does not handle more than one change of assembler
     name.  We should not need more than one anyway.  */
  if (node->lto_file_data
      && lto_get_decl_name_mapping (node->lto_file_data, name) != name)
    {
      if (symtab->dump_file)
	fprintf (symtab->dump_file,
		 "Not privatizing symbol name: %s. It privatized already.\n",
		 name);
      return false;
    }
  /* Avoid mangling of already mangled clones.
     ??? should have a flag whether a symbol has a 'private' name already,
     since we produce some symbols like that i.e. for global constructors
     that are not really clones.  */
  if (node->unique_name)
    {
      if (symtab->dump_file)
	fprintf (symtab->dump_file,
		 "Not privatizing symbol name: %s. Has unique name.\n",
		 name);
      return false;
    }
  symtab->change_decl_assembler_name (decl,
				      clone_function_name (decl, "lto_priv"));
  /* Record the rename so later streaming can map the old name to the new.  */
  if (node->lto_file_data)
    lto_record_renamed_decl (node->lto_file_data, name,
			     IDENTIFIER_POINTER
			     (DECL_ASSEMBLER_NAME (decl)));
  /* We could change name which is a target of transparent alias
     chain of instrumented function name.  Fix the alias chain if so.  */
  if ((cnode = dyn_cast <cgraph_node *> (node))
      && !cnode->instrumentation_clone
      && cnode->instrumented_version
      && cnode->instrumented_version->orig_decl == decl)
    {
      tree iname = DECL_ASSEMBLER_NAME (cnode->instrumented_version->decl);

      gcc_assert (IDENTIFIER_TRANSPARENT_ALIAS (iname));
      TREE_CHAIN (iname) = DECL_ASSEMBLER_NAME (decl);
    }
  if (symtab->dump_file)
    fprintf (symtab->dump_file,
	     "Privatizing symbol name: %s -> %s\n",
	     name, IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));
  return true;
}
842 /* Promote variable VNODE to be static. */
844 static void
845 promote_symbol (symtab_node *node)
847 /* We already promoted ... */
848 if (DECL_VISIBILITY (node->decl) == VISIBILITY_HIDDEN
849 && DECL_VISIBILITY_SPECIFIED (node->decl)
850 && TREE_PUBLIC (node->decl))
851 return;
853 gcc_checking_assert (!TREE_PUBLIC (node->decl)
854 && !DECL_EXTERNAL (node->decl));
855 /* Be sure that newly public symbol does not conflict with anything already
856 defined by the non-LTO part. */
857 privatize_symbol_name (node);
858 TREE_PUBLIC (node->decl) = 1;
859 DECL_VISIBILITY (node->decl) = VISIBILITY_HIDDEN;
860 DECL_VISIBILITY_SPECIFIED (node->decl) = true;
861 if (symtab->dump_file)
862 fprintf (symtab->dump_file,
863 "Promoting as hidden: %s\n", node->name ());
866 /* Return true if NODE needs named section even if it won't land in the partition
867 symbol table.
868 FIXME: we should really not use named sections for inline clones and master clones. */
870 static bool
871 may_need_named_section_p (lto_symtab_encoder_t encoder, symtab_node *node)
873 struct cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
874 if (!cnode)
875 return false;
876 if (node->real_symbol_p ())
877 return false;
878 return (!encoder
879 || (lto_symtab_encoder_lookup (encoder, node) != LCC_NOT_FOUND
880 && lto_symtab_encoder_encode_body_p (encoder,
881 cnode)));
/* If NODE represents a static variable.  See if there are other variables
   of the same name in partition ENCODER (or in whole compilation unit if
   ENCODER is NULL) and if so, mangle the statics.  Always mangle all
   conflicting statics, so we reduce chances of silently miscompiling
   asm statements referring to them by symbol name.  */

static void
rename_statics (lto_symtab_encoder_t encoder, symtab_node *node)
{
  tree decl = node->decl;
  symtab_node *s;
  tree name = DECL_ASSEMBLER_NAME (decl);

  /* See if this is static symbol.  */
  if ((node->externally_visible
       /* FIXME: externally_visible is somewhat illogically not set for
	  external symbols (i.e. those not defined).  Remove this test
	  once this is fixed.  */
       || DECL_EXTERNAL (node->decl)
       || !node->real_symbol_p ())
      && !may_need_named_section_p (encoder, node))
    return;

  /* Now walk symbols sharing the same name and see if there are any conflicts.
     (all types of symbols counts here, since we can not have static of the
     same name as external or public symbol.)  */
  for (s = symtab_node::get_for_asmname (name);
       s; s = s->next_sharing_asm_name)
    if ((s->real_symbol_p () || may_need_named_section_p (encoder, s))
	&& s->decl != node->decl
	&& (!encoder
	    || lto_symtab_encoder_lookup (encoder, s) != LCC_NOT_FOUND))
      break;

  /* OK, no conflict, so we have nothing to do.  */
  if (!s)
    return;

  if (symtab->dump_file)
    fprintf (symtab->dump_file,
	    "Renaming statics with asm name: %s\n", node->name ());

  /* Assign every symbol in the set that shares the same ASM name an unique
     mangled name.  */
  for (s = symtab_node::get_for_asmname (name); s;)
    if (!s->externally_visible
	&& ((s->real_symbol_p ()
	     /* NOTE(review): these two checks test NODE's decl rather than
		S's.  All symbols in the chain share the same assembler name,
		but DECL_EXTERNAL/TREE_PUBLIC are per-decl — confirm against
		upstream whether S->decl was intended here.  */
	     && !DECL_EXTERNAL (node->decl)
	     && !TREE_PUBLIC (node->decl))
	    || may_need_named_section_p (encoder, s))
	&& (!encoder
	    || lto_symtab_encoder_lookup (encoder, s) != LCC_NOT_FOUND))
      {
	if (privatize_symbol_name (s))
	  /* Re-start from beginning since we do not know how many symbols changed a name.  */
	  s = symtab_node::get_for_asmname (name);
	else s = s->next_sharing_asm_name;
      }
    else s = s->next_sharing_asm_name;
}
945 /* Find out all static decls that need to be promoted to global because
946 of cross file sharing. This function must be run in the WPA mode after
947 all inlinees are added. */
949 void
950 lto_promote_cross_file_statics (void)
952 unsigned i, n_sets;
954 gcc_assert (flag_wpa);
956 select_what_to_dump (false);
958 /* First compute boundaries. */
959 n_sets = ltrans_partitions.length ();
960 for (i = 0; i < n_sets; i++)
962 ltrans_partition part
963 = ltrans_partitions[i];
964 part->encoder = compute_ltrans_boundary (part->encoder);
967 /* Look at boundaries and promote symbols as needed. */
968 for (i = 0; i < n_sets; i++)
970 lto_symtab_encoder_iterator lsei;
971 lto_symtab_encoder_t encoder = ltrans_partitions[i]->encoder;
973 for (lsei = lsei_start (encoder); !lsei_end_p (lsei);
974 lsei_next (&lsei))
976 symtab_node *node = lsei_node (lsei);
978 /* If symbol is static, rename it if its assembler name clash with
979 anything else in this unit. */
980 rename_statics (encoder, node);
982 /* No need to promote if symbol already is externally visible ... */
983 if (node->externally_visible
984 /* ... or if it is part of current partition ... */
985 || lto_symtab_encoder_in_partition_p (encoder, node)
986 /* ... or if we do not partition it. This mean that it will
987 appear in every partition refernecing it. */
988 || node->get_partitioning_class () != SYMBOL_PARTITION)
989 continue;
991 promote_symbol (node);
996 /* Rename statics in the whole unit in the case that
997 we do -flto-partition=none. */
999 void
1000 lto_promote_statics_nonwpa (void)
1002 symtab_node *node;
1003 FOR_EACH_SYMBOL (node)
1004 rename_statics (NULL, node);