[official-gcc.git] / gcc / lto / lto-partition.c
blob 5b46af9d9074234207447d6c245eb26d1c48f3bb
1 /* LTO partitioning logic routines.
2 Copyright (C) 2009-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "toplev.h"
24 #include "tree.h"
25 #include "gcc-symtab.h"
26 #include "basic-block.h"
27 #include "tree-ssa-alias.h"
28 #include "internal-fn.h"
29 #include "gimple-expr.h"
30 #include "is-a.h"
31 #include "gimple.h"
32 #include "tm.h"
33 #include "cgraph.h"
34 #include "lto-streamer.h"
35 #include "timevar.h"
36 #include "params.h"
37 #include "ipa-inline.h"
38 #include "ipa-utils.h"
39 #include "lto-partition.h"
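/* The partitioning entry points below (lto_1_to_1_map, lto_max_map and
   lto_balanced_map) are chosen by the WPA driver according to the
   -flto-partition= option; lto_balanced_map implements the default
   "balanced" mode.  */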
 41 /* Classification of symbols into classes WRT partitioning.  */
42 enum symbol_class
44 /* External declarations are ignored by partitioning algorithms and they are
45 added into the boundary later via compute_ltrans_boundary. */
46 SYMBOL_EXTERNAL,
 47   /* Partitioned symbols are put into one of the partitions.  */
48 SYMBOL_PARTITION,
49 /* Duplicated symbols (such as comdat or constant pool references) are
50 copied into every node needing them via add_symbol_to_partition. */
51 SYMBOL_DUPLICATE
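  /* For example: a declaration without a body is SYMBOL_EXTERNAL, an
     ordinary defined function or variable is SYMBOL_PARTITION, and an
     inline clone, weakref alias or constant pool entry is SYMBOL_DUPLICATE.
     get_symbol_class below gives the authoritative rules.  */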
54 vec<ltrans_partition> ltrans_partitions;
56 static void add_symbol_to_partition (ltrans_partition part, symtab_node *node);
58 /* Classify symbol NODE. */
60 enum symbol_class
61 get_symbol_class (symtab_node *node)
63 /* Inline clones are always duplicated.
 64      This includes external declarations.  */
65 cgraph_node *cnode = dyn_cast <cgraph_node> (node);
67 if (DECL_ABSTRACT (node->decl))
68 return SYMBOL_EXTERNAL;
70 if (cnode && cnode->global.inlined_to)
71 return SYMBOL_DUPLICATE;
73 /* Weakref aliases are always duplicated. */
74 if (node->weakref)
75 return SYMBOL_DUPLICATE;
77 /* External declarations are external. */
78 if (DECL_EXTERNAL (node->decl))
79 return SYMBOL_EXTERNAL;
81 if (varpool_node *vnode = dyn_cast <varpool_node> (node))
83 /* Constant pool references use local symbol names that can not
84 be promoted global. We should never put into a constant pool
85 objects that can not be duplicated across partitions. */
86 if (DECL_IN_CONSTANT_POOL (node->decl))
87 return SYMBOL_DUPLICATE;
88 gcc_checking_assert (vnode->definition);
90 /* Functions that are cloned may stay in callgraph even if they are unused.
 91      Handle them as external; compute_ltrans_boundary takes care to make
 92      the right things happen (i.e. to make them appear in the boundary, but
 93      with the body streamed, so the clone can be materialized).  */
94 else if (!cgraph (node)->definition)
95 return SYMBOL_EXTERNAL;
97 /* Comdats are duplicated to every use unless they are keyed.
98 Those do not need duplication. */
99 if (DECL_COMDAT (node->decl)
100 && !node->force_output
101 && !symtab_used_from_object_file_p (node))
102 return SYMBOL_DUPLICATE;
104 return SYMBOL_PARTITION;
107 /* Create new partition with name NAME. */
109 static ltrans_partition
110 new_partition (const char *name)
112 ltrans_partition part = XCNEW (struct ltrans_partition_def);
113 part->encoder = lto_symtab_encoder_new (false);
114 part->name = name;
115 part->insns = 0;
116 ltrans_partitions.safe_push (part);
117 return part;
120 /* Free memory used by ltrans data structures.  */
122 void
123 free_ltrans_partitions (void)
125 unsigned int idx;
126 ltrans_partition part;
127 for (idx = 0; ltrans_partitions.iterate (idx, &part); idx++)
129 if (part->initializers_visited)
130 pointer_set_destroy (part->initializers_visited);
131 /* Symtab encoder is freed after streaming. */
132 free (part);
134 ltrans_partitions.release ();
137 /* Return true if symbol is already in some partition. */
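/* NODE->aux is used as a counter of the partitions a symbol has been added
   to (incremented in add_symbol_to_partition_1 and decremented in
   undo_partition), so a non-NULL aux means the symbol is in at least one
   partition.  */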
139 static inline bool
140 symbol_partitioned_p (symtab_node *node)
142 return node->aux;
145 /* Add references into the partition. */
146 static void
147 add_references_to_partition (ltrans_partition part, symtab_node *node)
149 int i;
150 struct ipa_ref *ref;
152 /* Add all duplicated references to the partition. */
153 for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
154 if (get_symbol_class (ref->referred) == SYMBOL_DUPLICATE)
155 add_symbol_to_partition (part, ref->referred);
156     /* References to a readonly variable may be constant folded into its value.
157 Recursively look into the initializers of the constant variable and add
158 references, too. */
159 else if (is_a <varpool_node> (ref->referred)
160 && ctor_for_folding (ref->referred->decl) != error_mark_node
161 && !lto_symtab_encoder_in_partition_p (part->encoder, ref->referred))
163 if (!part->initializers_visited)
164 part->initializers_visited = pointer_set_create ();
165 if (!pointer_set_insert (part->initializers_visited, ref->referred))
166 add_references_to_partition (part, ref->referred);
170 /* Helper function for add_symbol_to_partition doing the actual dirty work
171 of adding NODE to PART. */
173 static bool
174 add_symbol_to_partition_1 (ltrans_partition part, symtab_node *node)
176 enum symbol_class c = get_symbol_class (node);
177 int i;
178 struct ipa_ref *ref;
179 symtab_node *node1;
181 /* If NODE is already there, we have nothing to do. */
182 if (lto_symtab_encoder_in_partition_p (part->encoder, node))
183 return true;
185   /* Non-duplicated aliases or thunks of a duplicated symbol need to be output
186      just once.
188      Be lax about comdats; they may or may not be duplicated and we may
189      end up needing to duplicate a keyed comdat because it has an unkeyed alias.  */
190 if (c == SYMBOL_PARTITION && !DECL_COMDAT (node->decl)
191 && symbol_partitioned_p (node))
192 return false;
194   /* Be sure that we never try to duplicate a partitioned symbol
195      or add an external symbol.  */
196 gcc_assert (c != SYMBOL_EXTERNAL
197 && (c == SYMBOL_DUPLICATE || !symbol_partitioned_p (node)));
199 lto_set_symtab_encoder_in_partition (part->encoder, node);
201 if (symbol_partitioned_p (node))
203 node->in_other_partition = 1;
204 if (cgraph_dump_file)
205 fprintf (cgraph_dump_file, "Symbol node %s now used in multiple partitions\n",
206 node->name ());
208 node->aux = (void *)((size_t)node->aux + 1);
210 if (cgraph_node *cnode = dyn_cast <cgraph_node> (node))
212 struct cgraph_edge *e;
213 part->insns += inline_summary (cnode)->self_size;
215 /* Add all inline clones and callees that are duplicated. */
216 for (e = cnode->callees; e; e = e->next_callee)
217 if (!e->inline_failed)
218 add_symbol_to_partition_1 (part, e->callee);
219 else if (get_symbol_class (e->callee) == SYMBOL_DUPLICATE)
220 add_symbol_to_partition (part, e->callee);
222 /* Add all thunks associated with the function. */
223 for (e = cnode->callers; e; e = e->next_caller)
224 if (e->caller->thunk.thunk_p)
225 add_symbol_to_partition_1 (part, e->caller);
228 add_references_to_partition (part, node);
230 /* Add all aliases associated with the symbol. */
231 for (i = 0; ipa_ref_list_referring_iterate (&node->ref_list, i, ref); i++)
232 if (ref->use == IPA_REF_ALIAS && !node->weakref)
233 add_symbol_to_partition_1 (part, ref->referring);
235   /* Ensure that members of SAME_COMDAT_GROUP are always all added as a group.  */
236 if (node->same_comdat_group)
237 for (node1 = node->same_comdat_group;
238 node1 != node; node1 = node1->same_comdat_group)
240 bool added = add_symbol_to_partition_1 (part, node1);
241 gcc_assert (added);
243 return true;
246 /* If symbol NODE is really part of another symbol's definition (i.e. it is
247    an internal label, thunk, alias or the like), return the outer symbol.
248 When add_symbol_to_partition_1 is called on the outer symbol it must
249 eventually add NODE, too. */
250 static symtab_node *
251 contained_in_symbol (symtab_node *node)
253 /* Weakrefs are never contained in anything. */
254 if (node->weakref)
255 return node;
256 if (cgraph_node *cnode = dyn_cast <cgraph_node> (node))
258 cnode = cgraph_function_node (cnode, NULL);
259 if (cnode->global.inlined_to)
260 cnode = cnode->global.inlined_to;
261 return cnode;
263 else if (varpool_node *vnode = dyn_cast <varpool_node> (node))
264 return varpool_variable_node (vnode, NULL);
265 return node;
268 /* Add symbol NODE to the partition.  When the definition of NODE is part
269    of another symbol's definition, add the other symbol, too.  */
271 static void
272 add_symbol_to_partition (ltrans_partition part, symtab_node *node)
274 symtab_node *node1;
276 /* Verify that we do not try to duplicate something that can not be. */
277 gcc_checking_assert (get_symbol_class (node) == SYMBOL_DUPLICATE
278 || !symbol_partitioned_p (node));
280 while ((node1 = contained_in_symbol (node)) != node)
281 node = node1;
283   /* If we have a duplicated symbol contained in something we cannot duplicate,
284      we are very badly screwed.  The other direction is possible, so we do not
285      assert this in add_symbol_to_partition_1.
287      Be lax about comdats; they may or may not be duplicated and we may
288      end up needing to duplicate a keyed comdat because it has an unkeyed alias.  */
289 gcc_assert (get_symbol_class (node) == SYMBOL_DUPLICATE
290 || DECL_COMDAT (node->decl)
291 || !symbol_partitioned_p (node));
292 add_symbol_to_partition_1 (part, node);
295 /* Undo all additions until the number of symbol nodes in PARTITION
296    is N_NODES.  */
298 static void
299 undo_partition (ltrans_partition partition, unsigned int n_nodes)
301 while (lto_symtab_encoder_size (partition->encoder) > (int)n_nodes)
303 symtab_node *node = lto_symtab_encoder_deref (partition->encoder,
304 n_nodes);
306 /* After UNDO we no longer know what was visited. */
307 if (partition->initializers_visited)
308 pointer_set_destroy (partition->initializers_visited);
309 partition->initializers_visited = NULL;
311 if (cgraph_node *cnode = dyn_cast <cgraph_node> (node))
312 partition->insns -= inline_summary (cnode)->self_size;
313 lto_symtab_encoder_delete_node (partition->encoder, node);
314 node->aux = (void *)((size_t)node->aux - 1);
318 /* Group cgraph nodes by input files.  This is used mainly for testing
319 right now. */
321 void
322 lto_1_to_1_map (void)
324 symtab_node *node;
325 struct lto_file_decl_data *file_data;
326 struct pointer_map_t *pmap;
327 ltrans_partition partition;
328 void **slot;
329 int npartitions = 0;
331 pmap = pointer_map_create ();
333 FOR_EACH_SYMBOL (node)
335 if (get_symbol_class (node) != SYMBOL_PARTITION
336 || symbol_partitioned_p (node))
337 continue;
339 file_data = node->lto_file_data;
341 if (file_data)
343 slot = pointer_map_contains (pmap, file_data);
344 if (slot)
345 partition = (ltrans_partition) *slot;
346 else
348 partition = new_partition (file_data->file_name);
349 slot = pointer_map_insert (pmap, file_data);
350 *slot = partition;
351 npartitions++;
354 else if (!file_data && ltrans_partitions.length ())
355 partition = ltrans_partitions[0];
356 else
358 partition = new_partition ("");
359 slot = pointer_map_insert (pmap, NULL);
360 *slot = partition;
361 npartitions++;
364 add_symbol_to_partition (partition, node);
367 /* If the cgraph is empty, create one cgraph node set so that there is still
368 an output file for any variables that need to be exported in a DSO. */
369 if (!npartitions)
370 new_partition ("empty");
372 pointer_map_destroy (pmap);
376 /* Maximal partitioning.  Put every new symbol into a new partition if possible.  */
378 void
379 lto_max_map (void)
381 symtab_node *node;
382 ltrans_partition partition;
383 int npartitions = 0;
385 FOR_EACH_SYMBOL (node)
387 if (get_symbol_class (node) != SYMBOL_PARTITION
388 || symbol_partitioned_p (node))
389 continue;
390 partition = new_partition (node->asm_name ());
391 add_symbol_to_partition (partition, node);
392 npartitions++;
394 if (!npartitions)
395 new_partition ("empty");
398 /* Helper function for qsort; sort nodes by order. */
399 static int
400 node_cmp (const void *pa, const void *pb)
402 const struct cgraph_node *a = *(const struct cgraph_node * const *) pa;
403 const struct cgraph_node *b = *(const struct cgraph_node * const *) pb;
404 return b->order - a->order;
407 /* Helper function for qsort; sort nodes by order. */
408 static int
409 varpool_node_cmp (const void *pa, const void *pb)
411 const varpool_node *a = *(const varpool_node * const *) pa;
412 const varpool_node *b = *(const varpool_node * const *) pb;
413 return b->order - a->order;
416 /* Group cgraph nodes into equally-sized partitions.
418 The partitioning algorithm is simple: nodes are taken in predefined order.
419 The order corresponds to the order we want functions to have in the final
420 output. In the future this will be given by function reordering pass, but
421 at the moment we use the topological order, which is a good approximation.
423 The goal is to partition this linear order into intervals (partitions) so
424 that all the partitions have approximately the same size and the number of
425 callgraph or IPA reference edges crossing boundaries is minimal.
427 This is a lot faster (O(n) in size of callgraph) than algorithms doing
428 priority-based graph clustering that are generally O(n^2) and, since
429 WHOPR is designed to make things go well across partitions, it leads
430 to good results.
432 We compute the expected size of a partition as:
434 max (total_size / lto_partitions, min_partition_size)
436 We use dynamic expected size of partition so small programs are partitioned
437 into enough partitions to allow use of multiple CPUs, while large programs
438 are not partitioned too much. Creating too many partitions significantly
439 increases the streaming overhead.
441 In the future, we would like to bound the maximal size of partitions so as
442 to prevent the LTRANS stage from consuming too much memory. At the moment,
443 however, the WPA stage is the most memory intensive for large benchmarks,
444 since too many types and declarations are read into memory.
446 The function implements a simple greedy algorithm. Nodes are being added
447 to the current partition until after 3/4 of the expected partition size is
448 reached. Past this threshold, we keep track of boundary size (number of
449 edges going to other partitions) and continue adding functions until after
450 the current partition has grown to twice the expected partition size. Then
451 the process is undone to the point where the minimal ratio of boundary size
452 and in-partition calls was reached. */
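/* Worked example (illustrative numbers only): with total_size == 800000
   units and --param lto-partitions=32, the expected partition size is
   max (800000 / 32, min_partition_size) == 25000 units, assuming the
   minimum is smaller than that.  A partition then becomes a candidate for
   closing once it grows past 3/4 * 25000 == 18750 units, and once it
   exceeds 2 * 25000 == 50000 units it is unwound to the best boundary
   seen so far and a new partition is started.  */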
454 void
455 lto_balanced_map (void)
457 int n_nodes = 0;
458 int n_varpool_nodes = 0, varpool_pos = 0, best_varpool_pos = 0;
459 struct cgraph_node **order = XNEWVEC (struct cgraph_node *, cgraph_max_uid);
460 varpool_node **varpool_order = NULL;
461 int i;
462 struct cgraph_node *node;
463 int total_size = 0, best_total_size = 0;
464 int partition_size;
465 ltrans_partition partition;
466 int last_visited_node = 0;
467 varpool_node *vnode;
468 int cost = 0, internal = 0;
469 int best_n_nodes = 0, best_i = 0, best_cost =
470 INT_MAX, best_internal = 0;
471 int npartitions;
472 int current_order = -1;
474 FOR_EACH_VARIABLE (vnode)
475 gcc_assert (!vnode->aux);
477 FOR_EACH_DEFINED_FUNCTION (node)
478 if (get_symbol_class (node) == SYMBOL_PARTITION)
480 order[n_nodes++] = node;
481 total_size += inline_summary (node)->size;
484   /* Streaming works best when the source units do not cross partition
485      boundaries much.  This is because importing a function from a source
486      unit tends to import a lot of global trees defined there.  We should
487      get better about minimizing the function boundary, but until then
488      things work more smoothly if we order functions in source order.  */
489 qsort (order, n_nodes, sizeof (struct cgraph_node *), node_cmp);
490 if (!flag_toplevel_reorder)
492 qsort (order, n_nodes, sizeof (struct cgraph_node *), node_cmp);
494 FOR_EACH_VARIABLE (vnode)
495 if (get_symbol_class (vnode) == SYMBOL_PARTITION)
496 n_varpool_nodes++;
497 varpool_order = XNEWVEC (varpool_node *, n_varpool_nodes);
499 n_varpool_nodes = 0;
500 FOR_EACH_VARIABLE (vnode)
501 if (get_symbol_class (vnode) == SYMBOL_PARTITION)
502 varpool_order[n_varpool_nodes++] = vnode;
503 qsort (varpool_order, n_varpool_nodes, sizeof (varpool_node *),
504 varpool_node_cmp);
507 /* Compute partition size and create the first partition. */
508 partition_size = total_size / PARAM_VALUE (PARAM_LTO_PARTITIONS);
509 if (partition_size < PARAM_VALUE (MIN_PARTITION_SIZE))
510 partition_size = PARAM_VALUE (MIN_PARTITION_SIZE);
511 npartitions = 1;
512 partition = new_partition ("");
513 if (cgraph_dump_file)
514 fprintf (cgraph_dump_file, "Total unit size: %i, partition size: %i\n",
515 total_size, partition_size);
517 for (i = 0; i < n_nodes; i++)
519 if (symbol_partitioned_p (order[i]))
520 continue;
522 current_order = order[i]->order;
524 if (!flag_toplevel_reorder)
525 while (varpool_pos < n_varpool_nodes
526 && varpool_order[varpool_pos]->order < current_order)
528 if (!symbol_partitioned_p (varpool_order[varpool_pos]))
529 add_symbol_to_partition (partition, varpool_order[varpool_pos]);
530 varpool_pos++;
533 add_symbol_to_partition (partition, order[i]);
534 total_size -= inline_summary (order[i])->size;
537       /* Once we add a new node to the partition, we also want to add
538          all referenced variables unless they were already added into some
539          earlier partition.
540	 add_symbol_to_partition possibly adds multiple nodes and
541	 variables that are needed to satisfy the needs of ORDER[i].
542	 We remember the last visited cgraph and varpool node from the last
543	 iteration of the outer loop; that allows us to process every new addition.
545	 At the same time we compute the size of the boundary into COST.  Every
546	 callgraph or IPA reference edge leaving the partition contributes to
547	 COST.  Every edge inside the partition was earlier counted as one leaving
548	 it, and thus we need to subtract it from COST.  */
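      /* Concretely: when function A is added and calls B, which is not yet in
	 the partition, the A->B edge adds its frequency to COST.  If B is
	 later added to the same partition, the same edge is seen again from
	 B's side (as a caller edge whose caller is already encoded) and its
	 frequency is subtracted, so only edges that really cross the
	 partition boundary stay counted in COST.  */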
549 while (last_visited_node < lto_symtab_encoder_size (partition->encoder))
551 struct ipa_ref_list *refs;
552 int j;
553 struct ipa_ref *ref;
554 symtab_node *snode = lto_symtab_encoder_deref (partition->encoder,
555 last_visited_node);
557 if (cgraph_node *node = dyn_cast <cgraph_node> (snode))
559 struct cgraph_edge *edge;
561 refs = &node->ref_list;
563 last_visited_node++;
565 gcc_assert (node->definition || node->weakref);
567 /* Compute boundary cost of callgraph edges. */
568 for (edge = node->callees; edge; edge = edge->next_callee)
569 if (edge->callee->definition)
571 int edge_cost = edge->frequency;
572 int index;
574 if (!edge_cost)
575 edge_cost = 1;
576 gcc_assert (edge_cost > 0);
577 index = lto_symtab_encoder_lookup (partition->encoder,
578 edge->callee);
579 if (index != LCC_NOT_FOUND
580 && index < last_visited_node - 1)
581 cost -= edge_cost, internal += edge_cost;
582 else
583 cost += edge_cost;
585 for (edge = node->callers; edge; edge = edge->next_caller)
587 int edge_cost = edge->frequency;
588 int index;
590 gcc_assert (edge->caller->definition);
591 if (!edge_cost)
592 edge_cost = 1;
593 gcc_assert (edge_cost > 0);
594 index = lto_symtab_encoder_lookup (partition->encoder,
595 edge->caller);
596 if (index != LCC_NOT_FOUND
597 && index < last_visited_node - 1)
598 cost -= edge_cost;
599 else
600 cost += edge_cost;
603 else
605 refs = &snode->ref_list;
606 last_visited_node++;
609 /* Compute boundary cost of IPA REF edges and at the same time look into
610 variables referenced from current partition and try to add them. */
611 for (j = 0; ipa_ref_list_reference_iterate (refs, j, ref); j++)
612 if (is_a <varpool_node> (ref->referred))
614 int index;
616 vnode = ipa_ref_varpool_node (ref);
617 if (!vnode->definition)
618 continue;
619 if (!symbol_partitioned_p (vnode) && flag_toplevel_reorder
620 && get_symbol_class (vnode) == SYMBOL_PARTITION)
621 add_symbol_to_partition (partition, vnode);
622 index = lto_symtab_encoder_lookup (partition->encoder,
623 vnode);
624 if (index != LCC_NOT_FOUND
625 && index < last_visited_node - 1)
626 cost--, internal++;
627 else
628 cost++;
630 else
632 int index;
634 node = ipa_ref_node (ref);
635 if (!node->definition)
636 continue;
637 index = lto_symtab_encoder_lookup (partition->encoder,
638 node);
639 if (index != LCC_NOT_FOUND
640 && index < last_visited_node - 1)
641 cost--, internal++;
642 else
643 cost++;
645 for (j = 0; ipa_ref_list_referring_iterate (refs, j, ref); j++)
646 if (is_a <varpool_node> (ref->referring))
648 int index;
650 vnode = ipa_ref_referring_varpool_node (ref);
651 gcc_assert (vnode->definition);
652 if (!symbol_partitioned_p (vnode) && flag_toplevel_reorder
653 && get_symbol_class (vnode) == SYMBOL_PARTITION)
654 add_symbol_to_partition (partition, vnode);
655 index = lto_symtab_encoder_lookup (partition->encoder,
656 vnode);
657 if (index != LCC_NOT_FOUND
658 && index < last_visited_node - 1)
659 cost--;
660 else
661 cost++;
663 else
665 int index;
667 node = ipa_ref_referring_node (ref);
668 gcc_assert (node->definition);
669 index = lto_symtab_encoder_lookup (partition->encoder,
670 node);
671 if (index != LCC_NOT_FOUND
672 && index < last_visited_node - 1)
673 cost--;
674 else
675 cost++;
679 /* If the partition is large enough, start looking for smallest boundary cost. */
680 if (partition->insns < partition_size * 3 / 4
681 || best_cost == INT_MAX
682 || ((!cost
683 || (best_internal * (HOST_WIDE_INT) cost
684 > (internal * (HOST_WIDE_INT)best_cost)))
685 && partition->insns < partition_size * 5 / 4))
687 best_cost = cost;
688 best_internal = internal;
689 best_i = i;
690 best_n_nodes = lto_symtab_encoder_size (partition->encoder);
691 best_total_size = total_size;
692 best_varpool_pos = varpool_pos;
694 if (cgraph_dump_file)
695 fprintf (cgraph_dump_file, "Step %i: added %s/%i, size %i, cost %i/%i "
696 "best %i/%i, step %i\n", i,
697 order[i]->name (), order[i]->order,
698 partition->insns, cost, internal,
699 best_cost, best_internal, best_i);
700       /* Partition is too large, unwind to the step where the best cost was
701          reached and start a new partition.  */
702 if (partition->insns > 2 * partition_size)
704 if (best_i != i)
706 if (cgraph_dump_file)
707 fprintf (cgraph_dump_file, "Unwinding %i insertions to step %i\n",
708 i - best_i, best_i);
709 undo_partition (partition, best_n_nodes);
710 varpool_pos = best_varpool_pos;
712 i = best_i;
713 	  /* When we are finished, avoid creating an empty partition.  */
714 while (i < n_nodes - 1 && symbol_partitioned_p (order[i + 1]))
715 i++;
716 if (i == n_nodes - 1)
717 break;
718 partition = new_partition ("");
719 last_visited_node = 0;
720 total_size = best_total_size;
721 cost = 0;
723 if (cgraph_dump_file)
724 fprintf (cgraph_dump_file, "New partition\n");
725 best_n_nodes = 0;
726 best_cost = INT_MAX;
728 	  /* Since the size of partitions is just approximate, update the size after
729 	     we have finished the current one.  */
730 if (npartitions < PARAM_VALUE (PARAM_LTO_PARTITIONS))
731 partition_size = total_size
732 / (PARAM_VALUE (PARAM_LTO_PARTITIONS) - npartitions);
733 else
734 partition_size = INT_MAX;
736 if (partition_size < PARAM_VALUE (MIN_PARTITION_SIZE))
737 partition_size = PARAM_VALUE (MIN_PARTITION_SIZE);
738 npartitions ++;
742   /* Variables that are not reachable from the code go into the last partition.  */
743 if (flag_toplevel_reorder)
745 FOR_EACH_VARIABLE (vnode)
746 if (get_symbol_class (vnode) == SYMBOL_PARTITION
747 && !symbol_partitioned_p (vnode))
748 add_symbol_to_partition (partition, vnode);
750 else
752 while (varpool_pos < n_varpool_nodes)
754 if (!symbol_partitioned_p (varpool_order[varpool_pos]))
755 add_symbol_to_partition (partition, varpool_order[varpool_pos]);
756 varpool_pos++;
758 free (varpool_order);
760 free (order);
763 /* Mangle NODE's symbol name into a local name.
764    This is necessary
765    1) if two or more static vars of the same assembler name
766       are merged into a single ltrans unit, or
767    2) if a previously static var was promoted hidden to avoid a possible
768       conflict with symbols defined outside of the LTO world.  */
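/* For instance, a file-local symbol "foo" typically ends up with an
   assembler name of the form "foo.lto_priv.<N>" (produced by
   clone_function_name below), and the old->new mapping is recorded so that
   streamed references keep resolving to the renamed symbol.  */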
771 static bool
772 privatize_symbol_name (symtab_node *node)
774 tree decl = node->decl;
775 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
777   /* Our renaming machinery does not handle more than one change of assembler name.
778 We should not need more than one anyway. */
779 if (node->lto_file_data
780 && lto_get_decl_name_mapping (node->lto_file_data, name) != name)
782 if (cgraph_dump_file)
783 fprintf (cgraph_dump_file,
784 		 "Not privatizing symbol name: %s. It is privatized already.\n",
785 name);
786 return false;
788   /* Avoid mangling of already mangled clones.
789      ??? We should have a flag for whether a symbol already has a 'private' name,
790      since we produce some symbols like that, e.g. for global constructors,
791      that are not really clones.  */
792 if (node->unique_name)
794 if (cgraph_dump_file)
795 fprintf (cgraph_dump_file,
796 "Not privatizing symbol name: %s. Has unique name.\n",
797 name);
798 return false;
800 change_decl_assembler_name (decl, clone_function_name (decl, "lto_priv"));
801 if (node->lto_file_data)
802 lto_record_renamed_decl (node->lto_file_data, name,
803 IDENTIFIER_POINTER
804 (DECL_ASSEMBLER_NAME (decl)));
805 if (cgraph_dump_file)
806 fprintf (cgraph_dump_file,
807 "Privatizing symbol name: %s -> %s\n",
808 name, IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));
809 return true;
812 /* Promote symbol NODE to be global (public with hidden visibility).  */
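/* Promoted symbols must become public so that references from other LTRANS
   partitions resolve to them, but they get hidden visibility so they are
   not exported from the resulting DSO or executable and cannot be
   preempted by symbols from other modules.  */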
814 static void
815 promote_symbol (symtab_node *node)
817 /* We already promoted ... */
818 if (DECL_VISIBILITY (node->decl) == VISIBILITY_HIDDEN
819 && DECL_VISIBILITY_SPECIFIED (node->decl)
820 && TREE_PUBLIC (node->decl))
821 return;
823 gcc_checking_assert (!TREE_PUBLIC (node->decl)
824 && !DECL_EXTERNAL (node->decl));
825 /* Be sure that newly public symbol does not conflict with anything already
826 defined by the non-LTO part. */
827 privatize_symbol_name (node);
828 TREE_PUBLIC (node->decl) = 1;
829 DECL_VISIBILITY (node->decl) = VISIBILITY_HIDDEN;
830 DECL_VISIBILITY_SPECIFIED (node->decl) = true;
831 if (cgraph_dump_file)
832 fprintf (cgraph_dump_file,
833 "Promoting as hidden: %s\n", node->name ());
836 /* Return true if NODE needs a named section even if it won't land in the partition
837 symbol table.
838 FIXME: we should really not use named sections for inline clones and master clones. */
840 static bool
841 may_need_named_section_p (lto_symtab_encoder_t encoder, symtab_node *node)
843 struct cgraph_node *cnode = dyn_cast <cgraph_node> (node);
844 if (!cnode)
845 return false;
846 if (symtab_real_symbol_p (node))
847 return false;
848 return (!encoder
849 || (lto_symtab_encoder_lookup (encoder, node) != LCC_NOT_FOUND
850 && lto_symtab_encoder_encode_body_p (encoder,
851 cnode)));
854 /* If NODE represents a static variable, see if there are other variables
855    of the same name in partition ENCODER (or in the whole compilation unit if
856    ENCODER is NULL) and, if so, mangle the statics.  Always mangle all
857    conflicting statics, so we reduce the chances of silently miscompiling
858    asm statements referring to them by symbol name.  */
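/* I.e., rather than leaving one of the statics with the original name,
   renaming them all means an asm statement that still uses the original
   name fails to resolve at link time instead of silently binding to
   whichever static happened to keep the name.  */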
860 static void
861 rename_statics (lto_symtab_encoder_t encoder, symtab_node *node)
863 tree decl = node->decl;
864 symtab_node *s;
865 tree name = DECL_ASSEMBLER_NAME (decl);
867   /* See if this is a static symbol.  */
868 if ((node->externally_visible
869 /* FIXME: externally_visible is somewhat illogically not set for
870 external symbols (i.e. those not defined). Remove this test
871 once this is fixed. */
872 || DECL_EXTERNAL (node->decl)
873 || !symtab_real_symbol_p (node))
874 && !may_need_named_section_p (encoder, node))
875 return;
877   /* Now walk symbols sharing the same name and see if there are any conflicts.
878      (All types of symbols count here, since we cannot have a static of the
879      same name as an external or public symbol.)  */
880 for (s = symtab_node_for_asm (name);
881 s; s = s->next_sharing_asm_name)
882 if ((symtab_real_symbol_p (s) || may_need_named_section_p (encoder, s))
883 && s->decl != node->decl
884 && (!encoder
885 || lto_symtab_encoder_lookup (encoder, s) != LCC_NOT_FOUND))
886 break;
888   /* OK, no conflict, so we have nothing to do.  */
889 if (!s)
890 return;
892 if (cgraph_dump_file)
893 fprintf (cgraph_dump_file,
894 "Renaming statics with asm name: %s\n", node->name ());
896   /* Assign every symbol in the set that shares the same ASM name a unique
897 mangled name. */
898 for (s = symtab_node_for_asm (name); s;)
899 if (!s->externally_visible
900 && ((symtab_real_symbol_p (s)
901 && !DECL_EXTERNAL (node->decl)
902 && !TREE_PUBLIC (node->decl))
903 || may_need_named_section_p (encoder, s))
904 && (!encoder
905 || lto_symtab_encoder_lookup (encoder, s) != LCC_NOT_FOUND))
907 if (privatize_symbol_name (s))
908 	  /* Restart from the beginning since we do not know how many symbols changed their name.  */
909 s = symtab_node_for_asm (name);
910 else s = s->next_sharing_asm_name;
912 else s = s->next_sharing_asm_name;
915 /* Find all static decls that need to be promoted to global because
916    of cross-file sharing.  This function must be run in WPA mode after
917 all inlinees are added. */
919 void
920 lto_promote_cross_file_statics (void)
922 unsigned i, n_sets;
924 gcc_assert (flag_wpa);
926 /* First compute boundaries. */
927 n_sets = ltrans_partitions.length ();
928 for (i = 0; i < n_sets; i++)
930 ltrans_partition part
931 = ltrans_partitions[i];
932 part->encoder = compute_ltrans_boundary (part->encoder);
935 /* Look at boundaries and promote symbols as needed. */
936 for (i = 0; i < n_sets; i++)
938 lto_symtab_encoder_iterator lsei;
939 lto_symtab_encoder_t encoder = ltrans_partitions[i]->encoder;
941 for (lsei = lsei_start (encoder); !lsei_end_p (lsei);
942 lsei_next (&lsei))
944 symtab_node *node = lsei_node (lsei);
946 	  /* If the symbol is static, rename it if its assembler name clashes with
947 	     anything else in this unit.  */
948 rename_statics (encoder, node);
950 /* No need to promote if symbol already is externally visible ... */
951 if (node->externally_visible
952 /* ... or if it is part of current partition ... */
953 || lto_symtab_encoder_in_partition_p (encoder, node)
954 	      /* ... or if we do not partition it.  This means that it will
955 		 appear in every partition referencing it.  */
956 || get_symbol_class (node) != SYMBOL_PARTITION)
957 continue;
959 promote_symbol (node);
964 /* Rename statics in the whole unit in the case that
965 we do -flto-partition=none. */
967 void
968 lto_promote_statics_nonwpa (void)
970 symtab_node *node;
971 FOR_EACH_SYMBOL (node)
972 rename_statics (NULL, node);