/* LTO partitioning logic routines.
   Copyright (C) 2009-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "toplev.h"
#include "tree.h"
#include "tm.h"
#include "cgraph.h"
#include "lto-streamer.h"
#include "timevar.h"
#include "params.h"
#include "ipa-inline.h"
#include "ipa-utils.h"
#include "lto-partition.h"

/* Classification of symbols into classes WRT partitioning.  */
enum symbol_class
{
  /* External declarations are ignored by partitioning algorithms and they are
     added into the boundary later via compute_ltrans_boundary.  */
  SYMBOL_EXTERNAL,
  /* Partitioned symbols are put into one of the partitions.  */
  SYMBOL_PARTITION,
  /* Duplicated symbols (such as comdat or constant pool references) are
     copied into every node needing them via add_symbol_to_partition.  */
  SYMBOL_DUPLICATE
};

vec<ltrans_partition> ltrans_partitions;

static void add_symbol_to_partition (ltrans_partition part, symtab_node node);

/* Classify symbol NODE.  */

enum symbol_class
get_symbol_class (symtab_node node)
{
  /* Inline clones are always duplicated.
     This includes external declarations.  */
  cgraph_node *cnode = dyn_cast <cgraph_node> (node);
  if (cnode && cnode->global.inlined_to)
    return SYMBOL_DUPLICATE;

  /* External declarations are external.  */
  if (DECL_EXTERNAL (node->symbol.decl))
    return SYMBOL_EXTERNAL;

  if (varpool_node *vnode = dyn_cast <varpool_node> (node))
    {
      /* Constant pool references use local symbol names that can not
         be promoted global.  We should never put into a constant pool
         objects that can not be duplicated across partitions.  */
      if (DECL_IN_CONSTANT_POOL (node->symbol.decl))
        return SYMBOL_DUPLICATE;
      gcc_checking_assert (vnode->analyzed);
    }

  /* Functions that are cloned may stay in the callgraph even if they are
     unused.  Handle them as external; compute_ltrans_boundary takes care
     to make the proper things happen (i.e. to make them appear in the
     boundary but with the body streamed, so the clone can be materialized).  */
  else if (!cgraph (node)->analyzed)
    return SYMBOL_EXTERNAL;

  /* Weakref aliases are always duplicated.  */
  if (lookup_attribute ("weakref", DECL_ATTRIBUTES (node->symbol.decl)))
    return SYMBOL_DUPLICATE;

  /* Comdats are duplicated to every use unless they are keyed.
     Those do not need duplication.  */
  if (DECL_COMDAT (node->symbol.decl)
      && !node->symbol.force_output
      && !symtab_used_from_object_file_p ((symtab_node) node))
    return SYMBOL_DUPLICATE;

  return SYMBOL_PARTITION;
}

/* Create new partition with name NAME.  */

static ltrans_partition
new_partition (const char *name)
{
  ltrans_partition part = XCNEW (struct ltrans_partition_def);
  part->encoder = lto_symtab_encoder_new (false);
  part->name = name;
  part->insns = 0;
  ltrans_partitions.safe_push (part);
  return part;
}

/* Free memory used by ltrans datastructures.  */

void
free_ltrans_partitions (void)
{
  unsigned int idx;
  ltrans_partition part;
  for (idx = 0; ltrans_partitions.iterate (idx, &part); idx++)
    {
      if (part->initializers_visited)
        pointer_set_destroy (part->initializers_visited);
      /* Symtab encoder is freed after streaming.  */
      free (part);
    }
  ltrans_partitions.release ();
}
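
/* NODE->symbol.aux is used by the partitioner as a counter of how many
   partitions a symbol has been added to: add_symbol_to_partition_1
   increments it and undo_partition decrements it, so a non-zero value
   means the symbol already lives in at least one partition.  */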

/* Return true if symbol is already in some partition.  */

static inline bool
symbol_partitioned_p (symtab_node node)
{
  return node->symbol.aux;
}

/* Add references into the partition.  */
static void
add_references_to_partition (ltrans_partition part, symtab_node node)
{
  int i;
  struct ipa_ref *ref;

  /* Add all duplicated references to the partition.  */
  for (i = 0; ipa_ref_list_reference_iterate (&node->symbol.ref_list, i, ref); i++)
    if (get_symbol_class (ref->referred) == SYMBOL_DUPLICATE)
      add_symbol_to_partition (part, ref->referred);
    /* References to a readonly variable may be constant folded into its value.
       Recursively look into the initializers of the constant variable and add
       references, too.  */
    else if (is_a <varpool_node> (ref->referred)
             && const_value_known_p (ref->referred->symbol.decl)
             && !lto_symtab_encoder_in_partition_p (part->encoder, ref->referred))
      {
        if (!part->initializers_visited)
          part->initializers_visited = pointer_set_create ();
        if (!pointer_set_insert (part->initializers_visited, ref->referred))
          add_references_to_partition (part, ref->referred);
      }
}

/* Helper function for add_symbol_to_partition doing the actual dirty work
   of adding NODE to PART.  */

static bool
add_symbol_to_partition_1 (ltrans_partition part, symtab_node node)
{
  enum symbol_class c = get_symbol_class (node);
  int i;
  struct ipa_ref *ref;
  symtab_node node1;

  /* If NODE is already there, we have nothing to do.  */
  if (lto_symtab_encoder_in_partition_p (part->encoder, (symtab_node) node))
    return true;

  /* Non-duplicated aliases or thunks of a duplicated symbol need to be output
     just once.

     Be lax about comdats; they may or may not be duplicated and we may
     end up in need to duplicate keyed comdat because it has unkeyed alias.  */
  if (c == SYMBOL_PARTITION && !DECL_COMDAT (node->symbol.decl)
      && symbol_partitioned_p (node))
    return false;

  /* Be sure that we never try to duplicate partitioned symbol
     or add external symbol.  */
  gcc_assert (c != SYMBOL_EXTERNAL
              && (c == SYMBOL_DUPLICATE || !symbol_partitioned_p (node)));

  lto_set_symtab_encoder_in_partition (part->encoder, (symtab_node) node);

  if (symbol_partitioned_p (node))
    {
      node->symbol.in_other_partition = 1;
      if (cgraph_dump_file)
        fprintf (cgraph_dump_file,
                 "Symbol node %s now used in multiple partitions\n",
                 symtab_node_name (node));
    }
  node->symbol.aux = (void *)((size_t)node->symbol.aux + 1);

  if (cgraph_node *cnode = dyn_cast <cgraph_node> (node))
    {
      struct cgraph_edge *e;
      part->insns += inline_summary (cnode)->self_size;

      /* Add all inline clones and callees that are duplicated.  */
      for (e = cnode->callees; e; e = e->next_callee)
        if (!e->inline_failed)
          add_symbol_to_partition_1 (part, (symtab_node) e->callee);
        else if (get_symbol_class ((symtab_node) e->callee) == SYMBOL_DUPLICATE)
          add_symbol_to_partition (part, (symtab_node) e->callee);

      /* Add all thunks associated with the function.  */
      for (e = cnode->callers; e; e = e->next_caller)
        if (e->caller->thunk.thunk_p)
          add_symbol_to_partition_1 (part, (symtab_node) e->caller);
    }

  add_references_to_partition (part, node);

  /* Add all aliases associated with the symbol.  */
  for (i = 0; ipa_ref_list_referring_iterate (&node->symbol.ref_list, i, ref); i++)
    if (ref->use == IPA_REF_ALIAS
        && !lookup_attribute ("weakref",
                              DECL_ATTRIBUTES
                                (ref->referring->symbol.decl)))
      add_symbol_to_partition_1 (part, ref->referring);

  /* Ensure that members of a SAME_COMDAT_GROUP list are always added as
     a whole group.  */
  if (node->symbol.same_comdat_group)
    for (node1 = node->symbol.same_comdat_group;
         node1 != node; node1 = node1->symbol.same_comdat_group)
      {
        bool added = add_symbol_to_partition_1 (part, node1);
        gcc_assert (added);
      }
  return true;
}

/* If symbol NODE is really part of other symbol's definition (i.e. it is
   internal label, thunk, alias or so), return the outer symbol.
   When add_symbol_to_partition_1 is called on the outer symbol it must
   eventually add NODE, too.  */
static symtab_node
contained_in_symbol (symtab_node node)
{
  /* Weakrefs are never contained in anything.  */
  if (lookup_attribute ("weakref",
                        DECL_ATTRIBUTES (node->symbol.decl)))
    return node;
  if (cgraph_node *cnode = dyn_cast <cgraph_node> (node))
    {
      cnode = cgraph_function_node (cnode, NULL);
      if (cnode->global.inlined_to)
        cnode = cnode->global.inlined_to;
      return (symtab_node) cnode;
    }
  else if (varpool_node *vnode = dyn_cast <varpool_node> (node))
    return (symtab_node) varpool_variable_node (vnode, NULL);
  return node;
}

/* Add symbol NODE to partition.  When definition of NODE is part
   of other symbol definition, add the other symbol, too.  */

static void
add_symbol_to_partition (ltrans_partition part, symtab_node node)
{
  symtab_node node1;

  /* Verify that we do not try to duplicate something that can not be.  */
  gcc_checking_assert (get_symbol_class (node) == SYMBOL_DUPLICATE
                       || !symbol_partitioned_p (node));

  while ((node1 = contained_in_symbol (node)) != node)
    node = node1;

  /* If we have duplicated symbol contained in something we can not duplicate,
     we are very badly screwed.  The other way is possible, so we do not
     assert this in add_symbol_to_partition_1.

     Be lax about comdats; they may or may not be duplicated and we may
     end up in need to duplicate keyed comdat because it has unkeyed alias.  */
  gcc_assert (get_symbol_class (node) == SYMBOL_DUPLICATE
              || DECL_COMDAT (node->symbol.decl)
              || !symbol_partitioned_p (node));
  add_symbol_to_partition_1 (part, node);
}

/* Undo all additions until the number of symbols in PARTITION is back
   to N_NODES.  */

static void
undo_partition (ltrans_partition partition, unsigned int n_nodes)
{
  while (lto_symtab_encoder_size (partition->encoder) > (int)n_nodes)
    {
      symtab_node node = lto_symtab_encoder_deref (partition->encoder,
                                                   n_nodes);

      /* After UNDO we no longer know what was visited.  */
      if (partition->initializers_visited)
        pointer_set_destroy (partition->initializers_visited);
      partition->initializers_visited = NULL;

      if (cgraph_node *cnode = dyn_cast <cgraph_node> (node))
        partition->insns -= inline_summary (cnode)->self_size;
      lto_symtab_encoder_delete_node (partition->encoder, node);
      node->symbol.aux = (void *)((size_t)node->symbol.aux - 1);
    }
}

/* Group cgraph nodes by input files.  This is used mainly for testing
   right now.  */

void
lto_1_to_1_map (void)
{
  symtab_node node;
  struct lto_file_decl_data *file_data;
  struct pointer_map_t *pmap;
  ltrans_partition partition;
  void **slot;
  int npartitions = 0;

  pmap = pointer_map_create ();

  FOR_EACH_SYMBOL (node)
    {
      if (get_symbol_class (node) != SYMBOL_PARTITION
          || symbol_partitioned_p (node))
        continue;

      file_data = node->symbol.lto_file_data;

      if (file_data)
        {
          slot = pointer_map_contains (pmap, file_data);
          if (slot)
            partition = (ltrans_partition) *slot;
          else
            {
              partition = new_partition (file_data->file_name);
              slot = pointer_map_insert (pmap, file_data);
              *slot = partition;
              npartitions++;
            }
        }
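      /* Symbols with no associated input file go into the first existing
         partition, or into a fresh unnamed partition if none has been
         created yet.  */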
      else if (!file_data && ltrans_partitions.length ())
        partition = ltrans_partitions[0];
      else
        {
          partition = new_partition ("");
          slot = pointer_map_insert (pmap, NULL);
          *slot = partition;
          npartitions++;
        }

      add_symbol_to_partition (partition, (symtab_node) node);
    }

  /* If the cgraph is empty, create one cgraph node set so that there is still
     an output file for any variables that need to be exported in a DSO.  */
  if (!npartitions)
    new_partition ("empty");

  pointer_map_destroy (pmap);
}

/* Maximal partitioning.  Put every new symbol into a new partition if
   possible.  */

void
lto_max_map (void)
{
  symtab_node node;
  ltrans_partition partition;
  int npartitions = 0;

  FOR_EACH_SYMBOL (node)
    {
      if (get_symbol_class (node) != SYMBOL_PARTITION
          || symbol_partitioned_p (node))
        continue;
      partition = new_partition (symtab_node_asm_name (node));
      add_symbol_to_partition (partition, (symtab_node) node);
      npartitions++;
    }
  if (!npartitions)
    new_partition ("empty");
}

/* Helper function for qsort; sort nodes by order.  */
static int
node_cmp (const void *pa, const void *pb)
{
  const struct cgraph_node *a = *(const struct cgraph_node * const *) pa;
  const struct cgraph_node *b = *(const struct cgraph_node * const *) pb;
  return b->symbol.order - a->symbol.order;
}

/* Helper function for qsort; sort nodes by order.  */
static int
varpool_node_cmp (const void *pa, const void *pb)
{
  const struct varpool_node *a = *(const struct varpool_node * const *) pa;
  const struct varpool_node *b = *(const struct varpool_node * const *) pb;
  return b->symbol.order - a->symbol.order;
}

/* Group cgraph nodes into equally-sized partitions.

   The partitioning algorithm is simple: nodes are taken in predefined order.
   The order corresponds to the order we want functions to have in the final
   output.  In the future this will be given by function reordering pass, but
   at the moment we use the topological order, which is a good approximation.

   The goal is to partition this linear order into intervals (partitions) so
   that all the partitions have approximately the same size and the number of
   callgraph or IPA reference edges crossing boundaries is minimal.

   This is a lot faster (O(n) in size of callgraph) than algorithms doing
   priority-based graph clustering that are generally O(n^2) and, since
   WHOPR is designed to make things go well across partitions, it leads
   to good results.

   We compute the expected size of a partition as:

     max (total_size / lto_partitions, min_partition_size)

   We use dynamic expected size of partition so small programs are partitioned
   into enough partitions to allow use of multiple CPUs, while large programs
   are not partitioned too much.  Creating too many partitions significantly
   increases the streaming overhead.

   In the future, we would like to bound the maximal size of partitions so as
   to prevent the LTRANS stage from consuming too much memory.  At the moment,
   however, the WPA stage is the most memory intensive for large benchmarks,
   since too many types and declarations are read into memory.

   The function implements a simple greedy algorithm.  Nodes are being added
   to the current partition until after 3/4 of the expected partition size is
   reached.  Past this threshold, we keep track of boundary size (number of
   edges going to other partitions) and continue adding functions until after
   the current partition has grown to twice the expected partition size.  Then
   the process is undone to the point where the minimal ratio of boundary size
   and in-partition calls was reached.  */
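
/* As an illustration of the expected-size formula above (the numbers are
   made up, not defaults): with total_size == 100000 and
   PARAM_LTO_PARTITIONS == 32, the expected partition size is
   max (100000 / 32, MIN_PARTITION_SIZE) == max (3125, MIN_PARTITION_SIZE);
   a partition then becomes a candidate for closing once it passes 3/4 of
   that value and is unwound to the best split point once it passes twice
   that value.  */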

void
lto_balanced_map (void)
{
  int n_nodes = 0;
  int n_varpool_nodes = 0, varpool_pos = 0;
  struct cgraph_node **postorder =
    XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
  struct cgraph_node **order = XNEWVEC (struct cgraph_node *, cgraph_max_uid);
  struct varpool_node **varpool_order = NULL;
  int i, postorder_len;
  struct cgraph_node *node;
  int total_size = 0, best_total_size = 0;
  int partition_size;
  ltrans_partition partition;
  int last_visited_node = 0;
  struct varpool_node *vnode;
  int cost = 0, internal = 0;
  int best_n_nodes = 0, best_i = 0, best_cost =
    INT_MAX, best_internal = 0;
  int npartitions;
  int current_order = -1;

  FOR_EACH_VARIABLE (vnode)
    gcc_assert (!vnode->symbol.aux);
  /* Until we have better ordering facility, use topological order.
     Include only nodes we will partition and compute estimate of program
     size.  Note that since nodes that are not partitioned might be put into
     multiple partitions, this is just an estimate of real size.  This is why
     we keep partition_size updated after every partition is finalized.  */
  postorder_len = ipa_reverse_postorder (postorder);

  for (i = 0; i < postorder_len; i++)
    {
      node = postorder[i];
      if (get_symbol_class ((symtab_node) node) == SYMBOL_PARTITION)
        {
          order[n_nodes++] = node;
          total_size += inline_summary (node)->size;
        }
    }
  free (postorder);

  if (!flag_toplevel_reorder)
    {
      qsort (order, n_nodes, sizeof (struct cgraph_node *), node_cmp);

      FOR_EACH_VARIABLE (vnode)
        if (get_symbol_class ((symtab_node) vnode) == SYMBOL_PARTITION)
          n_varpool_nodes++;
      varpool_order = XNEWVEC (struct varpool_node *, n_varpool_nodes);

      n_varpool_nodes = 0;
      FOR_EACH_VARIABLE (vnode)
        if (get_symbol_class ((symtab_node) vnode) == SYMBOL_PARTITION)
          varpool_order[n_varpool_nodes++] = vnode;
      qsort (varpool_order, n_varpool_nodes, sizeof (struct varpool_node *),
             varpool_node_cmp);
    }

  /* Compute partition size and create the first partition.  */
  partition_size = total_size / PARAM_VALUE (PARAM_LTO_PARTITIONS);
  if (partition_size < PARAM_VALUE (MIN_PARTITION_SIZE))
    partition_size = PARAM_VALUE (MIN_PARTITION_SIZE);
  npartitions = 1;
  partition = new_partition ("");
  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "Total unit size: %i, partition size: %i\n",
             total_size, partition_size);
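
  /* Main greedy loop: walk the functions in ORDER, add each of them (plus
     everything it drags in) to the current partition, and close the
     partition once it grows past twice the expected size, unwinding to the
     best split point seen so far.  */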
  for (i = 0; i < n_nodes; i++)
    {
      if (symbol_partitioned_p ((symtab_node) order[i]))
        continue;

      current_order = order[i]->symbol.order;
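
      /* When top level reordering is disabled, keep variables in their
         original position relative to the functions: flush out every
         not-yet-partitioned variable whose symbol order precedes the
         current function before adding the function itself.  */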
      if (!flag_toplevel_reorder)
        while (varpool_pos < n_varpool_nodes
               && varpool_order[varpool_pos]->symbol.order < current_order)
          {
            if (!symbol_partitioned_p ((symtab_node) varpool_order[varpool_pos]))
              add_symbol_to_partition (partition,
                                       (symtab_node) varpool_order[varpool_pos]);
            varpool_pos++;
          }

      add_symbol_to_partition (partition, (symtab_node) order[i]);
      total_size -= inline_summary (order[i])->size;

      /* Once we added a new node to the partition, we also want to add
         all referenced variables unless they were already added into some
         earlier partition.
         add_symbol_to_partition adds possibly multiple nodes and
         variables that are needed to satisfy needs of ORDER[i].
         We remember the last visited cgraph and varpool node from the last
         iteration of the outer loop so that we can process every new
         addition.

         At the same time we compute size of the boundary into COST.  Every
         callgraph or IPA reference edge leaving the partition contributes to
         COST.  Every edge inside the partition was earlier computed as one
         leaving it and thus we need to subtract it from COST.  */
      while (last_visited_node < lto_symtab_encoder_size (partition->encoder))
        {
          struct ipa_ref_list *refs;
          int j;
          struct ipa_ref *ref;
          symtab_node snode = lto_symtab_encoder_deref (partition->encoder,
                                                        last_visited_node);

          if (cgraph_node *node = dyn_cast <cgraph_node> (snode))
            {
              struct cgraph_edge *edge;

              refs = &node->symbol.ref_list;

              last_visited_node++;

              gcc_assert (node->analyzed);

              /* Compute boundary cost of callgraph edges.  */
              for (edge = node->callees; edge; edge = edge->next_callee)
                if (edge->callee->analyzed)
                  {
                    int edge_cost = edge->frequency;
                    int index;

                    if (!edge_cost)
                      edge_cost = 1;
                    gcc_assert (edge_cost > 0);
                    index = lto_symtab_encoder_lookup (partition->encoder,
                                                       (symtab_node) edge->callee);
                    if (index != LCC_NOT_FOUND
                        && index < last_visited_node - 1)
                      cost -= edge_cost, internal += edge_cost;
                    else
                      cost += edge_cost;
                  }
              for (edge = node->callers; edge; edge = edge->next_caller)
                {
                  int edge_cost = edge->frequency;
                  int index;

                  gcc_assert (edge->caller->analyzed);
                  if (!edge_cost)
                    edge_cost = 1;
                  gcc_assert (edge_cost > 0);
                  index = lto_symtab_encoder_lookup (partition->encoder,
                                                     (symtab_node) edge->caller);
                  if (index != LCC_NOT_FOUND
                      && index < last_visited_node - 1)
                    cost -= edge_cost;
                  else
                    cost += edge_cost;
                }
            }
          else
            {
              refs = &snode->symbol.ref_list;
              last_visited_node++;
            }

          /* Compute boundary cost of IPA REF edges and at the same time look into
             variables referenced from current partition and try to add them.  */
          for (j = 0; ipa_ref_list_reference_iterate (refs, j, ref); j++)
            if (is_a <varpool_node> (ref->referred))
              {
                int index;

                vnode = ipa_ref_varpool_node (ref);
                if (!vnode->finalized)
                  continue;
                if (!symbol_partitioned_p ((symtab_node) vnode)
                    && flag_toplevel_reorder
                    && get_symbol_class ((symtab_node) vnode) == SYMBOL_PARTITION)
                  add_symbol_to_partition (partition, (symtab_node) vnode);
                index = lto_symtab_encoder_lookup (partition->encoder,
                                                   (symtab_node) vnode);
                if (index != LCC_NOT_FOUND
                    && index < last_visited_node - 1)
                  cost--, internal++;
                else
                  cost++;
              }
            else
              {
                int index;

                node = ipa_ref_node (ref);
                if (!node->analyzed)
                  continue;
                index = lto_symtab_encoder_lookup (partition->encoder,
                                                   (symtab_node) node);
                if (index != LCC_NOT_FOUND
                    && index < last_visited_node - 1)
                  cost--, internal++;
                else
                  cost++;
              }
          for (j = 0; ipa_ref_list_referring_iterate (refs, j, ref); j++)
            if (is_a <varpool_node> (ref->referring))
              {
                int index;

                vnode = ipa_ref_referring_varpool_node (ref);
                gcc_assert (vnode->finalized);
                if (!symbol_partitioned_p ((symtab_node) vnode)
                    && flag_toplevel_reorder
                    && get_symbol_class ((symtab_node) vnode) == SYMBOL_PARTITION)
                  add_symbol_to_partition (partition, (symtab_node) vnode);
                index = lto_symtab_encoder_lookup (partition->encoder,
                                                   (symtab_node) vnode);
                if (index != LCC_NOT_FOUND
                    && index < last_visited_node - 1)
                  cost--;
                else
                  cost++;
              }
            else
              {
                int index;

                node = ipa_ref_referring_node (ref);
                gcc_assert (node->analyzed);
                index = lto_symtab_encoder_lookup (partition->encoder,
                                                   (symtab_node) node);
                if (index != LCC_NOT_FOUND
                    && index < last_visited_node - 1)
                  cost--;
                else
                  cost++;
              }
        }

      /* If the partition is large enough, start looking for smallest boundary cost.  */
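      /* Partitions below 3/4 of the expected size always accept the current
         point as the best split so far.  Beyond that, the comparison of
         boundary cost to internal (in-partition) edges decides, but only
         while the partition is below 5/4 of the expected size.  The
         cross-multiplied test best_internal * cost vs. internal * best_cost
         stands in for comparing the ratios cost / internal and
         best_cost / best_internal without dividing (and without tripping
         over internal == 0), using HOST_WIDE_INT so the products do not
         overflow int.  */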
      if (partition->insns < partition_size * 3 / 4
          || best_cost == INT_MAX
          || ((!cost
               || (best_internal * (HOST_WIDE_INT) cost
                   > (internal * (HOST_WIDE_INT) best_cost)))
              && partition->insns < partition_size * 5 / 4))
        {
          best_cost = cost;
          best_internal = internal;
          best_i = i;
          best_n_nodes = lto_symtab_encoder_size (partition->encoder);
          best_total_size = total_size;
        }
      if (cgraph_dump_file)
        fprintf (cgraph_dump_file,
                 "Step %i: added %s/%i, size %i, cost %i/%i best %i/%i, step %i\n",
                 i, cgraph_node_name (order[i]), order[i]->uid,
                 partition->insns, cost, internal,
                 best_cost, best_internal, best_i);
      /* Partition is too large; unwind back to the step where the best cost
         was reached and start a new partition.  */
      if (partition->insns > 2 * partition_size)
        {
          if (best_i != i)
            {
              if (cgraph_dump_file)
                fprintf (cgraph_dump_file, "Unwinding %i insertions to step %i\n",
                         i - best_i, best_i);
              undo_partition (partition, best_n_nodes);
            }
          i = best_i;
          /* When we are finished, avoid creating an empty partition.  */
          while (i < n_nodes - 1
                 && symbol_partitioned_p ((symtab_node) order[i + 1]))
            i++;
          if (i == n_nodes - 1)
            break;
          partition = new_partition ("");
          last_visited_node = 0;
          total_size = best_total_size;
          cost = 0;

          if (cgraph_dump_file)
            fprintf (cgraph_dump_file, "New partition\n");
          best_n_nodes = 0;
          best_cost = INT_MAX;

          /* Since the size of partitions is just approximate, update the
             expected size after we finish the current one.  */
          if (npartitions < PARAM_VALUE (PARAM_LTO_PARTITIONS))
            partition_size = total_size
              / (PARAM_VALUE (PARAM_LTO_PARTITIONS) - npartitions);
          else
            partition_size = INT_MAX;

          if (partition_size < PARAM_VALUE (MIN_PARTITION_SIZE))
            partition_size = PARAM_VALUE (MIN_PARTITION_SIZE);
          npartitions++;
        }
    }

  /* Variables that are not reachable from the code go into the last
     partition.  */
  if (flag_toplevel_reorder)
    {
      FOR_EACH_VARIABLE (vnode)
        if (get_symbol_class ((symtab_node) vnode) == SYMBOL_PARTITION
            && !symbol_partitioned_p ((symtab_node) vnode))
          add_symbol_to_partition (partition, (symtab_node) vnode);
    }
  else
    {
      while (varpool_pos < n_varpool_nodes)
        {
          if (!symbol_partitioned_p ((symtab_node) varpool_order[varpool_pos]))
            add_symbol_to_partition (partition,
                                     (symtab_node) varpool_order[varpool_pos]);
          varpool_pos++;
        }
      free (varpool_order);
    }
  free (order);
}

/* Promote symbol NODE to be global (TREE_PUBLIC) with hidden visibility.  */
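/* Making the decl TREE_PUBLIC lets other LTRANS partitions reference it,
   while hidden visibility keeps it out of the dynamic symbol table of the
   resulting DSO.  */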

static void
promote_symbol (symtab_node node)
{
  /* We already promoted ... */
  if (DECL_VISIBILITY (node->symbol.decl) == VISIBILITY_HIDDEN
      && DECL_VISIBILITY_SPECIFIED (node->symbol.decl)
      && TREE_PUBLIC (node->symbol.decl))
    return;

  gcc_checking_assert (!TREE_PUBLIC (node->symbol.decl)
                       && !DECL_EXTERNAL (node->symbol.decl));
  TREE_PUBLIC (node->symbol.decl) = 1;
  DECL_VISIBILITY (node->symbol.decl) = VISIBILITY_HIDDEN;
  DECL_VISIBILITY_SPECIFIED (node->symbol.decl) = true;
  if (cgraph_dump_file)
    fprintf (cgraph_dump_file,
             "Promoting as hidden: %s\n", symtab_node_name (node));
}

/* Find out all static decls that need to be promoted to global because
   of cross file sharing.  This function must be run in the WPA mode after
   all inlinees are added.  */

void
lto_promote_cross_file_statics (void)
{
  unsigned i, n_sets;

  gcc_assert (flag_wpa);

  /* First compute boundaries.  */
  n_sets = ltrans_partitions.length ();
  for (i = 0; i < n_sets; i++)
    {
      ltrans_partition part
        = ltrans_partitions[i];
      part->encoder = compute_ltrans_boundary (part->encoder);
    }

  /* Look at boundaries and promote symbols as needed.  */
  for (i = 0; i < n_sets; i++)
    {
      lto_symtab_encoder_iterator lsei;
      lto_symtab_encoder_t encoder;
      ltrans_partition part
        = ltrans_partitions[i];

      encoder = part->encoder;
      for (lsei = lsei_start (encoder); !lsei_end_p (lsei);
           lsei_next (&lsei))
        {
          symtab_node node = lsei_node (lsei);

          /* No need to promote if symbol already is externally visible ... */
          if (node->symbol.externally_visible
              /* ... or if it is part of current partition ... */
              || lto_symtab_encoder_in_partition_p (encoder, node)
              /* ... or if we do not partition it.  This means that it will
                 appear in every partition referencing it.  */
              || get_symbol_class ((symtab_node) node) != SYMBOL_PARTITION)
            continue;

          promote_symbol (node);