1 /* Coalesce SSA_NAMES together for the out-of-ssa pass.
2 Copyright (C) 2004-2024 Free Software Foundation, Inc.
3 Contributed by Andrew MacLeod <amacleod@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "predict.h"
28 #include "memmodel.h"
29 #include "tm_p.h"
30 #include "ssa.h"
31 #include "tree-ssa.h"
32 #include "tree-pretty-print.h"
33 #include "diagnostic-core.h"
34 #include "dumpfile.h"
35 #include "gimple-iterator.h"
36 #include "tree-ssa-live.h"
37 #include "tree-ssa-coalesce.h"
38 #include "explow.h"
39 #include "tree-dfa.h"
40 #include "stor-layout.h"
41 #include "gimple-lower-bitint.h"
43 /* This set of routines implements a coalesce_list. This is an object which
44 is used to track pairs of ssa_names which are desirable to coalesce
45 together to avoid copies. Costs are associated with each pair, and when
46 all desired information has been collected, the object can be used to
47 order the pairs for processing. */
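/* In outline, coalesce_ssa_name (at the end of this file) gathers candidate
   pairs from PHI nodes, copies, returns and asm statements, computes base
   partitions for the candidates, builds a conflict graph from live ranges,
   sorts the pair list by cost and then attempts each coalesce that does not
   conflict. */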
49 /* This structure defines a pair entry. */
51 struct coalesce_pair
53 int first_element;
54 int second_element;
55 int cost;
57 /* A count of the number of unique partitions this pair would conflict
58 with if coalescing was successful. This is the secondary sort key;
59 given two pairs with equal costs, we will prefer the pair with the smaller
60 conflict set.
62 This is lazily initialized when we discover two coalescing pairs have
63 the same primary cost.
65 Note this is not updated and propagated as pairs are coalesced. */
66 int conflict_count;
68 /* The order in which coalescing pairs are discovered is recorded in this
69 field, which is used as the final tie breaker when sorting coalesce
70 pairs. */
71 int index;
74 /* This represents a conflict graph. Implemented as an array of bitmaps.
75 A full matrix is used for conflicts rather than just upper triangular form.
76 This makes it much simpler and faster to perform conflict merges. */
78 struct ssa_conflicts
80 bitmap_obstack obstack; /* A place to allocate our bitmaps. */
81 vec<bitmap> conflicts;
84 /* The narrow API of the qsort comparison function doesn't allow easy
85 access to additional arguments. So we have two globals (ick) to hold
86 the data we need. They're initialized before the call to qsort and
87 wiped immediately after. */
88 static ssa_conflicts *conflicts_;
89 static var_map map_;
91 /* Coalesce pair hashtable helpers. */
93 struct coalesce_pair_hasher : nofree_ptr_hash <coalesce_pair>
95 static inline hashval_t hash (const coalesce_pair *);
96 static inline bool equal (const coalesce_pair *, const coalesce_pair *);
99 /* Hash function for coalesce list. Calculate hash for PAIR. */
101 inline hashval_t
102 coalesce_pair_hasher::hash (const coalesce_pair *pair)
104 hashval_t a = (hashval_t)(pair->first_element);
105 hashval_t b = (hashval_t)(pair->second_element);
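/* Pairs are normalized by find_coalesce_pair so that first_element is the
smaller value; for A < B the triangular-number index B * (B - 1) / 2 + A
is distinct for every pair. */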
107 return b * (b - 1) / 2 + a;
110 /* Equality function for coalesce list hash table. Compare PAIR1 and PAIR2,
111 returning TRUE if the two pairs are equivalent. */
113 inline bool
114 coalesce_pair_hasher::equal (const coalesce_pair *p1, const coalesce_pair *p2)
116 return (p1->first_element == p2->first_element
117 && p1->second_element == p2->second_element);
120 typedef hash_table<coalesce_pair_hasher> coalesce_table_type;
121 typedef coalesce_table_type::iterator coalesce_iterator_type;
124 struct cost_one_pair
126 int first_element;
127 int second_element;
128 cost_one_pair *next;
131 /* This structure maintains the list of coalesce pairs. */
133 struct coalesce_list
135 coalesce_table_type *list; /* Hash table. */
136 coalesce_pair **sorted; /* List when sorted. */
137 int num_sorted; /* Number in the sorted list. */
138 cost_one_pair *cost_one_list;/* Single use coalesces with cost 1. */
139 obstack ob;
142 #define NO_BEST_COALESCE -1
143 #define MUST_COALESCE_COST INT_MAX
146 /* Return the cost of executing a copy instruction with frequency FREQUENCY. */
148 static inline int
149 coalesce_cost (int frequency, bool optimize_for_size)
151 /* Base the cost on FREQUENCY, bounded below by 1. */
152 int cost = frequency;
154 if (!cost)
155 cost = 1;
157 if (optimize_for_size)
158 cost = 1;
160 return cost;
164 /* Return the cost of executing a copy instruction in basic block BB. */
166 static inline int
167 coalesce_cost_bb (basic_block bb)
169 return coalesce_cost (bb->count.to_frequency (cfun),
170 optimize_bb_for_size_p (bb));
174 /* Return the cost of executing a copy instruction on edge E. */
176 static inline int
177 coalesce_cost_edge (edge e)
179 int mult = 1;
181 /* Inserting a copy on a critical edge costs more than inserting it elsewhere. */
182 if (EDGE_CRITICAL_P (e))
183 mult = 2;
184 if (e->flags & EDGE_ABNORMAL)
185 return MUST_COALESCE_COST;
186 if (e->flags & EDGE_EH)
188 edge e2;
189 edge_iterator ei;
190 FOR_EACH_EDGE (e2, ei, e->dest->preds)
191 if (e2 != e)
193 /* Putting code on an EH edge that leads to a BB
194 with multiple predecessors implies splitting the
195 edge too. */
196 if (mult < 2)
197 mult = 2;
198 /* If there are multiple EH predecessors, we
199 also copy EH regions and produce a separate
200 landing pad. This is expensive. */
201 if (e2->flags & EDGE_EH)
203 mult = 5;
204 break;
209 return coalesce_cost (EDGE_FREQUENCY (e),
210 optimize_edge_for_size_p (e)) * mult;
214 /* Retrieve a pair to coalesce from the cost_one_list in CL. Returns the
215 2 elements via P1 and P2. The function returns 1 if there is a pair;
216 NO_BEST_COALESCE is returned if there aren't any. */
218 static inline int
219 pop_cost_one_pair (coalesce_list *cl, int *p1, int *p2)
221 cost_one_pair *ptr;
223 ptr = cl->cost_one_list;
224 if (!ptr)
225 return NO_BEST_COALESCE;
227 *p1 = ptr->first_element;
228 *p2 = ptr->second_element;
229 cl->cost_one_list = ptr->next;
231 return 1;
234 /* Retrieve the most expensive remaining pair to coalesce from CL. Returns the
235 2 elements via P1 and P2. Their calculated cost is returned by the function.
236 NO_BEST_COALESCE is returned if the coalesce list is empty. */
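/* Note that cost-one pairs are only returned once the sorted list has been
exhausted, so they are processed after all the costed pairs. */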
238 static inline int
239 pop_best_coalesce (coalesce_list *cl, int *p1, int *p2)
241 coalesce_pair *node;
242 int ret;
244 if (cl->sorted == NULL)
245 return pop_cost_one_pair (cl, p1, p2);
247 if (cl->num_sorted == 0)
248 return pop_cost_one_pair (cl, p1, p2);
250 node = cl->sorted[--(cl->num_sorted)];
251 *p1 = node->first_element;
252 *p2 = node->second_element;
253 ret = node->cost;
255 return ret;
259 /* Create a new empty coalesce list object and return it. */
261 static inline coalesce_list *
262 create_coalesce_list (void)
264 coalesce_list *list;
265 unsigned size = num_ssa_names * 3;
267 if (size < 40)
268 size = 40;
270 list = (coalesce_list *) xmalloc (sizeof (struct coalesce_list));
271 list->list = new coalesce_table_type (size);
272 list->sorted = NULL;
273 list->num_sorted = 0;
274 list->cost_one_list = NULL;
275 gcc_obstack_init (&list->ob);
276 return list;
280 /* Delete coalesce list CL. */
282 static inline void
283 delete_coalesce_list (coalesce_list *cl)
285 gcc_assert (cl->cost_one_list == NULL);
286 delete cl->list;
287 cl->list = NULL;
288 free (cl->sorted);
289 gcc_assert (cl->num_sorted == 0);
290 obstack_free (&cl->ob, NULL);
291 free (cl);
294 /* Return the number of unique coalesce pairs in CL. */
296 static inline int
297 num_coalesce_pairs (coalesce_list *cl)
299 return cl->list->elements ();
302 /* Find a matching coalesce pair object in CL for the pair P1 and P2. If
303 one isn't found, return NULL if CREATE is false, otherwise create a new
304 coalesce pair object and return it. */
306 static coalesce_pair *
307 find_coalesce_pair (coalesce_list *cl, int p1, int p2, bool create)
309 struct coalesce_pair p;
310 coalesce_pair **slot;
311 unsigned int hash;
313 /* Normalize so that p1 is the smaller value. */
314 if (p2 < p1)
316 p.first_element = p2;
317 p.second_element = p1;
319 else
321 p.first_element = p1;
322 p.second_element = p2;
325 hash = coalesce_pair_hasher::hash (&p);
326 slot = cl->list->find_slot_with_hash (&p, hash, create ? INSERT : NO_INSERT);
327 if (!slot)
328 return NULL;
330 if (!*slot)
332 struct coalesce_pair * pair = XOBNEW (&cl->ob, struct coalesce_pair);
333 gcc_assert (cl->sorted == NULL);
334 pair->first_element = p.first_element;
335 pair->second_element = p.second_element;
336 pair->cost = 0;
337 pair->index = num_coalesce_pairs (cl);
338 pair->conflict_count = 0;
339 *slot = pair;
342 return (struct coalesce_pair *) *slot;
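/* Add a coalesce with a cost of one between P1 and P2 to the cost one list
of CL. */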
345 static inline void
346 add_cost_one_coalesce (coalesce_list *cl, int p1, int p2)
348 cost_one_pair *pair;
350 pair = XOBNEW (&cl->ob, cost_one_pair);
351 pair->first_element = p1;
352 pair->second_element = p2;
353 pair->next = cl->cost_one_list;
354 cl->cost_one_list = pair;
358 /* Add a coalesce between P1 and P2 in list CL with a cost of VALUE. */
360 static inline void
361 add_coalesce (coalesce_list *cl, int p1, int p2, int value)
363 coalesce_pair *node;
365 gcc_assert (cl->sorted == NULL);
366 if (p1 == p2)
367 return;
369 node = find_coalesce_pair (cl, p1, p2, true);
371 /* Once the value is at least MUST_COALESCE_COST - 1, leave it that way. */
372 if (node->cost < MUST_COALESCE_COST - 1)
374 if (value < MUST_COALESCE_COST - 1)
375 node->cost += value;
376 else
377 node->cost = value;
381 /* Compute and record how many unique conflicts would exist for the
382 representative partitions of the coalesce pair P.
384 CONFLICTS is the conflict graph and MAP is the current partition view. */
386 static void
387 initialize_conflict_count (coalesce_pair *p,
388 ssa_conflicts *conflicts,
389 var_map map)
391 int p1 = var_to_partition (map, ssa_name (p->first_element));
392 int p2 = var_to_partition (map, ssa_name (p->second_element));
394 /* 4 cases. If both P1 and P2 have conflicts, then build their
395 union and count the members. Else handle the degenerate cases
396 in the obvious ways. */
397 if (conflicts->conflicts[p1] && conflicts->conflicts[p2])
398 p->conflict_count = bitmap_count_unique_bits (conflicts->conflicts[p1],
399 conflicts->conflicts[p2]);
400 else if (conflicts->conflicts[p1])
401 p->conflict_count = bitmap_count_bits (conflicts->conflicts[p1]);
402 else if (conflicts->conflicts[p2])
403 p->conflict_count = bitmap_count_bits (conflicts->conflicts[p2]);
404 else
405 p->conflict_count = 0;
409 /* Comparison function to allow qsort to sort P1 and P2 in ascending order. */
411 static int
412 compare_pairs (const void *p1, const void *p2)
414 coalesce_pair *const *const pp1 = (coalesce_pair *const *) p1;
415 coalesce_pair *const *const pp2 = (coalesce_pair *const *) p2;
416 int result;
418 result = (* pp1)->cost - (* pp2)->cost;
419 /* We use the size of the resulting conflict set as the secondary sort key.
420 Given two coalesce pairs with equal cost, we want to prefer the pair that
421 has the smaller conflict set. */
422 if (result == 0)
424 if (flag_expensive_optimizations)
426 /* Lazily initialize the conflict counts as it's fairly expensive
427 to compute. */
428 if ((*pp2)->conflict_count == 0)
429 initialize_conflict_count (*pp2, conflicts_, map_);
430 if ((*pp1)->conflict_count == 0)
431 initialize_conflict_count (*pp1, conflicts_, map_);
433 result = (*pp2)->conflict_count - (*pp1)->conflict_count;
436 /* And if everything else is equal, then sort based on which
437 coalesce pair was found first. */
438 if (result == 0)
439 result = (*pp2)->index - (*pp1)->index;
442 return result;
445 /* Iterate over CL using ITER, returning values in PAIR. */
447 #define FOR_EACH_PARTITION_PAIR(PAIR, ITER, CL) \
448 FOR_EACH_HASH_TABLE_ELEMENT (*(CL)->list, (PAIR), coalesce_pair_p, (ITER))
451 /* Prepare CL for removal of preferred pairs. When finished they are sorted
452 in order from most important coalesce to least important. */
454 static void
455 sort_coalesce_list (coalesce_list *cl, ssa_conflicts *conflicts, var_map map)
457 unsigned x, num;
458 coalesce_pair *p;
459 coalesce_iterator_type ppi;
461 gcc_assert (cl->sorted == NULL);
463 num = num_coalesce_pairs (cl);
464 cl->num_sorted = num;
465 if (num == 0)
466 return;
468 /* Allocate a vector for the pair pointers. */
469 cl->sorted = XNEWVEC (coalesce_pair *, num);
471 /* Populate the vector with pointers to the pairs. */
472 x = 0;
473 FOR_EACH_PARTITION_PAIR (p, ppi, cl)
474 cl->sorted[x++] = p;
475 gcc_assert (x == num);
477 /* Already sorted. */
478 if (num == 1)
479 return;
481 /* We don't want to depend on qsort_r, so we have to stuff away
482 additional data into globals so it can be referenced in
483 compare_pairs. */
484 conflicts_ = conflicts;
485 map_ = map;
486 qsort (cl->sorted, num, sizeof (coalesce_pair *), compare_pairs);
487 conflicts_ = NULL;
488 map_ = NULL;
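/* cl->sorted is now in ascending order of cost; pop_best_coalesce pops from
the end of the vector, so the most expensive pairs are attempted first. */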
492 /* Send debug info for coalesce list CL to file F. */
494 static void
495 dump_coalesce_list (FILE *f, coalesce_list *cl)
497 coalesce_pair *node;
498 coalesce_iterator_type ppi;
500 int x;
501 tree var;
503 if (cl->sorted == NULL)
505 fprintf (f, "Coalesce List:\n");
506 FOR_EACH_PARTITION_PAIR (node, ppi, cl)
508 tree var1 = ssa_name (node->first_element);
509 tree var2 = ssa_name (node->second_element);
510 print_generic_expr (f, var1, TDF_SLIM);
511 fprintf (f, " <-> ");
512 print_generic_expr (f, var2, TDF_SLIM);
513 fprintf (f, " (%1d, %1d), ", node->cost, node->conflict_count);
514 fprintf (f, "\n");
517 else
519 fprintf (f, "Sorted Coalesce list:\n");
520 for (x = cl->num_sorted - 1 ; x >=0; x--)
522 node = cl->sorted[x];
523 fprintf (f, "(%d, %d) ", node->cost, node->conflict_count);
524 var = ssa_name (node->first_element);
525 print_generic_expr (f, var, TDF_SLIM);
526 fprintf (f, " <-> ");
527 var = ssa_name (node->second_element);
528 print_generic_expr (f, var, TDF_SLIM);
529 fprintf (f, "\n");
535 /* Return an empty new conflict graph for SIZE elements. */
537 static inline ssa_conflicts *
538 ssa_conflicts_new (unsigned size)
540 ssa_conflicts *ptr;
542 ptr = XNEW (ssa_conflicts);
543 bitmap_obstack_initialize (&ptr->obstack);
544 ptr->conflicts.create (size);
545 ptr->conflicts.safe_grow_cleared (size, true);
546 return ptr;
550 /* Free storage for conflict graph PTR. */
552 static inline void
553 ssa_conflicts_delete (ssa_conflicts *ptr)
555 bitmap_obstack_release (&ptr->obstack);
556 ptr->conflicts.release ();
557 free (ptr);
561 /* Test if elements X and Y conflict in graph PTR. */
563 static inline bool
564 ssa_conflicts_test_p (ssa_conflicts *ptr, unsigned x, unsigned y)
566 bitmap bx = ptr->conflicts[x];
567 bitmap by = ptr->conflicts[y];
569 gcc_checking_assert (x != y);
571 if (bx)
572 /* Avoid the lookup if Y has no conflicts. */
573 return by ? bitmap_bit_p (bx, y) : false;
574 else
575 return false;
579 /* Add a conflict with Y to the bitmap for X in graph PTR. */
581 static inline void
582 ssa_conflicts_add_one (ssa_conflicts *ptr, unsigned x, unsigned y)
584 bitmap bx = ptr->conflicts[x];
585 /* If there are no conflicts yet, allocate the bitmap and set bit. */
586 if (! bx)
587 bx = ptr->conflicts[x] = BITMAP_ALLOC (&ptr->obstack);
588 bitmap_set_bit (bx, y);
592 /* Add conflicts between X and Y in graph PTR. */
594 static inline void
595 ssa_conflicts_add (ssa_conflicts *ptr, unsigned x, unsigned y)
597 gcc_checking_assert (x != y);
598 ssa_conflicts_add_one (ptr, x, y);
599 ssa_conflicts_add_one (ptr, y, x);
603 /* Merge all Y's conflict into X in graph PTR. */
605 static inline void
606 ssa_conflicts_merge (ssa_conflicts *ptr, unsigned x, unsigned y)
608 unsigned z;
609 bitmap_iterator bi;
610 bitmap bx = ptr->conflicts[x];
611 bitmap by = ptr->conflicts[y];
613 gcc_checking_assert (x != y);
614 if (! by)
615 return;
617 /* Add a conflict between X and every one Y has. If the bitmap doesn't
618 exist, then it has already been coalesced, and we don't need to add a
619 conflict. */
620 EXECUTE_IF_SET_IN_BITMAP (by, 0, z, bi)
622 bitmap bz = ptr->conflicts[z];
623 if (bz)
625 bool was_there = bitmap_clear_bit (bz, y);
626 gcc_checking_assert (was_there);
627 bitmap_set_bit (bz, x);
631 if (bx)
633 /* If X has conflicts, add Y's to X. */
634 bitmap_ior_into (bx, by);
635 BITMAP_FREE (by);
636 ptr->conflicts[y] = NULL;
638 else
640 /* If X has no conflicts, simply use Y's. */
641 ptr->conflicts[x] = by;
642 ptr->conflicts[y] = NULL;
649 /* Dump a conflict graph. */
649 static void
650 ssa_conflicts_dump (FILE *file, ssa_conflicts *ptr)
652 unsigned x;
653 bitmap b;
655 fprintf (file, "\nConflict graph:\n");
657 FOR_EACH_VEC_ELT (ptr->conflicts, x, b)
658 if (b)
660 fprintf (file, "%d: ", x);
661 dump_bitmap (file, b);
666 /* This structure is used to efficiently record the current status of live
667 SSA_NAMES when building a conflict graph.
668 LIVE_BASE_VAR has a bit set for each base variable which has at least one
669 ssa version live.
670 LIVE_BASE_PARTITIONS is an array of bitmaps using the basevar table as an
671 index, and is used to track what partitions of each base variable are
672 live. This makes it easy to add conflicts between just live partitions
673 with the same base variable.
674 The values in LIVE_BASE_PARTITIONS are only valid if the base variable is
675 marked as being live. This delays clearing of these bitmaps until
676 they are actually needed again. */
678 class live_track
680 public:
681 bitmap_obstack obstack; /* A place to allocate our bitmaps. */
682 bitmap_head live_base_var; /* Indicates if a basevar is live. */
683 bitmap_head *live_base_partitions; /* Live partitions for each basevar. */
684 var_map map; /* Var_map being used for partition mapping. */
688 /* This routine will create a new live track structure based on the partitions
689 in MAP. */
691 static live_track *
692 new_live_track (var_map map)
694 live_track *ptr;
695 int lim, x;
697 /* Make sure there is a partition view in place. */
698 gcc_assert (map->partition_to_base_index != NULL);
700 ptr = XNEW (live_track);
701 ptr->map = map;
702 lim = num_basevars (map);
703 bitmap_obstack_initialize (&ptr->obstack);
704 ptr->live_base_partitions = XNEWVEC (bitmap_head, lim);
705 bitmap_initialize (&ptr->live_base_var, &ptr->obstack);
706 for (x = 0; x < lim; x++)
707 bitmap_initialize (&ptr->live_base_partitions[x], &ptr->obstack);
708 return ptr;
712 /* This routine will free the memory associated with PTR. */
714 static void
715 delete_live_track (live_track *ptr)
717 bitmap_obstack_release (&ptr->obstack);
718 XDELETEVEC (ptr->live_base_partitions);
719 XDELETE (ptr);
723 /* This function will remove PARTITION from the live list in PTR. */
725 static inline void
726 live_track_remove_partition (live_track *ptr, int partition)
728 int root;
730 root = basevar_index (ptr->map, partition);
731 bitmap_clear_bit (&ptr->live_base_partitions[root], partition);
732 /* If the element list is empty, make the base variable not live either. */
733 if (bitmap_empty_p (&ptr->live_base_partitions[root]))
734 bitmap_clear_bit (&ptr->live_base_var, root);
738 /* This function will add PARTITION to the live list in PTR. */
740 static inline void
741 live_track_add_partition (live_track *ptr, int partition)
743 int root;
745 root = basevar_index (ptr->map, partition);
746 /* If this base var wasn't live before, it is now. Clear the element list
747 since it was delayed until needed. */
748 if (bitmap_set_bit (&ptr->live_base_var, root))
749 bitmap_clear (&ptr->live_base_partitions[root]);
750 bitmap_set_bit (&ptr->live_base_partitions[root], partition);
755 /* Clear the live bit for VAR in PTR. */
757 static inline void
758 live_track_clear_var (live_track *ptr, tree var)
760 int p;
762 p = var_to_partition (ptr->map, var);
763 if (p != NO_PARTITION)
764 live_track_remove_partition (ptr, p);
768 /* Return TRUE if VAR is live in PTR. */
770 static inline bool
771 live_track_live_p (live_track *ptr, tree var)
773 int p, root;
775 p = var_to_partition (ptr->map, var);
776 if (p != NO_PARTITION)
778 root = basevar_index (ptr->map, p);
779 if (bitmap_bit_p (&ptr->live_base_var, root))
780 return bitmap_bit_p (&ptr->live_base_partitions[root], p);
782 return false;
786 /* This routine will add USE to PTR. USE will be marked as live in both the
787 ssa live map and the live bitmap for the root of USE. */
789 static inline void
790 live_track_process_use (live_track *ptr, tree use)
792 int p;
794 p = var_to_partition (ptr->map, use);
795 if (p == NO_PARTITION)
796 return;
798 /* Mark as live in the appropriate live list. */
799 live_track_add_partition (ptr, p);
803 /* This routine will process a DEF in PTR. DEF will be removed from the live
804 lists, and if there are any other live partitions with the same base
805 variable, conflicts will be added to GRAPH. */
807 static inline void
808 live_track_process_def (live_track *ptr, tree def, ssa_conflicts *graph)
810 int p, root;
811 bitmap b;
812 unsigned x;
813 bitmap_iterator bi;
815 p = var_to_partition (ptr->map, def);
816 if (p == NO_PARTITION)
817 return;
819 /* Clear the liveness bit. */
820 live_track_remove_partition (ptr, p);
822 /* If the bitmap isn't empty now, conflicts need to be added. */
823 root = basevar_index (ptr->map, p);
824 if (bitmap_bit_p (&ptr->live_base_var, root))
826 b = &ptr->live_base_partitions[root];
827 EXECUTE_IF_SET_IN_BITMAP (b, 0, x, bi)
828 ssa_conflicts_add (graph, p, x);
833 /* Initialize PTR with the partitions set in INIT. */
835 static inline void
836 live_track_init (live_track *ptr, bitmap init)
838 unsigned p;
839 bitmap_iterator bi;
841 /* Mark all live on exit partitions. */
842 EXECUTE_IF_SET_IN_BITMAP (init, 0, p, bi)
843 live_track_add_partition (ptr, p);
847 /* This routine will clear all live partitions in PTR. */
849 static inline void
850 live_track_clear_base_vars (live_track *ptr)
852 /* Simply clear the live base list. Anything marked as live in the element
853 lists will be cleared later if/when the base variable ever comes alive
854 again. */
855 bitmap_clear (&ptr->live_base_var);
859 /* Build a conflict graph based on LIVEINFO. Any partition which is in the
860 partition view of the var_map that LIVEINFO is based on gets an entry in the
861 conflict graph. Only conflicts between ssa_name partitions with the same
862 base variable are added. */
864 static ssa_conflicts *
865 build_ssa_conflict_graph (tree_live_info_p liveinfo)
867 ssa_conflicts *graph;
868 var_map map;
869 basic_block bb;
870 ssa_op_iter iter;
871 live_track *live;
872 basic_block entry;
874 /* If inter-variable coalescing is enabled, we may attempt to
875 coalesce variables from different base variables, including
876 different parameters, so we have to make sure default defs live
877 at the entry block conflict with each other. */
878 if (flag_tree_coalesce_vars)
879 entry = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
880 else
881 entry = NULL;
883 map = live_var_map (liveinfo);
884 graph = ssa_conflicts_new (num_var_partitions (map));
886 live = new_live_track (map);
888 for (unsigned i = 0; liveinfo->map->vec_bbs.iterate (i, &bb); ++i)
890 /* Start with live on exit temporaries. */
891 live_track_init (live, live_on_exit (liveinfo, bb));
893 for (gimple_stmt_iterator gsi = gsi_last_bb (bb); !gsi_end_p (gsi);
894 gsi_prev (&gsi))
896 tree var;
897 gimple *stmt = gsi_stmt (gsi);
899 /* A copy between 2 partitions does not introduce an interference
900 by itself. If it did, you would never be able to coalesce
901 two things which are copied. If the two variables really do
902 conflict, they will conflict elsewhere in the program.
904 This is handled by simply removing the SRC of the copy from the
905 live list, and processing the stmt normally. */
906 if (is_gimple_assign (stmt))
908 tree lhs = gimple_assign_lhs (stmt);
909 tree rhs1 = gimple_assign_rhs1 (stmt);
910 if (gimple_assign_copy_p (stmt)
911 && TREE_CODE (lhs) == SSA_NAME
912 && TREE_CODE (rhs1) == SSA_NAME)
913 live_track_clear_var (live, rhs1);
915 else if (is_gimple_debug (stmt))
916 continue;
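/* During bitint lowering (map->bitint is non-NULL), the names being
lowered are tracked in map->bitint; let the lowering-specific helper
walk the stmt and invoke the usual def/use tracking callbacks. */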
918 if (map->bitint)
920 build_bitint_stmt_ssa_conflicts (stmt, live, graph, map->bitint,
921 live_track_process_def,
922 live_track_process_use);
923 continue;
926 /* For stmts with more than one SSA_NAME definition pretend all the
927 SSA_NAME outputs but the first one are live at this point, so
928 that conflicts are added in between all those even when they are
929 actually not really live after the asm, because expansion might
930 copy those into pseudos after the asm and if multiple outputs
931 share the same partition, it might overwrite those that should
932 be live. E.g.
933 asm volatile (".." : "=r" (a), "=r" (b) : "0" (a), "1" (a));
934 return a;
935 See PR70593. */
936 bool first = true;
937 FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_DEF)
938 if (first)
939 first = false;
940 else
941 live_track_process_use (live, var);
943 FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_DEF)
944 live_track_process_def (live, var, graph);
946 FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_USE)
947 live_track_process_use (live, var);
950 /* If result of a PHI is unused, looping over the statements will not
951 record any conflicts since the def was never live. Since the PHI node
952 is going to be translated out of SSA form, it will insert a copy.
953 There must be a conflict recorded between the result of the PHI and
954 any variables that are live. Otherwise the out-of-ssa translation
955 may create incorrect code. */
956 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
957 gsi_next (&gsi))
959 gphi *phi = gsi.phi ();
960 tree result = PHI_RESULT (phi);
961 if (virtual_operand_p (result))
962 continue;
963 if (live_track_live_p (live, result))
964 live_track_process_def (live, result, graph);
967 /* Pretend there are defs for params' default defs at the start
968 of the (post-)entry block. This will prevent PARM_DECLs from
969 coalescing into the same partition. Although RESULT_DECLs'
970 default defs don't have a useful initial value, we have to
971 prevent them from coalescing with PARM_DECLs' default defs
972 too, otherwise assign_parms would attempt to assign different
973 RTL to the same partition. */
974 if (bb == entry)
976 unsigned i;
977 tree var;
979 FOR_EACH_SSA_NAME (i, var, cfun)
981 if (!SSA_NAME_IS_DEFAULT_DEF (var)
982 || !SSA_NAME_VAR (var)
983 || VAR_P (SSA_NAME_VAR (var)))
984 continue;
986 live_track_process_def (live, var, graph);
987 /* Process a use too, so that it remains live and
988 conflicts with other parms' default defs, even unused
989 ones. */
990 live_track_process_use (live, var);
994 live_track_clear_base_vars (live);
997 delete_live_track (live);
998 return graph;
1001 /* Print a failure to coalesce a MUST_COALESCE pair X and Y. */
1003 static inline void
1004 fail_abnormal_edge_coalesce (int x, int y)
1006 fprintf (stderr, "\nUnable to coalesce ssa_names %d and %d",x, y);
1007 fprintf (stderr, " which are marked as MUST COALESCE.\n");
1008 print_generic_expr (stderr, ssa_name (x), TDF_SLIM);
1009 fprintf (stderr, " and ");
1010 print_generic_stmt (stderr, ssa_name (y), TDF_SLIM);
1012 internal_error ("SSA corruption");
1015 /* If VAR is an SSA_NAME associated with a PARM_DECL or a RESULT_DECL,
1016 and the DECL's default def is unused (i.e., it was introduced by
1017 create_default_def for out-of-ssa), mark VAR and the default def for
1018 coalescing. */
1020 static void
1021 coalesce_with_default (tree var, coalesce_list *cl, bitmap used_in_copy)
1023 if (SSA_NAME_IS_DEFAULT_DEF (var)
1024 || !SSA_NAME_VAR (var)
1025 || VAR_P (SSA_NAME_VAR (var)))
1026 return;
1028 tree ssa = ssa_default_def (cfun, SSA_NAME_VAR (var));
1029 if (!has_zero_uses (ssa))
1030 return;
1032 add_cost_one_coalesce (cl, SSA_NAME_VERSION (ssa), SSA_NAME_VERSION (var));
1033 bitmap_set_bit (used_in_copy, SSA_NAME_VERSION (var));
1034 /* Default defs will have their used_in_copy bits set at the beginning of
1035 populate_coalesce_list_for_outofssa. */
1039 /* Given var_map MAP for a region, this function creates and returns a coalesce
1040 list as well as recording related ssa names in USED_IN_COPIES for use later
1041 in the out-of-ssa or live range computation process. */
1043 static coalesce_list *
1044 create_coalesce_list_for_region (var_map map, bitmap used_in_copy)
1046 gimple_stmt_iterator gsi;
1047 basic_block bb;
1048 coalesce_list *cl = create_coalesce_list ();
1049 gimple *stmt;
1050 int v1, v2, cost;
1052 for (unsigned j = 0; map->vec_bbs.iterate (j, &bb); ++j)
1054 tree arg;
1056 for (gphi_iterator gpi = gsi_start_phis (bb);
1057 !gsi_end_p (gpi);
1058 gsi_next (&gpi))
1060 gphi *phi = gpi.phi ();
1061 size_t i;
1062 int ver;
1063 tree res;
1064 bool saw_copy = false;
1066 res = gimple_phi_result (phi);
1067 if (virtual_operand_p (res))
1068 continue;
1069 ver = SSA_NAME_VERSION (res);
1070 if (map->bitint && !bitmap_bit_p (map->bitint, ver))
1071 continue;
1073 /* Register ssa_names and coalesces between the args and the result
1074 of the PHI. */
1075 for (i = 0; i < gimple_phi_num_args (phi); i++)
1077 edge e = gimple_phi_arg_edge (phi, i);
1078 arg = PHI_ARG_DEF (phi, i);
1079 if (TREE_CODE (arg) != SSA_NAME)
1080 continue;
1082 if (gimple_can_coalesce_p (arg, res)
1083 || (e->flags & EDGE_ABNORMAL))
1085 saw_copy = true;
1086 bitmap_set_bit (used_in_copy, SSA_NAME_VERSION (arg));
1087 if ((e->flags & EDGE_ABNORMAL) == 0)
1089 int cost = coalesce_cost_edge (e);
1090 if (cost == 1 && has_single_use (arg))
1091 add_cost_one_coalesce (cl, ver, SSA_NAME_VERSION (arg));
1092 else
1093 add_coalesce (cl, ver, SSA_NAME_VERSION (arg), cost);
1097 if (saw_copy)
1098 bitmap_set_bit (used_in_copy, ver);
1101 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1103 stmt = gsi_stmt (gsi);
1105 if (is_gimple_debug (stmt))
1106 continue;
1108 /* Check for copy coalesces. */
1109 switch (gimple_code (stmt))
1111 case GIMPLE_ASSIGN:
1113 tree lhs = gimple_assign_lhs (stmt);
1114 tree rhs1 = gimple_assign_rhs1 (stmt);
1115 if (gimple_assign_ssa_name_copy_p (stmt)
1116 && gimple_can_coalesce_p (lhs, rhs1))
1118 v1 = SSA_NAME_VERSION (lhs);
1119 v2 = SSA_NAME_VERSION (rhs1);
1120 if (map->bitint && !bitmap_bit_p (map->bitint, v1))
1121 break;
1122 cost = coalesce_cost_bb (bb);
1123 add_coalesce (cl, v1, v2, cost);
1124 bitmap_set_bit (used_in_copy, v1);
1125 bitmap_set_bit (used_in_copy, v2);
1128 break;
1130 case GIMPLE_RETURN:
1132 tree res = DECL_RESULT (current_function_decl);
1133 if (VOID_TYPE_P (TREE_TYPE (res))
1134 || !is_gimple_reg (res))
1135 break;
1136 tree rhs1 = gimple_return_retval (as_a <greturn *> (stmt));
1137 if (!rhs1)
1138 break;
1139 tree lhs = ssa_default_def (cfun, res);
1140 if (map->bitint && !lhs)
1141 break;
1142 gcc_assert (lhs);
1143 if (TREE_CODE (rhs1) == SSA_NAME
1144 && gimple_can_coalesce_p (lhs, rhs1))
1146 v1 = SSA_NAME_VERSION (lhs);
1147 v2 = SSA_NAME_VERSION (rhs1);
1148 if (map->bitint && !bitmap_bit_p (map->bitint, v1))
1149 break;
1150 cost = coalesce_cost_bb (bb);
1151 add_coalesce (cl, v1, v2, cost);
1152 bitmap_set_bit (used_in_copy, v1);
1153 bitmap_set_bit (used_in_copy, v2);
1155 break;
1158 case GIMPLE_ASM:
1160 gasm *asm_stmt = as_a <gasm *> (stmt);
1161 unsigned long noutputs, i;
1162 unsigned long ninputs;
1163 tree *outputs, link;
1164 noutputs = gimple_asm_noutputs (asm_stmt);
1165 ninputs = gimple_asm_ninputs (asm_stmt);
1166 outputs = (tree *) alloca (noutputs * sizeof (tree));
1167 for (i = 0; i < noutputs; ++i)
1169 link = gimple_asm_output_op (asm_stmt, i);
1170 outputs[i] = TREE_VALUE (link);
1173 for (i = 0; i < ninputs; ++i)
1175 const char *constraint;
1176 tree input;
1177 char *end;
1178 unsigned long match;
1180 link = gimple_asm_input_op (asm_stmt, i);
1181 constraint
1182 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1183 input = TREE_VALUE (link);
1185 if (TREE_CODE (input) != SSA_NAME)
1186 continue;
1188 match = strtoul (constraint, &end, 10);
1189 if (match >= noutputs || end == constraint)
1190 continue;
1192 if (TREE_CODE (outputs[match]) != SSA_NAME)
1193 continue;
1195 v1 = SSA_NAME_VERSION (outputs[match]);
1196 v2 = SSA_NAME_VERSION (input);
1197 if (map->bitint && !bitmap_bit_p (map->bitint, v1))
1198 continue;
1200 if (gimple_can_coalesce_p (outputs[match], input))
1202 cost = coalesce_cost (REG_BR_PROB_BASE,
1203 optimize_bb_for_size_p (bb));
1204 add_coalesce (cl, v1, v2, cost);
1205 bitmap_set_bit (used_in_copy, v1);
1206 bitmap_set_bit (used_in_copy, v2);
1209 break;
1212 default:
1213 break;
1218 return cl;
1222 /* Hashtable support for storing SSA names hashed by their SSA_NAME_VAR. */
1224 struct ssa_name_var_hash : nofree_ptr_hash <tree_node>
1226 static inline hashval_t hash (const tree_node *);
1227 static inline int equal (const tree_node *, const tree_node *);
1230 inline hashval_t
1231 ssa_name_var_hash::hash (const_tree n)
1233 return DECL_UID (SSA_NAME_VAR (n));
1236 inline int
1237 ssa_name_var_hash::equal (const tree_node *n1, const tree_node *n2)
1239 return SSA_NAME_VAR (n1) == SSA_NAME_VAR (n2);
1243 /* This function populates coalesce list CL as well as recording related ssa
1244 names in USED_IN_COPIES for use later in the out-of-ssa process. */
1246 static void
1247 populate_coalesce_list_for_outofssa (coalesce_list *cl, bitmap used_in_copy)
1249 tree var;
1250 tree first;
1251 int v1, v2, cost;
1252 unsigned i;
1254 /* Process result decls and live on entry variables for entry into the
1255 coalesce list. */
1256 first = NULL_TREE;
1257 FOR_EACH_SSA_NAME (i, var, cfun)
1259 if (!virtual_operand_p (var))
1261 coalesce_with_default (var, cl, used_in_copy);
1263 /* Add coalesces between all the result decls. */
1264 if (SSA_NAME_VAR (var)
1265 && TREE_CODE (SSA_NAME_VAR (var)) == RESULT_DECL)
1267 bitmap_set_bit (used_in_copy, SSA_NAME_VERSION (var));
1268 if (first == NULL_TREE)
1269 first = var;
1270 else
1272 gcc_assert (gimple_can_coalesce_p (var, first));
1273 v1 = SSA_NAME_VERSION (first);
1274 v2 = SSA_NAME_VERSION (var);
1275 cost = coalesce_cost_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
1276 add_coalesce (cl, v1, v2, cost);
1279 /* Mark any default_def variables as being in the coalesce list
1280 since they will have to be coalesced with the base variable. If
1281 not marked as present, they won't be in the coalesce view. */
1282 if (SSA_NAME_IS_DEFAULT_DEF (var)
1283 && (!has_zero_uses (var)
1284 || (SSA_NAME_VAR (var)
1285 && !VAR_P (SSA_NAME_VAR (var)))))
1286 bitmap_set_bit (used_in_copy, SSA_NAME_VERSION (var));
1290 /* If this optimization is disabled, we need to coalesce all the
1291 names originating from the same SSA_NAME_VAR so debug info
1292 remains undisturbed. */
1293 if (!flag_tree_coalesce_vars)
1295 tree a;
1296 hash_table<ssa_name_var_hash> ssa_name_hash (10);
1298 FOR_EACH_SSA_NAME (i, a, cfun)
1300 if (SSA_NAME_VAR (a)
1301 && !DECL_IGNORED_P (SSA_NAME_VAR (a))
1302 && (!has_zero_uses (a) || !SSA_NAME_IS_DEFAULT_DEF (a)
1303 || !VAR_P (SSA_NAME_VAR (a))))
1305 tree *slot = ssa_name_hash.find_slot (a, INSERT);
1307 if (!*slot)
1308 *slot = a;
1309 else
1311 /* If the variable is a PARM_DECL or a RESULT_DECL, we
1312 _require_ that all the names originating from it be
1313 coalesced, because there must be a single partition
1314 containing all the names so that it can be assigned
1315 the canonical RTL location of the DECL safely.
1316 If in_lto_p, a function could have been compiled
1317 originally with optimizations and only the link
1318 performed at -O0, so we can't actually require it. */
1319 const int cost
1320 = (VAR_P (SSA_NAME_VAR (a)) || in_lto_p)
1321 ? MUST_COALESCE_COST - 1 : MUST_COALESCE_COST;
1322 add_coalesce (cl, SSA_NAME_VERSION (a),
1323 SSA_NAME_VERSION (*slot), cost);
1324 bitmap_set_bit (used_in_copy, SSA_NAME_VERSION (a));
1325 bitmap_set_bit (used_in_copy, SSA_NAME_VERSION (*slot));
1333 /* Attempt to coalesce ssa versions X and Y together using the partition
1334 mapping in MAP and checking conflicts in GRAPH. Output any debug info to
1335 DEBUG, if it is non-NULL. */
1337 static inline bool
1338 attempt_coalesce (var_map map, ssa_conflicts *graph, int x, int y,
1339 FILE *debug)
1341 int z;
1342 tree var1, var2;
1343 int p1, p2;
1345 p1 = var_to_partition (map, ssa_name (x));
1346 p2 = var_to_partition (map, ssa_name (y));
1348 if (debug)
1350 fprintf (debug, "(%d)", x);
1351 print_generic_expr (debug, partition_to_var (map, p1), TDF_SLIM);
1352 fprintf (debug, " & (%d)", y);
1353 print_generic_expr (debug, partition_to_var (map, p2), TDF_SLIM);
1356 if (p1 == p2)
1358 if (debug)
1359 fprintf (debug, ": Already Coalesced.\n");
1360 return true;
1363 if (debug)
1364 fprintf (debug, " [map: %d, %d] ", p1, p2);
1367 if (!ssa_conflicts_test_p (graph, p1, p2))
1369 var1 = partition_to_var (map, p1);
1370 var2 = partition_to_var (map, p2);
1372 z = var_union (map, var1, var2);
1373 if (z == NO_PARTITION)
1375 if (debug)
1376 fprintf (debug, ": Unable to perform partition union.\n");
1377 return false;
1380 /* z is the new combined partition. Remove the other partition from
1381 the list, and merge the conflicts. */
1382 if (z == p1)
1383 ssa_conflicts_merge (graph, p1, p2);
1384 else
1385 ssa_conflicts_merge (graph, p2, p1);
1387 if (debug)
1388 fprintf (debug, ": Success -> %d\n", z);
1390 return true;
1393 if (debug)
1394 fprintf (debug, ": Fail due to conflict\n");
1396 return false;
1400 /* Attempt to coalesce partitions in MAP which occur in the list CL using
1401 GRAPH. Debug output is sent to DEBUG if it is non-NULL. */
1403 static void
1404 coalesce_partitions (var_map map, ssa_conflicts *graph, coalesce_list *cl,
1405 FILE *debug)
1407 int x = 0, y = 0;
1408 tree var1, var2;
1409 int cost;
1410 basic_block bb;
1411 edge e;
1412 edge_iterator ei;
1414 /* First, coalesce all the copies across abnormal edges. These are not placed
1415 in the coalesce list because they do not need to be sorted, and simply
1416 consume extra memory/compilation time in large programs. */
1418 FOR_EACH_BB_FN (bb, cfun)
1420 FOR_EACH_EDGE (e, ei, bb->preds)
1421 if (e->flags & EDGE_ABNORMAL)
1423 gphi_iterator gsi;
1424 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
1425 gsi_next (&gsi))
1427 gphi *phi = gsi.phi ();
1428 tree res = PHI_RESULT (phi);
1429 if (virtual_operand_p (res))
1430 continue;
1431 tree arg = PHI_ARG_DEF (phi, e->dest_idx);
1432 if (SSA_NAME_IS_DEFAULT_DEF (arg)
1433 && (!SSA_NAME_VAR (arg)
1434 || TREE_CODE (SSA_NAME_VAR (arg)) != PARM_DECL))
1435 continue;
1437 int v1 = SSA_NAME_VERSION (res);
1438 int v2 = SSA_NAME_VERSION (arg);
1440 if (debug)
1441 fprintf (debug, "Abnormal coalesce: ");
1443 if (!attempt_coalesce (map, graph, v1, v2, debug))
1444 fail_abnormal_edge_coalesce (v1, v2);
1449 /* Now process the items in the coalesce list. */
1451 while ((cost = pop_best_coalesce (cl, &x, &y)) != NO_BEST_COALESCE)
1453 var1 = ssa_name (x);
1454 var2 = ssa_name (y);
1456 /* Assert the coalesces have the same base variable. */
1457 gcc_assert (gimple_can_coalesce_p (var1, var2));
1459 if (debug)
1460 fprintf (debug, "Coalesce list: ");
1461 attempt_coalesce (map, graph, x, y, debug);
1466 /* Output partition map MAP with coalescing plan PART to file F. */
1468 void
1469 dump_part_var_map (FILE *f, partition part, var_map map)
1471 int t;
1472 unsigned x, y;
1473 int p;
1475 fprintf (f, "\nCoalescible Partition map \n\n");
1477 for (x = 0; x < map->num_partitions; x++)
1479 if (map->view_to_partition != NULL)
1480 p = map->view_to_partition[x];
1481 else
1482 p = x;
1484 if (ssa_name (p) == NULL_TREE
1485 || virtual_operand_p (ssa_name (p)))
1486 continue;
1488 t = 0;
1489 for (y = 1; y < num_ssa_names; y++)
1491 tree var = version_to_var (map, y);
1492 if (!var)
1493 continue;
1494 int q = var_to_partition (map, var);
1495 p = partition_find (part, q);
1496 gcc_assert (map->partition_to_base_index[q]
1497 == map->partition_to_base_index[p]);
1499 if (p == (int)x)
1501 if (t++ == 0)
1503 fprintf (f, "Partition %d, base %d (", x,
1504 map->partition_to_base_index[q]);
1505 print_generic_expr (f, partition_to_var (map, q), TDF_SLIM);
1506 fprintf (f, " - ");
1508 fprintf (f, "%d ", y);
1511 if (t != 0)
1512 fprintf (f, ")\n");
1514 fprintf (f, "\n");
1517 /* Given SSA_NAMEs NAME1 and NAME2, return true if they are candidates for
1518 coalescing together, false otherwise.
1520 This must stay consistent with compute_samebase_partition_bases and
1521 compute_optimized_partition_bases. */
1523 bool
1524 gimple_can_coalesce_p (tree name1, tree name2)
1526 /* First check the SSA_NAME's associated DECL. Without
1527 optimization, we only want to coalesce if they have the same DECL
1528 or both have no associated DECL. */
1529 tree var1 = SSA_NAME_VAR (name1);
1530 tree var2 = SSA_NAME_VAR (name2);
1531 var1 = (var1 && (!VAR_P (var1) || !DECL_IGNORED_P (var1))) ? var1 : NULL_TREE;
1532 var2 = (var2 && (!VAR_P (var2) || !DECL_IGNORED_P (var2))) ? var2 : NULL_TREE;
1533 if (var1 != var2 && !flag_tree_coalesce_vars)
1534 return false;
1536 /* Now check the types. If the types are the same, then we should
1537 try to coalesce V1 and V2. */
1538 tree t1 = TREE_TYPE (name1);
1539 tree t2 = TREE_TYPE (name2);
1540 if (t1 == t2)
1542 check_modes:
1543 /* If the base variables are the same, we're good: none of the
1544 other tests below could possibly fail. */
1545 var1 = SSA_NAME_VAR (name1);
1546 var2 = SSA_NAME_VAR (name2);
1547 if (var1 == var2)
1548 return true;
1550 /* We don't want to coalesce two SSA names if one of the base
1551 variables is supposed to be a register while the other is
1552 supposed to be on the stack. Anonymous SSA names most often
1553 take registers, but when not optimizing, user variables
1554 should go on the stack, so coalescing them with the anonymous
1555 variable as the partition leader would end up assigning the
1556 user variable to a register. Don't do that! */
1557 bool reg1 = use_register_for_decl (name1);
1558 bool reg2 = use_register_for_decl (name2);
1559 if (reg1 != reg2)
1560 return false;
1562 /* Check that the promoted modes and unsignedness are the same.
1563 We don't want to coalesce if the promoted modes would be
1564 different, or if they would sign-extend differently. Only
1565 PARM_DECLs and RESULT_DECLs have different promotion rules,
1566 so skip the test if both are variables, or both are anonymous
1567 SSA_NAMEs. */
1568 int unsigned1, unsigned2;
1569 return ((!var1 || VAR_P (var1)) && (!var2 || VAR_P (var2)))
1570 || ((promote_ssa_mode (name1, &unsigned1)
1571 == promote_ssa_mode (name2, &unsigned2))
1572 && unsigned1 == unsigned2);
1575 /* If alignment requirements are different, we can't coalesce. */
1576 if (MINIMUM_ALIGNMENT (t1,
1577 var1 ? DECL_MODE (var1) : TYPE_MODE (t1),
1578 var1 ? LOCAL_DECL_ALIGNMENT (var1) : TYPE_ALIGN (t1))
1579 != MINIMUM_ALIGNMENT (t2,
1580 var2 ? DECL_MODE (var2) : TYPE_MODE (t2),
1581 var2 ? LOCAL_DECL_ALIGNMENT (var2) : TYPE_ALIGN (t2)))
1582 return false;
1584 /* If the types are not the same, see whether they are compatible. This
1585 (for example) allows coalescing when the types are fundamentally the
1586 same, but just have different names. */
1587 if (types_compatible_p (t1, t2))
1588 goto check_modes;
1590 return false;
1593 /* Fill in MAP's partition_to_base_index, with one index for each
1594 partition of SSA names USED_IN_COPIES and related by CL coalesce
1595 possibilities. This must match gimple_can_coalesce_p in the
1596 optimized case. */
1598 static void
1599 compute_optimized_partition_bases (var_map map, bitmap used_in_copies,
1600 coalesce_list *cl)
1602 int parts = num_var_partitions (map);
1603 partition tentative = partition_new (parts);
1605 /* Partition the SSA versions so that, for each coalescible
1606 pair, both of its members are in the same partition in
1607 TENTATIVE. */
1608 gcc_assert (!cl->sorted);
1609 coalesce_pair *node;
1610 coalesce_iterator_type ppi;
1611 FOR_EACH_PARTITION_PAIR (node, ppi, cl)
1613 tree v1 = ssa_name (node->first_element);
1614 int p1 = partition_find (tentative, var_to_partition (map, v1));
1615 tree v2 = ssa_name (node->second_element);
1616 int p2 = partition_find (tentative, var_to_partition (map, v2));
1618 if (p1 == p2)
1619 continue;
1621 partition_union (tentative, p1, p2);
1624 /* We have to deal with cost one pairs too. */
1625 for (cost_one_pair *co = cl->cost_one_list; co; co = co->next)
1627 tree v1 = ssa_name (co->first_element);
1628 int p1 = partition_find (tentative, var_to_partition (map, v1));
1629 tree v2 = ssa_name (co->second_element);
1630 int p2 = partition_find (tentative, var_to_partition (map, v2));
1632 if (p1 == p2)
1633 continue;
1635 partition_union (tentative, p1, p2);
1638 /* And also with abnormal edges. */
1639 basic_block bb;
1640 edge e;
1641 unsigned i;
1642 edge_iterator ei;
1643 for (i = 0; map->vec_bbs.iterate (i, &bb); ++i)
1645 FOR_EACH_EDGE (e, ei, bb->preds)
1646 if (e->flags & EDGE_ABNORMAL)
1648 gphi_iterator gsi;
1649 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
1650 gsi_next (&gsi))
1652 gphi *phi = gsi.phi ();
1653 tree res = PHI_RESULT (phi);
1654 if (virtual_operand_p (res))
1655 continue;
1656 tree arg = PHI_ARG_DEF (phi, e->dest_idx);
1657 if (SSA_NAME_IS_DEFAULT_DEF (arg)
1658 && (!SSA_NAME_VAR (arg)
1659 || TREE_CODE (SSA_NAME_VAR (arg)) != PARM_DECL))
1660 continue;
1662 int p1 = partition_find (tentative, var_to_partition (map, res));
1663 int p2 = partition_find (tentative, var_to_partition (map, arg));
1665 if (p1 == p2)
1666 continue;
1668 partition_union (tentative, p1, p2);
1673 if (map->bitint
1674 && flag_tree_coalesce_vars
1675 && (optimize > 1 || parts < 500))
1676 for (i = 0; i < (unsigned) parts; ++i)
1678 tree s1 = partition_to_var (map, i);
1679 int p1 = partition_find (tentative, i);
1680 for (unsigned j = i + 1; j < (unsigned) parts; ++j)
1682 tree s2 = partition_to_var (map, j);
1683 if (s1 == s2)
1684 continue;
1685 if (tree_int_cst_equal (TYPE_SIZE (TREE_TYPE (s1)),
1686 TYPE_SIZE (TREE_TYPE (s2))))
1688 int p2 = partition_find (tentative, j);
1690 if (p1 == p2)
1691 continue;
1693 partition_union (tentative, p1, p2);
1694 if (partition_find (tentative, i) != p1)
1695 break;
1700 map->partition_to_base_index = XCNEWVEC (int, parts);
1701 auto_vec<unsigned int> index_map (parts);
1702 if (parts)
1703 index_map.quick_grow (parts);
1705 const unsigned no_part = -1;
1706 unsigned count = parts;
1707 while (count)
1708 index_map[--count] = no_part;
1710 /* Initialize MAP's mapping from partition to base index, using
1711 as base indices an enumeration of the TENTATIVE partitions in
1712 which each SSA version ended up, so that we compute conflicts
1713 between all SSA versions that ended up in the same potential
1714 coalesce partition. */
1715 bitmap_iterator bi;
1716 EXECUTE_IF_SET_IN_BITMAP (used_in_copies, 0, i, bi)
1718 int pidx = var_to_partition (map, ssa_name (i));
1719 int base = partition_find (tentative, pidx);
1720 if (index_map[base] != no_part)
1721 continue;
1722 index_map[base] = count++;
1725 map->num_basevars = count;
1727 EXECUTE_IF_SET_IN_BITMAP (used_in_copies, 0, i, bi)
1729 int pidx = var_to_partition (map, ssa_name (i));
1730 int base = partition_find (tentative, pidx);
1731 gcc_assert (index_map[base] < count);
1732 map->partition_to_base_index[pidx] = index_map[base];
1735 if (dump_file && (dump_flags & TDF_DETAILS))
1736 dump_part_var_map (dump_file, tentative, map);
1738 partition_delete (tentative);
1741 /* For the bitint lowering pass, try harder. Partitions which contain
1742 SSA_NAME default def of a PARM_DECL or have RESULT_DECL need to have
1743 compatible types because they will use that RESULT_DECL or PARM_DECL.
1744 Other partitions can have even incompatible _BitInt types, as long
1745 as they have the same size - those will use VAR_DECLs which are just
1746 arrays of the limbs. */
1748 static void
1749 coalesce_bitint (var_map map, ssa_conflicts *graph)
1751 unsigned n = num_var_partitions (map);
1752 if (optimize <= 1 && n > 500)
1753 return;
1755 bool try_same_size = false;
1756 FILE *debug_file = (dump_flags & TDF_DETAILS) ? dump_file : NULL;
1757 for (unsigned i = 0; i < n; ++i)
1759 tree s1 = partition_to_var (map, i);
1760 if ((unsigned) var_to_partition (map, s1) != i)
1761 continue;
1762 int v1 = SSA_NAME_VERSION (s1);
1763 for (unsigned j = i + 1; j < n; ++j)
1765 tree s2 = partition_to_var (map, j);
1766 if (s1 == s2 || (unsigned) var_to_partition (map, s2) != j)
1767 continue;
1768 if (!types_compatible_p (TREE_TYPE (s1), TREE_TYPE (s2)))
1770 if (!try_same_size
1771 && tree_int_cst_equal (TYPE_SIZE (TREE_TYPE (s1)),
1772 TYPE_SIZE (TREE_TYPE (s2))))
1773 try_same_size = true;
1774 continue;
1776 int v2 = SSA_NAME_VERSION (s2);
1777 if (attempt_coalesce (map, graph, v1, v2, debug_file)
1778 && partition_to_var (map, i) != s1)
1779 break;
1783 if (!try_same_size)
1784 return;
1786 unsigned i;
1787 bitmap_iterator bi;
1788 bitmap same_type = NULL;
1790 EXECUTE_IF_SET_IN_BITMAP (map->bitint, 0, i, bi)
1792 tree s = ssa_name (i);
1793 if (!SSA_NAME_VAR (s))
1794 continue;
1795 if (TREE_CODE (SSA_NAME_VAR (s)) != RESULT_DECL
1796 && (TREE_CODE (SSA_NAME_VAR (s)) != PARM_DECL
1797 || !SSA_NAME_IS_DEFAULT_DEF (s)))
1798 continue;
1799 if (same_type == NULL)
1800 same_type = BITMAP_ALLOC (NULL);
1801 int p = var_to_partition (map, s);
1802 bitmap_set_bit (same_type, p);
1805 for (i = 0; i < n; ++i)
1807 if (same_type && bitmap_bit_p (same_type, i))
1808 continue;
1809 tree s1 = partition_to_var (map, i);
1810 if ((unsigned) var_to_partition (map, s1) != i)
1811 continue;
1812 int v1 = SSA_NAME_VERSION (s1);
1813 for (unsigned j = i + 1; j < n; ++j)
1815 if (same_type && bitmap_bit_p (same_type, j))
1816 continue;
1818 tree s2 = partition_to_var (map, j);
1819 if (s1 == s2 || (unsigned) var_to_partition (map, s2) != j)
1820 continue;
1822 if (!tree_int_cst_equal (TYPE_SIZE (TREE_TYPE (s1)),
1823 TYPE_SIZE (TREE_TYPE (s2))))
1824 continue;
1826 int v2 = SSA_NAME_VERSION (s2);
1827 if (attempt_coalesce (map, graph, v1, v2, debug_file)
1828 && partition_to_var (map, i) != s1)
1829 break;
1833 BITMAP_FREE (same_type);
1836 /* Given an initial var_map MAP, coalesce variables and return a partition map
1837 with the resulting coalesce. Note that this function is called in either
1838 live range computation context or out-of-ssa context, indicated by MAP. */
1840 extern void
1841 coalesce_ssa_name (var_map map)
1843 tree_live_info_p liveinfo;
1844 ssa_conflicts *graph;
1845 coalesce_list *cl;
1846 auto_bitmap used_in_copies;
1848 bitmap_tree_view (used_in_copies);
1849 cl = create_coalesce_list_for_region (map, used_in_copies);
1850 if (map->outofssa_p)
1851 populate_coalesce_list_for_outofssa (cl, used_in_copies);
1852 bitmap_list_view (used_in_copies);
1853 if (map->bitint)
1854 bitmap_ior_into (used_in_copies, map->bitint);
1856 if (dump_file && (dump_flags & TDF_DETAILS))
1857 dump_var_map (dump_file, map);
1859 partition_view_bitmap (map, used_in_copies);
1861 compute_optimized_partition_bases (map, used_in_copies, cl);
1863 if (num_var_partitions (map) < 1)
1865 delete_coalesce_list (cl);
1866 return;
1869 if (dump_file && (dump_flags & TDF_DETAILS))
1870 dump_var_map (dump_file, map);
1872 liveinfo = calculate_live_ranges (map, false);
1874 if (dump_file && (dump_flags & TDF_DETAILS))
1875 dump_live_info (dump_file, liveinfo, LIVEDUMP_ENTRY);
1877 /* Build a conflict graph. */
1878 graph = build_ssa_conflict_graph (liveinfo);
1879 delete_tree_live_info (liveinfo);
1880 if (dump_file && (dump_flags & TDF_DETAILS))
1881 ssa_conflicts_dump (dump_file, graph);
1883 sort_coalesce_list (cl, graph, map);
1885 if (dump_file && (dump_flags & TDF_DETAILS))
1887 fprintf (dump_file, "\nAfter sorting:\n");
1888 dump_coalesce_list (dump_file, cl);
1891 /* First, coalesce all live on entry variables to their base variable.
1892 This will ensure the first use is coming from the correct location. */
1894 if (dump_file && (dump_flags & TDF_DETAILS))
1895 dump_var_map (dump_file, map);
1897 /* Now coalesce everything in the list. */
1898 coalesce_partitions (map, graph, cl,
1899 ((dump_flags & TDF_DETAILS) ? dump_file : NULL));
1901 delete_coalesce_list (cl);
1903 if (map->bitint && flag_tree_coalesce_vars)
1904 coalesce_bitint (map, graph);
1906 ssa_conflicts_delete (graph);