gcc/lra-assigns.c
1 /* Assign reload pseudos.
2 Copyright (C) 2010-2014 Free Software Foundation, Inc.
3 Contributed by Vladimir Makarov <vmakarov@redhat.com>.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
22 /* This file's main objective is to assign hard registers to reload
23 pseudos. It also tries to allocate hard registers to other
24 pseudos, but at a lower priority than the reload pseudos. The pass
25 does not transform the RTL.
27 We must allocate a hard register to every reload pseudo. We try to
28 increase the chances of finding a viable allocation by assigning
29 the pseudos in order of fewest available hard registers first. If
30 we still fail to find a hard register, we spill other (non-reload)
31 pseudos in order to make room.
33 find_hard_regno_for finds hard registers for allocation without
34 spilling. spill_for does the same with spilling. Both functions
35 use a cost model to determine the most profitable choice of hard
36 and spill registers.
38 Once we have finished allocating reload pseudos, we also try to
39 assign registers to other (non-reload) pseudos. This is useful if
40 hard registers were freed up by the spilling just described.
42 We try to assign hard registers by collecting pseudos into threads.
43 These threads contain reload and inheritance pseudos that are
44 connected by copies (move insns). Doing this improves the chances
45 of pseudos in the thread getting the same hard register and, as a
46 result, of allowing some move insns to be deleted.
48 When we assign a hard register to a pseudo, we decrease the cost of
49 using the same hard register for pseudos that are connected by
50 copies.
52 If two hard registers have the same frequency-derived cost, we
53 prefer hard registers with higher priorities. The mapping of
54 registers to priorities is controlled by the register_priority
55 target hook. For example, x86-64 has a few register priorities:
56 hard registers with and without REX prefixes have different
57 priorities. This permits us to generate smaller code as insns
58 without REX prefixes are shorter.
60 If several hard registers are still equally good for the
61 assignment, we choose the least used hard register. This is
62 called leveling and may be profitable for some targets.
64 Only insns containing pseudos whose allocation changed are
65 processed on the next constraint pass.
67 The pseudo live-ranges are used to find conflicting pseudos.
69 To understand the code, it is important to keep in mind that
70 inheritance, split, and reload pseudos created since the last
71 constraint pass have regno >= lra_constraint_new_regno_start.
72 Inheritance and split pseudos created on any pass are in the
73 corresponding bitmaps. Inheritance and split pseudos created
74 since the last constraint pass also have a corresponding
75 non-negative restore_regno. */
77 #include "config.h"
78 #include "system.h"
79 #include "coretypes.h"
80 #include "tm.h"
81 #include "hard-reg-set.h"
82 #include "rtl.h"
83 #include "rtl-error.h"
84 #include "tm_p.h"
85 #include "target.h"
86 #include "insn-config.h"
87 #include "recog.h"
88 #include "output.h"
89 #include "regs.h"
90 #include "function.h"
91 #include "expr.h"
92 #include "basic-block.h"
93 #include "except.h"
94 #include "df.h"
95 #include "ira.h"
96 #include "sparseset.h"
97 #include "params.h"
98 #include "lra-int.h"
100 /* Array containing corresponding values of function
101 lra_get_allocno_class. It is used to speed up the code. */
102 static enum reg_class *regno_allocno_class_array;
104 /* Information about the thread to which a pseudo belongs. A thread
105 is a set of connected reload and inheritance pseudos with the same
106 set of available hard registers. Lone registers belong to their own threads. */
107 struct regno_assign_info
109 /* First/next pseudo of the same thread. */
110 int first, next;
111 /* Frequency of the thread (execution frequency of only reload
112 pseudos in the thread when the thread contains a reload pseudo).
113 Defined only for the first thread pseudo. */
114 int freq;
117 /* Map regno to the corresponding regno assignment info. */
118 static struct regno_assign_info *regno_assign_info;
120 /* All inherited, subreg or optional pseudos created before last spill
121 sub-pass. Such pseudos are permitted to get memory instead of hard
122 regs. */
123 static bitmap_head non_reload_pseudos;
125 /* Process a pseudo copy with execution frequency COPY_FREQ connecting
126 REGNO1 and REGNO2 to form threads. */
127 static void
128 process_copy_to_form_thread (int regno1, int regno2, int copy_freq)
130 int last, regno1_first, regno2_first;
132 lra_assert (regno1 >= lra_constraint_new_regno_start
133 && regno2 >= lra_constraint_new_regno_start);
134 regno1_first = regno_assign_info[regno1].first;
135 regno2_first = regno_assign_info[regno2].first;
136 if (regno1_first != regno2_first)
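/* Merge the two threads: relabel every member of REGNO2's thread
   with REGNO1's thread head, splice REGNO2's list in right after
   the head of REGNO1's thread, and accumulate the frequency. */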
138 for (last = regno2_first;
139 regno_assign_info[last].next >= 0;
140 last = regno_assign_info[last].next)
141 regno_assign_info[last].first = regno1_first;
142 regno_assign_info[last].first = regno1_first;
143 regno_assign_info[last].next = regno_assign_info[regno1_first].next;
144 regno_assign_info[regno1_first].next = regno2_first;
145 regno_assign_info[regno1_first].freq
146 += regno_assign_info[regno2_first].freq;
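/* Each of the two pseudos presumably counts the copy insn in its
   frequency; since the copy is expected to become removable once they
   share a hard register, subtract its frequency for both so it does
   not inflate the thread frequency. */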
148 regno_assign_info[regno1_first].freq -= 2 * copy_freq;
149 lra_assert (regno_assign_info[regno1_first].freq >= 0);
152 /* Initialize REGNO_ASSIGN_INFO and form threads. */
153 static void
154 init_regno_assign_info (void)
156 int i, regno1, regno2, max_regno = max_reg_num ();
157 lra_copy_t cp;
159 regno_assign_info = XNEWVEC (struct regno_assign_info, max_regno);
160 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
162 regno_assign_info[i].first = i;
163 regno_assign_info[i].next = -1;
164 regno_assign_info[i].freq = lra_reg_info[i].freq;
166 /* Form the threads. */
167 for (i = 0; (cp = lra_get_copy (i)) != NULL; i++)
168 if ((regno1 = cp->regno1) >= lra_constraint_new_regno_start
169 && (regno2 = cp->regno2) >= lra_constraint_new_regno_start
170 && reg_renumber[regno1] < 0 && lra_reg_info[regno1].nrefs != 0
171 && reg_renumber[regno2] < 0 && lra_reg_info[regno2].nrefs != 0
172 && (ira_class_hard_regs_num[regno_allocno_class_array[regno1]]
173 == ira_class_hard_regs_num[regno_allocno_class_array[regno2]]))
174 process_copy_to_form_thread (regno1, regno2, cp->freq);
177 /* Free REGNO_ASSIGN_INFO. */
178 static void
179 finish_regno_assign_info (void)
181 free (regno_assign_info);
184 /* The function is used to sort *reload* and *inheritance* pseudos to
185 try to assign them hard registers. We always put pseudos from
186 the same thread nearby. */
187 static int
188 reload_pseudo_compare_func (const void *v1p, const void *v2p)
190 int r1 = *(const int *) v1p, r2 = *(const int *) v2p;
191 enum reg_class cl1 = regno_allocno_class_array[r1];
192 enum reg_class cl2 = regno_allocno_class_array[r2];
193 int diff;
195 lra_assert (r1 >= lra_constraint_new_regno_start
196 && r2 >= lra_constraint_new_regno_start);
198 /* Prefer to assign reload registers with smaller classes first to
199 guarantee assignment to all reload registers. */
200 if ((diff = (ira_class_hard_regs_num[cl1]
201 - ira_class_hard_regs_num[cl2])) != 0)
202 return diff;
203 if ((diff
204 = (ira_reg_class_max_nregs[cl2][lra_reg_info[r2].biggest_mode]
205 - ira_reg_class_max_nregs[cl1][lra_reg_info[r1].biggest_mode])) != 0
206 /* The code below executes rarely as nregs == 1 in most cases.
207 So we should not worry about using faster data structures to
208 check reload pseudos. */
209 && ! bitmap_bit_p (&non_reload_pseudos, r1)
210 && ! bitmap_bit_p (&non_reload_pseudos, r2))
211 return diff;
212 if ((diff = (regno_assign_info[regno_assign_info[r2].first].freq
213 - regno_assign_info[regno_assign_info[r1].first].freq)) != 0)
214 return diff;
215 /* Allocate bigger pseudos first to avoid register file
216 fragmentation. */
217 if ((diff
218 = (ira_reg_class_max_nregs[cl2][lra_reg_info[r2].biggest_mode]
219 - ira_reg_class_max_nregs[cl1][lra_reg_info[r1].biggest_mode])) != 0)
220 return diff;
221 /* Put pseudos from the thread nearby. */
222 if ((diff = regno_assign_info[r1].first - regno_assign_info[r2].first) != 0)
223 return diff;
224 /* If regs are equally good, sort by their numbers, so that the
225 results of qsort leave nothing to chance. */
226 return r1 - r2;
229 /* The function is used to sort *non-reload* pseudos to try to assign
230 them hard registers. The order calculation is simpler than in the
231 previous function and is based on the pseudo usage frequency. */
232 static int
233 pseudo_compare_func (const void *v1p, const void *v2p)
235 int r1 = *(const int *) v1p, r2 = *(const int *) v2p;
236 int diff;
238 /* Prefer to assign more frequently used registers first. */
239 if ((diff = lra_reg_info[r2].freq - lra_reg_info[r1].freq) != 0)
240 return diff;
242 /* If regs are equally good, sort by their numbers, so that the
243 results of qsort leave nothing to chance. */
244 return r1 - r2;
247 /* Array of size LRA_LIVE_MAX_POINT mapping a program point to the
248 pseudo live ranges with the given start point. We insert only live
249 ranges of pseudos interesting for assignment purposes. They are
250 reload pseudos and pseudos assigned to hard registers. */
251 static lra_live_range_t *start_point_ranges;
253 /* Used as a flag that a live range is not inserted in the start point
254 chain. */
255 static struct lra_live_range not_in_chain_mark;
257 /* Create and set up START_POINT_RANGES. */
258 static void
259 create_live_range_start_chains (void)
261 int i, max_regno;
262 lra_live_range_t r;
264 start_point_ranges = XCNEWVEC (lra_live_range_t, lra_live_max_point);
265 max_regno = max_reg_num ();
266 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
267 if (i >= lra_constraint_new_regno_start || reg_renumber[i] >= 0)
269 for (r = lra_reg_info[i].live_ranges; r != NULL; r = r->next)
271 r->start_next = start_point_ranges[r->start];
272 start_point_ranges[r->start] = r;
275 else
277 for (r = lra_reg_info[i].live_ranges; r != NULL; r = r->next)
278 r->start_next = &not_in_chain_mark;
282 /* Insert live ranges of pseudo REGNO into start chains if they are
283 not there yet. */
284 static void
285 insert_in_live_range_start_chain (int regno)
287 lra_live_range_t r = lra_reg_info[regno].live_ranges;
289 if (r->start_next != &not_in_chain_mark)
290 return;
291 for (; r != NULL; r = r->next)
293 r->start_next = start_point_ranges[r->start];
294 start_point_ranges[r->start] = r;
298 /* Free START_POINT_RANGES. */
299 static void
300 finish_live_range_start_chains (void)
302 gcc_assert (start_point_ranges != NULL);
303 free (start_point_ranges);
304 start_point_ranges = NULL;
307 /* Map: program point -> bitmap of all pseudos living at the point and
308 assigned to hard registers. */
309 static bitmap_head *live_hard_reg_pseudos;
310 static bitmap_obstack live_hard_reg_pseudos_bitmap_obstack;
312 /* reg_renumber corresponding to pseudos marked in
313 live_hard_reg_pseudos. reg_renumber might not match
314 live_hard_reg_pseudos, but live_pseudos_reg_renumber always
315 reflects live_hard_reg_pseudos. */
316 static int *live_pseudos_reg_renumber;
318 /* Sparseset used to calculate living hard reg pseudos for some program
319 point range. */
320 static sparseset live_range_hard_reg_pseudos;
322 /* Sparseset used to calculate living reload/inheritance pseudos for
323 some program point range. */
324 static sparseset live_range_reload_inheritance_pseudos;
326 /* Allocate and initialize the data about living pseudos at program
327 points. */
328 static void
329 init_lives (void)
331 int i, max_regno = max_reg_num ();
333 live_range_hard_reg_pseudos = sparseset_alloc (max_regno);
334 live_range_reload_inheritance_pseudos = sparseset_alloc (max_regno);
335 live_hard_reg_pseudos = XNEWVEC (bitmap_head, lra_live_max_point);
336 bitmap_obstack_initialize (&live_hard_reg_pseudos_bitmap_obstack);
337 for (i = 0; i < lra_live_max_point; i++)
338 bitmap_initialize (&live_hard_reg_pseudos[i],
339 &live_hard_reg_pseudos_bitmap_obstack);
340 live_pseudos_reg_renumber = XNEWVEC (int, max_regno);
341 for (i = 0; i < max_regno; i++)
342 live_pseudos_reg_renumber[i] = -1;
345 /* Free the data about living pseudos at program points. */
346 static void
347 finish_lives (void)
349 sparseset_free (live_range_hard_reg_pseudos);
350 sparseset_free (live_range_reload_inheritance_pseudos);
351 free (live_hard_reg_pseudos);
352 bitmap_obstack_release (&live_hard_reg_pseudos_bitmap_obstack);
353 free (live_pseudos_reg_renumber);
356 /* Update the LIVE_HARD_REG_PSEUDOS and LIVE_PSEUDOS_REG_RENUMBER
357 entries for pseudo REGNO. Assume that the register has been
358 spilled if FREE_P, otherwise assume that it has been assigned
359 reg_renumber[REGNO] (if >= 0). We also insert the pseudo live
360 ranges in the start chains when it is assumed to be assigned to a
361 hard register because we use the chains of pseudos assigned to hard
362 registers during allocation. */
363 static void
364 update_lives (int regno, bool free_p)
366 int p;
367 lra_live_range_t r;
369 if (reg_renumber[regno] < 0)
370 return;
371 live_pseudos_reg_renumber[regno] = free_p ? -1 : reg_renumber[regno];
372 for (r = lra_reg_info[regno].live_ranges; r != NULL; r = r->next)
374 for (p = r->start; p <= r->finish; p++)
375 if (free_p)
376 bitmap_clear_bit (&live_hard_reg_pseudos[p], regno);
377 else
379 bitmap_set_bit (&live_hard_reg_pseudos[p], regno);
380 insert_in_live_range_start_chain (regno);
385 /* Sparseset used to calculate reload pseudos conflicting with a given
386 pseudo when we are trying to find a hard register for the given
387 pseudo. */
388 static sparseset conflict_reload_and_inheritance_pseudos;
390 /* Map: program point -> bitmap of all reload and inheritance pseudos
391 living at the point. */
392 static bitmap_head *live_reload_and_inheritance_pseudos;
393 static bitmap_obstack live_reload_and_inheritance_pseudos_bitmap_obstack;
395 /* Allocate and initialize data about living reload pseudos at any
396 given program point. */
397 static void
398 init_live_reload_and_inheritance_pseudos (void)
400 int i, p, max_regno = max_reg_num ();
401 lra_live_range_t r;
403 conflict_reload_and_inheritance_pseudos = sparseset_alloc (max_regno);
404 live_reload_and_inheritance_pseudos = XNEWVEC (bitmap_head, lra_live_max_point);
405 bitmap_obstack_initialize (&live_reload_and_inheritance_pseudos_bitmap_obstack);
406 for (p = 0; p < lra_live_max_point; p++)
407 bitmap_initialize (&live_reload_and_inheritance_pseudos[p],
408 &live_reload_and_inheritance_pseudos_bitmap_obstack);
409 for (i = lra_constraint_new_regno_start; i < max_regno; i++)
411 for (r = lra_reg_info[i].live_ranges; r != NULL; r = r->next)
412 for (p = r->start; p <= r->finish; p++)
413 bitmap_set_bit (&live_reload_and_inheritance_pseudos[p], i);
417 /* Finalize data about living reload pseudos at any given program
418 point. */
419 static void
420 finish_live_reload_and_inheritance_pseudos (void)
422 sparseset_free (conflict_reload_and_inheritance_pseudos);
423 free (live_reload_and_inheritance_pseudos);
424 bitmap_obstack_release (&live_reload_and_inheritance_pseudos_bitmap_obstack);
427 /* The value used to check that the cost of a given hard reg is
428 really defined currently. */
429 static int curr_hard_regno_costs_check = 0;
430 /* Array used to check that cost of the corresponding hard reg (the
431 array element index) is really defined currently. */
432 static int hard_regno_costs_check[FIRST_PSEUDO_REGISTER];
433 /* The current costs of allocation of hard regs. Defined only if the
434 value of the corresponding element of the previous array is equal to
435 CURR_HARD_REGNO_COSTS_CHECK. */
436 static int hard_regno_costs[FIRST_PSEUDO_REGISTER];
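/* Together these act as a generation counter: incrementing
   curr_hard_regno_costs_check lazily invalidates all entries of
   hard_regno_costs without clearing the array. */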
438 /* Adjust cost of HARD_REGNO by INCR. Reset the cost first if it is
439 not defined yet. */
440 static inline void
441 adjust_hard_regno_cost (int hard_regno, int incr)
443 if (hard_regno_costs_check[hard_regno] != curr_hard_regno_costs_check)
444 hard_regno_costs[hard_regno] = 0;
445 hard_regno_costs_check[hard_regno] = curr_hard_regno_costs_check;
446 hard_regno_costs[hard_regno] += incr;
449 /* Try to find a free hard register for pseudo REGNO. Return the
450 hard register on success and set *COST to the cost of using
451 that register. (If several registers have equal cost, the one with
452 the highest priority wins.) Return -1 on failure.
454 If TRY_ONLY_HARD_REGNO >= 0, consider only that hard register,
455 otherwise consider all hard registers in REGNO's class. */
456 static int
457 find_hard_regno_for (int regno, int *cost, int try_only_hard_regno)
459 HARD_REG_SET conflict_set;
460 int best_cost = INT_MAX, best_priority = INT_MIN, best_usage = INT_MAX;
461 lra_live_range_t r;
462 int p, i, j, rclass_size, best_hard_regno, priority, hard_regno;
463 int hr, conflict_hr, nregs;
464 enum machine_mode biggest_mode;
465 unsigned int k, conflict_regno;
466 int offset, val, biggest_nregs, nregs_diff;
467 enum reg_class rclass;
468 bitmap_iterator bi;
469 bool *rclass_intersect_p;
470 HARD_REG_SET impossible_start_hard_regs;
472 COPY_HARD_REG_SET (conflict_set, lra_no_alloc_regs);
473 rclass = regno_allocno_class_array[regno];
474 rclass_intersect_p = ira_reg_classes_intersect_p[rclass];
475 curr_hard_regno_costs_check++;
476 sparseset_clear (conflict_reload_and_inheritance_pseudos);
477 sparseset_clear (live_range_hard_reg_pseudos);
478 IOR_HARD_REG_SET (conflict_set, lra_reg_info[regno].conflict_hard_regs);
479 biggest_mode = lra_reg_info[regno].biggest_mode;
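/* Collect, over REGNO's live ranges, the live pseudos already assigned
   to hard registers, and the live unassigned reload/inheritance
   pseudos that have a hard register preference, restricting both to
   classes intersecting RCLASS. */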
480 for (r = lra_reg_info[regno].live_ranges; r != NULL; r = r->next)
482 EXECUTE_IF_SET_IN_BITMAP (&live_hard_reg_pseudos[r->start], 0, k, bi)
483 if (rclass_intersect_p[regno_allocno_class_array[k]])
484 sparseset_set_bit (live_range_hard_reg_pseudos, k);
485 EXECUTE_IF_SET_IN_BITMAP (&live_reload_and_inheritance_pseudos[r->start],
486 0, k, bi)
487 if (lra_reg_info[k].preferred_hard_regno1 >= 0
488 && live_pseudos_reg_renumber[k] < 0
489 && rclass_intersect_p[regno_allocno_class_array[k]])
490 sparseset_set_bit (conflict_reload_and_inheritance_pseudos, k);
491 for (p = r->start + 1; p <= r->finish; p++)
493 lra_live_range_t r2;
495 for (r2 = start_point_ranges[p];
496 r2 != NULL;
497 r2 = r2->start_next)
499 if (r2->regno >= lra_constraint_new_regno_start
500 && lra_reg_info[r2->regno].preferred_hard_regno1 >= 0
501 && live_pseudos_reg_renumber[r2->regno] < 0
502 && rclass_intersect_p[regno_allocno_class_array[r2->regno]])
503 sparseset_set_bit (conflict_reload_and_inheritance_pseudos,
504 r2->regno);
505 if (live_pseudos_reg_renumber[r2->regno] >= 0
506 && rclass_intersect_p[regno_allocno_class_array[r2->regno]])
507 sparseset_set_bit (live_range_hard_reg_pseudos, r2->regno);
511 if ((hard_regno = lra_reg_info[regno].preferred_hard_regno1) >= 0)
513 adjust_hard_regno_cost
514 (hard_regno, -lra_reg_info[regno].preferred_hard_regno_profit1);
515 if ((hard_regno = lra_reg_info[regno].preferred_hard_regno2) >= 0)
516 adjust_hard_regno_cost
517 (hard_regno, -lra_reg_info[regno].preferred_hard_regno_profit2);
519 #ifdef STACK_REGS
520 if (lra_reg_info[regno].no_stack_p)
521 for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
522 SET_HARD_REG_BIT (conflict_set, i);
523 #endif
524 sparseset_clear_bit (conflict_reload_and_inheritance_pseudos, regno);
525 val = lra_reg_info[regno].val;
526 offset = lra_reg_info[regno].offset;
527 CLEAR_HARD_REG_SET (impossible_start_hard_regs);
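/* Live pseudos holding the same value as REGNO (equal val/offset) may
   share its hard register, but only when they start on the same hard
   register; all other live hard-register pseudos are real conflicts. */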
528 EXECUTE_IF_SET_IN_SPARSESET (live_range_hard_reg_pseudos, conflict_regno)
529 if (lra_reg_val_equal_p (conflict_regno, val, offset))
531 conflict_hr = live_pseudos_reg_renumber[conflict_regno];
532 nregs = (hard_regno_nregs[conflict_hr]
533 [lra_reg_info[conflict_regno].biggest_mode]);
534 /* Remember about multi-register pseudos. For example, two
535 2-hard-register pseudos can start on the same hard register
536 but cannot start on HR and HR+1/HR-1. */
537 for (hr = conflict_hr + 1;
538 hr < FIRST_PSEUDO_REGISTER && hr < conflict_hr + nregs;
539 hr++)
540 SET_HARD_REG_BIT (impossible_start_hard_regs, hr);
541 for (hr = conflict_hr - 1;
542 hr >= 0 && hr + hard_regno_nregs[hr][biggest_mode] > conflict_hr;
543 hr--)
544 SET_HARD_REG_BIT (impossible_start_hard_regs, hr);
546 else
548 add_to_hard_reg_set (&conflict_set,
549 lra_reg_info[conflict_regno].biggest_mode,
550 live_pseudos_reg_renumber[conflict_regno]);
551 if (hard_reg_set_subset_p (reg_class_contents[rclass],
552 conflict_set))
553 return -1;
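/* Make the hard registers preferred by conflicting reload/inheritance
   pseudos (which hold a different value) more expensive for REGNO so
   that they tend to stay available for those pseudos. */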
555 EXECUTE_IF_SET_IN_SPARSESET (conflict_reload_and_inheritance_pseudos,
556 conflict_regno)
557 if (!lra_reg_val_equal_p (conflict_regno, val, offset))
559 lra_assert (live_pseudos_reg_renumber[conflict_regno] < 0);
560 if ((hard_regno
561 = lra_reg_info[conflict_regno].preferred_hard_regno1) >= 0)
563 adjust_hard_regno_cost
564 (hard_regno,
565 lra_reg_info[conflict_regno].preferred_hard_regno_profit1);
566 if ((hard_regno
567 = lra_reg_info[conflict_regno].preferred_hard_regno2) >= 0)
568 adjust_hard_regno_cost
569 (hard_regno,
570 lra_reg_info[conflict_regno].preferred_hard_regno_profit2);
573 /* Make sure that all registers in a multi-word pseudo belong to the
574 required class. */
575 IOR_COMPL_HARD_REG_SET (conflict_set, reg_class_contents[rclass]);
576 lra_assert (rclass != NO_REGS);
577 rclass_size = ira_class_hard_regs_num[rclass];
578 best_hard_regno = -1;
579 hard_regno = ira_class_hard_regs[rclass][0];
580 biggest_nregs = hard_regno_nregs[hard_regno][biggest_mode];
581 nregs_diff = (biggest_nregs
582 - hard_regno_nregs[hard_regno][PSEUDO_REGNO_MODE (regno)]);
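/* NREGS_DIFF is the number of extra hard registers the pseudo's
   biggest mode needs beyond its natural mode; the check below requires
   the register at the far end of that excess (before the allocated
   registers on WORDS_BIG_ENDIAN targets, after them otherwise) to be
   in RCLASS as well. */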
583 for (i = 0; i < rclass_size; i++)
585 if (try_only_hard_regno >= 0)
586 hard_regno = try_only_hard_regno;
587 else
588 hard_regno = ira_class_hard_regs[rclass][i];
589 if (! overlaps_hard_reg_set_p (conflict_set,
590 PSEUDO_REGNO_MODE (regno), hard_regno)
591 /* We can not use prohibited_class_mode_regs because it is
592 not defined for all classes. */
593 && HARD_REGNO_MODE_OK (hard_regno, PSEUDO_REGNO_MODE (regno))
594 && ! TEST_HARD_REG_BIT (impossible_start_hard_regs, hard_regno)
595 && (nregs_diff == 0
596 || (WORDS_BIG_ENDIAN
597 ? (hard_regno - nregs_diff >= 0
598 && TEST_HARD_REG_BIT (reg_class_contents[rclass],
599 hard_regno - nregs_diff))
600 : TEST_HARD_REG_BIT (reg_class_contents[rclass],
601 hard_regno + nregs_diff))))
603 if (hard_regno_costs_check[hard_regno]
604 != curr_hard_regno_costs_check)
606 hard_regno_costs_check[hard_regno] = curr_hard_regno_costs_check;
607 hard_regno_costs[hard_regno] = 0;
609 for (j = 0;
610 j < hard_regno_nregs[hard_regno][PSEUDO_REGNO_MODE (regno)];
611 j++)
612 if (! TEST_HARD_REG_BIT (call_used_reg_set, hard_regno + j)
613 && ! df_regs_ever_live_p (hard_regno + j))
614 /* A new call-saved register needs a prologue save and epilogue restore. */
615 hard_regno_costs[hard_regno]
616 += (2
617 * REG_FREQ_FROM_BB (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
618 + 1);
619 priority = targetm.register_priority (hard_regno);
620 if (best_hard_regno < 0 || hard_regno_costs[hard_regno] < best_cost
621 || (hard_regno_costs[hard_regno] == best_cost
622 && (priority > best_priority
623 || (targetm.register_usage_leveling_p ()
624 && priority == best_priority
625 && best_usage > lra_hard_reg_usage[hard_regno]))))
627 best_hard_regno = hard_regno;
628 best_cost = hard_regno_costs[hard_regno];
629 best_priority = priority;
630 best_usage = lra_hard_reg_usage[hard_regno];
633 if (try_only_hard_regno >= 0)
634 break;
636 if (best_hard_regno >= 0)
637 *cost = best_cost - lra_reg_info[regno].freq;
638 return best_hard_regno;
641 /* Current value used for checking elements in
642 update_hard_regno_preference_check. */
643 static int curr_update_hard_regno_preference_check;
644 /* If an element value is equal to the above variable value, then the
645 corresponding regno has been processed for preference
646 propagation. */
647 static int *update_hard_regno_preference_check;
649 /* Update the preference for using HARD_REGNO for pseudos that are
650 connected directly or indirectly with REGNO. Apply divisor DIV
651 to any preference adjustments.
653 The more indirectly a pseudo is connected, the smaller its effect
654 should be. We therefore increase DIV on each "hop". */
655 static void
656 update_hard_regno_preference (int regno, int hard_regno, int div)
658 int another_regno, cost;
659 lra_copy_t cp, next_cp;
661 /* Search depth 5 seems to be enough. */
662 if (div > (1 << 5))
663 return;
664 for (cp = lra_reg_info[regno].copies; cp != NULL; cp = next_cp)
666 if (cp->regno1 == regno)
668 next_cp = cp->regno1_next;
669 another_regno = cp->regno2;
671 else if (cp->regno2 == regno)
673 next_cp = cp->regno2_next;
674 another_regno = cp->regno1;
676 else
677 gcc_unreachable ();
678 if (reg_renumber[another_regno] < 0
679 && (update_hard_regno_preference_check[another_regno]
680 != curr_update_hard_regno_preference_check))
682 update_hard_regno_preference_check[another_regno]
683 = curr_update_hard_regno_preference_check;
684 cost = cp->freq < div ? 1 : cp->freq / div;
685 lra_setup_reload_pseudo_preferenced_hard_reg
686 (another_regno, hard_regno, cost);
687 update_hard_regno_preference (another_regno, hard_regno, div * 2);
692 /* Return prefix title for pseudo REGNO. */
693 static const char *
694 pseudo_prefix_title (int regno)
696 return
697 (regno < lra_constraint_new_regno_start ? ""
698 : bitmap_bit_p (&lra_inheritance_pseudos, regno) ? "inheritance "
699 : bitmap_bit_p (&lra_split_regs, regno) ? "split "
700 : bitmap_bit_p (&lra_optional_reload_pseudos, regno) ? "optional reload "
701 : bitmap_bit_p (&lra_subreg_reload_pseudos, regno) ? "subreg reload "
702 : "reload ");
705 /* Update REG_RENUMBER and other pseudo preferences by assignment of
706 HARD_REGNO to pseudo REGNO, and print a dump message about it if PRINT_P. */
707 void
708 lra_setup_reg_renumber (int regno, int hard_regno, bool print_p)
710 int i, hr;
712 /* We cannot simply reassign a hard register to an already-assigned pseudo. */
713 lra_assert (hard_regno < 0 || reg_renumber[regno] < 0);
714 if ((hr = hard_regno) < 0)
715 hr = reg_renumber[regno];
716 reg_renumber[regno] = hard_regno;
717 lra_assert (hr >= 0);
718 for (i = 0; i < hard_regno_nregs[hr][PSEUDO_REGNO_MODE (regno)]; i++)
719 if (hard_regno < 0)
720 lra_hard_reg_usage[hr + i] -= lra_reg_info[regno].freq;
721 else
722 lra_hard_reg_usage[hr + i] += lra_reg_info[regno].freq;
723 if (print_p && lra_dump_file != NULL)
724 fprintf (lra_dump_file, " Assign %d to %sr%d (freq=%d)\n",
725 reg_renumber[regno], pseudo_prefix_title (regno),
726 regno, lra_reg_info[regno].freq);
727 if (hard_regno >= 0)
729 curr_update_hard_regno_preference_check++;
730 update_hard_regno_preference (regno, hard_regno, 1);
734 /* Pseudos which occur in insns containing a particular pseudo. */
735 static bitmap_head insn_conflict_pseudos;
737 /* Bitmaps used to contain spill pseudos for given pseudo hard regno
738 and best spill pseudos for given pseudo (and best hard regno). */
739 static bitmap_head spill_pseudos_bitmap, best_spill_pseudos_bitmap;
741 /* Current pseudo check for validity of elements in
742 TRY_HARD_REG_PSEUDOS. */
743 static int curr_pseudo_check;
744 /* Array used for validity of elements in TRY_HARD_REG_PSEUDOS. */
745 static int try_hard_reg_pseudos_check[FIRST_PSEUDO_REGISTER];
746 /* Pseudos that hold the given hard register at the considered program points. */
747 static bitmap_head try_hard_reg_pseudos[FIRST_PSEUDO_REGISTER];
749 /* Set up try_hard_reg_pseudos for given program point P and class
750 RCLASS. Those are pseudos living at P and assigned to a hard
751 register of RCLASS. In other words, those are pseudos which can be
752 spilled to assign a hard register of RCLASS to a pseudo living at
753 P. */
754 static void
755 setup_try_hard_regno_pseudos (int p, enum reg_class rclass)
757 int i, hard_regno;
758 enum machine_mode mode;
759 unsigned int spill_regno;
760 bitmap_iterator bi;
762 /* Find what pseudos could be spilled. */
763 EXECUTE_IF_SET_IN_BITMAP (&live_hard_reg_pseudos[p], 0, spill_regno, bi)
765 mode = PSEUDO_REGNO_MODE (spill_regno);
766 hard_regno = live_pseudos_reg_renumber[spill_regno];
767 if (overlaps_hard_reg_set_p (reg_class_contents[rclass],
768 mode, hard_regno))
770 for (i = hard_regno_nregs[hard_regno][mode] - 1; i >= 0; i--)
772 if (try_hard_reg_pseudos_check[hard_regno + i]
773 != curr_pseudo_check)
775 try_hard_reg_pseudos_check[hard_regno + i]
776 = curr_pseudo_check;
777 bitmap_clear (&try_hard_reg_pseudos[hard_regno + i]);
779 bitmap_set_bit (&try_hard_reg_pseudos[hard_regno + i],
780 spill_regno);
786 /* Temporarily assign HARD_REGNO to pseudo REGNO. A temporary
787 assignment means that we might undo the data change. */
788 static void
789 assign_temporarily (int regno, int hard_regno)
791 int p;
792 lra_live_range_t r;
794 for (r = lra_reg_info[regno].live_ranges; r != NULL; r = r->next)
796 for (p = r->start; p <= r->finish; p++)
797 if (hard_regno < 0)
798 bitmap_clear_bit (&live_hard_reg_pseudos[p], regno);
799 else
801 bitmap_set_bit (&live_hard_reg_pseudos[p], regno);
802 insert_in_live_range_start_chain (regno);
805 live_pseudos_reg_renumber[regno] = hard_regno;
808 /* Array used for sorting reload pseudos for subsequent allocation
809 after spilling some pseudo. */
810 static int *sorted_reload_pseudos;
812 /* Spill some pseudos for a reload pseudo REGNO and return the hard
813 register which should be used for the pseudo after spilling. The
814 function adds spilled pseudos to SPILLED_PSEUDO_BITMAP. When we
815 choose a hard register (and the pseudos occupying its hard
816 registers, which are to be spilled), we take into account not only
817 how REGNO will benefit from the spills but also how other reload
818 pseudos not yet assigned to hard registers benefit from them too.
819 In very rare cases, the function can fail and return -1. */
820 static int
821 spill_for (int regno, bitmap spilled_pseudo_bitmap)
823 int i, j, n, p, hard_regno, best_hard_regno, cost, best_cost, rclass_size;
824 int reload_hard_regno, reload_cost;
825 enum machine_mode mode;
826 enum reg_class rclass;
827 unsigned int spill_regno, reload_regno, uid;
828 int insn_pseudos_num, best_insn_pseudos_num;
829 lra_live_range_t r;
830 bitmap_iterator bi;
832 rclass = regno_allocno_class_array[regno];
833 lra_assert (reg_renumber[regno] < 0 && rclass != NO_REGS);
834 bitmap_clear (&insn_conflict_pseudos);
835 bitmap_clear (&best_spill_pseudos_bitmap);
836 EXECUTE_IF_SET_IN_BITMAP (&lra_reg_info[regno].insn_bitmap, 0, uid, bi)
838 struct lra_insn_reg *ir;
840 for (ir = lra_get_insn_regs (uid); ir != NULL; ir = ir->next)
841 if (ir->regno >= FIRST_PSEUDO_REGISTER)
842 bitmap_set_bit (&insn_conflict_pseudos, ir->regno);
844 best_hard_regno = -1;
845 best_cost = INT_MAX;
846 best_insn_pseudos_num = INT_MAX;
847 rclass_size = ira_class_hard_regs_num[rclass];
848 mode = PSEUDO_REGNO_MODE (regno);
849 /* Invalidate try_hard_reg_pseudos elements. */
850 curr_pseudo_check++;
851 for (r = lra_reg_info[regno].live_ranges; r != NULL; r = r->next)
852 for (p = r->start; p <= r->finish; p++)
853 setup_try_hard_regno_pseudos (p, rclass);
854 for (i = 0; i < rclass_size; i++)
856 hard_regno = ira_class_hard_regs[rclass][i];
857 bitmap_clear (&spill_pseudos_bitmap);
858 for (j = hard_regno_nregs[hard_regno][mode] - 1; j >= 0; j--)
860 if (try_hard_reg_pseudos_check[hard_regno + j] != curr_pseudo_check)
861 continue;
862 lra_assert (!bitmap_empty_p (&try_hard_reg_pseudos[hard_regno + j]));
863 bitmap_ior_into (&spill_pseudos_bitmap,
864 &try_hard_reg_pseudos[hard_regno + j]);
866 /* Spill pseudos. */
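/* A real reload pseudo (neither inheritance, split, subreg reload nor
   optional reload) must itself get a hard register, so it cannot be
   spilled; give up on this hard register if one is among the
   candidates. */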
867 EXECUTE_IF_SET_IN_BITMAP (&spill_pseudos_bitmap, 0, spill_regno, bi)
868 if ((int) spill_regno >= lra_constraint_new_regno_start
869 && ! bitmap_bit_p (&lra_inheritance_pseudos, spill_regno)
870 && ! bitmap_bit_p (&lra_split_regs, spill_regno)
871 && ! bitmap_bit_p (&lra_subreg_reload_pseudos, spill_regno)
872 && ! bitmap_bit_p (&lra_optional_reload_pseudos, spill_regno))
873 goto fail;
874 insn_pseudos_num = 0;
875 if (lra_dump_file != NULL)
876 fprintf (lra_dump_file, " Trying %d:", hard_regno);
877 sparseset_clear (live_range_reload_inheritance_pseudos);
878 EXECUTE_IF_SET_IN_BITMAP (&spill_pseudos_bitmap, 0, spill_regno, bi)
880 if (bitmap_bit_p (&insn_conflict_pseudos, spill_regno))
881 insn_pseudos_num++;
882 for (r = lra_reg_info[spill_regno].live_ranges;
883 r != NULL;
884 r = r->next)
886 for (p = r->start; p <= r->finish; p++)
888 lra_live_range_t r2;
890 for (r2 = start_point_ranges[p];
891 r2 != NULL;
892 r2 = r2->start_next)
893 if (r2->regno >= lra_constraint_new_regno_start)
894 sparseset_set_bit (live_range_reload_inheritance_pseudos,
895 r2->regno);
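/* Among the reload/inheritance pseudos overlapping the spill
   candidates, pick those (other than REGNO) of intersecting classes
   that are still unassigned and currently cannot get a hard register;
   they may benefit from this spill as well. */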
899 n = 0;
900 if (sparseset_cardinality (live_range_reload_inheritance_pseudos)
901 <= (unsigned)LRA_MAX_CONSIDERED_RELOAD_PSEUDOS)
902 EXECUTE_IF_SET_IN_SPARSESET (live_range_reload_inheritance_pseudos,
903 reload_regno)
904 if ((int) reload_regno != regno
905 && (ira_reg_classes_intersect_p
906 [rclass][regno_allocno_class_array[reload_regno]])
907 && live_pseudos_reg_renumber[reload_regno] < 0
908 && find_hard_regno_for (reload_regno, &cost, -1) < 0)
909 sorted_reload_pseudos[n++] = reload_regno;
910 EXECUTE_IF_SET_IN_BITMAP (&spill_pseudos_bitmap, 0, spill_regno, bi)
912 update_lives (spill_regno, true);
913 if (lra_dump_file != NULL)
914 fprintf (lra_dump_file, " spill %d(freq=%d)",
915 spill_regno, lra_reg_info[spill_regno].freq);
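/* With the candidates temporarily spilled, see which hard register
   REGNO itself would get, then tentatively assign the other collected
   reload pseudos too so their costs count toward this choice. */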
917 hard_regno = find_hard_regno_for (regno, &cost, -1);
918 if (hard_regno >= 0)
920 assign_temporarily (regno, hard_regno);
921 qsort (sorted_reload_pseudos, n, sizeof (int),
922 reload_pseudo_compare_func);
923 for (j = 0; j < n; j++)
925 reload_regno = sorted_reload_pseudos[j];
926 lra_assert (live_pseudos_reg_renumber[reload_regno] < 0);
927 if ((reload_hard_regno
928 = find_hard_regno_for (reload_regno,
929 &reload_cost, -1)) >= 0)
931 if (lra_dump_file != NULL)
932 fprintf (lra_dump_file, " assign %d(cost=%d)",
933 reload_regno, reload_cost);
934 assign_temporarily (reload_regno, reload_hard_regno);
935 cost += reload_cost;
938 EXECUTE_IF_SET_IN_BITMAP (&spill_pseudos_bitmap, 0, spill_regno, bi)
940 rtx x;
942 cost += lra_reg_info[spill_regno].freq;
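/* If the spilled pseudo has a memory or constant equivalence, the
   insns initializing the equivalence can presumably be deleted once it
   is spilled, so credit their frequencies against the spill cost. */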
943 if (ira_reg_equiv[spill_regno].memory != NULL
944 || ira_reg_equiv[spill_regno].constant != NULL)
945 for (x = ira_reg_equiv[spill_regno].init_insns;
946 x != NULL;
947 x = XEXP (x, 1))
948 cost -= REG_FREQ_FROM_BB (BLOCK_FOR_INSN (XEXP (x, 0)));
950 if (best_insn_pseudos_num > insn_pseudos_num
951 || (best_insn_pseudos_num == insn_pseudos_num
952 && best_cost > cost))
954 best_insn_pseudos_num = insn_pseudos_num;
955 best_cost = cost;
956 best_hard_regno = hard_regno;
957 bitmap_copy (&best_spill_pseudos_bitmap, &spill_pseudos_bitmap);
958 if (lra_dump_file != NULL)
959 fprintf (lra_dump_file, " Now best %d(cost=%d)\n",
960 hard_regno, cost);
962 assign_temporarily (regno, -1);
963 for (j = 0; j < n; j++)
965 reload_regno = sorted_reload_pseudos[j];
966 if (live_pseudos_reg_renumber[reload_regno] >= 0)
967 assign_temporarily (reload_regno, -1);
970 if (lra_dump_file != NULL)
971 fprintf (lra_dump_file, "\n");
972 /* Restore the live hard reg pseudo info for spilled pseudos. */
973 EXECUTE_IF_SET_IN_BITMAP (&spill_pseudos_bitmap, 0, spill_regno, bi)
974 update_lives (spill_regno, false);
975 fail:
978 /* Spill: */
979 EXECUTE_IF_SET_IN_BITMAP (&best_spill_pseudos_bitmap, 0, spill_regno, bi)
981 if (lra_dump_file != NULL)
982 fprintf (lra_dump_file, " Spill %sr%d(hr=%d, freq=%d) for r%d\n",
983 pseudo_prefix_title (spill_regno),
984 spill_regno, reg_renumber[spill_regno],
985 lra_reg_info[spill_regno].freq, regno);
986 update_lives (spill_regno, true);
987 lra_setup_reg_renumber (spill_regno, -1, false);
989 bitmap_ior_into (spilled_pseudo_bitmap, &best_spill_pseudos_bitmap);
990 return best_hard_regno;
993 /* Assign HARD_REGNO to REGNO. */
994 static void
995 assign_hard_regno (int hard_regno, int regno)
997 int i;
999 lra_assert (hard_regno >= 0);
1000 lra_setup_reg_renumber (regno, hard_regno, true);
1001 update_lives (regno, false);
1002 for (i = 0;
1003 i < hard_regno_nregs[hard_regno][lra_reg_info[regno].biggest_mode];
1004 i++)
1005 df_set_regs_ever_live (hard_regno + i, true);
1008 /* Array used for sorting different pseudos. */
1009 static int *sorted_pseudos;
1011 /* The constraints pass is allowed to create equivalences between
1012 pseudos that make the current allocation "incorrect" (in the sense
1013 that pseudos are assigned to hard registers from their own conflict
1014 sets). The global variable lra_risky_transformations_p says
1015 whether this might have happened.
1017 Process pseudos assigned to hard registers (less frequently used
1018 first), spill if a conflict is found, and mark the spilled pseudos
1019 in SPILLED_PSEUDO_BITMAP. Set up LIVE_HARD_REG_PSEUDOS from
1020 pseudos, assigned to hard registers. */
1021 static void
1022 setup_live_pseudos_and_spill_after_risky_transforms (bitmap
1023 spilled_pseudo_bitmap)
1025 int p, i, j, n, regno, hard_regno;
1026 unsigned int k, conflict_regno;
1027 int val, offset;
1028 HARD_REG_SET conflict_set;
1029 enum machine_mode mode;
1030 lra_live_range_t r;
1031 bitmap_iterator bi;
1032 int max_regno = max_reg_num ();
1034 if (! lra_risky_transformations_p)
1036 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1037 if (reg_renumber[i] >= 0 && lra_reg_info[i].nrefs > 0)
1038 update_lives (i, false);
1039 return;
1041 for (n = 0, i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1042 if (reg_renumber[i] >= 0 && lra_reg_info[i].nrefs > 0)
1043 sorted_pseudos[n++] = i;
1044 qsort (sorted_pseudos, n, sizeof (int), pseudo_compare_func);
1045 for (i = n - 1; i >= 0; i--)
1047 regno = sorted_pseudos[i];
1048 hard_regno = reg_renumber[regno];
1049 lra_assert (hard_regno >= 0);
1050 mode = lra_reg_info[regno].biggest_mode;
1051 sparseset_clear (live_range_hard_reg_pseudos);
1052 for (r = lra_reg_info[regno].live_ranges; r != NULL; r = r->next)
1054 EXECUTE_IF_SET_IN_BITMAP (&live_hard_reg_pseudos[r->start], 0, k, bi)
1055 sparseset_set_bit (live_range_hard_reg_pseudos, k);
1056 for (p = r->start + 1; p <= r->finish; p++)
1058 lra_live_range_t r2;
1060 for (r2 = start_point_ranges[p];
1061 r2 != NULL;
1062 r2 = r2->start_next)
1063 if (live_pseudos_reg_renumber[r2->regno] >= 0)
1064 sparseset_set_bit (live_range_hard_reg_pseudos, r2->regno);
1067 COPY_HARD_REG_SET (conflict_set, lra_no_alloc_regs);
1068 IOR_HARD_REG_SET (conflict_set, lra_reg_info[regno].conflict_hard_regs);
1069 val = lra_reg_info[regno].val;
1070 offset = lra_reg_info[regno].offset;
1071 EXECUTE_IF_SET_IN_SPARSESET (live_range_hard_reg_pseudos, conflict_regno)
1072 if (!lra_reg_val_equal_p (conflict_regno, val, offset)
1073 /* Multi-register pseudos must start on the same
1074 hard register. */
1075 || hard_regno != reg_renumber[conflict_regno])
1076 add_to_hard_reg_set (&conflict_set,
1077 lra_reg_info[conflict_regno].biggest_mode,
1078 reg_renumber[conflict_regno]);
1079 if (! overlaps_hard_reg_set_p (conflict_set, mode, hard_regno))
1081 update_lives (regno, false);
1082 continue;
1084 bitmap_set_bit (spilled_pseudo_bitmap, regno);
1085 for (j = 0;
1086 j < hard_regno_nregs[hard_regno][PSEUDO_REGNO_MODE (regno)];
1087 j++)
1088 lra_hard_reg_usage[hard_regno + j] -= lra_reg_info[regno].freq;
1089 reg_renumber[regno] = -1;
1090 if (lra_dump_file != NULL)
1091 fprintf (lra_dump_file, " Spill r%d after risky transformations\n",
1092 regno);
1096 /* Improve allocation by assigning the same hard regno of inheritance
1097 pseudos to the connected pseudos. We need this because inheritance
1098 pseudos are allocated after reload pseudos in the thread and when
1099 we assign a hard register to a reload pseudo we don't know yet that
1100 the connected inheritance pseudos can get the same hard register.
1101 Add pseudos with changed allocation to bitmap CHANGED_PSEUDOS. */
1102 static void
1103 improve_inheritance (bitmap changed_pseudos)
1105 unsigned int k;
1106 int regno, another_regno, hard_regno, another_hard_regno, cost, i, n;
1107 lra_copy_t cp, next_cp;
1108 bitmap_iterator bi;
1110 if (lra_inheritance_iter > LRA_MAX_INHERITANCE_PASSES)
1111 return;
1112 n = 0;
1113 EXECUTE_IF_SET_IN_BITMAP (&lra_inheritance_pseudos, 0, k, bi)
1114 if (reg_renumber[k] >= 0 && lra_reg_info[k].nrefs != 0)
1115 sorted_pseudos[n++] = k;
1116 qsort (sorted_pseudos, n, sizeof (int), pseudo_compare_func);
1117 for (i = 0; i < n; i++)
1119 regno = sorted_pseudos[i];
1120 hard_regno = reg_renumber[regno];
1121 lra_assert (hard_regno >= 0);
1122 for (cp = lra_reg_info[regno].copies; cp != NULL; cp = next_cp)
1124 if (cp->regno1 == regno)
1126 next_cp = cp->regno1_next;
1127 another_regno = cp->regno2;
1129 else if (cp->regno2 == regno)
1131 next_cp = cp->regno2_next;
1132 another_regno = cp->regno1;
1134 else
1135 gcc_unreachable ();
1136 /* Don't change reload pseudo allocation. It might have
1137 this allocation for a purpose and changing it can result
1138 in LRA cycling. */
1139 if ((another_regno < lra_constraint_new_regno_start
1140 || bitmap_bit_p (&lra_inheritance_pseudos, another_regno))
1141 && (another_hard_regno = reg_renumber[another_regno]) >= 0
1142 && another_hard_regno != hard_regno)
1144 if (lra_dump_file != NULL)
1145 fprintf
1146 (lra_dump_file,
1147 " Improving inheritance for %d(%d) and %d(%d)...\n",
1148 regno, hard_regno, another_regno, another_hard_regno);
1149 update_lives (another_regno, true);
1150 lra_setup_reg_renumber (another_regno, -1, false);
1151 if (hard_regno
1152 == find_hard_regno_for (another_regno, &cost, hard_regno))
1153 assign_hard_regno (hard_regno, another_regno);
1154 else
1155 assign_hard_regno (another_hard_regno, another_regno);
1156 bitmap_set_bit (changed_pseudos, another_regno);
1163 /* Bitmap finally containing all pseudos spilled on this assignment
1164 pass. */
1165 static bitmap_head all_spilled_pseudos;
1166 /* All pseudos whose allocation was changed. */
1167 static bitmap_head changed_pseudo_bitmap;
1169 /* Assign hard registers to reload pseudos and other pseudos. */
1170 static void
1171 assign_by_spills (void)
1173 int i, n, nfails, iter, regno, hard_regno, cost, restore_regno;
1174 rtx insn;
1175 basic_block bb;
1176 bitmap_head changed_insns, do_not_assign_nonreload_pseudos;
1177 unsigned int u;
1178 bitmap_iterator bi;
1179 bool reload_p;
1180 int max_regno = max_reg_num ();
1182 for (n = 0, i = lra_constraint_new_regno_start; i < max_regno; i++)
1183 if (reg_renumber[i] < 0 && lra_reg_info[i].nrefs != 0
1184 && regno_allocno_class_array[i] != NO_REGS)
1185 sorted_pseudos[n++] = i;
1186 bitmap_initialize (&insn_conflict_pseudos, &reg_obstack);
1187 bitmap_initialize (&spill_pseudos_bitmap, &reg_obstack);
1188 bitmap_initialize (&best_spill_pseudos_bitmap, &reg_obstack);
1189 update_hard_regno_preference_check = XCNEWVEC (int, max_regno);
1190 curr_update_hard_regno_preference_check = 0;
1191 memset (try_hard_reg_pseudos_check, 0, sizeof (try_hard_reg_pseudos_check));
1192 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1193 bitmap_initialize (&try_hard_reg_pseudos[i], &reg_obstack);
1194 curr_pseudo_check = 0;
1195 bitmap_initialize (&changed_insns, &reg_obstack);
1196 bitmap_initialize (&non_reload_pseudos, &reg_obstack);
1197 bitmap_ior (&non_reload_pseudos, &lra_inheritance_pseudos, &lra_split_regs);
1198 bitmap_ior_into (&non_reload_pseudos, &lra_subreg_reload_pseudos);
1199 bitmap_ior_into (&non_reload_pseudos, &lra_optional_reload_pseudos);
1200 for (iter = 0; iter <= 1; iter++)
1202 qsort (sorted_pseudos, n, sizeof (int), reload_pseudo_compare_func);
1203 nfails = 0;
1204 for (i = 0; i < n; i++)
1206 regno = sorted_pseudos[i];
1207 if (lra_dump_file != NULL)
1208 fprintf (lra_dump_file, " Assigning to %d "
1209 "(cl=%s, orig=%d, freq=%d, tfirst=%d, tfreq=%d)...\n",
1210 regno, reg_class_names[regno_allocno_class_array[regno]],
1211 ORIGINAL_REGNO (regno_reg_rtx[regno]),
1212 lra_reg_info[regno].freq, regno_assign_info[regno].first,
1213 regno_assign_info[regno_assign_info[regno].first].freq);
1214 hard_regno = find_hard_regno_for (regno, &cost, -1);
1215 reload_p = ! bitmap_bit_p (&non_reload_pseudos, regno);
1216 if (hard_regno < 0 && reload_p)
1217 hard_regno = spill_for (regno, &all_spilled_pseudos);
1218 if (hard_regno < 0)
1220 if (reload_p)
1221 sorted_pseudos[nfails++] = regno;
1223 else
1225 /* This register might have been spilled by the previous
1226 pass. Indicate that it is no longer spilled. */
1227 bitmap_clear_bit (&all_spilled_pseudos, regno);
1228 assign_hard_regno (hard_regno, regno);
1229 if (! reload_p)
1230 /* As the non-reload pseudo's assignment has changed,
1231 we should reconsider the insns referring to the
1232 pseudo. */
1233 bitmap_set_bit (&changed_pseudo_bitmap, regno);
1236 if (nfails == 0)
1237 break;
1238 if (iter > 0)
1240 /* We did not assign hard regs to reload pseudos after two
1241 iterations. That means something is wrong with the asm insn
1242 constraints. Report it. */
1243 bool asm_p = false;
1244 bitmap_head failed_reload_insns;
1246 bitmap_initialize (&failed_reload_insns, &reg_obstack);
1247 for (i = 0; i < nfails; i++)
1249 regno = sorted_pseudos[i];
1250 bitmap_ior_into (&failed_reload_insns,
1251 &lra_reg_info[regno].insn_bitmap);
1252 /* Assign an arbitrary hard register of regno class to
1253 avoid further trouble with the asm insns. */
1254 bitmap_clear_bit (&all_spilled_pseudos, regno);
1255 assign_hard_regno
1256 (ira_class_hard_regs[regno_allocno_class_array[regno]][0],
1257 regno);
1259 EXECUTE_IF_SET_IN_BITMAP (&failed_reload_insns, 0, u, bi)
1261 insn = lra_insn_recog_data[u]->insn;
1262 if (asm_noperands (PATTERN (insn)) >= 0)
1264 asm_p = true;
1265 error_for_asm (insn,
1266 "%<asm%> operand has impossible constraints");
1267 /* Avoid further trouble with this insn.
1268 For asm goto, instead of fixing up all the edges
1269 just clear the template and clear input operands
1270 (asm goto doesn't have any output operands). */
1271 if (JUMP_P (insn))
1273 rtx asm_op = extract_asm_operands (PATTERN (insn));
1274 ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
1275 ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
1276 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
1277 lra_update_insn_regno_info (insn);
1279 else
1281 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
1282 lra_set_insn_deleted (insn);
1286 lra_assert (asm_p);
1287 break;
1289 /* This is a very rare event. We cannot assign a hard
1290 register to a reload pseudo because the hard register was
1291 assigned to another reload pseudo on a previous
1292 assignment pass. For example, on x86, on the 1st pass we
1293 assigned CX (although another hard register could have been
1294 used) to a reload pseudo in an insn, and on the 2nd pass we
1295 need CX (and only CX) for a new reload pseudo in the same
1296 insn. */
1297 if (lra_dump_file != NULL)
1298 fprintf (lra_dump_file, " 2nd iter for reload pseudo assignments:\n");
1299 for (i = 0; i < nfails; i++)
1301 if (lra_dump_file != NULL)
1302 fprintf (lra_dump_file, " Reload r%d assignment failure\n",
1303 sorted_pseudos[i]);
1304 bitmap_ior_into (&changed_insns,
1305 &lra_reg_info[sorted_pseudos[i]].insn_bitmap);
1308 /* FIXME: Look up the changed insns in the cached LRA insn data using
1309 an EXECUTE_IF_SET_IN_BITMAP over changed_insns. */
1310 FOR_EACH_BB_FN (bb, cfun)
1311 FOR_BB_INSNS (bb, insn)
1312 if (bitmap_bit_p (&changed_insns, INSN_UID (insn)))
1314 lra_insn_recog_data_t data;
1315 struct lra_insn_reg *r;
1317 data = lra_get_insn_recog_data (insn);
1318 for (r = data->regs; r != NULL; r = r->next)
1320 regno = r->regno;
1321 /* A reload pseudo did not get a hard register on the
1322 first iteration because of a conflict with
1323 other reload pseudos in the same insn. So we
1324 consider only reload pseudos assigned to hard
1325 registers. We shall exclude inheritance pseudos as
1326 they can occur in original insns (not reload ones).
1327 We can omit the check for split pseudos because
1328 they occur only in move insns containing non-reload
1329 pseudos. */
1330 if (regno < lra_constraint_new_regno_start
1331 || bitmap_bit_p (&lra_inheritance_pseudos, regno)
1332 || reg_renumber[regno] < 0)
1333 continue;
1334 sorted_pseudos[nfails++] = regno;
1335 if (lra_dump_file != NULL)
1336 fprintf (lra_dump_file,
1337 " Spill reload r%d(hr=%d, freq=%d)\n",
1338 regno, reg_renumber[regno],
1339 lra_reg_info[regno].freq);
1340 update_lives (regno, true);
1341 lra_setup_reg_renumber (regno, -1, false);
1344 n = nfails;
1346 improve_inheritance (&changed_pseudo_bitmap);
1347 bitmap_clear (&non_reload_pseudos);
1348 bitmap_clear (&changed_insns);
1349 if (! lra_simple_p)
1351 /* We should not assign to original pseudos of inheritance
1352 pseudos or split pseudos if any of their inheritance pseudos
1353 did not get a hard register or any of their split pseudos was
1354 not split, because the undo inheritance/split pass will extend
1355 the live ranges of such inheritance or split pseudos. */
1356 bitmap_initialize (&do_not_assign_nonreload_pseudos, &reg_obstack);
1357 EXECUTE_IF_SET_IN_BITMAP (&lra_inheritance_pseudos, 0, u, bi)
1358 if ((restore_regno = lra_reg_info[u].restore_regno) >= 0
1359 && reg_renumber[u] < 0
1360 && bitmap_bit_p (&lra_inheritance_pseudos, u))
1361 bitmap_set_bit (&do_not_assign_nonreload_pseudos, restore_regno);
1362 EXECUTE_IF_SET_IN_BITMAP (&lra_split_regs, 0, u, bi)
1363 if ((restore_regno = lra_reg_info[u].restore_regno) >= 0
1364 && reg_renumber[u] >= 0)
1365 bitmap_set_bit (&do_not_assign_nonreload_pseudos, restore_regno);
1366 for (n = 0, i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1367 if (((i < lra_constraint_new_regno_start
1368 && ! bitmap_bit_p (&do_not_assign_nonreload_pseudos, i))
1369 || (bitmap_bit_p (&lra_inheritance_pseudos, i)
1370 && lra_reg_info[i].restore_regno >= 0)
1371 || (bitmap_bit_p (&lra_split_regs, i)
1372 && lra_reg_info[i].restore_regno >= 0)
1373 || bitmap_bit_p (&lra_subreg_reload_pseudos, i)
1374 || bitmap_bit_p (&lra_optional_reload_pseudos, i))
1375 && reg_renumber[i] < 0 && lra_reg_info[i].nrefs != 0
1376 && regno_allocno_class_array[i] != NO_REGS)
1377 sorted_pseudos[n++] = i;
1378 bitmap_clear (&do_not_assign_nonreload_pseudos);
1379 if (n != 0 && lra_dump_file != NULL)
1380 fprintf (lra_dump_file, " Reassigning non-reload pseudos\n");
1381 qsort (sorted_pseudos, n, sizeof (int), pseudo_compare_func);
1382 for (i = 0; i < n; i++)
1384 regno = sorted_pseudos[i];
1385 hard_regno = find_hard_regno_for (regno, &cost, -1);
1386 if (hard_regno >= 0)
1388 assign_hard_regno (hard_regno, regno);
1389 /* We changed the allocation of a non-reload pseudo on
1390 this iteration -- mark the pseudo for invalidation of
1391 the used alternatives of insns containing the pseudo. */
1392 bitmap_set_bit (&changed_pseudo_bitmap, regno);
1396 free (update_hard_regno_preference_check);
1397 bitmap_clear (&best_spill_pseudos_bitmap);
1398 bitmap_clear (&spill_pseudos_bitmap);
1399 bitmap_clear (&insn_conflict_pseudos);
1403 /* Entry function to assign hard registers to new reload pseudos
1404 starting with LRA_CONSTRAINT_NEW_REGNO_START (by possible spilling
1405 of old pseudos) and possibly to the old pseudos. The function adds
1406 the insns to process on the next constraint pass. Those are all
1407 insns that contain non-reload and non-inheritance pseudos with
1408 changed allocation.
1410 Return true if we did not spill any non-reload and non-inheritance
1411 pseudos. */
1412 bool
1413 lra_assign (void)
1415 int i;
1416 unsigned int u;
1417 bitmap_iterator bi;
1418 bitmap_head insns_to_process;
1419 bool no_spills_p;
1420 int max_regno = max_reg_num ();
1422 timevar_push (TV_LRA_ASSIGN);
1423 init_lives ();
1424 sorted_pseudos = XNEWVEC (int, max_regno);
1425 sorted_reload_pseudos = XNEWVEC (int, max_regno);
1426 regno_allocno_class_array = XNEWVEC (enum reg_class, max_regno);
1427 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1428 regno_allocno_class_array[i] = lra_get_allocno_class (i);
1429 init_regno_assign_info ();
1430 bitmap_initialize (&all_spilled_pseudos, &reg_obstack);
1431 create_live_range_start_chains ();
1432 setup_live_pseudos_and_spill_after_risky_transforms (&all_spilled_pseudos);
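/* Sanity check: no pseudo living across a call may have been left in
   a call-clobbered hard register. */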
1433 #ifdef ENABLE_CHECKING
1434 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1435 if (lra_reg_info[i].nrefs != 0 && reg_renumber[i] >= 0
1436 && lra_reg_info[i].call_p
1437 && overlaps_hard_reg_set_p (call_used_reg_set,
1438 PSEUDO_REGNO_MODE (i), reg_renumber[i]))
1439 gcc_unreachable ();
1440 #endif
1441 /* Set up insns to process on the next constraint pass. */
1442 bitmap_initialize (&changed_pseudo_bitmap, &reg_obstack);
1443 init_live_reload_and_inheritance_pseudos ();
1444 assign_by_spills ();
1445 finish_live_reload_and_inheritance_pseudos ();
1446 bitmap_ior_into (&changed_pseudo_bitmap, &all_spilled_pseudos);
1447 no_spills_p = true;
1448 EXECUTE_IF_SET_IN_BITMAP (&all_spilled_pseudos, 0, u, bi)
1449 /* We ignore spilled pseudos created on the last inheritance pass
1450 because they will be removed. */
1451 if (lra_reg_info[u].restore_regno < 0)
1453 no_spills_p = false;
1454 break;
1456 finish_live_range_start_chains ();
1457 bitmap_clear (&all_spilled_pseudos);
1458 bitmap_initialize (&insns_to_process, &reg_obstack);
1459 EXECUTE_IF_SET_IN_BITMAP (&changed_pseudo_bitmap, 0, u, bi)
1460 bitmap_ior_into (&insns_to_process, &lra_reg_info[u].insn_bitmap);
1461 bitmap_clear (&changed_pseudo_bitmap);
1462 EXECUTE_IF_SET_IN_BITMAP (&insns_to_process, 0, u, bi)
1464 lra_push_insn_by_uid (u);
1465 /* Invalidate the used alternatives of the insns to be processed. */
1466 lra_set_used_insn_alternative_by_uid (u, -1);
1468 bitmap_clear (&insns_to_process);
1469 finish_regno_assign_info ();
1470 free (regno_allocno_class_array);
1471 free (sorted_pseudos);
1472 free (sorted_reload_pseudos);
1473 finish_lives ();
1474 timevar_pop (TV_LRA_ASSIGN);
1475 return no_spills_p;