/* Assign reload pseudos.
   Copyright (C) 2010-2014 Free Software Foundation, Inc.
   Contributed by Vladimir Makarov <vmakarov@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file's main objective is to assign hard registers to reload
   pseudos.  It also tries to allocate hard registers to other
   pseudos, but at a lower priority than the reload pseudos.  The pass
   does not transform the RTL.

   We must allocate a hard register to every reload pseudo.  We try to
   increase the chances of finding a viable allocation by assigning
   the pseudos in order of fewest available hard registers first.  If
   we still fail to find a hard register, we spill other (non-reload)
   pseudos in order to make room.

   find_hard_regno_for finds hard registers for allocation without
   spilling.  spill_for does the same with spilling.  Both functions
   use a cost model to determine the most profitable choice of hard
   and spill registers.

   Once we have finished allocating reload pseudos, we also try to
   assign registers to other (non-reload) pseudos.  This is useful if
   hard registers were freed up by the spilling just described.

   We try to assign hard registers by collecting pseudos into threads.
   These threads contain reload and inheritance pseudos that are
   connected by copies (move insns).  Doing this improves the chances
   of pseudos in the thread getting the same hard register and, as a
   result, of allowing some move insns to be deleted.

   When we assign a hard register to a pseudo, we decrease the cost of
   using the same hard register for pseudos that are connected by
   copies.

   If two hard registers have the same frequency-derived cost, we
   prefer hard registers with higher priorities.  The mapping of
   registers to priorities is controlled by the register_priority
   target hook.  For example, x86-64 has a few register priorities:
   hard registers with and without REX prefixes have different
   priorities.  This permits us to generate smaller code as insns
   without REX prefixes are shorter.

   If a few hard registers are still equally good for the assignment,
   we choose the least used hard register.  This is called leveling
   and may be profitable for some targets.

   Only insns with changed allocation pseudos are processed on the
   next constraint pass.

   The pseudo live-ranges are used to find conflicting pseudos.

   For understanding the code, it is important to keep in mind that
   inheritance, split, and reload pseudos created since the last
   constraint pass have regno >= lra_constraint_new_regno_start.
   Inheritance and split pseudos created on any pass are in the
   corresponding bitmaps.  Inheritance and split pseudos since the
   last constraint pass also have the corresponding non-negative
   restore_regnos.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "rtl-error.h"
#include "insn-config.h"
#include "dominance.h"
#include "basic-block.h"
#include "sparseset.h"
/* Current iteration number of the pass and current iteration number
   of the pass after the latest spill pass when any former reload
   pseudo was spilled.  */
int lra_assignment_iter;
int lra_assignment_iter_after_spill;

/* Flag of spilling former reload pseudos on this pass.  */
static bool former_reload_pseudo_spill_p;

/* Array containing corresponding values of function
   lra_get_allocno_class.  It is used to speed up the code.  */
static enum reg_class *regno_allocno_class_array;
/* Information about the thread to which a pseudo belongs.  Threads are
   a set of connected reload and inheritance pseudos with the same set of
   available hard registers.  Lone registers belong to their own threads.  */
struct regno_assign_info
{
  /* First/next pseudo of the same thread.  */
  int first, next;
  /* Frequency of the thread (execution frequency of only reload
     pseudos in the thread when the thread contains a reload pseudo).
     Defined only for the first thread pseudo.  */
  int freq;
};

/* Map regno to the corresponding regno assignment info.  */
static struct regno_assign_info *regno_assign_info;

/* All inherited, subreg or optional pseudos created before the last spill
   sub-pass.  Such pseudos are permitted to get memory instead of hard
   regs.  */
static bitmap_head non_reload_pseudos;
/* Process a pseudo copy with execution frequency COPY_FREQ connecting
   REGNO1 and REGNO2 to form threads.  */
static void
process_copy_to_form_thread (int regno1, int regno2, int copy_freq)
{
  int last, regno1_first, regno2_first;

  lra_assert (regno1 >= lra_constraint_new_regno_start
	      && regno2 >= lra_constraint_new_regno_start);
  regno1_first = regno_assign_info[regno1].first;
  regno2_first = regno_assign_info[regno2].first;
  if (regno1_first != regno2_first)
    {
      for (last = regno2_first;
	   regno_assign_info[last].next >= 0;
	   last = regno_assign_info[last].next)
	regno_assign_info[last].first = regno1_first;
      regno_assign_info[last].first = regno1_first;
      regno_assign_info[last].next = regno_assign_info[regno1_first].next;
      regno_assign_info[regno1_first].next = regno2_first;
      regno_assign_info[regno1_first].freq
	+= regno_assign_info[regno2_first].freq;
    }
  regno_assign_info[regno1_first].freq -= 2 * copy_freq;
  lra_assert (regno_assign_info[regno1_first].freq >= 0);
}
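/* Illustrative sketch (hypothetical regnos, not from the original
   sources): assume r100 and r101 already form a thread (r100 is the
   first pseudo; the next chain is 100 -> 101) and r102 is alone.
   Processing a copy connecting r101 and r102 with frequency F splices
   the thread of r102 in right after the head of r101's thread:

       regno_assign_info[102].first = 100;
       regno_assign_info[102].next  = 101;    (the old next of the head)
       regno_assign_info[100].next  = 102;
       regno_assign_info[100].freq += regno_assign_info[102].freq;

   and the merged thread frequency is then decreased by 2 * F,
   reflecting that the copy itself can disappear if both pseudos end
   up in the same hard register.  */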
/* Initialize REGNO_ASSIGN_INFO and form threads.  */
static void
init_regno_assign_info (void)
{
  int i, regno1, regno2, max_regno = max_reg_num ();
  lra_copy_t cp;

  regno_assign_info = XNEWVEC (struct regno_assign_info, max_regno);
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      regno_assign_info[i].first = i;
      regno_assign_info[i].next = -1;
      regno_assign_info[i].freq = lra_reg_info[i].freq;
    }
  /* Form the threads.  */
  for (i = 0; (cp = lra_get_copy (i)) != NULL; i++)
    if ((regno1 = cp->regno1) >= lra_constraint_new_regno_start
	&& (regno2 = cp->regno2) >= lra_constraint_new_regno_start
	&& reg_renumber[regno1] < 0 && lra_reg_info[regno1].nrefs != 0
	&& reg_renumber[regno2] < 0 && lra_reg_info[regno2].nrefs != 0
	&& (ira_class_hard_regs_num[regno_allocno_class_array[regno1]]
	    == ira_class_hard_regs_num[regno_allocno_class_array[regno2]]))
      process_copy_to_form_thread (regno1, regno2, cp->freq);
}

/* Free REGNO_ASSIGN_INFO.  */
static void
finish_regno_assign_info (void)
{
  free (regno_assign_info);
}
/* The function is used to sort *reload* and *inheritance* pseudos to
   try to assign them hard registers.  We always put pseudos from the
   same thread nearby.  */
static int
reload_pseudo_compare_func (const void *v1p, const void *v2p)
{
  int r1 = *(const int *) v1p, r2 = *(const int *) v2p;
  enum reg_class cl1 = regno_allocno_class_array[r1];
  enum reg_class cl2 = regno_allocno_class_array[r2];
  int diff;

  lra_assert (r1 >= lra_constraint_new_regno_start
	      && r2 >= lra_constraint_new_regno_start);

  /* Prefer to assign reload registers with smaller classes first to
     guarantee assignment to all reload registers.  */
  if ((diff = (ira_class_hard_regs_num[cl1]
	       - ira_class_hard_regs_num[cl2])) != 0)
    return diff;
  if ((diff
       = (ira_reg_class_max_nregs[cl2][lra_reg_info[r2].biggest_mode]
	  - ira_reg_class_max_nregs[cl1][lra_reg_info[r1].biggest_mode])) != 0
      /* The code below executes rarely as nregs == 1 in most cases.
	 So we should not worry about using faster data structures to
	 check reload pseudos.  */
      && ! bitmap_bit_p (&non_reload_pseudos, r1)
      && ! bitmap_bit_p (&non_reload_pseudos, r2))
    return diff;
  if ((diff = (regno_assign_info[regno_assign_info[r2].first].freq
	       - regno_assign_info[regno_assign_info[r1].first].freq)) != 0)
    return diff;
  /* Allocate bigger pseudos first to avoid register file
     fragmentation.  */
  if ((diff
       = (ira_reg_class_max_nregs[cl2][lra_reg_info[r2].biggest_mode]
	  - ira_reg_class_max_nregs[cl1][lra_reg_info[r1].biggest_mode])) != 0)
    return diff;
  /* Put pseudos from the thread nearby.  */
  if ((diff = regno_assign_info[r1].first - regno_assign_info[r2].first) != 0)
    return diff;
  /* If regs are equally good, sort by their numbers, so that the
     results of qsort leave nothing to chance.  */
  return r1 - r2;
}
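/* Illustrative ordering example (hypothetical regnos, not from the
   original sources): if r200 has an allocno class with 6 allocatable
   hard registers and r201 a class with 16, reload_pseudo_compare_func
   sorts r200 first, because fewer candidates mean a smaller chance of
   finding a free register later.  Only when the class sizes, pseudo
   sizes, thread frequencies and thread identities all tie does the
   final regno comparison decide, which keeps the qsort result
   deterministic.  */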
/* The function is used to sort *non-reload* pseudos to try to assign
   them hard registers.  The order calculation is simpler than in the
   previous function and based on the pseudo frequency usage.  */
static int
pseudo_compare_func (const void *v1p, const void *v2p)
{
  int r1 = *(const int *) v1p, r2 = *(const int *) v2p;
  int diff;

  /* Prefer to assign more frequently used registers first.  */
  if ((diff = lra_reg_info[r2].freq - lra_reg_info[r1].freq) != 0)
    return diff;

  /* If regs are equally good, sort by their numbers, so that the
     results of qsort leave nothing to chance.  */
  return r1 - r2;
}
/* Arrays of size LRA_LIVE_MAX_POINT mapping a program point to the
   pseudo live ranges with given start point.  We insert only live
   ranges of pseudos interesting for assignment purposes.  They are
   reload pseudos and pseudos assigned to hard registers.  */
static lra_live_range_t *start_point_ranges;

/* Used as a flag that a live range is not inserted in the start point
   chain.  */
static struct lra_live_range not_in_chain_mark;

/* Create and set up START_POINT_RANGES.  */
static void
create_live_range_start_chains (void)
{
  int i, max_regno;
  lra_live_range_t r;

  start_point_ranges = XCNEWVEC (lra_live_range_t, lra_live_max_point);
  max_regno = max_reg_num ();
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    if (i >= lra_constraint_new_regno_start || reg_renumber[i] >= 0)
      {
	for (r = lra_reg_info[i].live_ranges; r != NULL; r = r->next)
	  {
	    r->start_next = start_point_ranges[r->start];
	    start_point_ranges[r->start] = r;
	  }
      }
    else
      {
	for (r = lra_reg_info[i].live_ranges; r != NULL; r = r->next)
	  r->start_next = &not_in_chain_mark;
      }
}
/* Insert live ranges of pseudo REGNO into start chains if they are
   not there yet.  */
static void
insert_in_live_range_start_chain (int regno)
{
  lra_live_range_t r = lra_reg_info[regno].live_ranges;

  if (r->start_next != &not_in_chain_mark)
    return;
  for (; r != NULL; r = r->next)
    {
      r->start_next = start_point_ranges[r->start];
      start_point_ranges[r->start] = r;
    }
}
/* Free START_POINT_RANGES.  */
static void
finish_live_range_start_chains (void)
{
  gcc_assert (start_point_ranges != NULL);
  free (start_point_ranges);
  start_point_ranges = NULL;
}
/* Map: program point -> bitmap of all pseudos living at the point and
   assigned to hard registers.  */
static bitmap_head *live_hard_reg_pseudos;
static bitmap_obstack live_hard_reg_pseudos_bitmap_obstack;

/* reg_renumber corresponding to pseudos marked in
   live_hard_reg_pseudos.  reg_renumber might not match
   live_hard_reg_pseudos, but live_pseudos_reg_renumber always reflects
   live_hard_reg_pseudos.  */
static int *live_pseudos_reg_renumber;

/* Sparseset used to calculate living hard reg pseudos for some program
   point range.  */
static sparseset live_range_hard_reg_pseudos;

/* Sparseset used to calculate living reload/inheritance pseudos for
   some program point range.  */
static sparseset live_range_reload_inheritance_pseudos;
/* Allocate and initialize the data about living pseudos at program
   points.  */
static void
init_lives (void)
{
  int i, max_regno = max_reg_num ();

  live_range_hard_reg_pseudos = sparseset_alloc (max_regno);
  live_range_reload_inheritance_pseudos = sparseset_alloc (max_regno);
  live_hard_reg_pseudos = XNEWVEC (bitmap_head, lra_live_max_point);
  bitmap_obstack_initialize (&live_hard_reg_pseudos_bitmap_obstack);
  for (i = 0; i < lra_live_max_point; i++)
    bitmap_initialize (&live_hard_reg_pseudos[i],
		       &live_hard_reg_pseudos_bitmap_obstack);
  live_pseudos_reg_renumber = XNEWVEC (int, max_regno);
  for (i = 0; i < max_regno; i++)
    live_pseudos_reg_renumber[i] = -1;
}

/* Free the data about living pseudos at program points.  */
static void
finish_lives (void)
{
  sparseset_free (live_range_hard_reg_pseudos);
  sparseset_free (live_range_reload_inheritance_pseudos);
  free (live_hard_reg_pseudos);
  bitmap_obstack_release (&live_hard_reg_pseudos_bitmap_obstack);
  free (live_pseudos_reg_renumber);
}
/* Update the LIVE_HARD_REG_PSEUDOS and LIVE_PSEUDOS_REG_RENUMBER
   entries for pseudo REGNO.  Assume that the register has been
   spilled if FREE_P, otherwise assume that it has been assigned
   reg_renumber[REGNO] (if >= 0).  We also insert the pseudo live
   ranges in the start chains when it is assumed to be assigned to a
   hard register because we use the chains of pseudos assigned to hard
   registers during allocation.  */
static void
update_lives (int regno, bool free_p)
{
  int p;
  lra_live_range_t r;

  if (reg_renumber[regno] < 0)
    return;
  live_pseudos_reg_renumber[regno] = free_p ? -1 : reg_renumber[regno];
  for (r = lra_reg_info[regno].live_ranges; r != NULL; r = r->next)
    {
      for (p = r->start; p <= r->finish; p++)
	if (free_p)
	  bitmap_clear_bit (&live_hard_reg_pseudos[p], regno);
	else
	  {
	    bitmap_set_bit (&live_hard_reg_pseudos[p], regno);
	    insert_in_live_range_start_chain (regno);
	  }
    }
}
/* Sparseset used to calculate reload pseudos conflicting with a given
   pseudo when we are trying to find a hard register for the given
   pseudo.  */
static sparseset conflict_reload_and_inheritance_pseudos;

/* Map: program point -> bitmap of all reload and inheritance pseudos
   living at the point.  */
static bitmap_head *live_reload_and_inheritance_pseudos;
static bitmap_obstack live_reload_and_inheritance_pseudos_bitmap_obstack;

/* Allocate and initialize data about living reload pseudos at any
   given program point.  */
static void
init_live_reload_and_inheritance_pseudos (void)
{
  int i, p, max_regno = max_reg_num ();
  lra_live_range_t r;

  conflict_reload_and_inheritance_pseudos = sparseset_alloc (max_regno);
  live_reload_and_inheritance_pseudos = XNEWVEC (bitmap_head, lra_live_max_point);
  bitmap_obstack_initialize (&live_reload_and_inheritance_pseudos_bitmap_obstack);
  for (p = 0; p < lra_live_max_point; p++)
    bitmap_initialize (&live_reload_and_inheritance_pseudos[p],
		       &live_reload_and_inheritance_pseudos_bitmap_obstack);
  for (i = lra_constraint_new_regno_start; i < max_regno; i++)
    for (r = lra_reg_info[i].live_ranges; r != NULL; r = r->next)
      for (p = r->start; p <= r->finish; p++)
	bitmap_set_bit (&live_reload_and_inheritance_pseudos[p], i);
}

/* Finalize data about living reload pseudos at any given program
   point.  */
static void
finish_live_reload_and_inheritance_pseudos (void)
{
  sparseset_free (conflict_reload_and_inheritance_pseudos);
  free (live_reload_and_inheritance_pseudos);
  bitmap_obstack_release (&live_reload_and_inheritance_pseudos_bitmap_obstack);
}
/* The value used to check that cost of given hard reg is really
   defined currently.  */
static int curr_hard_regno_costs_check = 0;
/* Array used to check that cost of the corresponding hard reg (the
   array element index) is really defined currently.  */
static int hard_regno_costs_check[FIRST_PSEUDO_REGISTER];
/* The current costs of allocation of hard regs.  Defined only if the
   value of the corresponding element of the previous array is equal to
   CURR_HARD_REGNO_COSTS_CHECK.  */
static int hard_regno_costs[FIRST_PSEUDO_REGISTER];

/* Adjust cost of HARD_REGNO by INCR.  Reset the cost first if it is
   not defined yet.  */
static void
adjust_hard_regno_cost (int hard_regno, int incr)
{
  if (hard_regno_costs_check[hard_regno] != curr_hard_regno_costs_check)
    hard_regno_costs[hard_regno] = 0;
  hard_regno_costs_check[hard_regno] = curr_hard_regno_costs_check;
  hard_regno_costs[hard_regno] += incr;
}
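/* A minimal illustrative sketch of the timestamp trick used by the
   two arrays above (generic names, not code from this file): a
   parallel "check" array records the query number for which each slot
   was last written, so invalidating every cost is O(1).

       static int check[FIRST_PSEUDO_REGISTER];
       static int cost[FIRST_PSEUDO_REGISTER];
       static int curr_check;

       static void start_new_query (void) { curr_check++; }

       static void add_cost (int hr, int incr)
       {
	 if (check[hr] != curr_check)
	   cost[hr] = 0;		// stale slot: reset lazily
	 check[hr] = curr_check;
	 cost[hr] += incr;
       }

   A slot whose check value differs from curr_check is treated as
   undefined (zero), exactly as adjust_hard_regno_cost does with
   hard_regno_costs_check and curr_hard_regno_costs_check.  */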
/* Try to find a free hard register for pseudo REGNO.  Return the
   hard register on success and set *COST to the cost of using
   that register.  (If several registers have equal cost, the one with
   the highest priority wins.)  Return -1 on failure.

   If FIRST_P, return the first available hard reg ignoring other
   criteria, e.g. allocation cost.  This approach results in less hard
   reg pool fragmentation and permits us to allocate hard regs to reload
   pseudos in complicated situations where pseudo sizes are different.

   If TRY_ONLY_HARD_REGNO >= 0, consider only that hard register,
   otherwise consider all hard registers in REGNO's class.  */
static int
find_hard_regno_for (int regno, int *cost, int try_only_hard_regno,
		     bool first_p)
{
  HARD_REG_SET conflict_set;
  int best_cost = INT_MAX, best_priority = INT_MIN, best_usage = INT_MAX;
  lra_live_range_t r;
  int p, i, j, rclass_size, best_hard_regno, priority, hard_regno;
  int hr, conflict_hr, nregs;
  machine_mode biggest_mode;
  unsigned int k, conflict_regno;
  int offset, val, biggest_nregs, nregs_diff;
  enum reg_class rclass;
  bitmap_iterator bi;
  bool *rclass_intersect_p;
  HARD_REG_SET impossible_start_hard_regs, available_regs;

  COPY_HARD_REG_SET (conflict_set, lra_no_alloc_regs);
  rclass = regno_allocno_class_array[regno];
  rclass_intersect_p = ira_reg_classes_intersect_p[rclass];
  curr_hard_regno_costs_check++;
  sparseset_clear (conflict_reload_and_inheritance_pseudos);
  sparseset_clear (live_range_hard_reg_pseudos);
  IOR_HARD_REG_SET (conflict_set, lra_reg_info[regno].conflict_hard_regs);
  biggest_mode = lra_reg_info[regno].biggest_mode;
  for (r = lra_reg_info[regno].live_ranges; r != NULL; r = r->next)
    {
      EXECUTE_IF_SET_IN_BITMAP (&live_hard_reg_pseudos[r->start], 0, k, bi)
	if (rclass_intersect_p[regno_allocno_class_array[k]])
	  sparseset_set_bit (live_range_hard_reg_pseudos, k);
      EXECUTE_IF_SET_IN_BITMAP (&live_reload_and_inheritance_pseudos[r->start],
				0, k, bi)
	if (lra_reg_info[k].preferred_hard_regno1 >= 0
	    && live_pseudos_reg_renumber[k] < 0
	    && rclass_intersect_p[regno_allocno_class_array[k]])
	  sparseset_set_bit (conflict_reload_and_inheritance_pseudos, k);
      for (p = r->start + 1; p <= r->finish; p++)
	{
	  lra_live_range_t r2;

	  for (r2 = start_point_ranges[p];
	       r2 != NULL;
	       r2 = r2->start_next)
	    {
	      if (r2->regno >= lra_constraint_new_regno_start
		  && lra_reg_info[r2->regno].preferred_hard_regno1 >= 0
		  && live_pseudos_reg_renumber[r2->regno] < 0
		  && rclass_intersect_p[regno_allocno_class_array[r2->regno]])
		sparseset_set_bit (conflict_reload_and_inheritance_pseudos,
				   r2->regno);
	      if (live_pseudos_reg_renumber[r2->regno] >= 0
		  && rclass_intersect_p[regno_allocno_class_array[r2->regno]])
		sparseset_set_bit (live_range_hard_reg_pseudos, r2->regno);
	    }
	}
    }
  if ((hard_regno = lra_reg_info[regno].preferred_hard_regno1) >= 0)
    {
      adjust_hard_regno_cost
	(hard_regno, -lra_reg_info[regno].preferred_hard_regno_profit1);
      if ((hard_regno = lra_reg_info[regno].preferred_hard_regno2) >= 0)
	adjust_hard_regno_cost
	  (hard_regno, -lra_reg_info[regno].preferred_hard_regno_profit2);
    }
#ifdef STACK_REGS
  if (lra_reg_info[regno].no_stack_p)
    for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
      SET_HARD_REG_BIT (conflict_set, i);
#endif
  sparseset_clear_bit (conflict_reload_and_inheritance_pseudos, regno);
  val = lra_reg_info[regno].val;
  offset = lra_reg_info[regno].offset;
  CLEAR_HARD_REG_SET (impossible_start_hard_regs);
  EXECUTE_IF_SET_IN_SPARSESET (live_range_hard_reg_pseudos, conflict_regno)
    if (lra_reg_val_equal_p (conflict_regno, val, offset))
      {
	conflict_hr = live_pseudos_reg_renumber[conflict_regno];
	nregs = (hard_regno_nregs[conflict_hr]
		 [lra_reg_info[conflict_regno].biggest_mode]);
	/* Remember about multi-register pseudos.  For example, 2 hard
	   register pseudos can start on the same hard register but can
	   not start on HR and HR+1/HR-1.  */
	for (hr = conflict_hr + 1;
	     hr < FIRST_PSEUDO_REGISTER && hr < conflict_hr + nregs;
	     hr++)
	  SET_HARD_REG_BIT (impossible_start_hard_regs, hr);
	for (hr = conflict_hr - 1;
	     hr >= 0 && hr + hard_regno_nregs[hr][biggest_mode] > conflict_hr;
	     hr--)
	  SET_HARD_REG_BIT (impossible_start_hard_regs, hr);
      }
    else
      {
	add_to_hard_reg_set (&conflict_set,
			     lra_reg_info[conflict_regno].biggest_mode,
			     live_pseudos_reg_renumber[conflict_regno]);
	if (hard_reg_set_subset_p (reg_class_contents[rclass],
				   conflict_set))
	  return -1;
      }
  EXECUTE_IF_SET_IN_SPARSESET (conflict_reload_and_inheritance_pseudos,
			       conflict_regno)
    if (!lra_reg_val_equal_p (conflict_regno, val, offset))
      {
	lra_assert (live_pseudos_reg_renumber[conflict_regno] < 0);
	if ((hard_regno
	     = lra_reg_info[conflict_regno].preferred_hard_regno1) >= 0)
	  {
	    adjust_hard_regno_cost
	      (hard_regno,
	       lra_reg_info[conflict_regno].preferred_hard_regno_profit1);
	    if ((hard_regno
		 = lra_reg_info[conflict_regno].preferred_hard_regno2) >= 0)
	      adjust_hard_regno_cost
		(hard_regno,
		 lra_reg_info[conflict_regno].preferred_hard_regno_profit2);
	  }
      }
  /* Make sure that all registers in a multi-word pseudo belong to the
     required class.  */
  IOR_COMPL_HARD_REG_SET (conflict_set, reg_class_contents[rclass]);
  lra_assert (rclass != NO_REGS);
  rclass_size = ira_class_hard_regs_num[rclass];
  best_hard_regno = -1;
  hard_regno = ira_class_hard_regs[rclass][0];
  biggest_nregs = hard_regno_nregs[hard_regno][biggest_mode];
  nregs_diff = (biggest_nregs
		- hard_regno_nregs[hard_regno][PSEUDO_REGNO_MODE (regno)]);
  COPY_HARD_REG_SET (available_regs, reg_class_contents[rclass]);
  AND_COMPL_HARD_REG_SET (available_regs, lra_no_alloc_regs);
  for (i = 0; i < rclass_size; i++)
    {
      if (try_only_hard_regno >= 0)
	hard_regno = try_only_hard_regno;
      else
	hard_regno = ira_class_hard_regs[rclass][i];
      if (! overlaps_hard_reg_set_p (conflict_set,
				     PSEUDO_REGNO_MODE (regno), hard_regno)
	  /* We can not use prohibited_class_mode_regs because it is
	     not defined for all classes.  */
	  && HARD_REGNO_MODE_OK (hard_regno, PSEUDO_REGNO_MODE (regno))
	  && ! TEST_HARD_REG_BIT (impossible_start_hard_regs, hard_regno)
	  && (nregs_diff == 0
	      || (WORDS_BIG_ENDIAN
		  ? (hard_regno - nregs_diff >= 0
		     && TEST_HARD_REG_BIT (available_regs,
					   hard_regno - nregs_diff))
		  : TEST_HARD_REG_BIT (available_regs,
				       hard_regno + nregs_diff))))
	{
	  if (hard_regno_costs_check[hard_regno]
	      != curr_hard_regno_costs_check)
	    {
	      hard_regno_costs_check[hard_regno] = curr_hard_regno_costs_check;
	      hard_regno_costs[hard_regno] = 0;
	    }
	  for (j = 0;
	       j < hard_regno_nregs[hard_regno][PSEUDO_REGNO_MODE (regno)];
	       j++)
	    if (! TEST_HARD_REG_BIT (call_used_reg_set, hard_regno + j)
		&& ! df_regs_ever_live_p (hard_regno + j))
	      /* It needs save restore.  */
	      hard_regno_costs[hard_regno]
		+= (2
		    * REG_FREQ_FROM_BB (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
		    + 1);
	  priority = targetm.register_priority (hard_regno);
	  if (best_hard_regno < 0 || hard_regno_costs[hard_regno] < best_cost
	      || (hard_regno_costs[hard_regno] == best_cost
		  && (priority > best_priority
		      || (targetm.register_usage_leveling_p ()
			  && priority == best_priority
			  && best_usage > lra_hard_reg_usage[hard_regno]))))
	    {
	      best_hard_regno = hard_regno;
	      best_cost = hard_regno_costs[hard_regno];
	      best_priority = priority;
	      best_usage = lra_hard_reg_usage[hard_regno];
	    }
	}
      if (try_only_hard_regno >= 0 || (first_p && best_hard_regno >= 0))
	break;
    }
  if (best_hard_regno >= 0)
    *cost = best_cost - lra_reg_info[regno].freq;
  return best_hard_regno;
}
/* Current value used for checking elements in
   update_hard_regno_preference_check.  */
static int curr_update_hard_regno_preference_check;
/* If an element value is equal to the above variable value, then the
   corresponding regno has been processed for preference
   propagation.  */
static int *update_hard_regno_preference_check;

/* Update the preference for using HARD_REGNO for pseudos that are
   connected directly or indirectly with REGNO.  Apply divisor DIV
   to any preference adjustments.

   The more indirectly a pseudo is connected, the smaller its effect
   should be.  We therefore increase DIV on each "hop".  */
static void
update_hard_regno_preference (int regno, int hard_regno, int div)
{
  int another_regno, cost;
  lra_copy_t cp, next_cp;

  /* Search depth 5 seems to be enough.  */
  if (div > (1 << 5))
    return;
  for (cp = lra_reg_info[regno].copies; cp != NULL; cp = next_cp)
    {
      if (cp->regno1 == regno)
	{
	  next_cp = cp->regno1_next;
	  another_regno = cp->regno2;
	}
      else if (cp->regno2 == regno)
	{
	  next_cp = cp->regno2_next;
	  another_regno = cp->regno1;
	}
      else
	gcc_unreachable ();
      if (reg_renumber[another_regno] < 0
	  && (update_hard_regno_preference_check[another_regno]
	      != curr_update_hard_regno_preference_check))
	{
	  update_hard_regno_preference_check[another_regno]
	    = curr_update_hard_regno_preference_check;
	  cost = cp->freq < div ? 1 : cp->freq / div;
	  lra_setup_reload_pseudo_preferenced_hard_reg
	    (another_regno, hard_regno, cost);
	  update_hard_regno_preference (another_regno, hard_regno, div * 2);
	}
    }
}
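/* Worked example for the divisor above (hypothetical regnos and
   frequencies, not from the original sources): if r300 is connected
   to r301 by a copy of frequency 1000 and r301 to r302 by a copy of
   frequency 600, then assigning a hard register to r300 records a
   preference profit of 1000 / 1 = 1000 for r301 (div == 1) and
   600 / 2 = 300 for r302 (div doubled to 2 on the second hop), so
   more distant pseudos are influenced progressively less.  */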
/* Return prefix title for pseudo REGNO.  */
static const char *
pseudo_prefix_title (int regno)
{
  return
    (regno < lra_constraint_new_regno_start ? ""
     : bitmap_bit_p (&lra_inheritance_pseudos, regno) ? "inheritance "
     : bitmap_bit_p (&lra_split_regs, regno) ? "split "
     : bitmap_bit_p (&lra_optional_reload_pseudos, regno) ? "optional reload "
     : bitmap_bit_p (&lra_subreg_reload_pseudos, regno) ? "subreg reload "
     : "reload ");
}
/* Update REG_RENUMBER and other pseudo preferences by assignment of
   HARD_REGNO to pseudo REGNO and print about it if PRINT_P.  */
void
lra_setup_reg_renumber (int regno, int hard_regno, bool print_p)
{
  int i, hr;

  /* We can not just reassign a hard register.  */
  lra_assert (hard_regno < 0 || reg_renumber[regno] < 0);
  if ((hr = hard_regno) < 0)
    hr = reg_renumber[regno];
  reg_renumber[regno] = hard_regno;
  lra_assert (hr >= 0);
  for (i = 0; i < hard_regno_nregs[hr][PSEUDO_REGNO_MODE (regno)]; i++)
    if (hard_regno < 0)
      lra_hard_reg_usage[hr + i] -= lra_reg_info[regno].freq;
    else
      lra_hard_reg_usage[hr + i] += lra_reg_info[regno].freq;
  if (print_p && lra_dump_file != NULL)
    fprintf (lra_dump_file, "	   Assign %d to %sr%d (freq=%d)\n",
	     reg_renumber[regno], pseudo_prefix_title (regno),
	     regno, lra_reg_info[regno].freq);
  if (hard_regno >= 0)
    {
      curr_update_hard_regno_preference_check++;
      update_hard_regno_preference (regno, hard_regno, 1);
    }
}
/* Pseudos which occur in insns containing a particular pseudo.  */
static bitmap_head insn_conflict_pseudos;

/* Bitmaps used to contain spill pseudos for given pseudo hard regno
   and best spill pseudos for given pseudo (and best hard regno).  */
static bitmap_head spill_pseudos_bitmap, best_spill_pseudos_bitmap;

/* Current pseudo check for validity of elements in
   TRY_HARD_REG_PSEUDOS.  */
static int curr_pseudo_check;
/* Array used for validity of elements in TRY_HARD_REG_PSEUDOS.  */
static int try_hard_reg_pseudos_check[FIRST_PSEUDO_REGISTER];
/* Pseudos that hold a given hard register at the considered points.  */
static bitmap_head try_hard_reg_pseudos[FIRST_PSEUDO_REGISTER];
/* Set up try_hard_reg_pseudos for given program point P and class
   RCLASS.  Those are pseudos living at P and assigned to a hard
   register of RCLASS.  In other words, those are pseudos which can be
   spilled to assign a hard register of RCLASS to a pseudo living at
   P.  */
static void
setup_try_hard_regno_pseudos (int p, enum reg_class rclass)
{
  int i, hard_regno;
  machine_mode mode;
  unsigned int spill_regno;
  bitmap_iterator bi;

  /* Find what pseudos could be spilled.  */
  EXECUTE_IF_SET_IN_BITMAP (&live_hard_reg_pseudos[p], 0, spill_regno, bi)
    {
      mode = PSEUDO_REGNO_MODE (spill_regno);
      hard_regno = live_pseudos_reg_renumber[spill_regno];
      if (overlaps_hard_reg_set_p (reg_class_contents[rclass],
				   mode, hard_regno))
	{
	  for (i = hard_regno_nregs[hard_regno][mode] - 1; i >= 0; i--)
	    {
	      if (try_hard_reg_pseudos_check[hard_regno + i]
		  != curr_pseudo_check)
		{
		  try_hard_reg_pseudos_check[hard_regno + i]
		    = curr_pseudo_check;
		  bitmap_clear (&try_hard_reg_pseudos[hard_regno + i]);
		}
	      bitmap_set_bit (&try_hard_reg_pseudos[hard_regno + i],
			      spill_regno);
	    }
	}
    }
}
/* Assign temporarily HARD_REGNO to pseudo REGNO.  Temporary
   assignment means that we might undo the data change.  */
static void
assign_temporarily (int regno, int hard_regno)
{
  int p;
  lra_live_range_t r;

  for (r = lra_reg_info[regno].live_ranges; r != NULL; r = r->next)
    {
      for (p = r->start; p <= r->finish; p++)
	if (hard_regno < 0)
	  bitmap_clear_bit (&live_hard_reg_pseudos[p], regno);
	else
	  {
	    bitmap_set_bit (&live_hard_reg_pseudos[p], regno);
	    insert_in_live_range_start_chain (regno);
	  }
    }
  live_pseudos_reg_renumber[regno] = hard_regno;
}
/* Array used for sorting reload pseudos for subsequent allocation
   after spilling some pseudo.  */
static int *sorted_reload_pseudos;

/* Spill some pseudos for a reload pseudo REGNO and return hard
   register which should be used for pseudo after spilling.  The
   function adds spilled pseudos to SPILLED_PSEUDO_BITMAP.  When we
   choose hard register (and pseudos occupying the hard registers and
   to be spilled), we take into account not only how REGNO will
   benefit from the spills but also how other reload pseudos not yet
   assigned to hard registers benefit from the spills too.  In very
   rare cases, the function can fail and return -1.

   If FIRST_P, return the first available hard reg ignoring other
   criteria, e.g. allocation cost and cost of spilling non-reload
   pseudos.  This approach results in less hard reg pool fragmentation
   and permits us to allocate hard regs to reload pseudos in complicated
   situations where pseudo sizes are different.  */
static int
spill_for (int regno, bitmap spilled_pseudo_bitmap, bool first_p)
{
  int i, j, n, p, hard_regno, best_hard_regno, cost, best_cost, rclass_size;
  int reload_hard_regno, reload_cost;
  machine_mode mode;
  enum reg_class rclass;
  unsigned int spill_regno, reload_regno, uid;
  int insn_pseudos_num, best_insn_pseudos_num;
  lra_live_range_t r;
  bitmap_iterator bi;

  rclass = regno_allocno_class_array[regno];
  lra_assert (reg_renumber[regno] < 0 && rclass != NO_REGS);
  bitmap_clear (&insn_conflict_pseudos);
  bitmap_clear (&best_spill_pseudos_bitmap);
  EXECUTE_IF_SET_IN_BITMAP (&lra_reg_info[regno].insn_bitmap, 0, uid, bi)
    {
      struct lra_insn_reg *ir;

      for (ir = lra_get_insn_regs (uid); ir != NULL; ir = ir->next)
	if (ir->regno >= FIRST_PSEUDO_REGISTER)
	  bitmap_set_bit (&insn_conflict_pseudos, ir->regno);
    }
  best_hard_regno = -1;
  best_cost = INT_MAX;
  best_insn_pseudos_num = INT_MAX;
  rclass_size = ira_class_hard_regs_num[rclass];
  mode = PSEUDO_REGNO_MODE (regno);
  /* Invalidate try_hard_reg_pseudos elements.  */
  curr_pseudo_check++;
  for (r = lra_reg_info[regno].live_ranges; r != NULL; r = r->next)
    for (p = r->start; p <= r->finish; p++)
      setup_try_hard_regno_pseudos (p, rclass);
  for (i = 0; i < rclass_size; i++)
    {
      hard_regno = ira_class_hard_regs[rclass][i];
      bitmap_clear (&spill_pseudos_bitmap);
      for (j = hard_regno_nregs[hard_regno][mode] - 1; j >= 0; j--)
	{
	  if (try_hard_reg_pseudos_check[hard_regno + j] != curr_pseudo_check)
	    continue;
	  lra_assert (!bitmap_empty_p (&try_hard_reg_pseudos[hard_regno + j]));
	  bitmap_ior_into (&spill_pseudos_bitmap,
			   &try_hard_reg_pseudos[hard_regno + j]);
	}
      EXECUTE_IF_SET_IN_BITMAP (&spill_pseudos_bitmap, 0, spill_regno, bi)
	if ((pic_offset_table_rtx != NULL
	     && spill_regno == REGNO (pic_offset_table_rtx))
	    || ((int) spill_regno >= lra_constraint_new_regno_start
		&& ! bitmap_bit_p (&lra_inheritance_pseudos, spill_regno)
		&& ! bitmap_bit_p (&lra_split_regs, spill_regno)
		&& ! bitmap_bit_p (&lra_subreg_reload_pseudos, spill_regno)
		&& ! bitmap_bit_p (&lra_optional_reload_pseudos, spill_regno)))
	  goto fail;
      insn_pseudos_num = 0;
      if (lra_dump_file != NULL)
	fprintf (lra_dump_file, "	 Trying %d:", hard_regno);
      sparseset_clear (live_range_reload_inheritance_pseudos);
      EXECUTE_IF_SET_IN_BITMAP (&spill_pseudos_bitmap, 0, spill_regno, bi)
	{
	  if (bitmap_bit_p (&insn_conflict_pseudos, spill_regno))
	    insn_pseudos_num++;
	  for (r = lra_reg_info[spill_regno].live_ranges;
	       r != NULL;
	       r = r->next)
	    {
	      for (p = r->start; p <= r->finish; p++)
		{
		  lra_live_range_t r2;

		  for (r2 = start_point_ranges[p];
		       r2 != NULL;
		       r2 = r2->start_next)
		    if (r2->regno >= lra_constraint_new_regno_start)
		      sparseset_set_bit (live_range_reload_inheritance_pseudos,
					 r2->regno);
		}
	    }
	}
      n = 0;
      if (sparseset_cardinality (live_range_reload_inheritance_pseudos)
	  <= (unsigned)LRA_MAX_CONSIDERED_RELOAD_PSEUDOS)
	EXECUTE_IF_SET_IN_SPARSESET (live_range_reload_inheritance_pseudos,
				     reload_regno)
	  if ((int) reload_regno != regno
	      && (ira_reg_classes_intersect_p
		  [rclass][regno_allocno_class_array[reload_regno]])
	      && live_pseudos_reg_renumber[reload_regno] < 0
	      && find_hard_regno_for (reload_regno, &cost, -1, first_p) < 0)
	    sorted_reload_pseudos[n++] = reload_regno;
      EXECUTE_IF_SET_IN_BITMAP (&spill_pseudos_bitmap, 0, spill_regno, bi)
	{
	  update_lives (spill_regno, true);
	  if (lra_dump_file != NULL)
	    fprintf (lra_dump_file, " spill %d(freq=%d)",
		     spill_regno, lra_reg_info[spill_regno].freq);
	}
      hard_regno = find_hard_regno_for (regno, &cost, -1, first_p);
      if (hard_regno >= 0)
	{
	  assign_temporarily (regno, hard_regno);
	  qsort (sorted_reload_pseudos, n, sizeof (int),
		 reload_pseudo_compare_func);
	  for (j = 0; j < n; j++)
	    {
	      reload_regno = sorted_reload_pseudos[j];
	      lra_assert (live_pseudos_reg_renumber[reload_regno] < 0);
	      if ((reload_hard_regno
		   = find_hard_regno_for (reload_regno,
					  &reload_cost, -1, first_p)) >= 0)
		{
		  if (lra_dump_file != NULL)
		    fprintf (lra_dump_file, " assign %d(cost=%d)",
			     reload_regno, reload_cost);
		  assign_temporarily (reload_regno, reload_hard_regno);
		  cost += reload_cost;
		}
	    }
	  EXECUTE_IF_SET_IN_BITMAP (&spill_pseudos_bitmap, 0, spill_regno, bi)
	    {
	      rtx_insn_list *x;

	      cost += lra_reg_info[spill_regno].freq;
	      if (ira_reg_equiv[spill_regno].memory != NULL
		  || ira_reg_equiv[spill_regno].constant != NULL)
		for (x = ira_reg_equiv[spill_regno].init_insns;
		     x != NULL;
		     x = x->next ())
		  cost -= REG_FREQ_FROM_BB (BLOCK_FOR_INSN (x->insn ()));
	    }
	  if (best_insn_pseudos_num > insn_pseudos_num
	      || (best_insn_pseudos_num == insn_pseudos_num
		  && best_cost > cost))
	    {
	      best_insn_pseudos_num = insn_pseudos_num;
	      best_cost = cost;
	      best_hard_regno = hard_regno;
	      bitmap_copy (&best_spill_pseudos_bitmap, &spill_pseudos_bitmap);
	      if (lra_dump_file != NULL)
		fprintf (lra_dump_file, "	 Now best %d(cost=%d)\n",
			 hard_regno, cost);
	    }
	  assign_temporarily (regno, -1);
	  for (j = 0; j < n; j++)
	    {
	      reload_regno = sorted_reload_pseudos[j];
	      if (live_pseudos_reg_renumber[reload_regno] >= 0)
		assign_temporarily (reload_regno, -1);
	    }
	}
      if (lra_dump_file != NULL)
	fprintf (lra_dump_file, "\n");
      /* Restore the live hard reg pseudo info for spilled pseudos.  */
      EXECUTE_IF_SET_IN_BITMAP (&spill_pseudos_bitmap, 0, spill_regno, bi)
	update_lives (spill_regno, false);
    fail:
      ;
    }
  EXECUTE_IF_SET_IN_BITMAP (&best_spill_pseudos_bitmap, 0, spill_regno, bi)
    {
      if ((int) spill_regno >= lra_constraint_new_regno_start)
	former_reload_pseudo_spill_p = true;
      if (lra_dump_file != NULL)
	fprintf (lra_dump_file, "	 Spill %sr%d(hr=%d, freq=%d) for r%d\n",
		 pseudo_prefix_title (spill_regno),
		 spill_regno, reg_renumber[spill_regno],
		 lra_reg_info[spill_regno].freq, regno);
      update_lives (spill_regno, true);
      lra_setup_reg_renumber (spill_regno, -1, false);
    }
  bitmap_ior_into (spilled_pseudo_bitmap, &best_spill_pseudos_bitmap);
  return best_hard_regno;
}
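/* Summary of the cost accounting above, given as an illustrative
   sketch (hypothetical reading, not additional code): when a hard
   register is tried for the reload pseudo, the candidate cost starts
   from the find_hard_regno_for cost of the pseudo itself, every other
   reload pseudo that becomes allocatable adds its own allocation
   cost, every spilled pseudo adds its frequency (the price of
   spilling it), and a spilled pseudo with a memory or constant
   equivalence gets the frequencies of its equivalence insns
   subtracted because it is cheap to rematerialize.  The candidate
   spilling the fewest pseudos that occur in the reload insns, and
   among those the one with the lowest cost, wins.  */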
/* Assign HARD_REGNO to REGNO.  */
static void
assign_hard_regno (int hard_regno, int regno)
{
  int i;

  lra_assert (hard_regno >= 0);
  lra_setup_reg_renumber (regno, hard_regno, true);
  update_lives (regno, false);
  for (i = 0;
       i < hard_regno_nregs[hard_regno][lra_reg_info[regno].biggest_mode];
       i++)
    df_set_regs_ever_live (hard_regno + i, true);
}
/* Array used for sorting different pseudos.  */
static int *sorted_pseudos;

/* The constraints pass is allowed to create equivalences between
   pseudos that make the current allocation "incorrect" (in the sense
   that pseudos are assigned to hard registers from their own conflict
   sets).  The global variable lra_risky_transformations_p says
   whether this might have happened.

   Process pseudos assigned to hard registers (less frequently used
   first), spill if a conflict is found, and mark the spilled pseudos
   in SPILLED_PSEUDO_BITMAP.  Set up LIVE_HARD_REG_PSEUDOS from
   pseudos, assigned to hard registers.  */
static void
setup_live_pseudos_and_spill_after_risky_transforms (bitmap
						     spilled_pseudo_bitmap)
{
  int p, i, j, n, regno, hard_regno;
  unsigned int k, conflict_regno;
  int val, offset;
  HARD_REG_SET conflict_set;
  machine_mode mode;
  lra_live_range_t r;
  bitmap_iterator bi;
  int max_regno = max_reg_num ();

  if (! lra_risky_transformations_p)
    {
      for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
	if (reg_renumber[i] >= 0 && lra_reg_info[i].nrefs > 0)
	  update_lives (i, false);
      return;
    }
  for (n = 0, i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    if ((pic_offset_table_rtx == NULL_RTX
	 || i != (int) REGNO (pic_offset_table_rtx))
	&& reg_renumber[i] >= 0 && lra_reg_info[i].nrefs > 0)
      sorted_pseudos[n++] = i;
  qsort (sorted_pseudos, n, sizeof (int), pseudo_compare_func);
  if (pic_offset_table_rtx != NULL_RTX
      && (regno = REGNO (pic_offset_table_rtx)) >= FIRST_PSEUDO_REGISTER
      && reg_renumber[regno] >= 0 && lra_reg_info[regno].nrefs > 0)
    sorted_pseudos[n++] = regno;
  for (i = n - 1; i >= 0; i--)
    {
      regno = sorted_pseudos[i];
      hard_regno = reg_renumber[regno];
      lra_assert (hard_regno >= 0);
      mode = lra_reg_info[regno].biggest_mode;
      sparseset_clear (live_range_hard_reg_pseudos);
      for (r = lra_reg_info[regno].live_ranges; r != NULL; r = r->next)
	{
	  EXECUTE_IF_SET_IN_BITMAP (&live_hard_reg_pseudos[r->start], 0, k, bi)
	    sparseset_set_bit (live_range_hard_reg_pseudos, k);
	  for (p = r->start + 1; p <= r->finish; p++)
	    {
	      lra_live_range_t r2;

	      for (r2 = start_point_ranges[p];
		   r2 != NULL;
		   r2 = r2->start_next)
		if (live_pseudos_reg_renumber[r2->regno] >= 0)
		  sparseset_set_bit (live_range_hard_reg_pseudos, r2->regno);
	    }
	}
      COPY_HARD_REG_SET (conflict_set, lra_no_alloc_regs);
      IOR_HARD_REG_SET (conflict_set, lra_reg_info[regno].conflict_hard_regs);
      val = lra_reg_info[regno].val;
      offset = lra_reg_info[regno].offset;
      EXECUTE_IF_SET_IN_SPARSESET (live_range_hard_reg_pseudos, conflict_regno)
	if (!lra_reg_val_equal_p (conflict_regno, val, offset)
	    /* If they are multi-register pseudos, they should start on
	       the same hard register.  */
	    || hard_regno != reg_renumber[conflict_regno])
	  add_to_hard_reg_set (&conflict_set,
			       lra_reg_info[conflict_regno].biggest_mode,
			       reg_renumber[conflict_regno]);
      if (! overlaps_hard_reg_set_p (conflict_set, mode, hard_regno))
	{
	  update_lives (regno, false);
	  continue;
	}
      bitmap_set_bit (spilled_pseudo_bitmap, regno);
      for (j = 0;
	   j < hard_regno_nregs[hard_regno][PSEUDO_REGNO_MODE (regno)];
	   j++)
	lra_hard_reg_usage[hard_regno + j] -= lra_reg_info[regno].freq;
      reg_renumber[regno] = -1;
      if (regno >= lra_constraint_new_regno_start)
	former_reload_pseudo_spill_p = true;
      if (lra_dump_file != NULL)
	fprintf (lra_dump_file, "	   Spill r%d after risky transformations\n",
		 regno);
    }
}
/* Improve allocation by assigning the same hard regno of inheritance
   pseudos to the connected pseudos.  We need this because inheritance
   pseudos are allocated after reload pseudos in the thread and when
   we assign a hard register to a reload pseudo we don't know yet that
   the connected inheritance pseudos can get the same hard register.
   Add pseudos with changed allocation to bitmap CHANGED_PSEUDOS.  */
static void
improve_inheritance (bitmap changed_pseudos)
{
  unsigned int k;
  int regno, another_regno, hard_regno, another_hard_regno, cost, i, n;
  lra_copy_t cp, next_cp;
  bitmap_iterator bi;

  if (lra_inheritance_iter > LRA_MAX_INHERITANCE_PASSES)
    return;
  n = 0;
  EXECUTE_IF_SET_IN_BITMAP (&lra_inheritance_pseudos, 0, k, bi)
    if (reg_renumber[k] >= 0 && lra_reg_info[k].nrefs != 0)
      sorted_pseudos[n++] = k;
  qsort (sorted_pseudos, n, sizeof (int), pseudo_compare_func);
  for (i = 0; i < n; i++)
    {
      regno = sorted_pseudos[i];
      hard_regno = reg_renumber[regno];
      lra_assert (hard_regno >= 0);
      for (cp = lra_reg_info[regno].copies; cp != NULL; cp = next_cp)
	{
	  if (cp->regno1 == regno)
	    {
	      next_cp = cp->regno1_next;
	      another_regno = cp->regno2;
	    }
	  else if (cp->regno2 == regno)
	    {
	      next_cp = cp->regno2_next;
	      another_regno = cp->regno1;
	    }
	  else
	    gcc_unreachable ();
	  /* Don't change reload pseudo allocation.  It might have
	     this allocation for a purpose and changing it can result
	     in LRA cycling.  */
	  if ((another_regno < lra_constraint_new_regno_start
	       || bitmap_bit_p (&lra_inheritance_pseudos, another_regno))
	      && (another_hard_regno = reg_renumber[another_regno]) >= 0
	      && another_hard_regno != hard_regno)
	    {
	      if (lra_dump_file != NULL)
		fprintf
		  (lra_dump_file,
		   "    Improving inheritance for %d(%d) and %d(%d)...\n",
		   regno, hard_regno, another_regno, another_hard_regno);
	      update_lives (another_regno, true);
	      lra_setup_reg_renumber (another_regno, -1, false);
	      if (hard_regno == find_hard_regno_for (another_regno, &cost,
						     hard_regno, false))
		assign_hard_regno (hard_regno, another_regno);
	      else
		assign_hard_regno (another_hard_regno, another_regno);
	      bitmap_set_bit (changed_pseudos, another_regno);
	    }
	}
    }
}
/* Bitmap finally containing all pseudos spilled on this assignment
   pass.  */
static bitmap_head all_spilled_pseudos;
/* All pseudos whose allocation was changed.  */
static bitmap_head changed_pseudo_bitmap;
/* Add to LIVE_RANGE_HARD_REG_PSEUDOS all pseudos conflicting with
   REGNO and whose hard regs can be assigned to REGNO.  */
static void
find_all_spills_for (int regno)
{
  int p;
  lra_live_range_t r;
  unsigned int k;
  bitmap_iterator bi;
  enum reg_class rclass;
  bool *rclass_intersect_p;

  rclass = regno_allocno_class_array[regno];
  rclass_intersect_p = ira_reg_classes_intersect_p[rclass];
  for (r = lra_reg_info[regno].live_ranges; r != NULL; r = r->next)
    {
      EXECUTE_IF_SET_IN_BITMAP (&live_hard_reg_pseudos[r->start], 0, k, bi)
	if (rclass_intersect_p[regno_allocno_class_array[k]])
	  sparseset_set_bit (live_range_hard_reg_pseudos, k);
      for (p = r->start + 1; p <= r->finish; p++)
	{
	  lra_live_range_t r2;

	  for (r2 = start_point_ranges[p];
	       r2 != NULL;
	       r2 = r2->start_next)
	    {
	      if (live_pseudos_reg_renumber[r2->regno] >= 0
		  && rclass_intersect_p[regno_allocno_class_array[r2->regno]])
		sparseset_set_bit (live_range_hard_reg_pseudos, r2->regno);
	    }
	}
    }
}
/* Assign hard registers to reload pseudos and other pseudos.  */
static void
assign_by_spills (void)
{
  int i, n, nfails, iter, regno, hard_regno, cost, restore_regno;
  rtx_insn *insn;
  bitmap_head changed_insns, do_not_assign_nonreload_pseudos;
  unsigned int u, conflict_regno;
  bitmap_iterator bi;
  bool reload_p;
  int max_regno = max_reg_num ();

  for (n = 0, i = lra_constraint_new_regno_start; i < max_regno; i++)
    if (reg_renumber[i] < 0 && lra_reg_info[i].nrefs != 0
	&& regno_allocno_class_array[i] != NO_REGS)
      sorted_pseudos[n++] = i;
  bitmap_initialize (&insn_conflict_pseudos, &reg_obstack);
  bitmap_initialize (&spill_pseudos_bitmap, &reg_obstack);
  bitmap_initialize (&best_spill_pseudos_bitmap, &reg_obstack);
  update_hard_regno_preference_check = XCNEWVEC (int, max_regno);
  curr_update_hard_regno_preference_check = 0;
  memset (try_hard_reg_pseudos_check, 0, sizeof (try_hard_reg_pseudos_check));
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    bitmap_initialize (&try_hard_reg_pseudos[i], &reg_obstack);
  curr_pseudo_check = 0;
  bitmap_initialize (&changed_insns, &reg_obstack);
  bitmap_initialize (&non_reload_pseudos, &reg_obstack);
  bitmap_ior (&non_reload_pseudos, &lra_inheritance_pseudos, &lra_split_regs);
  bitmap_ior_into (&non_reload_pseudos, &lra_subreg_reload_pseudos);
  bitmap_ior_into (&non_reload_pseudos, &lra_optional_reload_pseudos);
  for (iter = 0; iter <= 1; iter++)
    {
      qsort (sorted_pseudos, n, sizeof (int), reload_pseudo_compare_func);
      nfails = 0;
      for (i = 0; i < n; i++)
	{
	  regno = sorted_pseudos[i];
	  if (lra_dump_file != NULL)
	    fprintf (lra_dump_file, "	 Assigning to %d "
		     "(cl=%s, orig=%d, freq=%d, tfirst=%d, tfreq=%d)...\n",
		     regno, reg_class_names[regno_allocno_class_array[regno]],
		     ORIGINAL_REGNO (regno_reg_rtx[regno]),
		     lra_reg_info[regno].freq, regno_assign_info[regno].first,
		     regno_assign_info[regno_assign_info[regno].first].freq);
	  hard_regno = find_hard_regno_for (regno, &cost, -1, iter == 1);
	  reload_p = ! bitmap_bit_p (&non_reload_pseudos, regno);
	  if (hard_regno < 0 && reload_p)
	    hard_regno = spill_for (regno, &all_spilled_pseudos, iter == 1);
	  if (hard_regno < 0)
	    {
	      if (reload_p)
		sorted_pseudos[nfails++] = regno;
	    }
	  else
	    {
	      /* This register might have been spilled by the previous
		 pass.  Indicate that it is no longer spilled.  */
	      bitmap_clear_bit (&all_spilled_pseudos, regno);
	      assign_hard_regno (hard_regno, regno);
	      if (! reload_p)
		/* As non-reload pseudo assignment is changed we
		   should reconsider insns referring for the
		   pseudo.  */
		bitmap_set_bit (&changed_pseudo_bitmap, regno);
	    }
	}
      if (nfails == 0)
	break;
      if (iter > 0)
	{
	  /* We did not assign hard regs to reload pseudos after two iterations.
	     Either it's an asm and something is wrong with the constraints, or
	     we have run out of spill registers; error out in either case.  */
	  bool asm_p = false;
	  bitmap_head failed_reload_insns;

	  bitmap_initialize (&failed_reload_insns, &reg_obstack);
	  for (i = 0; i < nfails; i++)
	    {
	      regno = sorted_pseudos[i];
	      bitmap_ior_into (&failed_reload_insns,
			       &lra_reg_info[regno].insn_bitmap);
	      /* Assign an arbitrary hard register of regno class to
		 avoid further trouble with this insn.  */
	      bitmap_clear_bit (&all_spilled_pseudos, regno);
	      assign_hard_regno
		(ira_class_hard_regs[regno_allocno_class_array[regno]][0],
		 regno);
	    }
	  EXECUTE_IF_SET_IN_BITMAP (&failed_reload_insns, 0, u, bi)
	    {
	      insn = lra_insn_recog_data[u]->insn;
	      if (asm_noperands (PATTERN (insn)) >= 0)
		{
		  asm_p = true;
		  error_for_asm (insn,
				 "%<asm%> operand has impossible constraints");
		  /* Avoid further trouble with this insn.
		     For asm goto, instead of fixing up all the edges
		     just clear the template and clear input operands
		     (asm goto doesn't have any output operands).  */
		  if (JUMP_P (insn))
		    {
		      rtx asm_op = extract_asm_operands (PATTERN (insn));
		      ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
		      ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
		      ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
		      lra_update_insn_regno_info (insn);
		    }
		  else
		    {
		      PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
		      lra_set_insn_deleted (insn);
		    }
		}
	      else if (!asm_p)
		{
		  error ("unable to find a register to spill");
		  fatal_insn ("this is the insn:", insn);
		}
	    }
	  break;
	}
      /* This is a very rare event.  We can not assign a hard register
	 to reload pseudo because the hard register was assigned to
	 another reload pseudo on a previous assignment pass.  For x86
	 example, on the 1st pass we assigned CX (although another
	 hard register could be used for this) to reload pseudo in an
	 insn, on the 2nd pass we need CX (and only this) hard
	 register for a new reload pseudo in the same insn.  Another
	 possible situation may occur in assigning to multi-regs
	 reload pseudos when hard regs pool is too fragmented even
	 after spilling non-reload pseudos.

	 We should do something radical here to succeed.  Here we
	 spill *all* conflicting pseudos and reassign them.  */
      if (lra_dump_file != NULL)
	fprintf (lra_dump_file, "  2nd iter for reload pseudo assignments:\n");
      sparseset_clear (live_range_hard_reg_pseudos);
      for (i = 0; i < nfails; i++)
	{
	  if (lra_dump_file != NULL)
	    fprintf (lra_dump_file, "	 Reload r%d assignment failure\n",
		     sorted_pseudos[i]);
	  find_all_spills_for (sorted_pseudos[i]);
	}
      EXECUTE_IF_SET_IN_SPARSESET (live_range_hard_reg_pseudos, conflict_regno)
	{
	  if ((int) conflict_regno >= lra_constraint_new_regno_start)
	    {
	      sorted_pseudos[nfails++] = conflict_regno;
	      former_reload_pseudo_spill_p = true;
	    }
	  if (lra_dump_file != NULL)
	    fprintf (lra_dump_file, "	  Spill %s r%d(hr=%d, freq=%d)\n",
		     pseudo_prefix_title (conflict_regno), conflict_regno,
		     reg_renumber[conflict_regno],
		     lra_reg_info[conflict_regno].freq);
	  update_lives (conflict_regno, true);
	  lra_setup_reg_renumber (conflict_regno, -1, false);
	}
      n = nfails;
    }
  improve_inheritance (&changed_pseudo_bitmap);
  bitmap_clear (&non_reload_pseudos);
  bitmap_clear (&changed_insns);
  if (! lra_simple_p)
    {
      /* We should not assign to original pseudos of inheritance
	 pseudos or split pseudos if any its inheritance pseudo did
	 not get hard register or any its split pseudo was not split
	 because undo inheritance/split pass will extend live range of
	 such inheritance or split pseudos.  */
      bitmap_initialize (&do_not_assign_nonreload_pseudos, &reg_obstack);
      EXECUTE_IF_SET_IN_BITMAP (&lra_inheritance_pseudos, 0, u, bi)
	if ((restore_regno = lra_reg_info[u].restore_regno) >= 0
	    && reg_renumber[u] < 0
	    && bitmap_bit_p (&lra_inheritance_pseudos, u))
	  bitmap_set_bit (&do_not_assign_nonreload_pseudos, restore_regno);
      EXECUTE_IF_SET_IN_BITMAP (&lra_split_regs, 0, u, bi)
	if ((restore_regno = lra_reg_info[u].restore_regno) >= 0
	    && reg_renumber[u] >= 0)
	  bitmap_set_bit (&do_not_assign_nonreload_pseudos, restore_regno);
      for (n = 0, i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
	if (((i < lra_constraint_new_regno_start
	      && ! bitmap_bit_p (&do_not_assign_nonreload_pseudos, i))
	     || (bitmap_bit_p (&lra_inheritance_pseudos, i)
		 && lra_reg_info[i].restore_regno >= 0)
	     || (bitmap_bit_p (&lra_split_regs, i)
		 && lra_reg_info[i].restore_regno >= 0)
	     || bitmap_bit_p (&lra_subreg_reload_pseudos, i)
	     || bitmap_bit_p (&lra_optional_reload_pseudos, i))
	    && reg_renumber[i] < 0 && lra_reg_info[i].nrefs != 0
	    && regno_allocno_class_array[i] != NO_REGS)
	  sorted_pseudos[n++] = i;
      bitmap_clear (&do_not_assign_nonreload_pseudos);
      if (n != 0 && lra_dump_file != NULL)
	fprintf (lra_dump_file, "  Reassigning non-reload pseudos\n");
      qsort (sorted_pseudos, n, sizeof (int), pseudo_compare_func);
      for (i = 0; i < n; i++)
	{
	  regno = sorted_pseudos[i];
	  hard_regno = find_hard_regno_for (regno, &cost, -1, false);
	  if (hard_regno >= 0)
	    {
	      assign_hard_regno (hard_regno, regno);
	      /* We change allocation for non-reload pseudo on this
		 iteration -- mark the pseudo for invalidation of used
		 alternatives of insns containing the pseudo.  */
	      bitmap_set_bit (&changed_pseudo_bitmap, regno);
	    }
	  else
	    {
	      enum reg_class rclass = lra_get_allocno_class (regno);
	      enum reg_class spill_class;

	      if (targetm.spill_class == NULL
		  || lra_reg_info[regno].restore_regno < 0
		  || ! bitmap_bit_p (&lra_inheritance_pseudos, regno)
		  || (spill_class
		      = ((enum reg_class)
			 targetm.spill_class
			 ((reg_class_t) rclass,
			  PSEUDO_REGNO_MODE (regno)))) == NO_REGS)
		continue;
	      regno_allocno_class_array[regno] = spill_class;
	      hard_regno = find_hard_regno_for (regno, &cost, -1, false);
	      if (hard_regno < 0)
		regno_allocno_class_array[regno] = rclass;
	      else
		{
		  setup_reg_classes
		    (regno, spill_class, spill_class, spill_class);
		  assign_hard_regno (hard_regno, regno);
		  bitmap_set_bit (&changed_pseudo_bitmap, regno);
		}
	    }
	}
    }
  free (update_hard_regno_preference_check);
  bitmap_clear (&best_spill_pseudos_bitmap);
  bitmap_clear (&spill_pseudos_bitmap);
  bitmap_clear (&insn_conflict_pseudos);
}
/* Entry function to assign hard registers to new reload pseudos
   starting with LRA_CONSTRAINT_NEW_REGNO_START (by possible spilling
   of old pseudos) and possibly to the old pseudos.  The function adds
   what insns to process for the next constraint pass.  Those are all
   insns that contain non-reload and non-inheritance pseudos with
   changed allocation.

   Return true if we did not spill any non-reload and non-inheritance
   pseudos.  */
bool
lra_assign (void)
{
  int i;
  unsigned int u;
  bitmap_iterator bi;
  bitmap_head insns_to_process;
  bool no_spills_p;
  int max_regno = max_reg_num ();

  timevar_push (TV_LRA_ASSIGN);
  lra_assignment_iter++;
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file, "\n********** Assignment #%d: **********\n\n",
	     lra_assignment_iter);
  init_lives ();
  sorted_pseudos = XNEWVEC (int, max_regno);
  sorted_reload_pseudos = XNEWVEC (int, max_regno);
  regno_allocno_class_array = XNEWVEC (enum reg_class, max_regno);
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    regno_allocno_class_array[i] = lra_get_allocno_class (i);
  former_reload_pseudo_spill_p = false;
  init_regno_assign_info ();
  bitmap_initialize (&all_spilled_pseudos, &reg_obstack);
  create_live_range_start_chains ();
  setup_live_pseudos_and_spill_after_risky_transforms (&all_spilled_pseudos);
#ifdef ENABLE_CHECKING
  if (!flag_ipa_ra)
    for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
      if (lra_reg_info[i].nrefs != 0 && reg_renumber[i] >= 0
	  && lra_reg_info[i].call_p
	  && overlaps_hard_reg_set_p (call_used_reg_set,
				      PSEUDO_REGNO_MODE (i), reg_renumber[i]))
	gcc_unreachable ();
#endif
  /* Setup insns to process on the next constraint pass.  */
  bitmap_initialize (&changed_pseudo_bitmap, &reg_obstack);
  init_live_reload_and_inheritance_pseudos ();
  assign_by_spills ();
  finish_live_reload_and_inheritance_pseudos ();
  bitmap_ior_into (&changed_pseudo_bitmap, &all_spilled_pseudos);
  no_spills_p = true;
  EXECUTE_IF_SET_IN_BITMAP (&all_spilled_pseudos, 0, u, bi)
    /* We ignore spilled pseudos created on last inheritance pass
       because they will be removed.  */
    if (lra_reg_info[u].restore_regno < 0)
      {
	no_spills_p = false;
	break;
      }
  finish_live_range_start_chains ();
  bitmap_clear (&all_spilled_pseudos);
  bitmap_initialize (&insns_to_process, &reg_obstack);
  EXECUTE_IF_SET_IN_BITMAP (&changed_pseudo_bitmap, 0, u, bi)
    bitmap_ior_into (&insns_to_process, &lra_reg_info[u].insn_bitmap);
  bitmap_clear (&changed_pseudo_bitmap);
  EXECUTE_IF_SET_IN_BITMAP (&insns_to_process, 0, u, bi)
    {
      lra_push_insn_by_uid (u);
      /* Invalidate alternatives of insns that should be processed.  */
      lra_set_used_insn_alternative_by_uid (u, -1);
    }
  bitmap_clear (&insns_to_process);
  finish_regno_assign_info ();
  free (regno_allocno_class_array);
  free (sorted_pseudos);
  free (sorted_reload_pseudos);
  finish_lives ();
  timevar_pop (TV_LRA_ASSIGN);
  if (former_reload_pseudo_spill_p)
    lra_assignment_iter_after_spill++;
  if (lra_assignment_iter_after_spill > LRA_MAX_ASSIGNMENT_ITERATION_NUMBER)
    internal_error
      ("Maximum number of LRA assignment passes is achieved (%d)\n",
       LRA_MAX_ASSIGNMENT_ITERATION_NUMBER);
  return no_spills_p;
}