/* Assign reload pseudos.
   Copyright (C) 2010, 2011, 2012
   Free Software Foundation, Inc.
   Contributed by Vladimir Makarov <vmakarov@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file's main objective is to assign hard registers to reload
   pseudos.  It also tries to allocate hard registers to other
   pseudos, but at a lower priority than the reload pseudos.  The pass
   does not transform the RTL.

   We must allocate a hard register to every reload pseudo.  We try to
   increase the chances of finding a viable allocation by assigning
   the pseudos in order of fewest available hard registers first.  If
   we still fail to find a hard register, we spill other (non-reload)
   pseudos in order to make room.

   find_hard_regno_for finds hard registers for allocation without
   spilling.  spill_for does the same with spilling.  Both functions
   use a cost model to determine the most profitable choice of hard
   and spill registers.

   Once we have finished allocating reload pseudos, we also try to
   assign registers to other (non-reload) pseudos.  This is useful if
   hard registers were freed up by the spilling just described.

   We try to assign hard registers by collecting pseudos into threads.
   These threads contain reload and inheritance pseudos that are
   connected by copies (move insns).  Doing this improves the chances
   of pseudos in the thread getting the same hard register and, as a
   result, of allowing some move insns to be deleted.
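
   As a purely illustrative example: if reload pseudo r100 and
   inheritance pseudo r101 are connected by a copy insn, they end up in
   one thread, so reload_pseudo_compare_func sorts them next to each
   other and both tend to receive the same hard register, which lets
   the copy be removed later.  The thread frequency used for sorting is
   the sum of the member frequencies minus twice the frequency of each
   connecting copy (see process_copy_to_form_thread below).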

   When we assign a hard register to a pseudo, we decrease the cost of
   using the same hard register for pseudos that are connected by
   copies.

   If two hard registers have the same frequency-derived cost, we
   prefer hard registers with higher priorities.  The mapping of
   registers to priorities is controlled by the register_priority
   target hook.  For example, x86-64 has a few register priorities:
   hard registers with and without REX prefixes have different
   priorities.  This permits us to generate smaller code as insns
   without REX prefixes are shorter.

   If a few hard registers are still equally good for the assignment,
   we choose the least used hard register.  This is called leveling and
   may be profitable for some targets.

   Only insns with changed allocation pseudos are processed on the
   next constraint pass.

   The pseudo live-ranges are used to find conflicting pseudos.

   For understanding the code, it is important to keep in mind that
   inheritance, split, and reload pseudos created since the last
   constraint pass have regno >= lra_constraint_new_regno_start.
   Inheritance and split pseudos created on any pass are in the
   corresponding bitmaps.  Inheritance and split pseudos since the
   last constraint pass also have a corresponding non-negative
   restore_regno.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "hard-reg-set.h"
#include "rtl.h"
#include "tm_p.h"
#include "target.h"
#include "insn-config.h"
#include "recog.h"
#include "regs.h"
#include "function.h"
#include "expr.h"
#include "basic-block.h"
#include "except.h"
#include "df.h"
#include "ira.h"
#include "sparseset.h"
#include "lra-int.h"

/* Array containing corresponding values of function
   lra_get_allocno_class.  It is used to speed up the code.  */
static enum reg_class *regno_allocno_class_array;

/* Information about the thread to which a pseudo belongs.  Threads are
   a set of connected reload and inheritance pseudos with the same set of
   available hard registers.  Lone registers belong to their own threads.  */
struct regno_assign_info
{
  /* First/next pseudo of the same thread.  */
  int first, next;
  /* Frequency of the thread (execution frequency of only reload
     pseudos in the thread when the thread contains a reload pseudo).
     Defined only for the first thread pseudo.  */
  int freq;
};

/* Map regno to the corresponding regno assignment info.  */
static struct regno_assign_info *regno_assign_info;

/* Process a pseudo copy with execution frequency COPY_FREQ connecting
   REGNO1 and REGNO2 to form threads.  */
static void
process_copy_to_form_thread (int regno1, int regno2, int copy_freq)
{
  int last, regno1_first, regno2_first;

  lra_assert (regno1 >= lra_constraint_new_regno_start
              && regno2 >= lra_constraint_new_regno_start);
  regno1_first = regno_assign_info[regno1].first;
  regno2_first = regno_assign_info[regno2].first;
  if (regno1_first != regno2_first)
    {
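      /* Splice the thread headed by REGNO2_FIRST into the thread headed
         by REGNO1_FIRST: every member of the second thread gets
         REGNO1_FIRST as its first pseudo, and its list is linked in
         right after REGNO1_FIRST.  */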
      for (last = regno2_first;
           regno_assign_info[last].next >= 0;
           last = regno_assign_info[last].next)
        regno_assign_info[last].first = regno1_first;
      regno_assign_info[last].first = regno1_first;
      regno_assign_info[last].next = regno_assign_info[regno1_first].next;
      regno_assign_info[regno1_first].next = regno2_first;
      regno_assign_info[regno1_first].freq
        += regno_assign_info[regno2_first].freq;
    }
  regno_assign_info[regno1_first].freq -= 2 * copy_freq;
  lra_assert (regno_assign_info[regno1_first].freq >= 0);
}

/* Initialize REGNO_ASSIGN_INFO and form threads.  */
static void
init_regno_assign_info (void)
{
  int i, regno1, regno2, max_regno = max_reg_num ();
  lra_copy_t cp;

  regno_assign_info = XNEWVEC (struct regno_assign_info, max_regno);
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      regno_assign_info[i].first = i;
      regno_assign_info[i].next = -1;
      regno_assign_info[i].freq = lra_reg_info[i].freq;
    }
  /* Form the threads.  */
  for (i = 0; (cp = lra_get_copy (i)) != NULL; i++)
    if ((regno1 = cp->regno1) >= lra_constraint_new_regno_start
        && (regno2 = cp->regno2) >= lra_constraint_new_regno_start
        && reg_renumber[regno1] < 0 && lra_reg_info[regno1].nrefs != 0
        && reg_renumber[regno2] < 0 && lra_reg_info[regno2].nrefs != 0
        && (ira_class_hard_regs_num[regno_allocno_class_array[regno1]]
            == ira_class_hard_regs_num[regno_allocno_class_array[regno2]]))
      process_copy_to_form_thread (regno1, regno2, cp->freq);
}

/* Free REGNO_ASSIGN_INFO.  */
static void
finish_regno_assign_info (void)
{
  free (regno_assign_info);
}

/* The function is used to sort *reload* and *inheritance* pseudos to
   try to assign them hard registers.  We put pseudos from the same
   thread always nearby.  */
static int
reload_pseudo_compare_func (const void *v1p, const void *v2p)
{
  int r1 = *(const int *) v1p, r2 = *(const int *) v2p;
  enum reg_class cl1 = regno_allocno_class_array[r1];
  enum reg_class cl2 = regno_allocno_class_array[r2];
  int diff;

  lra_assert (r1 >= lra_constraint_new_regno_start
              && r2 >= lra_constraint_new_regno_start);

  /* Prefer to assign reload registers with smaller classes first to
     guarantee assignment to all reload registers.  */
  if ((diff = (ira_class_hard_regs_num[cl1]
               - ira_class_hard_regs_num[cl2])) != 0)
    return diff;
  if ((diff = (regno_assign_info[regno_assign_info[r2].first].freq
               - regno_assign_info[regno_assign_info[r1].first].freq)) != 0)
    return diff;
  /* Put pseudos from the thread nearby.  */
  if ((diff = regno_assign_info[r1].first - regno_assign_info[r2].first) != 0)
    return diff;
  /* If regs are equally good, sort by their numbers, so that the
     results of qsort leave nothing to chance.  */
  return r1 - r2;
}
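
/* For instance (illustration only): on a target where one register
   class has a single allocatable hard register while GENERAL_REGS has
   many, reload pseudos of the small class sort before GENERAL_REGS
   ones, so the most constrained pseudos get their hard registers
   first; within equal classes, pseudos from higher-frequency threads
   come first and thread members stay adjacent.  */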

/* The function is used to sort *non-reload* pseudos to try to assign
   them hard registers.  The order calculation is simpler than in the
   previous function and based on the pseudo frequency usage.  */
static int
pseudo_compare_func (const void *v1p, const void *v2p)
{
  int r1 = *(const int *) v1p, r2 = *(const int *) v2p;
  int diff;

  /* Prefer to assign more frequently used registers first.  */
  if ((diff = lra_reg_info[r2].freq - lra_reg_info[r1].freq) != 0)
    return diff;

  /* If regs are equally good, sort by their numbers, so that the
     results of qsort leave nothing to chance.  */
  return r1 - r2;
}

/* Arrays of size LRA_LIVE_MAX_POINT mapping a program point to the
   pseudo live ranges with given start point.  We insert only live
   ranges of pseudos interesting for assignment purposes.  They are
   reload pseudos and pseudos assigned to hard registers.  */
static lra_live_range_t *start_point_ranges;

/* Used as a flag that a live range is not inserted in the start point
   chain.  */
static struct lra_live_range not_in_chain_mark;

/* Create and set up START_POINT_RANGES.  */
static void
create_live_range_start_chains (void)
{
  int i, max_regno;
  lra_live_range_t r;

  start_point_ranges = XCNEWVEC (lra_live_range_t, lra_live_max_point);
  max_regno = max_reg_num ();
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    if (i >= lra_constraint_new_regno_start || reg_renumber[i] >= 0)
      {
        for (r = lra_reg_info[i].live_ranges; r != NULL; r = r->next)
          {
            r->start_next = start_point_ranges[r->start];
            start_point_ranges[r->start] = r;
          }
      }
    else
      {
        for (r = lra_reg_info[i].live_ranges; r != NULL; r = r->next)
          r->start_next = &not_in_chain_mark;
      }
}

/* Insert live ranges of pseudo REGNO into start chains if they are
   not there yet.  */
static void
insert_in_live_range_start_chain (int regno)
{
  lra_live_range_t r = lra_reg_info[regno].live_ranges;

  if (r->start_next != &not_in_chain_mark)
    return;
  for (; r != NULL; r = r->next)
    {
      r->start_next = start_point_ranges[r->start];
      start_point_ranges[r->start] = r;
    }
}

/* Free START_POINT_RANGES.  */
static void
finish_live_range_start_chains (void)
{
  gcc_assert (start_point_ranges != NULL);
  free (start_point_ranges);
  start_point_ranges = NULL;
}

/* Map: program point -> bitmap of all pseudos living at the point and
   assigned to hard registers.  */
static bitmap_head *live_hard_reg_pseudos;
static bitmap_obstack live_hard_reg_pseudos_bitmap_obstack;

/* reg_renumber corresponding to pseudos marked in
   live_hard_reg_pseudos.  reg_renumber may not match
   live_hard_reg_pseudos, but live_pseudos_reg_renumber always reflects
   live_hard_reg_pseudos.  */
static int *live_pseudos_reg_renumber;

/* Sparseset used to calculate living hard reg pseudos for some program
   point range.  */
static sparseset live_range_hard_reg_pseudos;

/* Sparseset used to calculate living reload/inheritance pseudos for
   some program point range.  */
static sparseset live_range_reload_inheritance_pseudos;

/* Allocate and initialize the data about living pseudos at program
   points.  */
static void
init_lives (void)
{
  int i, max_regno = max_reg_num ();

  live_range_hard_reg_pseudos = sparseset_alloc (max_regno);
  live_range_reload_inheritance_pseudos = sparseset_alloc (max_regno);
  live_hard_reg_pseudos = XNEWVEC (bitmap_head, lra_live_max_point);
  bitmap_obstack_initialize (&live_hard_reg_pseudos_bitmap_obstack);
  for (i = 0; i < lra_live_max_point; i++)
    bitmap_initialize (&live_hard_reg_pseudos[i],
                       &live_hard_reg_pseudos_bitmap_obstack);
  live_pseudos_reg_renumber = XNEWVEC (int, max_regno);
  for (i = 0; i < max_regno; i++)
    live_pseudos_reg_renumber[i] = -1;
}

/* Free the data about living pseudos at program points.  */
static void
finish_lives (void)
{
  sparseset_free (live_range_hard_reg_pseudos);
  sparseset_free (live_range_reload_inheritance_pseudos);
  free (live_hard_reg_pseudos);
  bitmap_obstack_release (&live_hard_reg_pseudos_bitmap_obstack);
  free (live_pseudos_reg_renumber);
}

/* Update the LIVE_HARD_REG_PSEUDOS and LIVE_PSEUDOS_REG_RENUMBER
   entries for pseudo REGNO.  Assume that the register has been
   spilled if FREE_P, otherwise assume that it has been assigned
   reg_renumber[REGNO] (if >= 0).  We also insert the pseudo live
   ranges in the start chains when it is assumed to be assigned to a
   hard register because we use the chains of pseudos assigned to hard
   registers during allocation.  */
static void
update_lives (int regno, bool free_p)
{
  int p;
  lra_live_range_t r;

  if (reg_renumber[regno] < 0)
    return;
  live_pseudos_reg_renumber[regno] = free_p ? -1 : reg_renumber[regno];
  for (r = lra_reg_info[regno].live_ranges; r != NULL; r = r->next)
    {
      for (p = r->start; p <= r->finish; p++)
        if (free_p)
          bitmap_clear_bit (&live_hard_reg_pseudos[p], regno);
        else
          {
            bitmap_set_bit (&live_hard_reg_pseudos[p], regno);
            insert_in_live_range_start_chain (regno);
          }
    }
}

/* Sparseset used to calculate reload pseudos conflicting with a given
   pseudo when we are trying to find a hard register for the given
   pseudo.  */
static sparseset conflict_reload_and_inheritance_pseudos;

/* Map: program point -> bitmap of all reload and inheritance pseudos
   living at the point.  */
static bitmap_head *live_reload_and_inheritance_pseudos;
static bitmap_obstack live_reload_and_inheritance_pseudos_bitmap_obstack;

/* Allocate and initialize data about living reload pseudos at any
   given program point.  */
static void
init_live_reload_and_inheritance_pseudos (void)
{
  int i, p, max_regno = max_reg_num ();
  lra_live_range_t r;

  conflict_reload_and_inheritance_pseudos = sparseset_alloc (max_regno);
  live_reload_and_inheritance_pseudos
    = XNEWVEC (bitmap_head, lra_live_max_point);
  bitmap_obstack_initialize (&live_reload_and_inheritance_pseudos_bitmap_obstack);
  for (p = 0; p < lra_live_max_point; p++)
    bitmap_initialize (&live_reload_and_inheritance_pseudos[p],
                       &live_reload_and_inheritance_pseudos_bitmap_obstack);
  for (i = lra_constraint_new_regno_start; i < max_regno; i++)
    for (r = lra_reg_info[i].live_ranges; r != NULL; r = r->next)
      for (p = r->start; p <= r->finish; p++)
        bitmap_set_bit (&live_reload_and_inheritance_pseudos[p], i);
}

/* Finalize data about living reload pseudos at any given program
   point.  */
static void
finish_live_reload_and_inheritance_pseudos (void)
{
  sparseset_free (conflict_reload_and_inheritance_pseudos);
  free (live_reload_and_inheritance_pseudos);
  bitmap_obstack_release (&live_reload_and_inheritance_pseudos_bitmap_obstack);
}

/* The value used to check that cost of given hard reg is really
   defined currently.  */
static int curr_hard_regno_costs_check = 0;
/* Array used to check that cost of the corresponding hard reg (the
   array element index) is really defined currently.  */
static int hard_regno_costs_check[FIRST_PSEUDO_REGISTER];
/* The current costs of allocation of hard regs.  Defined only if the
   value of the corresponding element of the previous array is equal to
   CURR_HARD_REGNO_COSTS_CHECK.  */
static int hard_regno_costs[FIRST_PSEUDO_REGISTER];

/* Adjust cost of HARD_REGNO by INCR.  Reset the cost first if it is
   not defined yet.  */
static void
adjust_hard_regno_cost (int hard_regno, int incr)
{
  if (hard_regno_costs_check[hard_regno] != curr_hard_regno_costs_check)
    hard_regno_costs[hard_regno] = 0;
  hard_regno_costs_check[hard_regno] = curr_hard_regno_costs_check;
  hard_regno_costs[hard_regno] += incr;
}
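
/* Note: a negative increment (coming from a pseudo's own hard register
   preferences) makes a hard register look cheaper, a positive one
   (coming from conflicting pseudos' preferences) makes it look more
   expensive; find_hard_regno_for below picks the minimum-cost
   register.  */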

/* Try to find a free hard register for pseudo REGNO.  Return the
   hard register on success and set *COST to the cost of using
   that register.  (If several registers have equal cost, the one with
   the highest priority wins.)  Return -1 on failure.

   If TRY_ONLY_HARD_REGNO >= 0, consider only that hard register,
   otherwise consider all hard registers in REGNO's class.  */
static int
find_hard_regno_for (int regno, int *cost, int try_only_hard_regno)
{
  HARD_REG_SET conflict_set;
  int best_cost = INT_MAX, best_priority = INT_MIN, best_usage = INT_MAX;
  lra_live_range_t r;
  int p, i, j, rclass_size, best_hard_regno, priority, hard_regno;
  int hr, conflict_hr, nregs;
  enum machine_mode biggest_mode;
  unsigned int k, conflict_regno;
  int val, biggest_nregs, nregs_diff;
  enum reg_class rclass;
  bitmap_iterator bi;
  bool *rclass_intersect_p;
  HARD_REG_SET impossible_start_hard_regs;

  COPY_HARD_REG_SET (conflict_set, lra_no_alloc_regs);
  rclass = regno_allocno_class_array[regno];
  rclass_intersect_p = ira_reg_classes_intersect_p[rclass];
  curr_hard_regno_costs_check++;
  sparseset_clear (conflict_reload_and_inheritance_pseudos);
  sparseset_clear (live_range_hard_reg_pseudos);
  IOR_HARD_REG_SET (conflict_set, lra_reg_info[regno].conflict_hard_regs);
  biggest_mode = lra_reg_info[regno].biggest_mode;
  /* Collect already assigned pseudos and not yet assigned
     reload/inheritance pseudos with preferences that live across
     REGNO's live ranges and whose classes intersect RCLASS.  */
  for (r = lra_reg_info[regno].live_ranges; r != NULL; r = r->next)
    {
      EXECUTE_IF_SET_IN_BITMAP (&live_hard_reg_pseudos[r->start], 0, k, bi)
        if (rclass_intersect_p[regno_allocno_class_array[k]])
          sparseset_set_bit (live_range_hard_reg_pseudos, k);
      EXECUTE_IF_SET_IN_BITMAP (&live_reload_and_inheritance_pseudos[r->start],
                                0, k, bi)
        if (lra_reg_info[k].preferred_hard_regno1 >= 0
            && live_pseudos_reg_renumber[k] < 0
            && rclass_intersect_p[regno_allocno_class_array[k]])
          sparseset_set_bit (conflict_reload_and_inheritance_pseudos, k);
      for (p = r->start + 1; p <= r->finish; p++)
        {
          lra_live_range_t r2;

          for (r2 = start_point_ranges[p];
               r2 != NULL;
               r2 = r2->start_next)
            {
              if (r2->regno >= lra_constraint_new_regno_start
                  && lra_reg_info[r2->regno].preferred_hard_regno1 >= 0
                  && live_pseudos_reg_renumber[r2->regno] < 0
                  && rclass_intersect_p[regno_allocno_class_array[r2->regno]])
                sparseset_set_bit (conflict_reload_and_inheritance_pseudos,
                                   r2->regno);
              if (live_pseudos_reg_renumber[r2->regno] >= 0
                  && rclass_intersect_p[regno_allocno_class_array[r2->regno]])
                sparseset_set_bit (live_range_hard_reg_pseudos, r2->regno);
            }
        }
    }
  if ((hard_regno = lra_reg_info[regno].preferred_hard_regno1) >= 0)
    {
      adjust_hard_regno_cost
        (hard_regno, -lra_reg_info[regno].preferred_hard_regno_profit1);
      if ((hard_regno = lra_reg_info[regno].preferred_hard_regno2) >= 0)
        adjust_hard_regno_cost
          (hard_regno, -lra_reg_info[regno].preferred_hard_regno_profit2);
    }
#ifdef STACK_REGS
  if (lra_reg_info[regno].no_stack_p)
    for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
      SET_HARD_REG_BIT (conflict_set, i);
#endif
  sparseset_clear_bit (conflict_reload_and_inheritance_pseudos, regno);
  val = lra_reg_info[regno].val;
  CLEAR_HARD_REG_SET (impossible_start_hard_regs);
  EXECUTE_IF_SET_IN_SPARSESET (live_range_hard_reg_pseudos, conflict_regno)
    if (val == lra_reg_info[conflict_regno].val)
      {
        conflict_hr = live_pseudos_reg_renumber[conflict_regno];
        nregs = (hard_regno_nregs[conflict_hr]
                 [lra_reg_info[conflict_regno].biggest_mode]);
        /* Remember about multi-register pseudos.  For example, 2 hard
           register pseudos can start on the same hard register but can
           not start on HR and HR+1/HR-1.  */
        for (hr = conflict_hr + 1;
             hr < FIRST_PSEUDO_REGISTER && hr < conflict_hr + nregs;
             hr++)
          SET_HARD_REG_BIT (impossible_start_hard_regs, hr);
        for (hr = conflict_hr - 1;
             hr >= 0 && hr + hard_regno_nregs[hr][biggest_mode] > conflict_hr;
             hr--)
          SET_HARD_REG_BIT (impossible_start_hard_regs, hr);
      }
    else
      {
        add_to_hard_reg_set (&conflict_set,
                             lra_reg_info[conflict_regno].biggest_mode,
                             live_pseudos_reg_renumber[conflict_regno]);
        if (hard_reg_set_subset_p (reg_class_contents[rclass],
                                   conflict_set))
          return -1;
      }
  EXECUTE_IF_SET_IN_SPARSESET (conflict_reload_and_inheritance_pseudos,
                               conflict_regno)
    if (val != lra_reg_info[conflict_regno].val)
      {
        lra_assert (live_pseudos_reg_renumber[conflict_regno] < 0);
        if ((hard_regno
             = lra_reg_info[conflict_regno].preferred_hard_regno1) >= 0)
          {
            adjust_hard_regno_cost
              (hard_regno,
               lra_reg_info[conflict_regno].preferred_hard_regno_profit1);
            if ((hard_regno
                 = lra_reg_info[conflict_regno].preferred_hard_regno2) >= 0)
              adjust_hard_regno_cost
                (hard_regno,
                 lra_reg_info[conflict_regno].preferred_hard_regno_profit2);
          }
      }
  /* Make sure that all registers in a multi-word pseudo belong to the
     required class.  */
  IOR_COMPL_HARD_REG_SET (conflict_set, reg_class_contents[rclass]);
  lra_assert (rclass != NO_REGS);
  rclass_size = ira_class_hard_regs_num[rclass];
  best_hard_regno = -1;
  hard_regno = ira_class_hard_regs[rclass][0];
  biggest_nregs = hard_regno_nregs[hard_regno][biggest_mode];
  nregs_diff = (biggest_nregs
                - hard_regno_nregs[hard_regno][PSEUDO_REGNO_MODE (regno)]);
  for (i = 0; i < rclass_size; i++)
    {
      if (try_only_hard_regno >= 0)
        hard_regno = try_only_hard_regno;
      else
        hard_regno = ira_class_hard_regs[rclass][i];
      if (! overlaps_hard_reg_set_p (conflict_set,
                                     PSEUDO_REGNO_MODE (regno), hard_regno)
          /* We can not use prohibited_class_mode_regs because it is
             not defined for all classes.  */
          && HARD_REGNO_MODE_OK (hard_regno, PSEUDO_REGNO_MODE (regno))
          && ! TEST_HARD_REG_BIT (impossible_start_hard_regs, hard_regno)
          && (nregs_diff == 0
#ifdef WORDS_BIG_ENDIAN
              || (hard_regno - nregs_diff >= 0
                  && TEST_HARD_REG_BIT (reg_class_contents[rclass],
                                        hard_regno - nregs_diff))
#else
              || TEST_HARD_REG_BIT (reg_class_contents[rclass],
                                    hard_regno + nregs_diff)
#endif
              ))
        {
          if (hard_regno_costs_check[hard_regno]
              != curr_hard_regno_costs_check)
            {
              hard_regno_costs_check[hard_regno] = curr_hard_regno_costs_check;
              hard_regno_costs[hard_regno] = 0;
            }
          for (j = 0;
               j < hard_regno_nregs[hard_regno][PSEUDO_REGNO_MODE (regno)];
               j++)
            if (! TEST_HARD_REG_BIT (call_used_reg_set, hard_regno + j)
                && ! df_regs_ever_live_p (hard_regno + j))
              /* It needs save restore.  */
              hard_regno_costs[hard_regno]
                += 2 * ENTRY_BLOCK_PTR->next_bb->frequency;
          priority = targetm.register_priority (hard_regno);
          if (best_hard_regno < 0 || hard_regno_costs[hard_regno] < best_cost
              || (hard_regno_costs[hard_regno] == best_cost
                  && (priority > best_priority
                      /* Hard register usage leveling actually results
                         in bigger code for targets with conditional
                         execution like ARM because it reduces chance
                         of if-conversion after LRA.  */
                      || (! targetm.have_conditional_execution ()
                          && priority == best_priority
                          && best_usage > lra_hard_reg_usage[hard_regno]))))
            {
              best_hard_regno = hard_regno;
              best_cost = hard_regno_costs[hard_regno];
              best_priority = priority;
              best_usage = lra_hard_reg_usage[hard_regno];
            }
        }
      if (try_only_hard_regno >= 0)
        break;
    }
  if (best_hard_regno >= 0)
    *cost = best_cost - lra_reg_info[regno].freq;
  return best_hard_regno;
}

/* Current value used for checking elements in
   update_hard_regno_preference_check.  */
static int curr_update_hard_regno_preference_check;
/* If an element value is equal to the above variable value, then the
   corresponding regno has been processed for preference
   propagation.  */
static int *update_hard_regno_preference_check;

/* Update the preference for using HARD_REGNO for pseudos that are
   connected directly or indirectly with REGNO.  Apply divisor DIV
   to any preference adjustments.

   The more indirectly a pseudo is connected, the smaller its effect
   should be.  We therefore increase DIV on each "hop".  */
static void
update_hard_regno_preference (int regno, int hard_regno, int div)
{
  int another_regno, cost;
  lra_copy_t cp, next_cp;

  /* Search depth 5 seems to be enough.  */
  if (div > (1 << 5))
    return;
  for (cp = lra_reg_info[regno].copies; cp != NULL; cp = next_cp)
    {
      if (cp->regno1 == regno)
        {
          next_cp = cp->regno1_next;
          another_regno = cp->regno2;
        }
      else if (cp->regno2 == regno)
        {
          next_cp = cp->regno2_next;
          another_regno = cp->regno1;
        }
      else
        continue;
      if (reg_renumber[another_regno] < 0
          && (update_hard_regno_preference_check[another_regno]
              != curr_update_hard_regno_preference_check))
        {
          update_hard_regno_preference_check[another_regno]
            = curr_update_hard_regno_preference_check;
          cost = cp->freq < div ? 1 : cp->freq / div;
          lra_setup_reload_pseudo_preferenced_hard_reg
            (another_regno, hard_regno, cost);
          update_hard_regno_preference (another_regno, hard_regno, div * 2);
        }
    }
}

/* Update REG_RENUMBER and other pseudo preferences by assignment of
   HARD_REGNO to pseudo REGNO and print about it if PRINT_P.  */
void
lra_setup_reg_renumber (int regno, int hard_regno, bool print_p)
{
  int i, hr;

  /* We can not just reassign hard register.  */
  lra_assert (hard_regno < 0 || reg_renumber[regno] < 0);
  if ((hr = hard_regno) < 0)
    hr = reg_renumber[regno];
  reg_renumber[regno] = hard_regno;
  lra_assert (hr >= 0);
  for (i = 0; i < hard_regno_nregs[hr][PSEUDO_REGNO_MODE (regno)]; i++)
    if (hard_regno < 0)
      lra_hard_reg_usage[hr + i] -= lra_reg_info[regno].freq;
    else
      lra_hard_reg_usage[hr + i] += lra_reg_info[regno].freq;
  if (print_p && lra_dump_file != NULL)
    fprintf (lra_dump_file, "      Assign %d to %sr%d (freq=%d)\n",
             reg_renumber[regno],
             regno < lra_constraint_new_regno_start
             ? ""
             : bitmap_bit_p (&lra_inheritance_pseudos, regno) ? "inheritance "
             : bitmap_bit_p (&lra_split_regs, regno) ? "split "
             : bitmap_bit_p (&lra_optional_reload_pseudos, regno)
             ? "optional reload ": "reload ",
             regno, lra_reg_info[regno].freq);
  if (hard_regno >= 0)
    {
      curr_update_hard_regno_preference_check++;
      update_hard_regno_preference (regno, hard_regno, 1);
    }
}

/* Pseudos which occur in insns containing a particular pseudo.  */
static bitmap_head insn_conflict_pseudos;

/* Bitmaps used to contain spill pseudos for given pseudo hard regno
   and best spill pseudos for given pseudo (and best hard regno).  */
static bitmap_head spill_pseudos_bitmap, best_spill_pseudos_bitmap;

/* Current pseudo check for validity of elements in
   TRY_HARD_REG_PSEUDOS.  */
static int curr_pseudo_check;
/* Array used for validity of elements in TRY_HARD_REG_PSEUDOS.  */
static int try_hard_reg_pseudos_check[FIRST_PSEUDO_REGISTER];
/* Pseudos which hold a given hard register at the considered points.  */
static bitmap_head try_hard_reg_pseudos[FIRST_PSEUDO_REGISTER];

/* Set up try_hard_reg_pseudos for given program point P and class
   RCLASS.  Those are pseudos living at P and assigned to a hard
   register of RCLASS.  In other words, those are pseudos which can be
   spilled to assign a hard register of RCLASS to a pseudo living at
   P.  */
static void
setup_try_hard_regno_pseudos (int p, enum reg_class rclass)
{
  int i, hard_regno;
  enum machine_mode mode;
  unsigned int spill_regno;
  bitmap_iterator bi;

  /* Find what pseudos could be spilled.  */
  EXECUTE_IF_SET_IN_BITMAP (&live_hard_reg_pseudos[p], 0, spill_regno, bi)
    {
      mode = PSEUDO_REGNO_MODE (spill_regno);
      hard_regno = live_pseudos_reg_renumber[spill_regno];
      if (overlaps_hard_reg_set_p (reg_class_contents[rclass],
                                   mode, hard_regno))
        {
          for (i = hard_regno_nregs[hard_regno][mode] - 1; i >= 0; i--)
            {
              if (try_hard_reg_pseudos_check[hard_regno + i]
                  != curr_pseudo_check)
                {
                  try_hard_reg_pseudos_check[hard_regno + i]
                    = curr_pseudo_check;
                  bitmap_clear (&try_hard_reg_pseudos[hard_regno + i]);
                }
              bitmap_set_bit (&try_hard_reg_pseudos[hard_regno + i],
                              spill_regno);
            }
        }
    }
}

/* Assign temporarily HARD_REGNO to pseudo REGNO.  Temporary
   assignment means that we might undo the data change.  */
static void
assign_temporarily (int regno, int hard_regno)
{
  int p;
  lra_live_range_t r;

  for (r = lra_reg_info[regno].live_ranges; r != NULL; r = r->next)
    {
      for (p = r->start; p <= r->finish; p++)
        if (hard_regno < 0)
          bitmap_clear_bit (&live_hard_reg_pseudos[p], regno);
        else
          {
            bitmap_set_bit (&live_hard_reg_pseudos[p], regno);
            insert_in_live_range_start_chain (regno);
          }
    }
  live_pseudos_reg_renumber[regno] = hard_regno;
}

/* Array used for sorting reload pseudos for subsequent allocation
   after spilling some pseudo.  */
static int *sorted_reload_pseudos;
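
/* The cost of a spill choice in spill_for below is built from the
   execution frequencies of the pseudos that would be spilled,
   discounted when a spilled pseudo has a memory or constant
   equivalence (its value is cheap to restore), and credited when the
   freed hard registers also let other still unassigned reload pseudos
   be allocated.  */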

/* Spill some pseudos for a reload pseudo REGNO and return hard
   register which should be used for pseudo after spilling.  The
   function adds spilled pseudos to SPILLED_PSEUDO_BITMAP.  When we
   choose hard register (and pseudos occupying the hard registers and
   to be spilled), we take into account not only how REGNO will
   benefit from the spills but also how other reload pseudos not yet
   assigned to hard registers benefit from the spills too.  In very
   rare cases, the function can fail and return -1.  */
static int
spill_for (int regno, bitmap spilled_pseudo_bitmap)
{
  int i, j, n, p, hard_regno, best_hard_regno, cost, best_cost, rclass_size;
  int reload_hard_regno, reload_cost;
  enum machine_mode mode, mode2;
  enum reg_class rclass;
  HARD_REG_SET spilled_hard_regs;
  unsigned int spill_regno, reload_regno, uid;
  int insn_pseudos_num, best_insn_pseudos_num;
  lra_live_range_t r;
  bitmap_iterator bi;

  rclass = regno_allocno_class_array[regno];
  lra_assert (reg_renumber[regno] < 0 && rclass != NO_REGS);
  bitmap_clear (&insn_conflict_pseudos);
  bitmap_clear (&best_spill_pseudos_bitmap);
  EXECUTE_IF_SET_IN_BITMAP (&lra_reg_info[regno].insn_bitmap, 0, uid, bi)
    {
      struct lra_insn_reg *ir;

      for (ir = lra_get_insn_regs (uid); ir != NULL; ir = ir->next)
        if (ir->regno >= FIRST_PSEUDO_REGISTER)
          bitmap_set_bit (&insn_conflict_pseudos, ir->regno);
    }
  best_hard_regno = -1;
  best_cost = INT_MAX;
  best_insn_pseudos_num = INT_MAX;
  rclass_size = ira_class_hard_regs_num[rclass];
  mode = PSEUDO_REGNO_MODE (regno);
  /* Invalidate try_hard_reg_pseudos elements.  */
  curr_pseudo_check++;
  for (r = lra_reg_info[regno].live_ranges; r != NULL; r = r->next)
    for (p = r->start; p <= r->finish; p++)
      setup_try_hard_regno_pseudos (p, rclass);
  /* For each hard register of RCLASS, collect the pseudos that would
     have to be spilled to free it, spill them temporarily, estimate
     the resulting cost, and remember the cheapest choice.  */
  for (i = 0; i < rclass_size; i++)
    {
      hard_regno = ira_class_hard_regs[rclass][i];
      bitmap_clear (&spill_pseudos_bitmap);
      for (j = hard_regno_nregs[hard_regno][mode] - 1; j >= 0; j--)
        {
          if (try_hard_reg_pseudos_check[hard_regno + j] != curr_pseudo_check)
            continue;
          lra_assert (!bitmap_empty_p (&try_hard_reg_pseudos[hard_regno + j]));
          bitmap_ior_into (&spill_pseudos_bitmap,
                           &try_hard_reg_pseudos[hard_regno + j]);
        }
      /* Spill pseudos.  */
      CLEAR_HARD_REG_SET (spilled_hard_regs);
      EXECUTE_IF_SET_IN_BITMAP (&spill_pseudos_bitmap, 0, spill_regno, bi)
        if ((int) spill_regno >= lra_constraint_new_regno_start
            && ! bitmap_bit_p (&lra_inheritance_pseudos, spill_regno)
            && ! bitmap_bit_p (&lra_split_regs, spill_regno)
            && ! bitmap_bit_p (&lra_optional_reload_pseudos, spill_regno))
          goto fail;
      insn_pseudos_num = 0;
      if (lra_dump_file != NULL)
        fprintf (lra_dump_file, "        Trying %d:", hard_regno);
      sparseset_clear (live_range_reload_inheritance_pseudos);
      EXECUTE_IF_SET_IN_BITMAP (&spill_pseudos_bitmap, 0, spill_regno, bi)
        {
          if (bitmap_bit_p (&insn_conflict_pseudos, spill_regno))
            insn_pseudos_num++;
          mode2 = PSEUDO_REGNO_MODE (spill_regno);
          update_lives (spill_regno, true);
          if (lra_dump_file != NULL)
            fprintf (lra_dump_file, " spill %d(freq=%d)",
                     spill_regno, lra_reg_info[spill_regno].freq);
          add_to_hard_reg_set (&spilled_hard_regs,
                               mode2, reg_renumber[spill_regno]);
          for (r = lra_reg_info[spill_regno].live_ranges;
               r != NULL;
               r = r->next)
            {
              for (p = r->start; p <= r->finish; p++)
                {
                  lra_live_range_t r2;

                  for (r2 = start_point_ranges[p];
                       r2 != NULL;
                       r2 = r2->start_next)
                    if (r2->regno >= lra_constraint_new_regno_start)
                      sparseset_set_bit (live_range_reload_inheritance_pseudos,
                                         r2->regno);
                }
            }
        }
      hard_regno = find_hard_regno_for (regno, &cost, -1);
      if (hard_regno >= 0)
        {
          assign_temporarily (regno, hard_regno);
          n = 0;
          EXECUTE_IF_SET_IN_SPARSESET (live_range_reload_inheritance_pseudos,
                                       reload_regno)
            if (live_pseudos_reg_renumber[reload_regno] < 0
                && (hard_reg_set_intersect_p
                    (reg_class_contents
                     [regno_allocno_class_array[reload_regno]],
                     spilled_hard_regs)))
              sorted_reload_pseudos[n++] = reload_regno;
          qsort (sorted_reload_pseudos, n, sizeof (int),
                 reload_pseudo_compare_func);
          for (j = 0; j < n; j++)
            {
              reload_regno = sorted_reload_pseudos[j];
              lra_assert (live_pseudos_reg_renumber[reload_regno] < 0);
              if ((reload_hard_regno
                   = find_hard_regno_for (reload_regno,
                                          &reload_cost, -1)) >= 0
                  && (overlaps_hard_reg_set_p
                      (spilled_hard_regs,
                       PSEUDO_REGNO_MODE (reload_regno), reload_hard_regno)))
                {
                  if (lra_dump_file != NULL)
                    fprintf (lra_dump_file, " assign %d(cost=%d)",
                             reload_regno, reload_cost);
                  assign_temporarily (reload_regno, reload_hard_regno);
                  cost += reload_cost;
                }
            }
          EXECUTE_IF_SET_IN_BITMAP (&spill_pseudos_bitmap, 0, spill_regno, bi)
            {
              rtx x;

              cost += lra_reg_info[spill_regno].freq;
              if (ira_reg_equiv[spill_regno].memory != NULL
                  || ira_reg_equiv[spill_regno].constant != NULL)
                for (x = ira_reg_equiv[spill_regno].init_insns;
                     x != NULL;
                     x = XEXP (x, 1))
                  cost -= REG_FREQ_FROM_BB (BLOCK_FOR_INSN (XEXP (x, 0)));
            }
          if (best_insn_pseudos_num > insn_pseudos_num
              || (best_insn_pseudos_num == insn_pseudos_num
                  && best_cost > cost))
            {
              best_insn_pseudos_num = insn_pseudos_num;
              best_cost = cost;
              best_hard_regno = hard_regno;
              bitmap_copy (&best_spill_pseudos_bitmap, &spill_pseudos_bitmap);
              if (lra_dump_file != NULL)
                fprintf (lra_dump_file, "        Now best %d(cost=%d)\n",
                         hard_regno, cost);
            }
          assign_temporarily (regno, -1);
          for (j = 0; j < n; j++)
            {
              reload_regno = sorted_reload_pseudos[j];
              if (live_pseudos_reg_renumber[reload_regno] >= 0)
                assign_temporarily (reload_regno, -1);
            }
        }
      if (lra_dump_file != NULL)
        fprintf (lra_dump_file, "\n");
      /* Restore the live hard reg pseudo info for spilled pseudos.  */
      EXECUTE_IF_SET_IN_BITMAP (&spill_pseudos_bitmap, 0, spill_regno, bi)
        update_lives (spill_regno, false);
    fail:
      ;
    }
  /* Spill the pseudos of the best choice for real.  */
  EXECUTE_IF_SET_IN_BITMAP (&best_spill_pseudos_bitmap, 0, spill_regno, bi)
    {
      if (lra_dump_file != NULL)
        fprintf (lra_dump_file, "      Spill %sr%d(hr=%d, freq=%d) for r%d\n",
                 ((int) spill_regno < lra_constraint_new_regno_start
                  ? ""
                  : bitmap_bit_p (&lra_inheritance_pseudos, spill_regno)
                  ? "inheritance "
                  : bitmap_bit_p (&lra_split_regs, spill_regno)
                  ? "split "
                  : bitmap_bit_p (&lra_optional_reload_pseudos, spill_regno)
                  ? "optional reload " : "reload "),
                 spill_regno, reg_renumber[spill_regno],
                 lra_reg_info[spill_regno].freq, regno);
      update_lives (spill_regno, true);
      lra_setup_reg_renumber (spill_regno, -1, false);
    }
  bitmap_ior_into (spilled_pseudo_bitmap, &best_spill_pseudos_bitmap);
  return best_hard_regno;
}

/* Assign HARD_REGNO to REGNO.  */
static void
assign_hard_regno (int hard_regno, int regno)
{
  int i;

  lra_assert (hard_regno >= 0);
  lra_setup_reg_renumber (regno, hard_regno, true);
  update_lives (regno, false);
  for (i = 0;
       i < hard_regno_nregs[hard_regno][lra_reg_info[regno].biggest_mode];
       i++)
    df_set_regs_ever_live (hard_regno + i, true);
}

/* Array used for sorting different pseudos.  */
static int *sorted_pseudos;

/* The constraints pass is allowed to create equivalences between
   pseudos that make the current allocation "incorrect" (in the sense
   that pseudos are assigned to hard registers from their own conflict
   sets).  The global variable lra_risky_transformations_p says
   whether this might have happened.

   Process pseudos assigned to hard registers (less frequently used
   first), spill if a conflict is found, and mark the spilled pseudos
   in SPILLED_PSEUDO_BITMAP.  Set up LIVE_HARD_REG_PSEUDOS from
   pseudos, assigned to hard registers.  */
static void
setup_live_pseudos_and_spill_after_risky_transforms (bitmap
                                                     spilled_pseudo_bitmap)
{
  int p, i, j, n, regno, hard_regno;
  unsigned int k, conflict_regno;
  int val;
  HARD_REG_SET conflict_set;
  enum machine_mode mode;
  lra_live_range_t r;
  bitmap_iterator bi;
  int max_regno = max_reg_num ();

  if (! lra_risky_transformations_p)
    {
      for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
        if (reg_renumber[i] >= 0 && lra_reg_info[i].nrefs > 0)
          update_lives (i, false);
      return;
    }
  for (n = 0, i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    if (reg_renumber[i] >= 0 && lra_reg_info[i].nrefs > 0)
      sorted_pseudos[n++] = i;
  qsort (sorted_pseudos, n, sizeof (int), pseudo_compare_func);
  /* Go through the assigned pseudos from the least frequently used to
     the most frequently used; keep a pseudo if its hard register does
     not conflict with the pseudos already kept, otherwise spill it.  */
  for (i = n - 1; i >= 0; i--)
    {
      regno = sorted_pseudos[i];
      hard_regno = reg_renumber[regno];
      lra_assert (hard_regno >= 0);
      mode = lra_reg_info[regno].biggest_mode;
      sparseset_clear (live_range_hard_reg_pseudos);
      for (r = lra_reg_info[regno].live_ranges; r != NULL; r = r->next)
        {
          EXECUTE_IF_SET_IN_BITMAP (&live_hard_reg_pseudos[r->start], 0, k, bi)
            sparseset_set_bit (live_range_hard_reg_pseudos, k);
          for (p = r->start + 1; p <= r->finish; p++)
            {
              lra_live_range_t r2;

              for (r2 = start_point_ranges[p];
                   r2 != NULL;
                   r2 = r2->start_next)
                if (live_pseudos_reg_renumber[r2->regno] >= 0)
                  sparseset_set_bit (live_range_hard_reg_pseudos, r2->regno);
            }
        }
      COPY_HARD_REG_SET (conflict_set, lra_no_alloc_regs);
      IOR_HARD_REG_SET (conflict_set, lra_reg_info[regno].conflict_hard_regs);
      val = lra_reg_info[regno].val;
      EXECUTE_IF_SET_IN_SPARSESET (live_range_hard_reg_pseudos, conflict_regno)
        if (val != lra_reg_info[conflict_regno].val
            /* Multi-register pseudos with the same value should start
               on the same hard register.  */
            || hard_regno != reg_renumber[conflict_regno])
          add_to_hard_reg_set (&conflict_set,
                               lra_reg_info[conflict_regno].biggest_mode,
                               reg_renumber[conflict_regno]);
      if (! overlaps_hard_reg_set_p (conflict_set, mode, hard_regno))
        {
          update_lives (regno, false);
          continue;
        }
      bitmap_set_bit (spilled_pseudo_bitmap, regno);
      for (j = 0;
           j < hard_regno_nregs[hard_regno][PSEUDO_REGNO_MODE (regno)];
           j++)
        lra_hard_reg_usage[hard_regno + j] -= lra_reg_info[regno].freq;
      reg_renumber[regno] = -1;
      if (lra_dump_file != NULL)
        fprintf (lra_dump_file, "    Spill r%d after risky transformations\n",
                 regno);
    }
}

/* Improve allocation by assigning the same hard regno of inheritance
   pseudos to the connected pseudos.  We need this because inheritance
   pseudos are allocated after reload pseudos in the thread and when
   we assign a hard register to a reload pseudo we don't know yet that
   the connected inheritance pseudos can get the same hard register.
   Add pseudos with changed allocation to bitmap CHANGED_PSEUDOS.  */
static void
improve_inheritance (bitmap changed_pseudos)
{
  unsigned int k;
  int regno, another_regno, hard_regno, another_hard_regno, cost, i, n;
  lra_copy_t cp, next_cp;
  bitmap_iterator bi;

  n = 0;
  EXECUTE_IF_SET_IN_BITMAP (&lra_inheritance_pseudos, 0, k, bi)
    if (reg_renumber[k] >= 0 && lra_reg_info[k].nrefs != 0)
      sorted_pseudos[n++] = k;
  qsort (sorted_pseudos, n, sizeof (int), pseudo_compare_func);
  for (i = 0; i < n; i++)
    {
      regno = sorted_pseudos[i];
      hard_regno = reg_renumber[regno];
      lra_assert (hard_regno >= 0);
      for (cp = lra_reg_info[regno].copies; cp != NULL; cp = next_cp)
        {
          if (cp->regno1 == regno)
            {
              next_cp = cp->regno1_next;
              another_regno = cp->regno2;
            }
          else if (cp->regno2 == regno)
            {
              next_cp = cp->regno2_next;
              another_regno = cp->regno1;
            }
          else
            continue;
          /* Don't change reload pseudo allocation.  It might have
             this allocation for a purpose and changing it can result
             in LRA cycling.  */
          if ((another_regno < lra_constraint_new_regno_start
               || bitmap_bit_p (&lra_inheritance_pseudos, another_regno))
              && (another_hard_regno = reg_renumber[another_regno]) >= 0
              && another_hard_regno != hard_regno)
            {
              if (lra_dump_file != NULL)
                fprintf
                  (lra_dump_file,
                   "    Improving inheritance for %d(%d) and %d(%d)...\n",
                   regno, hard_regno, another_regno, another_hard_regno);
              update_lives (another_regno, true);
              lra_setup_reg_renumber (another_regno, -1, false);
              /* Try to give ANOTHER_REGNO the same hard register as
                 REGNO; if that fails, put its original hard register
                 back.  */
              if (hard_regno
                  == find_hard_regno_for (another_regno, &cost, hard_regno))
                assign_hard_regno (hard_regno, another_regno);
              else
                assign_hard_regno (another_hard_regno, another_regno);
              bitmap_set_bit (changed_pseudos, another_regno);
            }
        }
    }
}

/* Bitmap finally containing all pseudos spilled on this assignment
   pass.  */
static bitmap_head all_spilled_pseudos;
/* All pseudos whose allocation was changed.  */
static bitmap_head changed_pseudo_bitmap;
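
/* assign_by_spills below makes at most two passes over the sorted
   reload pseudos: the first pass may spill non-reload pseudos via
   spill_for; the rare second pass handles reload pseudos that failed
   only because other reload pseudos in the same insn took the required
   hard registers.  */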

/* Assign hard registers to reload pseudos and other pseudos.  */
static void
assign_by_spills (void)
{
  int i, n, nfails, iter, regno, hard_regno, cost, restore_regno;
  rtx insn;
  basic_block bb;
  bitmap_head changed_insns, do_not_assign_nonreload_pseudos;
  bitmap_head non_reload_pseudos;
  unsigned int u;
  bitmap_iterator bi;
  int max_regno = max_reg_num ();

  for (n = 0, i = lra_constraint_new_regno_start; i < max_regno; i++)
    if (reg_renumber[i] < 0 && lra_reg_info[i].nrefs != 0
        && regno_allocno_class_array[i] != NO_REGS)
      sorted_pseudos[n++] = i;
  bitmap_initialize (&insn_conflict_pseudos, &reg_obstack);
  bitmap_initialize (&spill_pseudos_bitmap, &reg_obstack);
  bitmap_initialize (&best_spill_pseudos_bitmap, &reg_obstack);
  update_hard_regno_preference_check = XCNEWVEC (int, max_regno);
  curr_update_hard_regno_preference_check = 0;
  memset (try_hard_reg_pseudos_check, 0, sizeof (try_hard_reg_pseudos_check));
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    bitmap_initialize (&try_hard_reg_pseudos[i], &reg_obstack);
  curr_pseudo_check = 0;
  bitmap_initialize (&changed_insns, &reg_obstack);
  bitmap_initialize (&non_reload_pseudos, &reg_obstack);
  bitmap_ior (&non_reload_pseudos, &lra_inheritance_pseudos, &lra_split_regs);
  bitmap_ior_into (&non_reload_pseudos, &lra_optional_reload_pseudos);
  for (iter = 0; iter <= 1; iter++)
    {
      qsort (sorted_pseudos, n, sizeof (int), reload_pseudo_compare_func);
      nfails = 0;
      for (i = 0; i < n; i++)
        {
          regno = sorted_pseudos[i];
          if (lra_dump_file != NULL)
            fprintf (lra_dump_file, "    Assigning to %d "
                     "(cl=%s, orig=%d, freq=%d, tfirst=%d, tfreq=%d)...\n",
                     regno, reg_class_names[regno_allocno_class_array[regno]],
                     ORIGINAL_REGNO (regno_reg_rtx[regno]),
                     lra_reg_info[regno].freq, regno_assign_info[regno].first,
                     regno_assign_info[regno_assign_info[regno].first].freq);
          hard_regno = find_hard_regno_for (regno, &cost, -1);
          if (hard_regno < 0
              && ! bitmap_bit_p (&non_reload_pseudos, regno))
            hard_regno = spill_for (regno, &all_spilled_pseudos);
          if (hard_regno < 0)
            {
              if (! bitmap_bit_p (&non_reload_pseudos, regno))
                sorted_pseudos[nfails++] = regno;
            }
          else
            {
              /* This register might have been spilled by the previous
                 pass.  Indicate that it is no longer spilled.  */
              bitmap_clear_bit (&all_spilled_pseudos, regno);
              assign_hard_regno (hard_regno, regno);
            }
        }
      if (nfails == 0)
        break;
      lra_assert (iter == 0);
      /* This is a very rare event.  We can not assign a hard
         register to reload pseudo because the hard register was
         assigned to another reload pseudo on a previous
         assignment pass.  For x86 example, on the 1st pass we
         assigned CX (although another hard register could be used
         for this) to reload pseudo in an insn, on the 2nd pass we
         need CX (and only this) hard register for a new reload
         pseudo in the same insn.  */
      if (lra_dump_file != NULL)
        fprintf (lra_dump_file, "  2nd iter for reload pseudo assignments:\n");
      for (i = 0; i < nfails; i++)
        {
          if (lra_dump_file != NULL)
            fprintf (lra_dump_file, "    Reload r%d assignment failure\n",
                     sorted_pseudos[i]);
          bitmap_ior_into (&changed_insns,
                           &lra_reg_info[sorted_pseudos[i]].insn_bitmap);
        }
      FOR_EACH_BB (bb)
        FOR_BB_INSNS (bb, insn)
          if (bitmap_bit_p (&changed_insns, INSN_UID (insn)))
            {
              lra_insn_recog_data_t data;
              struct lra_insn_reg *r;

              data = lra_get_insn_recog_data (insn);
              for (r = data->regs; r != NULL; r = r->next)
                {
                  regno = r->regno;
                  /* A reload pseudo did not get a hard register on the
                     first iteration because of the conflict with
                     another reload pseudos in the same insn.  So we
                     consider only reload pseudos assigned to hard
                     registers.  We shall exclude inheritance pseudos as
                     they can occur in original insns (not reload ones).
                     We can omit the check for split pseudos because
                     they occur only in move insns containing non-reload
                     pseudos.  */
                  if (regno < lra_constraint_new_regno_start
                      || bitmap_bit_p (&lra_inheritance_pseudos, regno)
                      || reg_renumber[regno] < 0)
                    continue;
                  sorted_pseudos[nfails++] = regno;
                  if (lra_dump_file != NULL)
                    fprintf (lra_dump_file,
                             "      Spill reload r%d(hr=%d, freq=%d)\n",
                             regno, reg_renumber[regno],
                             lra_reg_info[regno].freq);
                  update_lives (regno, true);
                  lra_setup_reg_renumber (regno, -1, false);
                }
            }
      n = nfails;
    }
  improve_inheritance (&changed_pseudo_bitmap);
  bitmap_clear (&non_reload_pseudos);
  bitmap_clear (&changed_insns);
  if (! lra_simple_p)
    {
      /* We should not assign to original pseudos of inheritance
         pseudos or split pseudos if any its inheritance pseudo did
         not get hard register or any its split pseudo was not split
         because undo inheritance/split pass will extend live range of
         such inheritance or split pseudos.  */
      bitmap_initialize (&do_not_assign_nonreload_pseudos, &reg_obstack);
      EXECUTE_IF_SET_IN_BITMAP (&lra_inheritance_pseudos, 0, u, bi)
        if ((restore_regno = lra_reg_info[u].restore_regno) >= 0
            && reg_renumber[u] < 0
            && bitmap_bit_p (&lra_inheritance_pseudos, u))
          bitmap_set_bit (&do_not_assign_nonreload_pseudos, restore_regno);
      EXECUTE_IF_SET_IN_BITMAP (&lra_split_regs, 0, u, bi)
        if ((restore_regno = lra_reg_info[u].restore_regno) >= 0
            && reg_renumber[u] >= 0)
          bitmap_set_bit (&do_not_assign_nonreload_pseudos, restore_regno);
      for (n = 0, i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
        if (((i < lra_constraint_new_regno_start
              && ! bitmap_bit_p (&do_not_assign_nonreload_pseudos, i))
             || (bitmap_bit_p (&lra_inheritance_pseudos, i)
                 && lra_reg_info[i].restore_regno >= 0)
             || (bitmap_bit_p (&lra_split_regs, i)
                 && lra_reg_info[i].restore_regno >= 0)
             || bitmap_bit_p (&lra_optional_reload_pseudos, i))
            && reg_renumber[i] < 0 && lra_reg_info[i].nrefs != 0
            && regno_allocno_class_array[i] != NO_REGS)
          sorted_pseudos[n++] = i;
      bitmap_clear (&do_not_assign_nonreload_pseudos);
      if (n != 0 && lra_dump_file != NULL)
        fprintf (lra_dump_file, "  Reassigning non-reload pseudos\n");
      qsort (sorted_pseudos, n, sizeof (int), pseudo_compare_func);
      for (i = 0; i < n; i++)
        {
          regno = sorted_pseudos[i];
          hard_regno = find_hard_regno_for (regno, &cost, -1);
          if (hard_regno >= 0)
            {
              assign_hard_regno (hard_regno, regno);
              /* We change allocation for non-reload pseudo on this
                 iteration -- mark the pseudo for invalidation of used
                 alternatives of insns containing the pseudo.  */
              bitmap_set_bit (&changed_pseudo_bitmap, regno);
            }
        }
    }
  free (update_hard_regno_preference_check);
  bitmap_clear (&best_spill_pseudos_bitmap);
  bitmap_clear (&spill_pseudos_bitmap);
  bitmap_clear (&insn_conflict_pseudos);
}

/* Entry function to assign hard registers to new reload pseudos
   starting with LRA_CONSTRAINT_NEW_REGNO_START (by possible spilling
   of old pseudos) and possibly to the old pseudos.  The function adds
   the insns to be processed on the next constraint pass.  Those are
   all insns that contain non-reload and non-inheritance pseudos with
   changed allocation.

   Return true if we did not spill any non-reload and non-inheritance
   pseudos.  */
bool
lra_assign (void)
{
  int i;
  unsigned int u;
  bitmap_iterator bi;
  bitmap_head insns_to_process;
  bool no_spills_p;
  int max_regno = max_reg_num ();

  timevar_push (TV_LRA_ASSIGN);
  init_lives ();
  sorted_pseudos = XNEWVEC (int, max_regno);
  sorted_reload_pseudos = XNEWVEC (int, max_regno);
  regno_allocno_class_array = XNEWVEC (enum reg_class, max_regno);
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    regno_allocno_class_array[i] = lra_get_allocno_class (i);
  init_regno_assign_info ();
  bitmap_initialize (&all_spilled_pseudos, &reg_obstack);
  create_live_range_start_chains ();
  setup_live_pseudos_and_spill_after_risky_transforms (&all_spilled_pseudos);
#ifdef ENABLE_CHECKING
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    if (lra_reg_info[i].nrefs != 0 && reg_renumber[i] >= 0
        && lra_reg_info[i].call_p
        && overlaps_hard_reg_set_p (call_used_reg_set,
                                    PSEUDO_REGNO_MODE (i), reg_renumber[i]))
      gcc_unreachable ();
#endif
  /* Setup insns to process on the next constraint pass.  */
  bitmap_initialize (&changed_pseudo_bitmap, &reg_obstack);
  init_live_reload_and_inheritance_pseudos ();
  assign_by_spills ();
  finish_live_reload_and_inheritance_pseudos ();
  bitmap_ior_into (&changed_pseudo_bitmap, &all_spilled_pseudos);
  no_spills_p = true;
  EXECUTE_IF_SET_IN_BITMAP (&all_spilled_pseudos, 0, u, bi)
    /* We ignore spilled pseudos created on last inheritance pass
       because they will be removed.  */
    if (lra_reg_info[u].restore_regno < 0)
      {
        no_spills_p = false;
        break;
      }
  finish_live_range_start_chains ();
  bitmap_clear (&all_spilled_pseudos);
  bitmap_initialize (&insns_to_process, &reg_obstack);
  EXECUTE_IF_SET_IN_BITMAP (&changed_pseudo_bitmap, 0, u, bi)
    bitmap_ior_into (&insns_to_process, &lra_reg_info[u].insn_bitmap);
  bitmap_clear (&changed_pseudo_bitmap);
  EXECUTE_IF_SET_IN_BITMAP (&insns_to_process, 0, u, bi)
    {
      lra_push_insn_by_uid (u);
      /* Invalidate used alternatives so the insn is reprocessed.  */
      lra_set_used_insn_alternative_by_uid (u, -1);
    }
  bitmap_clear (&insns_to_process);
  finish_regno_assign_info ();
  free (regno_allocno_class_array);
  free (sorted_pseudos);
  free (sorted_reload_pseudos);
  finish_lives ();
  timevar_pop (TV_LRA_ASSIGN);
  return no_spills_p;
}