/* Change pseudos by memory.
   Copyright (C) 2010-2023 Free Software Foundation, Inc.
   Contributed by Vladimir Makarov <vmakarov@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file contains code for a pass to change spilled pseudos into
   memory.

   The pass creates the necessary stack slots and assigns spilled
   pseudos to the stack slots in the following way:

   for all spilled pseudos P most frequently used first do
     for all stack slots S do
       if P doesn't conflict with pseudos assigned to S then
	 assign S to P and go on to process the next pseudo
       end
     end
     create a new stack slot S and assign P to S

   The actual algorithm is a bit more complicated because of the
   different sizes of the pseudos.

   After that the code changes the spilled pseudos (except the ones
   created from scratches) to the corresponding stack slot memory in
   the RTL.

   If at least one stack slot was created, we need to run more passes
   because we have new addresses which should be checked and because
   the old address displacements might change and the address
   constraints (or insn memory constraints) might not be satisfied any
   more.

   For some targets, the pass can spill some pseudos into hard
   registers of a different class (usually into vector registers)
   instead of spilling them into memory if it is possible and
   profitable.  Spilling a GENERAL_REGS pseudo into SSE registers on
   an Intel Core i7 is an example of such an optimization, and it is
   actually recommended by the Intel optimization guide.

   The file also contains code for the final replacement of pseudos
   that got hard regs by those hard regs.  */
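/* Illustrative only -- the following sketch is not part of the pass.
   It restates the first-fit slot sharing described above using
   simplified, hypothetical types (toy_slot, assign_slots_sketch,
   conflict_p are made up for this sketch): pseudos are visited most
   frequently used first, each pseudo reuses the first existing slot
   whose current users it does not conflict with, and otherwise a new
   slot is created.  The CONFLICT_P callback stands in for the
   live-range intersection test used by the real pass.  */
#if 0
#include <vector>

struct toy_slot
{
  std::vector<int> regnos;	/* Pseudos assigned to this slot.  */
};

static std::vector<toy_slot>
assign_slots_sketch (const std::vector<int> &pseudos_by_freq,
		     bool (*conflict_p) (int, int))
{
  std::vector<toy_slot> slots;
  for (int p : pseudos_by_freq)
    {
      toy_slot *found = NULL;
      /* First fit: take the first slot none of whose users conflicts
	 with P.  */
      for (toy_slot &s : slots)
	{
	  bool ok = true;
	  for (int q : s.regnos)
	    if (conflict_p (p, q))
	      {
		ok = false;
		break;
	      }
	  if (ok)
	    {
	      found = &s;
	      break;
	    }
	}
      if (found == NULL)
	{
	  /* No compatible slot: create a new one.  */
	  slots.push_back (toy_slot ());
	  found = &slots.back ();
	}
      found->regnos.push_back (p);
    }
  return slots;
}
#endif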
#include "coretypes.h"
#include "insn-config.h"
/* Max regno at the start of the pass.  */
static int regs_num;

/* Map spilled regno -> hard regno used instead of memory for
   spilling.  */
static rtx *spill_hard_reg;

/* The structure describes the stack slot of a spilled pseudo.  */
struct pseudo_slot
{
  /* Number (0, 1, ...) of the stack slot to which the given pseudo
     belongs.  */
  int slot_num;
  /* First or next slot with the same slot number.  */
  struct pseudo_slot *next, *first;
  /* Memory representing the spilled pseudo.  */
  rtx mem;
};

/* The stack slots for each spilled pseudo.  Indexed by regnos.  */
static struct pseudo_slot *pseudo_slots;
/* The structure describes a register or a stack slot which can be
   used for several spilled pseudos.  */
class slot
{
public:
  /* First pseudo with the given stack slot.  */
  int regno;
  /* Hard reg into which the slot pseudos are spilled.  The value is
     negative for pseudos spilled into memory.  */
  int hard_regno;
  /* Maximum alignment required by all users of the slot.  */
  unsigned int align;
  /* Maximum size required by all users of the slot.  */
  poly_int64 size;
  /* Memory representing the whole stack slot.  It can be different
     from the memory representing a pseudo belonging to the given
     stack slot because the pseudo can be placed in a part of the
     corresponding stack slot.  The value is NULL for pseudos spilled
     into a hard reg.  */
  rtx mem;
  /* Combined live ranges of all pseudos belonging to the given slot.
     It is used to figure out that a new spilled pseudo can use the
     given stack slot.  */
  lra_live_range_t live_ranges;
};

/* Array containing info about the stack slots.  The array element is
   indexed by the stack slot number in the range [0..slots_num).  */
static class slot *slots;
/* The number of the stack slots currently existing.  */
static int slots_num;
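/* In summary: SLOTS is indexed by stack slot number and records the
   properties shared by all pseudos spilled to that slot, while
   PSEUDO_SLOTS is indexed by pseudo regno and records the per-pseudo
   view (its slot number and its own memory).  The pseudos sharing one
   slot form a singly linked list through PSEUDO_SLOTS[].next, rooted
   at the pseudo SLOTS[].regno.  */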
/* Set up memory of the spilled pseudo I.  The function can allocate
   the corresponding stack slot if this has not been done yet.  */
static void
assign_mem_slot (int i)
{
  rtx x;
  machine_mode mode = GET_MODE (regno_reg_rtx[i]);
  poly_int64 inherent_size = PSEUDO_REGNO_BYTES (i);
  machine_mode wider_mode
    = wider_subreg_mode (mode, lra_reg_info[i].biggest_mode);
  poly_int64 total_size = GET_MODE_SIZE (wider_mode);
  poly_int64 adjust = 0;

  lra_assert (regno_reg_rtx[i] != NULL_RTX && REG_P (regno_reg_rtx[i])
	      && lra_reg_info[i].nrefs != 0 && reg_renumber[i] < 0);

  unsigned int slot_num = pseudo_slots[i].slot_num;
  x = slots[slot_num].mem;
  if (x == NULL_RTX)
    {
      x = assign_stack_local (BLKmode, slots[slot_num].size,
			      slots[slot_num].align);
      slots[slot_num].mem = x;
    }

  /* On a big endian machine, the "address" of the slot is the address
     of the low part that fits its inherent mode.  */
  adjust += subreg_size_lowpart_offset (inherent_size, total_size);
  x = adjust_address_nv (x, GET_MODE (regno_reg_rtx[i]), adjust);

  /* Set all of the memory attributes as appropriate for a spill.  */
  set_mem_attrs_for_spill (x);
  pseudo_slots[i].mem = x;
}
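/* For example, a pseudo whose inherent mode is 4 bytes wide but whose
   biggest access mode is 8 bytes wide gets an 8-byte slot;
   subreg_size_lowpart_offset (4, 8) then yields an adjustment of 4 on
   a big endian target (the low part sits at the end of the slot) and
   0 on a little endian one.  */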
/* Sort pseudos according to their usage frequencies.  */
static int
regno_freq_compare (const void *v1p, const void *v2p)
{
  const int regno1 = *(const int *) v1p;
  const int regno2 = *(const int *) v2p;
  int diff;

  if ((diff = lra_reg_info[regno2].freq - lra_reg_info[regno1].freq) != 0)
    return diff;
  return regno1 - regno2;
}
/* Sort pseudos according to their slots, putting the slots in the order
   that they should be allocated.

   First prefer to group slots with variable sizes together and slots
   with constant sizes together, since that usually makes them easier
   to address from a common anchor point.  E.g. loads of polynomial-sized
   registers tend to take polynomial offsets while loads of constant-sized
   registers tend to take constant (non-polynomial) offsets.

   Next, slots with lower numbers have the highest priority and should
   get the smallest displacement from the stack or frame pointer
   (whichever is being used).

   The first allocated slot is always closest to the frame pointer,
   so prefer lower slot numbers when frame_pointer_needed.  If the stack
   and frame grow in the same direction, then the first allocated slot is
   always closest to the initial stack pointer and furthest away from the
   final stack pointer, so allocate higher numbers first when using the
   stack pointer in that case.  The reverse is true if the stack and
   frame grow in opposite directions.  */
static int
pseudo_reg_slot_compare (const void *v1p, const void *v2p)
{
  const int regno1 = *(const int *) v1p;
  const int regno2 = *(const int *) v2p;
  int diff, slot_num1, slot_num2;

  slot_num1 = pseudo_slots[regno1].slot_num;
  slot_num2 = pseudo_slots[regno2].slot_num;
  diff = (int (slots[slot_num1].size.is_constant ())
	  - int (slots[slot_num2].size.is_constant ()));
  if (diff != 0)
    return diff;
  if ((diff = slot_num1 - slot_num2) != 0)
    return (frame_pointer_needed
	    || (!FRAME_GROWS_DOWNWARD) == STACK_GROWS_DOWNWARD ? diff : -diff);
  poly_int64 total_size1 = GET_MODE_SIZE (lra_reg_info[regno1].biggest_mode);
  poly_int64 total_size2 = GET_MODE_SIZE (lra_reg_info[regno2].biggest_mode);
  if ((diff = compare_sizes_for_sort (total_size2, total_size1)) != 0)
    return diff;
  return regno1 - regno2;
}
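/* As an example of the ordering above: if three spilled pseudos were
   given constant-sized slots 2, 0 and 1, then with
   frame_pointer_needed the sorted array lists them in slot order
   0, 1, 2, so slot 0 ends up nearest the frame pointer; when
   allocation is done from the stack pointer and the stack and frame
   grow in the same direction, the order is reversed (2, 1, 0).  */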
/* Assign spill hard registers to N pseudos in PSEUDO_REGNOS which is
   sorted in order of highest frequency first.  Put the pseudos which
   did not get a spill hard register at the beginning of array
   PSEUDO_REGNOS.  Return the number of such pseudos.  */
static int
assign_spill_hard_regs (int *pseudo_regnos, int n)
{
  int i, k, p, regno, res, spill_class_size, hard_regno, nr;
  enum reg_class rclass, spill_class;
  machine_mode mode;
  lra_live_range_t r;
  rtx_insn *insn;
  rtx set;
  basic_block bb;
  HARD_REG_SET conflict_hard_regs;
  bitmap setjump_crosses = regstat_get_setjmp_crosses ();
  /* Hard registers which cannot be used for any purpose at a given
     program point because they are unallocatable or already allocated
     for other pseudos.  */
  HARD_REG_SET *reserved_hard_regs;

  if (! lra_reg_spill_p)
    return n;
  /* Set up reserved hard regs for every program point.  */
  reserved_hard_regs = XNEWVEC (HARD_REG_SET, lra_live_max_point);
  for (p = 0; p < lra_live_max_point; p++)
    reserved_hard_regs[p] = lra_no_alloc_regs;
  for (i = FIRST_PSEUDO_REGISTER; i < regs_num; i++)
    if (lra_reg_info[i].nrefs != 0
	&& (hard_regno = lra_get_regno_hard_regno (i)) >= 0)
      for (r = lra_reg_info[i].live_ranges; r != NULL; r = r->next)
	for (p = r->start; p <= r->finish; p++)
	  add_to_hard_reg_set (&reserved_hard_regs[p],
			       lra_reg_info[i].biggest_mode, hard_regno);
  auto_bitmap ok_insn_bitmap (&reg_obstack);
  FOR_EACH_BB_FN (bb, cfun)
    FOR_BB_INSNS (bb, insn)
      if (DEBUG_INSN_P (insn)
	  || ((set = single_set (insn)) != NULL_RTX
	      && REG_P (SET_SRC (set)) && REG_P (SET_DEST (set))))
	bitmap_set_bit (ok_insn_bitmap, INSN_UID (insn));
  for (res = i = 0; i < n; i++)
    {
      regno = pseudo_regnos[i];
      rclass = lra_get_allocno_class (regno);
      if (bitmap_bit_p (setjump_crosses, regno)
	  || (spill_class
	      = ((enum reg_class)
		 targetm.spill_class ((reg_class_t) rclass,
				      PSEUDO_REGNO_MODE (regno)))) == NO_REGS
	  || bitmap_intersect_compl_p (&lra_reg_info[regno].insn_bitmap,
				       ok_insn_bitmap))
	{
	  pseudo_regnos[res++] = regno;
	  continue;
	}
      lra_assert (spill_class != NO_REGS);
      conflict_hard_regs = lra_reg_info[regno].conflict_hard_regs;
      for (r = lra_reg_info[regno].live_ranges; r != NULL; r = r->next)
	for (p = r->start; p <= r->finish; p++)
	  conflict_hard_regs |= reserved_hard_regs[p];
      spill_class_size = ira_class_hard_regs_num[spill_class];
      mode = lra_reg_info[regno].biggest_mode;
      for (k = 0; k < spill_class_size; k++)
	{
	  hard_regno = ira_class_hard_regs[spill_class][k];
	  if (TEST_HARD_REG_BIT (eliminable_regset, hard_regno)
	      || !targetm.hard_regno_mode_ok (hard_regno, mode))
	    continue;
	  if (! overlaps_hard_reg_set_p (conflict_hard_regs, mode, hard_regno))
	    break;
	}
      if (k >= spill_class_size)
	{
	  /* There are no available regs -- assign memory later.  */
	  pseudo_regnos[res++] = regno;
	  continue;
	}
      if (lra_dump_file != NULL)
	fprintf (lra_dump_file, "  Spill r%d into hr%d\n", regno, hard_regno);
      add_to_hard_reg_set (&hard_regs_spilled_into,
			   lra_reg_info[regno].biggest_mode, hard_regno);
      /* Update reserved_hard_regs.  */
      for (r = lra_reg_info[regno].live_ranges; r != NULL; r = r->next)
	for (p = r->start; p <= r->finish; p++)
	  add_to_hard_reg_set (&reserved_hard_regs[p],
			       lra_reg_info[regno].biggest_mode, hard_regno);
      spill_hard_reg[regno]
	= gen_raw_REG (PSEUDO_REGNO_MODE (regno), hard_regno);
      for (nr = 0;
	   nr < hard_regno_nregs (hard_regno,
				  lra_reg_info[regno].biggest_mode);
	   nr++)
	df_set_regs_ever_live (hard_regno + nr, true);
    }
  free (reserved_hard_regs);
  return res;
}
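/* In other words, a spill hard register is acceptable for a pseudo
   only if it is free at every program point of the pseudo's live
   ranges: if the candidate hard reg is unallocatable or already holds
   another pseudo anywhere within points r->start..r->finish of any
   range R of the pseudo, the per-point RESERVED_HARD_REGS sets above
   make it show up in CONFLICT_HARD_REGS and the candidate is
   skipped.  */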
/* Add pseudo REGNO to slot SLOT_NUM.  */
static void
add_pseudo_to_slot (int regno, int slot_num)
{
  struct pseudo_slot *first;

  /* Each pseudo has an inherent size which comes from its own mode,
     and a total size which provides room for paradoxical subregs.
     We need to make sure the size and alignment of the slot are
     sufficient for both.  */
  machine_mode mode = wider_subreg_mode (PSEUDO_REGNO_MODE (regno),
					 lra_reg_info[regno].biggest_mode);
  unsigned int align = spill_slot_alignment (mode);
  slots[slot_num].align = MAX (slots[slot_num].align, align);
  slots[slot_num].size = upper_bound (slots[slot_num].size,
				      GET_MODE_SIZE (mode));

  if (slots[slot_num].regno < 0)
    {
      /* It is the first pseudo in the slot.  */
      slots[slot_num].regno = regno;
      pseudo_slots[regno].first = &pseudo_slots[regno];
      pseudo_slots[regno].next = NULL;
    }
  else
    {
      first = pseudo_slots[regno].first = &pseudo_slots[slots[slot_num].regno];
      pseudo_slots[regno].next = first->next;
      first->next = &pseudo_slots[regno];
    }
  pseudo_slots[regno].mem = NULL_RTX;
  pseudo_slots[regno].slot_num = slot_num;
  slots[slot_num].live_ranges
    = lra_merge_live_ranges (slots[slot_num].live_ranges,
			     lra_copy_live_range_list
			     (lra_reg_info[regno].live_ranges));
}
/* Assign stack slot numbers to pseudos in array PSEUDO_REGNOS of
   length N.  Sort pseudos in PSEUDO_REGNOS for the subsequent
   assignment of memory stack slots.  */
static void
assign_stack_slot_num_and_sort_pseudos (int *pseudo_regnos, int n)
{
  int i, j, regno;

  /* Assign stack slot numbers to spilled pseudos, using smaller
     numbers for the most frequently used pseudos.  */
  for (i = 0; i < n; i++)
    {
      regno = pseudo_regnos[i];
      if (! flag_ira_share_spill_slots)
	j = slots_num;
      else
	{
	  machine_mode mode
	    = wider_subreg_mode (PSEUDO_REGNO_MODE (regno),
				 lra_reg_info[regno].biggest_mode);
	  for (j = 0; j < slots_num; j++)
	    if (slots[j].hard_regno < 0
		/* Although it's possible to share slots between modes
		   with constant and non-constant widths, we usually
		   get better spill code by keeping the constant and
		   non-constant areas separate.  */
		&& (GET_MODE_SIZE (mode).is_constant ()
		    == slots[j].size.is_constant ())
		&& ! (lra_intersected_live_ranges_p
		      (slots[j].live_ranges,
		       lra_reg_info[regno].live_ranges)))
	      break;
	}
      if (j >= slots_num)
	{
	  /* New slot.  */
	  slots[j].live_ranges = NULL;
	  slots[j].size = 0;
	  slots[j].align = BITS_PER_UNIT;
	  slots[j].regno = slots[j].hard_regno = -1;
	  slots[j].mem = NULL_RTX;
	  slots_num++;
	}
      add_pseudo_to_slot (regno, j);
    }
  /* Sort regnos according to their slot numbers.  */
  qsort (pseudo_regnos, n, sizeof (int), pseudo_reg_slot_compare);
}
/* Recursively process LOC in INSN and change spilled pseudos to the
   corresponding memory or spilled hard reg.  Ignore spilled pseudos
   created from scratches.  Return true if the pseudo's nrefs is equal
   to 0 (don't change the pseudo in this case); otherwise return
   false.  */
static bool
remove_pseudos (rtx *loc, rtx_insn *insn)
{
  int i;
  rtx hard_reg;
  const char *fmt;
  enum rtx_code code;
  bool res = false;

  if (*loc == NULL_RTX)
    return res;
  code = GET_CODE (*loc);
  if (code == SUBREG && REG_P (SUBREG_REG (*loc)))
    {
      /* Try to remove memory subregs to simplify LRA's job
	 and to avoid LRA cycling in the case of a subreg memory
	 reload.  */
      res = remove_pseudos (&SUBREG_REG (*loc), insn);
      if (GET_CODE (SUBREG_REG (*loc)) == MEM)
	{
	  alter_subreg (loc, false);
	  if (GET_CODE (*loc) == MEM)
	    {
	      lra_update_insn_recog_data (insn);
	      if (lra_dump_file != NULL)
		fprintf (lra_dump_file,
			 "Memory subreg was simplified in insn #%u\n",
			 INSN_UID (insn));
	    }
	}
      return res;
    }
  else if (code == REG && (i = REGNO (*loc)) >= FIRST_PSEUDO_REGISTER
	   && lra_get_regno_hard_regno (i) < 0
	   /* We do not want to assign memory for former scratches because
	      it might result in an address reload for some targets.  In
	      any case we transform such pseudos, which did not get hard
	      registers, back into scratches.  */
	   && ! ira_former_scratch_p (i))
    {
      if (lra_reg_info[i].nrefs == 0
	  && pseudo_slots[i].mem == NULL && spill_hard_reg[i] == NULL)
	return true;
      if ((hard_reg = spill_hard_reg[i]) != NULL_RTX)
	*loc = copy_rtx (hard_reg);
      else if (pseudo_slots[i].mem != NULL_RTX)
	/* There might be no memory slot or hard reg for a pseudo when we
	   spill the frame pointer after the elimination of the frame
	   pointer to the stack pointer became impossible.  */
	{
	  rtx x = lra_eliminate_regs_1 (insn, pseudo_slots[i].mem,
					GET_MODE (pseudo_slots[i].mem),
					false, false, 0, true);
	  *loc = x != pseudo_slots[i].mem ? x : copy_rtx (x);
	}
      return res;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	res = remove_pseudos (&XEXP (*loc, i), insn) || res;
      else if (fmt[i] == 'E')
	{
	  int j;

	  for (j = XVECLEN (*loc, i) - 1; j >= 0; j--)
	    res = remove_pseudos (&XVECEXP (*loc, i, j), insn) || res;
	}
    }
  return res;
}
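/* For instance, if pseudo 100 was given the stack slot at offset -16
   from the frame pointer, a use such as (set (reg:DI 100) (reg:DI 1))
   is rewritten along the lines of
   (set (mem/c:DI (plus:DI (reg/f:DI fp) (const_int -16))) (reg:DI 1));
   if it was given a spill hard register instead, the pseudo is simply
   replaced by that hard register.  The exact address form is, of
   course, target dependent.  */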
/* Convert spilled pseudos into their stack slots or spill hard regs,
   and put the insns to process on the constraint stack (that is all
   insns in which pseudos were changed to memory or spill hard regs).  */
static void
spill_pseudos (void)
{
  basic_block bb;
  rtx_insn *insn, *curr;
  int i;

  auto_bitmap spilled_pseudos (&reg_obstack);
  auto_bitmap changed_insns (&reg_obstack);
  for (i = FIRST_PSEUDO_REGISTER; i < regs_num; i++)
    {
      if (lra_reg_info[i].nrefs != 0 && lra_get_regno_hard_regno (i) < 0
	  && ! ira_former_scratch_p (i))
	{
	  bitmap_set_bit (spilled_pseudos, i);
	  bitmap_ior_into (changed_insns, &lra_reg_info[i].insn_bitmap);
	}
    }
  FOR_EACH_BB_FN (bb, cfun)
    {
      FOR_BB_INSNS_SAFE (bb, insn, curr)
	{
	  bool removed_pseudo_p = false;

	  if (bitmap_bit_p (changed_insns, INSN_UID (insn)))
	    {
	      rtx *link_loc, link;

	      removed_pseudo_p = remove_pseudos (&PATTERN (insn), insn);
	      if (CALL_P (insn)
		  && remove_pseudos (&CALL_INSN_FUNCTION_USAGE (insn), insn))
		removed_pseudo_p = true;
	      for (link_loc = &REG_NOTES (insn);
		   (link = *link_loc) != NULL_RTX;
		   link_loc = &XEXP (link, 1))
		{
		  switch (REG_NOTE_KIND (link))
		    {
		    case REG_FRAME_RELATED_EXPR:
		    case REG_CFA_DEF_CFA:
		    case REG_CFA_ADJUST_CFA:
		    case REG_CFA_OFFSET:
		    case REG_CFA_REGISTER:
		    case REG_CFA_EXPRESSION:
		    case REG_CFA_RESTORE:
		    case REG_CFA_SET_VDRAP:
		      if (remove_pseudos (&XEXP (link, 0), insn))
			removed_pseudo_p = true;
		      break;
		    default:
		      break;
		    }
		}
	      if (lra_dump_file != NULL)
		fprintf (lra_dump_file,
			 "Changing spilled pseudos to memory in insn #%u\n",
			 INSN_UID (insn));
	      lra_push_insn (insn);
	      if (lra_reg_spill_p || targetm.different_addr_displacement_p ())
		lra_set_used_insn_alternative (insn, LRA_UNKNOWN_ALT);
	    }
	  else if (CALL_P (insn)
		   /* The presence of a pseudo in CALL_INSN_FUNCTION_USAGE
		      does not affect the value of insn_bitmap of the
		      corresponding lra_reg_info.  That is because we
		      don't need to reload pseudos in
		      CALL_INSN_FUNCTION_USAGEs.  So if we process only
		      the insns in the insn_bitmap of a given pseudo
		      here, we can miss the pseudo in some
		      CALL_INSN_FUNCTION_USAGEs.  */
		   && remove_pseudos (&CALL_INSN_FUNCTION_USAGE (insn), insn))
	    removed_pseudo_p = true;
	  if (removed_pseudo_p)
	    {
	      lra_assert (DEBUG_INSN_P (insn));
	      lra_invalidate_insn_data (insn);
	      INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
	      if (lra_dump_file != NULL)
		fprintf (lra_dump_file,
			 "Debug insn #%u is reset because it referenced "
			 "removed pseudo\n", INSN_UID (insn));
	    }
	}
      bitmap_and_compl_into (df_get_live_in (bb), spilled_pseudos);
      bitmap_and_compl_into (df_get_live_out (bb), spilled_pseudos);
    }
}
/* Return true if we need scratch reg assignments.  */
bool
lra_need_for_scratch_reg_p (void)
{
  int i, max_regno = max_reg_num ();

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    if (lra_reg_info[i].nrefs != 0 && lra_get_regno_hard_regno (i) < 0
	&& ira_former_scratch_p (i))
      return true;
  return false;
}

/* Return true if we need to change some pseudos into memory.  */
bool
lra_need_for_spills_p (void)
{
  int i, max_regno = max_reg_num ();

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    if (lra_reg_info[i].nrefs != 0 && lra_get_regno_hard_regno (i) < 0
	&& ! ira_former_scratch_p (i))
      return true;
  return false;
}
/* Change spilled pseudos into memory or spill hard regs.  Put changed
   insns on the constraint stack (these insns will be considered on
   the next constraint pass).  The changed insns are all insns in
   which pseudos were changed.  */
void
lra_spill (void)
{
  int i, n, n2, curr_regno;
  int *pseudo_regnos;

  regs_num = max_reg_num ();
  spill_hard_reg = XNEWVEC (rtx, regs_num);
  pseudo_regnos = XNEWVEC (int, regs_num);
  for (n = 0, i = FIRST_PSEUDO_REGISTER; i < regs_num; i++)
    if (lra_reg_info[i].nrefs != 0 && lra_get_regno_hard_regno (i) < 0
	/* We do not want to assign memory for former scratches.  */
	&& ! ira_former_scratch_p (i))
      pseudo_regnos[n++] = i;
  pseudo_slots = XNEWVEC (struct pseudo_slot, regs_num);
  for (i = FIRST_PSEUDO_REGISTER; i < regs_num; i++)
    {
      spill_hard_reg[i] = NULL_RTX;
      pseudo_slots[i].mem = NULL_RTX;
    }
  slots = XNEWVEC (class slot, regs_num);
  /* Sort regnos according to their usage frequencies.  */
  qsort (pseudo_regnos, n, sizeof (int), regno_freq_compare);
  n = assign_spill_hard_regs (pseudo_regnos, n);
  slots_num = 0;
  assign_stack_slot_num_and_sort_pseudos (pseudo_regnos, n);
  for (i = 0; i < n; i++)
    if (pseudo_slots[pseudo_regnos[i]].mem == NULL_RTX)
      assign_mem_slot (pseudo_regnos[i]);
  if ((n2 = lra_update_fp2sp_elimination (pseudo_regnos)) > 0)
    {
      /* Assign stack slots to the spilled pseudos assigned to fp.  */
      assign_stack_slot_num_and_sort_pseudos (pseudo_regnos, n2);
      for (i = 0; i < n2; i++)
	if (pseudo_slots[pseudo_regnos[i]].mem == NULL_RTX)
	  assign_mem_slot (pseudo_regnos[i]);
    }
  if (n + n2 > 0 && crtl->stack_alignment_needed)
    /* If we have a stack frame, we must align it now.  The stack size
       may be a part of the offset computation for register
       elimination.  */
    assign_stack_local (BLKmode, 0, crtl->stack_alignment_needed);
  if (lra_dump_file != NULL)
    {
      for (i = 0; i < slots_num; i++)
	{
	  fprintf (lra_dump_file, "  Slot %d regnos (width = ", i);
	  print_dec (GET_MODE_SIZE (GET_MODE (slots[i].mem)),
		     lra_dump_file, SIGNED);
	  fprintf (lra_dump_file, "):");
	  for (curr_regno = slots[i].regno;;
	       curr_regno = pseudo_slots[curr_regno].next - pseudo_slots)
	    {
	      fprintf (lra_dump_file, " %d", curr_regno);
	      if (pseudo_slots[curr_regno].next == NULL)
		break;
	    }
	  fprintf (lra_dump_file, "\n");
	}
    }
  spill_pseudos ();
  free (slots);
  free (pseudo_slots);
  free (pseudo_regnos);
  free (spill_hard_reg);
}
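/* With dumping enabled, the loop above produces lines of the form

     Slot 0 regnos (width = 8): 105 210

   i.e. each created slot, its width in bytes, and the spilled pseudos
   that share it (the regnos shown are, of course, specific to the
   function being compiled).  */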
/* Apply alter_subreg for subregs of regs in *LOC.  Use FINAL_P for
   alter_subreg calls.  Return true if any subreg of a reg was
   processed.  */
static bool
alter_subregs (rtx *loc, bool final_p)
{
  int i;
  rtx x = *loc;
  bool res;
  const char *fmt;
  enum rtx_code code;

  if (x == NULL_RTX)
    return false;
  code = GET_CODE (x);
  if (code == SUBREG && REG_P (SUBREG_REG (x)))
    {
      lra_assert (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER);
      alter_subreg (loc, final_p);
      return true;
    }
  fmt = GET_RTX_FORMAT (code);
  res = false;
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if (alter_subregs (&XEXP (x, i), final_p))
	    res = true;
	}
      else if (fmt[i] == 'E')
	{
	  int j;

	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (alter_subregs (&XVECEXP (x, i, j), final_p))
	      res = true;
	}
    }
  return res;
}
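/* By this point every remaining SUBREG wraps a hard register, so
   alter_subreg can fold it away; e.g. a subreg selecting the high
   word of a double-word hard register pair is typically replaced by
   the second hard register of the pair (the details depend on the
   target's register layout).  */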
/* Finally change pseudos that got hard registers into the
   corresponding hard registers and remove temporary clobbers.  */
void
lra_final_code_change (void)
{
  int i, hard_regno;
  basic_block bb;
  rtx_insn *insn, *curr;
  rtx set;
  int max_regno = max_reg_num ();

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    if (lra_reg_info[i].nrefs != 0
	&& (hard_regno = lra_get_regno_hard_regno (i)) >= 0)
      SET_REGNO (regno_reg_rtx[i], hard_regno);
  FOR_EACH_BB_FN (bb, cfun)
    FOR_BB_INSNS_SAFE (bb, insn, curr)
      if (INSN_P (insn))
	{
	  rtx pat = PATTERN (insn);

	  if (GET_CODE (pat) == USE && XEXP (pat, 0) == const1_rtx)
	    {
	      /* Remove markers to eliminate critical edges for jump insn
		 output reloads (see code in ira.cc::ira).  */
	      lra_invalidate_insn_data (insn);
	      delete_insn (insn);
	      continue;
	    }
	  if (GET_CODE (pat) == CLOBBER && LRA_TEMP_CLOBBER_P (pat))
	    {
	      /* Remove clobbers temporarily created in LRA.  We don't
		 need them anymore and don't want to waste compiler
		 time processing them in a few subsequent passes.  */
	      lra_invalidate_insn_data (insn);
	      delete_insn (insn);
	      continue;
	    }

	  /* IRA can generate move insns involving pseudos.  It is
	     better to remove them earlier to speed up the compiler a
	     bit.  It is also better to do it here as they might not
	     pass the final RTL check in LRA (e.g. an insn moving a
	     control register into itself).  So remove a useless move
	     insn unless the next insn is a USE marking the return reg
	     (we should keep such insns because some subsequent
	     optimizations assume that they are preserved).  */
	  if (NONJUMP_INSN_P (insn) && GET_CODE (pat) == SET
	      && REG_P (SET_SRC (pat)) && REG_P (SET_DEST (pat))
	      && REGNO (SET_SRC (pat)) == REGNO (SET_DEST (pat))
	      && REGNO (SET_SRC (pat)) >= FIRST_PSEUDO_REGISTER)
	    {
	      lra_invalidate_insn_data (insn);
	      delete_insn (insn);
	      continue;
	    }

	  lra_insn_recog_data_t id = lra_get_insn_recog_data (insn);
	  struct lra_insn_reg *reg;

	  for (reg = id->regs; reg != NULL; reg = reg->next)
	    if (reg->regno >= FIRST_PSEUDO_REGISTER
		&& lra_reg_info[reg->regno].nrefs == 0)
	      break;

	  if (reg != NULL)
	    {
	      /* Pseudos can still be in debug insns in some very rare
		 and complicated cases, e.g. the pseudo was removed by
		 inheritance and the debug insn is not in the EBBs
		 where the inheritance happened.  It is difficult and
		 time consuming to find what hard register corresponds
		 to the pseudo -- so just remove the debug insn.
		 Another solution could be assigning a hard reg or
		 memory, but it would be misleading info.  It is
		 better not to have info than to have it wrong.  */
	      lra_assert (DEBUG_INSN_P (insn));
	      lra_invalidate_insn_data (insn);
	      delete_insn (insn);
	      continue;
	    }

	  struct lra_static_insn_data *static_id = id->insn_static_data;
	  bool insn_change_p = false;

	  for (i = id->insn_static_data->n_operands - 1; i >= 0; i--)
	    if ((DEBUG_INSN_P (insn) || ! static_id->operand[i].is_operator)
		&& alter_subregs (id->operand_loc[i], ! DEBUG_INSN_P (insn)))
	      {
		lra_update_dup (id, i);
		insn_change_p = true;
	      }
	  if ((GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
	      && alter_subregs (&XEXP (pat, 0), false))
	    insn_change_p = true;
	  if (insn_change_p)
	    lra_update_operator_dups (id);

	  if ((set = single_set (insn)) != NULL
	      && REG_P (SET_SRC (set)) && REG_P (SET_DEST (set))
	      && REGNO (SET_SRC (set)) == REGNO (SET_DEST (set)))
	    {
	      /* Remove a useless move insn.  IRA can generate move
		 insns involving pseudos.  It is better to remove them
		 earlier to speed up the compiler a bit.  It is also
		 better to do it here as they might not pass the final
		 RTL check in LRA (e.g. an insn moving a control
		 register into itself).  */
	      lra_invalidate_insn_data (insn);
	      delete_insn (insn);
	    }
	}
}