/* Change pseudos by memory.
   Copyright (C) 2010-2018 Free Software Foundation, Inc.
   Contributed by Vladimir Makarov <vmakarov@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file contains code for a pass to change spilled pseudos into
   memory.

   The pass creates the necessary stack slots and assigns spilled
   pseudos to the stack slots in the following way:

   for all spilled pseudos P, most frequently used first, do
     for all stack slots S do
       if P doesn't conflict with pseudos assigned to S then
	 assign S to P and go on to the next pseudo
     create a new stack slot S and assign P to S

   The actual algorithm is a bit more complicated because of the
   different sizes of the pseudos.

   After that the code replaces spilled pseudos (except the ones
   created from scratches) with the corresponding stack slot memory
   in RTL.

   If at least one stack slot was created, we need to run more passes
   because we have new addresses which should be checked and because
   the old address displacements might change and address constraints
   (or insn memory constraints) might not be satisfied any more.

   For some targets, the pass can spill some pseudos into hard
   registers of a different class (usually into vector registers)
   instead of spilling them into memory if that is possible and
   profitable.  Spilling GENERAL_REGS pseudos into SSE registers on
   Intel Core i7 is an example of such an optimization, and it is
   actually recommended by the Intel optimization guide.

   The file also contains the code that finally replaces pseudos
   which got hard registers with those hard registers.  */
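/* An illustrative example (not taken from any particular test case):
   suppose pseudos p1, p2 and p3 are spilled, listed from most to
   least frequently used, and p1 conflicts with p2 but not with p3.
   The algorithm above then creates two stack slots:

     slot 0: p1, p3	(p3 does not conflict with p1, so it reuses slot 0)
     slot 1: p2		(p2 conflicts with p1, so it needs a new slot)

   More frequently used pseudos thus end up in lower-numbered slots,
   which later get the smaller displacements.  */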
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "df.h"
#include "insn-config.h"
#include "regs.h"
#include "memmodel.h"
#include "ira.h"
#include "recog.h"
#include "output.h"
#include "cfgrtl.h"
#include "lra.h"
#include "lra-int.h"
/* Max regno at the start of the pass.  */
static int regs_num;

/* Map spilled regno -> hard regno used instead of memory for
   spilling the pseudo.  */
static rtx *spill_hard_reg;
/* The structure describes the stack slot of a spilled pseudo.  */
struct pseudo_slot
{
  /* Number (0, 1, ...) of the stack slot to which the given pseudo
     belongs.  */
  int slot_num;
  /* First or next slot with the same slot number.  */
  struct pseudo_slot *next, *first;
  /* Memory representing the spilled pseudo.  */
  rtx mem;
};

/* The stack slots for each spilled pseudo.  Indexed by regnos.  */
static struct pseudo_slot *pseudo_slots;
/* The structure describes a register or a stack slot which can be
   used for several spilled pseudos.  */
struct slot
{
  /* First pseudo with the given stack slot.  */
  int regno;
  /* Hard reg into which the slot pseudos are spilled.  The value is
     negative for pseudos spilled into memory.  */
  int hard_regno;
  /* Maximum alignment required by all users of the slot.  */
  unsigned int align;
  /* Maximum size required by all users of the slot.  */
  poly_int64 size;
  /* Memory representing the whole stack slot.  It can be different
     from the memory representing a pseudo belonging to the given
     stack slot because the pseudo can be placed in a part of the
     corresponding stack slot.  The value is NULL for pseudos spilled
     into a hard reg.  */
  rtx mem;
  /* Combined live ranges of all pseudos belonging to the given slot.
     It is used to figure out that a new spilled pseudo can use the
     given stack slot.  */
  lra_live_range_t live_ranges;
};

/* Array containing info about the stack slots.  The array element is
   indexed by the stack slot number in the range [0..slots_num).  */
static struct slot *slots;
/* The number of the stack slots currently existing.  */
static int slots_num;
/* Set up memory of the spilled pseudo I.  The function can allocate
   the corresponding stack slot if it is not done yet.  */
static void
assign_mem_slot (int i)
{
  rtx x = NULL_RTX;
  machine_mode mode = GET_MODE (regno_reg_rtx[i]);
  poly_int64 inherent_size = PSEUDO_REGNO_BYTES (i);
  machine_mode wider_mode
    = wider_subreg_mode (mode, lra_reg_info[i].biggest_mode);
  poly_int64 total_size = GET_MODE_SIZE (wider_mode);
  poly_int64 adjust = 0;

  lra_assert (regno_reg_rtx[i] != NULL_RTX && REG_P (regno_reg_rtx[i])
	      && lra_reg_info[i].nrefs != 0 && reg_renumber[i] < 0);

  unsigned int slot_num = pseudo_slots[i].slot_num;
  x = slots[slot_num].mem;
  if (! x)
    {
      x = assign_stack_local (BLKmode, slots[slot_num].size,
			      slots[slot_num].align);
      slots[slot_num].mem = x;
    }

  /* On a big endian machine, the "address" of the slot is the address
     of the low part that fits its inherent mode.  */
  adjust += subreg_size_lowpart_offset (inherent_size, total_size);
  x = adjust_address_nv (x, GET_MODE (regno_reg_rtx[i]), adjust);

  /* Set all of the memory attributes as appropriate for a spill.  */
  set_mem_attrs_for_spill (x);
  pseudo_slots[i].mem = x;
}
/* Sort pseudos according to their usage frequencies.  */
static int
regno_freq_compare (const void *v1p, const void *v2p)
{
  const int regno1 = *(const int *) v1p;
  const int regno2 = *(const int *) v2p;
  int diff;

  if ((diff = lra_reg_info[regno2].freq - lra_reg_info[regno1].freq) != 0)
    return diff;
  return regno1 - regno2;
}
/* Sort pseudos according to their slots, putting the slots in the order
   that they should be allocated.

   First prefer to group slots with variable sizes together and slots
   with constant sizes together, since that usually makes them easier
   to address from a common anchor point.  E.g. loads of polynomial-sized
   registers tend to take polynomial offsets while loads of constant-sized
   registers tend to take constant (non-polynomial) offsets.

   Next, slots with lower numbers have the highest priority and should
   get the smallest displacement from the stack or frame pointer
   (whichever is being used).

   The first allocated slot is always closest to the frame pointer,
   so prefer lower slot numbers when frame_pointer_needed.  If the stack
   and frame grow in the same direction, then the first allocated slot is
   always closest to the initial stack pointer and furthest away from the
   final stack pointer, so allocate higher numbers first when using the
   stack pointer in that case.  The reverse is true if the stack and
   frame grow in opposite directions.  */
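/* For example, when frame_pointer_needed, two constant-sized slots
   are allocated in the order 0, 1, so slot 0 (holding the most
   frequently used pseudos) lands closest to the frame pointer and
   gets the smallest displacement.  When the stack pointer is used
   instead and the stack and frame grow in the same direction, the
   order is reversed so that slot 0 still ends up closest to the
   final stack pointer.  */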
static int
pseudo_reg_slot_compare (const void *v1p, const void *v2p)
{
  const int regno1 = *(const int *) v1p;
  const int regno2 = *(const int *) v2p;
  int diff, slot_num1, slot_num2;

  slot_num1 = pseudo_slots[regno1].slot_num;
  slot_num2 = pseudo_slots[regno2].slot_num;
  diff = (int (slots[slot_num1].size.is_constant ())
	  - int (slots[slot_num2].size.is_constant ()));
  if (diff != 0)
    return diff;
  if ((diff = slot_num1 - slot_num2) != 0)
    return (frame_pointer_needed
	    || (!FRAME_GROWS_DOWNWARD) == STACK_GROWS_DOWNWARD ? diff : -diff);
  poly_int64 total_size1 = GET_MODE_SIZE (lra_reg_info[regno1].biggest_mode);
  poly_int64 total_size2 = GET_MODE_SIZE (lra_reg_info[regno2].biggest_mode);
  if ((diff = compare_sizes_for_sort (total_size2, total_size1)) != 0)
    return diff;
  return regno1 - regno2;
}
/* Assign spill hard registers to N pseudos in PSEUDO_REGNOS which is
   sorted in order of highest frequency first.  Put the pseudos which
   did not get a spill hard register at the beginning of array
   PSEUDO_REGNOS.  Return the number of such pseudos.  */
static int
assign_spill_hard_regs (int *pseudo_regnos, int n)
{
  int i, k, p, regno, res, spill_class_size, hard_regno, nr;
  enum reg_class rclass, spill_class;
  machine_mode mode;
  lra_live_range_t r;
  rtx_insn *insn;
  rtx set;
  basic_block bb;
  HARD_REG_SET conflict_hard_regs;
  bitmap setjump_crosses = regstat_get_setjmp_crosses ();
  /* Hard registers which cannot be used for any purpose at a given
     program point because they are unallocatable or already allocated
     for other pseudos.  */
  HARD_REG_SET *reserved_hard_regs;

  if (! lra_reg_spill_p)
    return n;
  /* Set up reserved hard regs for every program point.  */
  reserved_hard_regs = XNEWVEC (HARD_REG_SET, lra_live_max_point);
  for (p = 0; p < lra_live_max_point; p++)
    COPY_HARD_REG_SET (reserved_hard_regs[p], lra_no_alloc_regs);
  for (i = FIRST_PSEUDO_REGISTER; i < regs_num; i++)
    if (lra_reg_info[i].nrefs != 0
	&& (hard_regno = lra_get_regno_hard_regno (i)) >= 0)
      for (r = lra_reg_info[i].live_ranges; r != NULL; r = r->next)
	for (p = r->start; p <= r->finish; p++)
	  add_to_hard_reg_set (&reserved_hard_regs[p],
			       lra_reg_info[i].biggest_mode, hard_regno);
  auto_bitmap ok_insn_bitmap (&reg_obstack);
  FOR_EACH_BB_FN (bb, cfun)
    FOR_BB_INSNS (bb, insn)
      if (DEBUG_INSN_P (insn)
	  || ((set = single_set (insn)) != NULL_RTX
	      && REG_P (SET_SRC (set)) && REG_P (SET_DEST (set))))
	bitmap_set_bit (ok_insn_bitmap, INSN_UID (insn));
  for (res = i = 0; i < n; i++)
    {
      regno = pseudo_regnos[i];
      rclass = lra_get_allocno_class (regno);
      if (bitmap_bit_p (setjump_crosses, regno)
	  || (spill_class
	      = ((enum reg_class)
		 targetm.spill_class ((reg_class_t) rclass,
				      PSEUDO_REGNO_MODE (regno)))) == NO_REGS
	  || bitmap_intersect_compl_p (&lra_reg_info[regno].insn_bitmap,
				       ok_insn_bitmap))
	{
	  pseudo_regnos[res++] = regno;
	  continue;
	}
      lra_assert (spill_class != NO_REGS);
      COPY_HARD_REG_SET (conflict_hard_regs,
			 lra_reg_info[regno].conflict_hard_regs);
      for (r = lra_reg_info[regno].live_ranges; r != NULL; r = r->next)
	for (p = r->start; p <= r->finish; p++)
	  IOR_HARD_REG_SET (conflict_hard_regs, reserved_hard_regs[p]);
      spill_class_size = ira_class_hard_regs_num[spill_class];
      mode = lra_reg_info[regno].biggest_mode;
      for (k = 0; k < spill_class_size; k++)
	{
	  hard_regno = ira_class_hard_regs[spill_class][k];
	  if (! overlaps_hard_reg_set_p (conflict_hard_regs, mode, hard_regno))
	    break;
	}
      if (k >= spill_class_size)
	{
	  /* There are no available regs -- assign memory later.  */
	  pseudo_regnos[res++] = regno;
	  continue;
	}
      if (lra_dump_file != NULL)
	fprintf (lra_dump_file, "  Spill r%d into hr%d\n", regno, hard_regno);
      /* Update reserved_hard_regs.  */
      for (r = lra_reg_info[regno].live_ranges; r != NULL; r = r->next)
	for (p = r->start; p <= r->finish; p++)
	  add_to_hard_reg_set (&reserved_hard_regs[p],
			       lra_reg_info[regno].biggest_mode, hard_regno);
      spill_hard_reg[regno]
	= gen_raw_REG (PSEUDO_REGNO_MODE (regno), hard_regno);
      for (nr = 0;
	   nr < hard_regno_nregs (hard_regno,
				  lra_reg_info[regno].biggest_mode);
	   nr++)
	df_set_regs_ever_live (hard_regno + nr, true);
    }
  free (reserved_hard_regs);
  return res;
}
/* Add pseudo REGNO to slot SLOT_NUM.  */
static void
add_pseudo_to_slot (int regno, int slot_num)
{
  struct pseudo_slot *first;

  /* Each pseudo has an inherent size which comes from its own mode,
     and a total size which provides room for paradoxical subregs.
     We need to make sure the size and alignment of the slot are
     sufficient for both.  */
  machine_mode mode = wider_subreg_mode (PSEUDO_REGNO_MODE (regno),
					 lra_reg_info[regno].biggest_mode);
  unsigned int align = spill_slot_alignment (mode);
  slots[slot_num].align = MAX (slots[slot_num].align, align);
  slots[slot_num].size = upper_bound (slots[slot_num].size,
				      GET_MODE_SIZE (mode));

  if (slots[slot_num].regno < 0)
    {
      /* It is the first pseudo in the slot.  */
      slots[slot_num].regno = regno;
      pseudo_slots[regno].first = &pseudo_slots[regno];
      pseudo_slots[regno].next = NULL;
    }
  else
    {
      first = pseudo_slots[regno].first = &pseudo_slots[slots[slot_num].regno];
      pseudo_slots[regno].next = first->next;
      first->next = &pseudo_slots[regno];
    }
  pseudo_slots[regno].mem = NULL_RTX;
  pseudo_slots[regno].slot_num = slot_num;
  slots[slot_num].live_ranges
    = lra_merge_live_ranges (slots[slot_num].live_ranges,
			     lra_copy_live_range_list
			     (lra_reg_info[regno].live_ranges));
}
/* Assign stack slot numbers to pseudos in array PSEUDO_REGNOS of
   length N.  Sort pseudos in PSEUDO_REGNOS for the subsequent
   assignment of memory stack slots.  */
static void
assign_stack_slot_num_and_sort_pseudos (int *pseudo_regnos, int n)
{
  int i, j, regno;

  slots_num = 0;
  /* Assign stack slot numbers to spilled pseudos, using smaller numbers
     for the most frequently used pseudos.  */
  for (i = 0; i < n; i++)
    {
      regno = pseudo_regnos[i];
      if (! flag_ira_share_spill_slots)
	j = slots_num;
      else
	{
	  machine_mode mode
	    = wider_subreg_mode (PSEUDO_REGNO_MODE (regno),
				 lra_reg_info[regno].biggest_mode);
	  for (j = 0; j < slots_num; j++)
	    if (slots[j].hard_regno < 0
		/* Although it's possible to share slots between modes
		   with constant and non-constant widths, we usually
		   get better spill code by keeping the constant and
		   non-constant areas separate.  */
		&& (GET_MODE_SIZE (mode).is_constant ()
		    == slots[j].size.is_constant ())
		&& ! (lra_intersected_live_ranges_p
		      (slots[j].live_ranges,
		       lra_reg_info[regno].live_ranges)))
	      break;
	}
      if (j >= slots_num)
	{
	  /* New slot.  */
	  slots[j].live_ranges = NULL;
	  slots[j].size = 0;
	  slots[j].align = BITS_PER_UNIT;
	  slots[j].regno = slots[j].hard_regno = -1;
	  slots[j].mem = NULL_RTX;
	  slots_num++;
	}
      add_pseudo_to_slot (regno, j);
    }
  /* Sort regnos according to their slot numbers.  */
  qsort (pseudo_regnos, n, sizeof (int), pseudo_reg_slot_compare);
}
/* Recursively process LOC in INSN and change spilled pseudos to the
   corresponding memory or spilled hard reg.  Ignore spilled pseudos
   created from the scratches.  Return true if the pseudo's nrefs is
   equal to 0 (don't change the pseudo in this case).  Otherwise
   return false.  */
static bool
remove_pseudos (rtx *loc, rtx_insn *insn)
{
  int i;
  rtx hard_reg;
  const char *fmt;
  enum rtx_code code;
  bool res = false;

  if (*loc == NULL_RTX)
    return res;
  code = GET_CODE (*loc);
  if (code == REG && (i = REGNO (*loc)) >= FIRST_PSEUDO_REGISTER
      && lra_get_regno_hard_regno (i) < 0
      /* We do not want to assign memory for former scratches because
	 it might result in an address reload for some targets.  In
	 any case we transform such pseudos not getting hard registers
	 back into scratches.  */
      && ! lra_former_scratch_p (i))
    {
      if (lra_reg_info[i].nrefs == 0
	  && pseudo_slots[i].mem == NULL && spill_hard_reg[i] == NULL)
	return true;
      if ((hard_reg = spill_hard_reg[i]) != NULL_RTX)
	*loc = copy_rtx (hard_reg);
      else
	{
	  rtx x = lra_eliminate_regs_1 (insn, pseudo_slots[i].mem,
					GET_MODE (pseudo_slots[i].mem),
					false, false, 0, true);
	  *loc = x != pseudo_slots[i].mem ? x : copy_rtx (x);
	}
      return res;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	res = remove_pseudos (&XEXP (*loc, i), insn) || res;
      else if (fmt[i] == 'E')
	{
	  int j;

	  for (j = XVECLEN (*loc, i) - 1; j >= 0; j--)
	    res = remove_pseudos (&XVECEXP (*loc, i, j), insn) || res;
	}
    }
  return res;
}
/* Convert spilled pseudos into their stack slots or spill hard regs,
   and put the insns to process on the constraint stack (that is all
   insns in which pseudos were changed to memory or spill hard regs).  */
static void
spill_pseudos (void)
{
  basic_block bb;
  rtx_insn *insn, *curr;
  int i;

  auto_bitmap spilled_pseudos (&reg_obstack);
  auto_bitmap changed_insns (&reg_obstack);
  for (i = FIRST_PSEUDO_REGISTER; i < regs_num; i++)
    {
      if (lra_reg_info[i].nrefs != 0 && lra_get_regno_hard_regno (i) < 0
	  && ! lra_former_scratch_p (i))
	{
	  bitmap_set_bit (spilled_pseudos, i);
	  bitmap_ior_into (changed_insns, &lra_reg_info[i].insn_bitmap);
	}
    }
  FOR_EACH_BB_FN (bb, cfun)
    {
      FOR_BB_INSNS_SAFE (bb, insn, curr)
	{
	  bool removed_pseudo_p = false;

	  if (bitmap_bit_p (changed_insns, INSN_UID (insn)))
	    {
	      rtx *link_loc, link;

	      removed_pseudo_p = remove_pseudos (&PATTERN (insn), insn);
	      if (CALL_P (insn)
		  && remove_pseudos (&CALL_INSN_FUNCTION_USAGE (insn), insn))
		removed_pseudo_p = true;
	      for (link_loc = &REG_NOTES (insn);
		   (link = *link_loc) != NULL_RTX;
		   link_loc = &XEXP (link, 1))
		switch (REG_NOTE_KIND (link))
		  {
		  case REG_FRAME_RELATED_EXPR:
		  case REG_CFA_DEF_CFA:
		  case REG_CFA_ADJUST_CFA:
		  case REG_CFA_OFFSET:
		  case REG_CFA_REGISTER:
		  case REG_CFA_EXPRESSION:
		  case REG_CFA_RESTORE:
		  case REG_CFA_SET_VDRAP:
		    if (remove_pseudos (&XEXP (link, 0), insn))
		      removed_pseudo_p = true;
		    break;
		  default:
		    break;
		  }
	      if (lra_dump_file != NULL)
		fprintf (lra_dump_file,
			 "Changing spilled pseudos to memory in insn #%u\n",
			 INSN_UID (insn));
	      lra_push_insn (insn);
	      if (lra_reg_spill_p || targetm.different_addr_displacement_p ())
		lra_set_used_insn_alternative (insn, -1);
	    }
	  else if (CALL_P (insn)
		   /* Presence of any pseudo in CALL_INSN_FUNCTION_USAGE
		      does not affect the value of insn_bitmap of the
		      corresponding lra_reg_info.  That is because we
		      don't need to reload pseudos in
		      CALL_INSN_FUNCTION_USAGEs.  So if we process only
		      the insns in the insn_bitmap of a given pseudo
		      here, we can miss the pseudo in some
		      CALL_INSN_FUNCTION_USAGEs.  */
		   && remove_pseudos (&CALL_INSN_FUNCTION_USAGE (insn), insn))
	    removed_pseudo_p = true;
	  if (removed_pseudo_p)
	    {
	      lra_assert (DEBUG_INSN_P (insn));
	      lra_invalidate_insn_data (insn);
	      INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
	      if (lra_dump_file != NULL)
		fprintf (lra_dump_file,
			 "Debug insn #%u is reset because it referenced "
			 "removed pseudo\n", INSN_UID (insn));
	    }
	}
      bitmap_and_compl_into (df_get_live_in (bb), spilled_pseudos);
      bitmap_and_compl_into (df_get_live_out (bb), spilled_pseudos);
    }
}
/* Return true if we need to change some pseudos into memory.  */
bool
lra_need_for_spills_p (void)
{
  int i; max_regno = max_reg_num ();

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    if (lra_reg_info[i].nrefs != 0 && lra_get_regno_hard_regno (i) < 0
	&& ! lra_former_scratch_p (i))
      return true;
  return false;
}
/* Change spilled pseudos into memory or spill hard regs.  Put the
   changed insns on the constraint stack (these insns will be
   considered on the next constraint pass).  The changed insns are
   all insns in which pseudos were changed.  */
void
lra_spill (void)
{
  int i, n, curr_regno;
  int *pseudo_regnos;

  regs_num = max_reg_num ();
  spill_hard_reg = XNEWVEC (rtx, regs_num);
  pseudo_regnos = XNEWVEC (int, regs_num);
  for (n = 0, i = FIRST_PSEUDO_REGISTER; i < regs_num; i++)
    if (lra_reg_info[i].nrefs != 0 && lra_get_regno_hard_regno (i) < 0
	/* We do not want to assign memory for former scratches.  */
	&& ! lra_former_scratch_p (i))
      pseudo_regnos[n++] = i;
  lra_assert (n > 0);
  pseudo_slots = XNEWVEC (struct pseudo_slot, regs_num);
  for (i = FIRST_PSEUDO_REGISTER; i < regs_num; i++)
    {
      spill_hard_reg[i] = NULL_RTX;
      pseudo_slots[i].mem = NULL_RTX;
    }
  slots = XNEWVEC (struct slot, regs_num);
  /* Sort regnos according to their usage frequencies.  */
  qsort (pseudo_regnos, n, sizeof (int), regno_freq_compare);
  n = assign_spill_hard_regs (pseudo_regnos, n);
  assign_stack_slot_num_and_sort_pseudos (pseudo_regnos, n);
  for (i = 0; i < n; i++)
    if (pseudo_slots[pseudo_regnos[i]].mem == NULL_RTX)
      assign_mem_slot (pseudo_regnos[i]);
  if (n > 0 && crtl->stack_alignment_needed)
    /* If we have a stack frame, we must align it now.  The stack size
       may be a part of the offset computation for register
       elimination.  */
    assign_stack_local (BLKmode, 0, crtl->stack_alignment_needed);
  if (lra_dump_file != NULL)
    {
      for (i = 0; i < slots_num; i++)
	{
	  fprintf (lra_dump_file, "  Slot %d regnos (width = ", i);
	  print_dec (GET_MODE_SIZE (GET_MODE (slots[i].mem)),
		     lra_dump_file, SIGNED);
	  fprintf (lra_dump_file, "):");
	  for (curr_regno = slots[i].regno;;
	       curr_regno = pseudo_slots[curr_regno].next - pseudo_slots)
	    {
	      fprintf (lra_dump_file, " %d", curr_regno);
	      if (pseudo_slots[curr_regno].next == NULL)
		break;
	    }
	  fprintf (lra_dump_file, "\n");
	}
    }
  spill_pseudos ();
  free (slots);
  free (pseudo_slots);
  free (pseudo_regnos);
  free (spill_hard_reg);
}
/* Apply alter_subreg for subregs of regs in *LOC.  Use FINAL_P for
   alter_subreg calls.  Return true if any subreg of a reg is
   processed.  */
static bool
alter_subregs (rtx *loc, bool final_p)
{
  int i;
  rtx x = *loc;
  bool res;
  const char *fmt;
  enum rtx_code code;

  if (x == NULL_RTX)
    return false;
  code = GET_CODE (x);
  if (code == SUBREG && REG_P (SUBREG_REG (x)))
    {
      lra_assert (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER);
      alter_subreg (loc, final_p);
      return true;
    }
  fmt = GET_RTX_FORMAT (code);
  res = false;
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if (alter_subregs (&XEXP (x, i), final_p))
	    res = true;
	}
      else if (fmt[i] == 'E')
	{
	  int j;

	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (alter_subregs (&XVECEXP (x, i, j), final_p))
	      res = true;
	}
    }
  return res;
}
/* Return true if REGNO is used for return in the current
   function.  */
static bool
return_regno_p (unsigned int regno)
{
  rtx outgoing = crtl->return_rtx;

  if (! outgoing)
    return false;

  if (REG_P (outgoing))
    return REGNO (outgoing) == regno;
  else if (GET_CODE (outgoing) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (outgoing, 0); i++)
	{
	  rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);

	  if (REG_P (x) && REGNO (x) == regno)
	    return true;
	}
    }
  return false;
}
/* Return true if REGNO is in one of the subsequent USE insns after
   INSN in the same basic block.  */
static bool
regno_in_use_p (rtx_insn *insn, unsigned int regno)
{
  static lra_insn_recog_data_t id;
  static struct lra_static_insn_data *static_id;
  struct lra_insn_reg *reg;
  int i, arg_regno;
  basic_block bb = BLOCK_FOR_INSN (insn);

  while ((insn = next_nondebug_insn (insn)) != NULL_RTX)
    {
      if (BARRIER_P (insn) || bb != BLOCK_FOR_INSN (insn))
	return false;
      if (! INSN_P (insn))
	continue;
      if (GET_CODE (PATTERN (insn)) == USE
	  && REG_P (XEXP (PATTERN (insn), 0))
	  && regno == REGNO (XEXP (PATTERN (insn), 0)))
	return true;
      /* Check that the regno is not modified.  */
      id = lra_get_insn_recog_data (insn);
      for (reg = id->regs; reg != NULL; reg = reg->next)
	if (reg->type != OP_IN && reg->regno == (int) regno)
	  return false;
      static_id = id->insn_static_data;
      for (reg = static_id->hard_regs; reg != NULL; reg = reg->next)
	if (reg->type != OP_IN && reg->regno == (int) regno)
	  return false;
      if (id->arg_hard_regs != NULL)
	for (i = 0; (arg_regno = id->arg_hard_regs[i]) >= 0; i++)
	  if ((int) regno == (arg_regno >= FIRST_PSEUDO_REGISTER
			      ? arg_regno - FIRST_PSEUDO_REGISTER : arg_regno))
	    return false;
    }
  return false;
}
/* Final change of pseudos that got hard registers into the
   corresponding hard registers, and removal of temporary clobbers.  */
void
lra_final_code_change (void)
{
  int i, hard_regno;
  basic_block bb;
  rtx_insn *insn, *curr;
  int max_regno = max_reg_num ();

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    if (lra_reg_info[i].nrefs != 0
	&& (hard_regno = lra_get_regno_hard_regno (i)) >= 0)
      SET_REGNO (regno_reg_rtx[i], hard_regno);
  FOR_EACH_BB_FN (bb, cfun)
    FOR_BB_INSNS_SAFE (bb, insn, curr)
      if (INSN_P (insn))
	{
	  rtx pat = PATTERN (insn);

	  if (GET_CODE (pat) == CLOBBER && LRA_TEMP_CLOBBER_P (pat))
	    {
	      /* Remove clobbers temporarily created in LRA.  We don't
		 need them anymore and don't want to waste compiler
		 time processing them in a few subsequent passes.  */
	      lra_invalidate_insn_data (insn);
	      delete_insn (insn);
	      continue;
	    }

	  /* IRA can generate move insns involving pseudos.  It is
	     better to remove them earlier to speed up the compiler a
	     bit.  It is also better to do it here as they might not
	     pass the final RTL check in LRA (e.g. an insn moving a
	     control register into itself).  So remove a useless move
	     insn unless the next insn is a USE marking the return reg
	     (we should keep such an insn as some subsequent
	     optimizations assume that such original insns are
	     saved).  */
	  if (NONJUMP_INSN_P (insn) && GET_CODE (pat) == SET
	      && REG_P (SET_SRC (pat)) && REG_P (SET_DEST (pat))
	      && REGNO (SET_SRC (pat)) == REGNO (SET_DEST (pat))
	      && (! return_regno_p (REGNO (SET_SRC (pat)))
		  || ! regno_in_use_p (insn, REGNO (SET_SRC (pat)))))
	    {
	      lra_invalidate_insn_data (insn);
	      delete_insn (insn);
	      continue;
	    }

	  lra_insn_recog_data_t id = lra_get_insn_recog_data (insn);
	  struct lra_insn_reg *reg;

	  for (reg = id->regs; reg != NULL; reg = reg->next)
	    if (reg->regno >= FIRST_PSEUDO_REGISTER
		&& lra_reg_info[reg->regno].nrefs == 0)
	      break;

	  if (reg != NULL)
	    {
	      /* Pseudos can still be in debug insns in some very rare
		 and complicated cases, e.g. the pseudo was removed by
		 inheritance and the debug insn is not in the EBBs
		 where the inheritance happened.  It is difficult and
		 time consuming to find what hard register corresponds
		 to the pseudo -- so just remove the debug insn.
		 Another solution could be assigning a hard reg/memory
		 but it would be misleading info.  It is better not to
		 have info than to have it wrong.  */
	      lra_assert (DEBUG_INSN_P (insn));
	      lra_invalidate_insn_data (insn);
	      delete_insn (insn);
	      continue;
	    }

	  struct lra_static_insn_data *static_id = id->insn_static_data;
	  bool insn_change_p = false;

	  for (i = id->insn_static_data->n_operands - 1; i >= 0; i--)
	    if ((DEBUG_INSN_P (insn) || ! static_id->operand[i].is_operator)
		&& alter_subregs (id->operand_loc[i], ! DEBUG_INSN_P (insn)))
	      {
		lra_update_dup (id, i);
		insn_change_p = true;
	      }
	  if (insn_change_p)
	    lra_update_operator_dups (id);
	}
}