1 /* Change pseudos by memory.
2 Copyright (C) 2010-2015 Free Software Foundation, Inc.
3 Contributed by Vladimir Makarov <vmakarov@redhat.com>.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
22 /* This file contains code for a pass to change spilled pseudos into
25 The pass creates necessary stack slots and assigns spilled pseudos
26 to the stack slots in following way:
28 for all spilled pseudos P most frequently used first do
29 for all stack slots S do
30 if P doesn't conflict with pseudos assigned to S then
31 assign S to P and goto to the next pseudo process
34 create new stack slot S and assign P to S
37 The actual algorithm is bit more complicated because of different
40 After that the code changes spilled pseudos (except ones created
41 from scratches) by corresponding stack slot memory in RTL.
43 If at least one stack slot was created, we need to run more passes
44 because we have new addresses which should be checked and because
45 the old address displacements might change and address constraints
46 (or insn memory constraints) might not be satisfied any more.
48 For some targets, the pass can spill some pseudos into hard
49 registers of different class (usually into vector registers)
50 instead of spilling them into memory if it is possible and
51 profitable. Spilling GENERAL_REGS pseudo into SSE registers for
52 Intel Corei7 is an example of such optimization. And this is
53 actually recommended by Intel optimization guide.
55 The file also contains code for final change of pseudos on hard
56 regs correspondingly assigned to them. */
60 #include "coretypes.h"
64 #include "insn-config.h"
68 #include "hard-reg-set.h"
79 #include "dominance.h"
82 #include "basic-block.h"
91 /* Max regno at the start of the pass. */
94 /* Map spilled regno -> hard regno used instead of memory for
96 static rtx
*spill_hard_reg
;
98 /* The structure describes stack slot of a spilled pseudo. */
101 /* Number (0, 1, ...) of the stack slot to which given pseudo
104 /* First or next slot with the same slot number. */
105 struct pseudo_slot
*next
, *first
;
106 /* Memory representing the spilled pseudo. */
110 /* The stack slots for each spilled pseudo. Indexed by regnos. */
111 static struct pseudo_slot
*pseudo_slots
;
113 /* The structure describes a register or a stack slot which can be
114 used for several spilled pseudos. */
117 /* First pseudo with given stack slot. */
119 /* Hard reg into which the slot pseudos are spilled. The value is
120 negative for pseudos spilled into memory. */
122 /* Memory representing the all stack slot. It can be different from
123 memory representing a pseudo belonging to give stack slot because
124 pseudo can be placed in a part of the corresponding stack slot.
125 The value is NULL for pseudos spilled into a hard reg. */
127 /* Combined live ranges of all pseudos belonging to given slot. It
128 is used to figure out that a new spilled pseudo can use given
130 lra_live_range_t live_ranges
;
133 /* Array containing info about the stack slots. The array element is
134 indexed by the stack slot number in the range [0..slots_num). */
135 static struct slot
*slots
;
136 /* The number of the stack slots currently existing. */
137 static int slots_num
;
139 /* Set up memory of the spilled pseudo I. The function can allocate
140 the corresponding stack slot if it is not done yet. */
142 assign_mem_slot (int i
)
145 machine_mode mode
= GET_MODE (regno_reg_rtx
[i
]);
146 unsigned int inherent_size
= PSEUDO_REGNO_BYTES (i
);
147 unsigned int inherent_align
= GET_MODE_ALIGNMENT (mode
);
148 unsigned int max_ref_width
= GET_MODE_SIZE (lra_reg_info
[i
].biggest_mode
);
149 unsigned int total_size
= MAX (inherent_size
, max_ref_width
);
150 unsigned int min_align
= max_ref_width
* BITS_PER_UNIT
;
153 lra_assert (regno_reg_rtx
[i
] != NULL_RTX
&& REG_P (regno_reg_rtx
[i
])
154 && lra_reg_info
[i
].nrefs
!= 0 && reg_renumber
[i
] < 0);
156 x
= slots
[pseudo_slots
[i
].slot_num
].mem
;
158 /* We can use a slot already allocated because it is guaranteed the
159 slot provides both enough inherent space and enough total
163 /* Each pseudo has an inherent size which comes from its own mode,
164 and a total size which provides room for paradoxical subregs
165 which refer to the pseudo reg in wider modes. We allocate a new
166 slot, making sure that it has enough inherent space and total
172 /* No known place to spill from => no slot to reuse. */
173 x
= assign_stack_local (mode
, total_size
,
174 min_align
> inherent_align
175 || total_size
> inherent_size
? -1 : 0);
177 /* Cancel the big-endian correction done in assign_stack_local.
178 Get the address of the beginning of the slot. This is so we
179 can do a big-endian correction unconditionally below. */
180 if (BYTES_BIG_ENDIAN
)
182 adjust
= inherent_size
- total_size
;
185 = adjust_address_nv (x
,
186 mode_for_size (total_size
* BITS_PER_UNIT
,
190 slots
[pseudo_slots
[i
].slot_num
].mem
= stack_slot
;
193 /* On a big endian machine, the "address" of the slot is the address
194 of the low part that fits its inherent mode. */
195 if (BYTES_BIG_ENDIAN
&& inherent_size
< total_size
)
196 adjust
+= (total_size
- inherent_size
);
198 x
= adjust_address_nv (x
, GET_MODE (regno_reg_rtx
[i
]), adjust
);
200 /* Set all of the memory attributes as appropriate for a spill. */
201 set_mem_attrs_for_spill (x
);
202 pseudo_slots
[i
].mem
= x
;
205 /* Sort pseudos according their usage frequencies. */
207 regno_freq_compare (const void *v1p
, const void *v2p
)
209 const int regno1
= *(const int *) v1p
;
210 const int regno2
= *(const int *) v2p
;
213 if ((diff
= lra_reg_info
[regno2
].freq
- lra_reg_info
[regno1
].freq
) != 0)
215 return regno1
- regno2
;
218 /* Redefine STACK_GROWS_DOWNWARD in terms of 0 or 1. */
219 #ifdef STACK_GROWS_DOWNWARD
220 # undef STACK_GROWS_DOWNWARD
221 # define STACK_GROWS_DOWNWARD 1
223 # define STACK_GROWS_DOWNWARD 0
226 /* Sort pseudos according to their slots, putting the slots in the order
227 that they should be allocated. Slots with lower numbers have the highest
228 priority and should get the smallest displacement from the stack or
229 frame pointer (whichever is being used).
231 The first allocated slot is always closest to the frame pointer,
232 so prefer lower slot numbers when frame_pointer_needed. If the stack
233 and frame grow in the same direction, then the first allocated slot is
234 always closest to the initial stack pointer and furthest away from the
235 final stack pointer, so allocate higher numbers first when using the
236 stack pointer in that case. The reverse is true if the stack and
237 frame grow in opposite directions. */
239 pseudo_reg_slot_compare (const void *v1p
, const void *v2p
)
241 const int regno1
= *(const int *) v1p
;
242 const int regno2
= *(const int *) v2p
;
243 int diff
, slot_num1
, slot_num2
;
244 int total_size1
, total_size2
;
246 slot_num1
= pseudo_slots
[regno1
].slot_num
;
247 slot_num2
= pseudo_slots
[regno2
].slot_num
;
248 if ((diff
= slot_num1
- slot_num2
) != 0)
249 return (frame_pointer_needed
250 || (!FRAME_GROWS_DOWNWARD
) == STACK_GROWS_DOWNWARD
? diff
: -diff
);
251 total_size1
= GET_MODE_SIZE (lra_reg_info
[regno1
].biggest_mode
);
252 total_size2
= GET_MODE_SIZE (lra_reg_info
[regno2
].biggest_mode
);
253 if ((diff
= total_size2
- total_size1
) != 0)
255 return regno1
- regno2
;
258 /* Assign spill hard registers to N pseudos in PSEUDO_REGNOS which is
259 sorted in order of highest frequency first. Put the pseudos which
260 did not get a spill hard register at the beginning of array
261 PSEUDO_REGNOS. Return the number of such pseudos. */
263 assign_spill_hard_regs (int *pseudo_regnos
, int n
)
265 int i
, k
, p
, regno
, res
, spill_class_size
, hard_regno
, nr
;
266 enum reg_class rclass
, spill_class
;
272 HARD_REG_SET conflict_hard_regs
;
273 bitmap_head ok_insn_bitmap
;
274 bitmap setjump_crosses
= regstat_get_setjmp_crosses ();
275 /* Hard registers which can not be used for any purpose at given
276 program point because they are unallocatable or already allocated
277 for other pseudos. */
278 HARD_REG_SET
*reserved_hard_regs
;
280 if (! lra_reg_spill_p
)
282 /* Set up reserved hard regs for every program point. */
283 reserved_hard_regs
= XNEWVEC (HARD_REG_SET
, lra_live_max_point
);
284 for (p
= 0; p
< lra_live_max_point
; p
++)
285 COPY_HARD_REG_SET (reserved_hard_regs
[p
], lra_no_alloc_regs
);
286 for (i
= FIRST_PSEUDO_REGISTER
; i
< regs_num
; i
++)
287 if (lra_reg_info
[i
].nrefs
!= 0
288 && (hard_regno
= lra_get_regno_hard_regno (i
)) >= 0)
289 for (r
= lra_reg_info
[i
].live_ranges
; r
!= NULL
; r
= r
->next
)
290 for (p
= r
->start
; p
<= r
->finish
; p
++)
291 add_to_hard_reg_set (&reserved_hard_regs
[p
],
292 lra_reg_info
[i
].biggest_mode
, hard_regno
);
293 bitmap_initialize (&ok_insn_bitmap
, ®_obstack
);
294 FOR_EACH_BB_FN (bb
, cfun
)
295 FOR_BB_INSNS (bb
, insn
)
296 if (DEBUG_INSN_P (insn
)
297 || ((set
= single_set (insn
)) != NULL_RTX
298 && REG_P (SET_SRC (set
)) && REG_P (SET_DEST (set
))))
299 bitmap_set_bit (&ok_insn_bitmap
, INSN_UID (insn
));
300 for (res
= i
= 0; i
< n
; i
++)
302 regno
= pseudo_regnos
[i
];
303 rclass
= lra_get_allocno_class (regno
);
304 if (bitmap_bit_p (setjump_crosses
, regno
)
307 targetm
.spill_class ((reg_class_t
) rclass
,
308 PSEUDO_REGNO_MODE (regno
)))) == NO_REGS
309 || bitmap_intersect_compl_p (&lra_reg_info
[regno
].insn_bitmap
,
312 pseudo_regnos
[res
++] = regno
;
315 lra_assert (spill_class
!= NO_REGS
);
316 COPY_HARD_REG_SET (conflict_hard_regs
,
317 lra_reg_info
[regno
].conflict_hard_regs
);
318 for (r
= lra_reg_info
[regno
].live_ranges
; r
!= NULL
; r
= r
->next
)
319 for (p
= r
->start
; p
<= r
->finish
; p
++)
320 IOR_HARD_REG_SET (conflict_hard_regs
, reserved_hard_regs
[p
]);
321 spill_class_size
= ira_class_hard_regs_num
[spill_class
];
322 mode
= lra_reg_info
[regno
].biggest_mode
;
323 for (k
= 0; k
< spill_class_size
; k
++)
325 hard_regno
= ira_class_hard_regs
[spill_class
][k
];
326 if (! overlaps_hard_reg_set_p (conflict_hard_regs
, mode
, hard_regno
))
329 if (k
>= spill_class_size
)
331 /* There is no available regs -- assign memory later. */
332 pseudo_regnos
[res
++] = regno
;
335 if (lra_dump_file
!= NULL
)
336 fprintf (lra_dump_file
, " Spill r%d into hr%d\n", regno
, hard_regno
);
337 /* Update reserved_hard_regs. */
338 for (r
= lra_reg_info
[regno
].live_ranges
; r
!= NULL
; r
= r
->next
)
339 for (p
= r
->start
; p
<= r
->finish
; p
++)
340 add_to_hard_reg_set (&reserved_hard_regs
[p
],
341 lra_reg_info
[regno
].biggest_mode
, hard_regno
);
342 spill_hard_reg
[regno
]
343 = gen_raw_REG (PSEUDO_REGNO_MODE (regno
), hard_regno
);
345 nr
< hard_regno_nregs
[hard_regno
][lra_reg_info
[regno
].biggest_mode
];
348 df_set_regs_ever_live (hard_regno
+ nr
, true);
350 bitmap_clear (&ok_insn_bitmap
);
351 free (reserved_hard_regs
);
355 /* Add pseudo REGNO to slot SLOT_NUM. */
357 add_pseudo_to_slot (int regno
, int slot_num
)
359 struct pseudo_slot
*first
;
361 if (slots
[slot_num
].regno
< 0)
363 /* It is the first pseudo in the slot. */
364 slots
[slot_num
].regno
= regno
;
365 pseudo_slots
[regno
].first
= &pseudo_slots
[regno
];
366 pseudo_slots
[regno
].next
= NULL
;
370 first
= pseudo_slots
[regno
].first
= &pseudo_slots
[slots
[slot_num
].regno
];
371 pseudo_slots
[regno
].next
= first
->next
;
372 first
->next
= &pseudo_slots
[regno
];
374 pseudo_slots
[regno
].mem
= NULL_RTX
;
375 pseudo_slots
[regno
].slot_num
= slot_num
;
376 slots
[slot_num
].live_ranges
377 = lra_merge_live_ranges (slots
[slot_num
].live_ranges
,
378 lra_copy_live_range_list
379 (lra_reg_info
[regno
].live_ranges
));
382 /* Assign stack slot numbers to pseudos in array PSEUDO_REGNOS of
383 length N. Sort pseudos in PSEUDO_REGNOS for subsequent assigning
384 memory stack slots. */
386 assign_stack_slot_num_and_sort_pseudos (int *pseudo_regnos
, int n
)
391 /* Assign stack slot numbers to spilled pseudos, use smaller numbers
392 for most frequently used pseudos. */
393 for (i
= 0; i
< n
; i
++)
395 regno
= pseudo_regnos
[i
];
396 if (! flag_ira_share_spill_slots
)
400 for (j
= 0; j
< slots_num
; j
++)
401 if (slots
[j
].hard_regno
< 0
402 && ! (lra_intersected_live_ranges_p
403 (slots
[j
].live_ranges
,
404 lra_reg_info
[regno
].live_ranges
)))
410 slots
[j
].live_ranges
= NULL
;
411 slots
[j
].regno
= slots
[j
].hard_regno
= -1;
412 slots
[j
].mem
= NULL_RTX
;
415 add_pseudo_to_slot (regno
, j
);
417 /* Sort regnos according to their slot numbers. */
418 qsort (pseudo_regnos
, n
, sizeof (int), pseudo_reg_slot_compare
);
421 /* Recursively process LOC in INSN and change spilled pseudos to the
422 corresponding memory or spilled hard reg. Ignore spilled pseudos
423 created from the scratches. */
425 remove_pseudos (rtx
*loc
, rtx_insn
*insn
)
432 if (*loc
== NULL_RTX
)
434 code
= GET_CODE (*loc
);
435 if (code
== REG
&& (i
= REGNO (*loc
)) >= FIRST_PSEUDO_REGISTER
436 && lra_get_regno_hard_regno (i
) < 0
437 /* We do not want to assign memory for former scratches because
438 it might result in an address reload for some targets. In
439 any case we transform such pseudos not getting hard registers
440 into scratches back. */
441 && ! lra_former_scratch_p (i
))
443 if ((hard_reg
= spill_hard_reg
[i
]) != NULL_RTX
)
444 *loc
= copy_rtx (hard_reg
);
447 rtx x
= lra_eliminate_regs_1 (insn
, pseudo_slots
[i
].mem
,
448 GET_MODE (pseudo_slots
[i
].mem
),
449 0, false, false, true);
450 *loc
= x
!= pseudo_slots
[i
].mem
? x
: copy_rtx (x
);
455 fmt
= GET_RTX_FORMAT (code
);
456 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
459 remove_pseudos (&XEXP (*loc
, i
), insn
);
460 else if (fmt
[i
] == 'E')
464 for (j
= XVECLEN (*loc
, i
) - 1; j
>= 0; j
--)
465 remove_pseudos (&XVECEXP (*loc
, i
, j
), insn
);
470 /* Convert spilled pseudos into their stack slots or spill hard regs,
471 put insns to process on the constraint stack (that is all insns in
472 which pseudos were changed to memory or spill hard regs). */
479 bitmap_head spilled_pseudos
, changed_insns
;
481 bitmap_initialize (&spilled_pseudos
, ®_obstack
);
482 bitmap_initialize (&changed_insns
, ®_obstack
);
483 for (i
= FIRST_PSEUDO_REGISTER
; i
< regs_num
; i
++)
485 if (lra_reg_info
[i
].nrefs
!= 0 && lra_get_regno_hard_regno (i
) < 0
486 && ! lra_former_scratch_p (i
))
488 bitmap_set_bit (&spilled_pseudos
, i
);
489 bitmap_ior_into (&changed_insns
, &lra_reg_info
[i
].insn_bitmap
);
492 FOR_EACH_BB_FN (bb
, cfun
)
494 FOR_BB_INSNS (bb
, insn
)
495 if (bitmap_bit_p (&changed_insns
, INSN_UID (insn
)))
498 remove_pseudos (&PATTERN (insn
), insn
);
500 remove_pseudos (&CALL_INSN_FUNCTION_USAGE (insn
), insn
);
501 for (link_loc
= ®_NOTES (insn
);
502 (link
= *link_loc
) != NULL_RTX
;
503 link_loc
= &XEXP (link
, 1))
505 switch (REG_NOTE_KIND (link
))
507 case REG_FRAME_RELATED_EXPR
:
508 case REG_CFA_DEF_CFA
:
509 case REG_CFA_ADJUST_CFA
:
511 case REG_CFA_REGISTER
:
512 case REG_CFA_EXPRESSION
:
513 case REG_CFA_RESTORE
:
514 case REG_CFA_SET_VDRAP
:
515 remove_pseudos (&XEXP (link
, 0), insn
);
521 if (lra_dump_file
!= NULL
)
522 fprintf (lra_dump_file
,
523 "Changing spilled pseudos to memory in insn #%u\n",
525 lra_push_insn (insn
);
526 if (lra_reg_spill_p
|| targetm
.different_addr_displacement_p ())
527 lra_set_used_insn_alternative (insn
, -1);
529 else if (CALL_P (insn
))
530 /* Presence of any pseudo in CALL_INSN_FUNCTION_USAGE does
531 not affect value of insn_bitmap of the corresponding
532 lra_reg_info. That is because we don't need to reload
533 pseudos in CALL_INSN_FUNCTION_USAGEs. So if we process
534 only insns in the insn_bitmap of given pseudo here, we
535 can miss the pseudo in some
536 CALL_INSN_FUNCTION_USAGEs. */
537 remove_pseudos (&CALL_INSN_FUNCTION_USAGE (insn
), insn
);
538 bitmap_and_compl_into (df_get_live_in (bb
), &spilled_pseudos
);
539 bitmap_and_compl_into (df_get_live_out (bb
), &spilled_pseudos
);
541 bitmap_clear (&spilled_pseudos
);
542 bitmap_clear (&changed_insns
);
545 /* Return true if we need to change some pseudos into memory. */
547 lra_need_for_spills_p (void)
549 int i
; max_regno
= max_reg_num ();
551 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
552 if (lra_reg_info
[i
].nrefs
!= 0 && lra_get_regno_hard_regno (i
) < 0
553 && ! lra_former_scratch_p (i
))
558 /* Change spilled pseudos into memory or spill hard regs. Put changed
559 insns on the constraint stack (these insns will be considered on
560 the next constraint pass). The changed insns are all insns in
561 which pseudos were changed. */
565 int i
, n
, curr_regno
;
568 regs_num
= max_reg_num ();
569 spill_hard_reg
= XNEWVEC (rtx
, regs_num
);
570 pseudo_regnos
= XNEWVEC (int, regs_num
);
571 for (n
= 0, i
= FIRST_PSEUDO_REGISTER
; i
< regs_num
; i
++)
572 if (lra_reg_info
[i
].nrefs
!= 0 && lra_get_regno_hard_regno (i
) < 0
573 /* We do not want to assign memory for former scratches. */
574 && ! lra_former_scratch_p (i
))
576 spill_hard_reg
[i
] = NULL_RTX
;
577 pseudo_regnos
[n
++] = i
;
580 pseudo_slots
= XNEWVEC (struct pseudo_slot
, regs_num
);
581 slots
= XNEWVEC (struct slot
, regs_num
);
582 /* Sort regnos according their usage frequencies. */
583 qsort (pseudo_regnos
, n
, sizeof (int), regno_freq_compare
);
584 n
= assign_spill_hard_regs (pseudo_regnos
, n
);
585 assign_stack_slot_num_and_sort_pseudos (pseudo_regnos
, n
);
586 for (i
= 0; i
< n
; i
++)
587 if (pseudo_slots
[pseudo_regnos
[i
]].mem
== NULL_RTX
)
588 assign_mem_slot (pseudo_regnos
[i
]);
589 if (n
> 0 && crtl
->stack_alignment_needed
)
590 /* If we have a stack frame, we must align it now. The stack size
591 may be a part of the offset computation for register
593 assign_stack_local (BLKmode
, 0, crtl
->stack_alignment_needed
);
594 if (lra_dump_file
!= NULL
)
596 for (i
= 0; i
< slots_num
; i
++)
598 fprintf (lra_dump_file
, " Slot %d regnos (width = %d):", i
,
599 GET_MODE_SIZE (GET_MODE (slots
[i
].mem
)));
600 for (curr_regno
= slots
[i
].regno
;;
601 curr_regno
= pseudo_slots
[curr_regno
].next
- pseudo_slots
)
603 fprintf (lra_dump_file
, " %d", curr_regno
);
604 if (pseudo_slots
[curr_regno
].next
== NULL
)
607 fprintf (lra_dump_file
, "\n");
613 free (pseudo_regnos
);
614 free (spill_hard_reg
);
617 /* Apply alter_subreg for subregs of regs in *LOC. Use FINAL_P for
618 alter_subreg calls. Return true if any subreg of reg is
621 alter_subregs (rtx
*loc
, bool final_p
)
632 if (code
== SUBREG
&& REG_P (SUBREG_REG (x
)))
634 lra_assert (REGNO (SUBREG_REG (x
)) < FIRST_PSEUDO_REGISTER
);
635 alter_subreg (loc
, final_p
);
638 fmt
= GET_RTX_FORMAT (code
);
640 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
644 if (alter_subregs (&XEXP (x
, i
), final_p
))
647 else if (fmt
[i
] == 'E')
651 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
652 if (alter_subregs (&XVECEXP (x
, i
, j
), final_p
))
659 /* Return true if REGNO is used for return in the current
662 return_regno_p (unsigned int regno
)
664 rtx outgoing
= crtl
->return_rtx
;
669 if (REG_P (outgoing
))
670 return REGNO (outgoing
) == regno
;
671 else if (GET_CODE (outgoing
) == PARALLEL
)
675 for (i
= 0; i
< XVECLEN (outgoing
, 0); i
++)
677 rtx x
= XEXP (XVECEXP (outgoing
, 0, i
), 0);
679 if (REG_P (x
) && REGNO (x
) == regno
)
686 /* Final change of pseudos got hard registers into the corresponding
687 hard registers and removing temporary clobbers. */
689 lra_final_code_change (void)
693 rtx_insn
*insn
, *curr
;
694 int max_regno
= max_reg_num ();
696 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
697 if (lra_reg_info
[i
].nrefs
!= 0
698 && (hard_regno
= lra_get_regno_hard_regno (i
)) >= 0)
699 SET_REGNO (regno_reg_rtx
[i
], hard_regno
);
700 FOR_EACH_BB_FN (bb
, cfun
)
701 FOR_BB_INSNS_SAFE (bb
, insn
, curr
)
704 rtx pat
= PATTERN (insn
);
706 if (GET_CODE (pat
) == CLOBBER
&& LRA_TEMP_CLOBBER_P (pat
))
708 /* Remove clobbers temporarily created in LRA. We don't
709 need them anymore and don't want to waste compiler
710 time processing them in a few subsequent passes. */
711 lra_invalidate_insn_data (insn
);
716 /* IRA can generate move insns involving pseudos. It is
717 better remove them earlier to speed up compiler a bit.
718 It is also better to do it here as they might not pass
719 final RTL check in LRA, (e.g. insn moving a control
720 register into itself). So remove an useless move insn
721 unless next insn is USE marking the return reg (we should
722 save this as some subsequent optimizations assume that
723 such original insns are saved). */
724 if (NONJUMP_INSN_P (insn
) && GET_CODE (pat
) == SET
725 && REG_P (SET_SRC (pat
)) && REG_P (SET_DEST (pat
))
726 && REGNO (SET_SRC (pat
)) == REGNO (SET_DEST (pat
))
727 && ! return_regno_p (REGNO (SET_SRC (pat
))))
729 lra_invalidate_insn_data (insn
);
734 lra_insn_recog_data_t id
= lra_get_insn_recog_data (insn
);
735 struct lra_static_insn_data
*static_id
= id
->insn_static_data
;
736 bool insn_change_p
= false;
738 for (i
= id
->insn_static_data
->n_operands
- 1; i
>= 0; i
--)
739 if ((DEBUG_INSN_P (insn
) || ! static_id
->operand
[i
].is_operator
)
740 && alter_subregs (id
->operand_loc
[i
], ! DEBUG_INSN_P (insn
)))
742 lra_update_dup (id
, i
);
743 insn_change_p
= true;
746 lra_update_operator_dups (id
);