1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987-2016 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
35 #include "rtl-error.h"
37 #include "addresses.h"
45 /* This file contains the reload pass of the compiler, which is
46 run after register allocation has been done. It checks that
47 each insn is valid (operands required to be in registers really
48 are in registers of the proper class) and fixes up invalid ones
49 by copying values temporarily into registers for the insns
52 The results of register allocation are described by the vector
53 reg_renumber; the insns still contain pseudo regs, but reg_renumber
54 can be used to find which hard reg, if any, a pseudo reg is in.
56 The technique we always use is to free up a few hard regs that are
57 called ``reload regs'', and for each place where a pseudo reg
58 must be in a hard reg, copy it temporarily into one of the reload regs.
60 Reload regs are allocated locally for every instruction that needs
61 reloads. When there are pseudos which are allocated to a register that
62 has been chosen as a reload reg, such pseudos must be ``spilled''.
63 This means that they go to other hard regs, or to stack slots if no other
64 available hard regs can be found. Spilling can invalidate more
65 insns, requiring additional need for reloads, so we must keep checking
66 until the process stabilizes.
68 For machines with different classes of registers, we must keep track
69 of the register class needed for each reload, and make sure that
70 we allocate enough reload registers of each class.
72 The file reload.c contains the code that checks one insn for
73 validity and reports the reloads that it needs. This file
74 is in charge of scanning the entire rtl code, accumulating the
75 reload needs, spilling, assigning reload registers to use for
76 fixing up each insn, and generating the new insns to copy values
77 into the reload registers. */
/* Reload's per-target state.  DEFAULT_TARGET_RELOAD is the single
   static instance; THIS_TARGET_RELOAD points at the instance currently
   in effect (presumably repointed for switchable targets -- TODO
   confirm).  The macro below accesses its x_spill_indirect_levels
   field through that pointer.  */
79 struct target_reload default_target_reload
;
81 struct target_reload
*this_target_reload
= &default_target_reload
;
84 #define spill_indirect_levels \
85 (this_target_reload->x_spill_indirect_levels)
87 /* During reload_as_needed, element N contains a REG rtx for the hard reg
88 into which reg N has been reloaded (perhaps for a previous insn). */
89 static rtx
*reg_last_reload_reg
;
91 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
92 for an output reload that stores into reg N. */
93 static regset_head reg_has_output_reload
;
95 /* Indicates which hard regs are reload-registers for an output reload
96 in the current insn. */
97 static HARD_REG_SET reg_is_output_reload
;
99 /* Widest width in which each pseudo reg is referred to (via subreg). */
100 static unsigned int *reg_max_ref_width
;
102 /* Vector to remember old contents of reg_renumber before spilling. */
103 static short *reg_old_renumber
;
105 /* During reload_as_needed, element N contains the last pseudo regno reloaded
106 into hard register N. If that pseudo reg occupied more than one register,
107 reg_reloaded_contents points to that pseudo for each spill register in
108 use; all of these must remain set for an inheritance to occur. */
109 static int reg_reloaded_contents
[FIRST_PSEUDO_REGISTER
];
111 /* During reload_as_needed, element N contains the insn for which
112 hard register N was last used. Its contents are significant only
113 when reg_reloaded_valid is set for this register. */
114 static rtx_insn
*reg_reloaded_insn
[FIRST_PSEUDO_REGISTER
];
116 /* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid. */
117 static HARD_REG_SET reg_reloaded_valid
;
118 /* Indicate if the register was dead at the end of the reload.
119 This is only valid if reg_reloaded_contents is set and valid. */
120 static HARD_REG_SET reg_reloaded_dead
;
122 /* Indicate whether the register's current value is one that is not
123 safe to retain across a call, even for registers that are normally
124 call-saved. This is only meaningful for members of reg_reloaded_valid. */
125 static HARD_REG_SET reg_reloaded_call_part_clobbered
;
127 /* Number of spill-regs so far; number of valid elements of spill_regs. */
130 /* In parallel with spill_regs, contains REG rtx's for those regs.
131 Holds the last rtx used for any given reg, or 0 if it has never
132 been used for spilling yet. This rtx is reused, provided it has the proper mode. */
134 static rtx spill_reg_rtx
[FIRST_PSEUDO_REGISTER
];
136 /* In parallel with spill_regs, contains nonzero for a spill reg
137 that was stored after the last time it was used.
138 The precise value is the insn generated to do the store. */
139 static rtx_insn
*spill_reg_store
[FIRST_PSEUDO_REGISTER
];
141 /* This is the register that was stored with spill_reg_store. This is a
142 copy of reload_out / reload_out_reg when the value was stored; if
143 reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg. */
144 static rtx spill_reg_stored_to
[FIRST_PSEUDO_REGISTER
];
146 /* This table is the inverse mapping of spill_regs:
147 indexed by hard reg number,
148 it contains the position of that reg in spill_regs,
149 or -1 for something that is not in spill_regs.
151 ?!? This is no longer accurate. */
152 static short spill_reg_order
[FIRST_PSEUDO_REGISTER
];
154 /* This reg set indicates registers that can't be used as spill registers for
155 the currently processed insn. These are the hard registers which are live
156 during the insn, but not allocated to pseudos, as well as fixed registers. */
158 static HARD_REG_SET bad_spill_regs
;
160 /* These are the hard registers that can't be used as spill register for any
161 insn. This includes registers used for user variables and registers that
162 we can't eliminate. A register that appears in this set also can't be used
163 to retry register allocation. */
164 static HARD_REG_SET bad_spill_regs_global
;
166 /* Describes order of use of registers for reloading
167 of spilled pseudo-registers. `n_spills' is the number of
168 elements that are actually valid; new ones are added at the end.
170 Both spill_regs and spill_reg_order are used on two occasions:
171 once during find_reload_regs, where they keep track of the spill registers
172 for a single insn, but also during reload_as_needed where they show all
173 the registers ever used by reload. For the latter case, the information
174 is calculated during finish_spills. */
175 static short spill_regs
[FIRST_PSEUDO_REGISTER
];
177 /* This vector of reg sets indicates, for each pseudo, which hard registers
178 may not be used for retrying global allocation because the register was
179 formerly spilled from one of them. If we allowed reallocating a pseudo to
180 a register that it was already allocated to, reload might not terminate. */
182 static HARD_REG_SET
*pseudo_previous_regs
;
184 /* This vector of reg sets indicates, for each pseudo, which hard
185 registers may not be used for retrying global allocation because they
186 are used as spill registers during one of the insns in which the pseudo is live. */
188 static HARD_REG_SET
*pseudo_forbidden_regs
;
190 /* All hard regs that have been used as spill registers for any insn are
191 marked in this set. */
192 static HARD_REG_SET used_spill_regs
;
194 /* Index of last register assigned as a spill register. We allocate in
195 a round-robin fashion. */
196 static int last_spill_reg
;
198 /* Record the stack slot for each spilled hard register. */
199 static rtx spill_stack_slot
[FIRST_PSEUDO_REGISTER
];
201 /* Width allocated so far for that stack slot. */
202 static unsigned int spill_stack_slot_width
[FIRST_PSEUDO_REGISTER
];
204 /* Record which pseudos needed to be spilled. */
205 static regset_head spilled_pseudos
;
207 /* Record which pseudos changed their allocation in finish_spills. */
208 static regset_head changed_allocation_pseudos
;
210 /* Used for communication between order_regs_for_reload and count_pseudo.
211 Used to avoid counting one pseudo twice. */
212 static regset_head pseudos_counted
;
214 /* First uid used by insns created by reload in this function.
215 Used in find_equiv_reg. */
216 int reload_first_uid
;
218 /* Flag set by local-alloc or global-alloc if anything is live in
219 a call-clobbered reg across calls. */
220 int caller_save_needed
;
222 /* Set to 1 while reload_as_needed is operating.
223 Required by some machines to handle any generated moves differently. */
224 int reload_in_progress
= 0;
226 /* This obstack is used for allocation of rtl during register elimination.
227 The allocated storage can be freed once find_reloads has processed the insn. */
229 static struct obstack reload_obstack
;
231 /* Points to the beginning of the reload_obstack. All insn_chain structures
232 are allocated first. */
233 static char *reload_startobj
;
235 /* The point after all insn_chain structures. Used to quickly deallocate
236 memory allocated in copy_reloads during calculate_needs_all_insns. */
237 static char *reload_firstobj
;
239 /* This points before all local rtl generated by register elimination.
240 Used to quickly free all memory after processing one insn. */
241 static char *reload_insn_firstobj
;
243 /* List of insn_chain instructions, one for every insn that reload needs to examine. */
245 struct insn_chain
*reload_insn_chain
;
247 /* TRUE if we potentially left dead insns in the insn stream and want to
248 run DCE immediately after reload, FALSE otherwise. */
249 static bool need_dce
;
251 /* List of all insns needing reloads. */
252 static struct insn_chain
*insns_need_reload
;
254 /* This structure is used to record information about register eliminations.
255 Each array entry describes one possible way of eliminating a register
256 in favor of another. If there is more than one way of eliminating a
257 particular register, the most preferred should be specified first. */
261 int from
; /* Register number to be eliminated. */
262 int to
; /* Register number used as replacement. */
263 HOST_WIDE_INT initial_offset
; /* Initial difference between values. */
264 int can_eliminate
; /* Nonzero if this elimination can be done. */
265 int can_eliminate_previous
; /* Value returned by TARGET_CAN_ELIMINATE
266 target hook in previous scan over insns made by reload. */
268 HOST_WIDE_INT offset
; /* Current offset between the two regs. */
269 HOST_WIDE_INT previous_offset
;/* Offset at end of previous insn. */
270 int ref_outside_mem
; /* "to" has been referenced outside a MEM. */
271 rtx from_rtx
; /* REG rtx for the register to be eliminated.
272 We cannot simply compare the number since
273 we might then spuriously replace a hard
274 register corresponding to a pseudo
275 assigned to the reg to be eliminated. */
276 rtx to_rtx
; /* REG rtx for the replacement. */
279 static struct elim_table
*reg_eliminate
= 0;
281 /* This is an intermediate structure to initialize the table. It has
282 exactly the members provided by ELIMINABLE_REGS. */
283 static const struct elim_table_1
287 } reg_eliminate_1
[] =
289 /* If a set of eliminable registers was specified, define the table from it.
290 Otherwise, default to the normal case of the frame pointer being
291 replaced by the stack pointer. */
293 #ifdef ELIMINABLE_REGS
296 {{ FRAME_POINTER_REGNUM
, STACK_POINTER_REGNUM
}};
299 #define NUM_ELIMINABLE_REGS ARRAY_SIZE (reg_eliminate_1)
301 /* Record the number of pending eliminations that have an offset not equal
302 to their initial offset. If nonzero, we use a new copy of each
303 replacement result in any insns encountered. */
304 int num_not_at_initial_offset
;
306 /* Count the number of registers that we may be able to eliminate. */
307 static int num_eliminable
;
308 /* And the number of registers that are equivalent to a constant that
309 can be eliminated to frame_pointer / arg_pointer + constant. */
310 static int num_eliminable_invariants
;
312 /* For each label, we record the offset of each elimination. If we reach
313 a label by more than one path and an offset differs, we cannot do the
314 elimination. This information is indexed by the difference of the
315 number of the label and the first label number. We can't offset the
316 pointer itself as this can cause problems on machines with segmented
317 memory. The first table is an array of flags that records whether we
318 have yet encountered a label and the second table is an array of arrays,
319 one entry in the latter array for each elimination. */
321 static int first_label_num
;
322 static char *offsets_known_at
;
323 static HOST_WIDE_INT (*offsets_at
)[NUM_ELIMINABLE_REGS
];
325 vec
<reg_equivs_t
, va_gc
> *reg_equivs
;
327 /* Stack of addresses where an rtx has been changed. We can undo the
328 changes by popping items off the stack and restoring the original
329 value at each location.
331 We use this simplistic undo capability rather than copy_rtx as copy_rtx
332 will not make a deep copy of a normally sharable rtx, such as
333 (const (plus (symbol_ref) (const_int))). If such an expression appears
334 as R1 in gen_reload_chain_without_interm_reg_p, then a shared
335 rtx expression would be changed. See PR 42431. */
338 static vec
<rtx_p
> substitute_stack
;
340 /* Number of labels in the current function. */
342 static int num_labels
;
344 static void replace_pseudos_in (rtx
*, machine_mode
, rtx
);
345 static void maybe_fix_stack_asms (void);
346 static void copy_reloads (struct insn_chain
*);
347 static void calculate_needs_all_insns (int);
348 static int find_reg (struct insn_chain
*, int);
349 static void find_reload_regs (struct insn_chain
*);
350 static void select_reload_regs (void);
351 static void delete_caller_save_insns (void);
353 static void spill_failure (rtx_insn
*, enum reg_class
);
354 static void count_spilled_pseudo (int, int, int);
355 static void delete_dead_insn (rtx_insn
*);
356 static void alter_reg (int, int, bool);
357 static void set_label_offsets (rtx
, rtx_insn
*, int);
358 static void check_eliminable_occurrences (rtx
);
359 static void elimination_effects (rtx
, machine_mode
);
360 static rtx
eliminate_regs_1 (rtx
, machine_mode
, rtx
, bool, bool);
361 static int eliminate_regs_in_insn (rtx_insn
*, int);
362 static void update_eliminable_offsets (void);
363 static void mark_not_eliminable (rtx
, const_rtx
, void *);
364 static void set_initial_elim_offsets (void);
365 static bool verify_initial_elim_offsets (void);
366 static void set_initial_label_offsets (void);
367 static void set_offsets_for_label (rtx_insn
*);
368 static void init_eliminable_invariants (rtx_insn
*, bool);
369 static void init_elim_table (void);
370 static void free_reg_equiv (void);
371 static void update_eliminables (HARD_REG_SET
*);
372 static bool update_eliminables_and_spill (void);
373 static void elimination_costs_in_insn (rtx_insn
*);
374 static void spill_hard_reg (unsigned int, int);
375 static int finish_spills (int);
376 static void scan_paradoxical_subregs (rtx
);
377 static void count_pseudo (int);
378 static void order_regs_for_reload (struct insn_chain
*);
379 static void reload_as_needed (int);
380 static void forget_old_reloads_1 (rtx
, const_rtx
, void *);
381 static void forget_marked_reloads (regset
);
382 static int reload_reg_class_lower (const void *, const void *);
383 static void mark_reload_reg_in_use (unsigned int, int, enum reload_type
,
385 static void clear_reload_reg_in_use (unsigned int, int, enum reload_type
,
387 static int reload_reg_free_p (unsigned int, int, enum reload_type
);
388 static int reload_reg_free_for_value_p (int, int, int, enum reload_type
,
390 static int free_for_value_p (int, machine_mode
, int, enum reload_type
,
392 static int allocate_reload_reg (struct insn_chain
*, int, int);
393 static int conflicts_with_override (rtx
);
394 static void failed_reload (rtx_insn
*, int);
395 static int set_reload_reg (int, int);
396 static void choose_reload_regs_init (struct insn_chain
*, rtx
*);
397 static void choose_reload_regs (struct insn_chain
*);
398 static void emit_input_reload_insns (struct insn_chain
*, struct reload
*,
400 static void emit_output_reload_insns (struct insn_chain
*, struct reload
*,
402 static void do_input_reload (struct insn_chain
*, struct reload
*, int);
403 static void do_output_reload (struct insn_chain
*, struct reload
*, int);
404 static void emit_reload_insns (struct insn_chain
*);
405 static void delete_output_reload (rtx_insn
*, int, int, rtx
);
406 static void delete_address_reloads (rtx_insn
*, rtx_insn
*);
407 static void delete_address_reloads_1 (rtx_insn
*, rtx
, rtx_insn
*);
408 static void inc_for_reload (rtx
, rtx
, rtx
, int);
409 static void add_auto_inc_notes (rtx_insn
*, rtx
);
410 static void substitute (rtx
*, const_rtx
, rtx
);
411 static bool gen_reload_chain_without_interm_reg_p (int, int);
412 static int reloads_conflict (int, int);
413 static rtx_insn
*gen_reload (rtx
, rtx
, int, enum reload_type
);
414 static rtx_insn
*emit_insn_if_valid_for_reload (rtx
);
416 /* Initialize the reload pass. This is called at the beginning of compilation
417 and may be called again if the target is reinitialized. */
424 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
425 Set spill_indirect_levels to the number of levels such addressing is
426 permitted, zero if it is not permitted at all. */
429 = gen_rtx_MEM (Pmode
,
432 LAST_VIRTUAL_REGISTER
+ 1),
433 gen_int_mode (4, Pmode
)));
434 spill_indirect_levels
= 0;
436 while (memory_address_p (QImode
, tem
))
438 spill_indirect_levels
++;
439 tem
= gen_rtx_MEM (Pmode
, tem
);
442 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
444 tem
= gen_rtx_MEM (Pmode
, gen_rtx_SYMBOL_REF (Pmode
, "foo"));
445 indirect_symref_ok
= memory_address_p (QImode
, tem
);
447 /* See if reg+reg is a valid (and offsettable) address. */
449 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
451 tem
= gen_rtx_PLUS (Pmode
,
452 gen_rtx_REG (Pmode
, HARD_FRAME_POINTER_REGNUM
),
453 gen_rtx_REG (Pmode
, i
));
455 /* This way, we make sure that reg+reg is an offsettable address. */
456 tem
= plus_constant (Pmode
, tem
, 4);
458 if (memory_address_p (QImode
, tem
))
460 double_reg_address_ok
= 1;
465 /* Initialize obstack for our rtl allocation. */
466 if (reload_startobj
== NULL
)
468 gcc_obstack_init (&reload_obstack
);
469 reload_startobj
= XOBNEWVAR (&reload_obstack
, char, 0);
472 INIT_REG_SET (&spilled_pseudos
);
473 INIT_REG_SET (&changed_allocation_pseudos
);
474 INIT_REG_SET (&pseudos_counted
);
477 /* List of insn chains that are currently unused. */
478 static struct insn_chain
*unused_insn_chains
= 0;
480 /* Allocate an empty insn_chain structure. */
482 new_insn_chain (void)
484 struct insn_chain
*c
;
486 if (unused_insn_chains
== 0)
488 c
= XOBNEW (&reload_obstack
, struct insn_chain
);
489 INIT_REG_SET (&c
->live_throughout
);
490 INIT_REG_SET (&c
->dead_or_set
);
494 c
= unused_insn_chains
;
495 unused_insn_chains
= c
->next
;
497 c
->is_caller_save_insn
= 0;
498 c
->need_operand_change
= 0;
504 /* Small utility function to set all regs in hard reg set TO which are
505 allocated to pseudos in regset FROM. */
508 compute_use_by_pseudos (HARD_REG_SET
*to
, regset from
)
511 reg_set_iterator rsi
;
513 EXECUTE_IF_SET_IN_REG_SET (from
, FIRST_PSEUDO_REGISTER
, regno
, rsi
)
515 int r
= reg_renumber
[regno
];
519 /* reload_combine uses the information from DF_LIVE_IN,
520 which might still contain registers that have not
521 actually been allocated since they have an equivalence. */
523 gcc_assert (ira_conflicts_p
|| reload_completed
);
526 add_to_hard_reg_set (to
, PSEUDO_REGNO_MODE (regno
), r
);
530 /* Replace all pseudos found in LOC with their corresponding equivalences. */
534 replace_pseudos_in (rtx
*loc
, machine_mode mem_mode
, rtx usage
)
547 unsigned int regno
= REGNO (x
);
549 if (regno
< FIRST_PSEUDO_REGISTER
)
552 x
= eliminate_regs_1 (x
, mem_mode
, usage
, true, false);
556 replace_pseudos_in (loc
, mem_mode
, usage
);
560 if (reg_equiv_constant (regno
))
561 *loc
= reg_equiv_constant (regno
);
562 else if (reg_equiv_invariant (regno
))
563 *loc
= reg_equiv_invariant (regno
);
564 else if (reg_equiv_mem (regno
))
565 *loc
= reg_equiv_mem (regno
);
566 else if (reg_equiv_address (regno
))
567 *loc
= gen_rtx_MEM (GET_MODE (x
), reg_equiv_address (regno
));
570 gcc_assert (!REG_P (regno_reg_rtx
[regno
])
571 || REGNO (regno_reg_rtx
[regno
]) != regno
);
572 *loc
= regno_reg_rtx
[regno
];
577 else if (code
== MEM
)
579 replace_pseudos_in (& XEXP (x
, 0), GET_MODE (x
), usage
);
583 /* Process each of our operands recursively. */
584 fmt
= GET_RTX_FORMAT (code
);
585 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++, fmt
++)
587 replace_pseudos_in (&XEXP (x
, i
), mem_mode
, usage
);
588 else if (*fmt
== 'E')
589 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
590 replace_pseudos_in (& XVECEXP (x
, i
, j
), mem_mode
, usage
);
593 /* Determine if the current function has an exception receiver block
594 that reaches the exit block via non-exceptional edges */
597 has_nonexceptional_receiver (void)
601 basic_block
*tos
, *worklist
, bb
;
603 /* If we're not optimizing, then just err on the safe side. */
607 /* First determine which blocks can reach exit via normal paths. */
608 tos
= worklist
= XNEWVEC (basic_block
, n_basic_blocks_for_fn (cfun
) + 1);
610 FOR_EACH_BB_FN (bb
, cfun
)
611 bb
->flags
&= ~BB_REACHABLE
;
613 /* Place the exit block on our worklist. */
614 EXIT_BLOCK_PTR_FOR_FN (cfun
)->flags
|= BB_REACHABLE
;
615 *tos
++ = EXIT_BLOCK_PTR_FOR_FN (cfun
);
617 /* Iterate: find everything reachable from what we've already seen. */
618 while (tos
!= worklist
)
622 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
623 if (!(e
->flags
& EDGE_ABNORMAL
))
625 basic_block src
= e
->src
;
627 if (!(src
->flags
& BB_REACHABLE
))
629 src
->flags
|= BB_REACHABLE
;
636 /* Now see if there's a reachable block with an exceptional incoming edge. */
638 FOR_EACH_BB_FN (bb
, cfun
)
639 if (bb
->flags
& BB_REACHABLE
&& bb_has_abnormal_pred (bb
))
642 /* No exceptional block reached exit unexceptionally. */
646 /* Grow (or allocate) the REG_EQUIVS array from its current size (which may be
647 zero elements) to MAX_REG_NUM elements.
649 Initialize all new fields to NULL and update REG_EQUIVS_SIZE. */
651 grow_reg_equivs (void)
653 int old_size
= vec_safe_length (reg_equivs
);
654 int max_regno
= max_reg_num ();
658 memset (&ze
, 0, sizeof (reg_equivs_t
));
659 vec_safe_reserve (reg_equivs
, max_regno
);
660 for (i
= old_size
; i
< max_regno
; i
++)
661 reg_equivs
->quick_insert (i
, ze
);
665 /* Global variables used by reload and its subroutines. */
667 /* The current basic block while in calculate_elim_costs_all_insns. */
668 static basic_block elim_bb
;
670 /* Set during calculate_needs if an insn needs register elimination. */
671 static int something_needs_elimination
;
672 /* Set during calculate_needs if an insn needs an operand changed. */
673 static int something_needs_operands_changed
;
674 /* Set by alter_regs if we spilled a register to the stack. */
675 static bool something_was_spilled
;
677 /* Nonzero means we couldn't get enough spill regs. */
680 /* Temporary array of pseudo-register number. */
681 static int *temp_pseudo_reg_arr
;
683 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
684 If that insn didn't set the register (i.e., it copied the register to
685 memory), just delete that insn instead of the equivalencing insn plus
686 anything now dead. If we call delete_dead_insn on that insn, we may
687 delete the insn that actually sets the register if the register dies
688 there and that is incorrect. */
692 for (int i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
694 if (reg_renumber
[i
] < 0 && reg_equiv_init (i
) != 0)
697 for (list
= reg_equiv_init (i
); list
; list
= XEXP (list
, 1))
699 rtx_insn
*equiv_insn
= as_a
<rtx_insn
*> (XEXP (list
, 0));
701 /* If we already deleted the insn or if it may trap, we can't
702 delete it. The latter case shouldn't happen, but can
703 if an insn has a variable address, gets a REG_EH_REGION
704 note added to it, and then gets converted into a load
705 from a constant address. */
706 if (NOTE_P (equiv_insn
)
707 || can_throw_internal (equiv_insn
))
709 else if (reg_set_p (regno_reg_rtx
[i
], PATTERN (equiv_insn
)))
710 delete_dead_insn (equiv_insn
);
712 SET_INSN_DELETED (equiv_insn
);
718 /* Return true if remove_init_insns will delete INSN. */
720 will_delete_init_insn_p (rtx_insn
*insn
)
722 rtx set
= single_set (insn
);
723 if (!set
|| !REG_P (SET_DEST (set
)))
725 unsigned regno
= REGNO (SET_DEST (set
));
727 if (can_throw_internal (insn
))
730 if (regno
< FIRST_PSEUDO_REGISTER
|| reg_renumber
[regno
] >= 0)
733 for (rtx list
= reg_equiv_init (regno
); list
; list
= XEXP (list
, 1))
735 rtx equiv_insn
= XEXP (list
, 0);
736 if (equiv_insn
== insn
)
742 /* Main entry point for the reload pass.
744 FIRST is the first insn of the function being compiled.
746 GLOBAL nonzero means we were called from global_alloc
747 and should attempt to reallocate any pseudoregs that we
748 displace from hard regs we will use for reloads.
749 If GLOBAL is zero, we do not have enough information to do that,
750 so any pseudo reg that is spilled must go to the stack.
752 Return value is TRUE if reload likely left dead insns in the
753 stream and a DCE pass should be run to eliminate them. Else the
754 return value is FALSE. */
757 reload (rtx_insn
*first
, int global
)
761 struct elim_table
*ep
;
765 /* Make sure even insns with volatile mem refs are recognizable. */
770 reload_firstobj
= XOBNEWVAR (&reload_obstack
, char, 0);
772 /* Make sure that the last insn in the chain
773 is not something that needs reloading. */
774 emit_note (NOTE_INSN_DELETED
);
776 /* Enable find_equiv_reg to distinguish insns made by reload. */
777 reload_first_uid
= get_max_uid ();
779 #ifdef SECONDARY_MEMORY_NEEDED
780 /* Initialize the secondary memory table. */
781 clear_secondary_mem ();
784 /* We don't have a stack slot for any spill reg yet. */
785 memset (spill_stack_slot
, 0, sizeof spill_stack_slot
);
786 memset (spill_stack_slot_width
, 0, sizeof spill_stack_slot_width
);
788 /* Initialize the save area information for caller-save, in case some are needed. */
792 /* Compute which hard registers are now in use
793 as homes for pseudo registers.
794 This is done here rather than (eg) in global_alloc
795 because this point is reached even if not optimizing. */
796 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
799 /* A function that has a nonlocal label that can reach the exit
800 block via non-exceptional paths must save all call-saved registers. */
802 if (cfun
->has_nonlocal_label
803 && has_nonexceptional_receiver ())
804 crtl
->saves_all_registers
= 1;
806 if (crtl
->saves_all_registers
)
807 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
808 if (! call_used_regs
[i
] && ! fixed_regs
[i
] && ! LOCAL_REGNO (i
))
809 df_set_regs_ever_live (i
, true);
811 /* Find all the pseudo registers that didn't get hard regs
812 but do have known equivalent constants or memory slots.
813 These include parameters (known equivalent to parameter slots)
814 and cse'd or loop-moved constant memory addresses.
816 Record constant equivalents in reg_equiv_constant
817 so they will be substituted by find_reloads.
818 Record memory equivalents in reg_mem_equiv so they can
819 be substituted eventually by altering the REG-rtx's. */
822 reg_old_renumber
= XCNEWVEC (short, max_regno
);
823 memcpy (reg_old_renumber
, reg_renumber
, max_regno
* sizeof (short));
824 pseudo_forbidden_regs
= XNEWVEC (HARD_REG_SET
, max_regno
);
825 pseudo_previous_regs
= XCNEWVEC (HARD_REG_SET
, max_regno
);
827 CLEAR_HARD_REG_SET (bad_spill_regs_global
);
829 init_eliminable_invariants (first
, true);
832 /* Alter each pseudo-reg rtx to contain its hard reg number. Assign
833 stack slots to the pseudos that lack hard regs or equivalents.
834 Do not touch virtual registers. */
836 temp_pseudo_reg_arr
= XNEWVEC (int, max_regno
- LAST_VIRTUAL_REGISTER
- 1);
837 for (n
= 0, i
= LAST_VIRTUAL_REGISTER
+ 1; i
< max_regno
; i
++)
838 temp_pseudo_reg_arr
[n
++] = i
;
841 /* Ask IRA to order pseudo-registers for better stack slot sharing. */
843 ira_sort_regnos_for_alter_reg (temp_pseudo_reg_arr
, n
, reg_max_ref_width
);
845 for (i
= 0; i
< n
; i
++)
846 alter_reg (temp_pseudo_reg_arr
[i
], -1, false);
848 /* If we have some registers we think can be eliminated, scan all insns to
849 see if there is an insn that sets one of these registers to something
850 other than itself plus a constant. If so, the register cannot be
851 eliminated. Doing this scan here eliminates an extra pass through the
852 main reload loop in the most common case where register elimination cannot be done. */
854 for (insn
= first
; insn
&& num_eliminable
; insn
= NEXT_INSN (insn
))
856 note_stores (PATTERN (insn
), mark_not_eliminable
, NULL
);
858 maybe_fix_stack_asms ();
860 insns_need_reload
= 0;
861 something_needs_elimination
= 0;
863 /* Initialize to -1, which means take the first spill register. */
866 /* Spill any hard regs that we know we can't eliminate. */
867 CLEAR_HARD_REG_SET (used_spill_regs
);
868 /* There can be multiple ways to eliminate a register;
869 they should be listed adjacently.
870 Elimination for any register fails only if all possible ways fail. */
871 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; )
874 int can_eliminate
= 0;
877 can_eliminate
|= ep
->can_eliminate
;
880 while (ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
] && ep
->from
== from
);
882 spill_hard_reg (from
, 1);
885 if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
&& frame_pointer_needed
)
886 spill_hard_reg (HARD_FRAME_POINTER_REGNUM
, 1);
888 finish_spills (global
);
890 /* From now on, we may need to generate moves differently. We may also
891 allow modifications of insns which cause them to not be recognized.
892 Any such modifications will be cleaned up during reload itself. */
893 reload_in_progress
= 1;
895 /* This loop scans the entire function each go-round
896 and repeats until one repetition spills no additional hard regs. */
899 int something_changed
;
900 HOST_WIDE_INT starting_frame_size
;
902 starting_frame_size
= get_frame_size ();
903 something_was_spilled
= false;
905 set_initial_elim_offsets ();
906 set_initial_label_offsets ();
908 /* For each pseudo register that has an equivalent location defined,
909 try to eliminate any eliminable registers (such as the frame pointer)
910 assuming initial offsets for the replacement register, which
913 If the resulting location is directly addressable, substitute
914 the MEM we just got directly for the old REG.
916 If it is not addressable but is a constant or the sum of a hard reg
917 and constant, it is probably not addressable because the constant is
918 out of range, in that case record the address; we will generate
919 hairy code to compute the address in a register each time it is
920 needed. Similarly if it is a hard register, but one that is not
921 valid as an address register.
923 If the location is not addressable, but does not have one of the
924 above forms, assign a stack slot. We have to do this to avoid the
925 potential of producing lots of reloads if, e.g., a location involves
926 a pseudo that didn't get a hard register and has an equivalent memory
927 location that also involves a pseudo that didn't get a hard register.
929 Perhaps at some point we will improve reload_when_needed handling
930 so this problem goes away. But that's very hairy. */
932 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
933 if (reg_renumber
[i
] < 0 && reg_equiv_memory_loc (i
))
935 rtx x
= eliminate_regs (reg_equiv_memory_loc (i
), VOIDmode
,
938 if (strict_memory_address_addr_space_p
939 (GET_MODE (regno_reg_rtx
[i
]), XEXP (x
, 0),
941 reg_equiv_mem (i
) = x
, reg_equiv_address (i
) = 0;
942 else if (CONSTANT_P (XEXP (x
, 0))
943 || (REG_P (XEXP (x
, 0))
944 && REGNO (XEXP (x
, 0)) < FIRST_PSEUDO_REGISTER
)
945 || (GET_CODE (XEXP (x
, 0)) == PLUS
946 && REG_P (XEXP (XEXP (x
, 0), 0))
947 && (REGNO (XEXP (XEXP (x
, 0), 0))
948 < FIRST_PSEUDO_REGISTER
)
949 && CONSTANT_P (XEXP (XEXP (x
, 0), 1))))
950 reg_equiv_address (i
) = XEXP (x
, 0), reg_equiv_mem (i
) = 0;
953 /* Make a new stack slot. Then indicate that something
954 changed so we go back and recompute offsets for
955 eliminable registers because the allocation of memory
956 below might change some offset. reg_equiv_{mem,address}
957 will be set up for this pseudo on the next pass around
959 reg_equiv_memory_loc (i
) = 0;
960 reg_equiv_init (i
) = 0;
961 alter_reg (i
, -1, true);
965 if (caller_save_needed
)
968 if (starting_frame_size
&& crtl
->stack_alignment_needed
)
970 /* If we have a stack frame, we must align it now. The
971 stack size may be a part of the offset computation for
972 register elimination. So if this changes the stack size,
973 then repeat the elimination bookkeeping. We don't
974 realign when there is no stack, as that will cause a
975 stack frame when none is needed should
976 STARTING_FRAME_OFFSET not be already aligned to
978 assign_stack_local (BLKmode
, 0, crtl
->stack_alignment_needed
);
980 /* If we allocated another stack slot, redo elimination bookkeeping. */
981 if (something_was_spilled
|| starting_frame_size
!= get_frame_size ())
983 if (update_eliminables_and_spill ())
988 if (caller_save_needed
)
990 save_call_clobbered_regs ();
991 /* That might have allocated new insn_chain structures. */
992 reload_firstobj
= XOBNEWVAR (&reload_obstack
, char, 0);
995 calculate_needs_all_insns (global
);
997 if (! ira_conflicts_p
)
998 /* Don't do it for IRA. We need this info because we don't
999 change live_throughout and dead_or_set for chains when IRA
1001 CLEAR_REG_SET (&spilled_pseudos
);
1003 something_changed
= 0;
1005 /* If we allocated any new memory locations, make another pass
1006 since it might have changed elimination offsets. */
1007 if (something_was_spilled
|| starting_frame_size
!= get_frame_size ())
1008 something_changed
= 1;
1010 /* Even if the frame size remained the same, we might still have
1011 changed elimination offsets, e.g. if find_reloads called
1012 force_const_mem requiring the back end to allocate a constant
1013 pool base register that needs to be saved on the stack. */
1014 else if (!verify_initial_elim_offsets ())
1015 something_changed
= 1;
1017 if (update_eliminables_and_spill ())
1020 something_changed
= 1;
1024 select_reload_regs ();
1027 if (insns_need_reload
)
1028 something_changed
|= finish_spills (global
);
1031 if (! something_changed
)
1034 if (caller_save_needed
)
1035 delete_caller_save_insns ();
1037 obstack_free (&reload_obstack
, reload_firstobj
);
1040 /* If global-alloc was run, notify it of any register eliminations we have
1043 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
1044 if (ep
->can_eliminate
)
1045 mark_elimination (ep
->from
, ep
->to
);
1047 remove_init_insns ();
1049 /* Use the reload registers where necessary
1050 by generating move instructions to move the must-be-register
1051 values into or out of the reload registers. */
1053 if (insns_need_reload
!= 0 || something_needs_elimination
1054 || something_needs_operands_changed
)
1056 HOST_WIDE_INT old_frame_size
= get_frame_size ();
1058 reload_as_needed (global
);
1060 gcc_assert (old_frame_size
== get_frame_size ());
1062 gcc_assert (verify_initial_elim_offsets ());
1065 /* If we were able to eliminate the frame pointer, show that it is no
1066 longer live at the start of any basic block. If it ls live by
1067 virtue of being in a pseudo, that pseudo will be marked live
1068 and hence the frame pointer will be known to be live via that
1071 if (! frame_pointer_needed
)
1072 FOR_EACH_BB_FN (bb
, cfun
)
1073 bitmap_clear_bit (df_get_live_in (bb
), HARD_FRAME_POINTER_REGNUM
);
1075 /* Come here (with failure set nonzero) if we can't get enough spill
1079 CLEAR_REG_SET (&changed_allocation_pseudos
);
1080 CLEAR_REG_SET (&spilled_pseudos
);
1081 reload_in_progress
= 0;
1083 /* Now eliminate all pseudo regs by modifying them into
1084 their equivalent memory references.
1085 The REG-rtx's for the pseudos are modified in place,
1086 so all insns that used to refer to them now refer to memory.
1088 For a reg that has a reg_equiv_address, all those insns
1089 were changed by reloading so that no insns refer to it any longer;
1090 but the DECL_RTL of a variable decl may refer to it,
1091 and if so this causes the debugging info to mention the variable. */
1093 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
1097 if (reg_equiv_mem (i
))
1098 addr
= XEXP (reg_equiv_mem (i
), 0);
1100 if (reg_equiv_address (i
))
1101 addr
= reg_equiv_address (i
);
1105 if (reg_renumber
[i
] < 0)
1107 rtx reg
= regno_reg_rtx
[i
];
1109 REG_USERVAR_P (reg
) = 0;
1110 PUT_CODE (reg
, MEM
);
1111 XEXP (reg
, 0) = addr
;
1112 if (reg_equiv_memory_loc (i
))
1113 MEM_COPY_ATTRIBUTES (reg
, reg_equiv_memory_loc (i
));
1115 MEM_ATTRS (reg
) = 0;
1116 MEM_NOTRAP_P (reg
) = 1;
1118 else if (reg_equiv_mem (i
))
1119 XEXP (reg_equiv_mem (i
), 0) = addr
;
1122 /* We don't want complex addressing modes in debug insns
1123 if simpler ones will do, so delegitimize equivalences
1125 if (MAY_HAVE_DEBUG_INSNS
&& reg_renumber
[i
] < 0)
1127 rtx reg
= regno_reg_rtx
[i
];
1131 if (reg_equiv_constant (i
))
1132 equiv
= reg_equiv_constant (i
);
1133 else if (reg_equiv_invariant (i
))
1134 equiv
= reg_equiv_invariant (i
);
1135 else if (reg
&& MEM_P (reg
))
1136 equiv
= targetm
.delegitimize_address (reg
);
1137 else if (reg
&& REG_P (reg
) && (int)REGNO (reg
) != i
)
1143 for (use
= DF_REG_USE_CHAIN (i
); use
; use
= next
)
1145 insn
= DF_REF_INSN (use
);
1147 /* Make sure the next ref is for a different instruction,
1148 so that we're not affected by the rescan. */
1149 next
= DF_REF_NEXT_REG (use
);
1150 while (next
&& DF_REF_INSN (next
) == insn
)
1151 next
= DF_REF_NEXT_REG (next
);
1153 if (DEBUG_INSN_P (insn
))
1157 INSN_VAR_LOCATION_LOC (insn
) = gen_rtx_UNKNOWN_VAR_LOC ();
1158 df_insn_rescan_debug_internal (insn
);
1161 INSN_VAR_LOCATION_LOC (insn
)
1162 = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (insn
),
1169 /* We must set reload_completed now since the cleanup_subreg_operands call
1170 below will re-recognize each insn and reload may have generated insns
1171 which are only valid during and after reload. */
1172 reload_completed
= 1;
1174 /* Make a pass over all the insns and delete all USEs which we inserted
1175 only to tag a REG_EQUAL note on them. Remove all REG_DEAD and REG_UNUSED
1176 notes. Delete all CLOBBER insns, except those that refer to the return
1177 value and the special mem:BLK CLOBBERs added to prevent the scheduler
1178 from misarranging variable-array code, and simplify (subreg (reg))
1179 operands. Strip and regenerate REG_INC notes that may have been moved
1182 for (insn
= first
; insn
; insn
= NEXT_INSN (insn
))
1188 replace_pseudos_in (& CALL_INSN_FUNCTION_USAGE (insn
),
1189 VOIDmode
, CALL_INSN_FUNCTION_USAGE (insn
));
1191 if ((GET_CODE (PATTERN (insn
)) == USE
1192 /* We mark with QImode USEs introduced by reload itself. */
1193 && (GET_MODE (insn
) == QImode
1194 || find_reg_note (insn
, REG_EQUAL
, NULL_RTX
)))
1195 || (GET_CODE (PATTERN (insn
)) == CLOBBER
1196 && (!MEM_P (XEXP (PATTERN (insn
), 0))
1197 || GET_MODE (XEXP (PATTERN (insn
), 0)) != BLKmode
1198 || (GET_CODE (XEXP (XEXP (PATTERN (insn
), 0), 0)) != SCRATCH
1199 && XEXP (XEXP (PATTERN (insn
), 0), 0)
1200 != stack_pointer_rtx
))
1201 && (!REG_P (XEXP (PATTERN (insn
), 0))
1202 || ! REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn
), 0)))))
1208 /* Some CLOBBERs may survive until here and still reference unassigned
1209 pseudos with const equivalent, which may in turn cause ICE in later
1210 passes if the reference remains in place. */
1211 if (GET_CODE (PATTERN (insn
)) == CLOBBER
)
1212 replace_pseudos_in (& XEXP (PATTERN (insn
), 0),
1213 VOIDmode
, PATTERN (insn
));
1215 /* Discard obvious no-ops, even without -O. This optimization
1216 is fast and doesn't interfere with debugging. */
1217 if (NONJUMP_INSN_P (insn
)
1218 && GET_CODE (PATTERN (insn
)) == SET
1219 && REG_P (SET_SRC (PATTERN (insn
)))
1220 && REG_P (SET_DEST (PATTERN (insn
)))
1221 && (REGNO (SET_SRC (PATTERN (insn
)))
1222 == REGNO (SET_DEST (PATTERN (insn
)))))
1228 pnote
= ®_NOTES (insn
);
1231 if (REG_NOTE_KIND (*pnote
) == REG_DEAD
1232 || REG_NOTE_KIND (*pnote
) == REG_UNUSED
1233 || REG_NOTE_KIND (*pnote
) == REG_INC
)
1234 *pnote
= XEXP (*pnote
, 1);
1236 pnote
= &XEXP (*pnote
, 1);
1240 add_auto_inc_notes (insn
, PATTERN (insn
));
1242 /* Simplify (subreg (reg)) if it appears as an operand. */
1243 cleanup_subreg_operands (insn
);
1245 /* Clean up invalid ASMs so that they don't confuse later passes.
1247 if (asm_noperands (PATTERN (insn
)) >= 0)
1249 extract_insn (insn
);
1250 if (!constrain_operands (1, get_enabled_alternatives (insn
)))
1252 error_for_asm (insn
,
1253 "%<asm%> operand has impossible constraints");
1260 free (temp_pseudo_reg_arr
);
1262 /* Indicate that we no longer have known memory locations or constants. */
1265 free (reg_max_ref_width
);
1266 free (reg_old_renumber
);
1267 free (pseudo_previous_regs
);
1268 free (pseudo_forbidden_regs
);
1270 CLEAR_HARD_REG_SET (used_spill_regs
);
1271 for (i
= 0; i
< n_spills
; i
++)
1272 SET_HARD_REG_BIT (used_spill_regs
, spill_regs
[i
]);
1274 /* Free all the insn_chain structures at once. */
1275 obstack_free (&reload_obstack
, reload_startobj
);
1276 unused_insn_chains
= 0;
1278 inserted
= fixup_abnormal_edges ();
1280 /* We've possibly turned single trapping insn into multiple ones. */
1281 if (cfun
->can_throw_non_call_exceptions
)
1284 blocks
= sbitmap_alloc (last_basic_block_for_fn (cfun
));
1285 bitmap_ones (blocks
);
1286 find_many_sub_basic_blocks (blocks
);
1287 sbitmap_free (blocks
);
1291 commit_edge_insertions ();
1293 /* Replacing pseudos with their memory equivalents might have
1294 created shared rtx. Subsequent passes would get confused
1295 by this, so unshare everything here. */
1296 unshare_all_rtl_again (first
);
1298 #ifdef STACK_BOUNDARY
1299 /* init_emit has set the alignment of the hard frame pointer
1300 to STACK_BOUNDARY. It is very likely no longer valid if
1301 the hard frame pointer was used for register allocation. */
1302 if (!frame_pointer_needed
)
1303 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM
) = BITS_PER_UNIT
;
1306 substitute_stack
.release ();
1308 gcc_assert (bitmap_empty_p (&spilled_pseudos
));
1310 reload_completed
= !failure
;
1315 /* Yet another special case. Unfortunately, reg-stack forces people to
1316 write incorrect clobbers in asm statements. These clobbers must not
1317 cause the register to appear in bad_spill_regs, otherwise we'll call
1318 fatal_insn later. We clear the corresponding regnos in the live
1319 register sets to avoid this.
1320 The whole thing is rather sick, I'm afraid. */
/* NOTE(review): this chunk is a line-mangled extraction; statements are
   split across physical lines and the embedded numbering shows gaps, so
   some original lines (return type, braces, parts of the constraint
   parsing loop) appear to be missing.  Comments below annotate only the
   visible fragments.  */
1323 maybe_fix_stack_asms (void)
/* Per-operand constraint strings and modes, filled by
   decode_asm_operands below.  */
1326 const char *constraints
[MAX_RECOG_OPERANDS
];
1327 machine_mode operand_mode
[MAX_RECOG_OPERANDS
];
1328 struct insn_chain
*chain
;
/* Walk every insn_chain entry; only asm insns with a PARALLEL pattern
   are of interest.  */
1330 for (chain
= reload_insn_chain
; chain
!= 0; chain
= chain
->next
)
1333 HARD_REG_SET clobbered
, allowed
;
1336 if (! INSN_P (chain
->insn
)
1337 || (noperands
= asm_noperands (PATTERN (chain
->insn
))) < 0)
1339 pat
= PATTERN (chain
->insn
);
1340 if (GET_CODE (pat
) != PARALLEL
)
1343 CLEAR_HARD_REG_SET (clobbered
);
1344 CLEAR_HARD_REG_SET (allowed
);
1346 /* First, make a mask of all stack regs that are clobbered. */
1347 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
1349 rtx t
= XVECEXP (pat
, 0, i
);
1350 if (GET_CODE (t
) == CLOBBER
&& STACK_REG_P (XEXP (t
, 0)))
1351 SET_HARD_REG_BIT (clobbered
, REGNO (XEXP (t
, 0)));
1354 /* Get the operand values and constraints out of the insn. */
1355 decode_asm_operands (pat
, recog_data
.operand
, recog_data
.operand_loc
,
1356 constraints
, operand_mode
, NULL
);
1358 /* For every operand, see what registers are allowed. */
1359 for (i
= 0; i
< noperands
; i
++)
1361 const char *p
= constraints
[i
];
1362 /* For every alternative, we compute the class of registers allowed
1363 for reloading in CLS, and merge its contents into the reg set
1365 int cls
= (int) NO_REGS
;
/* NOTE(review): the do-loop header reading constraint character C is
   among the missing lines; only the alternative-separator test and the
   loop's closing "while" survive here.  */
1371 if (c
== '\0' || c
== ',' || c
== '#')
1373 /* End of one alternative - mark the regs in the current
1374 class, and reset the class. */
1375 IOR_HARD_REG_SET (allowed
, reg_class_contents
[cls
]);
1381 } while (c
!= '\0' && c
!= ',');
/* 'g'/'r'-style constraints widen CLS to GENERAL_REGS.  */
1390 cls
= (int) reg_class_subunion
[cls
][(int) GENERAL_REGS
];
/* Other constraints: address constraints contribute the base register
   class, register constraints contribute their own class.  */
1394 enum constraint_num cn
= lookup_constraint (p
);
1395 if (insn_extra_address_constraint (cn
))
1396 cls
= (int) reg_class_subunion
[cls
]
1397 [(int) base_reg_class (VOIDmode
, ADDR_SPACE_GENERIC
,
1400 cls
= (int) reg_class_subunion
[cls
]
1401 [reg_class_for_constraint (cn
)];
1404 p
+= CONSTRAINT_LEN (c
, p
);
1407 /* Those of the registers which are clobbered, but allowed by the
1408 constraints, must be usable as reload registers. So clear them
1409 out of the life information. */
1410 AND_HARD_REG_SET (allowed
, clobbered
);
1411 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1412 if (TEST_HARD_REG_BIT (allowed
, i
))
1414 CLEAR_REGNO_REG_SET (&chain
->live_throughout
, i
);
1415 CLEAR_REGNO_REG_SET (&chain
->dead_or_set
, i
);
1422 /* Copy the global variables n_reloads and rld into the corresponding elts
1425 copy_reloads (struct insn_chain
*chain
)
1427 chain
->n_reloads
= n_reloads
;
1428 chain
->rld
= XOBNEWVEC (&reload_obstack
, struct reload
, n_reloads
);
1429 memcpy (chain
->rld
, rld
, n_reloads
* sizeof (struct reload
));
1430 reload_insn_firstobj
= XOBNEWVAR (&reload_obstack
, char, 0);
1433 /* Walk the chain of insns, and determine for each whether it needs reloads
1434 and/or eliminations. Build the corresponding insns_need_reload list, and
1435 set something_needs_elimination as appropriate. */
/* NOTE(review): fragmented extraction — braces, blank lines and a few
   statements are missing (gaps in the embedded numbering).  GLOBAL is
   nonzero when a global register allocator ran first; it is forwarded
   to find_reloads below.  */
1437 calculate_needs_all_insns (int global
)
1439 struct insn_chain
**pprev_reload
= &insns_need_reload
;
1440 struct insn_chain
*chain
, *next
= 0;
1442 something_needs_elimination
= 0;
/* Reset the per-insn obstack marker before scanning.  */
1444 reload_insn_firstobj
= XOBNEWVAR (&reload_obstack
, char, 0);
1445 for (chain
= reload_insn_chain
; chain
!= 0; chain
= next
)
1447 rtx_insn
*insn
= chain
->insn
;
1451 /* Clear out the shortcuts. */
1452 chain
->n_reloads
= 0;
1453 chain
->need_elim
= 0;
1454 chain
->need_reload
= 0;
1455 chain
->need_operand_change
= 0;
1457 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
1458 include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
1459 what effects this has on the known offsets at labels. */
1461 if (LABEL_P (insn
) || JUMP_P (insn
) || JUMP_TABLE_DATA_P (insn
)
1462 || (INSN_P (insn
) && REG_NOTES (insn
) != 0))
1463 set_label_offsets (insn
, insn
, 0);
/* Save the insn's body/code/notes so they can be restored after a
   tentative elimination below.  */
1467 rtx old_body
= PATTERN (insn
);
1468 int old_code
= INSN_CODE (insn
);
1469 rtx old_notes
= REG_NOTES (insn
);
1470 int did_elimination
= 0;
1471 int operands_changed
= 0;
1473 /* Skip insns that only set an equivalence. */
1474 if (will_delete_init_insn_p (insn
))
1477 /* If needed, eliminate any eliminable registers. */
1478 if (num_eliminable
|| num_eliminable_invariants
)
1479 did_elimination
= eliminate_regs_in_insn (insn
, 0);
1481 /* Analyze the instruction. */
1482 operands_changed
= find_reloads (insn
, 0, spill_indirect_levels
,
1483 global
, spill_reg_order
);
1485 /* If a no-op set needs more than one reload, this is likely
1486 to be something that needs input address reloads. We
1487 can't get rid of this cleanly later, and it is of no use
1488 anyway, so discard it now.
1489 We only do this when expensive_optimizations is enabled,
1490 since this complements reload inheritance / output
1491 reload deletion, and it can make debugging harder. */
1492 if (flag_expensive_optimizations
&& n_reloads
> 1)
1494 rtx set
= single_set (insn
);
/* A no-op move: either src == dest pseudo, or two distinct spilled
   pseudos sharing the same equivalent memory location.  */
1497 ((SET_SRC (set
) == SET_DEST (set
)
1498 && REG_P (SET_SRC (set
))
1499 && REGNO (SET_SRC (set
)) >= FIRST_PSEUDO_REGISTER
)
1500 || (REG_P (SET_SRC (set
)) && REG_P (SET_DEST (set
))
1501 && reg_renumber
[REGNO (SET_SRC (set
))] < 0
1502 && reg_renumber
[REGNO (SET_DEST (set
))] < 0
1503 && reg_equiv_memory_loc (REGNO (SET_SRC (set
))) != NULL
1504 && reg_equiv_memory_loc (REGNO (SET_DEST (set
))) != NULL
1505 && rtx_equal_p (reg_equiv_memory_loc (REGNO (SET_SRC (set
))),
1506 reg_equiv_memory_loc (REGNO (SET_DEST (set
)))))))
1508 if (ira_conflicts_p
)
1509 /* Inform IRA about the insn deletion. */
1510 ira_mark_memory_move_deletion (REGNO (SET_DEST (set
)),
1511 REGNO (SET_SRC (set
)));
1513 /* Delete it from the reload chain. */
/* Unlink CHAIN from the doubly-linked insn_chain list and put it on
   the unused list for reuse.  */
1515 chain
->prev
->next
= next
;
1517 reload_insn_chain
= next
;
1519 next
->prev
= chain
->prev
;
1520 chain
->next
= unused_insn_chains
;
1521 unused_insn_chains
= chain
;
1526 update_eliminable_offsets ();
1528 /* Remember for later shortcuts which insns had any reloads or
1529 register eliminations. */
1530 chain
->need_elim
= did_elimination
;
1531 chain
->need_reload
= n_reloads
> 0;
1532 chain
->need_operand_change
= operands_changed
;
1534 /* Discard any register replacements done. */
1535 if (did_elimination
)
1537 obstack_free (&reload_obstack
, reload_insn_firstobj
);
1538 PATTERN (insn
) = old_body
;
1539 INSN_CODE (insn
) = old_code
;
1540 REG_NOTES (insn
) = old_notes
;
1541 something_needs_elimination
= 1;
1544 something_needs_operands_changed
|= operands_changed
;
/* Record this insn on the insns_need_reload list (singly linked
   through next_need_reload) and snapshot its reload data.  */
1548 copy_reloads (chain
);
1549 *pprev_reload
= chain
;
1550 pprev_reload
= &chain
->next_need_reload
;
1557 /* This function is called from the register allocator to set up estimates
1558 for the cost of eliminating pseudos which have REG_EQUIV equivalences to
1559 an invariant. The structure is similar to calculate_needs_all_insns. */
/* NOTE(review): fragmented extraction — braces and several statements
   are missing (gaps in the embedded numbering).  */
1562 calculate_elim_costs_all_insns (void)
/* Per-pseudo accumulated cost of the insns that initialize an
   equivalence; indexed by register number, zero-initialized.  */
1564 int *reg_equiv_init_cost
;
1568 reg_equiv_init_cost
= XCNEWVEC (int, max_regno
);
1570 init_eliminable_invariants (get_insns (), false);
1572 set_initial_elim_offsets ();
1573 set_initial_label_offsets ();
/* Scan every insn of every basic block, mirroring
   calculate_needs_all_insns.  */
1575 FOR_EACH_BB_FN (bb
, cfun
)
1580 FOR_BB_INSNS (bb
, insn
)
1582 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
1583 include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
1584 what effects this has on the known offsets at labels. */
1586 if (LABEL_P (insn
) || JUMP_P (insn
) || JUMP_TABLE_DATA_P (insn
)
1587 || (INSN_P (insn
) && REG_NOTES (insn
) != 0))
1588 set_label_offsets (insn
, insn
, 0);
1592 rtx set
= single_set (insn
);
1594 /* Skip insns that only set an equivalence. */
1595 if (set
&& REG_P (SET_DEST (set
))
1596 && reg_renumber
[REGNO (SET_DEST (set
))] < 0
1597 && (reg_equiv_constant (REGNO (SET_DEST (set
)))
1598 || reg_equiv_invariant (REGNO (SET_DEST (set
)))))
1600 unsigned regno
= REGNO (SET_DEST (set
));
1601 rtx_insn_list
*init
= reg_equiv_init (regno
);
/* Estimate the cost of the eliminated form of the source and weight
   it by the block's execution frequency.  */
1604 rtx t
= eliminate_regs_1 (SET_SRC (set
), VOIDmode
, insn
,
1606 machine_mode mode
= GET_MODE (SET_DEST (set
));
1607 int cost
= set_src_cost (t
, mode
,
1608 optimize_bb_for_speed_p (bb
));
1609 int freq
= REG_FREQ_FROM_BB (bb
);
1611 reg_equiv_init_cost
[regno
] = cost
* freq
;
1615 /* If needed, eliminate any eliminable registers. */
1616 if (num_eliminable
|| num_eliminable_invariants
)
1617 elimination_costs_in_insn (insn
);
1620 update_eliminable_offsets ();
/* Report the accumulated costs to IRA for each pseudo with an
   invariant equivalence.  */
1624 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
1626 if (reg_equiv_invariant (i
))
1628 if (reg_equiv_init (i
))
1630 int cost
= reg_equiv_init_cost
[i
];
/* Dump-file diagnostics (the fprintf call's first lines are among the
   missing fragments).  */
1633 "Reg %d has equivalence, initial gains %d\n", i
, cost
);
1635 ira_adjust_equiv_reg_cost (i
, cost
);
1641 "Reg %d had equivalence, but can't be eliminated\n",
1643 ira_adjust_equiv_reg_cost (i
, 0);
1648 free (reg_equiv_init_cost
);
1649 free (offsets_known_at
);
1652 offsets_known_at
= NULL
;
1655 /* Comparison function for qsort to decide which of two reloads
1656 should be handled first. *P1 and *P2 are the reload numbers. */
/* NOTE(review): fragmented extraction — the "if (t != 0) return t;"
   lines that should follow each comparison, and the final tie-break
   "return r1 - r2;", are among the missing lines.  */
1659 reload_reg_class_lower (const void *r1p
, const void *r2p
)
1661 int r1
= *(const short *) r1p
, r2
= *(const short *) r2p
;
1664 /* Consider required reloads before optional ones. */
1665 t
= rld
[r1
].optional
- rld
[r2
].optional
;
1669 /* Count all solitary classes before non-solitary ones. */
1670 t
= ((reg_class_size
[(int) rld
[r2
].rclass
] == 1)
1671 - (reg_class_size
[(int) rld
[r1
].rclass
] == 1));
1675 /* Aside from solitaires, consider all multi-reg groups first. */
1676 t
= rld
[r2
].nregs
- rld
[r1
].nregs
;
1680 /* Consider reloads in order of increasing reg-class number. */
1681 t
= (int) rld
[r1
].rclass
- (int) rld
[r2
].rclass
;
1685 /* If reloads are equally urgent, sort by reload number,
1686 so that the results of qsort leave nothing to chance. */
1690 /* The cost of spilling each hard reg. */
1691 static int spill_cost
[FIRST_PSEUDO_REGISTER
];
1693 /* When spilling multiple hard registers, we use SPILL_COST for the first
1694 spilled hard reg and SPILL_ADD_COST for subsequent regs. SPILL_ADD_COST
1695 only the first hard reg for a multi-reg pseudo. */
/* NOTE(review): the comment above reads as truncated mid-sentence —
   some words of the original appear to have been dropped by the
   extraction.  */
1696 static int spill_add_cost
[FIRST_PSEUDO_REGISTER
];
1698 /* Map of hard regno to pseudo regno currently occupying the hard
1700 static int hard_regno_to_pseudo_regno
[FIRST_PSEUDO_REGISTER
];
1702 /* Update the spill cost arrays, considering that pseudo REG is live. */
/* NOTE(review): fragmented extraction — the function's return type,
   braces, early "return" statements and the loop header that walks the
   NREGS hard regs of the pseudo are among the missing lines.  */
1705 count_pseudo (int reg
)
1707 int freq
= REG_FREQ (reg
)
;
1708 int r
= reg_renumber
[reg
];
1711 /* Ignore spilled pseudo-registers which can be here only if IRA is used. */
1712 if (ira_conflicts_p
&& r
< 0)
/* Count each pseudo only once, and never those already spilled.  */
1715 if (REGNO_REG_SET_P (&pseudos_counted
, reg
)
1716 || REGNO_REG_SET_P (&spilled_pseudos
, reg
))
1719 SET_REGNO_REG_SET (&pseudos_counted
, reg
);
1721 gcc_assert (r
>= 0);
1723 spill_add_cost
[r
] += freq
;
1724 nregs
= hard_regno_nregs
[r
][PSEUDO_REGNO_MODE (reg
)];
/* Record occupancy and add FREQ to the cost of each hard reg the
   pseudo covers (loop presumably decrements NREGS — missing here).  */
1727 hard_regno_to_pseudo_regno
[r
+ nregs
] = reg
;
1728 spill_cost
[r
+ nregs
] += freq
;
1732 /* Calculate the SPILL_COST and SPILL_ADD_COST arrays and determine the
1733 contents of BAD_SPILL_REGS for the insn described by CHAIN. */
/* NOTE(review): fragmented extraction — return type, braces and the
   loop bodies of the two EXECUTE_IF_SET_IN_REG_SET walks (which call
   count_pseudo, judging by the comment at line 1759) are missing.  */
1736 order_regs_for_reload (struct insn_chain
*chain
)
1739 HARD_REG_SET used_by_pseudos
;
1740 HARD_REG_SET used_by_pseudos2
;
1741 reg_set_iterator rsi
;
/* Start from the fixed registers — never usable as spill regs.  */
1743 COPY_HARD_REG_SET (bad_spill_regs
, fixed_reg_set
);
1745 memset (spill_cost
, 0, sizeof spill_cost
);
1746 memset (spill_add_cost
, 0, sizeof spill_add_cost
);
1747 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1748 hard_regno_to_pseudo_regno
[i
] = -1;
1750 /* Count number of uses of each hard reg by pseudo regs allocated to it
1751 and then order them by decreasing use. First exclude hard registers
1752 that are live in or across this insn. */
1754 REG_SET_TO_HARD_REG_SET (used_by_pseudos
, &chain
->live_throughout
);
1755 REG_SET_TO_HARD_REG_SET (used_by_pseudos2
, &chain
->dead_or_set
);
1756 IOR_HARD_REG_SET (bad_spill_regs
, used_by_pseudos
);
1757 IOR_HARD_REG_SET (bad_spill_regs
, used_by_pseudos2
);
1759 /* Now find out which pseudos are allocated to it, and update
1761 CLEAR_REG_SET (&pseudos_counted
);
/* Walk the pseudos live through this insn and those dead or set by
   it; the iteration bodies are among the missing lines.  */
1763 EXECUTE_IF_SET_IN_REG_SET
1764 (&chain
->live_throughout
, FIRST_PSEUDO_REGISTER
, i
, rsi
)
1768 EXECUTE_IF_SET_IN_REG_SET
1769 (&chain
->dead_or_set
, FIRST_PSEUDO_REGISTER
, i
, rsi
)
1773 CLEAR_REG_SET (&pseudos_counted
);
1776 /* Vector of reload-numbers showing the order in which the reloads should
/* NOTE(review): the comment above is truncated — its continuation
   (presumably "be processed") was dropped by the extraction.  */
1778 static short reload_order
[MAX_RELOADS
];
1780 /* This is used to keep track of the spill regs used in one insn. */
1781 static HARD_REG_SET used_spill_regs_local
;
1783 /* We decided to spill hard register SPILLED, which has a size of
1784 SPILLED_NREGS. Determine how pseudo REG, which is live during the insn,
1785 is affected. We will add it to SPILLED_PSEUDOS if necessary, and we will
1786 update SPILL_COST/SPILL_ADD_COST. */
/* NOTE(review): fragmented extraction — return type, braces, early
   returns and the loop decrementing NREGS are among the missing
   lines.  */
1789 count_spilled_pseudo (int spilled
, int spilled_nregs
, int reg
)
1791 int freq
= REG_FREQ (reg
)
;
1792 int r
= reg_renumber
[reg
];
1795 /* Ignore spilled pseudo-registers which can be here only if IRA is used. */
1796 if (ira_conflicts_p
&& r
< 0)
1799 gcc_assert (r
>= 0);
1801 nregs
= hard_regno_nregs
[r
][PSEUDO_REGNO_MODE (reg
)];
/* Nothing to do if REG is already spilled or if its hard-reg range
   [r, r+nregs) does not overlap [spilled, spilled+spilled_nregs).  */
1803 if (REGNO_REG_SET_P (&spilled_pseudos
, reg
)
1804 || spilled
+ spilled_nregs
<= r
|| r
+ nregs
<= spilled
)
1807 SET_REGNO_REG_SET (&spilled_pseudos
, reg
);
/* Undo the accounting done by count_pseudo for this pseudo.  */
1809 spill_add_cost
[r
] -= freq
;
1812 hard_regno_to_pseudo_regno
[r
+ nregs
] = -1;
1813 spill_cost
[r
+ nregs
] -= freq
;
1817 /* Find reload register to use for reload number ORDER. */
/* NOTE(review): fragmented extraction — return type, braces, the
   failure path (return 0), "continue"/"break" statements and several
   conditions are among the missing lines.  Comments annotate the
   visible fragments only.  */
1820 find_reg (struct insn_chain
*chain
, int order
)
1822 int rnum
= reload_order
[order
];
1823 struct reload
*rl
= rld
+ rnum
;
1824 int best_cost
= INT_MAX
;
1826 unsigned int i
, j
, n
;
1828 HARD_REG_SET not_usable
;
1829 HARD_REG_SET used_by_other_reload
;
1830 reg_set_iterator rsi
;
1831 static int regno_pseudo_regs
[FIRST_PSEUDO_REGISTER
];
1832 static int best_regno_pseudo_regs
[FIRST_PSEUDO_REGISTER
];
/* NOT_USABLE = bad spill regs (local and global) plus everything
   outside the reload's register class.  */
1834 COPY_HARD_REG_SET (not_usable
, bad_spill_regs
);
1835 IOR_HARD_REG_SET (not_usable
, bad_spill_regs_global
);
1836 IOR_COMPL_HARD_REG_SET (not_usable
, reg_class_contents
[rl
->rclass
]);
/* Mark hard regs already taken by earlier conflicting reloads of this
   insn.  */
1838 CLEAR_HARD_REG_SET (used_by_other_reload
);
1839 for (k
= 0; k
< order
; k
++)
1841 int other
= reload_order
[k
];
1843 if (rld
[other
].regno
>= 0 && reloads_conflict (other
, rnum
))
1844 for (j
= 0; j
< rld
[other
].nregs
; j
++)
1845 SET_HARD_REG_BIT (used_by_other_reload
, rld
[other
].regno
+ j
);
/* Scan all hard registers (in REG_ALLOC_ORDER when defined) for the
   cheapest usable candidate.  */
1848 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1850 #ifdef REG_ALLOC_ORDER
1851 unsigned int regno
= reg_alloc_order
[i
];
1853 unsigned int regno
= i
;
1856 if (! TEST_HARD_REG_BIT (not_usable
, regno
)
1857 && ! TEST_HARD_REG_BIT (used_by_other_reload
, regno
)
1858 && HARD_REGNO_MODE_OK (regno
, rl
->mode
))
1860 int this_cost
= spill_cost
[regno
];
1862 unsigned int this_nregs
= hard_regno_nregs
[regno
][rl
->mode
];
/* A multi-reg reload also pays spill_add_cost for each additional
   hard reg, and is rejected if any of them is unusable.  */
1864 for (j
= 1; j
< this_nregs
; j
++)
1866 this_cost
+= spill_add_cost
[regno
+ j
];
1867 if ((TEST_HARD_REG_BIT (not_usable
, regno
+ j
))
1868 || TEST_HARD_REG_BIT (used_by_other_reload
, regno
+ j
))
1874 if (ira_conflicts_p
)
1876 /* Ask IRA to find a better pseudo-register for
/* Collect the distinct pseudos currently occupying this candidate's
   hard regs, terminated by -1, for the IRA query below.  */
1878 for (n
= j
= 0; j
< this_nregs
; j
++)
1880 int r
= hard_regno_to_pseudo_regno
[regno
+ j
];
1884 if (n
== 0 || regno_pseudo_regs
[n
- 1] != r
)
1885 regno_pseudo_regs
[n
++] = r
;
1887 regno_pseudo_regs
[n
++] = -1;
1889 || ira_better_spill_reload_regno_p (regno_pseudo_regs
,
1890 best_regno_pseudo_regs
,
/* Remember the occupant list of the best candidate so far.  */
1897 best_regno_pseudo_regs
[j
] = regno_pseudo_regs
[j
];
1898 if (regno_pseudo_regs
[j
] < 0)
/* Prefer registers that already hold the reload's input or output
   value (cost adjustments are among the missing lines).  */
1905 if (rl
->in
&& REG_P (rl
->in
) && REGNO (rl
->in
) == regno
)
1907 if (rl
->out
&& REG_P (rl
->out
) && REGNO (rl
->out
) == regno
)
1909 if (this_cost
< best_cost
1910 /* Among registers with equal cost, prefer caller-saved ones, or
1911 use REG_ALLOC_ORDER if it is defined. */
1912 || (this_cost
== best_cost
1913 #ifdef REG_ALLOC_ORDER
1914 && (inv_reg_alloc_order
[regno
]
1915 < inv_reg_alloc_order
[best_reg
])
1917 && call_used_regs
[regno
]
1918 && ! call_used_regs
[best_reg
]
1923 best_cost
= this_cost
;
/* A best register was found — record it in the reload and update the
   spill bookkeeping.  */
1931 fprintf (dump_file
, "Using reg %d for reload %d\n", best_reg
, rnum
);
1933 rl
->nregs
= hard_regno_nregs
[best_reg
][rl
->mode
];
1934 rl
->regno
= best_reg
;
1936 EXECUTE_IF_SET_IN_REG_SET
1937 (&chain
->live_throughout
, FIRST_PSEUDO_REGISTER
, j
, rsi
)
1939 count_spilled_pseudo (best_reg
, rl
->nregs
, j
);
1942 EXECUTE_IF_SET_IN_REG_SET
1943 (&chain
->dead_or_set
, FIRST_PSEUDO_REGISTER
, j
, rsi
)
1945 count_spilled_pseudo (best_reg
, rl
->nregs
, j
);
/* After spilling, all of the chosen hard regs must be free of cost
   and occupants; mark them used for this insn.  */
1948 for (i
= 0; i
< rl
->nregs
; i
++)
1950 gcc_assert (spill_cost
[best_reg
+ i
] == 0);
1951 gcc_assert (spill_add_cost
[best_reg
+ i
] == 0);
1952 gcc_assert (hard_regno_to_pseudo_regno
[best_reg
+ i
] == -1);
1953 SET_HARD_REG_BIT (used_spill_regs_local
, best_reg
+ i
);
1958 /* Find more reload regs to satisfy the remaining need of an insn, which
1960 Do it by ascending class number, since otherwise a reg
1961 might be spilled for a big class and might fail to count
1962 for a smaller class even though it belongs to that class. */
/* NOTE(review): fragmented extraction — return type, braces and the
   left-hand side of the assignment at line 1981 (presumably
   chain->rld[i].nregs) are among the missing lines.  */
1965 find_reload_regs (struct insn_chain
*chain
)
1969 /* In order to be certain of getting the registers we need,
1970 we must sort the reloads into order of increasing register class.
1971 Then our grabbing of reload registers will parallel the process
1972 that provided the reload registers. */
1973 for (i
= 0; i
< chain
->n_reloads
; i
++)
1975 /* Show whether this reload already has a hard reg. */
1976 if (chain
->rld
[i
].reg_rtx
)
1978 int regno
= REGNO (chain
->rld
[i
].reg_rtx
);
1979 chain
->rld
[i
].regno
= regno
;
1981 = hard_regno_nregs
[regno
][GET_MODE (chain
->rld
[i
].reg_rtx
)];
1984 chain
->rld
[i
].regno
= -1;
1985 reload_order
[i
] = i
;
/* Work on a copy of the insn's reload descriptors in the global rld
   array, copied back at the end.  */
1988 n_reloads
= chain
->n_reloads
;
1989 memcpy (rld
, chain
->rld
, n_reloads
* sizeof (struct reload
));
1991 CLEAR_HARD_REG_SET (used_spill_regs_local
);
1994 fprintf (dump_file
, "Spilling for insn %d.\n", INSN_UID (chain
->insn
));
1996 qsort (reload_order
, n_reloads
, sizeof (short), reload_reg_class_lower
);
1998 /* Compute the order of preference for hard registers to spill. */
2000 order_regs_for_reload (chain
);
2002 for (i
= 0; i
< n_reloads
; i
++)
2004 int r
= reload_order
[i
];
2006 /* Ignore reloads that got marked inoperative. */
2007 if ((rld
[r
].out
!= 0 || rld
[r
].in
!= 0 || rld
[r
].secondary_p
)
2008 && ! rld
[r
].optional
2009 && rld
[r
].regno
== -1)
2010 if (! find_reg (chain
, i
))
/* find_reg failed: report and bail out via spill_failure.  */
2013 fprintf (dump_file
, "reload failure for reload %d\n", r
);
2014 spill_failure (chain
->insn
, rld
[r
].rclass
);
/* Publish the spill regs chosen for this insn, both per-insn and in
   the global used_spill_regs set.  */
2020 COPY_HARD_REG_SET (chain
->used_spill_regs
, used_spill_regs_local
);
2021 IOR_HARD_REG_SET (used_spill_regs
, used_spill_regs_local
);
2023 memcpy (chain
->rld
, rld
, n_reloads
* sizeof (struct reload
));
2027 select_reload_regs (void)
2029 struct insn_chain
*chain
;
2031 /* Try to satisfy the needs for each insn. */
2032 for (chain
= insns_need_reload
; chain
!= 0;
2033 chain
= chain
->next_need_reload
)
2034 find_reload_regs (chain
);
2037 /* Delete all insns that were inserted by emit_caller_save_insns during
/* NOTE(review): the comment above is truncated (presumably "this
   iteration"); return type, braces and the insn-deletion call for INSN
   are among the missing lines.  */
2040 delete_caller_save_insns (void)
2042 struct insn_chain
*c
= reload_insn_chain
;
/* Walk consecutive caller-save chain entries from the head, unlink
   each from the doubly-linked list and recycle it.  */
2046 while (c
!= 0 && c
->is_caller_save_insn
)
2048 struct insn_chain
*next
= c
->next
;
2049 rtx_insn
*insn
= c
->insn
;
2051 if (c
== reload_insn_chain
)
2052 reload_insn_chain
= next
;
2056 next
->prev
= c
->prev
;
2058 c
->prev
->next
= next
;
/* Put the unlinked entry on the free list for reuse.  */
2059 c
->next
= unused_insn_chains
;
2060 unused_insn_chains
= c
;
2068 /* Handle the failure to find a register to spill.
2069 INSN should be one of the insns which needed this particular spill reg. */
/* NOTE(review): fragmented extraction — return type, braces, the else
   between the two diagnostics and the dump_file guard are among the
   missing lines.  */
2072 spill_failure (rtx_insn
*insn
, enum reg_class rclass
)
/* Asm statements get a user-level diagnostic; otherwise this is an
   internal failure.  */
2074 if (asm_noperands (PATTERN (insn
)) >= 0)
2075 error_for_asm (insn
, "can%'t find a register in class %qs while "
2076 "reloading %<asm%>",
2077 reg_class_names
[rclass
]);
2080 error ("unable to find a register to spill in class %qs",
2081 reg_class_names
[rclass
]);
/* Dump the reloads of the offending insn, then abort via
   fatal_insn.  */
2085 fprintf (dump_file
, "\nReloads for insn # %d\n", INSN_UID (insn
));
2086 debug_reload_to_stream (dump_file
);
2088 fatal_insn ("this is the insn:", insn
);
2092 /* Delete an unneeded INSN and any previous insns who sole purpose is loading
2093 data that is dead in INSN. */
/* NOTE(review): fragmented extraction — return type, braces, the
   declaration of prev_dest and the body of the if (which presumably
   sets the run-DCE flag, per the comment below) are among the missing
   lines.  */
2096 delete_dead_insn (rtx_insn
*insn
)
2098 rtx_insn
*prev
= prev_active_insn (insn
);
2101 /* If the previous insn sets a register that dies in our insn make
2102 a note that we want to run DCE immediately after reload.
2104 We used to delete the previous insn & recurse, but that's wrong for
2105 block local equivalences. Instead of trying to figure out the exact
2106 circumstances where we can delete the potentially dead insns, just
2107 let DCE do the job. */
2108 if (prev
&& BLOCK_FOR_INSN (prev
) == BLOCK_FOR_INSN (insn
)
2109 && GET_CODE (PATTERN (prev
)) == SET
2110 && (prev_dest
= SET_DEST (PATTERN (prev
)), REG_P (prev_dest
))
2111 && reg_mentioned_p (prev_dest
, PATTERN (insn
))
2112 && find_regno_note (insn
, REG_DEAD
, REGNO (prev_dest
))
2113 && ! side_effects_p (SET_SRC (PATTERN (prev
))))
2116 SET_INSN_DELETED (insn
);
/* Modify the home of pseudo-reg I.
   The new home is present in reg_renumber[I].

   FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
   or it may be -1, meaning there is none or it is not relevant.
   This is used so that all pseudos spilled from a given hard reg
   can share one stack slot.  */

static void
alter_reg (int i, int from_reg, bool dont_share_p)
{
  /* When outputting an inline function, this can happen
     for a reg that isn't actually used.  */
  if (regno_reg_rtx[i] == 0)
    return;

  /* If the reg got changed to a MEM at rtl-generation time,
     ignore it.  */
  if (!REG_P (regno_reg_rtx[i]))
    return;

  /* Modify the reg-rtx to contain the new hard reg
     number or else to contain its pseudo reg number.  */
  SET_REGNO (regno_reg_rtx[i],
	     reg_renumber[i] >= 0 ? reg_renumber[i] : i);

  /* If we have a pseudo that is needed but has no hard reg or equivalent,
     allocate a stack slot for it.  */

  if (reg_renumber[i] < 0
      && REG_N_REFS (i) > 0
      && reg_equiv_constant (i) == 0
      && (reg_equiv_invariant (i) == 0
	  || reg_equiv_init (i) == 0)
      && reg_equiv_memory_loc (i) == 0)
    {
      rtx x = NULL_RTX;
      machine_mode mode = GET_MODE (regno_reg_rtx[i]);
      unsigned int inherent_size = PSEUDO_REGNO_BYTES (i);
      unsigned int inherent_align = GET_MODE_ALIGNMENT (mode);
      /* TOTAL_SIZE also covers paradoxical subreg references that are
	 wider than the pseudo's own mode.  */
      unsigned int total_size = MAX (inherent_size, reg_max_ref_width[i]);
      unsigned int min_align = reg_max_ref_width[i] * BITS_PER_UNIT;
      int adjust = 0;

      something_was_spilled = true;

      if (ira_conflicts_p)
	{
	  /* Mark the spill for IRA.  */
	  SET_REGNO_REG_SET (&spilled_pseudos, i);
	  if (!dont_share_p)
	    x = ira_reuse_stack_slot (i, inherent_size, total_size);
	}

      if (x)
	;

      /* Each pseudo reg has an inherent size which comes from its own mode,
	 and a total size which provides room for paradoxical subregs
	 which refer to the pseudo reg in wider modes.

	 We can use a slot already allocated if it provides both
	 enough inherent space and enough total space.
	 Otherwise, we allocate a new slot, making sure that it has no less
	 inherent space, and no less total space, then the previous slot.  */
      else if (from_reg == -1 || (!dont_share_p && ira_conflicts_p))
	{
	  rtx stack_slot;

	  /* No known place to spill from => no slot to reuse.  */
	  x = assign_stack_local (mode, total_size,
				  min_align > inherent_align
				  || total_size > inherent_size ? -1 : 0);

	  stack_slot = x;

	  /* Cancel the big-endian correction done in assign_stack_local.
	     Get the address of the beginning of the slot.  This is so we
	     can do a big-endian correction unconditionally below.  */
	  if (BYTES_BIG_ENDIAN)
	    {
	      adjust = inherent_size - total_size;
	      if (adjust)
		stack_slot
		  = adjust_address_nv (x, mode_for_size (total_size
							 * BITS_PER_UNIT,
							 MODE_INT, 1),
				       adjust);
	    }

	  if (! dont_share_p && ira_conflicts_p)
	    /* Inform IRA about allocation a new stack slot.  */
	    ira_mark_new_stack_slot (stack_slot, i, total_size);
	}

      /* Reuse a stack slot if possible.  */
      else if (spill_stack_slot[from_reg] != 0
	       && spill_stack_slot_width[from_reg] >= total_size
	       && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
		   >= inherent_size)
	       && MEM_ALIGN (spill_stack_slot[from_reg]) >= min_align)
	x = spill_stack_slot[from_reg];

      /* Allocate a bigger slot.  */
      else
	{
	  /* Compute maximum size needed, both for inherent size
	     and for total size.  */
	  rtx stack_slot;

	  if (spill_stack_slot[from_reg])
	    {
	      if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
		  > inherent_size)
		mode = GET_MODE (spill_stack_slot[from_reg]);
	      if (spill_stack_slot_width[from_reg] > total_size)
		total_size = spill_stack_slot_width[from_reg];
	      if (MEM_ALIGN (spill_stack_slot[from_reg]) > min_align)
		min_align = MEM_ALIGN (spill_stack_slot[from_reg]);
	    }

	  /* Make a slot with that size.  */
	  x = assign_stack_local (mode, total_size,
				  min_align > inherent_align
				  || total_size > inherent_size ? -1 : 0);

	  stack_slot = x;

	  /* Cancel the big-endian correction done in assign_stack_local.
	     Get the address of the beginning of the slot.  This is so we
	     can do a big-endian correction unconditionally below.  */
	  if (BYTES_BIG_ENDIAN)
	    {
	      adjust = GET_MODE_SIZE (mode) - total_size;
	      if (adjust)
		stack_slot
		  = adjust_address_nv (x, mode_for_size (total_size
							 * BITS_PER_UNIT,
							 MODE_INT, 1),
				       adjust);
	    }

	  /* Record the enlarged slot so later spills from FROM_REG can
	     share it.  */
	  spill_stack_slot[from_reg] = stack_slot;
	  spill_stack_slot_width[from_reg] = total_size;
	}

      /* On a big endian machine, the "address" of the slot
	 is the address of the low part that fits its inherent mode.  */
      if (BYTES_BIG_ENDIAN && inherent_size < total_size)
	adjust += (total_size - inherent_size);

      /* If we have any adjustment to make, or if the stack slot is the
	 wrong mode, make a new stack slot.  */
      x = adjust_address_nv (x, GET_MODE (regno_reg_rtx[i]), adjust);

      /* Set all of the memory attributes as appropriate for a spill.  */
      set_mem_attrs_for_spill (x);

      /* Save the stack slot for later.  */
      reg_equiv_memory_loc (i) = x;
    }
}
/* Mark the slots in regs_ever_live for the hard regs used by
   pseudo-reg number REGNO, accessed in MODE.  */

static void
mark_home_live_1 (int regno, machine_mode mode)
{
  int i, lim;

  i = reg_renumber[regno];
  /* Negative means the pseudo got no hard register; nothing to mark.  */
  if (i < 0)
    return;
  /* A multi-word mode occupies several consecutive hard regs; mark
     every one of them live.  */
  lim = end_hard_regno (mode, i);
  while (i < lim)
    df_set_regs_ever_live (i++, true);
}
2297 /* Mark the slots in regs_ever_live for the hard regs
2298 used by pseudo-reg number REGNO. */
2301 mark_home_live (int regno
)
2303 if (reg_renumber
[regno
] >= 0)
2304 mark_home_live_1 (regno
, PSEUDO_REGNO_MODE (regno
));
/* This function handles the tracking of elimination offsets around branches.

   X is a piece of RTL being scanned.

   INSN is the insn that it came from, if any.

   INITIAL_P is nonzero if we are to set the offset to be the initial
   offset and zero if we are setting the offset of the label to be the
   current offset.  */

static void
set_label_offsets (rtx x, rtx_insn *insn, int initial_p)
{
  enum rtx_code code = GET_CODE (x);
  rtx tem;
  unsigned int i;
  struct elim_table *p;

  switch (code)
    {
    case LABEL_REF:
      /* Non-local labels are handled elsewhere; their offsets must stay
	 at the initial values.  */
      if (LABEL_REF_NONLOCAL_P (x))
	return;

      x = LABEL_REF_LABEL (x);

      /* ... fall through ...  */

    case CODE_LABEL:
      /* If we know nothing about this label, set the desired offsets.  Note
	 that this sets the offset at a label to be the offset before a label
	 if we don't know anything about the label.  This is not correct for
	 the label after a BARRIER, but is the best guess we can make.  If
	 we guessed wrong, we will suppress an elimination that might have
	 been possible had we been able to guess correctly.  */

      if (! offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num])
	{
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
	      = (initial_p ? reg_eliminate[i].initial_offset
		 : reg_eliminate[i].offset);
	  offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num] = 1;
	}

      /* Otherwise, if this is the definition of a label and it is
	 preceded by a BARRIER, set our offsets to the known offset of
	 that label.  */

      else if (x == insn
	       && (tem = prev_nonnote_insn (insn)) != 0
	       && BARRIER_P (tem))
	set_offsets_for_label (insn);
      else
	/* If neither of the above cases is true, compare each offset
	   with those previously recorded and suppress any eliminations
	   where the offsets disagree.  */

	for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	  if (offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
	      != (initial_p ? reg_eliminate[i].initial_offset
		  : reg_eliminate[i].offset))
	    reg_eliminate[i].can_eliminate = 0;

      return;

    case JUMP_TABLE_DATA:
      set_label_offsets (PATTERN (insn), insn, initial_p);
      return;

    case JUMP_INSN:
      set_label_offsets (PATTERN (insn), insn, initial_p);

      /* ... fall through ...  */

    case INSN:
    case CALL_INSN:
      /* Any labels mentioned in REG_LABEL_OPERAND notes can be branched
	 to indirectly and hence must have all eliminations at their
	 initial offsets.  */
      for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
	if (REG_NOTE_KIND (tem) == REG_LABEL_OPERAND)
	  set_label_offsets (XEXP (tem, 0), insn, 1);
      return;

    case PARALLEL:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      /* Each of the labels in the parallel or address vector must be
	 at their initial offsets.  We want the first field for PARALLEL
	 and ADDR_VEC and the second field for ADDR_DIFF_VEC.  */

      for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
	set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
			   insn, initial_p);
      return;

    case SET:
      /* We only care about setting PC.  If the source is not RETURN,
	 IF_THEN_ELSE, or a label, disable any eliminations not at
	 their initial offsets.  Similarly if any arm of the IF_THEN_ELSE
	 isn't one of those possibilities.  For branches to a label,
	 call ourselves recursively.

	 Note that this can disable elimination unnecessarily when we have
	 a non-local goto since it will look like a non-constant jump to
	 someplace in the current function.  This isn't a significant
	 problem since such jumps will normally be when all elimination
	 pairs are back to their initial offsets.  */

      if (SET_DEST (x) != pc_rtx)
	return;

      switch (GET_CODE (SET_SRC (x)))
	{
	case PC:
	case RETURN:
	  return;

	case LABEL_REF:
	  set_label_offsets (SET_SRC (x), insn, initial_p);
	  return;

	case IF_THEN_ELSE:
	  tem = XEXP (SET_SRC (x), 1);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (LABEL_REF_LABEL (tem), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;

	  tem = XEXP (SET_SRC (x), 2);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (LABEL_REF_LABEL (tem), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;
	  return;

	default:
	  break;
	}

      /* If we reach here, all eliminations must be at their initial
	 offset because we are doing a jump to a variable address.  */
      for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
	if (p->offset != p->initial_offset)
	  p->can_eliminate = 0;
      break;

    default:
      break;
    }
}
/* This function examines every reg that occurs in X and adjusts the
   costs for its elimination which are gathered by IRA.  INSN is the
   insn in which X occurs.  We do not recurse into MEM expressions.  */

static void
note_reg_elim_costly (const_rtx x, rtx insn)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    {
      const_rtx x = *iter;
      if (MEM_P (x))
	/* Do not descend into MEMs; their addresses are costed
	   separately.  */
	iter.skip_subrtxes ();
      else if (REG_P (x)
	       && REGNO (x) >= FIRST_PSEUDO_REGISTER
	       && reg_equiv_init (REGNO (x))
	       && reg_equiv_invariant (REGNO (x)))
	{
	  rtx t = reg_equiv_invariant (REGNO (x));
	  /* Cost the invariant as it would look after elimination.  */
	  rtx new_rtx = eliminate_regs_1 (t, Pmode, insn, true, true);
	  int cost = set_src_cost (new_rtx, Pmode,
				   optimize_bb_for_speed_p (elim_bb));
	  int freq = REG_FREQ_FROM_BB (elim_bb);

	  if (cost != 0)
	    /* Negative adjustment: make keeping the equivalence
	       unallocated look more expensive to IRA.  */
	    ira_adjust_equiv_reg_cost (REGNO (x), -cost * freq);
	}
    }
}
/* Scan X and replace any eliminable registers (such as fp) with a
   replacement (such as sp), plus an offset.

   MEM_MODE is the mode of an enclosing MEM.  We need this to know how
   much to adjust a register for, e.g., PRE_DEC.  Also, if we are inside a
   MEM, we are allowed to replace a sum of a register and the constant zero
   with the register, which we cannot do outside a MEM.  In addition, we need
   to record the fact that a register is referenced outside a MEM.

   If INSN is an insn, it is the insn containing X.  If we replace a REG
   in a SET_DEST with an equivalent MEM and INSN is nonzero, write a
   CLOBBER of the pseudo after INSN so find_equiv_regs will know that
   the REG is being modified.

   Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
   That's used when we eliminate in expressions stored in notes.
   This means, do not set ref_outside_mem even if the reference
   is outside of MEMs.

   If FOR_COSTS is true, we are being called before reload in order to
   estimate the costs of keeping registers with an equivalence unallocated.

   REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
   replacements done assuming all offsets are at their initial values.  If
   they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
   encounter, return the actual location so that find_reloads will do
   the proper thing.  */

static rtx
eliminate_regs_1 (rtx x, machine_mode mem_mode, rtx insn,
		  bool may_use_invariant, bool for_costs)
{
  enum rtx_code code = GET_CODE (x);
  struct elim_table *ep;
  int regno;
  rtx new_rtx;
  int i, j;
  const char *fmt;
  int copied = 0;

  if (! current_function_decl)
    return x;

  switch (code)
    {
    CASE_CONST_ANY:
    case CONST:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case RETURN:
      return x;

    case REG:
      regno = REGNO (x);

      /* First handle the case where we encounter a bare register that
	 is eliminable.  Replace it with a PLUS.  */
      if (regno < FIRST_PSEUDO_REGISTER)
	{
	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->from_rtx == x && ep->can_eliminate)
	      return plus_constant (Pmode, ep->to_rtx, ep->previous_offset);

	}
      else if (reg_renumber && reg_renumber[regno] < 0
	       && reg_equivs
	       && reg_equiv_invariant (regno))
	{
	  if (may_use_invariant || (insn && DEBUG_INSN_P (insn)))
	    return eliminate_regs_1 (copy_rtx (reg_equiv_invariant (regno)),
			             mem_mode, insn, true, for_costs);
	  /* There exists at least one use of REGNO that cannot be
	     eliminated.  Prevent the defining insn from being deleted.  */
	  reg_equiv_init (regno) = NULL;
	  if (!for_costs)
	    alter_reg (regno, -1, true);
	}
      return x;

    /* You might think handling MINUS in a manner similar to PLUS is a
       good idea.  It is not.  It has been tried multiple times and every
       time the change has had to have been reverted.

       Other parts of reload know a PLUS is special (gen_reload for example)
       and require special code to handle code a reloaded PLUS operand.

       Also consider backends where the flags register is clobbered by a
       MINUS, but we can emit a PLUS that does not clobber flags (IA-32,
       lea instruction comes to mind).  If we try to reload a MINUS, we
       may kill the flags register that was holding a useful value.

       So, please before trying to handle MINUS, consider reload as a
       whole instead of this little section as well as the backend issues.  */
    case PLUS:
      /* If this is the sum of an eliminable register and a constant, rework
	 the sum.  */
      if (REG_P (XEXP (x, 0))
	  && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
	  && CONSTANT_P (XEXP (x, 1)))
	{
	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
	      {
		/* The only time we want to replace a PLUS with a REG (this
		   occurs when the constant operand of the PLUS is the negative
		   of the offset) is when we are inside a MEM.  We won't want
		   to do so at other times because that would change the
		   structure of the insn in a way that reload can't handle.
		   We special-case the commonest situation in
		   eliminate_regs_in_insn, so just replace a PLUS with a
		   PLUS here, unless inside a MEM.  */
		if (mem_mode != 0 && CONST_INT_P (XEXP (x, 1))
		    && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
		  return ep->to_rtx;
		else
		  return gen_rtx_PLUS (Pmode, ep->to_rtx,
				       plus_constant (Pmode, XEXP (x, 1),
						      ep->previous_offset));
	      }

	  /* If the register is not eliminable, we are done since the other
	     operand is a constant.  */
	  return x;
	}

      /* If this is part of an address, we want to bring any constant to the
	 outermost PLUS.  We will do this by doing register replacement in
	 our operands and seeing if a constant shows up in one of them.

	 Note that there is no risk of modifying the structure of the insn,
	 since we only get called for its operands, thus we are either
	 modifying the address inside a MEM, or something like an address
	 operand of a load-address insn.  */

      {
	rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true,
				     for_costs);
	rtx new1 = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
				     for_costs);

	if (reg_renumber && (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)))
	  {
	    /* If one side is a PLUS and the other side is a pseudo that
	       didn't get a hard register but has a reg_equiv_constant,
	       we must replace the constant here since it may no longer
	       be in the position of any operand.  */
	    if (GET_CODE (new0) == PLUS && REG_P (new1)
		&& REGNO (new1) >= FIRST_PSEUDO_REGISTER
		&& reg_renumber[REGNO (new1)] < 0
		&& reg_equivs
		&& reg_equiv_constant (REGNO (new1)) != 0)
	      new1 = reg_equiv_constant (REGNO (new1));
	    else if (GET_CODE (new1) == PLUS && REG_P (new0)
		     && REGNO (new0) >= FIRST_PSEUDO_REGISTER
		     && reg_renumber[REGNO (new0)] < 0
		     && reg_equiv_constant (REGNO (new0)) != 0)
	      new0 = reg_equiv_constant (REGNO (new0));

	    new_rtx = form_sum (GET_MODE (x), new0, new1);

	    /* As above, if we are not inside a MEM we do not want to
	       turn a PLUS into something else.  We might try to do so here
	       for an addition of 0 if we aren't optimizing.  */
	    if (! mem_mode && GET_CODE (new_rtx) != PLUS)
	      return gen_rtx_PLUS (GET_MODE (x), new_rtx, const0_rtx);
	    else
	      return new_rtx;
	  }
      }
      return x;

    case MULT:
      /* If this is the product of an eliminable register and a
	 constant, apply the distribute law and move the constant out
	 so that we have (plus (mult ..) ..).  This is needed in order
	 to keep load-address insns valid.   This case is pathological.
	 We ignore the possibility of overflow here.  */
      if (REG_P (XEXP (x, 0))
	  && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
	  && CONST_INT_P (XEXP (x, 1)))
	for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	     ep++)
	  if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
	    {
	      if (! mem_mode
		  /* Refs inside notes or in DEBUG_INSNs don't count for
		     this purpose.  */
		  && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
				      || GET_CODE (insn) == INSN_LIST
				      || DEBUG_INSN_P (insn))))
		ep->ref_outside_mem = 1;

	      return
		plus_constant (Pmode,
			       gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
			       ep->previous_offset * INTVAL (XEXP (x, 1)));
	    }

      /* ... fall through ...  */

    case CALL:
    case COMPARE:
    /* See comments before PLUS about handling MINUS.  */
    case MINUS:
    case DIV:      case UDIV:
    case MOD:      case UMOD:
    case AND:      case IOR:      case XOR:
    case ROTATERT: case ROTATE:
    case ASHIFTRT: case LSHIFTRT: case ASHIFT:
    case NE:       case EQ:
    case GE:       case GT:       case GEU:    case GTU:
    case LE:       case LT:       case LEU:    case LTU:
      {
	rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false,
				     for_costs);
	rtx new1 = XEXP (x, 1)
	  ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, false,
			      for_costs) : 0;

	if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	  return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
      }
      return x;

    case EXPR_LIST:
      /* If we have something in XEXP (x, 0), the usual case, eliminate it.  */
      if (XEXP (x, 0))
	{
	  new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true,
				      for_costs);
	  if (new_rtx != XEXP (x, 0))
	    {
	      /* If this is a REG_DEAD note, it is not valid anymore.
		 Using the eliminated version could result in creating a
		 REG_DEAD note for the stack or frame pointer.  */
	      if (REG_NOTE_KIND (x) == REG_DEAD)
		return (XEXP (x, 1)
			? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
					    for_costs)
			: NULL_RTX);

	      x = alloc_reg_note (REG_NOTE_KIND (x), new_rtx, XEXP (x, 1));
	    }
	}

      /* ... fall through ...  */

    case INSN_LIST:
    case INT_LIST:
      /* Now do eliminations in the rest of the chain.  If this was
	 an EXPR_LIST, this might result in allocating more memory than is
	 strictly needed, but it simplifies the code.  */
      if (XEXP (x, 1))
	{
	  new_rtx = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
				      for_costs);
	  if (new_rtx != XEXP (x, 1))
	    return
	      gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new_rtx);
	}
      return x;

    case PRE_INC:
    case POST_INC:
    case PRE_DEC:
    case POST_DEC:
      /* We do not support elimination of a register that is modified.
	 elimination_effects has already make sure that this does not
	 happen.  */
      return x;

    case PRE_MODIFY:
    case POST_MODIFY:
      /* We do not support elimination of a register that is modified.
	 elimination_effects has already make sure that this does not
	 happen.  The only remaining case we need to consider here is
	 that the increment value may be an eliminable register.  */
      if (GET_CODE (XEXP (x, 1)) == PLUS
	  && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
	{
	  rtx new_rtx = eliminate_regs_1 (XEXP (XEXP (x, 1), 1), mem_mode,
					  insn, true, for_costs);

	  if (new_rtx != XEXP (XEXP (x, 1), 1))
	    return gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (x, 0),
				   gen_rtx_PLUS (GET_MODE (x),
						 XEXP (x, 0), new_rtx));
	}
      return x;

    case STRICT_LOW_PART:
    case NEG:          case NOT:
    case SIGN_EXTEND:  case ZERO_EXTEND:
    case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
    case FLOAT:        case FIX:
    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
    case ABS:
    case SQRT:
    case FFS:
    case CLZ:
    case CTZ:
    case POPCOUNT:
    case PARITY:
    case BSWAP:
      new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false,
				  for_costs);
      if (new_rtx != XEXP (x, 0))
	return gen_rtx_fmt_e (code, GET_MODE (x), new_rtx);
      return x;

    case SUBREG:
      /* Similar to above processing, but preserve SUBREG_BYTE.
	 Convert (subreg (mem)) to (mem) if not paradoxical.
	 Also, if we have a non-paradoxical (subreg (pseudo)) and the
	 pseudo didn't get a hard reg, we must replace this with the
	 eliminated version of the memory location because push_reload
	 may do the replacement in certain circumstances.  */
      if (REG_P (SUBREG_REG (x))
	  && !paradoxical_subreg_p (x)
	  && reg_equivs
	  && reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
	{
	  new_rtx = SUBREG_REG (x);
	}
      else
	new_rtx = eliminate_regs_1 (SUBREG_REG (x), mem_mode, insn, false, for_costs);

      if (new_rtx != SUBREG_REG (x))
	{
	  int x_size = GET_MODE_SIZE (GET_MODE (x));
	  int new_size = GET_MODE_SIZE (GET_MODE (new_rtx));

	  if (MEM_P (new_rtx)
	      && ((x_size < new_size
#if WORD_REGISTER_OPERATIONS
		   /* On these machines, combine can create rtl of the form
		      (set (subreg:m1 (reg:m2 R) 0) ...)
		      where m1 < m2, and expects something interesting to
		      happen to the entire word.  Moreover, it will use the
		      (reg:m2 R) later, expecting all bits to be preserved.
		      So if the number of words is the same, preserve the
		      subreg so that push_reload can see it.  */
		   && ! ((x_size - 1) / UNITS_PER_WORD
			 == (new_size -1 ) / UNITS_PER_WORD)
#endif
		   )
		  || x_size == new_size)
	      )
	    return adjust_address_nv (new_rtx, GET_MODE (x), SUBREG_BYTE (x));
	  else
	    return gen_rtx_SUBREG (GET_MODE (x), new_rtx, SUBREG_BYTE (x));
	}

      return x;

    case MEM:
      /* Our only special processing is to pass the mode of the MEM to our
	 recursive call and copy the flags.  While we are here, handle this
	 case more efficiently.  */

      new_rtx = eliminate_regs_1 (XEXP (x, 0), GET_MODE (x), insn, true,
				  for_costs);
      if (for_costs
	  && memory_address_p (GET_MODE (x), XEXP (x, 0))
	  && !memory_address_p (GET_MODE (x), new_rtx))
	note_reg_elim_costly (XEXP (x, 0), insn);

      return replace_equiv_address_nv (x, new_rtx);

    case USE:
      /* Handle insn_list USE that a call to a pure function may generate.  */
      new_rtx = eliminate_regs_1 (XEXP (x, 0), VOIDmode, insn, false,
				  for_costs);
      if (new_rtx != XEXP (x, 0))
	return gen_rtx_USE (GET_MODE (x), new_rtx);
      return x;

    case CLOBBER:
    case ASM_OPERANDS:
      /* Only DEBUG_INSNs may legitimately reach here with these codes.  */
      gcc_assert (insn && DEBUG_INSN_P (insn));
      break;

    case SET:
      gcc_unreachable ();

    default:
      break;
    }

  /* Process each of our operands recursively.  If any have changed, make a
     copy of the rtx.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
	{
	  new_rtx = eliminate_regs_1 (XEXP (x, i), mem_mode, insn, false,
				      for_costs);
	  if (new_rtx != XEXP (x, i) && ! copied)
	    {
	      /* Copy-on-write: duplicate X only the first time an operand
		 actually changes.  */
	      x = shallow_copy_rtx (x);
	      copied = 1;
	    }
	  XEXP (x, i) = new_rtx;
	}
      else if (*fmt == 'E')
	{
	  int copied_vec = 0;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    {
	      new_rtx = eliminate_regs_1 (XVECEXP (x, i, j), mem_mode, insn, false,
					  for_costs);
	      if (new_rtx != XVECEXP (x, i, j) && ! copied_vec)
		{
		  rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
					     XVEC (x, i)->elem);
		  if (! copied)
		    {
		      x = shallow_copy_rtx (x);
		      copied = 1;
		    }
		  XVEC (x, i) = new_v;
		  copied_vec = 1;
		}
	      XVECEXP (x, i, j) = new_rtx;
	    }
	}
    }

  return x;
}
/* Public entry point for register elimination: scan X, replacing any
   eliminable register with its replacement plus an offset.  MEM_MODE is
   the mode of an enclosing MEM (VOIDmode if none); INSN is the insn or
   note containing X, if any.  */

rtx
eliminate_regs (rtx x, machine_mode mem_mode, rtx insn)
{
  /* When the target does no register allocation at all there is no
     elimination table; X is returned unchanged.  */
  if (reg_eliminate == NULL)
    {
      gcc_assert (targetm.no_register_allocation);
      return x;
    }
  return eliminate_regs_1 (x, mem_mode, insn, false, false);
}
/* Scan rtx X for modifications of elimination target registers.  Update
   the table of eliminables to reflect the changed state.  MEM_MODE is
   the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM.  */

static void
elimination_effects (rtx x, machine_mode mem_mode)
{
  enum rtx_code code = GET_CODE (x);
  struct elim_table *ep;
  int regno;
  int i, j;
  const char *fmt;

  switch (code)
    {
    CASE_CONST_ANY:
    case CONST:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case RETURN:
      return;

    case REG:
      regno = REGNO (x);

      /* First handle the case where we encounter a bare register that
	 is eliminable.  Replace it with a PLUS.  */
      if (regno < FIRST_PSEUDO_REGISTER)
	{
	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->from_rtx == x && ep->can_eliminate)
	      {
		/* A reference outside any MEM must be recorded; see the
		   discussion of ref_outside_mem in eliminate_regs_1.  */
		if (! mem_mode)
		  ep->ref_outside_mem = 1;
		return;
	      }
	}
      else if (reg_renumber[regno] < 0
	       && reg_equivs
	       && reg_equiv_constant (regno)
	       && ! function_invariant_p (reg_equiv_constant (regno)))
	elimination_effects (reg_equiv_constant (regno), mem_mode);
      return;

    case PRE_INC:
    case POST_INC:
    case PRE_DEC:
    case POST_DEC:
    case POST_MODIFY:
    case PRE_MODIFY:
      /* If we modify the source of an elimination rule, disable it.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == XEXP (x, 0))
	  ep->can_eliminate = 0;

      /* If we modify the target of an elimination rule by adding a constant,
	 update its offset.  If we modify the target in any other way, we'll
	 have to disable the rule as well.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->to_rtx == XEXP (x, 0))
	  {
	    int size = GET_MODE_SIZE (mem_mode);

	    /* If more bytes than MEM_MODE are pushed, account for them.  */
#ifdef PUSH_ROUNDING
	    if (ep->to_rtx == stack_pointer_rtx)
	      size = PUSH_ROUNDING (size);
#endif
	    if (code == PRE_DEC || code == POST_DEC)
	      ep->offset += size;
	    else if (code == PRE_INC || code == POST_INC)
	      ep->offset -= size;
	    else if (code == PRE_MODIFY || code == POST_MODIFY)
	      {
		if (GET_CODE (XEXP (x, 1)) == PLUS
		    && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
		    && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
		  ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1));
		else
		  ep->can_eliminate = 0;
	      }
	  }

      /* These two aren't unary operators.  */
      if (code == POST_MODIFY || code == PRE_MODIFY)
	break;

      /* Fall through to generic unary operation case.  */
    case STRICT_LOW_PART:
    case NEG:          case NOT:
    case SIGN_EXTEND:  case ZERO_EXTEND:
    case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
    case FLOAT:        case FIX:
    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
    case ABS:
    case SQRT:
    case FFS:
    case CLZ:
    case CTZ:
    case POPCOUNT:
    case PARITY:
    case BSWAP:
      elimination_effects (XEXP (x, 0), mem_mode);
      return;

    case SUBREG:
      /* A non-paradoxical subreg of a pseudo with a memory equivalence
	 needs no further scanning.  */
      if (REG_P (SUBREG_REG (x))
	  && (GET_MODE_SIZE (GET_MODE (x))
	      <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	  && reg_equivs
	  && reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
	return;

      elimination_effects (SUBREG_REG (x), mem_mode);
      return;

    case USE:
      /* If using a register that is the source of an eliminate we still
	 think can be performed, note it cannot be performed since we don't
	 know how this register is used.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == XEXP (x, 0))
	  ep->can_eliminate = 0;

      elimination_effects (XEXP (x, 0), mem_mode);
      return;

    case CLOBBER:
      /* If clobbering a register that is the replacement register for an
	 elimination we still think can be performed, note that it cannot
	 be performed.  Otherwise, we need not be concerned about it.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->to_rtx == XEXP (x, 0))
	  ep->can_eliminate = 0;

      elimination_effects (XEXP (x, 0), mem_mode);
      return;

    case SET:
      /* Check for setting a register that we know about.  */
      if (REG_P (SET_DEST (x)))
	{
	  /* See if this is setting the replacement register for an
	     elimination.

	     If DEST is the hard frame pointer, we do nothing because we
	     assume that all assignments to the frame pointer are for
	     non-local gotos and are being done at a time when they are valid
	     and do not disturb anything else.  Some machines want to
	     eliminate a fake argument pointer (or even a fake frame pointer)
	     with either the real frame or the stack pointer.  Assignments to
	     the hard frame pointer must not prevent this elimination.  */

	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->to_rtx == SET_DEST (x)
		&& SET_DEST (x) != hard_frame_pointer_rtx)
	      {
		/* If it is being incremented, adjust the offset.  Otherwise,
		   this elimination can't be done.  */
		rtx src = SET_SRC (x);

		if (GET_CODE (src) == PLUS
		    && XEXP (src, 0) == SET_DEST (x)
		    && CONST_INT_P (XEXP (src, 1)))
		  ep->offset -= INTVAL (XEXP (src, 1));
		else
		  ep->can_eliminate = 0;
	      }
	}

      elimination_effects (SET_DEST (x), VOIDmode);
      elimination_effects (SET_SRC (x), VOIDmode);
      return;

    case MEM:
      /* Our only special processing is to pass the mode of the MEM to our
	 recursive call.  */
      elimination_effects (XEXP (x, 0), GET_MODE (x));
      return;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
	elimination_effects (XEXP (x, i), mem_mode);
      else if (*fmt == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  elimination_effects (XVECEXP (x, i, j), mem_mode);
    }
}
/* Descend through rtx X and verify that no references to eliminable registers
   remain.  If any do remain, mark the involved register as not
   eliminable.  */

static void
check_eliminable_occurrences (rtx x)
{
  const char *fmt;
  int i;
  enum rtx_code code;

  if (x == 0)
    return;

  code = GET_CODE (x);

  if (code == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
    {
      struct elim_table *ep;

      /* A surviving reference to an elimination source means that
	 elimination must be abandoned for that register pair.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == x)
	  ep->can_eliminate = 0;
      return;
    }

  /* Recurse over all sub-expressions, following the rtx format string.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
	check_eliminable_occurrences (XEXP (x, i));
      else if (*fmt == 'E')
	{
	  int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    check_eliminable_occurrences (XVECEXP (x, i, j));
	}
    }
}
3183 /* Scan INSN and eliminate all eliminable registers in it.
3185 If REPLACE is nonzero, do the replacement destructively. Also
3186 delete the insn as dead it if it is setting an eliminable register.
3188 If REPLACE is zero, do all our allocations in reload_obstack.
3190 If no eliminations were done and this insn doesn't require any elimination
3191 processing (these are not identical conditions: it might be updating sp,
3192 but not referencing fp; this needs to be seen during reload_as_needed so
3193 that the offset between fp and sp can be taken into consideration), zero
3194 is returned. Otherwise, 1 is returned. */
3197 eliminate_regs_in_insn (rtx_insn
*insn
, int replace
)
3199 int icode
= recog_memoized (insn
);
3200 rtx old_body
= PATTERN (insn
);
3201 int insn_is_asm
= asm_noperands (old_body
) >= 0;
3202 rtx old_set
= single_set (insn
);
3206 rtx substed_operand
[MAX_RECOG_OPERANDS
];
3207 rtx orig_operand
[MAX_RECOG_OPERANDS
];
3208 struct elim_table
*ep
;
3209 rtx plus_src
, plus_cst_src
;
3211 if (! insn_is_asm
&& icode
< 0)
3213 gcc_assert (DEBUG_INSN_P (insn
)
3214 || GET_CODE (PATTERN (insn
)) == USE
3215 || GET_CODE (PATTERN (insn
)) == CLOBBER
3216 || GET_CODE (PATTERN (insn
)) == ASM_INPUT
);
3217 if (DEBUG_INSN_P (insn
))
3218 INSN_VAR_LOCATION_LOC (insn
)
3219 = eliminate_regs (INSN_VAR_LOCATION_LOC (insn
), VOIDmode
, insn
);
3223 if (old_set
!= 0 && REG_P (SET_DEST (old_set
))
3224 && REGNO (SET_DEST (old_set
)) < FIRST_PSEUDO_REGISTER
)
3226 /* Check for setting an eliminable register. */
3227 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3228 if (ep
->from_rtx
== SET_DEST (old_set
) && ep
->can_eliminate
)
3230 /* If this is setting the frame pointer register to the
3231 hardware frame pointer register and this is an elimination
3232 that will be done (tested above), this insn is really
3233 adjusting the frame pointer downward to compensate for
3234 the adjustment done before a nonlocal goto. */
3235 if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
3236 && ep
->from
== FRAME_POINTER_REGNUM
3237 && ep
->to
== HARD_FRAME_POINTER_REGNUM
)
3239 rtx base
= SET_SRC (old_set
);
3240 rtx_insn
*base_insn
= insn
;
3241 HOST_WIDE_INT offset
= 0;
3243 while (base
!= ep
->to_rtx
)
3245 rtx_insn
*prev_insn
;
3248 if (GET_CODE (base
) == PLUS
3249 && CONST_INT_P (XEXP (base
, 1)))
3251 offset
+= INTVAL (XEXP (base
, 1));
3252 base
= XEXP (base
, 0);
3254 else if ((prev_insn
= prev_nonnote_insn (base_insn
)) != 0
3255 && (prev_set
= single_set (prev_insn
)) != 0
3256 && rtx_equal_p (SET_DEST (prev_set
), base
))
3258 base
= SET_SRC (prev_set
);
3259 base_insn
= prev_insn
;
3265 if (base
== ep
->to_rtx
)
3267 rtx src
= plus_constant (Pmode
, ep
->to_rtx
,
3268 offset
- ep
->offset
);
3270 new_body
= old_body
;
3273 new_body
= copy_insn (old_body
);
3274 if (REG_NOTES (insn
))
3275 REG_NOTES (insn
) = copy_insn_1 (REG_NOTES (insn
));
3277 PATTERN (insn
) = new_body
;
3278 old_set
= single_set (insn
);
3280 /* First see if this insn remains valid when we
3281 make the change. If not, keep the INSN_CODE
3282 the same and let reload fit it up. */
3283 validate_change (insn
, &SET_SRC (old_set
), src
, 1);
3284 validate_change (insn
, &SET_DEST (old_set
),
3286 if (! apply_change_group ())
3288 SET_SRC (old_set
) = src
;
3289 SET_DEST (old_set
) = ep
->to_rtx
;
3297 /* In this case this insn isn't serving a useful purpose. We
3298 will delete it in reload_as_needed once we know that this
3299 elimination is, in fact, being done.
3301 If REPLACE isn't set, we can't delete this insn, but needn't
3302 process it since it won't be used unless something changes. */
3305 delete_dead_insn (insn
);
3313 /* We allow one special case which happens to work on all machines we
3314 currently support: a single set with the source or a REG_EQUAL
3315 note being a PLUS of an eliminable register and a constant. */
3316 plus_src
= plus_cst_src
= 0;
3317 if (old_set
&& REG_P (SET_DEST (old_set
)))
3319 if (GET_CODE (SET_SRC (old_set
)) == PLUS
)
3320 plus_src
= SET_SRC (old_set
);
3321 /* First see if the source is of the form (plus (...) CST). */
3323 && CONST_INT_P (XEXP (plus_src
, 1)))
3324 plus_cst_src
= plus_src
;
3325 else if (REG_P (SET_SRC (old_set
))
3328 /* Otherwise, see if we have a REG_EQUAL note of the form
3329 (plus (...) CST). */
3331 for (links
= REG_NOTES (insn
); links
; links
= XEXP (links
, 1))
3333 if ((REG_NOTE_KIND (links
) == REG_EQUAL
3334 || REG_NOTE_KIND (links
) == REG_EQUIV
)
3335 && GET_CODE (XEXP (links
, 0)) == PLUS
3336 && CONST_INT_P (XEXP (XEXP (links
, 0), 1)))
3338 plus_cst_src
= XEXP (links
, 0);
3344 /* Check that the first operand of the PLUS is a hard reg or
3345 the lowpart subreg of one. */
3348 rtx reg
= XEXP (plus_cst_src
, 0);
3349 if (GET_CODE (reg
) == SUBREG
&& subreg_lowpart_p (reg
))
3350 reg
= SUBREG_REG (reg
);
3352 if (!REG_P (reg
) || REGNO (reg
) >= FIRST_PSEUDO_REGISTER
)
3358 rtx reg
= XEXP (plus_cst_src
, 0);
3359 HOST_WIDE_INT offset
= INTVAL (XEXP (plus_cst_src
, 1));
3361 if (GET_CODE (reg
) == SUBREG
)
3362 reg
= SUBREG_REG (reg
);
3364 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3365 if (ep
->from_rtx
== reg
&& ep
->can_eliminate
)
3367 rtx to_rtx
= ep
->to_rtx
;
3368 offset
+= ep
->offset
;
3369 offset
= trunc_int_for_mode (offset
, GET_MODE (plus_cst_src
));
3371 if (GET_CODE (XEXP (plus_cst_src
, 0)) == SUBREG
)
3372 to_rtx
= gen_lowpart (GET_MODE (XEXP (plus_cst_src
, 0)),
3374 /* If we have a nonzero offset, and the source is already
3375 a simple REG, the following transformation would
3376 increase the cost of the insn by replacing a simple REG
3377 with (plus (reg sp) CST). So try only when we already
3378 had a PLUS before. */
3379 if (offset
== 0 || plus_src
)
3381 rtx new_src
= plus_constant (GET_MODE (to_rtx
),
3384 new_body
= old_body
;
3387 new_body
= copy_insn (old_body
);
3388 if (REG_NOTES (insn
))
3389 REG_NOTES (insn
) = copy_insn_1 (REG_NOTES (insn
));
3391 PATTERN (insn
) = new_body
;
3392 old_set
= single_set (insn
);
3394 /* First see if this insn remains valid when we make the
3395 change. If not, try to replace the whole pattern with
3396 a simple set (this may help if the original insn was a
3397 PARALLEL that was only recognized as single_set due to
3398 REG_UNUSED notes). If this isn't valid either, keep
3399 the INSN_CODE the same and let reload fix it up. */
3400 if (!validate_change (insn
, &SET_SRC (old_set
), new_src
, 0))
3402 rtx new_pat
= gen_rtx_SET (SET_DEST (old_set
), new_src
);
3404 if (!validate_change (insn
, &PATTERN (insn
), new_pat
, 0))
3405 SET_SRC (old_set
) = new_src
;
3412 /* This can't have an effect on elimination offsets, so skip right
3418 /* Determine the effects of this insn on elimination offsets. */
3419 elimination_effects (old_body
, VOIDmode
);
3421 /* Eliminate all eliminable registers occurring in operands that
3422 can be handled by reload. */
3423 extract_insn (insn
);
3424 for (i
= 0; i
< recog_data
.n_operands
; i
++)
3426 orig_operand
[i
] = recog_data
.operand
[i
];
3427 substed_operand
[i
] = recog_data
.operand
[i
];
3429 /* For an asm statement, every operand is eliminable. */
3430 if (insn_is_asm
|| insn_data
[icode
].operand
[i
].eliminable
)
3432 bool is_set_src
, in_plus
;
3434 /* Check for setting a register that we know about. */
3435 if (recog_data
.operand_type
[i
] != OP_IN
3436 && REG_P (orig_operand
[i
]))
3438 /* If we are assigning to a register that can be eliminated, it
3439 must be as part of a PARALLEL, since the code above handles
3440 single SETs. We must indicate that we can no longer
3441 eliminate this reg. */
3442 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
];
3444 if (ep
->from_rtx
== orig_operand
[i
])
3445 ep
->can_eliminate
= 0;
3448 /* Companion to the above plus substitution, we can allow
3449 invariants as the source of a plain move. */
3452 && recog_data
.operand_loc
[i
] == &SET_SRC (old_set
))
3456 && (recog_data
.operand_loc
[i
] == &XEXP (plus_src
, 0)
3457 || recog_data
.operand_loc
[i
] == &XEXP (plus_src
, 1)))
3461 = eliminate_regs_1 (recog_data
.operand
[i
], VOIDmode
,
3462 replace
? insn
: NULL_RTX
,
3463 is_set_src
|| in_plus
, false);
3464 if (substed_operand
[i
] != orig_operand
[i
])
3466 /* Terminate the search in check_eliminable_occurrences at
3468 *recog_data
.operand_loc
[i
] = 0;
3470 /* If an output operand changed from a REG to a MEM and INSN is an
3471 insn, write a CLOBBER insn. */
3472 if (recog_data
.operand_type
[i
] != OP_IN
3473 && REG_P (orig_operand
[i
])
3474 && MEM_P (substed_operand
[i
])
3476 emit_insn_after (gen_clobber (orig_operand
[i
]), insn
);
3480 for (i
= 0; i
< recog_data
.n_dups
; i
++)
3481 *recog_data
.dup_loc
[i
]
3482 = *recog_data
.operand_loc
[(int) recog_data
.dup_num
[i
]];
3484 /* If any eliminable remain, they aren't eliminable anymore. */
3485 check_eliminable_occurrences (old_body
);
3487 /* Substitute the operands; the new values are in the substed_operand
3489 for (i
= 0; i
< recog_data
.n_operands
; i
++)
3490 *recog_data
.operand_loc
[i
] = substed_operand
[i
];
3491 for (i
= 0; i
< recog_data
.n_dups
; i
++)
3492 *recog_data
.dup_loc
[i
] = substed_operand
[(int) recog_data
.dup_num
[i
]];
3494 /* If we are replacing a body that was a (set X (plus Y Z)), try to
3495 re-recognize the insn. We do this in case we had a simple addition
3496 but now can do this as a load-address. This saves an insn in this
3498 If re-recognition fails, the old insn code number will still be used,
3499 and some register operands may have changed into PLUS expressions.
3500 These will be handled by find_reloads by loading them into a register
3505 /* If we aren't replacing things permanently and we changed something,
3506 make another copy to ensure that all the RTL is new. Otherwise
3507 things can go wrong if find_reload swaps commutative operands
3508 and one is inside RTL that has been copied while the other is not. */
3509 new_body
= old_body
;
3512 new_body
= copy_insn (old_body
);
3513 if (REG_NOTES (insn
))
3514 REG_NOTES (insn
) = copy_insn_1 (REG_NOTES (insn
));
3516 PATTERN (insn
) = new_body
;
3518 /* If we had a move insn but now we don't, rerecognize it. This will
3519 cause spurious re-recognition if the old move had a PARALLEL since
3520 the new one still will, but we can't call single_set without
3521 having put NEW_BODY into the insn and the re-recognition won't
3522 hurt in this rare case. */
3523 /* ??? Why this huge if statement - why don't we just rerecognize the
3527 && ((REG_P (SET_SRC (old_set
))
3528 && (GET_CODE (new_body
) != SET
3529 || !REG_P (SET_SRC (new_body
))))
3530 /* If this was a load from or store to memory, compare
3531 the MEM in recog_data.operand to the one in the insn.
3532 If they are not equal, then rerecognize the insn. */
3534 && ((MEM_P (SET_SRC (old_set
))
3535 && SET_SRC (old_set
) != recog_data
.operand
[1])
3536 || (MEM_P (SET_DEST (old_set
))
3537 && SET_DEST (old_set
) != recog_data
.operand
[0])))
3538 /* If this was an add insn before, rerecognize. */
3539 || GET_CODE (SET_SRC (old_set
)) == PLUS
))
3541 int new_icode
= recog (PATTERN (insn
), insn
, 0);
3543 INSN_CODE (insn
) = new_icode
;
3547 /* Restore the old body. If there were any changes to it, we made a copy
3548 of it while the changes were still in place, so we'll correctly return
3549 a modified insn below. */
3552 /* Restore the old body. */
3553 for (i
= 0; i
< recog_data
.n_operands
; i
++)
3554 /* Restoring a top-level match_parallel would clobber the new_body
3555 we installed in the insn. */
3556 if (recog_data
.operand_loc
[i
] != &PATTERN (insn
))
3557 *recog_data
.operand_loc
[i
] = orig_operand
[i
];
3558 for (i
= 0; i
< recog_data
.n_dups
; i
++)
3559 *recog_data
.dup_loc
[i
] = orig_operand
[(int) recog_data
.dup_num
[i
]];
3562 /* Update all elimination pairs to reflect the status after the current
3563 insn. The changes we make were determined by the earlier call to
3564 elimination_effects.
3566 We also detect cases where register elimination cannot be done,
3567 namely, if a register would be both changed and referenced outside a MEM
3568 in the resulting insn since such an insn is often undefined and, even if
3569 not, we cannot know what meaning will be given to it. Note that it is
3570 valid to have a register used in an address in an insn that changes it
3571 (presumably with a pre- or post-increment or decrement).
3573 If anything changes, return nonzero. */
3575 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3577 if (ep
->previous_offset
!= ep
->offset
&& ep
->ref_outside_mem
)
3578 ep
->can_eliminate
= 0;
3580 ep
->ref_outside_mem
= 0;
3582 if (ep
->previous_offset
!= ep
->offset
)
3587 /* If we changed something, perform elimination in REG_NOTES. This is
3588 needed even when REPLACE is zero because a REG_DEAD note might refer
3589 to a register that we eliminate and could cause a different number
3590 of spill registers to be needed in the final reload pass than in
3592 if (val
&& REG_NOTES (insn
) != 0)
3594 = eliminate_regs_1 (REG_NOTES (insn
), VOIDmode
, REG_NOTES (insn
), true,
3600 /* Like eliminate_regs_in_insn, but only estimate costs for the use of the
3601 register allocator. INSN is the instruction we need to examine, we perform
3602 eliminations in its operands and record cases where eliminating a reg with
3603 an invariant equivalence would add extra cost. */
3605 #pragma GCC diagnostic push
3606 #pragma GCC diagnostic warning "-Wmaybe-uninitialized"
3608 elimination_costs_in_insn (rtx_insn
*insn
)
3610 int icode
= recog_memoized (insn
);
3611 rtx old_body
= PATTERN (insn
);
3612 int insn_is_asm
= asm_noperands (old_body
) >= 0;
3613 rtx old_set
= single_set (insn
);
3615 rtx orig_operand
[MAX_RECOG_OPERANDS
];
3616 rtx orig_dup
[MAX_RECOG_OPERANDS
];
3617 struct elim_table
*ep
;
3618 rtx plus_src
, plus_cst_src
;
3621 if (! insn_is_asm
&& icode
< 0)
3623 gcc_assert (DEBUG_INSN_P (insn
)
3624 || GET_CODE (PATTERN (insn
)) == USE
3625 || GET_CODE (PATTERN (insn
)) == CLOBBER
3626 || GET_CODE (PATTERN (insn
)) == ASM_INPUT
);
3630 if (old_set
!= 0 && REG_P (SET_DEST (old_set
))
3631 && REGNO (SET_DEST (old_set
)) < FIRST_PSEUDO_REGISTER
)
3633 /* Check for setting an eliminable register. */
3634 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3635 if (ep
->from_rtx
== SET_DEST (old_set
) && ep
->can_eliminate
)
3639 /* We allow one special case which happens to work on all machines we
3640 currently support: a single set with the source or a REG_EQUAL
3641 note being a PLUS of an eliminable register and a constant. */
3642 plus_src
= plus_cst_src
= 0;
3644 if (old_set
&& REG_P (SET_DEST (old_set
)))
3647 if (GET_CODE (SET_SRC (old_set
)) == PLUS
)
3648 plus_src
= SET_SRC (old_set
);
3649 /* First see if the source is of the form (plus (...) CST). */
3651 && CONST_INT_P (XEXP (plus_src
, 1)))
3652 plus_cst_src
= plus_src
;
3653 else if (REG_P (SET_SRC (old_set
))
3656 /* Otherwise, see if we have a REG_EQUAL note of the form
3657 (plus (...) CST). */
3659 for (links
= REG_NOTES (insn
); links
; links
= XEXP (links
, 1))
3661 if ((REG_NOTE_KIND (links
) == REG_EQUAL
3662 || REG_NOTE_KIND (links
) == REG_EQUIV
)
3663 && GET_CODE (XEXP (links
, 0)) == PLUS
3664 && CONST_INT_P (XEXP (XEXP (links
, 0), 1)))
3666 plus_cst_src
= XEXP (links
, 0);
3673 /* Determine the effects of this insn on elimination offsets. */
3674 elimination_effects (old_body
, VOIDmode
);
3676 /* Eliminate all eliminable registers occurring in operands that
3677 can be handled by reload. */
3678 extract_insn (insn
);
3679 int n_dups
= recog_data
.n_dups
;
3680 for (i
= 0; i
< n_dups
; i
++)
3681 orig_dup
[i
] = *recog_data
.dup_loc
[i
];
3683 int n_operands
= recog_data
.n_operands
;
3684 for (i
= 0; i
< n_operands
; i
++)
3686 orig_operand
[i
] = recog_data
.operand
[i
];
3688 /* For an asm statement, every operand is eliminable. */
3689 if (insn_is_asm
|| insn_data
[icode
].operand
[i
].eliminable
)
3691 bool is_set_src
, in_plus
;
3693 /* Check for setting a register that we know about. */
3694 if (recog_data
.operand_type
[i
] != OP_IN
3695 && REG_P (orig_operand
[i
]))
3697 /* If we are assigning to a register that can be eliminated, it
3698 must be as part of a PARALLEL, since the code above handles
3699 single SETs. We must indicate that we can no longer
3700 eliminate this reg. */
3701 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
];
3703 if (ep
->from_rtx
== orig_operand
[i
])
3704 ep
->can_eliminate
= 0;
3707 /* Companion to the above plus substitution, we can allow
3708 invariants as the source of a plain move. */
3710 if (old_set
&& recog_data
.operand_loc
[i
] == &SET_SRC (old_set
))
3712 if (is_set_src
&& !sets_reg_p
)
3713 note_reg_elim_costly (SET_SRC (old_set
), insn
);
3715 if (plus_src
&& sets_reg_p
3716 && (recog_data
.operand_loc
[i
] == &XEXP (plus_src
, 0)
3717 || recog_data
.operand_loc
[i
] == &XEXP (plus_src
, 1)))
3720 eliminate_regs_1 (recog_data
.operand
[i
], VOIDmode
,
3722 is_set_src
|| in_plus
, true);
3723 /* Terminate the search in check_eliminable_occurrences at
3725 *recog_data
.operand_loc
[i
] = 0;
3729 for (i
= 0; i
< n_dups
; i
++)
3730 *recog_data
.dup_loc
[i
]
3731 = *recog_data
.operand_loc
[(int) recog_data
.dup_num
[i
]];
3733 /* If any eliminable remain, they aren't eliminable anymore. */
3734 check_eliminable_occurrences (old_body
);
3736 /* Restore the old body. */
3737 for (i
= 0; i
< n_operands
; i
++)
3738 *recog_data
.operand_loc
[i
] = orig_operand
[i
];
3739 for (i
= 0; i
< n_dups
; i
++)
3740 *recog_data
.dup_loc
[i
] = orig_dup
[i
];
3742 /* Update all elimination pairs to reflect the status after the current
3743 insn. The changes we make were determined by the earlier call to
3744 elimination_effects. */
3746 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3748 if (ep
->previous_offset
!= ep
->offset
&& ep
->ref_outside_mem
)
3749 ep
->can_eliminate
= 0;
3751 ep
->ref_outside_mem
= 0;
3756 #pragma GCC diagnostic pop
3758 /* Loop through all elimination pairs.
3759 Recalculate the number not at initial offset.
3761 Compute the maximum offset (minimum offset if the stack does not
3762 grow downward) for each elimination pair. */
3765 update_eliminable_offsets (void)
3767 struct elim_table
*ep
;
3769 num_not_at_initial_offset
= 0;
3770 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3772 ep
->previous_offset
= ep
->offset
;
3773 if (ep
->can_eliminate
&& ep
->offset
!= ep
->initial_offset
)
3774 num_not_at_initial_offset
++;
3778 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3779 replacement we currently believe is valid, mark it as not eliminable if X
3780 modifies DEST in any way other than by adding a constant integer to it.
3782 If DEST is the frame pointer, we do nothing because we assume that
3783 all assignments to the hard frame pointer are nonlocal gotos and are being
3784 done at a time when they are valid and do not disturb anything else.
3785 Some machines want to eliminate a fake argument pointer with either the
3786 frame or stack pointer. Assignments to the hard frame pointer must not
3787 prevent this elimination.
3789 Called via note_stores from reload before starting its passes to scan
3790 the insns of the function. */
3793 mark_not_eliminable (rtx dest
, const_rtx x
, void *data ATTRIBUTE_UNUSED
)
3797 /* A SUBREG of a hard register here is just changing its mode. We should
3798 not see a SUBREG of an eliminable hard register, but check just in
3800 if (GET_CODE (dest
) == SUBREG
)
3801 dest
= SUBREG_REG (dest
);
3803 if (dest
== hard_frame_pointer_rtx
)
3806 for (i
= 0; i
< NUM_ELIMINABLE_REGS
; i
++)
3807 if (reg_eliminate
[i
].can_eliminate
&& dest
== reg_eliminate
[i
].to_rtx
3808 && (GET_CODE (x
) != SET
3809 || GET_CODE (SET_SRC (x
)) != PLUS
3810 || XEXP (SET_SRC (x
), 0) != dest
3811 || !CONST_INT_P (XEXP (SET_SRC (x
), 1))))
3813 reg_eliminate
[i
].can_eliminate_previous
3814 = reg_eliminate
[i
].can_eliminate
= 0;
3819 /* Verify that the initial elimination offsets did not change since the
3820 last call to set_initial_elim_offsets. This is used to catch cases
3821 where something illegal happened during reload_as_needed that could
3822 cause incorrect code to be generated if we did not check for it. */
3825 verify_initial_elim_offsets (void)
3829 if (!num_eliminable
)
3832 #ifdef ELIMINABLE_REGS
3834 struct elim_table
*ep
;
3836 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3838 INITIAL_ELIMINATION_OFFSET (ep
->from
, ep
->to
, t
);
3839 if (t
!= ep
->initial_offset
)
3844 INITIAL_FRAME_POINTER_OFFSET (t
);
3845 if (t
!= reg_eliminate
[0].initial_offset
)
3852 /* Reset all offsets on eliminable registers to their initial values. */
3855 set_initial_elim_offsets (void)
3857 struct elim_table
*ep
= reg_eliminate
;
3859 #ifdef ELIMINABLE_REGS
3860 for (; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3862 INITIAL_ELIMINATION_OFFSET (ep
->from
, ep
->to
, ep
->initial_offset
);
3863 ep
->previous_offset
= ep
->offset
= ep
->initial_offset
;
3866 INITIAL_FRAME_POINTER_OFFSET (ep
->initial_offset
);
3867 ep
->previous_offset
= ep
->offset
= ep
->initial_offset
;
3870 num_not_at_initial_offset
= 0;
3873 /* Subroutine of set_initial_label_offsets called via for_each_eh_label. */
3876 set_initial_eh_label_offset (rtx label
)
3878 set_label_offsets (label
, NULL
, 1);
3881 /* Initialize the known label offsets.
3882 Set a known offset for each forced label to be at the initial offset
3883 of each elimination. We do this because we assume that all
3884 computed jumps occur from a location where each elimination is
3885 at its initial offset.
3886 For all other labels, show that we don't know the offsets. */
3889 set_initial_label_offsets (void)
3891 memset (offsets_known_at
, 0, num_labels
);
3893 for (rtx_insn_list
*x
= forced_labels
; x
; x
= x
->next ())
3895 set_label_offsets (x
->insn (), NULL
, 1);
3897 for (rtx_insn_list
*x
= nonlocal_goto_handler_labels
; x
; x
= x
->next ())
3899 set_label_offsets (x
->insn (), NULL
, 1);
3901 for_each_eh_label (set_initial_eh_label_offset
);
3904 /* Set all elimination offsets to the known values for the code label given
3908 set_offsets_for_label (rtx_insn
*insn
)
3911 int label_nr
= CODE_LABEL_NUMBER (insn
);
3912 struct elim_table
*ep
;
3914 num_not_at_initial_offset
= 0;
3915 for (i
= 0, ep
= reg_eliminate
; i
< NUM_ELIMINABLE_REGS
; ep
++, i
++)
3917 ep
->offset
= ep
->previous_offset
3918 = offsets_at
[label_nr
- first_label_num
][i
];
3919 if (ep
->can_eliminate
&& ep
->offset
!= ep
->initial_offset
)
3920 num_not_at_initial_offset
++;
3924 /* See if anything that happened changes which eliminations are valid.
3925 For example, on the SPARC, whether or not the frame pointer can
3926 be eliminated can depend on what registers have been used. We need
3927 not check some conditions again (such as flag_omit_frame_pointer)
3928 since they can't have changed. */
3931 update_eliminables (HARD_REG_SET
*pset
)
3933 int previous_frame_pointer_needed
= frame_pointer_needed
;
3934 struct elim_table
*ep
;
3936 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3937 if ((ep
->from
== HARD_FRAME_POINTER_REGNUM
3938 && targetm
.frame_pointer_required ())
3939 #ifdef ELIMINABLE_REGS
3940 || ! targetm
.can_eliminate (ep
->from
, ep
->to
)
3943 ep
->can_eliminate
= 0;
3945 /* Look for the case where we have discovered that we can't replace
3946 register A with register B and that means that we will now be
3947 trying to replace register A with register C. This means we can
3948 no longer replace register C with register B and we need to disable
3949 such an elimination, if it exists. This occurs often with A == ap,
3950 B == sp, and C == fp. */
3952 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3954 struct elim_table
*op
;
3957 if (! ep
->can_eliminate
&& ep
->can_eliminate_previous
)
3959 /* Find the current elimination for ep->from, if there is a
3961 for (op
= reg_eliminate
;
3962 op
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; op
++)
3963 if (op
->from
== ep
->from
&& op
->can_eliminate
)
3969 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
3971 for (op
= reg_eliminate
;
3972 op
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; op
++)
3973 if (op
->from
== new_to
&& op
->to
== ep
->to
)
3974 op
->can_eliminate
= 0;
3978 /* See if any registers that we thought we could eliminate the previous
3979 time are no longer eliminable. If so, something has changed and we
3980 must spill the register. Also, recompute the number of eliminable
3981 registers and see if the frame pointer is needed; it is if there is
3982 no elimination of the frame pointer that we can perform. */
3984 frame_pointer_needed
= 1;
3985 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3987 if (ep
->can_eliminate
3988 && ep
->from
== FRAME_POINTER_REGNUM
3989 && ep
->to
!= HARD_FRAME_POINTER_REGNUM
3990 && (! SUPPORTS_STACK_ALIGNMENT
3991 || ! crtl
->stack_realign_needed
))
3992 frame_pointer_needed
= 0;
3994 if (! ep
->can_eliminate
&& ep
->can_eliminate_previous
)
3996 ep
->can_eliminate_previous
= 0;
3997 SET_HARD_REG_BIT (*pset
, ep
->from
);
4002 /* If we didn't need a frame pointer last time, but we do now, spill
4003 the hard frame pointer. */
4004 if (frame_pointer_needed
&& ! previous_frame_pointer_needed
)
4005 SET_HARD_REG_BIT (*pset
, HARD_FRAME_POINTER_REGNUM
);
4008 /* Call update_eliminables and spill any registers we can't eliminate anymore.
4009 Return true iff a register was spilled. */
4012 update_eliminables_and_spill (void)
4015 bool did_spill
= false;
4016 HARD_REG_SET to_spill
;
4017 CLEAR_HARD_REG_SET (to_spill
);
4018 update_eliminables (&to_spill
);
4019 AND_COMPL_HARD_REG_SET (used_spill_regs
, to_spill
);
4021 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
4022 if (TEST_HARD_REG_BIT (to_spill
, i
))
4024 spill_hard_reg (i
, 1);
4027 /* Regardless of the state of spills, if we previously had
4028 a register that we thought we could eliminate, but now can
4029 not eliminate, we must run another pass.
4031 Consider pseudos which have an entry in reg_equiv_* which
4032 reference an eliminable register. We must make another pass
4033 to update reg_equiv_* so that we do not substitute in the
4034 old value from when we thought the elimination could be
4040 /* Return true if X is used as the target register of an elimination. */
4043 elimination_target_reg_p (rtx x
)
4045 struct elim_table
*ep
;
4047 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
4048 if (ep
->to_rtx
== x
&& ep
->can_eliminate
)
4054 /* Initialize the table of registers to eliminate.
4055 Pre-condition: global flag frame_pointer_needed has been set before
4056 calling this function. */
4059 init_elim_table (void)
4061 struct elim_table
*ep
;
4062 #ifdef ELIMINABLE_REGS
4063 const struct elim_table_1
*ep1
;
4067 reg_eliminate
= XCNEWVEC (struct elim_table
, NUM_ELIMINABLE_REGS
);
4071 #ifdef ELIMINABLE_REGS
4072 for (ep
= reg_eliminate
, ep1
= reg_eliminate_1
;
4073 ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++, ep1
++)
4075 ep
->from
= ep1
->from
;
4077 ep
->can_eliminate
= ep
->can_eliminate_previous
4078 = (targetm
.can_eliminate (ep
->from
, ep
->to
)
4079 && ! (ep
->to
== STACK_POINTER_REGNUM
4080 && frame_pointer_needed
4081 && (! SUPPORTS_STACK_ALIGNMENT
4082 || ! stack_realign_fp
)));
4085 reg_eliminate
[0].from
= reg_eliminate_1
[0].from
;
4086 reg_eliminate
[0].to
= reg_eliminate_1
[0].to
;
4087 reg_eliminate
[0].can_eliminate
= reg_eliminate
[0].can_eliminate_previous
4088 = ! frame_pointer_needed
;
4091 /* Count the number of eliminable registers and build the FROM and TO
4092 REG rtx's. Note that code in gen_rtx_REG will cause, e.g.,
4093 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
4094 We depend on this. */
4095 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
4097 num_eliminable
+= ep
->can_eliminate
;
4098 ep
->from_rtx
= gen_rtx_REG (Pmode
, ep
->from
);
4099 ep
->to_rtx
= gen_rtx_REG (Pmode
, ep
->to
);
4103 /* Find all the pseudo registers that didn't get hard regs
4104 but do have known equivalent constants or memory slots.
4105 These include parameters (known equivalent to parameter slots)
4106 and cse'd or loop-moved constant memory addresses.
4108 Record constant equivalents in reg_equiv_constant
4109 so they will be substituted by find_reloads.
4110 Record memory equivalents in reg_mem_equiv so they can
4111 be substituted eventually by altering the REG-rtx's. */
4114 init_eliminable_invariants (rtx_insn
*first
, bool do_subregs
)
4121 reg_max_ref_width
= XCNEWVEC (unsigned int, max_regno
);
4123 reg_max_ref_width
= NULL
;
4125 num_eliminable_invariants
= 0;
4127 first_label_num
= get_first_label_num ();
4128 num_labels
= max_label_num () - first_label_num
;
4130 /* Allocate the tables used to store offset information at labels. */
4131 offsets_known_at
= XNEWVEC (char, num_labels
);
4132 offsets_at
= (HOST_WIDE_INT (*)[NUM_ELIMINABLE_REGS
]) xmalloc (num_labels
* NUM_ELIMINABLE_REGS
* sizeof (HOST_WIDE_INT
));
4134 /* Look for REG_EQUIV notes; record what each pseudo is equivalent
4135 to. If DO_SUBREGS is true, also find all paradoxical subregs and
4136 find largest such for each pseudo. FIRST is the head of the insn
4139 for (insn
= first
; insn
; insn
= NEXT_INSN (insn
))
4141 rtx set
= single_set (insn
);
4143 /* We may introduce USEs that we want to remove at the end, so
4144 we'll mark them with QImode. Make sure there are no
4145 previously-marked insns left by say regmove. */
4146 if (INSN_P (insn
) && GET_CODE (PATTERN (insn
)) == USE
4147 && GET_MODE (insn
) != VOIDmode
)
4148 PUT_MODE (insn
, VOIDmode
);
4150 if (do_subregs
&& NONDEBUG_INSN_P (insn
))
4151 scan_paradoxical_subregs (PATTERN (insn
));
4153 if (set
!= 0 && REG_P (SET_DEST (set
)))
4155 rtx note
= find_reg_note (insn
, REG_EQUIV
, NULL_RTX
);
4161 i
= REGNO (SET_DEST (set
));
4164 if (i
<= LAST_VIRTUAL_REGISTER
)
4167 /* If flag_pic and we have constant, verify it's legitimate. */
4169 || !flag_pic
|| LEGITIMATE_PIC_OPERAND_P (x
))
4171 /* It can happen that a REG_EQUIV note contains a MEM
4172 that is not a legitimate memory operand. As later
4173 stages of reload assume that all addresses found
4174 in the reg_equiv_* arrays were originally legitimate,
4175 we ignore such REG_EQUIV notes. */
4176 if (memory_operand (x
, VOIDmode
))
4178 /* Always unshare the equivalence, so we can
4179 substitute into this insn without touching the
4181 reg_equiv_memory_loc (i
) = copy_rtx (x
);
4183 else if (function_invariant_p (x
))
4187 mode
= GET_MODE (SET_DEST (set
));
4188 if (GET_CODE (x
) == PLUS
)
4190 /* This is PLUS of frame pointer and a constant,
4191 and might be shared. Unshare it. */
4192 reg_equiv_invariant (i
) = copy_rtx (x
);
4193 num_eliminable_invariants
++;
4195 else if (x
== frame_pointer_rtx
|| x
== arg_pointer_rtx
)
4197 reg_equiv_invariant (i
) = x
;
4198 num_eliminable_invariants
++;
4200 else if (targetm
.legitimate_constant_p (mode
, x
))
4201 reg_equiv_constant (i
) = x
;
4204 reg_equiv_memory_loc (i
) = force_const_mem (mode
, x
);
4205 if (! reg_equiv_memory_loc (i
))
4206 reg_equiv_init (i
) = NULL
;
4211 reg_equiv_init (i
) = NULL
;
4216 reg_equiv_init (i
) = NULL
;
4221 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
4222 if (reg_equiv_init (i
))
4224 fprintf (dump_file
, "init_insns for %u: ", i
);
4225 print_inline_rtx (dump_file
, reg_equiv_init (i
), 20);
4226 fprintf (dump_file
, "\n");
4230 /* Indicate that we no longer have known memory locations or constants.
4231 Free all data involved in tracking these. */
4234 free_reg_equiv (void)
4238 free (offsets_known_at
);
4241 offsets_known_at
= 0;
4243 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
4244 if (reg_equiv_alt_mem_list (i
))
4245 free_EXPR_LIST_list (®_equiv_alt_mem_list (i
));
4246 vec_free (reg_equivs
);
4249 /* Kick all pseudos out of hard register REGNO.
4251 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
4252 because we found we can't eliminate some register. In the case, no pseudos
4253 are allowed to be in the register, even if they are only in a block that
4254 doesn't require spill registers, unlike the case when we are spilling this
4255 hard reg to produce another spill register.
4257 Return nonzero if any pseudos needed to be kicked out. */
4260 spill_hard_reg (unsigned int regno
, int cant_eliminate
)
4266 SET_HARD_REG_BIT (bad_spill_regs_global
, regno
);
4267 df_set_regs_ever_live (regno
, true);
4270 /* Spill every pseudo reg that was allocated to this reg
4271 or to something that overlaps this reg. */
4273 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
4274 if (reg_renumber
[i
] >= 0
4275 && (unsigned int) reg_renumber
[i
] <= regno
4276 && end_hard_regno (PSEUDO_REGNO_MODE (i
), reg_renumber
[i
]) > regno
)
4277 SET_REGNO_REG_SET (&spilled_pseudos
, i
);
4280 /* After spill_hard_reg was called and/or find_reload_regs was run for all
4281 insns that need reloads, this function is used to actually spill pseudo
4282 registers and try to reallocate them. It also sets up the spill_regs
4283 array for use by choose_reload_regs.
4285 GLOBAL nonzero means we should attempt to reallocate any pseudo registers
4286 that we displace from hard registers. */
4289 finish_spills (int global
)
4291 struct insn_chain
*chain
;
4292 int something_changed
= 0;
4294 reg_set_iterator rsi
;
4296 /* Build the spill_regs array for the function. */
4297 /* If there are some registers still to eliminate and one of the spill regs
4298 wasn't ever used before, additional stack space may have to be
4299 allocated to store this register. Thus, we may have changed the offset
4300 between the stack and frame pointers, so mark that something has changed.
4302 One might think that we need only set VAL to 1 if this is a call-used
4303 register. However, the set of registers that must be saved by the
4304 prologue is not identical to the call-used set. For example, the
4305 register used by the call insn for the return PC is a call-used register,
4306 but must be saved by the prologue. */
4309 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
4310 if (TEST_HARD_REG_BIT (used_spill_regs
, i
))
4312 spill_reg_order
[i
] = n_spills
;
4313 spill_regs
[n_spills
++] = i
;
4314 if (num_eliminable
&& ! df_regs_ever_live_p (i
))
4315 something_changed
= 1;
4316 df_set_regs_ever_live (i
, true);
4319 spill_reg_order
[i
] = -1;
4321 EXECUTE_IF_SET_IN_REG_SET (&spilled_pseudos
, FIRST_PSEUDO_REGISTER
, i
, rsi
)
4322 if (! ira_conflicts_p
|| reg_renumber
[i
] >= 0)
4324 /* Record the current hard register the pseudo is allocated to
4325 in pseudo_previous_regs so we avoid reallocating it to the
4326 same hard reg in a later pass. */
4327 gcc_assert (reg_renumber
[i
] >= 0);
4329 SET_HARD_REG_BIT (pseudo_previous_regs
[i
], reg_renumber
[i
]);
4330 /* Mark it as no longer having a hard register home. */
4331 reg_renumber
[i
] = -1;
4332 if (ira_conflicts_p
)
4333 /* Inform IRA about the change. */
4334 ira_mark_allocation_change (i
);
4335 /* We will need to scan everything again. */
4336 something_changed
= 1;
4339 /* Retry global register allocation if possible. */
4340 if (global
&& ira_conflicts_p
)
4344 memset (pseudo_forbidden_regs
, 0, max_regno
* sizeof (HARD_REG_SET
));
4345 /* For every insn that needs reloads, set the registers used as spill
4346 regs in pseudo_forbidden_regs for every pseudo live across the
4348 for (chain
= insns_need_reload
; chain
; chain
= chain
->next_need_reload
)
4350 EXECUTE_IF_SET_IN_REG_SET
4351 (&chain
->live_throughout
, FIRST_PSEUDO_REGISTER
, i
, rsi
)
4353 IOR_HARD_REG_SET (pseudo_forbidden_regs
[i
],
4354 chain
->used_spill_regs
);
4356 EXECUTE_IF_SET_IN_REG_SET
4357 (&chain
->dead_or_set
, FIRST_PSEUDO_REGISTER
, i
, rsi
)
4359 IOR_HARD_REG_SET (pseudo_forbidden_regs
[i
],
4360 chain
->used_spill_regs
);
4364 /* Retry allocating the pseudos spilled in IRA and the
4365 reload. For each reg, merge the various reg sets that
4366 indicate which hard regs can't be used, and call
4367 ira_reassign_pseudos. */
4368 for (n
= 0, i
= FIRST_PSEUDO_REGISTER
; i
< (unsigned) max_regno
; i
++)
4369 if (reg_old_renumber
[i
] != reg_renumber
[i
])
4371 if (reg_renumber
[i
] < 0)
4372 temp_pseudo_reg_arr
[n
++] = i
;
4374 CLEAR_REGNO_REG_SET (&spilled_pseudos
, i
);
4376 if (ira_reassign_pseudos (temp_pseudo_reg_arr
, n
,
4377 bad_spill_regs_global
,
4378 pseudo_forbidden_regs
, pseudo_previous_regs
,
4380 something_changed
= 1;
4382 /* Fix up the register information in the insn chain.
4383 This involves deleting those of the spilled pseudos which did not get
4384 a new hard register home from the live_{before,after} sets. */
4385 for (chain
= reload_insn_chain
; chain
; chain
= chain
->next
)
4387 HARD_REG_SET used_by_pseudos
;
4388 HARD_REG_SET used_by_pseudos2
;
4390 if (! ira_conflicts_p
)
4392 /* Don't do it for IRA because IRA and the reload still can
4393 assign hard registers to the spilled pseudos on next
4394 reload iterations. */
4395 AND_COMPL_REG_SET (&chain
->live_throughout
, &spilled_pseudos
);
4396 AND_COMPL_REG_SET (&chain
->dead_or_set
, &spilled_pseudos
);
4398 /* Mark any unallocated hard regs as available for spills. That
4399 makes inheritance work somewhat better. */
4400 if (chain
->need_reload
)
4402 REG_SET_TO_HARD_REG_SET (used_by_pseudos
, &chain
->live_throughout
);
4403 REG_SET_TO_HARD_REG_SET (used_by_pseudos2
, &chain
->dead_or_set
);
4404 IOR_HARD_REG_SET (used_by_pseudos
, used_by_pseudos2
);
4406 compute_use_by_pseudos (&used_by_pseudos
, &chain
->live_throughout
);
4407 compute_use_by_pseudos (&used_by_pseudos
, &chain
->dead_or_set
);
4408 /* Value of chain->used_spill_regs from previous iteration
4409 may be not included in the value calculated here because
4410 of possible removing caller-saves insns (see function
4411 delete_caller_save_insns. */
4412 COMPL_HARD_REG_SET (chain
->used_spill_regs
, used_by_pseudos
);
4413 AND_HARD_REG_SET (chain
->used_spill_regs
, used_spill_regs
);
4417 CLEAR_REG_SET (&changed_allocation_pseudos
);
4418 /* Let alter_reg modify the reg rtx's for the modified pseudos. */
4419 for (i
= FIRST_PSEUDO_REGISTER
; i
< (unsigned)max_regno
; i
++)
4421 int regno
= reg_renumber
[i
];
4422 if (reg_old_renumber
[i
] == regno
)
4425 SET_REGNO_REG_SET (&changed_allocation_pseudos
, i
);
4427 alter_reg (i
, reg_old_renumber
[i
], false);
4428 reg_old_renumber
[i
] = regno
;
4432 fprintf (dump_file
, " Register %d now on stack.\n\n", i
);
4434 fprintf (dump_file
, " Register %d now in %d.\n\n",
4435 i
, reg_renumber
[i
]);
4439 return something_changed
;
4442 /* Find all paradoxical subregs within X and update reg_max_ref_width. */
4445 scan_paradoxical_subregs (rtx x
)
4449 enum rtx_code code
= GET_CODE (x
);
4465 if (REG_P (SUBREG_REG (x
))
4466 && (GET_MODE_SIZE (GET_MODE (x
))
4467 > reg_max_ref_width
[REGNO (SUBREG_REG (x
))]))
4469 reg_max_ref_width
[REGNO (SUBREG_REG (x
))]
4470 = GET_MODE_SIZE (GET_MODE (x
));
4471 mark_home_live_1 (REGNO (SUBREG_REG (x
)), GET_MODE (x
));
4479 fmt
= GET_RTX_FORMAT (code
);
4480 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
4483 scan_paradoxical_subregs (XEXP (x
, i
));
4484 else if (fmt
[i
] == 'E')
4487 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
4488 scan_paradoxical_subregs (XVECEXP (x
, i
, j
));
4493 /* *OP_PTR and *OTHER_PTR are two operands to a conceptual reload.
4494 If *OP_PTR is a paradoxical subreg, try to remove that subreg
4495 and apply the corresponding narrowing subreg to *OTHER_PTR.
4496 Return true if the operands were changed, false otherwise. */
4499 strip_paradoxical_subreg (rtx
*op_ptr
, rtx
*other_ptr
)
4501 rtx op
, inner
, other
, tem
;
4504 if (!paradoxical_subreg_p (op
))
4506 inner
= SUBREG_REG (op
);
4509 tem
= gen_lowpart_common (GET_MODE (inner
), other
);
4513 /* If the lowpart operation turned a hard register into a subreg,
4514 rather than simplifying it to another hard register, then the
4515 mode change cannot be properly represented. For example, OTHER
4516 might be valid in its current mode, but not in the new one. */
4517 if (GET_CODE (tem
) == SUBREG
4519 && HARD_REGISTER_P (other
))
4527 /* A subroutine of reload_as_needed. If INSN has a REG_EH_REGION note,
4528 examine all of the reload insns between PREV and NEXT exclusive, and
4529 annotate all that may trap. */
4532 fixup_eh_region_note (rtx_insn
*insn
, rtx_insn
*prev
, rtx_insn
*next
)
4534 rtx note
= find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
);
4537 if (!insn_could_throw_p (insn
))
4538 remove_note (insn
, note
);
4539 copy_reg_eh_region_note_forward (note
, NEXT_INSN (prev
), next
);
4542 /* Reload pseudo-registers into hard regs around each insn as needed.
4543 Additional register load insns are output before the insn that needs it
4544 and perhaps store insns after insns that modify the reloaded pseudo reg.
4546 reg_last_reload_reg and reg_reloaded_contents keep track of
4547 which registers are already available in reload registers.
4548 We update these for the reloads that we perform,
4549 as the insns are scanned. */
/* NOTE(review): this chunk is a partial numbered listing -- each line
   begins with its original source line number, and gaps in that numbering
   mark dropped lines (blank lines, braces and some statements).  The "®"
   character below is an HTML-entity mojibake for "&reg"; e.g.
   "®s_to_forget" should read "&regs_to_forget" and
   "®_has_output_reload" should read "&reg_has_output_reload".
   The code text is otherwise left byte-identical.  */
4552 reload_as_needed (int live_known
)
4554 struct insn_chain
*chain
;
/* Reset all per-function inheritance bookkeeping before walking the
   insn chain.  */
4560 memset (spill_reg_rtx
, 0, sizeof spill_reg_rtx
);
4561 memset (spill_reg_store
, 0, sizeof spill_reg_store
);
4562 reg_last_reload_reg
= XCNEWVEC (rtx
, max_regno
);
4563 INIT_REG_SET (®_has_output_reload
);
4564 CLEAR_HARD_REG_SET (reg_reloaded_valid
);
4565 CLEAR_HARD_REG_SET (reg_reloaded_call_part_clobbered
);
4567 set_initial_elim_offsets ();
4569 /* Generate a marker insn that we will move around. */
4570 marker
= emit_note (NOTE_INSN_DELETED
);
4571 unlink_insn_chain (marker
, marker
);
/* Walk every insn recorded in the reload chain.  */
4573 for (chain
= reload_insn_chain
; chain
; chain
= chain
->next
)
4576 rtx_insn
*insn
= chain
->insn
;
4577 rtx_insn
*old_next
= NEXT_INSN (insn
);
4579 rtx_insn
*old_prev
= PREV_INSN (insn
);
4582 if (will_delete_init_insn_p (insn
))
4585 /* If we pass a label, copy the offsets from the label information
4586 into the current offsets of each elimination. */
4588 set_offsets_for_label (insn
);
4590 else if (INSN_P (insn
))
4592 regset_head regs_to_forget
;
4593 INIT_REG_SET (®s_to_forget
);
4594 note_stores (PATTERN (insn
), forget_old_reloads_1
, ®s_to_forget
);
4596 /* If this is a USE and CLOBBER of a MEM, ensure that any
4597 references to eliminable registers have been removed. */
4599 if ((GET_CODE (PATTERN (insn
)) == USE
4600 || GET_CODE (PATTERN (insn
)) == CLOBBER
)
4601 && MEM_P (XEXP (PATTERN (insn
), 0)))
4602 XEXP (XEXP (PATTERN (insn
), 0), 0)
4603 = eliminate_regs (XEXP (XEXP (PATTERN (insn
), 0), 0),
4604 GET_MODE (XEXP (PATTERN (insn
), 0)),
4607 /* If we need to do register elimination processing, do so.
4608 This might delete the insn, in which case we are done. */
4609 if ((num_eliminable
|| num_eliminable_invariants
) && chain
->need_elim
)
4611 eliminate_regs_in_insn (insn
, 1);
4614 update_eliminable_offsets ();
4615 CLEAR_REG_SET (®s_to_forget
);
4620 /* If need_elim is nonzero but need_reload is zero, one might think
4621 that we could simply set n_reloads to 0. However, find_reloads
4622 could have done some manipulation of the insn (such as swapping
4623 commutative operands), and these manipulations are lost during
4624 the first pass for every insn that needs register elimination.
4625 So the actions of find_reloads must be redone here. */
4627 if (! chain
->need_elim
&& ! chain
->need_reload
4628 && ! chain
->need_operand_change
)
4630 /* First find the pseudo regs that must be reloaded for this insn.
4631 This info is returned in the tables reload_... (see reload.h).
4632 Also modify the body of INSN by substituting RELOAD
4633 rtx's for those pseudo regs. */
4636 CLEAR_REG_SET (®_has_output_reload
);
4637 CLEAR_HARD_REG_SET (reg_is_output_reload
);
4639 find_reloads (insn
, 1, spill_indirect_levels
, live_known
,
/* Reload generation for an insn that needs reloads: pick reload regs
   (choose_reload_regs), emit the load/store insns (emit_reload_insns),
   then substitute the chosen regs into the insn body (subst_reloads).  */
4645 rtx_insn
*next
= NEXT_INSN (insn
);
4647 /* ??? PREV can get deleted by reload inheritance.
4648 Work around this by emitting a marker note. */
4649 prev
= PREV_INSN (insn
);
4650 reorder_insns_nobb (marker
, marker
, prev
);
4652 /* Now compute which reload regs to reload them into. Perhaps
4653 reusing reload regs from previous insns, or else output
4654 load insns to reload them. Maybe output store insns too.
4655 Record the choices of reload reg in reload_reg_rtx. */
4656 choose_reload_regs (chain
);
4658 /* Generate the insns to reload operands into or out of
4659 their reload regs. */
4660 emit_reload_insns (chain
);
4662 /* Substitute the chosen reload regs from reload_reg_rtx
4663 into the insn's body (or perhaps into the bodies of other
4664 load and store insn that we just made for reloading
4665 and that we moved the structure into). */
4666 subst_reloads (insn
);
4668 prev
= PREV_INSN (marker
);
4669 unlink_insn_chain (marker
, marker
);
4671 /* Adjust the exception region notes for loads and stores. */
4672 if (cfun
->can_throw_non_call_exceptions
&& !CALL_P (insn
))
4673 fixup_eh_region_note (insn
, prev
, next
);
4675 /* Adjust the location of REG_ARGS_SIZE. */
4676 rtx p
= find_reg_note (insn
, REG_ARGS_SIZE
, NULL_RTX
);
4679 remove_note (insn
, p
);
4680 fixup_args_size_notes (prev
, PREV_INSN (next
),
4681 INTVAL (XEXP (p
, 0)));
4684 /* If this was an ASM, make sure that all the reload insns
4685 we have generated are valid. If not, give an error
4687 if (asm_noperands (PATTERN (insn
)) >= 0)
4688 for (rtx_insn
*p
= NEXT_INSN (prev
);
4691 if (p
!= insn
&& INSN_P (p
)
4692 && GET_CODE (PATTERN (p
)) != USE
4693 && (recog_memoized (p
) < 0
4694 || (extract_insn (p
),
4695 !(constrain_operands (1,
4696 get_enabled_alternatives (p
))))))
4698 error_for_asm (insn
,
4699 "%<asm%> operand requires "
4700 "impossible reload");
4705 if (num_eliminable
&& chain
->need_elim
)
4706 update_eliminable_offsets ();
4708 /* Any previously reloaded spilled pseudo reg, stored in this insn,
4709 is no longer validly lying around to save a future reload.
4710 Note that this does not detect pseudos that were reloaded
4711 for this insn in order to be stored in
4712 (obeying register constraints). That is correct; such reload
4713 registers ARE still valid. */
4714 forget_marked_reloads (®s_to_forget
);
4715 CLEAR_REG_SET (®s_to_forget
);
4717 /* There may have been CLOBBER insns placed after INSN. So scan
4718 between INSN and NEXT and use them to forget old reloads. */
4719 for (rtx_insn
*x
= NEXT_INSN (insn
); x
!= old_next
; x
= NEXT_INSN (x
))
4720 if (NONJUMP_INSN_P (x
) && GET_CODE (PATTERN (x
)) == CLOBBER
)
4721 note_stores (PATTERN (x
), forget_old_reloads_1
, NULL
);
4724 /* Likewise for regs altered by auto-increment in this insn.
4725 REG_INC notes have been changed by reloading:
4726 find_reloads_address_1 records substitutions for them,
4727 which have been performed by subst_reloads above. */
4728 for (i
= n_reloads
- 1; i
>= 0; i
--)
4730 rtx in_reg
= rld
[i
].in_reg
;
4733 enum rtx_code code
= GET_CODE (in_reg
);
4734 /* PRE_INC / PRE_DEC will have the reload register ending up
4735 with the same value as the stack slot, but that doesn't
4736 hold true for POST_INC / POST_DEC. Either we have to
4737 convert the memory access to a true POST_INC / POST_DEC,
4738 or we can't use the reload register for inheritance. */
4739 if ((code
== POST_INC
|| code
== POST_DEC
)
4740 && TEST_HARD_REG_BIT (reg_reloaded_valid
,
4741 REGNO (rld
[i
].reg_rtx
))
4742 /* Make sure it is the inc/dec pseudo, and not
4743 some other (e.g. output operand) pseudo. */
4744 && ((unsigned) reg_reloaded_contents
[REGNO (rld
[i
].reg_rtx
)]
4745 == REGNO (XEXP (in_reg
, 0))))
4748 rtx reload_reg
= rld
[i
].reg_rtx
;
4749 machine_mode mode
= GET_MODE (reload_reg
);
/* Scan the emitted reload insns to see whether the memory access can
   be rewritten as a true POST_INC / POST_DEC on the reload register.  */
4753 for (p
= PREV_INSN (old_next
); p
!= prev
; p
= PREV_INSN (p
))
4755 /* We really want to ignore REG_INC notes here, so
4756 use PATTERN (p) as argument to reg_set_p . */
4757 if (reg_set_p (reload_reg
, PATTERN (p
)))
4759 n
= count_occurrences (PATTERN (p
), reload_reg
, 0);
4765 = gen_rtx_fmt_e (code
, mode
, reload_reg
);
4767 validate_replace_rtx_group (reload_reg
,
4769 n
= verify_changes (0);
4771 /* We must also verify that the constraints
4772 are met after the replacement. Make sure
4773 extract_insn is only called for an insn
4774 where the replacements were found to be
4779 n
= constrain_operands (1,
4780 get_enabled_alternatives (p
));
4783 /* If the constraints were not met, then
4784 undo the replacement, else confirm it. */
4788 confirm_change_group ();
4794 add_reg_note (p
, REG_INC
, reload_reg
);
4795 /* Mark this as having an output reload so that the
4796 REG_INC processing code below won't invalidate
4797 the reload for inheritance. */
4798 SET_HARD_REG_BIT (reg_is_output_reload
,
4799 REGNO (reload_reg
));
4800 SET_REGNO_REG_SET (®_has_output_reload
,
4801 REGNO (XEXP (in_reg
, 0)));
4804 forget_old_reloads_1 (XEXP (in_reg
, 0), NULL_RTX
,
4807 else if ((code
== PRE_INC
|| code
== PRE_DEC
)
4808 && TEST_HARD_REG_BIT (reg_reloaded_valid
,
4809 REGNO (rld
[i
].reg_rtx
))
4810 /* Make sure it is the inc/dec pseudo, and not
4811 some other (e.g. output operand) pseudo. */
4812 && ((unsigned) reg_reloaded_contents
[REGNO (rld
[i
].reg_rtx
)]
4813 == REGNO (XEXP (in_reg
, 0))))
4815 SET_HARD_REG_BIT (reg_is_output_reload
,
4816 REGNO (rld
[i
].reg_rtx
));
4817 SET_REGNO_REG_SET (®_has_output_reload
,
4818 REGNO (XEXP (in_reg
, 0)))
;
4820 else if (code
== PRE_INC
|| code
== PRE_DEC
4821 || code
== POST_INC
|| code
== POST_DEC
)
4823 int in_regno
= REGNO (XEXP (in_reg
, 0));
4825 if (reg_last_reload_reg
[in_regno
] != NULL_RTX
)
4828 bool forget_p
= true;
4830 in_hard_regno
= REGNO (reg_last_reload_reg
[in_regno
]);
4831 if (TEST_HARD_REG_BIT (reg_reloaded_valid
,
4834 for (rtx_insn
*x
= (old_prev
?
4835 NEXT_INSN (old_prev
) : insn
);
4838 if (x
== reg_reloaded_insn
[in_hard_regno
])
4844 /* If for some reasons, we didn't set up
4845 reg_last_reload_reg in this insn,
4846 invalidate inheritance from previous
4847 insns for the incremented/decremented
4848 register. Such registers will be not in
4849 reg_has_output_reload. Invalidate it
4850 also if the corresponding element in
4851 reg_reloaded_insn is also
4854 forget_old_reloads_1 (XEXP (in_reg
, 0),
4860 /* If a pseudo that got a hard register is auto-incremented,
4861 we must purge records of copying it into pseudos without
4863 for (rtx x
= REG_NOTES (insn
); x
; x
= XEXP (x
, 1))
4864 if (REG_NOTE_KIND (x
) == REG_INC
)
4866 /* See if this pseudo reg was reloaded in this insn.
4867 If so, its last-reload info is still valid
4868 because it is based on this insn's reload. */
4869 for (i
= 0; i
< n_reloads
; i
++)
4870 if (rld
[i
].out
== XEXP (x
, 0))
4874 forget_old_reloads_1 (XEXP (x
, 0), NULL_RTX
, NULL
);
4878 /* A reload reg's contents are unknown after a label. */
4880 CLEAR_HARD_REG_SET (reg_reloaded_valid
);
4882 /* Don't assume a reload reg is still good after a call insn
4883 if it is a call-used reg, or if it contains a value that will
4884 be partially clobbered by the call. */
4885 else if (CALL_P (insn
))
4887 AND_COMPL_HARD_REG_SET (reg_reloaded_valid
, call_used_reg_set
);
4888 AND_COMPL_HARD_REG_SET (reg_reloaded_valid
, reg_reloaded_call_part_clobbered
);
4890 /* If this is a call to a setjmp-type function, we must not
4891 reuse any reload reg contents across the call; that will
4892 just be clobbered by other uses of the register in later
4893 code, before the longjmp. */
4894 if (find_reg_note (insn
, REG_SETJMP
, NULL_RTX
))
4895 CLEAR_HARD_REG_SET (reg_reloaded_valid
);
/* Per-function cleanup of the inheritance bookkeeping.  */
4900 free (reg_last_reload_reg
);
4901 CLEAR_REG_SET (®_has_output_reload
);
4904 /* Discard all record of any value reloaded from X,
4905 or reloaded in X from someplace else;
4906 unless X is an output reload reg of the current insn.
4908 X may be a hard reg (the reload reg)
4909 or it may be a pseudo reg that was reloaded from.
4911 When DATA is non-NULL just mark the registers in regset
4912 to be forgotten later. */
4915 forget_old_reloads_1 (rtx x
, const_rtx ignored ATTRIBUTE_UNUSED
,
4920 regset regs
= (regset
) data
;
4922 /* note_stores does give us subregs of hard regs,
4923 subreg_regno_offset requires a hard reg. */
4924 while (GET_CODE (x
) == SUBREG
)
4926 /* We ignore the subreg offset when calculating the regno,
4927 because we are using the entire underlying hard register
4937 if (regno
>= FIRST_PSEUDO_REGISTER
)
4943 nr
= hard_regno_nregs
[regno
][GET_MODE (x
)];
4944 /* Storing into a spilled-reg invalidates its contents.
4945 This can happen if a block-local pseudo is allocated to that reg
4946 and it wasn't spilled because this block's total need is 0.
4947 Then some insn might have an optional reload and use this reg. */
4949 for (i
= 0; i
< nr
; i
++)
4950 /* But don't do this if the reg actually serves as an output
4951 reload reg in the current instruction. */
4953 || ! TEST_HARD_REG_BIT (reg_is_output_reload
, regno
+ i
))
4955 CLEAR_HARD_REG_BIT (reg_reloaded_valid
, regno
+ i
);
4956 spill_reg_store
[regno
+ i
] = 0;
4962 SET_REGNO_REG_SET (regs
, regno
+ nr
);
4965 /* Since value of X has changed,
4966 forget any value previously copied from it. */
4969 /* But don't forget a copy if this is the output reload
4970 that establishes the copy's validity. */
4972 || !REGNO_REG_SET_P (®_has_output_reload
, regno
+ nr
))
4973 reg_last_reload_reg
[regno
+ nr
] = 0;
4977 /* Forget the reloads marked in regset by previous function. */
4979 forget_marked_reloads (regset regs
)
4982 reg_set_iterator rsi
;
4983 EXECUTE_IF_SET_IN_REG_SET (regs
, 0, reg
, rsi
)
4985 if (reg
< FIRST_PSEUDO_REGISTER
4986 /* But don't do this if the reg actually serves as an output
4987 reload reg in the current instruction. */
4989 || ! TEST_HARD_REG_BIT (reg_is_output_reload
, reg
)))
4991 CLEAR_HARD_REG_BIT (reg_reloaded_valid
, reg
);
4992 spill_reg_store
[reg
] = 0;
4995 || !REGNO_REG_SET_P (®_has_output_reload
, reg
))
4996 reg_last_reload_reg
[reg
] = 0;
5000 /* The following HARD_REG_SETs indicate when each hard register is
5001 used for a reload of various parts of the current insn. */
5003 /* If reg is unavailable for all reloads. */
5004 static HARD_REG_SET reload_reg_unavailable
;
5005 /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
5006 static HARD_REG_SET reload_reg_used
;
5007 /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
5008 static HARD_REG_SET reload_reg_used_in_input_addr
[MAX_RECOG_OPERANDS
];
5009 /* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I. */
5010 static HARD_REG_SET reload_reg_used_in_inpaddr_addr
[MAX_RECOG_OPERANDS
];
5011 /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
5012 static HARD_REG_SET reload_reg_used_in_output_addr
[MAX_RECOG_OPERANDS
];
5013 /* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I. */
5014 static HARD_REG_SET reload_reg_used_in_outaddr_addr
[MAX_RECOG_OPERANDS
];
5015 /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
5016 static HARD_REG_SET reload_reg_used_in_input
[MAX_RECOG_OPERANDS
];
5017 /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
5018 static HARD_REG_SET reload_reg_used_in_output
[MAX_RECOG_OPERANDS
];
5019 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
5020 static HARD_REG_SET reload_reg_used_in_op_addr
;
5021 /* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
5022 static HARD_REG_SET reload_reg_used_in_op_addr_reload
;
5023 /* If reg is in use for a RELOAD_FOR_INSN reload. */
5024 static HARD_REG_SET reload_reg_used_in_insn
;
5025 /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
5026 static HARD_REG_SET reload_reg_used_in_other_addr
;
5028 /* If reg is in use as a reload reg for any sort of reload. */
5029 static HARD_REG_SET reload_reg_used_at_all
;
5031 /* If reg is used as an inherited reload.  We just mark the first register
   of the group.  */
5033 static HARD_REG_SET reload_reg_used_for_inherit
;
5035 /* Records which hard regs are used in any way, either as explicit use or
5036 by being allocated to a pseudo during any point of the current insn. */
5037 static HARD_REG_SET reg_used_in_insn
;
5039 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
5040 TYPE. MODE is used to indicate how many consecutive regs are
5044 mark_reload_reg_in_use (unsigned int regno
, int opnum
, enum reload_type type
,
5050 add_to_hard_reg_set (&reload_reg_used
, mode
, regno
);
5053 case RELOAD_FOR_INPUT_ADDRESS
:
5054 add_to_hard_reg_set (&reload_reg_used_in_input_addr
[opnum
], mode
, regno
);
5057 case RELOAD_FOR_INPADDR_ADDRESS
:
5058 add_to_hard_reg_set (&reload_reg_used_in_inpaddr_addr
[opnum
], mode
, regno
);
5061 case RELOAD_FOR_OUTPUT_ADDRESS
:
5062 add_to_hard_reg_set (&reload_reg_used_in_output_addr
[opnum
], mode
, regno
);
5065 case RELOAD_FOR_OUTADDR_ADDRESS
:
5066 add_to_hard_reg_set (&reload_reg_used_in_outaddr_addr
[opnum
], mode
, regno
);
5069 case RELOAD_FOR_OPERAND_ADDRESS
:
5070 add_to_hard_reg_set (&reload_reg_used_in_op_addr
, mode
, regno
);
5073 case RELOAD_FOR_OPADDR_ADDR
:
5074 add_to_hard_reg_set (&reload_reg_used_in_op_addr_reload
, mode
, regno
);
5077 case RELOAD_FOR_OTHER_ADDRESS
:
5078 add_to_hard_reg_set (&reload_reg_used_in_other_addr
, mode
, regno
);
5081 case RELOAD_FOR_INPUT
:
5082 add_to_hard_reg_set (&reload_reg_used_in_input
[opnum
], mode
, regno
);
5085 case RELOAD_FOR_OUTPUT
:
5086 add_to_hard_reg_set (&reload_reg_used_in_output
[opnum
], mode
, regno
);
5089 case RELOAD_FOR_INSN
:
5090 add_to_hard_reg_set (&reload_reg_used_in_insn
, mode
, regno
);
5094 add_to_hard_reg_set (&reload_reg_used_at_all
, mode
, regno
);
5097 /* Similarly, but show REGNO is no longer in use for a reload. */
5100 clear_reload_reg_in_use (unsigned int regno
, int opnum
,
5101 enum reload_type type
, machine_mode mode
)
5103 unsigned int nregs
= hard_regno_nregs
[regno
][mode
];
5104 unsigned int start_regno
, end_regno
, r
;
5106 /* A complication is that for some reload types, inheritance might
5107 allow multiple reloads of the same types to share a reload register.
5108 We set check_opnum if we have to check only reloads with the same
5109 operand number, and check_any if we have to check all reloads. */
5110 int check_opnum
= 0;
5112 HARD_REG_SET
*used_in_set
;
5117 used_in_set
= &reload_reg_used
;
5120 case RELOAD_FOR_INPUT_ADDRESS
:
5121 used_in_set
= &reload_reg_used_in_input_addr
[opnum
];
5124 case RELOAD_FOR_INPADDR_ADDRESS
:
5126 used_in_set
= &reload_reg_used_in_inpaddr_addr
[opnum
];
5129 case RELOAD_FOR_OUTPUT_ADDRESS
:
5130 used_in_set
= &reload_reg_used_in_output_addr
[opnum
];
5133 case RELOAD_FOR_OUTADDR_ADDRESS
:
5135 used_in_set
= &reload_reg_used_in_outaddr_addr
[opnum
];
5138 case RELOAD_FOR_OPERAND_ADDRESS
:
5139 used_in_set
= &reload_reg_used_in_op_addr
;
5142 case RELOAD_FOR_OPADDR_ADDR
:
5144 used_in_set
= &reload_reg_used_in_op_addr_reload
;
5147 case RELOAD_FOR_OTHER_ADDRESS
:
5148 used_in_set
= &reload_reg_used_in_other_addr
;
5152 case RELOAD_FOR_INPUT
:
5153 used_in_set
= &reload_reg_used_in_input
[opnum
];
5156 case RELOAD_FOR_OUTPUT
:
5157 used_in_set
= &reload_reg_used_in_output
[opnum
];
5160 case RELOAD_FOR_INSN
:
5161 used_in_set
= &reload_reg_used_in_insn
;
5166 /* We resolve conflicts with remaining reloads of the same type by
5167 excluding the intervals of reload registers by them from the
5168 interval of freed reload registers. Since we only keep track of
5169 one set of interval bounds, we might have to exclude somewhat
5170 more than what would be necessary if we used a HARD_REG_SET here.
5171 But this should only happen very infrequently, so there should
5172 be no reason to worry about it. */
5174 start_regno
= regno
;
5175 end_regno
= regno
+ nregs
;
5176 if (check_opnum
|| check_any
)
5178 for (i
= n_reloads
- 1; i
>= 0; i
--)
5180 if (rld
[i
].when_needed
== type
5181 && (check_any
|| rld
[i
].opnum
== opnum
)
5184 unsigned int conflict_start
= true_regnum (rld
[i
].reg_rtx
);
5185 unsigned int conflict_end
5186 = end_hard_regno (rld
[i
].mode
, conflict_start
);
5188 /* If there is an overlap with the first to-be-freed register,
5189 adjust the interval start. */
5190 if (conflict_start
<= start_regno
&& conflict_end
> start_regno
)
5191 start_regno
= conflict_end
;
5192 /* Otherwise, if there is a conflict with one of the other
5193 to-be-freed registers, adjust the interval end. */
5194 if (conflict_start
> start_regno
&& conflict_start
< end_regno
)
5195 end_regno
= conflict_start
;
5200 for (r
= start_regno
; r
< end_regno
; r
++)
5201 CLEAR_HARD_REG_BIT (*used_in_set
, r
);
5204 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
5205 specified by OPNUM and TYPE. */
5208 reload_reg_free_p (unsigned int regno
, int opnum
, enum reload_type type
)
5212 /* In use for a RELOAD_OTHER means it's not available for anything. */
5213 if (TEST_HARD_REG_BIT (reload_reg_used
, regno
)
5214 || TEST_HARD_REG_BIT (reload_reg_unavailable
, regno
))
5220 /* In use for anything means we can't use it for RELOAD_OTHER. */
5221 if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr
, regno
)
5222 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr
, regno
)
5223 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload
, regno
)
5224 || TEST_HARD_REG_BIT (reload_reg_used_in_insn
, regno
))
5227 for (i
= 0; i
< reload_n_operands
; i
++)
5228 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr
[i
], regno
)
5229 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr
[i
], regno
)
5230 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr
[i
], regno
)
5231 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr
[i
], regno
)
5232 || TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
)
5233 || TEST_HARD_REG_BIT (reload_reg_used_in_output
[i
], regno
))
5238 case RELOAD_FOR_INPUT
:
5239 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn
, regno
)
5240 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr
, regno
))
5243 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload
, regno
))
5246 /* If it is used for some other input, can't use it. */
5247 for (i
= 0; i
< reload_n_operands
; i
++)
5248 if (TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
))
5251 /* If it is used in a later operand's address, can't use it. */
5252 for (i
= opnum
+ 1; i
< reload_n_operands
; i
++)
5253 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr
[i
], regno
)
5254 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr
[i
], regno
))
5259 case RELOAD_FOR_INPUT_ADDRESS
:
5260 /* Can't use a register if it is used for an input address for this
5261 operand or used as an input in an earlier one. */
5262 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr
[opnum
], regno
)
5263 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr
[opnum
], regno
))
5266 for (i
= 0; i
< opnum
; i
++)
5267 if (TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
))
5272 case RELOAD_FOR_INPADDR_ADDRESS
:
5273 /* Can't use a register if it is used for an input address
5274 for this operand or used as an input in an earlier
5276 if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr
[opnum
], regno
))
5279 for (i
= 0; i
< opnum
; i
++)
5280 if (TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
))
5285 case RELOAD_FOR_OUTPUT_ADDRESS
:
5286 /* Can't use a register if it is used for an output address for this
5287 operand or used as an output in this or a later operand. Note
5288 that multiple output operands are emitted in reverse order, so
5289 the conflicting ones are those with lower indices. */
5290 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr
[opnum
], regno
))
5293 for (i
= 0; i
<= opnum
; i
++)
5294 if (TEST_HARD_REG_BIT (reload_reg_used_in_output
[i
], regno
))
5299 case RELOAD_FOR_OUTADDR_ADDRESS
:
5300 /* Can't use a register if it is used for an output address
5301 for this operand or used as an output in this or a
5302 later operand. Note that multiple output operands are
5303 emitted in reverse order, so the conflicting ones are
5304 those with lower indices. */
5305 if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr
[opnum
], regno
))
5308 for (i
= 0; i
<= opnum
; i
++)
5309 if (TEST_HARD_REG_BIT (reload_reg_used_in_output
[i
], regno
))
5314 case RELOAD_FOR_OPERAND_ADDRESS
:
5315 for (i
= 0; i
< reload_n_operands
; i
++)
5316 if (TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
))
5319 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn
, regno
)
5320 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr
, regno
));
5322 case RELOAD_FOR_OPADDR_ADDR
:
5323 for (i
= 0; i
< reload_n_operands
; i
++)
5324 if (TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
))
5327 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload
, regno
));
5329 case RELOAD_FOR_OUTPUT
:
5330 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
5331 outputs, or an operand address for this or an earlier output.
5332 Note that multiple output operands are emitted in reverse order,
5333 so the conflicting ones are those with higher indices. */
5334 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn
, regno
))
5337 for (i
= 0; i
< reload_n_operands
; i
++)
5338 if (TEST_HARD_REG_BIT (reload_reg_used_in_output
[i
], regno
))
5341 for (i
= opnum
; i
< reload_n_operands
; i
++)
5342 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr
[i
], regno
)
5343 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr
[i
], regno
))
5348 case RELOAD_FOR_INSN
:
5349 for (i
= 0; i
< reload_n_operands
; i
++)
5350 if (TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
)
5351 || TEST_HARD_REG_BIT (reload_reg_used_in_output
[i
], regno
))
5354 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn
, regno
)
5355 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr
, regno
));
5357 case RELOAD_FOR_OTHER_ADDRESS
:
5358 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr
, regno
);
5365 /* Return 1 if the value in reload reg REGNO, as used by the reload with
5366 the number RELOADNUM, is still available in REGNO at the end of the insn.
5368 We can assume that the reload reg was already tested for availability
5369 at the time it is needed, and we should not check this again,
5370 in case the reg has already been marked in use. */
5373 reload_reg_reaches_end_p (unsigned int regno
, int reloadnum
)
5375 int opnum
= rld
[reloadnum
].opnum
;
5376 enum reload_type type
= rld
[reloadnum
].when_needed
;
5379 /* See if there is a reload with the same type for this operand, using
5380 the same register. This case is not handled by the code below. */
5381 for (i
= reloadnum
+ 1; i
< n_reloads
; i
++)
5386 if (rld
[i
].opnum
!= opnum
|| rld
[i
].when_needed
!= type
)
5388 reg
= rld
[i
].reg_rtx
;
5389 if (reg
== NULL_RTX
)
5391 nregs
= hard_regno_nregs
[REGNO (reg
)][GET_MODE (reg
)];
5392 if (regno
>= REGNO (reg
) && regno
< REGNO (reg
) + nregs
)
5399 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
5400 its value must reach the end. */
5403 /* If this use is for part of the insn,
5404 its value reaches if no subsequent part uses the same register.
5405 Just like the above function, don't try to do this with lots
5408 case RELOAD_FOR_OTHER_ADDRESS
:
5409 /* Here we check for everything else, since these don't conflict
5410 with anything else and everything comes later. */
5412 for (i
= 0; i
< reload_n_operands
; i
++)
5413 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr
[i
], regno
)
5414 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr
[i
], regno
)
5415 || TEST_HARD_REG_BIT (reload_reg_used_in_output
[i
], regno
)
5416 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr
[i
], regno
)
5417 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr
[i
], regno
)
5418 || TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
))
5421 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr
, regno
)
5422 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload
, regno
)
5423 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn
, regno
)
5424 && ! TEST_HARD_REG_BIT (reload_reg_used
, regno
));
5426 case RELOAD_FOR_INPUT_ADDRESS
:
5427 case RELOAD_FOR_INPADDR_ADDRESS
:
5428 /* Similar, except that we check only for this and subsequent inputs
5429 and the address of only subsequent inputs and we do not need
5430 to check for RELOAD_OTHER objects since they are known not to
5433 for (i
= opnum
; i
< reload_n_operands
; i
++)
5434 if (TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
))
5437 /* Reload register of reload with type RELOAD_FOR_INPADDR_ADDRESS
5438 could be killed if the register is also used by reload with type
5439 RELOAD_FOR_INPUT_ADDRESS, so check it. */
5440 if (type
== RELOAD_FOR_INPADDR_ADDRESS
5441 && TEST_HARD_REG_BIT (reload_reg_used_in_input_addr
[opnum
], regno
))
5444 for (i
= opnum
+ 1; i
< reload_n_operands
; i
++)
5445 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr
[i
], regno
)
5446 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr
[i
], regno
))
5449 for (i
= 0; i
< reload_n_operands
; i
++)
5450 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr
[i
], regno
)
5451 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr
[i
], regno
)
5452 || TEST_HARD_REG_BIT (reload_reg_used_in_output
[i
], regno
))
5455 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload
, regno
))
5458 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr
, regno
)
5459 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn
, regno
)
5460 && !TEST_HARD_REG_BIT (reload_reg_used
, regno
));
5462 case RELOAD_FOR_INPUT
:
5463 /* Similar to input address, except we start at the next operand for
5464 both input and input address and we do not check for
5465 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
5468 for (i
= opnum
+ 1; i
< reload_n_operands
; i
++)
5469 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr
[i
], regno
)
5470 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr
[i
], regno
)
5471 || TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
))
5474 /* ... fall through ... */
5476 case RELOAD_FOR_OPERAND_ADDRESS
:
5477 /* Check outputs and their addresses. */
5479 for (i
= 0; i
< reload_n_operands
; i
++)
5480 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr
[i
], regno
)
5481 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr
[i
], regno
)
5482 || TEST_HARD_REG_BIT (reload_reg_used_in_output
[i
], regno
))
5485 return (!TEST_HARD_REG_BIT (reload_reg_used
, regno
));
5487 case RELOAD_FOR_OPADDR_ADDR
:
5488 for (i
= 0; i
< reload_n_operands
; i
++)
5489 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr
[i
], regno
)
5490 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr
[i
], regno
)
5491 || TEST_HARD_REG_BIT (reload_reg_used_in_output
[i
], regno
))
5494 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr
, regno
)
5495 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn
, regno
)
5496 && !TEST_HARD_REG_BIT (reload_reg_used
, regno
));
5498 case RELOAD_FOR_INSN
:
5499 /* These conflict with other outputs with RELOAD_OTHER. So
5500 we need only check for output addresses. */
5502 opnum
= reload_n_operands
;
5504 /* ... fall through ... */
5506 case RELOAD_FOR_OUTPUT
:
5507 case RELOAD_FOR_OUTPUT_ADDRESS
:
5508 case RELOAD_FOR_OUTADDR_ADDRESS
:
5509 /* We already know these can't conflict with a later output. So the
5510 only thing to check are later output addresses.
5511 Note that multiple output operands are emitted in reverse order,
5512 so the conflicting ones are those with lower indices. */
5513 for (i
= 0; i
< opnum
; i
++)
5514 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr
[i
], regno
)
5515 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr
[i
], regno
))
5518 /* Reload register of reload with type RELOAD_FOR_OUTADDR_ADDRESS
5519 could be killed if the register is also used by reload with type
5520 RELOAD_FOR_OUTPUT_ADDRESS, so check it. */
5521 if (type
== RELOAD_FOR_OUTADDR_ADDRESS
5522 && TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr
[opnum
], regno
))
5532 /* Like reload_reg_reaches_end_p, but check that the condition holds for
5533 every register in REG. */
5536 reload_reg_rtx_reaches_end_p (rtx reg
, int reloadnum
)
5540 for (i
= REGNO (reg
); i
< END_REGNO (reg
); i
++)
5541 if (!reload_reg_reaches_end_p (i
, reloadnum
))
5547 /* Returns whether R1 and R2 are uniquely chained: the value of one
5548 is used by the other, and that value is not used by any other
5549 reload for this insn. This is used to partially undo the decision
5550 made in find_reloads when in the case of multiple
5551 RELOAD_FOR_OPERAND_ADDRESS reloads it converts all
5552 RELOAD_FOR_OPADDR_ADDR reloads into RELOAD_FOR_OPERAND_ADDRESS
5553 reloads. This code tries to avoid the conflict created by that
5554 change. It might be cleaner to explicitly keep track of which
5555 RELOAD_FOR_OPADDR_ADDR reload is associated with which
5556 RELOAD_FOR_OPERAND_ADDRESS reload, rather than to try to detect
5557 this after the fact. */
5559 reloads_unique_chain_p (int r1
, int r2
)
5563 /* We only check input reloads. */
5564 if (! rld
[r1
].in
|| ! rld
[r2
].in
)
5567 /* Avoid anything with output reloads. */
5568 if (rld
[r1
].out
|| rld
[r2
].out
)
5571 /* "chained" means one reload is a component of the other reload,
5572 not the same as the other reload. */
5573 if (rld
[r1
].opnum
!= rld
[r2
].opnum
5574 || rtx_equal_p (rld
[r1
].in
, rld
[r2
].in
)
5575 || rld
[r1
].optional
|| rld
[r2
].optional
5576 || ! (reg_mentioned_p (rld
[r1
].in
, rld
[r2
].in
)
5577 || reg_mentioned_p (rld
[r2
].in
, rld
[r1
].in
)))
5580 /* The following loop assumes that r1 is the reload that feeds r2. */
5584 for (i
= 0; i
< n_reloads
; i
++)
5585 /* Look for input reloads that aren't our two */
5586 if (i
!= r1
&& i
!= r2
&& rld
[i
].in
)
5588 /* If our reload is mentioned at all, it isn't a simple chain. */
5589 if (reg_mentioned_p (rld
[r1
].in
, rld
[i
].in
))
5595 /* The recursive function change all occurrences of WHAT in *WHERE
5598 substitute (rtx
*where
, const_rtx what
, rtx repl
)
5607 if (*where
== what
|| rtx_equal_p (*where
, what
))
5609 /* Record the location of the changed rtx. */
5610 substitute_stack
.safe_push (where
);
5615 code
= GET_CODE (*where
);
5616 fmt
= GET_RTX_FORMAT (code
);
5617 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
5623 for (j
= XVECLEN (*where
, i
) - 1; j
>= 0; j
--)
5624 substitute (&XVECEXP (*where
, i
, j
), what
, repl
);
5626 else if (fmt
[i
] == 'e')
5627 substitute (&XEXP (*where
, i
), what
, repl
);
5631 /* The function returns TRUE if chain of reload R1 and R2 (in any
5632 order) can be evaluated without usage of intermediate register for
5633 the reload containing another reload. It is important to see
5634 gen_reload to understand what the function is trying to do. As an
5635 example, let us have reload chain
5638 r1: <something> + const
5640 and reload R2 got reload reg HR. The function returns true if
5641 there is a correct insn HR = HR + <something>. Otherwise,
5642 gen_reload will use intermediate register (and this is the reload
5643 reg for R1) to reload <something>.
5645 We need this function to find a conflict for chain reloads. In our
5646 example, if HR = HR + <something> is incorrect insn, then we cannot
5647 use HR as a reload register for R2. If we do use it then we get a
5656 gen_reload_chain_without_interm_reg_p (int r1
, int r2
)
5658 /* Assume other cases in gen_reload are not possible for
5659 chain reloads or do need an intermediate hard registers. */
5664 rtx_insn
*last
= get_last_insn ();
5666 /* Make r2 a component of r1. */
5667 if (reg_mentioned_p (rld
[r1
].in
, rld
[r2
].in
))
5670 gcc_assert (reg_mentioned_p (rld
[r2
].in
, rld
[r1
].in
));
5671 regno
= rld
[r1
].regno
>= 0 ? rld
[r1
].regno
: rld
[r2
].regno
;
5672 gcc_assert (regno
>= 0);
5673 out
= gen_rtx_REG (rld
[r1
].mode
, regno
);
5675 substitute (&in
, rld
[r2
].in
, gen_rtx_REG (rld
[r2
].mode
, regno
));
5677 /* If IN is a paradoxical SUBREG, remove it and try to put the
5678 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
5679 strip_paradoxical_subreg (&in
, &out
);
5681 if (GET_CODE (in
) == PLUS
5682 && (REG_P (XEXP (in
, 0))
5683 || GET_CODE (XEXP (in
, 0)) == SUBREG
5684 || MEM_P (XEXP (in
, 0)))
5685 && (REG_P (XEXP (in
, 1))
5686 || GET_CODE (XEXP (in
, 1)) == SUBREG
5687 || CONSTANT_P (XEXP (in
, 1))
5688 || MEM_P (XEXP (in
, 1))))
5690 insn
= emit_insn (gen_rtx_SET (out
, in
));
5691 code
= recog_memoized (insn
);
5696 extract_insn (insn
);
5697 /* We want constrain operands to treat this insn strictly in
5698 its validity determination, i.e., the way it would after
5699 reload has completed. */
5700 result
= constrain_operands (1, get_enabled_alternatives (insn
));
5703 delete_insns_since (last
);
5706 /* Restore the original value at each changed address within R1. */
5707 while (!substitute_stack
.is_empty ())
5709 rtx
*where
= substitute_stack
.pop ();
5710 *where
= rld
[r2
].in
;
5716 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
5719 This function uses the same algorithm as reload_reg_free_p above. */
5722 reloads_conflict (int r1
, int r2
)
5724 enum reload_type r1_type
= rld
[r1
].when_needed
;
5725 enum reload_type r2_type
= rld
[r2
].when_needed
;
5726 int r1_opnum
= rld
[r1
].opnum
;
5727 int r2_opnum
= rld
[r2
].opnum
;
5729 /* RELOAD_OTHER conflicts with everything. */
5730 if (r2_type
== RELOAD_OTHER
)
5733 /* Otherwise, check conflicts differently for each type. */
5737 case RELOAD_FOR_INPUT
:
5738 return (r2_type
== RELOAD_FOR_INSN
5739 || r2_type
== RELOAD_FOR_OPERAND_ADDRESS
5740 || r2_type
== RELOAD_FOR_OPADDR_ADDR
5741 || r2_type
== RELOAD_FOR_INPUT
5742 || ((r2_type
== RELOAD_FOR_INPUT_ADDRESS
5743 || r2_type
== RELOAD_FOR_INPADDR_ADDRESS
)
5744 && r2_opnum
> r1_opnum
));
5746 case RELOAD_FOR_INPUT_ADDRESS
:
5747 return ((r2_type
== RELOAD_FOR_INPUT_ADDRESS
&& r1_opnum
== r2_opnum
)
5748 || (r2_type
== RELOAD_FOR_INPUT
&& r2_opnum
< r1_opnum
));
5750 case RELOAD_FOR_INPADDR_ADDRESS
:
5751 return ((r2_type
== RELOAD_FOR_INPADDR_ADDRESS
&& r1_opnum
== r2_opnum
)
5752 || (r2_type
== RELOAD_FOR_INPUT
&& r2_opnum
< r1_opnum
));
5754 case RELOAD_FOR_OUTPUT_ADDRESS
:
5755 return ((r2_type
== RELOAD_FOR_OUTPUT_ADDRESS
&& r2_opnum
== r1_opnum
)
5756 || (r2_type
== RELOAD_FOR_OUTPUT
&& r2_opnum
<= r1_opnum
));
5758 case RELOAD_FOR_OUTADDR_ADDRESS
:
5759 return ((r2_type
== RELOAD_FOR_OUTADDR_ADDRESS
&& r2_opnum
== r1_opnum
)
5760 || (r2_type
== RELOAD_FOR_OUTPUT
&& r2_opnum
<= r1_opnum
));
5762 case RELOAD_FOR_OPERAND_ADDRESS
:
5763 return (r2_type
== RELOAD_FOR_INPUT
|| r2_type
== RELOAD_FOR_INSN
5764 || (r2_type
== RELOAD_FOR_OPERAND_ADDRESS
5765 && (!reloads_unique_chain_p (r1
, r2
)
5766 || !gen_reload_chain_without_interm_reg_p (r1
, r2
))));
5768 case RELOAD_FOR_OPADDR_ADDR
:
5769 return (r2_type
== RELOAD_FOR_INPUT
5770 || r2_type
== RELOAD_FOR_OPADDR_ADDR
);
5772 case RELOAD_FOR_OUTPUT
:
5773 return (r2_type
== RELOAD_FOR_INSN
|| r2_type
== RELOAD_FOR_OUTPUT
5774 || ((r2_type
== RELOAD_FOR_OUTPUT_ADDRESS
5775 || r2_type
== RELOAD_FOR_OUTADDR_ADDRESS
)
5776 && r2_opnum
>= r1_opnum
));
5778 case RELOAD_FOR_INSN
:
5779 return (r2_type
== RELOAD_FOR_INPUT
|| r2_type
== RELOAD_FOR_OUTPUT
5780 || r2_type
== RELOAD_FOR_INSN
5781 || r2_type
== RELOAD_FOR_OPERAND_ADDRESS
);
5783 case RELOAD_FOR_OTHER_ADDRESS
:
5784 return r2_type
== RELOAD_FOR_OTHER_ADDRESS
;
5794 /* Indexed by reload number, 1 if incoming value
5795 inherited from previous insns. */
5796 static char reload_inherited
[MAX_RELOADS
];
5798 /* For an inherited reload, this is the insn the reload was inherited from,
5799 if we know it. Otherwise, this is 0. */
5800 static rtx_insn
*reload_inheritance_insn
[MAX_RELOADS
];
5802 /* If nonzero, this is a place to get the value of the reload,
5803 rather than using reload_in. */
5804 static rtx reload_override_in
[MAX_RELOADS
];
5806 /* For each reload, the hard register number of the register used,
5807 or -1 if we did not need a register for this reload. */
5808 static int reload_spill_index
[MAX_RELOADS
];
5810 /* Index X is the value of rld[X].reg_rtx, adjusted for the input mode. */
5811 static rtx reload_reg_rtx_for_input
[MAX_RELOADS
];
5813 /* Index X is the value of rld[X].reg_rtx, adjusted for the output mode. */
5814 static rtx reload_reg_rtx_for_output
[MAX_RELOADS
];
5816 /* Subroutine of free_for_value_p, used to check a single register.
5817 START_REGNO is the starting regno of the full reload register
5818 (possibly comprising multiple hard registers) that we are considering. */
5821 reload_reg_free_for_value_p (int start_regno
, int regno
, int opnum
,
5822 enum reload_type type
, rtx value
, rtx out
,
5823 int reloadnum
, int ignore_address_reloads
)
5826 /* Set if we see an input reload that must not share its reload register
5827 with any new earlyclobber, but might otherwise share the reload
5828 register with an output or input-output reload. */
5829 int check_earlyclobber
= 0;
5833 if (TEST_HARD_REG_BIT (reload_reg_unavailable
, regno
))
5836 if (out
== const0_rtx
)
5842 /* We use some pseudo 'time' value to check if the lifetimes of the
5843 new register use would overlap with the one of a previous reload
5844 that is not read-only or uses a different value.
5845 The 'time' used doesn't have to be linear in any shape or form, just
5847 Some reload types use different 'buckets' for each operand.
5848 So there are MAX_RECOG_OPERANDS different time values for each
5850 We compute TIME1 as the time when the register for the prospective
5851 new reload ceases to be live, and TIME2 for each existing
5852 reload as the time when that the reload register of that reload
5854 Where there is little to be gained by exact lifetime calculations,
5855 we just make conservative assumptions, i.e. a longer lifetime;
5856 this is done in the 'default:' cases. */
5859 case RELOAD_FOR_OTHER_ADDRESS
:
5860 /* RELOAD_FOR_OTHER_ADDRESS conflicts with RELOAD_OTHER reloads. */
5861 time1
= copy
? 0 : 1;
5864 time1
= copy
? 1 : MAX_RECOG_OPERANDS
* 5 + 5;
5866 /* For each input, we may have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
5867 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT. By adding 0 / 1 / 2 ,
5868 respectively, to the time values for these, we get distinct time
5869 values. To get distinct time values for each operand, we have to
5870 multiply opnum by at least three. We round that up to four because
5871 multiply by four is often cheaper. */
5872 case RELOAD_FOR_INPADDR_ADDRESS
:
5873 time1
= opnum
* 4 + 2;
5875 case RELOAD_FOR_INPUT_ADDRESS
:
5876 time1
= opnum
* 4 + 3;
5878 case RELOAD_FOR_INPUT
:
5879 /* All RELOAD_FOR_INPUT reloads remain live till the instruction
5880 executes (inclusive). */
5881 time1
= copy
? opnum
* 4 + 4 : MAX_RECOG_OPERANDS
* 4 + 3;
5883 case RELOAD_FOR_OPADDR_ADDR
:
5885 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
5886 time1
= MAX_RECOG_OPERANDS
* 4 + 1;
5888 case RELOAD_FOR_OPERAND_ADDRESS
:
5889 /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
5891 time1
= copy
? MAX_RECOG_OPERANDS
* 4 + 2 : MAX_RECOG_OPERANDS
* 4 + 3;
5893 case RELOAD_FOR_OUTADDR_ADDRESS
:
5894 time1
= MAX_RECOG_OPERANDS
* 4 + 4 + opnum
;
5896 case RELOAD_FOR_OUTPUT_ADDRESS
:
5897 time1
= MAX_RECOG_OPERANDS
* 4 + 5 + opnum
;
5900 time1
= MAX_RECOG_OPERANDS
* 5 + 5;
5903 for (i
= 0; i
< n_reloads
; i
++)
5905 rtx reg
= rld
[i
].reg_rtx
;
5906 if (reg
&& REG_P (reg
)
5907 && ((unsigned) regno
- true_regnum (reg
)
5908 <= hard_regno_nregs
[REGNO (reg
)][GET_MODE (reg
)] - (unsigned) 1)
5911 rtx other_input
= rld
[i
].in
;
5913 /* If the other reload loads the same input value, that
5914 will not cause a conflict only if it's loading it into
5915 the same register. */
5916 if (true_regnum (reg
) != start_regno
)
5917 other_input
= NULL_RTX
;
5918 if (! other_input
|| ! rtx_equal_p (other_input
, value
)
5919 || rld
[i
].out
|| out
)
5922 switch (rld
[i
].when_needed
)
5924 case RELOAD_FOR_OTHER_ADDRESS
:
5927 case RELOAD_FOR_INPADDR_ADDRESS
:
5928 /* find_reloads makes sure that a
5929 RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is only used
5930 by at most one - the first -
5931 RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS . If the
5932 address reload is inherited, the address address reload
5933 goes away, so we can ignore this conflict. */
5934 if (type
== RELOAD_FOR_INPUT_ADDRESS
&& reloadnum
== i
+ 1
5935 && ignore_address_reloads
5936 /* Unless the RELOAD_FOR_INPUT is an auto_inc expression.
5937 Then the address address is still needed to store
5938 back the new address. */
5939 && ! rld
[reloadnum
].out
)
5941 /* Likewise, if a RELOAD_FOR_INPUT can inherit a value, its
5942 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS
5944 if (type
== RELOAD_FOR_INPUT
&& opnum
== rld
[i
].opnum
5945 && ignore_address_reloads
5946 /* Unless we are reloading an auto_inc expression. */
5947 && ! rld
[reloadnum
].out
)
5949 time2
= rld
[i
].opnum
* 4 + 2;
5951 case RELOAD_FOR_INPUT_ADDRESS
:
5952 if (type
== RELOAD_FOR_INPUT
&& opnum
== rld
[i
].opnum
5953 && ignore_address_reloads
5954 && ! rld
[reloadnum
].out
)
5956 time2
= rld
[i
].opnum
* 4 + 3;
5958 case RELOAD_FOR_INPUT
:
5959 time2
= rld
[i
].opnum
* 4 + 4;
5960 check_earlyclobber
= 1;
5962 /* rld[i].opnum * 4 + 4 <= (MAX_RECOG_OPERAND - 1) * 4 + 4
5963 == MAX_RECOG_OPERAND * 4 */
5964 case RELOAD_FOR_OPADDR_ADDR
:
5965 if (type
== RELOAD_FOR_OPERAND_ADDRESS
&& reloadnum
== i
+ 1
5966 && ignore_address_reloads
5967 && ! rld
[reloadnum
].out
)
5969 time2
= MAX_RECOG_OPERANDS
* 4 + 1;
5971 case RELOAD_FOR_OPERAND_ADDRESS
:
5972 time2
= MAX_RECOG_OPERANDS
* 4 + 2;
5973 check_earlyclobber
= 1;
5975 case RELOAD_FOR_INSN
:
5976 time2
= MAX_RECOG_OPERANDS
* 4 + 3;
5978 case RELOAD_FOR_OUTPUT
:
5979 /* All RELOAD_FOR_OUTPUT reloads become live just after the
5980 instruction is executed. */
5981 time2
= MAX_RECOG_OPERANDS
* 4 + 4;
5983 /* The first RELOAD_FOR_OUTADDR_ADDRESS reload conflicts with
5984 the RELOAD_FOR_OUTPUT reloads, so assign it the same time
5986 case RELOAD_FOR_OUTADDR_ADDRESS
:
5987 if (type
== RELOAD_FOR_OUTPUT_ADDRESS
&& reloadnum
== i
+ 1
5988 && ignore_address_reloads
5989 && ! rld
[reloadnum
].out
)
5991 time2
= MAX_RECOG_OPERANDS
* 4 + 4 + rld
[i
].opnum
;
5993 case RELOAD_FOR_OUTPUT_ADDRESS
:
5994 time2
= MAX_RECOG_OPERANDS
* 4 + 5 + rld
[i
].opnum
;
5997 /* If there is no conflict in the input part, handle this
5998 like an output reload. */
5999 if (! rld
[i
].in
|| rtx_equal_p (other_input
, value
))
6001 time2
= MAX_RECOG_OPERANDS
* 4 + 4;
6002 /* Earlyclobbered outputs must conflict with inputs. */
6003 if (earlyclobber_operand_p (rld
[i
].out
))
6004 time2
= MAX_RECOG_OPERANDS
* 4 + 3;
6009 /* RELOAD_OTHER might be live beyond instruction execution,
6010 but this is not obvious when we set time2 = 1. So check
6011 here if there might be a problem with the new reload
6012 clobbering the register used by the RELOAD_OTHER. */
6020 && (! rld
[i
].in
|| rld
[i
].out
6021 || ! rtx_equal_p (other_input
, value
)))
6022 || (out
&& rld
[reloadnum
].out_reg
6023 && time2
>= MAX_RECOG_OPERANDS
* 4 + 3))
6029 /* Earlyclobbered outputs must conflict with inputs. */
6030 if (check_earlyclobber
&& out
&& earlyclobber_operand_p (out
))
6036 /* Return 1 if the value in reload reg REGNO, as used by a reload
6037 needed for the part of the insn specified by OPNUM and TYPE,
6038 may be used to load VALUE into it.
6040 MODE is the mode in which the register is used, this is needed to
6041 determine how many hard regs to test.
6043 Other read-only reloads with the same value do not conflict
6044 unless OUT is nonzero and these other reloads have to live while
6045 output reloads live.
6046 If OUT is CONST0_RTX, this is a special case: it means that the
6047 test should not be for using register REGNO as reload register, but
6048 for copying from register REGNO into the reload register.
6050 RELOADNUM is the number of the reload we want to load this value for;
6051 a reload does not conflict with itself.
6053 When IGNORE_ADDRESS_RELOADS is set, we can not have conflicts with
6054 reloads that load an address for the very reload we are considering.
6056 The caller has to make sure that there is no conflict with the return
6060 free_for_value_p (int regno
, machine_mode mode
, int opnum
,
6061 enum reload_type type
, rtx value
, rtx out
, int reloadnum
,
6062 int ignore_address_reloads
)
6064 int nregs
= hard_regno_nregs
[regno
][mode
];
6066 if (! reload_reg_free_for_value_p (regno
, regno
+ nregs
, opnum
, type
,
6067 value
, out
, reloadnum
,
6068 ignore_address_reloads
))
6073 /* Return nonzero if the rtx X is invariant over the current function. */
6074 /* ??? Actually, the places where we use this expect exactly what is
6075 tested here, and not everything that is function invariant. In
6076 particular, the frame pointer and arg pointer are special cased;
6077 pic_offset_table_rtx is not, and we must not spill these things to
6081 function_invariant_p (const_rtx x
)
6085 if (x
== frame_pointer_rtx
|| x
== arg_pointer_rtx
)
6087 if (GET_CODE (x
) == PLUS
6088 && (XEXP (x
, 0) == frame_pointer_rtx
|| XEXP (x
, 0) == arg_pointer_rtx
)
6089 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
6094 /* Determine whether the reload reg X overlaps any rtx'es used for
6095 overriding inheritance. Return nonzero if so. */
6098 conflicts_with_override (rtx x
)
6101 for (i
= 0; i
< n_reloads
; i
++)
6102 if (reload_override_in
[i
]
6103 && reg_overlap_mentioned_p (x
, reload_override_in
[i
]))
6108 /* Give an error message saying we failed to find a reload for INSN,
6109 and clear out reload R. */
6111 failed_reload (rtx_insn
*insn
, int r
)
6113 if (asm_noperands (PATTERN (insn
)) < 0)
6114 /* It's the compiler's fault. */
6115 fatal_insn ("could not find a spill register", insn
);
6117 /* It's the user's fault; the operand's mode and constraint
6118 don't match. Disable this reload so we don't crash in final. */
6119 error_for_asm (insn
,
6120 "%<asm%> operand constraint incompatible with operand size");
6124 rld
[r
].optional
= 1;
6125 rld
[r
].secondary_p
= 1;
6128 /* I is the index in SPILL_REG_RTX of the reload register we are to allocate
6129 for reload R. If it's valid, get an rtx for it. Return nonzero if
6132 set_reload_reg (int i
, int r
)
6134 /* regno is 'set but not used' if HARD_REGNO_MODE_OK doesn't use its first
6136 int regno ATTRIBUTE_UNUSED
;
6137 rtx reg
= spill_reg_rtx
[i
];
6139 if (reg
== 0 || GET_MODE (reg
) != rld
[r
].mode
)
6140 spill_reg_rtx
[i
] = reg
6141 = gen_rtx_REG (rld
[r
].mode
, spill_regs
[i
]);
6143 regno
= true_regnum (reg
);
6145 /* Detect when the reload reg can't hold the reload mode.
6146 This used to be one `if', but Sequent compiler can't handle that. */
6147 if (HARD_REGNO_MODE_OK (regno
, rld
[r
].mode
))
6149 machine_mode test_mode
= VOIDmode
;
6151 test_mode
= GET_MODE (rld
[r
].in
);
6152 /* If rld[r].in has VOIDmode, it means we will load it
6153 in whatever mode the reload reg has: to wit, rld[r].mode.
6154 We have already tested that for validity. */
6155 /* Aside from that, we need to test that the expressions
6156 to reload from or into have modes which are valid for this
6157 reload register. Otherwise the reload insns would be invalid. */
6158 if (! (rld
[r
].in
!= 0 && test_mode
!= VOIDmode
6159 && ! HARD_REGNO_MODE_OK (regno
, test_mode
)))
6160 if (! (rld
[r
].out
!= 0
6161 && ! HARD_REGNO_MODE_OK (regno
, GET_MODE (rld
[r
].out
))))
6163 /* The reg is OK. */
6166 /* Mark as in use for this insn the reload regs we use
6168 mark_reload_reg_in_use (spill_regs
[i
], rld
[r
].opnum
,
6169 rld
[r
].when_needed
, rld
[r
].mode
);
6171 rld
[r
].reg_rtx
= reg
;
6172 reload_spill_index
[r
] = spill_regs
[i
];
6179 /* Find a spill register to use as a reload register for reload R.
6180 LAST_RELOAD is nonzero if this is the last reload for the insn being
6183 Set rld[R].reg_rtx to the register allocated.
6185 We return 1 if successful, or 0 if we couldn't find a spill reg and
6186 we didn't change anything. */
6189 allocate_reload_reg (struct insn_chain
*chain ATTRIBUTE_UNUSED
, int r
,
6194 /* If we put this reload ahead, thinking it is a group,
6195 then insist on finding a group. Otherwise we can grab a
6196 reg that some other reload needs.
6197 (That can happen when we have a 68000 DATA_OR_FP_REG
6198 which is a group of data regs or one fp reg.)
6199 We need not be so restrictive if there are no more reloads
6202 ??? Really it would be nicer to have smarter handling
6203 for that kind of reg class, where a problem like this is normal.
6204 Perhaps those classes should be avoided for reloading
6205 by use of more alternatives. */
6207 int force_group
= rld
[r
].nregs
> 1 && ! last_reload
;
6209 /* If we want a single register and haven't yet found one,
6210 take any reg in the right class and not in use.
6211 If we want a consecutive group, here is where we look for it.
6213 We use three passes so we can first look for reload regs to
6214 reuse, which are already in use for other reloads in this insn,
6215 and only then use additional registers which are not "bad", then
6216 finally any register.
6218 I think that maximizing reuse is needed to make sure we don't
6219 run out of reload regs. Suppose we have three reloads, and
6220 reloads A and B can share regs. These need two regs.
6221 Suppose A and B are given different regs.
6222 That leaves none for C. */
6223 for (pass
= 0; pass
< 3; pass
++)
6225 /* I is the index in spill_regs.
6226 We advance it round-robin between insns to use all spill regs
6227 equally, so that inherited reloads have a chance
6228 of leapfrogging each other. */
6232 for (count
= 0; count
< n_spills
; count
++)
6234 int rclass
= (int) rld
[r
].rclass
;
6240 regnum
= spill_regs
[i
];
6242 if ((reload_reg_free_p (regnum
, rld
[r
].opnum
,
6245 /* We check reload_reg_used to make sure we
6246 don't clobber the return register. */
6247 && ! TEST_HARD_REG_BIT (reload_reg_used
, regnum
)
6248 && free_for_value_p (regnum
, rld
[r
].mode
, rld
[r
].opnum
,
6249 rld
[r
].when_needed
, rld
[r
].in
,
6251 && TEST_HARD_REG_BIT (reg_class_contents
[rclass
], regnum
)
6252 && HARD_REGNO_MODE_OK (regnum
, rld
[r
].mode
)
6253 /* Look first for regs to share, then for unshared. But
6254 don't share regs used for inherited reloads; they are
6255 the ones we want to preserve. */
6257 || (TEST_HARD_REG_BIT (reload_reg_used_at_all
,
6259 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit
,
6262 int nr
= hard_regno_nregs
[regnum
][rld
[r
].mode
];
6264 /* During the second pass we want to avoid reload registers
6265 which are "bad" for this reload. */
6267 && ira_bad_reload_regno (regnum
, rld
[r
].in
, rld
[r
].out
))
6270 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
6271 (on 68000) got us two FP regs. If NR is 1,
6272 we would reject both of them. */
6275 /* If we need only one reg, we have already won. */
6278 /* But reject a single reg if we demand a group. */
6283 /* Otherwise check that as many consecutive regs as we need
6284 are available here. */
6287 int regno
= regnum
+ nr
- 1;
6288 if (!(TEST_HARD_REG_BIT (reg_class_contents
[rclass
], regno
)
6289 && spill_reg_order
[regno
] >= 0
6290 && reload_reg_free_p (regno
, rld
[r
].opnum
,
6291 rld
[r
].when_needed
)))
6300 /* If we found something on the current pass, omit later passes. */
6301 if (count
< n_spills
)
6305 /* We should have found a spill register by now. */
6306 if (count
>= n_spills
)
6309 /* I is the index in SPILL_REG_RTX of the reload register we are to
6310 allocate. Get an rtx for it and find its register number. */
6312 return set_reload_reg (i
, r
);
6315 /* Initialize all the tables needed to allocate reload registers.
6316 CHAIN is the insn currently being processed; SAVE_RELOAD_REG_RTX
6317 is the array we use to restore the reg_rtx field for every reload. */
6320 choose_reload_regs_init (struct insn_chain
*chain
, rtx
*save_reload_reg_rtx
)
6324 for (i
= 0; i
< n_reloads
; i
++)
6325 rld
[i
].reg_rtx
= save_reload_reg_rtx
[i
];
6327 memset (reload_inherited
, 0, MAX_RELOADS
);
6328 memset (reload_inheritance_insn
, 0, MAX_RELOADS
* sizeof (rtx
));
6329 memset (reload_override_in
, 0, MAX_RELOADS
* sizeof (rtx
));
6331 CLEAR_HARD_REG_SET (reload_reg_used
);
6332 CLEAR_HARD_REG_SET (reload_reg_used_at_all
);
6333 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr
);
6334 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload
);
6335 CLEAR_HARD_REG_SET (reload_reg_used_in_insn
);
6336 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr
);
6338 CLEAR_HARD_REG_SET (reg_used_in_insn
);
6341 REG_SET_TO_HARD_REG_SET (tmp
, &chain
->live_throughout
);
6342 IOR_HARD_REG_SET (reg_used_in_insn
, tmp
);
6343 REG_SET_TO_HARD_REG_SET (tmp
, &chain
->dead_or_set
);
6344 IOR_HARD_REG_SET (reg_used_in_insn
, tmp
);
6345 compute_use_by_pseudos (®_used_in_insn
, &chain
->live_throughout
);
6346 compute_use_by_pseudos (®_used_in_insn
, &chain
->dead_or_set
);
6349 for (i
= 0; i
< reload_n_operands
; i
++)
6351 CLEAR_HARD_REG_SET (reload_reg_used_in_output
[i
]);
6352 CLEAR_HARD_REG_SET (reload_reg_used_in_input
[i
]);
6353 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr
[i
]);
6354 CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr
[i
]);
6355 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr
[i
]);
6356 CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr
[i
]);
6359 COMPL_HARD_REG_SET (reload_reg_unavailable
, chain
->used_spill_regs
);
6361 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit
);
6363 for (i
= 0; i
< n_reloads
; i
++)
6364 /* If we have already decided to use a certain register,
6365 don't use it in another way. */
6367 mark_reload_reg_in_use (REGNO (rld
[i
].reg_rtx
), rld
[i
].opnum
,
6368 rld
[i
].when_needed
, rld
[i
].mode
);
#ifdef SECONDARY_MEMORY_NEEDED
/* If X is not a subreg, return it unmodified.  If it is a subreg,
   look up whether we made a replacement for the SUBREG_REG.  Return
   either the replacement or the SUBREG_REG.  */

static rtx
replaced_subreg (rtx x)
{
  if (GET_CODE (x) == SUBREG)
    return find_replacement (&SUBREG_REG (x));
  return x;
}
#endif
6385 /* Compute the offset to pass to subreg_regno_offset, for a pseudo of
6386 mode OUTERMODE that is available in a hard reg of mode INNERMODE.
6387 SUBREG is non-NULL if the pseudo is a subreg whose reg is a pseudo,
6388 otherwise it is NULL. */
6391 compute_reload_subreg_offset (machine_mode outermode
,
6393 machine_mode innermode
)
6396 machine_mode middlemode
;
6399 return subreg_lowpart_offset (outermode
, innermode
);
6401 outer_offset
= SUBREG_BYTE (subreg
);
6402 middlemode
= GET_MODE (SUBREG_REG (subreg
));
6404 /* If SUBREG is paradoxical then return the normal lowpart offset
6405 for OUTERMODE and INNERMODE. Our caller has already checked
6406 that OUTERMODE fits in INNERMODE. */
6407 if (outer_offset
== 0
6408 && GET_MODE_SIZE (outermode
) > GET_MODE_SIZE (middlemode
))
6409 return subreg_lowpart_offset (outermode
, innermode
);
6411 /* SUBREG is normal, but may not be lowpart; return OUTER_OFFSET
6412 plus the normal lowpart offset for MIDDLEMODE and INNERMODE. */
6413 return outer_offset
+ subreg_lowpart_offset (middlemode
, innermode
);
6416 /* Assign hard reg targets for the pseudo-registers we must reload
6417 into hard regs for this insn.
6418 Also output the instructions to copy them in and out of the hard regs.
6420 For machines with register classes, we are responsible for
6421 finding a reload reg in the proper class. */
6424 choose_reload_regs (struct insn_chain
*chain
)
6426 rtx_insn
*insn
= chain
->insn
;
6428 unsigned int max_group_size
= 1;
6429 enum reg_class group_class
= NO_REGS
;
6430 int pass
, win
, inheritance
;
6432 rtx save_reload_reg_rtx
[MAX_RELOADS
];
6434 /* In order to be certain of getting the registers we need,
6435 we must sort the reloads into order of increasing register class.
6436 Then our grabbing of reload registers will parallel the process
6437 that provided the reload registers.
6439 Also note whether any of the reloads wants a consecutive group of regs.
6440 If so, record the maximum size of the group desired and what
6441 register class contains all the groups needed by this insn. */
6443 for (j
= 0; j
< n_reloads
; j
++)
6445 reload_order
[j
] = j
;
6446 if (rld
[j
].reg_rtx
!= NULL_RTX
)
6448 gcc_assert (REG_P (rld
[j
].reg_rtx
)
6449 && HARD_REGISTER_P (rld
[j
].reg_rtx
));
6450 reload_spill_index
[j
] = REGNO (rld
[j
].reg_rtx
);
6453 reload_spill_index
[j
] = -1;
6455 if (rld
[j
].nregs
> 1)
6457 max_group_size
= MAX (rld
[j
].nregs
, max_group_size
);
6459 = reg_class_superunion
[(int) rld
[j
].rclass
][(int) group_class
];
6462 save_reload_reg_rtx
[j
] = rld
[j
].reg_rtx
;
6466 qsort (reload_order
, n_reloads
, sizeof (short), reload_reg_class_lower
);
6468 /* If -O, try first with inheritance, then turning it off.
6469 If not -O, don't do inheritance.
6470 Using inheritance when not optimizing leads to paradoxes
6471 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
6472 because one side of the comparison might be inherited. */
6474 for (inheritance
= optimize
> 0; inheritance
>= 0; inheritance
--)
6476 choose_reload_regs_init (chain
, save_reload_reg_rtx
);
6478 /* Process the reloads in order of preference just found.
6479 Beyond this point, subregs can be found in reload_reg_rtx.
6481 This used to look for an existing reloaded home for all of the
6482 reloads, and only then perform any new reloads. But that could lose
6483 if the reloads were done out of reg-class order because a later
6484 reload with a looser constraint might have an old home in a register
6485 needed by an earlier reload with a tighter constraint.
6487 To solve this, we make two passes over the reloads, in the order
6488 described above. In the first pass we try to inherit a reload
6489 from a previous insn. If there is a later reload that needs a
6490 class that is a proper subset of the class being processed, we must
6491 also allocate a spill register during the first pass.
6493 Then make a second pass over the reloads to allocate any reloads
6494 that haven't been given registers yet. */
6496 for (j
= 0; j
< n_reloads
; j
++)
6498 int r
= reload_order
[j
];
6499 rtx search_equiv
= NULL_RTX
;
6501 /* Ignore reloads that got marked inoperative. */
6502 if (rld
[r
].out
== 0 && rld
[r
].in
== 0
6503 && ! rld
[r
].secondary_p
)
6506 /* If find_reloads chose to use reload_in or reload_out as a reload
6507 register, we don't need to chose one. Otherwise, try even if it
6508 found one since we might save an insn if we find the value lying
6510 Try also when reload_in is a pseudo without a hard reg. */
6511 if (rld
[r
].in
!= 0 && rld
[r
].reg_rtx
!= 0
6512 && (rtx_equal_p (rld
[r
].in
, rld
[r
].reg_rtx
)
6513 || (rtx_equal_p (rld
[r
].out
, rld
[r
].reg_rtx
)
6514 && !MEM_P (rld
[r
].in
)
6515 && true_regnum (rld
[r
].in
) < FIRST_PSEUDO_REGISTER
)))
6518 #if 0 /* No longer needed for correct operation.
6519 It might give better code, or might not; worth an experiment? */
6520 /* If this is an optional reload, we can't inherit from earlier insns
6521 until we are sure that any non-optional reloads have been allocated.
6522 The following code takes advantage of the fact that optional reloads
6523 are at the end of reload_order. */
6524 if (rld
[r
].optional
!= 0)
6525 for (i
= 0; i
< j
; i
++)
6526 if ((rld
[reload_order
[i
]].out
!= 0
6527 || rld
[reload_order
[i
]].in
!= 0
6528 || rld
[reload_order
[i
]].secondary_p
)
6529 && ! rld
[reload_order
[i
]].optional
6530 && rld
[reload_order
[i
]].reg_rtx
== 0)
6531 allocate_reload_reg (chain
, reload_order
[i
], 0);
6534 /* First see if this pseudo is already available as reloaded
6535 for a previous insn. We cannot try to inherit for reloads
6536 that are smaller than the maximum number of registers needed
6537 for groups unless the register we would allocate cannot be used
6540 We could check here to see if this is a secondary reload for
6541 an object that is already in a register of the desired class.
6542 This would avoid the need for the secondary reload register.
6543 But this is complex because we can't easily determine what
6544 objects might want to be loaded via this reload. So let a
6545 register be allocated here. In `emit_reload_insns' we suppress
6546 one of the loads in the case described above. */
6552 machine_mode mode
= VOIDmode
;
6553 rtx subreg
= NULL_RTX
;
6557 else if (REG_P (rld
[r
].in
))
6559 regno
= REGNO (rld
[r
].in
);
6560 mode
= GET_MODE (rld
[r
].in
);
6562 else if (REG_P (rld
[r
].in_reg
))
6564 regno
= REGNO (rld
[r
].in_reg
);
6565 mode
= GET_MODE (rld
[r
].in_reg
);
6567 else if (GET_CODE (rld
[r
].in_reg
) == SUBREG
6568 && REG_P (SUBREG_REG (rld
[r
].in_reg
)))
6570 regno
= REGNO (SUBREG_REG (rld
[r
].in_reg
));
6571 if (regno
< FIRST_PSEUDO_REGISTER
)
6572 regno
= subreg_regno (rld
[r
].in_reg
);
6575 subreg
= rld
[r
].in_reg
;
6576 byte
= SUBREG_BYTE (subreg
);
6578 mode
= GET_MODE (rld
[r
].in_reg
);
6581 else if (GET_RTX_CLASS (GET_CODE (rld
[r
].in_reg
)) == RTX_AUTOINC
6582 && REG_P (XEXP (rld
[r
].in_reg
, 0)))
6584 regno
= REGNO (XEXP (rld
[r
].in_reg
, 0));
6585 mode
= GET_MODE (XEXP (rld
[r
].in_reg
, 0));
6586 rld
[r
].out
= rld
[r
].in
;
6590 /* This won't work, since REGNO can be a pseudo reg number.
6591 Also, it takes much more hair to keep track of all the things
6592 that can invalidate an inherited reload of part of a pseudoreg. */
6593 else if (GET_CODE (rld
[r
].in
) == SUBREG
6594 && REG_P (SUBREG_REG (rld
[r
].in
)))
6595 regno
= subreg_regno (rld
[r
].in
);
6599 && reg_last_reload_reg
[regno
] != 0
6600 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg
[regno
]))
6601 >= GET_MODE_SIZE (mode
) + byte
)
6602 #ifdef CANNOT_CHANGE_MODE_CLASS
6603 /* Verify that the register it's in can be used in
6605 && !REG_CANNOT_CHANGE_MODE_P (REGNO (reg_last_reload_reg
[regno
]),
6606 GET_MODE (reg_last_reload_reg
[regno
]),
6611 enum reg_class rclass
= rld
[r
].rclass
, last_class
;
6612 rtx last_reg
= reg_last_reload_reg
[regno
];
6614 i
= REGNO (last_reg
);
6615 byte
= compute_reload_subreg_offset (mode
,
6617 GET_MODE (last_reg
));
6618 i
+= subreg_regno_offset (i
, GET_MODE (last_reg
), byte
, mode
);
6619 last_class
= REGNO_REG_CLASS (i
);
6621 if (reg_reloaded_contents
[i
] == regno
6622 && TEST_HARD_REG_BIT (reg_reloaded_valid
, i
)
6623 && HARD_REGNO_MODE_OK (i
, rld
[r
].mode
)
6624 && (TEST_HARD_REG_BIT (reg_class_contents
[(int) rclass
], i
)
6625 /* Even if we can't use this register as a reload
6626 register, we might use it for reload_override_in,
6627 if copying it to the desired class is cheap
6629 || ((register_move_cost (mode
, last_class
, rclass
)
6630 < memory_move_cost (mode
, rclass
, true))
6631 && (secondary_reload_class (1, rclass
, mode
,
6634 #ifdef SECONDARY_MEMORY_NEEDED
6635 && ! SECONDARY_MEMORY_NEEDED (last_class
, rclass
,
6640 && (rld
[r
].nregs
== max_group_size
6641 || ! TEST_HARD_REG_BIT (reg_class_contents
[(int) group_class
],
6643 && free_for_value_p (i
, rld
[r
].mode
, rld
[r
].opnum
,
6644 rld
[r
].when_needed
, rld
[r
].in
,
6647 /* If a group is needed, verify that all the subsequent
6648 registers still have their values intact. */
6649 int nr
= hard_regno_nregs
[i
][rld
[r
].mode
];
6652 for (k
= 1; k
< nr
; k
++)
6653 if (reg_reloaded_contents
[i
+ k
] != regno
6654 || ! TEST_HARD_REG_BIT (reg_reloaded_valid
, i
+ k
))
6662 last_reg
= (GET_MODE (last_reg
) == mode
6663 ? last_reg
: gen_rtx_REG (mode
, i
));
6666 for (k
= 0; k
< nr
; k
++)
6667 bad_for_class
|= ! TEST_HARD_REG_BIT (reg_class_contents
[(int) rld
[r
].rclass
],
6670 /* We found a register that contains the
6671 value we need. If this register is the
6672 same as an `earlyclobber' operand of the
6673 current insn, just mark it as a place to
6674 reload from since we can't use it as the
6675 reload register itself. */
6677 for (i1
= 0; i1
< n_earlyclobbers
; i1
++)
6678 if (reg_overlap_mentioned_for_reload_p
6679 (reg_last_reload_reg
[regno
],
6680 reload_earlyclobbers
[i1
]))
6683 if (i1
!= n_earlyclobbers
6684 || ! (free_for_value_p (i
, rld
[r
].mode
,
6686 rld
[r
].when_needed
, rld
[r
].in
,
6688 /* Don't use it if we'd clobber a pseudo reg. */
6689 || (TEST_HARD_REG_BIT (reg_used_in_insn
, i
)
6691 && ! TEST_HARD_REG_BIT (reg_reloaded_dead
, i
))
6692 /* Don't clobber the frame pointer. */
6693 || (i
== HARD_FRAME_POINTER_REGNUM
6694 && frame_pointer_needed
6696 /* Don't really use the inherited spill reg
6697 if we need it wider than we've got it. */
6698 || (GET_MODE_SIZE (rld
[r
].mode
)
6699 > GET_MODE_SIZE (mode
))
6702 /* If find_reloads chose reload_out as reload
6703 register, stay with it - that leaves the
6704 inherited register for subsequent reloads. */
6705 || (rld
[r
].out
&& rld
[r
].reg_rtx
6706 && rtx_equal_p (rld
[r
].out
, rld
[r
].reg_rtx
)))
6708 if (! rld
[r
].optional
)
6710 reload_override_in
[r
] = last_reg
;
6711 reload_inheritance_insn
[r
]
6712 = reg_reloaded_insn
[i
];
6718 /* We can use this as a reload reg. */
6719 /* Mark the register as in use for this part of
6721 mark_reload_reg_in_use (i
,
6725 rld
[r
].reg_rtx
= last_reg
;
6726 reload_inherited
[r
] = 1;
6727 reload_inheritance_insn
[r
]
6728 = reg_reloaded_insn
[i
];
6729 reload_spill_index
[r
] = i
;
6730 for (k
= 0; k
< nr
; k
++)
6731 SET_HARD_REG_BIT (reload_reg_used_for_inherit
,
6739 /* Here's another way to see if the value is already lying around. */
6742 && ! reload_inherited
[r
]
6744 && (CONSTANT_P (rld
[r
].in
)
6745 || GET_CODE (rld
[r
].in
) == PLUS
6746 || REG_P (rld
[r
].in
)
6747 || MEM_P (rld
[r
].in
))
6748 && (rld
[r
].nregs
== max_group_size
6749 || ! reg_classes_intersect_p (rld
[r
].rclass
, group_class
)))
6750 search_equiv
= rld
[r
].in
;
6755 = find_equiv_reg (search_equiv
, insn
, rld
[r
].rclass
,
6756 -1, NULL
, 0, rld
[r
].mode
);
6762 regno
= REGNO (equiv
);
6765 /* This must be a SUBREG of a hard register.
6766 Make a new REG since this might be used in an
6767 address and not all machines support SUBREGs
6769 gcc_assert (GET_CODE (equiv
) == SUBREG
);
6770 regno
= subreg_regno (equiv
);
6771 equiv
= gen_rtx_REG (rld
[r
].mode
, regno
);
6772 /* If we choose EQUIV as the reload register, but the
6773 loop below decides to cancel the inheritance, we'll
6774 end up reloading EQUIV in rld[r].mode, not the mode
6775 it had originally. That isn't safe when EQUIV isn't
6776 available as a spill register since its value might
6777 still be live at this point. */
6778 for (i
= regno
; i
< regno
+ (int) rld
[r
].nregs
; i
++)
6779 if (TEST_HARD_REG_BIT (reload_reg_unavailable
, i
))
6784 /* If we found a spill reg, reject it unless it is free
6785 and of the desired class. */
6789 int bad_for_class
= 0;
6790 int max_regno
= regno
+ rld
[r
].nregs
;
6792 for (i
= regno
; i
< max_regno
; i
++)
6794 regs_used
|= TEST_HARD_REG_BIT (reload_reg_used_at_all
,
6796 bad_for_class
|= ! TEST_HARD_REG_BIT (reg_class_contents
[(int) rld
[r
].rclass
],
6801 && ! free_for_value_p (regno
, rld
[r
].mode
,
6802 rld
[r
].opnum
, rld
[r
].when_needed
,
6803 rld
[r
].in
, rld
[r
].out
, r
, 1))
6808 if (equiv
!= 0 && ! HARD_REGNO_MODE_OK (regno
, rld
[r
].mode
))
6811 /* We found a register that contains the value we need.
6812 If this register is the same as an `earlyclobber' operand
6813 of the current insn, just mark it as a place to reload from
6814 since we can't use it as the reload register itself. */
6817 for (i
= 0; i
< n_earlyclobbers
; i
++)
6818 if (reg_overlap_mentioned_for_reload_p (equiv
,
6819 reload_earlyclobbers
[i
]))
6821 if (! rld
[r
].optional
)
6822 reload_override_in
[r
] = equiv
;
6827 /* If the equiv register we have found is explicitly clobbered
6828 in the current insn, it depends on the reload type if we
6829 can use it, use it for reload_override_in, or not at all.
6830 In particular, we then can't use EQUIV for a
6831 RELOAD_FOR_OUTPUT_ADDRESS reload. */
6835 if (regno_clobbered_p (regno
, insn
, rld
[r
].mode
, 2))
6836 switch (rld
[r
].when_needed
)
6838 case RELOAD_FOR_OTHER_ADDRESS
:
6839 case RELOAD_FOR_INPADDR_ADDRESS
:
6840 case RELOAD_FOR_INPUT_ADDRESS
:
6841 case RELOAD_FOR_OPADDR_ADDR
:
6844 case RELOAD_FOR_INPUT
:
6845 case RELOAD_FOR_OPERAND_ADDRESS
:
6846 if (! rld
[r
].optional
)
6847 reload_override_in
[r
] = equiv
;
6853 else if (regno_clobbered_p (regno
, insn
, rld
[r
].mode
, 1))
6854 switch (rld
[r
].when_needed
)
6856 case RELOAD_FOR_OTHER_ADDRESS
:
6857 case RELOAD_FOR_INPADDR_ADDRESS
:
6858 case RELOAD_FOR_INPUT_ADDRESS
:
6859 case RELOAD_FOR_OPADDR_ADDR
:
6860 case RELOAD_FOR_OPERAND_ADDRESS
:
6861 case RELOAD_FOR_INPUT
:
6864 if (! rld
[r
].optional
)
6865 reload_override_in
[r
] = equiv
;
6873 /* If we found an equivalent reg, say no code need be generated
6874 to load it, and use it as our reload reg. */
6876 && (regno
!= HARD_FRAME_POINTER_REGNUM
6877 || !frame_pointer_needed
))
6879 int nr
= hard_regno_nregs
[regno
][rld
[r
].mode
];
6881 rld
[r
].reg_rtx
= equiv
;
6882 reload_spill_index
[r
] = regno
;
6883 reload_inherited
[r
] = 1;
6885 /* If reg_reloaded_valid is not set for this register,
6886 there might be a stale spill_reg_store lying around.
6887 We must clear it, since otherwise emit_reload_insns
6888 might delete the store. */
6889 if (! TEST_HARD_REG_BIT (reg_reloaded_valid
, regno
))
6890 spill_reg_store
[regno
] = NULL
;
6891 /* If any of the hard registers in EQUIV are spill
6892 registers, mark them as in use for this insn. */
6893 for (k
= 0; k
< nr
; k
++)
6895 i
= spill_reg_order
[regno
+ k
];
6898 mark_reload_reg_in_use (regno
, rld
[r
].opnum
,
6901 SET_HARD_REG_BIT (reload_reg_used_for_inherit
,
6908 /* If we found a register to use already, or if this is an optional
6909 reload, we are done. */
6910 if (rld
[r
].reg_rtx
!= 0 || rld
[r
].optional
!= 0)
6914 /* No longer needed for correct operation. Might or might
6915 not give better code on the average. Want to experiment? */
6917 /* See if there is a later reload that has a class different from our
6918 class that intersects our class or that requires less register
6919 than our reload. If so, we must allocate a register to this
6920 reload now, since that reload might inherit a previous reload
6921 and take the only available register in our class. Don't do this
6922 for optional reloads since they will force all previous reloads
6923 to be allocated. Also don't do this for reloads that have been
6926 for (i
= j
+ 1; i
< n_reloads
; i
++)
6928 int s
= reload_order
[i
];
6930 if ((rld
[s
].in
== 0 && rld
[s
].out
== 0
6931 && ! rld
[s
].secondary_p
)
6935 if ((rld
[s
].rclass
!= rld
[r
].rclass
6936 && reg_classes_intersect_p (rld
[r
].rclass
,
6938 || rld
[s
].nregs
< rld
[r
].nregs
)
6945 allocate_reload_reg (chain
, r
, j
== n_reloads
- 1);
6949 /* Now allocate reload registers for anything non-optional that
6950 didn't get one yet. */
6951 for (j
= 0; j
< n_reloads
; j
++)
6953 int r
= reload_order
[j
];
6955 /* Ignore reloads that got marked inoperative. */
6956 if (rld
[r
].out
== 0 && rld
[r
].in
== 0 && ! rld
[r
].secondary_p
)
6959 /* Skip reloads that already have a register allocated or are
6961 if (rld
[r
].reg_rtx
!= 0 || rld
[r
].optional
)
6964 if (! allocate_reload_reg (chain
, r
, j
== n_reloads
- 1))
6968 /* If that loop got all the way, we have won. */
6975 /* Loop around and try without any inheritance. */
6980 /* First undo everything done by the failed attempt
6981 to allocate with inheritance. */
6982 choose_reload_regs_init (chain
, save_reload_reg_rtx
);
6984 /* Some sanity tests to verify that the reloads found in the first
6985 pass are identical to the ones we have now. */
6986 gcc_assert (chain
->n_reloads
== n_reloads
);
6988 for (i
= 0; i
< n_reloads
; i
++)
6990 if (chain
->rld
[i
].regno
< 0 || chain
->rld
[i
].reg_rtx
!= 0)
6992 gcc_assert (chain
->rld
[i
].when_needed
== rld
[i
].when_needed
);
6993 for (j
= 0; j
< n_spills
; j
++)
6994 if (spill_regs
[j
] == chain
->rld
[i
].regno
)
6995 if (! set_reload_reg (j
, i
))
6996 failed_reload (chain
->insn
, i
);
7000 /* If we thought we could inherit a reload, because it seemed that
7001 nothing else wanted the same reload register earlier in the insn,
7002 verify that assumption, now that all reloads have been assigned.
7003 Likewise for reloads where reload_override_in has been set. */
7005 /* If doing expensive optimizations, do one preliminary pass that doesn't
7006 cancel any inheritance, but removes reloads that have been needed only
7007 for reloads that we know can be inherited. */
7008 for (pass
= flag_expensive_optimizations
; pass
>= 0; pass
--)
7010 for (j
= 0; j
< n_reloads
; j
++)
7012 int r
= reload_order
[j
];
7014 #ifdef SECONDARY_MEMORY_NEEDED
7017 if (reload_inherited
[r
] && rld
[r
].reg_rtx
)
7018 check_reg
= rld
[r
].reg_rtx
;
7019 else if (reload_override_in
[r
]
7020 && (REG_P (reload_override_in
[r
])
7021 || GET_CODE (reload_override_in
[r
]) == SUBREG
))
7022 check_reg
= reload_override_in
[r
];
7025 if (! free_for_value_p (true_regnum (check_reg
), rld
[r
].mode
,
7026 rld
[r
].opnum
, rld
[r
].when_needed
, rld
[r
].in
,
7027 (reload_inherited
[r
]
7028 ? rld
[r
].out
: const0_rtx
),
7033 reload_inherited
[r
] = 0;
7034 reload_override_in
[r
] = 0;
7036 /* If we can inherit a RELOAD_FOR_INPUT, or can use a
7037 reload_override_in, then we do not need its related
7038 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
7039 likewise for other reload types.
7040 We handle this by removing a reload when its only replacement
7041 is mentioned in reload_in of the reload we are going to inherit.
7042 A special case are auto_inc expressions; even if the input is
7043 inherited, we still need the address for the output. We can
7044 recognize them because they have RELOAD_OUT set to RELOAD_IN.
7045 If we succeeded removing some reload and we are doing a preliminary
7046 pass just to remove such reloads, make another pass, since the
7047 removal of one reload might allow us to inherit another one. */
7049 && rld
[r
].out
!= rld
[r
].in
7050 && remove_address_replacements (rld
[r
].in
))
7055 #ifdef SECONDARY_MEMORY_NEEDED
7056 /* If we needed a memory location for the reload, we also have to
7057 remove its related reloads. */
7059 && rld
[r
].out
!= rld
[r
].in
7060 && (tem
= replaced_subreg (rld
[r
].in
), REG_P (tem
))
7061 && REGNO (tem
) < FIRST_PSEUDO_REGISTER
7062 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (tem
)),
7063 rld
[r
].rclass
, rld
[r
].inmode
)
7064 && remove_address_replacements
7065 (get_secondary_mem (tem
, rld
[r
].inmode
, rld
[r
].opnum
,
7066 rld
[r
].when_needed
)))
7075 /* Now that reload_override_in is known valid,
7076 actually override reload_in. */
7077 for (j
= 0; j
< n_reloads
; j
++)
7078 if (reload_override_in
[j
])
7079 rld
[j
].in
= reload_override_in
[j
];
7081 /* If this reload won't be done because it has been canceled or is
7082 optional and not inherited, clear reload_reg_rtx so other
7083 routines (such as subst_reloads) don't get confused. */
7084 for (j
= 0; j
< n_reloads
; j
++)
7085 if (rld
[j
].reg_rtx
!= 0
7086 && ((rld
[j
].optional
&& ! reload_inherited
[j
])
7087 || (rld
[j
].in
== 0 && rld
[j
].out
== 0
7088 && ! rld
[j
].secondary_p
)))
7090 int regno
= true_regnum (rld
[j
].reg_rtx
);
7092 if (spill_reg_order
[regno
] >= 0)
7093 clear_reload_reg_in_use (regno
, rld
[j
].opnum
,
7094 rld
[j
].when_needed
, rld
[j
].mode
);
7096 reload_spill_index
[j
] = -1;
7099 /* Record which pseudos and which spill regs have output reloads. */
7100 for (j
= 0; j
< n_reloads
; j
++)
7102 int r
= reload_order
[j
];
7104 i
= reload_spill_index
[r
];
7106 /* I is nonneg if this reload uses a register.
7107 If rld[r].reg_rtx is 0, this is an optional reload
7108 that we opted to ignore. */
7109 if (rld
[r
].out_reg
!= 0 && REG_P (rld
[r
].out_reg
)
7110 && rld
[r
].reg_rtx
!= 0)
7112 int nregno
= REGNO (rld
[r
].out_reg
);
7115 if (nregno
< FIRST_PSEUDO_REGISTER
)
7116 nr
= hard_regno_nregs
[nregno
][rld
[r
].mode
];
7119 SET_REGNO_REG_SET (®_has_output_reload
,
7123 add_to_hard_reg_set (®_is_output_reload
, rld
[r
].mode
, i
);
7125 gcc_assert (rld
[r
].when_needed
== RELOAD_OTHER
7126 || rld
[r
].when_needed
== RELOAD_FOR_OUTPUT
7127 || rld
[r
].when_needed
== RELOAD_FOR_INSN
);
7132 /* Deallocate the reload register for reload R. This is called from
7133 remove_address_replacements. */
7136 deallocate_reload_reg (int r
)
7140 if (! rld
[r
].reg_rtx
)
7142 regno
= true_regnum (rld
[r
].reg_rtx
);
7144 if (spill_reg_order
[regno
] >= 0)
7145 clear_reload_reg_in_use (regno
, rld
[r
].opnum
, rld
[r
].when_needed
,
7147 reload_spill_index
[r
] = -1;
7150 /* These arrays are filled by emit_reload_insns and its subroutines. */
7151 static rtx_insn
*input_reload_insns
[MAX_RECOG_OPERANDS
];
7152 static rtx_insn
*other_input_address_reload_insns
= 0;
7153 static rtx_insn
*other_input_reload_insns
= 0;
7154 static rtx_insn
*input_address_reload_insns
[MAX_RECOG_OPERANDS
];
7155 static rtx_insn
*inpaddr_address_reload_insns
[MAX_RECOG_OPERANDS
];
7156 static rtx_insn
*output_reload_insns
[MAX_RECOG_OPERANDS
];
7157 static rtx_insn
*output_address_reload_insns
[MAX_RECOG_OPERANDS
];
7158 static rtx_insn
*outaddr_address_reload_insns
[MAX_RECOG_OPERANDS
];
7159 static rtx_insn
*operand_reload_insns
= 0;
7160 static rtx_insn
*other_operand_reload_insns
= 0;
7161 static rtx_insn
*other_output_reload_insns
[MAX_RECOG_OPERANDS
];
7163 /* Values to be put in spill_reg_store are put here first. Instructions
7164 must only be placed here if the associated reload register reaches
7165 the end of the instruction's reload sequence. */
7166 static rtx_insn
*new_spill_reg_store
[FIRST_PSEUDO_REGISTER
];
7167 static HARD_REG_SET reg_reloaded_died
;
7169 /* Check if *RELOAD_REG is suitable as an intermediate or scratch register
7170 of class NEW_CLASS with mode NEW_MODE. Or alternatively, if alt_reload_reg
7171 is nonzero, if that is suitable. On success, change *RELOAD_REG to the
7172 adjusted register, and return true. Otherwise, return false. */
7174 reload_adjust_reg_for_temp (rtx
*reload_reg
, rtx alt_reload_reg
,
7175 enum reg_class new_class
,
7176 machine_mode new_mode
)
7181 for (reg
= *reload_reg
; reg
; reg
= alt_reload_reg
, alt_reload_reg
= 0)
7183 unsigned regno
= REGNO (reg
);
7185 if (!TEST_HARD_REG_BIT (reg_class_contents
[(int) new_class
], regno
))
7187 if (GET_MODE (reg
) != new_mode
)
7189 if (!HARD_REGNO_MODE_OK (regno
, new_mode
))
7191 if (hard_regno_nregs
[regno
][new_mode
]
7192 > hard_regno_nregs
[regno
][GET_MODE (reg
)])
7194 reg
= reload_adjust_reg_for_mode (reg
, new_mode
);
7202 /* Check if *RELOAD_REG is suitable as a scratch register for the reload
7203 pattern with insn_code ICODE, or alternatively, if alt_reload_reg is
7204 nonzero, if that is suitable. On success, change *RELOAD_REG to the
7205 adjusted register, and return true. Otherwise, return false. */
7207 reload_adjust_reg_for_icode (rtx
*reload_reg
, rtx alt_reload_reg
,
7208 enum insn_code icode
)
7211 enum reg_class new_class
= scratch_reload_class (icode
);
7212 machine_mode new_mode
= insn_data
[(int) icode
].operand
[2].mode
;
7214 return reload_adjust_reg_for_temp (reload_reg
, alt_reload_reg
,
7215 new_class
, new_mode
);
7218 /* Generate insns to perform reload RL, which is for the insn in CHAIN and
7219 has the number J. OLD contains the value to be used as input. */
7222 emit_input_reload_insns (struct insn_chain
*chain
, struct reload
*rl
,
7225 rtx_insn
*insn
= chain
->insn
;
7227 rtx oldequiv_reg
= 0;
7233 /* delete_output_reload is only invoked properly if old contains
7234 the original pseudo register. Since this is replaced with a
7235 hard reg when RELOAD_OVERRIDE_IN is set, see if we can
7236 find the pseudo in RELOAD_IN_REG. This is also used to
7237 determine whether a secondary reload is needed. */
7238 if (reload_override_in
[j
]
7239 && (REG_P (rl
->in_reg
)
7240 || (GET_CODE (rl
->in_reg
) == SUBREG
7241 && REG_P (SUBREG_REG (rl
->in_reg
)))))
7248 else if (REG_P (oldequiv
))
7249 oldequiv_reg
= oldequiv
;
7250 else if (GET_CODE (oldequiv
) == SUBREG
)
7251 oldequiv_reg
= SUBREG_REG (oldequiv
);
7253 reloadreg
= reload_reg_rtx_for_input
[j
];
7254 mode
= GET_MODE (reloadreg
);
7256 /* If we are reloading from a register that was recently stored in
7257 with an output-reload, see if we can prove there was
7258 actually no need to store the old value in it. */
7260 if (optimize
&& REG_P (oldequiv
)
7261 && REGNO (oldequiv
) < FIRST_PSEUDO_REGISTER
7262 && spill_reg_store
[REGNO (oldequiv
)]
7264 && (dead_or_set_p (insn
, spill_reg_stored_to
[REGNO (oldequiv
)])
7265 || rtx_equal_p (spill_reg_stored_to
[REGNO (oldequiv
)],
7267 delete_output_reload (insn
, j
, REGNO (oldequiv
), reloadreg
);
7269 /* Encapsulate OLDEQUIV into the reload mode, then load RELOADREG from
7272 while (GET_CODE (oldequiv
) == SUBREG
&& GET_MODE (oldequiv
) != mode
)
7273 oldequiv
= SUBREG_REG (oldequiv
);
7274 if (GET_MODE (oldequiv
) != VOIDmode
7275 && mode
!= GET_MODE (oldequiv
))
7276 oldequiv
= gen_lowpart_SUBREG (mode
, oldequiv
);
7278 /* Switch to the right place to emit the reload insns. */
7279 switch (rl
->when_needed
)
7282 where
= &other_input_reload_insns
;
7284 case RELOAD_FOR_INPUT
:
7285 where
= &input_reload_insns
[rl
->opnum
];
7287 case RELOAD_FOR_INPUT_ADDRESS
:
7288 where
= &input_address_reload_insns
[rl
->opnum
];
7290 case RELOAD_FOR_INPADDR_ADDRESS
:
7291 where
= &inpaddr_address_reload_insns
[rl
->opnum
];
7293 case RELOAD_FOR_OUTPUT_ADDRESS
:
7294 where
= &output_address_reload_insns
[rl
->opnum
];
7296 case RELOAD_FOR_OUTADDR_ADDRESS
:
7297 where
= &outaddr_address_reload_insns
[rl
->opnum
];
7299 case RELOAD_FOR_OPERAND_ADDRESS
:
7300 where
= &operand_reload_insns
;
7302 case RELOAD_FOR_OPADDR_ADDR
:
7303 where
= &other_operand_reload_insns
;
7305 case RELOAD_FOR_OTHER_ADDRESS
:
7306 where
= &other_input_address_reload_insns
;
7312 push_to_sequence (*where
);
7314 /* Auto-increment addresses must be reloaded in a special way. */
7315 if (rl
->out
&& ! rl
->out_reg
)
7317 /* We are not going to bother supporting the case where a
7318 incremented register can't be copied directly from
7319 OLDEQUIV since this seems highly unlikely. */
7320 gcc_assert (rl
->secondary_in_reload
< 0);
7322 if (reload_inherited
[j
])
7323 oldequiv
= reloadreg
;
7325 old
= XEXP (rl
->in_reg
, 0);
7327 /* Prevent normal processing of this reload. */
7329 /* Output a special code sequence for this case. */
7330 inc_for_reload (reloadreg
, oldequiv
, rl
->out
, rl
->inc
);
7333 /* If we are reloading a pseudo-register that was set by the previous
7334 insn, see if we can get rid of that pseudo-register entirely
7335 by redirecting the previous insn into our reload register. */
7337 else if (optimize
&& REG_P (old
)
7338 && REGNO (old
) >= FIRST_PSEUDO_REGISTER
7339 && dead_or_set_p (insn
, old
)
7340 /* This is unsafe if some other reload
7341 uses the same reg first. */
7342 && ! conflicts_with_override (reloadreg
)
7343 && free_for_value_p (REGNO (reloadreg
), rl
->mode
, rl
->opnum
,
7344 rl
->when_needed
, old
, rl
->out
, j
, 0))
7346 rtx_insn
*temp
= PREV_INSN (insn
);
7347 while (temp
&& (NOTE_P (temp
) || DEBUG_INSN_P (temp
)))
7348 temp
= PREV_INSN (temp
);
7350 && NONJUMP_INSN_P (temp
)
7351 && GET_CODE (PATTERN (temp
)) == SET
7352 && SET_DEST (PATTERN (temp
)) == old
7353 /* Make sure we can access insn_operand_constraint. */
7354 && asm_noperands (PATTERN (temp
)) < 0
7355 /* This is unsafe if operand occurs more than once in current
7356 insn. Perhaps some occurrences aren't reloaded. */
7357 && count_occurrences (PATTERN (insn
), old
, 0) == 1)
7359 rtx old
= SET_DEST (PATTERN (temp
));
7360 /* Store into the reload register instead of the pseudo. */
7361 SET_DEST (PATTERN (temp
)) = reloadreg
;
7363 /* Verify that resulting insn is valid.
7365 Note that we have replaced the destination of TEMP with
7366 RELOADREG. If TEMP references RELOADREG within an
7367 autoincrement addressing mode, then the resulting insn
7368 is ill-formed and we must reject this optimization. */
7369 extract_insn (temp
);
7370 if (constrain_operands (1, get_enabled_alternatives (temp
))
7371 && (!AUTO_INC_DEC
|| ! find_reg_note (temp
, REG_INC
, reloadreg
)))
7373 /* If the previous insn is an output reload, the source is
7374 a reload register, and its spill_reg_store entry will
7375 contain the previous destination. This is now
7377 if (REG_P (SET_SRC (PATTERN (temp
)))
7378 && REGNO (SET_SRC (PATTERN (temp
))) < FIRST_PSEUDO_REGISTER
)
7380 spill_reg_store
[REGNO (SET_SRC (PATTERN (temp
)))] = 0;
7381 spill_reg_stored_to
[REGNO (SET_SRC (PATTERN (temp
)))] = 0;
7384 /* If these are the only uses of the pseudo reg,
7385 pretend for GDB it lives in the reload reg we used. */
7386 if (REG_N_DEATHS (REGNO (old
)) == 1
7387 && REG_N_SETS (REGNO (old
)) == 1)
7389 reg_renumber
[REGNO (old
)] = REGNO (reloadreg
);
7390 if (ira_conflicts_p
)
7391 /* Inform IRA about the change. */
7392 ira_mark_allocation_change (REGNO (old
));
7393 alter_reg (REGNO (old
), -1, false);
7397 /* Adjust any debug insns between temp and insn. */
7398 while ((temp
= NEXT_INSN (temp
)) != insn
)
7399 if (DEBUG_INSN_P (temp
))
7400 INSN_VAR_LOCATION_LOC (temp
)
7401 = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (temp
),
7404 gcc_assert (NOTE_P (temp
));
7408 SET_DEST (PATTERN (temp
)) = old
;
7413 /* We can't do that, so output an insn to load RELOADREG. */
7415 /* If we have a secondary reload, pick up the secondary register
7416 and icode, if any. If OLDEQUIV and OLD are different or
7417 if this is an in-out reload, recompute whether or not we
7418 still need a secondary register and what the icode should
7419 be. If we still need a secondary register and the class or
7420 icode is different, go back to reloading from OLD if using
7421 OLDEQUIV means that we got the wrong type of register. We
7422 cannot have different class or icode due to an in-out reload
7423 because we don't make such reloads when both the input and
7424 output need secondary reload registers. */
7426 if (! special
&& rl
->secondary_in_reload
>= 0)
7428 rtx second_reload_reg
= 0;
7429 rtx third_reload_reg
= 0;
7430 int secondary_reload
= rl
->secondary_in_reload
;
7431 rtx real_oldequiv
= oldequiv
;
7434 enum insn_code icode
;
7435 enum insn_code tertiary_icode
= CODE_FOR_nothing
;
7437 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
7438 and similarly for OLD.
7439 See comments in get_secondary_reload in reload.c. */
7440 /* If it is a pseudo that cannot be replaced with its
7441 equivalent MEM, we must fall back to reload_in, which
7442 will have all the necessary substitutions registered.
7443 Likewise for a pseudo that can't be replaced with its
7444 equivalent constant.
7446 Take extra care for subregs of such pseudos. Note that
7447 we cannot use reg_equiv_mem in this case because it is
7448 not in the right mode. */
7451 if (GET_CODE (tmp
) == SUBREG
)
7452 tmp
= SUBREG_REG (tmp
);
7454 && REGNO (tmp
) >= FIRST_PSEUDO_REGISTER
7455 && (reg_equiv_memory_loc (REGNO (tmp
)) != 0
7456 || reg_equiv_constant (REGNO (tmp
)) != 0))
7458 if (! reg_equiv_mem (REGNO (tmp
))
7459 || num_not_at_initial_offset
7460 || GET_CODE (oldequiv
) == SUBREG
)
7461 real_oldequiv
= rl
->in
;
7463 real_oldequiv
= reg_equiv_mem (REGNO (tmp
));
7467 if (GET_CODE (tmp
) == SUBREG
)
7468 tmp
= SUBREG_REG (tmp
);
7470 && REGNO (tmp
) >= FIRST_PSEUDO_REGISTER
7471 && (reg_equiv_memory_loc (REGNO (tmp
)) != 0
7472 || reg_equiv_constant (REGNO (tmp
)) != 0))
7474 if (! reg_equiv_mem (REGNO (tmp
))
7475 || num_not_at_initial_offset
7476 || GET_CODE (old
) == SUBREG
)
7479 real_old
= reg_equiv_mem (REGNO (tmp
));
7482 second_reload_reg
= rld
[secondary_reload
].reg_rtx
;
7483 if (rld
[secondary_reload
].secondary_in_reload
>= 0)
7485 int tertiary_reload
= rld
[secondary_reload
].secondary_in_reload
;
7487 third_reload_reg
= rld
[tertiary_reload
].reg_rtx
;
7488 tertiary_icode
= rld
[secondary_reload
].secondary_in_icode
;
7489 /* We'd have to add more code for quartary reloads. */
7490 gcc_assert (rld
[tertiary_reload
].secondary_in_reload
< 0);
7492 icode
= rl
->secondary_in_icode
;
7494 if ((old
!= oldequiv
&& ! rtx_equal_p (old
, oldequiv
))
7495 || (rl
->in
!= 0 && rl
->out
!= 0))
7497 secondary_reload_info sri
, sri2
;
7498 enum reg_class new_class
, new_t_class
;
7500 sri
.icode
= CODE_FOR_nothing
;
7501 sri
.prev_sri
= NULL
;
7503 = (enum reg_class
) targetm
.secondary_reload (1, real_oldequiv
,
7507 if (new_class
== NO_REGS
&& sri
.icode
== CODE_FOR_nothing
)
7508 second_reload_reg
= 0;
7509 else if (new_class
== NO_REGS
)
7511 if (reload_adjust_reg_for_icode (&second_reload_reg
,
7513 (enum insn_code
) sri
.icode
))
7515 icode
= (enum insn_code
) sri
.icode
;
7516 third_reload_reg
= 0;
7521 real_oldequiv
= real_old
;
7524 else if (sri
.icode
!= CODE_FOR_nothing
)
7525 /* We currently lack a way to express this in reloads. */
7529 sri2
.icode
= CODE_FOR_nothing
;
7530 sri2
.prev_sri
= &sri
;
7532 = (enum reg_class
) targetm
.secondary_reload (1, real_oldequiv
,
7535 if (new_t_class
== NO_REGS
&& sri2
.icode
== CODE_FOR_nothing
)
7537 if (reload_adjust_reg_for_temp (&second_reload_reg
,
7541 third_reload_reg
= 0;
7542 tertiary_icode
= (enum insn_code
) sri2
.icode
;
7547 real_oldequiv
= real_old
;
7550 else if (new_t_class
== NO_REGS
&& sri2
.icode
!= CODE_FOR_nothing
)
7552 rtx intermediate
= second_reload_reg
;
7554 if (reload_adjust_reg_for_temp (&intermediate
, NULL
,
7556 && reload_adjust_reg_for_icode (&third_reload_reg
, NULL
,
7560 second_reload_reg
= intermediate
;
7561 tertiary_icode
= (enum insn_code
) sri2
.icode
;
7566 real_oldequiv
= real_old
;
7569 else if (new_t_class
!= NO_REGS
&& sri2
.icode
== CODE_FOR_nothing
)
7571 rtx intermediate
= second_reload_reg
;
7573 if (reload_adjust_reg_for_temp (&intermediate
, NULL
,
7575 && reload_adjust_reg_for_temp (&third_reload_reg
, NULL
,
7578 second_reload_reg
= intermediate
;
7579 tertiary_icode
= (enum insn_code
) sri2
.icode
;
7584 real_oldequiv
= real_old
;
7589 /* This could be handled more intelligently too. */
7591 real_oldequiv
= real_old
;
7596 /* If we still need a secondary reload register, check
7597 to see if it is being used as a scratch or intermediate
7598 register and generate code appropriately. If we need
7599 a scratch register, use REAL_OLDEQUIV since the form of
7600 the insn may depend on the actual address if it is
7603 if (second_reload_reg
)
7605 if (icode
!= CODE_FOR_nothing
)
7607 /* We'd have to add extra code to handle this case. */
7608 gcc_assert (!third_reload_reg
);
7610 emit_insn (GEN_FCN (icode
) (reloadreg
, real_oldequiv
,
7611 second_reload_reg
));
7616 /* See if we need a scratch register to load the
7617 intermediate register (a tertiary reload). */
7618 if (tertiary_icode
!= CODE_FOR_nothing
)
7620 emit_insn ((GEN_FCN (tertiary_icode
)
7621 (second_reload_reg
, real_oldequiv
,
7622 third_reload_reg
)));
7624 else if (third_reload_reg
)
7626 gen_reload (third_reload_reg
, real_oldequiv
,
7629 gen_reload (second_reload_reg
, third_reload_reg
,
7634 gen_reload (second_reload_reg
, real_oldequiv
,
7638 oldequiv
= second_reload_reg
;
7643 if (! special
&& ! rtx_equal_p (reloadreg
, oldequiv
))
7645 rtx real_oldequiv
= oldequiv
;
7647 if ((REG_P (oldequiv
)
7648 && REGNO (oldequiv
) >= FIRST_PSEUDO_REGISTER
7649 && (reg_equiv_memory_loc (REGNO (oldequiv
)) != 0
7650 || reg_equiv_constant (REGNO (oldequiv
)) != 0))
7651 || (GET_CODE (oldequiv
) == SUBREG
7652 && REG_P (SUBREG_REG (oldequiv
))
7653 && (REGNO (SUBREG_REG (oldequiv
))
7654 >= FIRST_PSEUDO_REGISTER
)
7655 && ((reg_equiv_memory_loc (REGNO (SUBREG_REG (oldequiv
))) != 0)
7656 || (reg_equiv_constant (REGNO (SUBREG_REG (oldequiv
))) != 0)))
7657 || (CONSTANT_P (oldequiv
)
7658 && (targetm
.preferred_reload_class (oldequiv
,
7659 REGNO_REG_CLASS (REGNO (reloadreg
)))
7661 real_oldequiv
= rl
->in
;
7662 gen_reload (reloadreg
, real_oldequiv
, rl
->opnum
,
7666 if (cfun
->can_throw_non_call_exceptions
)
7667 copy_reg_eh_region_note_forward (insn
, get_insns (), NULL
);
7669 /* End this sequence. */
7670 *where
= get_insns ();
7673 /* Update reload_override_in so that delete_address_reloads_1
7674 can see the actual register usage. */
7676 reload_override_in
[j
] = oldequiv
;
7679 /* Generate insns to for the output reload RL, which is for the insn described
7680 by CHAIN and has the number J. */
7682 emit_output_reload_insns (struct insn_chain
*chain
, struct reload
*rl
,
7686 rtx_insn
*insn
= chain
->insn
;
7693 if (rl
->when_needed
== RELOAD_OTHER
)
7696 push_to_sequence (output_reload_insns
[rl
->opnum
]);
7698 rl_reg_rtx
= reload_reg_rtx_for_output
[j
];
7699 mode
= GET_MODE (rl_reg_rtx
);
7701 reloadreg
= rl_reg_rtx
;
7703 /* If we need two reload regs, set RELOADREG to the intermediate
7704 one, since it will be stored into OLD. We might need a secondary
7705 register only for an input reload, so check again here. */
7707 if (rl
->secondary_out_reload
>= 0)
7710 int secondary_reload
= rl
->secondary_out_reload
;
7711 int tertiary_reload
= rld
[secondary_reload
].secondary_out_reload
;
7713 if (REG_P (old
) && REGNO (old
) >= FIRST_PSEUDO_REGISTER
7714 && reg_equiv_mem (REGNO (old
)) != 0)
7715 real_old
= reg_equiv_mem (REGNO (old
));
7717 if (secondary_reload_class (0, rl
->rclass
, mode
, real_old
) != NO_REGS
)
7719 rtx second_reloadreg
= reloadreg
;
7720 reloadreg
= rld
[secondary_reload
].reg_rtx
;
7722 /* See if RELOADREG is to be used as a scratch register
7723 or as an intermediate register. */
7724 if (rl
->secondary_out_icode
!= CODE_FOR_nothing
)
7726 /* We'd have to add extra code to handle this case. */
7727 gcc_assert (tertiary_reload
< 0);
7729 emit_insn ((GEN_FCN (rl
->secondary_out_icode
)
7730 (real_old
, second_reloadreg
, reloadreg
)));
7735 /* See if we need both a scratch and intermediate reload
7738 enum insn_code tertiary_icode
7739 = rld
[secondary_reload
].secondary_out_icode
;
7741 /* We'd have to add more code for quartary reloads. */
7742 gcc_assert (tertiary_reload
< 0
7743 || rld
[tertiary_reload
].secondary_out_reload
< 0);
7745 if (GET_MODE (reloadreg
) != mode
)
7746 reloadreg
= reload_adjust_reg_for_mode (reloadreg
, mode
);
7748 if (tertiary_icode
!= CODE_FOR_nothing
)
7750 rtx third_reloadreg
= rld
[tertiary_reload
].reg_rtx
;
7752 /* Copy primary reload reg to secondary reload reg.
7753 (Note that these have been swapped above, then
7754 secondary reload reg to OLD using our insn.) */
7756 /* If REAL_OLD is a paradoxical SUBREG, remove it
7757 and try to put the opposite SUBREG on
7759 strip_paradoxical_subreg (&real_old
, &reloadreg
);
7761 gen_reload (reloadreg
, second_reloadreg
,
7762 rl
->opnum
, rl
->when_needed
);
7763 emit_insn ((GEN_FCN (tertiary_icode
)
7764 (real_old
, reloadreg
, third_reloadreg
)));
7770 /* Copy between the reload regs here and then to
7773 gen_reload (reloadreg
, second_reloadreg
,
7774 rl
->opnum
, rl
->when_needed
);
7775 if (tertiary_reload
>= 0)
7777 rtx third_reloadreg
= rld
[tertiary_reload
].reg_rtx
;
7779 gen_reload (third_reloadreg
, reloadreg
,
7780 rl
->opnum
, rl
->when_needed
);
7781 reloadreg
= third_reloadreg
;
7788 /* Output the last reload insn. */
7793 /* Don't output the last reload if OLD is not the dest of
7794 INSN and is in the src and is clobbered by INSN. */
7795 if (! flag_expensive_optimizations
7797 || !(set
= single_set (insn
))
7798 || rtx_equal_p (old
, SET_DEST (set
))
7799 || !reg_mentioned_p (old
, SET_SRC (set
))
7800 || !((REGNO (old
) < FIRST_PSEUDO_REGISTER
)
7801 && regno_clobbered_p (REGNO (old
), insn
, rl
->mode
, 0)))
7802 gen_reload (old
, reloadreg
, rl
->opnum
,
7806 /* Look at all insns we emitted, just to be safe. */
7807 for (p
= get_insns (); p
; p
= NEXT_INSN (p
))
7810 rtx pat
= PATTERN (p
);
7812 /* If this output reload doesn't come from a spill reg,
7813 clear any memory of reloaded copies of the pseudo reg.
7814 If this output reload comes from a spill reg,
7815 reg_has_output_reload will make this do nothing. */
7816 note_stores (pat
, forget_old_reloads_1
, NULL
);
7818 if (reg_mentioned_p (rl_reg_rtx
, pat
))
7820 rtx set
= single_set (insn
);
7821 if (reload_spill_index
[j
] < 0
7823 && SET_SRC (set
) == rl_reg_rtx
)
7825 int src
= REGNO (SET_SRC (set
));
7827 reload_spill_index
[j
] = src
;
7828 SET_HARD_REG_BIT (reg_is_output_reload
, src
);
7829 if (find_regno_note (insn
, REG_DEAD
, src
))
7830 SET_HARD_REG_BIT (reg_reloaded_died
, src
);
7832 if (HARD_REGISTER_P (rl_reg_rtx
))
7834 int s
= rl
->secondary_out_reload
;
7835 set
= single_set (p
);
7836 /* If this reload copies only to the secondary reload
7837 register, the secondary reload does the actual
7839 if (s
>= 0 && set
== NULL_RTX
)
7840 /* We can't tell what function the secondary reload
7841 has and where the actual store to the pseudo is
7842 made; leave new_spill_reg_store alone. */
7845 && SET_SRC (set
) == rl_reg_rtx
7846 && SET_DEST (set
) == rld
[s
].reg_rtx
)
7848 /* Usually the next instruction will be the
7849 secondary reload insn; if we can confirm
7850 that it is, setting new_spill_reg_store to
7851 that insn will allow an extra optimization. */
7852 rtx s_reg
= rld
[s
].reg_rtx
;
7853 rtx_insn
*next
= NEXT_INSN (p
);
7854 rld
[s
].out
= rl
->out
;
7855 rld
[s
].out_reg
= rl
->out_reg
;
7856 set
= single_set (next
);
7857 if (set
&& SET_SRC (set
) == s_reg
7858 && reload_reg_rtx_reaches_end_p (s_reg
, s
))
7860 SET_HARD_REG_BIT (reg_is_output_reload
,
7862 new_spill_reg_store
[REGNO (s_reg
)] = next
;
7865 else if (reload_reg_rtx_reaches_end_p (rl_reg_rtx
, j
))
7866 new_spill_reg_store
[REGNO (rl_reg_rtx
)] = p
;
7871 if (rl
->when_needed
== RELOAD_OTHER
)
7873 emit_insn (other_output_reload_insns
[rl
->opnum
]);
7874 other_output_reload_insns
[rl
->opnum
] = get_insns ();
7877 output_reload_insns
[rl
->opnum
] = get_insns ();
7879 if (cfun
->can_throw_non_call_exceptions
)
7880 copy_reg_eh_region_note_forward (insn
, get_insns (), NULL
);
7885 /* Do input reloading for reload RL, which is for the insn described by CHAIN
7886 and has the number J. */
7888 do_input_reload (struct insn_chain
*chain
, struct reload
*rl
, int j
)
7890 rtx_insn
*insn
= chain
->insn
;
7891 rtx old
= (rl
->in
&& MEM_P (rl
->in
)
7892 ? rl
->in_reg
: rl
->in
);
7893 rtx reg_rtx
= rl
->reg_rtx
;
7899 /* Determine the mode to reload in.
7900 This is very tricky because we have three to choose from.
7901 There is the mode the insn operand wants (rl->inmode).
7902 There is the mode of the reload register RELOADREG.
7903 There is the intrinsic mode of the operand, which we could find
7904 by stripping some SUBREGs.
7905 It turns out that RELOADREG's mode is irrelevant:
7906 we can change that arbitrarily.
7908 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
7909 then the reload reg may not support QImode moves, so use SImode.
7910 If foo is in memory due to spilling a pseudo reg, this is safe,
7911 because the QImode value is in the least significant part of a
7912 slot big enough for a SImode. If foo is some other sort of
7913 memory reference, then it is impossible to reload this case,
7914 so previous passes had better make sure this never happens.
7916 Then consider a one-word union which has SImode and one of its
7917 members is a float, being fetched as (SUBREG:SF union:SI).
7918 We must fetch that as SFmode because we could be loading into
7919 a float-only register. In this case OLD's mode is correct.
7921 Consider an immediate integer: it has VOIDmode. Here we need
7922 to get a mode from something else.
7924 In some cases, there is a fourth mode, the operand's
7925 containing mode. If the insn specifies a containing mode for
7926 this operand, it overrides all others.
7928 I am not sure whether the algorithm here is always right,
7929 but it does the right things in those cases. */
7931 mode
= GET_MODE (old
);
7932 if (mode
== VOIDmode
)
7935 /* We cannot use gen_lowpart_common since it can do the wrong thing
7936 when REG_RTX has a multi-word mode. Note that REG_RTX must
7937 always be a REG here. */
7938 if (GET_MODE (reg_rtx
) != mode
)
7939 reg_rtx
= reload_adjust_reg_for_mode (reg_rtx
, mode
);
7941 reload_reg_rtx_for_input
[j
] = reg_rtx
;
7944 /* AUTO_INC reloads need to be handled even if inherited. We got an
7945 AUTO_INC reload if reload_out is set but reload_out_reg isn't. */
7946 && (! reload_inherited
[j
] || (rl
->out
&& ! rl
->out_reg
))
7947 && ! rtx_equal_p (reg_rtx
, old
)
7949 emit_input_reload_insns (chain
, rld
+ j
, old
, j
);
7951 /* When inheriting a wider reload, we have a MEM in rl->in,
7952 e.g. inheriting a SImode output reload for
7953 (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10))) */
7954 if (optimize
&& reload_inherited
[j
] && rl
->in
7956 && MEM_P (rl
->in_reg
)
7957 && reload_spill_index
[j
] >= 0
7958 && TEST_HARD_REG_BIT (reg_reloaded_valid
, reload_spill_index
[j
]))
7959 rl
->in
= regno_reg_rtx
[reg_reloaded_contents
[reload_spill_index
[j
]]];
7961 /* If we are reloading a register that was recently stored in with an
7962 output-reload, see if we can prove there was
7963 actually no need to store the old value in it. */
7966 && (reload_inherited
[j
] || reload_override_in
[j
])
7969 && spill_reg_store
[REGNO (reg_rtx
)] != 0
7971 /* There doesn't seem to be any reason to restrict this to pseudos
7972 and doing so loses in the case where we are copying from a
7973 register of the wrong class. */
7974 && !HARD_REGISTER_P (spill_reg_stored_to
[REGNO (reg_rtx
)])
7976 /* The insn might have already some references to stackslots
7977 replaced by MEMs, while reload_out_reg still names the
7979 && (dead_or_set_p (insn
, spill_reg_stored_to
[REGNO (reg_rtx
)])
7980 || rtx_equal_p (spill_reg_stored_to
[REGNO (reg_rtx
)], rl
->out_reg
)))
7981 delete_output_reload (insn
, j
, REGNO (reg_rtx
), reg_rtx
);
7984 /* Do output reloading for reload RL, which is for the insn described by
7985 CHAIN and has the number J.
7986 ??? At some point we need to support handling output reloads of
7987 JUMP_INSNs or insns that set cc0. */
7989 do_output_reload (struct insn_chain
*chain
, struct reload
*rl
, int j
)
7992 rtx_insn
*insn
= chain
->insn
;
7993 /* If this is an output reload that stores something that is
7994 not loaded in this same reload, see if we can eliminate a previous
7996 rtx pseudo
= rl
->out_reg
;
7997 rtx reg_rtx
= rl
->reg_rtx
;
7999 if (rl
->out
&& reg_rtx
)
8003 /* Determine the mode to reload in.
8004 See comments above (for input reloading). */
8005 mode
= GET_MODE (rl
->out
);
8006 if (mode
== VOIDmode
)
8008 /* VOIDmode should never happen for an output. */
8009 if (asm_noperands (PATTERN (insn
)) < 0)
8010 /* It's the compiler's fault. */
8011 fatal_insn ("VOIDmode on an output", insn
);
8012 error_for_asm (insn
, "output operand is constant in %<asm%>");
8013 /* Prevent crash--use something we know is valid. */
8015 rl
->out
= gen_rtx_REG (mode
, REGNO (reg_rtx
));
8017 if (GET_MODE (reg_rtx
) != mode
)
8018 reg_rtx
= reload_adjust_reg_for_mode (reg_rtx
, mode
);
8020 reload_reg_rtx_for_output
[j
] = reg_rtx
;
8025 && ! rtx_equal_p (rl
->in_reg
, pseudo
)
8026 && REGNO (pseudo
) >= FIRST_PSEUDO_REGISTER
8027 && reg_last_reload_reg
[REGNO (pseudo
)])
8029 int pseudo_no
= REGNO (pseudo
);
8030 int last_regno
= REGNO (reg_last_reload_reg
[pseudo_no
]);
8032 /* We don't need to test full validity of last_regno for
8033 inherit here; we only want to know if the store actually
8034 matches the pseudo. */
8035 if (TEST_HARD_REG_BIT (reg_reloaded_valid
, last_regno
)
8036 && reg_reloaded_contents
[last_regno
] == pseudo_no
8037 && spill_reg_store
[last_regno
]
8038 && rtx_equal_p (pseudo
, spill_reg_stored_to
[last_regno
]))
8039 delete_output_reload (insn
, j
, last_regno
, reg_rtx
);
8045 || rtx_equal_p (old
, reg_rtx
))
8048 /* An output operand that dies right away does need a reload,
8049 but need not be copied from it. Show the new location in the
8051 if ((REG_P (old
) || GET_CODE (old
) == SCRATCH
)
8052 && (note
= find_reg_note (insn
, REG_UNUSED
, old
)) != 0)
8054 XEXP (note
, 0) = reg_rtx
;
8057 /* Likewise for a SUBREG of an operand that dies. */
8058 else if (GET_CODE (old
) == SUBREG
8059 && REG_P (SUBREG_REG (old
))
8060 && 0 != (note
= find_reg_note (insn
, REG_UNUSED
,
8063 XEXP (note
, 0) = gen_lowpart_common (GET_MODE (old
), reg_rtx
);
8066 else if (GET_CODE (old
) == SCRATCH
)
8067 /* If we aren't optimizing, there won't be a REG_UNUSED note,
8068 but we don't want to make an output reload. */
8071 /* If is a JUMP_INSN, we can't support output reloads yet. */
8072 gcc_assert (NONJUMP_INSN_P (insn
));
8074 emit_output_reload_insns (chain
, rld
+ j
, j
);
8077 /* A reload copies values of MODE from register SRC to register DEST.
8078 Return true if it can be treated for inheritance purposes like a
8079 group of reloads, each one reloading a single hard register. The
8080 caller has already checked that (reg:MODE SRC) and (reg:MODE DEST)
8081 occupy the same number of hard registers. */
8084 inherit_piecemeal_p (int dest ATTRIBUTE_UNUSED
,
8085 int src ATTRIBUTE_UNUSED
,
8086 machine_mode mode ATTRIBUTE_UNUSED
)
8088 #ifdef CANNOT_CHANGE_MODE_CLASS
8089 return (!REG_CANNOT_CHANGE_MODE_P (dest
, mode
, reg_raw_mode
[dest
])
8090 && !REG_CANNOT_CHANGE_MODE_P (src
, mode
, reg_raw_mode
[src
]));
8096 /* Output insns to reload values in and out of the chosen reload regs. */
8099 emit_reload_insns (struct insn_chain
*chain
)
8101 rtx_insn
*insn
= chain
->insn
;
8105 CLEAR_HARD_REG_SET (reg_reloaded_died
);
8107 for (j
= 0; j
< reload_n_operands
; j
++)
8108 input_reload_insns
[j
] = input_address_reload_insns
[j
]
8109 = inpaddr_address_reload_insns
[j
]
8110 = output_reload_insns
[j
] = output_address_reload_insns
[j
]
8111 = outaddr_address_reload_insns
[j
]
8112 = other_output_reload_insns
[j
] = 0;
8113 other_input_address_reload_insns
= 0;
8114 other_input_reload_insns
= 0;
8115 operand_reload_insns
= 0;
8116 other_operand_reload_insns
= 0;
8118 /* Dump reloads into the dump file. */
8121 fprintf (dump_file
, "\nReloads for insn # %d\n", INSN_UID (insn
));
8122 debug_reload_to_stream (dump_file
);
8125 for (j
= 0; j
< n_reloads
; j
++)
8126 if (rld
[j
].reg_rtx
&& HARD_REGISTER_P (rld
[j
].reg_rtx
))
8130 for (i
= REGNO (rld
[j
].reg_rtx
); i
< END_REGNO (rld
[j
].reg_rtx
); i
++)
8131 new_spill_reg_store
[i
] = 0;
8134 /* Now output the instructions to copy the data into and out of the
8135 reload registers. Do these in the order that the reloads were reported,
8136 since reloads of base and index registers precede reloads of operands
8137 and the operands may need the base and index registers reloaded. */
8139 for (j
= 0; j
< n_reloads
; j
++)
8141 do_input_reload (chain
, rld
+ j
, j
);
8142 do_output_reload (chain
, rld
+ j
, j
);
8145 /* Now write all the insns we made for reloads in the order expected by
8146 the allocation functions. Prior to the insn being reloaded, we write
8147 the following reloads:
8149 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
8151 RELOAD_OTHER reloads.
8153 For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
8154 by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
8155 RELOAD_FOR_INPUT reload for the operand.
8157 RELOAD_FOR_OPADDR_ADDRS reloads.
8159 RELOAD_FOR_OPERAND_ADDRESS reloads.
8161 After the insn being reloaded, we write the following:
8163 For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
8164 by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
8165 RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
8166 reloads for the operand. The RELOAD_OTHER output reloads are
8167 output in descending order by reload number. */
8169 emit_insn_before (other_input_address_reload_insns
, insn
);
8170 emit_insn_before (other_input_reload_insns
, insn
);
8172 for (j
= 0; j
< reload_n_operands
; j
++)
8174 emit_insn_before (inpaddr_address_reload_insns
[j
], insn
);
8175 emit_insn_before (input_address_reload_insns
[j
], insn
);
8176 emit_insn_before (input_reload_insns
[j
], insn
);
8179 emit_insn_before (other_operand_reload_insns
, insn
);
8180 emit_insn_before (operand_reload_insns
, insn
);
8182 for (j
= 0; j
< reload_n_operands
; j
++)
8184 rtx_insn
*x
= emit_insn_after (outaddr_address_reload_insns
[j
], insn
);
8185 x
= emit_insn_after (output_address_reload_insns
[j
], x
);
8186 x
= emit_insn_after (output_reload_insns
[j
], x
);
8187 emit_insn_after (other_output_reload_insns
[j
], x
);
8190 /* For all the spill regs newly reloaded in this instruction,
8191 record what they were reloaded from, so subsequent instructions
8192 can inherit the reloads.
8194 Update spill_reg_store for the reloads of this insn.
8195 Copy the elements that were updated in the loop above. */
8197 for (j
= 0; j
< n_reloads
; j
++)
8199 int r
= reload_order
[j
];
8200 int i
= reload_spill_index
[r
];
8202 /* If this is a non-inherited input reload from a pseudo, we must
8203 clear any memory of a previous store to the same pseudo. Only do
8204 something if there will not be an output reload for the pseudo
8206 if (rld
[r
].in_reg
!= 0
8207 && ! (reload_inherited
[r
] || reload_override_in
[r
]))
8209 rtx reg
= rld
[r
].in_reg
;
8211 if (GET_CODE (reg
) == SUBREG
)
8212 reg
= SUBREG_REG (reg
);
8215 && REGNO (reg
) >= FIRST_PSEUDO_REGISTER
8216 && !REGNO_REG_SET_P (®_has_output_reload
, REGNO (reg
)))
8218 int nregno
= REGNO (reg
);
8220 if (reg_last_reload_reg
[nregno
])
8222 int last_regno
= REGNO (reg_last_reload_reg
[nregno
]);
8224 if (reg_reloaded_contents
[last_regno
] == nregno
)
8225 spill_reg_store
[last_regno
] = 0;
8230 /* I is nonneg if this reload used a register.
8231 If rld[r].reg_rtx is 0, this is an optional reload
8232 that we opted to ignore. */
8234 if (i
>= 0 && rld
[r
].reg_rtx
!= 0)
8236 int nr
= hard_regno_nregs
[i
][GET_MODE (rld
[r
].reg_rtx
)];
8239 /* For a multi register reload, we need to check if all or part
8240 of the value lives to the end. */
8241 for (k
= 0; k
< nr
; k
++)
8242 if (reload_reg_reaches_end_p (i
+ k
, r
))
8243 CLEAR_HARD_REG_BIT (reg_reloaded_valid
, i
+ k
);
8245 /* Maybe the spill reg contains a copy of reload_out. */
8247 && (REG_P (rld
[r
].out
)
8249 ? REG_P (rld
[r
].out_reg
)
8250 /* The reload value is an auto-modification of
8251 some kind. For PRE_INC, POST_INC, PRE_DEC
8252 and POST_DEC, we record an equivalence
8253 between the reload register and the operand
8254 on the optimistic assumption that we can make
8255 the equivalence hold. reload_as_needed must
8256 then either make it hold or invalidate the
8259 PRE_MODIFY and POST_MODIFY addresses are reloaded
8260 somewhat differently, and allowing them here leads
8262 : (GET_CODE (rld
[r
].out
) != POST_MODIFY
8263 && GET_CODE (rld
[r
].out
) != PRE_MODIFY
))))
8267 reg
= reload_reg_rtx_for_output
[r
];
8268 if (reload_reg_rtx_reaches_end_p (reg
, r
))
8270 machine_mode mode
= GET_MODE (reg
);
8271 int regno
= REGNO (reg
);
8272 int nregs
= hard_regno_nregs
[regno
][mode
];
8273 rtx out
= (REG_P (rld
[r
].out
)
8277 /* AUTO_INC */ : XEXP (rld
[r
].in_reg
, 0));
8278 int out_regno
= REGNO (out
);
8279 int out_nregs
= (!HARD_REGISTER_NUM_P (out_regno
) ? 1
8280 : hard_regno_nregs
[out_regno
][mode
]);
8283 spill_reg_store
[regno
] = new_spill_reg_store
[regno
];
8284 spill_reg_stored_to
[regno
] = out
;
8285 reg_last_reload_reg
[out_regno
] = reg
;
8287 piecemeal
= (HARD_REGISTER_NUM_P (out_regno
)
8288 && nregs
== out_nregs
8289 && inherit_piecemeal_p (out_regno
, regno
, mode
));
8291 /* If OUT_REGNO is a hard register, it may occupy more than
8292 one register. If it does, say what is in the
8293 rest of the registers assuming that both registers
8294 agree on how many words the object takes. If not,
8295 invalidate the subsequent registers. */
8297 if (HARD_REGISTER_NUM_P (out_regno
))
8298 for (k
= 1; k
< out_nregs
; k
++)
8299 reg_last_reload_reg
[out_regno
+ k
]
8300 = (piecemeal
? regno_reg_rtx
[regno
+ k
] : 0);
8302 /* Now do the inverse operation. */
8303 for (k
= 0; k
< nregs
; k
++)
8305 CLEAR_HARD_REG_BIT (reg_reloaded_dead
, regno
+ k
);
8306 reg_reloaded_contents
[regno
+ k
]
8307 = (!HARD_REGISTER_NUM_P (out_regno
) || !piecemeal
8310 reg_reloaded_insn
[regno
+ k
] = insn
;
8311 SET_HARD_REG_BIT (reg_reloaded_valid
, regno
+ k
);
8312 if (HARD_REGNO_CALL_PART_CLOBBERED (regno
+ k
, mode
))
8313 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered
,
8316 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered
,
8321 /* Maybe the spill reg contains a copy of reload_in. Only do
8322 something if there will not be an output reload for
8323 the register being reloaded. */
8324 else if (rld
[r
].out_reg
== 0
8326 && ((REG_P (rld
[r
].in
)
8327 && !HARD_REGISTER_P (rld
[r
].in
)
8328 && !REGNO_REG_SET_P (®_has_output_reload
,
8330 || (REG_P (rld
[r
].in_reg
)
8331 && !REGNO_REG_SET_P (®_has_output_reload
,
8332 REGNO (rld
[r
].in_reg
))))
8333 && !reg_set_p (reload_reg_rtx_for_input
[r
], PATTERN (insn
)))
8337 reg
= reload_reg_rtx_for_input
[r
];
8338 if (reload_reg_rtx_reaches_end_p (reg
, r
))
8348 mode
= GET_MODE (reg
);
8349 regno
= REGNO (reg
);
8350 nregs
= hard_regno_nregs
[regno
][mode
];
8351 if (REG_P (rld
[r
].in
)
8352 && REGNO (rld
[r
].in
) >= FIRST_PSEUDO_REGISTER
)
8354 else if (REG_P (rld
[r
].in_reg
))
8357 in
= XEXP (rld
[r
].in_reg
, 0);
8358 in_regno
= REGNO (in
);
8360 in_nregs
= (!HARD_REGISTER_NUM_P (in_regno
) ? 1
8361 : hard_regno_nregs
[in_regno
][mode
]);
8363 reg_last_reload_reg
[in_regno
] = reg
;
8365 piecemeal
= (HARD_REGISTER_NUM_P (in_regno
)
8366 && nregs
== in_nregs
8367 && inherit_piecemeal_p (regno
, in_regno
, mode
));
8369 if (HARD_REGISTER_NUM_P (in_regno
))
8370 for (k
= 1; k
< in_nregs
; k
++)
8371 reg_last_reload_reg
[in_regno
+ k
]
8372 = (piecemeal
? regno_reg_rtx
[regno
+ k
] : 0);
8374 /* Unless we inherited this reload, show we haven't
8375 recently done a store.
8376 Previous stores of inherited auto_inc expressions
8377 also have to be discarded. */
8378 if (! reload_inherited
[r
]
8379 || (rld
[r
].out
&& ! rld
[r
].out_reg
))
8380 spill_reg_store
[regno
] = 0;
8382 for (k
= 0; k
< nregs
; k
++)
8384 CLEAR_HARD_REG_BIT (reg_reloaded_dead
, regno
+ k
);
8385 reg_reloaded_contents
[regno
+ k
]
8386 = (!HARD_REGISTER_NUM_P (in_regno
) || !piecemeal
8389 reg_reloaded_insn
[regno
+ k
] = insn
;
8390 SET_HARD_REG_BIT (reg_reloaded_valid
, regno
+ k
);
8391 if (HARD_REGNO_CALL_PART_CLOBBERED (regno
+ k
, mode
))
8392 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered
,
8395 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered
,
8402 /* The following if-statement was #if 0'd in 1.34 (or before...).
8403 It's reenabled in 1.35 because supposedly nothing else
8404 deals with this problem. */
8406 /* If a register gets output-reloaded from a non-spill register,
8407 that invalidates any previous reloaded copy of it.
8408 But forget_old_reloads_1 won't get to see it, because
8409 it thinks only about the original insn. So invalidate it here.
8410 Also do the same thing for RELOAD_OTHER constraints where the
8411 output is discarded. */
8413 && ((rld
[r
].out
!= 0
8414 && (REG_P (rld
[r
].out
)
8415 || (MEM_P (rld
[r
].out
)
8416 && REG_P (rld
[r
].out_reg
))))
8417 || (rld
[r
].out
== 0 && rld
[r
].out_reg
8418 && REG_P (rld
[r
].out_reg
))))
8420 rtx out
= ((rld
[r
].out
&& REG_P (rld
[r
].out
))
8421 ? rld
[r
].out
: rld
[r
].out_reg
);
8422 int out_regno
= REGNO (out
);
8423 machine_mode mode
= GET_MODE (out
);
8425 /* REG_RTX is now set or clobbered by the main instruction.
8426 As the comment above explains, forget_old_reloads_1 only
8427 sees the original instruction, and there is no guarantee
8428 that the original instruction also clobbered REG_RTX.
8429 For example, if find_reloads sees that the input side of
8430 a matched operand pair dies in this instruction, it may
8431 use the input register as the reload register.
8433 Calling forget_old_reloads_1 is a waste of effort if
8434 REG_RTX is also the output register.
8436 If we know that REG_RTX holds the value of a pseudo
8437 register, the code after the call will record that fact. */
8438 if (rld
[r
].reg_rtx
&& rld
[r
].reg_rtx
!= out
)
8439 forget_old_reloads_1 (rld
[r
].reg_rtx
, NULL_RTX
, NULL
);
8441 if (!HARD_REGISTER_NUM_P (out_regno
))
8444 rtx_insn
*store_insn
= NULL
;
8446 reg_last_reload_reg
[out_regno
] = 0;
8448 /* If we can find a hard register that is stored, record
8449 the storing insn so that we may delete this insn with
8450 delete_output_reload. */
8451 src_reg
= reload_reg_rtx_for_output
[r
];
8455 if (reload_reg_rtx_reaches_end_p (src_reg
, r
))
8456 store_insn
= new_spill_reg_store
[REGNO (src_reg
)];
8462 /* If this is an optional reload, try to find the
8463 source reg from an input reload. */
8464 rtx set
= single_set (insn
);
8465 if (set
&& SET_DEST (set
) == rld
[r
].out
)
8469 src_reg
= SET_SRC (set
);
8471 for (k
= 0; k
< n_reloads
; k
++)
8473 if (rld
[k
].in
== src_reg
)
8475 src_reg
= reload_reg_rtx_for_input
[k
];
8481 if (src_reg
&& REG_P (src_reg
)
8482 && REGNO (src_reg
) < FIRST_PSEUDO_REGISTER
)
8484 int src_regno
, src_nregs
, k
;
8487 gcc_assert (GET_MODE (src_reg
) == mode
);
8488 src_regno
= REGNO (src_reg
);
8489 src_nregs
= hard_regno_nregs
[src_regno
][mode
];
8490 /* The place where to find a death note varies with
8491 PRESERVE_DEATH_INFO_REGNO_P . The condition is not
8492 necessarily checked exactly in the code that moves
8493 notes, so just check both locations. */
8494 note
= find_regno_note (insn
, REG_DEAD
, src_regno
);
8495 if (! note
&& store_insn
)
8496 note
= find_regno_note (store_insn
, REG_DEAD
, src_regno
);
8497 for (k
= 0; k
< src_nregs
; k
++)
8499 spill_reg_store
[src_regno
+ k
] = store_insn
;
8500 spill_reg_stored_to
[src_regno
+ k
] = out
;
8501 reg_reloaded_contents
[src_regno
+ k
] = out_regno
;
8502 reg_reloaded_insn
[src_regno
+ k
] = store_insn
;
8503 CLEAR_HARD_REG_BIT (reg_reloaded_dead
, src_regno
+ k
);
8504 SET_HARD_REG_BIT (reg_reloaded_valid
, src_regno
+ k
);
8505 if (HARD_REGNO_CALL_PART_CLOBBERED (src_regno
+ k
,
8507 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered
,
8510 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered
,
8512 SET_HARD_REG_BIT (reg_is_output_reload
, src_regno
+ k
);
8514 SET_HARD_REG_BIT (reg_reloaded_died
, src_regno
);
8516 CLEAR_HARD_REG_BIT (reg_reloaded_died
, src_regno
);
8518 reg_last_reload_reg
[out_regno
] = src_reg
;
8519 /* We have to set reg_has_output_reload here, or else
8520 forget_old_reloads_1 will clear reg_last_reload_reg
8522 SET_REGNO_REG_SET (®_has_output_reload
,
8528 int k
, out_nregs
= hard_regno_nregs
[out_regno
][mode
];
8530 for (k
= 0; k
< out_nregs
; k
++)
8531 reg_last_reload_reg
[out_regno
+ k
] = 0;
8535 IOR_HARD_REG_SET (reg_reloaded_dead
, reg_reloaded_died
);
8538 /* Go through the motions to emit INSN and test if it is strictly valid.
8539 Return the emitted insn if valid, else return NULL. */
8542 emit_insn_if_valid_for_reload (rtx pat
)
8544 rtx_insn
*last
= get_last_insn ();
8547 rtx_insn
*insn
= emit_insn (pat
);
8548 code
= recog_memoized (insn
);
8552 extract_insn (insn
);
8553 /* We want constrain operands to treat this insn strictly in its
8554 validity determination, i.e., the way it would after reload has
8556 if (constrain_operands (1, get_enabled_alternatives (insn
)))
8560 delete_insns_since (last
);
8564 /* Emit code to perform a reload from IN (which may be a reload register) to
8565 OUT (which may also be a reload register). IN or OUT is from operand
8566 OPNUM with reload type TYPE.
8568 Returns first insn emitted. */
8571 gen_reload (rtx out
, rtx in
, int opnum
, enum reload_type type
)
8573 rtx_insn
*last
= get_last_insn ();
8575 #ifdef SECONDARY_MEMORY_NEEDED
8579 /* If IN is a paradoxical SUBREG, remove it and try to put the
8580 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
8581 if (!strip_paradoxical_subreg (&in
, &out
))
8582 strip_paradoxical_subreg (&out
, &in
);
8584 /* How to do this reload can get quite tricky. Normally, we are being
8585 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
8586 register that didn't get a hard register. In that case we can just
8587 call emit_move_insn.
8589 We can also be asked to reload a PLUS that adds a register or a MEM to
8590 another register, constant or MEM. This can occur during frame pointer
8591 elimination and while reloading addresses. This case is handled by
8592 trying to emit a single insn to perform the add. If it is not valid,
8593 we use a two insn sequence.
8595 Or we can be asked to reload an unary operand that was a fragment of
8596 an addressing mode, into a register. If it isn't recognized as-is,
8597 we try making the unop operand and the reload-register the same:
8598 (set reg:X (unop:X expr:Y))
8599 -> (set reg:Y expr:Y) (set reg:X (unop:X reg:Y)).
8601 Finally, we could be called to handle an 'o' constraint by putting
8602 an address into a register. In that case, we first try to do this
8603 with a named pattern of "reload_load_address". If no such pattern
8604 exists, we just emit a SET insn and hope for the best (it will normally
8605 be valid on machines that use 'o').
8607 This entire process is made complex because reload will never
8608 process the insns we generate here and so we must ensure that
8609 they will fit their constraints and also by the fact that parts of
8610 IN might be being reloaded separately and replaced with spill registers.
8611 Because of this, we are, in some sense, just guessing the right approach
8612 here. The one listed above seems to work.
8614 ??? At some point, this whole thing needs to be rethought. */
8616 if (GET_CODE (in
) == PLUS
8617 && (REG_P (XEXP (in
, 0))
8618 || GET_CODE (XEXP (in
, 0)) == SUBREG
8619 || MEM_P (XEXP (in
, 0)))
8620 && (REG_P (XEXP (in
, 1))
8621 || GET_CODE (XEXP (in
, 1)) == SUBREG
8622 || CONSTANT_P (XEXP (in
, 1))
8623 || MEM_P (XEXP (in
, 1))))
8625 /* We need to compute the sum of a register or a MEM and another
8626 register, constant, or MEM, and put it into the reload
8627 register. The best possible way of doing this is if the machine
8628 has a three-operand ADD insn that accepts the required operands.
8630 The simplest approach is to try to generate such an insn and see if it
8631 is recognized and matches its constraints. If so, it can be used.
8633 It might be better not to actually emit the insn unless it is valid,
8634 but we need to pass the insn as an operand to `recog' and
8635 `extract_insn' and it is simpler to emit and then delete the insn if
8636 not valid than to dummy things up. */
8640 enum insn_code code
;
8642 op0
= find_replacement (&XEXP (in
, 0));
8643 op1
= find_replacement (&XEXP (in
, 1));
8645 /* Since constraint checking is strict, commutativity won't be
8646 checked, so we need to do that here to avoid spurious failure
8647 if the add instruction is two-address and the second operand
8648 of the add is the same as the reload reg, which is frequently
8649 the case. If the insn would be A = B + A, rearrange it so
8650 it will be A = A + B as constrain_operands expects. */
8652 if (REG_P (XEXP (in
, 1))
8653 && REGNO (out
) == REGNO (XEXP (in
, 1)))
8654 tem
= op0
, op0
= op1
, op1
= tem
;
8656 if (op0
!= XEXP (in
, 0) || op1
!= XEXP (in
, 1))
8657 in
= gen_rtx_PLUS (GET_MODE (in
), op0
, op1
);
8659 insn
= emit_insn_if_valid_for_reload (gen_rtx_SET (out
, in
));
8663 /* If that failed, we must use a conservative two-insn sequence.
8665 Use a move to copy one operand into the reload register. Prefer
8666 to reload a constant, MEM or pseudo since the move patterns can
8667 handle an arbitrary operand. If OP1 is not a constant, MEM or
8668 pseudo and OP1 is not a valid operand for an add instruction, then
8671 After reloading one of the operands into the reload register, add
8672 the reload register to the output register.
8674 If there is another way to do this for a specific machine, a
8675 DEFINE_PEEPHOLE should be specified that recognizes the sequence
8678 code
= optab_handler (add_optab
, GET_MODE (out
));
8680 if (CONSTANT_P (op1
) || MEM_P (op1
) || GET_CODE (op1
) == SUBREG
8682 && REGNO (op1
) >= FIRST_PSEUDO_REGISTER
)
8683 || (code
!= CODE_FOR_nothing
8684 && !insn_operand_matches (code
, 2, op1
)))
8685 tem
= op0
, op0
= op1
, op1
= tem
;
8687 gen_reload (out
, op0
, opnum
, type
);
8689 /* If OP0 and OP1 are the same, we can use OUT for OP1.
8690 This fixes a problem on the 32K where the stack pointer cannot
8691 be used as an operand of an add insn. */
8693 if (rtx_equal_p (op0
, op1
))
8696 insn
= emit_insn_if_valid_for_reload (gen_add2_insn (out
, op1
));
8699 /* Add a REG_EQUIV note so that find_equiv_reg can find it. */
8700 set_dst_reg_note (insn
, REG_EQUIV
, in
, out
);
8704 /* If that failed, copy the address register to the reload register.
8705 Then add the constant to the reload register. */
8707 gcc_assert (!reg_overlap_mentioned_p (out
, op0
));
8708 gen_reload (out
, op1
, opnum
, type
);
8709 insn
= emit_insn (gen_add2_insn (out
, op0
));
8710 set_dst_reg_note (insn
, REG_EQUIV
, in
, out
);
8713 #ifdef SECONDARY_MEMORY_NEEDED
8714 /* If we need a memory location to do the move, do it that way. */
8715 else if ((tem1
= replaced_subreg (in
), tem2
= replaced_subreg (out
),
8716 (REG_P (tem1
) && REG_P (tem2
)))
8717 && REGNO (tem1
) < FIRST_PSEUDO_REGISTER
8718 && REGNO (tem2
) < FIRST_PSEUDO_REGISTER
8719 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (tem1
)),
8720 REGNO_REG_CLASS (REGNO (tem2
)),
8723 /* Get the memory to use and rewrite both registers to its mode. */
8724 rtx loc
= get_secondary_mem (in
, GET_MODE (out
), opnum
, type
);
8726 if (GET_MODE (loc
) != GET_MODE (out
))
8727 out
= gen_rtx_REG (GET_MODE (loc
), reg_or_subregno (out
));
8729 if (GET_MODE (loc
) != GET_MODE (in
))
8730 in
= gen_rtx_REG (GET_MODE (loc
), reg_or_subregno (in
));
8732 gen_reload (loc
, in
, opnum
, type
);
8733 gen_reload (out
, loc
, opnum
, type
);
8736 else if (REG_P (out
) && UNARY_P (in
))
8743 op1
= find_replacement (&XEXP (in
, 0));
8744 if (op1
!= XEXP (in
, 0))
8745 in
= gen_rtx_fmt_e (GET_CODE (in
), GET_MODE (in
), op1
);
8747 /* First, try a plain SET. */
8748 set
= emit_insn_if_valid_for_reload (gen_rtx_SET (out
, in
));
8752 /* If that failed, move the inner operand to the reload
8753 register, and try the same unop with the inner expression
8754 replaced with the reload register. */
8756 if (GET_MODE (op1
) != GET_MODE (out
))
8757 out_moded
= gen_rtx_REG (GET_MODE (op1
), REGNO (out
));
8761 gen_reload (out_moded
, op1
, opnum
, type
);
8763 insn
= gen_rtx_SET (out
, gen_rtx_fmt_e (GET_CODE (in
), GET_MODE (in
),
8765 insn
= emit_insn_if_valid_for_reload (insn
);
8768 set_unique_reg_note (insn
, REG_EQUIV
, in
);
8769 return as_a
<rtx_insn
*> (insn
);
8772 fatal_insn ("failure trying to reload:", set
);
8774 /* If IN is a simple operand, use gen_move_insn. */
8775 else if (OBJECT_P (in
) || GET_CODE (in
) == SUBREG
)
8777 tem
= emit_insn (gen_move_insn (out
, in
));
8778 /* IN may contain a LABEL_REF, if so add a REG_LABEL_OPERAND note. */
8779 mark_jump_label (in
, tem
, 0);
8782 else if (targetm
.have_reload_load_address ())
8783 emit_insn (targetm
.gen_reload_load_address (out
, in
));
8785 /* Otherwise, just write (set OUT IN) and hope for the best. */
8787 emit_insn (gen_rtx_SET (out
, in
));
8789 /* Return the first insn emitted.
8790 We can not just return get_last_insn, because there may have
8791 been multiple instructions emitted. Also note that gen_move_insn may
8792 emit more than one insn itself, so we can not assume that there is one
8793 insn emitted per emit_insn_before call. */
8795 return last
? NEXT_INSN (last
) : get_insns ();
8798 /* Delete a previously made output-reload whose result we now believe
8799 is not needed. First we double-check.
8801 INSN is the insn now being processed.
8802 LAST_RELOAD_REG is the hard register number for which we want to delete
8803 the last output reload.
8804 J is the reload-number that originally used REG. The caller has made
8805 certain that reload J doesn't use REG any longer for input.
8806 NEW_RELOAD_REG is reload register that reload J is using for REG. */
8809 delete_output_reload (rtx_insn
*insn
, int j
, int last_reload_reg
,
8812 rtx_insn
*output_reload_insn
= spill_reg_store
[last_reload_reg
];
8813 rtx reg
= spill_reg_stored_to
[last_reload_reg
];
8816 int n_inherited
= 0;
8821 /* It is possible that this reload has been only used to set another reload
8822 we eliminated earlier and thus deleted this instruction too. */
8823 if (output_reload_insn
->deleted ())
8826 /* Get the raw pseudo-register referred to. */
8828 while (GET_CODE (reg
) == SUBREG
)
8829 reg
= SUBREG_REG (reg
);
8830 substed
= reg_equiv_memory_loc (REGNO (reg
));
8832 /* This is unsafe if the operand occurs more often in the current
8833 insn than it is inherited. */
8834 for (k
= n_reloads
- 1; k
>= 0; k
--)
8836 rtx reg2
= rld
[k
].in
;
8839 if (MEM_P (reg2
) || reload_override_in
[k
])
8840 reg2
= rld
[k
].in_reg
;
8842 if (AUTO_INC_DEC
&& rld
[k
].out
&& ! rld
[k
].out_reg
)
8843 reg2
= XEXP (rld
[k
].in_reg
, 0);
8845 while (GET_CODE (reg2
) == SUBREG
)
8846 reg2
= SUBREG_REG (reg2
);
8847 if (rtx_equal_p (reg2
, reg
))
8849 if (reload_inherited
[k
] || reload_override_in
[k
] || k
== j
)
8855 n_occurrences
= count_occurrences (PATTERN (insn
), reg
, 0);
8856 if (CALL_P (insn
) && CALL_INSN_FUNCTION_USAGE (insn
))
8857 n_occurrences
+= count_occurrences (CALL_INSN_FUNCTION_USAGE (insn
),
8860 n_occurrences
+= count_occurrences (PATTERN (insn
),
8861 eliminate_regs (substed
, VOIDmode
,
8863 for (rtx i1
= reg_equiv_alt_mem_list (REGNO (reg
)); i1
; i1
= XEXP (i1
, 1))
8865 gcc_assert (!rtx_equal_p (XEXP (i1
, 0), substed
));
8866 n_occurrences
+= count_occurrences (PATTERN (insn
), XEXP (i1
, 0), 0);
8868 if (n_occurrences
> n_inherited
)
8871 regno
= REGNO (reg
);
8872 if (regno
>= FIRST_PSEUDO_REGISTER
)
8875 nregs
= hard_regno_nregs
[regno
][GET_MODE (reg
)];
8877 /* If the pseudo-reg we are reloading is no longer referenced
8878 anywhere between the store into it and here,
8879 and we're within the same basic block, then the value can only
8880 pass through the reload reg and end up here.
8881 Otherwise, give up--return. */
8882 for (rtx_insn
*i1
= NEXT_INSN (output_reload_insn
);
8883 i1
!= insn
; i1
= NEXT_INSN (i1
))
8885 if (NOTE_INSN_BASIC_BLOCK_P (i1
))
8887 if ((NONJUMP_INSN_P (i1
) || CALL_P (i1
))
8888 && refers_to_regno_p (regno
, regno
+ nregs
, PATTERN (i1
), NULL
))
8890 /* If this is USE in front of INSN, we only have to check that
8891 there are no more references than accounted for by inheritance. */
8892 while (NONJUMP_INSN_P (i1
) && GET_CODE (PATTERN (i1
)) == USE
)
8894 n_occurrences
+= rtx_equal_p (reg
, XEXP (PATTERN (i1
), 0)) != 0;
8895 i1
= NEXT_INSN (i1
);
8897 if (n_occurrences
<= n_inherited
&& i1
== insn
)
8903 /* We will be deleting the insn. Remove the spill reg information. */
8904 for (k
= hard_regno_nregs
[last_reload_reg
][GET_MODE (reg
)]; k
-- > 0; )
8906 spill_reg_store
[last_reload_reg
+ k
] = 0;
8907 spill_reg_stored_to
[last_reload_reg
+ k
] = 0;
8910 /* The caller has already checked that REG dies or is set in INSN.
8911 It has also checked that we are optimizing, and thus some
8912 inaccuracies in the debugging information are acceptable.
8913 So we could just delete output_reload_insn. But in some cases
8914 we can improve the debugging information without sacrificing
8915 optimization - maybe even improving the code: See if the pseudo
8916 reg has been completely replaced with reload regs. If so, delete
8917 the store insn and forget we had a stack slot for the pseudo. */
8918 if (rld
[j
].out
!= rld
[j
].in
8919 && REG_N_DEATHS (REGNO (reg
)) == 1
8920 && REG_N_SETS (REGNO (reg
)) == 1
8921 && REG_BASIC_BLOCK (REGNO (reg
)) >= NUM_FIXED_BLOCKS
8922 && find_regno_note (insn
, REG_DEAD
, REGNO (reg
)))
8926 /* We know that it was used only between here and the beginning of
8927 the current basic block. (We also know that the last use before
8928 INSN was the output reload we are thinking of deleting, but never
8929 mind that.) Search that range; see if any ref remains. */
8930 for (i2
= PREV_INSN (insn
); i2
; i2
= PREV_INSN (i2
))
8932 rtx set
= single_set (i2
);
8934 /* Uses which just store in the pseudo don't count,
8935 since if they are the only uses, they are dead. */
8936 if (set
!= 0 && SET_DEST (set
) == reg
)
8938 if (LABEL_P (i2
) || JUMP_P (i2
))
8940 if ((NONJUMP_INSN_P (i2
) || CALL_P (i2
))
8941 && reg_mentioned_p (reg
, PATTERN (i2
)))
8943 /* Some other ref remains; just delete the output reload we
8945 delete_address_reloads (output_reload_insn
, insn
);
8946 delete_insn (output_reload_insn
);
8951 /* Delete the now-dead stores into this pseudo. Note that this
8952 loop also takes care of deleting output_reload_insn. */
8953 for (i2
= PREV_INSN (insn
); i2
; i2
= PREV_INSN (i2
))
8955 rtx set
= single_set (i2
);
8957 if (set
!= 0 && SET_DEST (set
) == reg
)
8959 delete_address_reloads (i2
, insn
);
8962 if (LABEL_P (i2
) || JUMP_P (i2
))
8966 /* For the debugging info, say the pseudo lives in this reload reg. */
8967 reg_renumber
[REGNO (reg
)] = REGNO (new_reload_reg
);
8968 if (ira_conflicts_p
)
8969 /* Inform IRA about the change. */
8970 ira_mark_allocation_change (REGNO (reg
));
8971 alter_reg (REGNO (reg
), -1, false);
8975 delete_address_reloads (output_reload_insn
, insn
);
8976 delete_insn (output_reload_insn
);
8980 /* We are going to delete DEAD_INSN. Recursively delete loads of
8981 reload registers used in DEAD_INSN that are not used till CURRENT_INSN.
8982 CURRENT_INSN is being reloaded, so we have to check its reloads too. */
8984 delete_address_reloads (rtx_insn
*dead_insn
, rtx_insn
*current_insn
)
8986 rtx set
= single_set (dead_insn
);
8988 rtx_insn
*prev
, *next
;
8991 rtx dst
= SET_DEST (set
);
8993 delete_address_reloads_1 (dead_insn
, XEXP (dst
, 0), current_insn
);
8995 /* If we deleted the store from a reloaded post_{in,de}c expression,
8996 we can delete the matching adds. */
8997 prev
= PREV_INSN (dead_insn
);
8998 next
= NEXT_INSN (dead_insn
);
8999 if (! prev
|| ! next
)
9001 set
= single_set (next
);
9002 set2
= single_set (prev
);
9004 || GET_CODE (SET_SRC (set
)) != PLUS
|| GET_CODE (SET_SRC (set2
)) != PLUS
9005 || !CONST_INT_P (XEXP (SET_SRC (set
), 1))
9006 || !CONST_INT_P (XEXP (SET_SRC (set2
), 1)))
9008 dst
= SET_DEST (set
);
9009 if (! rtx_equal_p (dst
, SET_DEST (set2
))
9010 || ! rtx_equal_p (dst
, XEXP (SET_SRC (set
), 0))
9011 || ! rtx_equal_p (dst
, XEXP (SET_SRC (set2
), 0))
9012 || (INTVAL (XEXP (SET_SRC (set
), 1))
9013 != -INTVAL (XEXP (SET_SRC (set2
), 1))))
9015 delete_related_insns (prev
);
9016 delete_related_insns (next
);
9019 /* Subfunction of delete_address_reloads: process registers found in X. */
9021 delete_address_reloads_1 (rtx_insn
*dead_insn
, rtx x
, rtx_insn
*current_insn
)
9023 rtx_insn
*prev
, *i2
;
9026 enum rtx_code code
= GET_CODE (x
);
9030 const char *fmt
= GET_RTX_FORMAT (code
);
9031 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
9034 delete_address_reloads_1 (dead_insn
, XEXP (x
, i
), current_insn
);
9035 else if (fmt
[i
] == 'E')
9037 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
9038 delete_address_reloads_1 (dead_insn
, XVECEXP (x
, i
, j
),
9045 if (spill_reg_order
[REGNO (x
)] < 0)
9048 /* Scan backwards for the insn that sets x. This might be a way back due
9050 for (prev
= PREV_INSN (dead_insn
); prev
; prev
= PREV_INSN (prev
))
9052 code
= GET_CODE (prev
);
9053 if (code
== CODE_LABEL
|| code
== JUMP_INSN
)
9057 if (reg_set_p (x
, PATTERN (prev
)))
9059 if (reg_referenced_p (x
, PATTERN (prev
)))
9062 if (! prev
|| INSN_UID (prev
) < reload_first_uid
)
9064 /* Check that PREV only sets the reload register. */
9065 set
= single_set (prev
);
9068 dst
= SET_DEST (set
);
9070 || ! rtx_equal_p (dst
, x
))
9072 if (! reg_set_p (dst
, PATTERN (dead_insn
)))
9074 /* Check if DST was used in a later insn -
9075 it might have been inherited. */
9076 for (i2
= NEXT_INSN (dead_insn
); i2
; i2
= NEXT_INSN (i2
))
9082 if (reg_referenced_p (dst
, PATTERN (i2
)))
9084 /* If there is a reference to the register in the current insn,
9085 it might be loaded in a non-inherited reload. If no other
9086 reload uses it, that means the register is set before
9088 if (i2
== current_insn
)
9090 for (j
= n_reloads
- 1; j
>= 0; j
--)
9091 if ((rld
[j
].reg_rtx
== dst
&& reload_inherited
[j
])
9092 || reload_override_in
[j
] == dst
)
9094 for (j
= n_reloads
- 1; j
>= 0; j
--)
9095 if (rld
[j
].in
&& rld
[j
].reg_rtx
== dst
)
9104 /* If DST is still live at CURRENT_INSN, check if it is used for
9105 any reload. Note that even if CURRENT_INSN sets DST, we still
9106 have to check the reloads. */
9107 if (i2
== current_insn
)
9109 for (j
= n_reloads
- 1; j
>= 0; j
--)
9110 if ((rld
[j
].reg_rtx
== dst
&& reload_inherited
[j
])
9111 || reload_override_in
[j
] == dst
)
9113 /* ??? We can't finish the loop here, because dst might be
9114 allocated to a pseudo in this block if no reload in this
9115 block needs any of the classes containing DST - see
9116 spill_hard_reg. There is no easy way to tell this, so we
9117 have to scan till the end of the basic block. */
9119 if (reg_set_p (dst
, PATTERN (i2
)))
9123 delete_address_reloads_1 (prev
, SET_SRC (set
), current_insn
);
9124 reg_reloaded_contents
[REGNO (dst
)] = -1;
9128 /* Output reload-insns to reload VALUE into RELOADREG.
9129 VALUE is an autoincrement or autodecrement RTX whose operand
9130 is a register or memory location;
9131 so reloading involves incrementing that location.
9132 IN is either identical to VALUE, or some cheaper place to reload from.
9134 INC_AMOUNT is the number to increment or decrement by (always positive).
9135 This cannot be deduced from VALUE. */
9138 inc_for_reload (rtx reloadreg
, rtx in
, rtx value
, int inc_amount
)
9140 /* REG or MEM to be copied and incremented. */
9141 rtx incloc
= find_replacement (&XEXP (value
, 0));
9142 /* Nonzero if increment after copying. */
9143 int post
= (GET_CODE (value
) == POST_DEC
|| GET_CODE (value
) == POST_INC
9144 || GET_CODE (value
) == POST_MODIFY
);
9149 rtx real_in
= in
== value
? incloc
: in
;
9151 /* No hard register is equivalent to this register after
9152 inc/dec operation. If REG_LAST_RELOAD_REG were nonzero,
9153 we could inc/dec that register as well (maybe even using it for
9154 the source), but I'm not sure it's worth worrying about. */
9156 reg_last_reload_reg
[REGNO (incloc
)] = 0;
9158 if (GET_CODE (value
) == PRE_MODIFY
|| GET_CODE (value
) == POST_MODIFY
)
9160 gcc_assert (GET_CODE (XEXP (value
, 1)) == PLUS
);
9161 inc
= find_replacement (&XEXP (XEXP (value
, 1), 1));
9165 if (GET_CODE (value
) == PRE_DEC
|| GET_CODE (value
) == POST_DEC
)
9166 inc_amount
= -inc_amount
;
9168 inc
= GEN_INT (inc_amount
);
9171 /* If this is post-increment, first copy the location to the reload reg. */
9172 if (post
&& real_in
!= reloadreg
)
9173 emit_insn (gen_move_insn (reloadreg
, real_in
));
9177 /* See if we can directly increment INCLOC. Use a method similar to
9178 that in gen_reload. */
9180 last
= get_last_insn ();
9181 add_insn
= emit_insn (gen_rtx_SET (incloc
,
9182 gen_rtx_PLUS (GET_MODE (incloc
),
9185 code
= recog_memoized (add_insn
);
9188 extract_insn (add_insn
);
9189 if (constrain_operands (1, get_enabled_alternatives (add_insn
)))
9191 /* If this is a pre-increment and we have incremented the value
9192 where it lives, copy the incremented value to RELOADREG to
9193 be used as an address. */
9196 emit_insn (gen_move_insn (reloadreg
, incloc
));
9200 delete_insns_since (last
);
9203 /* If couldn't do the increment directly, must increment in RELOADREG.
9204 The way we do this depends on whether this is pre- or post-increment.
9205 For pre-increment, copy INCLOC to the reload register, increment it
9206 there, then save back. */
9210 if (in
!= reloadreg
)
9211 emit_insn (gen_move_insn (reloadreg
, real_in
));
9212 emit_insn (gen_add2_insn (reloadreg
, inc
));
9213 emit_insn (gen_move_insn (incloc
, reloadreg
));
9218 Because this might be a jump insn or a compare, and because RELOADREG
9219 may not be available after the insn in an input reload, we must do
9220 the incrementation before the insn being reloaded for.
9222 We have already copied IN to RELOADREG. Increment the copy in
9223 RELOADREG, save that back, then decrement RELOADREG so it has
9224 the original value. */
9226 emit_insn (gen_add2_insn (reloadreg
, inc
));
9227 emit_insn (gen_move_insn (incloc
, reloadreg
));
9228 if (CONST_INT_P (inc
))
9229 emit_insn (gen_add2_insn (reloadreg
,
9230 gen_int_mode (-INTVAL (inc
),
9231 GET_MODE (reloadreg
))));
9233 emit_insn (gen_sub2_insn (reloadreg
, inc
));
9238 add_auto_inc_notes (rtx_insn
*insn
, rtx x
)
9240 enum rtx_code code
= GET_CODE (x
);
9244 if (code
== MEM
&& auto_inc_p (XEXP (x
, 0)))
9246 add_reg_note (insn
, REG_INC
, XEXP (XEXP (x
, 0), 0));
9250 /* Scan all the operand sub-expressions. */
9251 fmt
= GET_RTX_FORMAT (code
);
9252 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
9255 add_auto_inc_notes (insn
, XEXP (x
, i
));
9256 else if (fmt
[i
] == 'E')
9257 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
9258 add_auto_inc_notes (insn
, XVECEXP (x
, i
, j
));