1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
4 2011 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "rtl-error.h"
32 #include "insn-config.h"
39 #include "addresses.h"
40 #include "basic-block.h"
51 /* This file contains the reload pass of the compiler, which is
52 run after register allocation has been done. It checks that
53 each insn is valid (operands required to be in registers really
54 are in registers of the proper class) and fixes up invalid ones
55 by copying values temporarily into registers for the insns
58 The results of register allocation are described by the vector
59 reg_renumber; the insns still contain pseudo regs, but reg_renumber
60 can be used to find which hard reg, if any, a pseudo reg is in.
62 The technique we always use is to free up a few hard regs that are
63 called ``reload regs'', and for each place where a pseudo reg
64 must be in a hard reg, copy it temporarily into one of the reload regs.
66 Reload regs are allocated locally for every instruction that needs
67 reloads. When there are pseudos which are allocated to a register that
68 has been chosen as a reload reg, such pseudos must be ``spilled''.
69 This means that they go to other hard regs, or to stack slots if no other
70 available hard regs can be found. Spilling can invalidate more
71 insns, requiring additional need for reloads, so we must keep checking
72 until the process stabilizes.
74 For machines with different classes of registers, we must keep track
75 of the register class needed for each reload, and make sure that
76 we allocate enough reload registers of each class.
78 The file reload.c contains the code that checks one insn for
79 validity and reports the reloads that it needs. This file
80 is in charge of scanning the entire rtl code, accumulating the
81 reload needs, spilling, assigning reload registers to use for
82 fixing up each insn, and generating the new insns to copy values
83 into the reload registers. */
85 struct target_reload default_target_reload
;
87 struct target_reload
*this_target_reload
= &default_target_reload
;
90 #define spill_indirect_levels \
91 (this_target_reload->x_spill_indirect_levels)
93 /* During reload_as_needed, element N contains a REG rtx for the hard reg
94 into which reg N has been reloaded (perhaps for a previous insn). */
95 static rtx
*reg_last_reload_reg
;
97 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
98 for an output reload that stores into reg N. */
99 static regset_head reg_has_output_reload
;
101 /* Indicates which hard regs are reload-registers for an output reload
102 in the current insn. */
103 static HARD_REG_SET reg_is_output_reload
;
105 /* Widest width in which each pseudo reg is referred to (via subreg). */
106 static unsigned int *reg_max_ref_width
;
108 /* Vector to remember old contents of reg_renumber before spilling. */
109 static short *reg_old_renumber
;
111 /* During reload_as_needed, element N contains the last pseudo regno reloaded
112 into hard register N. If that pseudo reg occupied more than one register,
113 reg_reloaded_contents points to that pseudo for each spill register in
114 use; all of these must remain set for an inheritance to occur. */
115 static int reg_reloaded_contents
[FIRST_PSEUDO_REGISTER
];
117 /* During reload_as_needed, element N contains the insn for which
118 hard register N was last used. Its contents are significant only
119 when reg_reloaded_valid is set for this register. */
120 static rtx reg_reloaded_insn
[FIRST_PSEUDO_REGISTER
];
122 /* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid. */
123 static HARD_REG_SET reg_reloaded_valid
;
124 /* Indicate if the register was dead at the end of the reload.
125 This is only valid if reg_reloaded_contents is set and valid. */
126 static HARD_REG_SET reg_reloaded_dead
;
128 /* Indicate whether the register's current value is one that is not
129 safe to retain across a call, even for registers that are normally
130 call-saved. This is only meaningful for members of reg_reloaded_valid. */
131 static HARD_REG_SET reg_reloaded_call_part_clobbered
;
133 /* Number of spill-regs so far; number of valid elements of spill_regs. */
136 /* In parallel with spill_regs, contains REG rtx's for those regs.
137 Holds the last rtx used for any given reg, or 0 if it has never
138 been used for spilling yet. This rtx is reused, provided it has the proper mode. */
140 static rtx spill_reg_rtx
[FIRST_PSEUDO_REGISTER
];
142 /* In parallel with spill_regs, contains nonzero for a spill reg
143 that was stored after the last time it was used.
144 The precise value is the insn generated to do the store. */
145 static rtx spill_reg_store
[FIRST_PSEUDO_REGISTER
];
147 /* This is the register that was stored with spill_reg_store. This is a
148 copy of reload_out / reload_out_reg when the value was stored; if
149 reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg. */
150 static rtx spill_reg_stored_to
[FIRST_PSEUDO_REGISTER
];
152 /* This table is the inverse mapping of spill_regs:
153 indexed by hard reg number,
154 it contains the position of that reg in spill_regs,
155 or -1 for something that is not in spill_regs.
157 ?!? This is no longer accurate. */
158 static short spill_reg_order
[FIRST_PSEUDO_REGISTER
];
160 /* This reg set indicates registers that can't be used as spill registers for
161 the currently processed insn. These are the hard registers which are live
162 during the insn, but not allocated to pseudos, as well as fixed registers. */
164 static HARD_REG_SET bad_spill_regs
;
166 /* These are the hard registers that can't be used as spill register for any
167 insn. This includes registers used for user variables and registers that
168 we can't eliminate. A register that appears in this set also can't be used
169 to retry register allocation. */
170 static HARD_REG_SET bad_spill_regs_global
;
172 /* Describes order of use of registers for reloading
173 of spilled pseudo-registers. `n_spills' is the number of
174 elements that are actually valid; new ones are added at the end.
176 Both spill_regs and spill_reg_order are used on two occasions:
177 once during find_reload_regs, where they keep track of the spill registers
178 for a single insn, but also during reload_as_needed where they show all
179 the registers ever used by reload. For the latter case, the information
180 is calculated during finish_spills. */
181 static short spill_regs
[FIRST_PSEUDO_REGISTER
];
183 /* This vector of reg sets indicates, for each pseudo, which hard registers
184 may not be used for retrying global allocation because the register was
185 formerly spilled from one of them. If we allowed reallocating a pseudo to
186 a register that it was already allocated to, reload might not terminate. */
188 static HARD_REG_SET
*pseudo_previous_regs
;
190 /* This vector of reg sets indicates, for each pseudo, which hard
191 registers may not be used for retrying global allocation because they
192 are used as spill registers during one of the insns in which the reg dies. */
194 static HARD_REG_SET
*pseudo_forbidden_regs
;
196 /* All hard regs that have been used as spill registers for any insn are
197 marked in this set. */
198 static HARD_REG_SET used_spill_regs
;
200 /* Index of last register assigned as a spill register. We allocate in
201 a round-robin fashion. */
202 static int last_spill_reg
;
204 /* Record the stack slot for each spilled hard register. */
205 static rtx spill_stack_slot
[FIRST_PSEUDO_REGISTER
];
207 /* Width allocated so far for that stack slot. */
208 static unsigned int spill_stack_slot_width
[FIRST_PSEUDO_REGISTER
];
210 /* Record which pseudos needed to be spilled. */
211 static regset_head spilled_pseudos
;
213 /* Record which pseudos changed their allocation in finish_spills. */
214 static regset_head changed_allocation_pseudos
;
216 /* Used for communication between order_regs_for_reload and count_pseudo.
217 Used to avoid counting one pseudo twice. */
218 static regset_head pseudos_counted
;
220 /* First uid used by insns created by reload in this function.
221 Used in find_equiv_reg. */
/* First uid used by insns created by reload in this function.
   Used in find_equiv_reg.  */
int reload_first_uid;

/* Flag set by local-alloc or global-alloc if anything is live in
   a call-clobbered reg across calls.  */
int caller_save_needed;

/* Set to 1 while reload_as_needed is operating.
   Required by some machines to handle any generated moves differently.  */
int reload_in_progress = 0;
232 /* This obstack is used for allocation of rtl during register elimination.
233 The allocated storage can be freed once find_reloads has processed the insn. */
235 static struct obstack reload_obstack
;
237 /* Points to the beginning of the reload_obstack. All insn_chain structures
238 are allocated first. */
239 static char *reload_startobj
;
241 /* The point after all insn_chain structures. Used to quickly deallocate
242 memory allocated in copy_reloads during calculate_needs_all_insns. */
243 static char *reload_firstobj
;
245 /* This points before all local rtl generated by register elimination.
246 Used to quickly free all memory after processing one insn. */
247 static char *reload_insn_firstobj
;
249 /* List of insn_chain instructions, one for every insn that reload needs to
examine. */
251 struct insn_chain
*reload_insn_chain
;
253 /* TRUE if we potentially left dead insns in the insn stream and want to
254 run DCE immediately after reload, FALSE otherwise. */
255 static bool need_dce
;
257 /* List of all insns needing reloads. */
258 static struct insn_chain
*insns_need_reload
;
260 /* This structure is used to record information about register eliminations.
261 Each array entry describes one possible way of eliminating a register
262 in favor of another. If there is more than one way of eliminating a
263 particular register, the most preferred should be specified first. */
267 int from
; /* Register number to be eliminated. */
268 int to
; /* Register number used as replacement. */
269 HOST_WIDE_INT initial_offset
; /* Initial difference between values. */
270 int can_eliminate
; /* Nonzero if this elimination can be done. */
271 int can_eliminate_previous
; /* Value returned by TARGET_CAN_ELIMINATE
272 target hook in previous scan over insns made by reload. */
274 HOST_WIDE_INT offset
; /* Current offset between the two regs. */
275 HOST_WIDE_INT previous_offset
;/* Offset at end of previous insn. */
276 int ref_outside_mem
; /* "to" has been referenced outside a MEM. */
277 rtx from_rtx
; /* REG rtx for the register to be eliminated.
278 We cannot simply compare the number since
279 we might then spuriously replace a hard
280 register corresponding to a pseudo
281 assigned to the reg to be eliminated. */
282 rtx to_rtx
; /* REG rtx for the replacement. */
285 static struct elim_table
*reg_eliminate
= 0;
287 /* This is an intermediate structure to initialize the table. It has
288 exactly the members provided by ELIMINABLE_REGS. */
289 static const struct elim_table_1
293 } reg_eliminate_1
[] =
295 /* If a set of eliminable registers was specified, define the table from it.
296 Otherwise, default to the normal case of the frame pointer being
297 replaced by the stack pointer. */
299 #ifdef ELIMINABLE_REGS
302 {{ FRAME_POINTER_REGNUM
, STACK_POINTER_REGNUM
}};
305 #define NUM_ELIMINABLE_REGS ARRAY_SIZE (reg_eliminate_1)
307 /* Record the number of pending eliminations that have an offset not equal
308 to their initial offset. If nonzero, we use a new copy of each
309 replacement result in any insns encountered. */
310 int num_not_at_initial_offset
;
312 /* Count the number of registers that we may be able to eliminate. */
313 static int num_eliminable
;
314 /* And the number of registers that are equivalent to a constant that
315 can be eliminated to frame_pointer / arg_pointer + constant. */
316 static int num_eliminable_invariants
;
318 /* For each label, we record the offset of each elimination. If we reach
319 a label by more than one path and an offset differs, we cannot do the
320 elimination. This information is indexed by the difference of the
321 number of the label and the first label number. We can't offset the
322 pointer itself as this can cause problems on machines with segmented
323 memory. The first table is an array of flags that records whether we
324 have yet encountered a label and the second table is an array of arrays,
325 one entry in the latter array for each elimination. */
327 static int first_label_num
;
328 static char *offsets_known_at
;
329 static HOST_WIDE_INT (*offsets_at
)[NUM_ELIMINABLE_REGS
];
331 VEC(reg_equivs_t
,gc
) *reg_equivs
;
333 /* Stack of addresses where an rtx has been changed. We can undo the
334 changes by popping items off the stack and restoring the original
335 value at each location.
337 We use this simplistic undo capability rather than copy_rtx as copy_rtx
338 will not make a deep copy of a normally sharable rtx, such as
339 (const (plus (symbol_ref) (const_int))). If such an expression appears
340 as R1 in gen_reload_chain_without_interm_reg_p, then a shared
341 rtx expression would be changed. See PR 42431. */
345 DEF_VEC_ALLOC_P(rtx_p
,heap
);
346 static VEC(rtx_p
,heap
) *substitute_stack
;
348 /* Number of labels in the current function. */
350 static int num_labels
;
352 static void replace_pseudos_in (rtx
*, enum machine_mode
, rtx
);
353 static void maybe_fix_stack_asms (void);
354 static void copy_reloads (struct insn_chain
*);
355 static void calculate_needs_all_insns (int);
356 static int find_reg (struct insn_chain
*, int);
357 static void find_reload_regs (struct insn_chain
*);
358 static void select_reload_regs (void);
359 static void delete_caller_save_insns (void);
361 static void spill_failure (rtx
, enum reg_class
);
362 static void count_spilled_pseudo (int, int, int);
363 static void delete_dead_insn (rtx
);
364 static void alter_reg (int, int, bool);
365 static void set_label_offsets (rtx
, rtx
, int);
366 static void check_eliminable_occurrences (rtx
);
367 static void elimination_effects (rtx
, enum machine_mode
);
368 static rtx
eliminate_regs_1 (rtx
, enum machine_mode
, rtx
, bool, bool);
369 static int eliminate_regs_in_insn (rtx
, int);
370 static void update_eliminable_offsets (void);
371 static void mark_not_eliminable (rtx
, const_rtx
, void *);
372 static void set_initial_elim_offsets (void);
373 static bool verify_initial_elim_offsets (void);
374 static void set_initial_label_offsets (void);
375 static void set_offsets_for_label (rtx
);
376 static void init_eliminable_invariants (rtx
, bool);
377 static void init_elim_table (void);
378 static void free_reg_equiv (void);
379 static void update_eliminables (HARD_REG_SET
*);
380 static void elimination_costs_in_insn (rtx
);
381 static void spill_hard_reg (unsigned int, int);
382 static int finish_spills (int);
383 static void scan_paradoxical_subregs (rtx
);
384 static void count_pseudo (int);
385 static void order_regs_for_reload (struct insn_chain
*);
386 static void reload_as_needed (int);
387 static void forget_old_reloads_1 (rtx
, const_rtx
, void *);
388 static void forget_marked_reloads (regset
);
389 static int reload_reg_class_lower (const void *, const void *);
390 static void mark_reload_reg_in_use (unsigned int, int, enum reload_type
,
392 static void clear_reload_reg_in_use (unsigned int, int, enum reload_type
,
394 static int reload_reg_free_p (unsigned int, int, enum reload_type
);
395 static int reload_reg_free_for_value_p (int, int, int, enum reload_type
,
397 static int free_for_value_p (int, enum machine_mode
, int, enum reload_type
,
399 static int reload_reg_reaches_end_p (unsigned int, int, enum reload_type
);
400 static int allocate_reload_reg (struct insn_chain
*, int, int);
401 static int conflicts_with_override (rtx
);
402 static void failed_reload (rtx
, int);
403 static int set_reload_reg (int, int);
404 static void choose_reload_regs_init (struct insn_chain
*, rtx
*);
405 static void choose_reload_regs (struct insn_chain
*);
406 static void emit_input_reload_insns (struct insn_chain
*, struct reload
*,
408 static void emit_output_reload_insns (struct insn_chain
*, struct reload
*,
410 static void do_input_reload (struct insn_chain
*, struct reload
*, int);
411 static void do_output_reload (struct insn_chain
*, struct reload
*, int);
412 static void emit_reload_insns (struct insn_chain
*);
413 static void delete_output_reload (rtx
, int, int, rtx
);
414 static void delete_address_reloads (rtx
, rtx
);
415 static void delete_address_reloads_1 (rtx
, rtx
, rtx
);
416 static void inc_for_reload (rtx
, rtx
, rtx
, int);
418 static void add_auto_inc_notes (rtx
, rtx
);
420 static void substitute (rtx
*, const_rtx
, rtx
);
421 static bool gen_reload_chain_without_interm_reg_p (int, int);
422 static int reloads_conflict (int, int);
423 static rtx
gen_reload (rtx
, rtx
, int, enum reload_type
);
424 static rtx
emit_insn_if_valid_for_reload (rtx
);
426 /* Initialize the reload pass. This is called at the beginning of compilation
427 and may be called again if the target is reinitialized. */
434 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
435 Set spill_indirect_levels to the number of levels such addressing is
436 permitted, zero if it is not permitted at all. */
439 = gen_rtx_MEM (Pmode
,
442 LAST_VIRTUAL_REGISTER
+ 1),
444 spill_indirect_levels
= 0;
446 while (memory_address_p (QImode
, tem
))
448 spill_indirect_levels
++;
449 tem
= gen_rtx_MEM (Pmode
, tem
);
452 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
454 tem
= gen_rtx_MEM (Pmode
, gen_rtx_SYMBOL_REF (Pmode
, "foo"));
455 indirect_symref_ok
= memory_address_p (QImode
, tem
);
457 /* See if reg+reg is a valid (and offsettable) address. */
459 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
461 tem
= gen_rtx_PLUS (Pmode
,
462 gen_rtx_REG (Pmode
, HARD_FRAME_POINTER_REGNUM
),
463 gen_rtx_REG (Pmode
, i
));
465 /* This way, we make sure that reg+reg is an offsettable address. */
466 tem
= plus_constant (tem
, 4);
468 if (memory_address_p (QImode
, tem
))
470 double_reg_address_ok
= 1;
475 /* Initialize obstack for our rtl allocation. */
476 gcc_obstack_init (&reload_obstack
);
477 reload_startobj
= XOBNEWVAR (&reload_obstack
, char, 0);
479 INIT_REG_SET (&spilled_pseudos
);
480 INIT_REG_SET (&changed_allocation_pseudos
);
481 INIT_REG_SET (&pseudos_counted
);
484 /* List of insn chains that are currently unused. */
485 static struct insn_chain
*unused_insn_chains
= 0;
487 /* Allocate an empty insn_chain structure. */
489 new_insn_chain (void)
491 struct insn_chain
*c
;
493 if (unused_insn_chains
== 0)
495 c
= XOBNEW (&reload_obstack
, struct insn_chain
);
496 INIT_REG_SET (&c
->live_throughout
);
497 INIT_REG_SET (&c
->dead_or_set
);
501 c
= unused_insn_chains
;
502 unused_insn_chains
= c
->next
;
504 c
->is_caller_save_insn
= 0;
505 c
->need_operand_change
= 0;
511 /* Small utility function to set all regs in hard reg set TO which are
512 allocated to pseudos in regset FROM. */
515 compute_use_by_pseudos (HARD_REG_SET
*to
, regset from
)
518 reg_set_iterator rsi
;
520 EXECUTE_IF_SET_IN_REG_SET (from
, FIRST_PSEUDO_REGISTER
, regno
, rsi
)
522 int r
= reg_renumber
[regno
];
526 /* reload_combine uses the information from DF_LIVE_IN,
527 which might still contain registers that have not
528 actually been allocated since they have an equivalence. */
530 gcc_assert (ira_conflicts_p
|| reload_completed
);
533 add_to_hard_reg_set (to
, PSEUDO_REGNO_MODE (regno
), r
);
537 /* Replace all pseudos found in LOC with their corresponding reg_equiv_* replacements. */
541 replace_pseudos_in (rtx
*loc
, enum machine_mode mem_mode
, rtx usage
)
554 unsigned int regno
= REGNO (x
);
556 if (regno
< FIRST_PSEUDO_REGISTER
)
559 x
= eliminate_regs_1 (x
, mem_mode
, usage
, true, false);
563 replace_pseudos_in (loc
, mem_mode
, usage
);
567 if (reg_equiv_constant (regno
))
568 *loc
= reg_equiv_constant (regno
);
569 else if (reg_equiv_invariant (regno
))
570 *loc
= reg_equiv_invariant (regno
);
571 else if (reg_equiv_mem (regno
))
572 *loc
= reg_equiv_mem (regno
);
573 else if (reg_equiv_address (regno
))
574 *loc
= gen_rtx_MEM (GET_MODE (x
), reg_equiv_address (regno
));
577 gcc_assert (!REG_P (regno_reg_rtx
[regno
])
578 || REGNO (regno_reg_rtx
[regno
]) != regno
);
579 *loc
= regno_reg_rtx
[regno
];
584 else if (code
== MEM
)
586 replace_pseudos_in (& XEXP (x
, 0), GET_MODE (x
), usage
);
590 /* Process each of our operands recursively. */
591 fmt
= GET_RTX_FORMAT (code
);
592 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++, fmt
++)
594 replace_pseudos_in (&XEXP (x
, i
), mem_mode
, usage
);
595 else if (*fmt
== 'E')
596 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
597 replace_pseudos_in (& XVECEXP (x
, i
, j
), mem_mode
, usage
);
600 /* Determine if the current function has an exception receiver block
601 that reaches the exit block via non-exceptional edges */
604 has_nonexceptional_receiver (void)
608 basic_block
*tos
, *worklist
, bb
;
610 /* If we're not optimizing, then just err on the safe side. */
614 /* First determine which blocks can reach exit via normal paths. */
615 tos
= worklist
= XNEWVEC (basic_block
, n_basic_blocks
+ 1);
618 bb
->flags
&= ~BB_REACHABLE
;
620 /* Place the exit block on our worklist. */
621 EXIT_BLOCK_PTR
->flags
|= BB_REACHABLE
;
622 *tos
++ = EXIT_BLOCK_PTR
;
624 /* Iterate: find everything reachable from what we've already seen. */
625 while (tos
!= worklist
)
629 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
630 if (!(e
->flags
& EDGE_ABNORMAL
))
632 basic_block src
= e
->src
;
634 if (!(src
->flags
& BB_REACHABLE
))
636 src
->flags
|= BB_REACHABLE
;
643 /* Now see if there's a reachable block with an exceptional incoming edge. */
646 if (bb
->flags
& BB_REACHABLE
&& bb_has_abnormal_pred (bb
))
649 /* No exceptional block reached exit unexceptionally. */
653 /* Grow (or allocate) the REG_EQUIVS array from its current size (which may be
654 zero elements) to MAX_REG_NUM elements.
656 Initialize all new fields to NULL and update REG_EQUIVS_SIZE. */
658 grow_reg_equivs (void)
660 int old_size
= VEC_length (reg_equivs_t
, reg_equivs
);
661 int max_regno
= max_reg_num ();
664 VEC_reserve (reg_equivs_t
, gc
, reg_equivs
, max_regno
);
665 for (i
= old_size
; i
< max_regno
; i
++)
667 VEC_quick_insert (reg_equivs_t
, reg_equivs
, i
, 0);
668 memset (VEC_index (reg_equivs_t
, reg_equivs
, i
), 0, sizeof (reg_equivs_t
));
674 /* Global variables used by reload and its subroutines. */
676 /* The current basic block while in calculate_elim_costs_all_insns. */
677 static basic_block elim_bb
;
679 /* Set during calculate_needs if an insn needs register elimination. */
680 static int something_needs_elimination
;
681 /* Set during calculate_needs if an insn needs an operand changed. */
682 static int something_needs_operands_changed
;
683 /* Set by alter_regs if we spilled a register to the stack. */
684 static bool something_was_spilled
;
686 /* Nonzero means we couldn't get enough spill regs. */
689 /* Temporary array of pseudo-register number. */
690 static int *temp_pseudo_reg_arr
;
692 /* Main entry point for the reload pass.
694 FIRST is the first insn of the function being compiled.
696 GLOBAL nonzero means we were called from global_alloc
697 and should attempt to reallocate any pseudoregs that we
698 displace from hard regs we will use for reloads.
699 If GLOBAL is zero, we do not have enough information to do that,
700 so any pseudo reg that is spilled must go to the stack.
702 Return value is TRUE if reload likely left dead insns in the
703 stream and a DCE pass should be run to elimiante them. Else the
704 return value is FALSE. */
707 reload (rtx first
, int global
)
711 struct elim_table
*ep
;
715 /* Make sure even insns with volatile mem refs are recognizable. */
720 reload_firstobj
= XOBNEWVAR (&reload_obstack
, char, 0);
722 /* Make sure that the last insn in the chain
723 is not something that needs reloading. */
724 emit_note (NOTE_INSN_DELETED
);
726 /* Enable find_equiv_reg to distinguish insns made by reload. */
727 reload_first_uid
= get_max_uid ();
729 #ifdef SECONDARY_MEMORY_NEEDED
730 /* Initialize the secondary memory table. */
731 clear_secondary_mem ();
734 /* We don't have a stack slot for any spill reg yet. */
735 memset (spill_stack_slot
, 0, sizeof spill_stack_slot
);
736 memset (spill_stack_slot_width
, 0, sizeof spill_stack_slot_width
);
738 /* Initialize the save area information for caller-save, in case some are needed. */
742 /* Compute which hard registers are now in use
743 as homes for pseudo registers.
744 This is done here rather than (eg) in global_alloc
745 because this point is reached even if not optimizing. */
746 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
749 /* A function that has a nonlocal label that can reach the exit
750 block via non-exceptional paths must save all call-saved registers. */
752 if (cfun
->has_nonlocal_label
753 && has_nonexceptional_receiver ())
754 crtl
->saves_all_registers
= 1;
756 if (crtl
->saves_all_registers
)
757 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
758 if (! call_used_regs
[i
] && ! fixed_regs
[i
] && ! LOCAL_REGNO (i
))
759 df_set_regs_ever_live (i
, true);
761 /* Find all the pseudo registers that didn't get hard regs
762 but do have known equivalent constants or memory slots.
763 These include parameters (known equivalent to parameter slots)
764 and cse'd or loop-moved constant memory addresses.
766 Record constant equivalents in reg_equiv_constant
767 so they will be substituted by find_reloads.
768 Record memory equivalents in reg_mem_equiv so they can
769 be substituted eventually by altering the REG-rtx's. */
772 reg_max_ref_width
= XCNEWVEC (unsigned int, max_regno
);
773 reg_old_renumber
= XCNEWVEC (short, max_regno
);
774 memcpy (reg_old_renumber
, reg_renumber
, max_regno
* sizeof (short));
775 pseudo_forbidden_regs
= XNEWVEC (HARD_REG_SET
, max_regno
);
776 pseudo_previous_regs
= XCNEWVEC (HARD_REG_SET
, max_regno
);
778 CLEAR_HARD_REG_SET (bad_spill_regs_global
);
780 init_eliminable_invariants (first
, true);
783 /* Alter each pseudo-reg rtx to contain its hard reg number. Assign
784 stack slots to the pseudos that lack hard regs or equivalents.
785 Do not touch virtual registers. */
787 temp_pseudo_reg_arr
= XNEWVEC (int, max_regno
- LAST_VIRTUAL_REGISTER
- 1);
788 for (n
= 0, i
= LAST_VIRTUAL_REGISTER
+ 1; i
< max_regno
; i
++)
789 temp_pseudo_reg_arr
[n
++] = i
;
792 /* Ask IRA to order pseudo-registers for better stack slot sharing. */
794 ira_sort_regnos_for_alter_reg (temp_pseudo_reg_arr
, n
, reg_max_ref_width
);
796 for (i
= 0; i
< n
; i
++)
797 alter_reg (temp_pseudo_reg_arr
[i
], -1, false);
799 /* If we have some registers we think can be eliminated, scan all insns to
800 see if there is an insn that sets one of these registers to something
801 other than itself plus a constant. If so, the register cannot be
802 eliminated. Doing this scan here eliminates an extra pass through the
804 main reload loop in the most common case where register elimination cannot be done. */
805 for (insn
= first
; insn
&& num_eliminable
; insn
= NEXT_INSN (insn
))
807 note_stores (PATTERN (insn
), mark_not_eliminable
, NULL
);
809 maybe_fix_stack_asms ();
811 insns_need_reload
= 0;
812 something_needs_elimination
= 0;
814 /* Initialize to -1, which means take the first spill register. */
817 /* Spill any hard regs that we know we can't eliminate. */
818 CLEAR_HARD_REG_SET (used_spill_regs
);
819 /* There can be multiple ways to eliminate a register;
820 they should be listed adjacently.
821 Elimination for any register fails only if all possible ways fail. */
822 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; )
825 int can_eliminate
= 0;
828 can_eliminate
|= ep
->can_eliminate
;
831 while (ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
] && ep
->from
== from
);
833 spill_hard_reg (from
, 1);
836 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
837 if (frame_pointer_needed
)
838 spill_hard_reg (HARD_FRAME_POINTER_REGNUM
, 1);
840 finish_spills (global
);
842 /* From now on, we may need to generate moves differently. We may also
843 allow modifications of insns which cause them to not be recognized.
844 Any such modifications will be cleaned up during reload itself. */
845 reload_in_progress
= 1;
847 /* This loop scans the entire function each go-round
848 and repeats until one repetition spills no additional hard regs. */
851 int something_changed
;
853 HOST_WIDE_INT starting_frame_size
;
855 starting_frame_size
= get_frame_size ();
856 something_was_spilled
= false;
858 set_initial_elim_offsets ();
859 set_initial_label_offsets ();
861 /* For each pseudo register that has an equivalent location defined,
862 try to eliminate any eliminable registers (such as the frame pointer)
863 assuming initial offsets for the replacement register, which
866 If the resulting location is directly addressable, substitute
867 the MEM we just got directly for the old REG.
869 If it is not addressable but is a constant or the sum of a hard reg
870 and constant, it is probably not addressable because the constant is
871 out of range, in that case record the address; we will generate
872 hairy code to compute the address in a register each time it is
873 needed. Similarly if it is a hard register, but one that is not
874 valid as an address register.
876 If the location is not addressable, but does not have one of the
877 above forms, assign a stack slot. We have to do this to avoid the
878 potential of producing lots of reloads if, e.g., a location involves
879 a pseudo that didn't get a hard register and has an equivalent memory
880 location that also involves a pseudo that didn't get a hard register.
882 Perhaps at some point we will improve reload_when_needed handling
883 so this problem goes away. But that's very hairy. */
885 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
886 if (reg_renumber
[i
] < 0 && reg_equiv_memory_loc (i
))
888 rtx x
= eliminate_regs (reg_equiv_memory_loc (i
), VOIDmode
,
891 if (strict_memory_address_addr_space_p
892 (GET_MODE (regno_reg_rtx
[i
]), XEXP (x
, 0),
894 reg_equiv_mem (i
) = x
, reg_equiv_address (i
) = 0;
895 else if (CONSTANT_P (XEXP (x
, 0))
896 || (REG_P (XEXP (x
, 0))
897 && REGNO (XEXP (x
, 0)) < FIRST_PSEUDO_REGISTER
)
898 || (GET_CODE (XEXP (x
, 0)) == PLUS
899 && REG_P (XEXP (XEXP (x
, 0), 0))
900 && (REGNO (XEXP (XEXP (x
, 0), 0))
901 < FIRST_PSEUDO_REGISTER
)
902 && CONSTANT_P (XEXP (XEXP (x
, 0), 1))))
903 reg_equiv_address (i
) = XEXP (x
, 0), reg_equiv_mem (i
) = 0;
906 /* Make a new stack slot. Then indicate that something
907 changed so we go back and recompute offsets for
908 eliminable registers because the allocation of memory
909 below might change some offset. reg_equiv_{mem,address}
910 will be set up for this pseudo on the next pass around the loop. */
912 reg_equiv_memory_loc (i
) = 0;
913 reg_equiv_init (i
) = 0;
914 alter_reg (i
, -1, true);
918 if (caller_save_needed
)
921 /* If we allocated another stack slot, redo elimination bookkeeping. */
922 if (something_was_spilled
|| starting_frame_size
!= get_frame_size ())
924 if (starting_frame_size
&& crtl
->stack_alignment_needed
)
926 /* If we have a stack frame, we must align it now. The
927 stack size may be a part of the offset computation for
928 register elimination. So if this changes the stack size,
929 then repeat the elimination bookkeeping. We don't
930 realign when there is no stack, as that will cause a
931 stack frame when none is needed should
932 STARTING_FRAME_OFFSET not be already aligned to
934 assign_stack_local (BLKmode
, 0, crtl
->stack_alignment_needed
);
935 if (starting_frame_size
!= get_frame_size ())
939 if (caller_save_needed
)
941 save_call_clobbered_regs ();
942 /* That might have allocated new insn_chain structures. */
943 reload_firstobj
= XOBNEWVAR (&reload_obstack
, char, 0);
946 calculate_needs_all_insns (global
);
948 if (! ira_conflicts_p
)
949 /* Don't do it for IRA. We need this info because we don't
950 change live_throughout and dead_or_set for chains when IRA
952 CLEAR_REG_SET (&spilled_pseudos
);
956 something_changed
= 0;
958 /* If we allocated any new memory locations, make another pass
959 since it might have changed elimination offsets. */
960 if (something_was_spilled
|| starting_frame_size
!= get_frame_size ())
961 something_changed
= 1;
963 /* Even if the frame size remained the same, we might still have
964 changed elimination offsets, e.g. if find_reloads called
965 force_const_mem requiring the back end to allocate a constant
966 pool base register that needs to be saved on the stack. */
967 else if (!verify_initial_elim_offsets ())
968 something_changed
= 1;
971 HARD_REG_SET to_spill
;
972 CLEAR_HARD_REG_SET (to_spill
);
973 update_eliminables (&to_spill
);
974 AND_COMPL_HARD_REG_SET (used_spill_regs
, to_spill
);
976 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
977 if (TEST_HARD_REG_BIT (to_spill
, i
))
979 spill_hard_reg (i
, 1);
982 /* Regardless of the state of spills, if we previously had
983 a register that we thought we could eliminate, but now can
984 not eliminate, we must run another pass.
986 Consider pseudos which have an entry in reg_equiv_* which
987 reference an eliminable register. We must make another pass
988 to update reg_equiv_* so that we do not substitute in the
989 old value from when we thought the elimination could be
991 something_changed
= 1;
995 select_reload_regs ();
999 if (insns_need_reload
!= 0 || did_spill
)
1000 something_changed
|= finish_spills (global
);
1002 if (! something_changed
)
1005 if (caller_save_needed
)
1006 delete_caller_save_insns ();
1008 obstack_free (&reload_obstack
, reload_firstobj
);
1011 /* If global-alloc was run, notify it of any register eliminations we have
1014 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
1015 if (ep
->can_eliminate
)
1016 mark_elimination (ep
->from
, ep
->to
);
1018 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1019 If that insn didn't set the register (i.e., it copied the register to
1020 memory), just delete that insn instead of the equivalencing insn plus
1021 anything now dead. If we call delete_dead_insn on that insn, we may
1022 delete the insn that actually sets the register if the register dies
1023 there and that is incorrect. */
1025 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
1027 if (reg_renumber
[i
] < 0 && reg_equiv_init (i
) != 0)
1030 for (list
= reg_equiv_init (i
); list
; list
= XEXP (list
, 1))
1032 rtx equiv_insn
= XEXP (list
, 0);
1034 /* If we already deleted the insn or if it may trap, we can't
1035 delete it. The latter case shouldn't happen, but can
1036 if an insn has a variable address, gets a REG_EH_REGION
1037 note added to it, and then gets converted into a load
1038 from a constant address. */
1039 if (NOTE_P (equiv_insn
)
1040 || can_throw_internal (equiv_insn
))
1042 else if (reg_set_p (regno_reg_rtx
[i
], PATTERN (equiv_insn
)))
1043 delete_dead_insn (equiv_insn
);
1045 SET_INSN_DELETED (equiv_insn
);
1050 /* Use the reload registers where necessary
1051 by generating move instructions to move the must-be-register
1052 values into or out of the reload registers. */
1054 if (insns_need_reload
!= 0 || something_needs_elimination
1055 || something_needs_operands_changed
)
1057 HOST_WIDE_INT old_frame_size
= get_frame_size ();
1059 reload_as_needed (global
);
1061 gcc_assert (old_frame_size
== get_frame_size ());
1063 gcc_assert (verify_initial_elim_offsets ());
1066 /* If we were able to eliminate the frame pointer, show that it is no
1067 longer live at the start of any basic block. If it ls live by
1068 virtue of being in a pseudo, that pseudo will be marked live
1069 and hence the frame pointer will be known to be live via that
1072 if (! frame_pointer_needed
)
1074 bitmap_clear_bit (df_get_live_in (bb
), HARD_FRAME_POINTER_REGNUM
);
1076 /* Come here (with failure set nonzero) if we can't get enough spill
1080 CLEAR_REG_SET (&changed_allocation_pseudos
);
1081 CLEAR_REG_SET (&spilled_pseudos
);
1082 reload_in_progress
= 0;
1084 /* Now eliminate all pseudo regs by modifying them into
1085 their equivalent memory references.
1086 The REG-rtx's for the pseudos are modified in place,
1087 so all insns that used to refer to them now refer to memory.
1089 For a reg that has a reg_equiv_address, all those insns
1090 were changed by reloading so that no insns refer to it any longer;
1091 but the DECL_RTL of a variable decl may refer to it,
1092 and if so this causes the debugging info to mention the variable. */
1094 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
1098 if (reg_equiv_mem (i
))
1099 addr
= XEXP (reg_equiv_mem (i
), 0);
1101 if (reg_equiv_address (i
))
1102 addr
= reg_equiv_address (i
);
1106 if (reg_renumber
[i
] < 0)
1108 rtx reg
= regno_reg_rtx
[i
];
1110 REG_USERVAR_P (reg
) = 0;
1111 PUT_CODE (reg
, MEM
);
1112 XEXP (reg
, 0) = addr
;
1113 if (reg_equiv_memory_loc (i
))
1114 MEM_COPY_ATTRIBUTES (reg
, reg_equiv_memory_loc (i
));
1117 MEM_IN_STRUCT_P (reg
) = MEM_SCALAR_P (reg
) = 0;
1118 MEM_ATTRS (reg
) = 0;
1120 MEM_NOTRAP_P (reg
) = 1;
1122 else if (reg_equiv_mem (i
))
1123 XEXP (reg_equiv_mem (i
), 0) = addr
;
1126 /* We don't want complex addressing modes in debug insns
1127 if simpler ones will do, so delegitimize equivalences
1129 if (MAY_HAVE_DEBUG_INSNS
&& reg_renumber
[i
] < 0)
1131 rtx reg
= regno_reg_rtx
[i
];
1135 if (reg_equiv_constant (i
))
1136 equiv
= reg_equiv_constant (i
);
1137 else if (reg_equiv_invariant (i
))
1138 equiv
= reg_equiv_invariant (i
);
1139 else if (reg
&& MEM_P (reg
))
1140 equiv
= targetm
.delegitimize_address (reg
);
1141 else if (reg
&& REG_P (reg
) && (int)REGNO (reg
) != i
)
1147 for (use
= DF_REG_USE_CHAIN (i
); use
; use
= next
)
1149 insn
= DF_REF_INSN (use
);
1151 /* Make sure the next ref is for a different instruction,
1152 so that we're not affected by the rescan. */
1153 next
= DF_REF_NEXT_REG (use
);
1154 while (next
&& DF_REF_INSN (next
) == insn
)
1155 next
= DF_REF_NEXT_REG (next
);
1157 if (DEBUG_INSN_P (insn
))
1161 INSN_VAR_LOCATION_LOC (insn
) = gen_rtx_UNKNOWN_VAR_LOC ();
1162 df_insn_rescan_debug_internal (insn
);
1165 INSN_VAR_LOCATION_LOC (insn
)
1166 = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (insn
),
1173 /* We must set reload_completed now since the cleanup_subreg_operands call
1174 below will re-recognize each insn and reload may have generated insns
1175 which are only valid during and after reload. */
1176 reload_completed
= 1;
1178 /* Make a pass over all the insns and delete all USEs which we inserted
1179 only to tag a REG_EQUAL note on them. Remove all REG_DEAD and REG_UNUSED
1180 notes. Delete all CLOBBER insns, except those that refer to the return
1181 value and the special mem:BLK CLOBBERs added to prevent the scheduler
1182 from misarranging variable-array code, and simplify (subreg (reg))
1183 operands. Strip and regenerate REG_INC notes that may have been moved
1186 for (insn
= first
; insn
; insn
= NEXT_INSN (insn
))
1192 replace_pseudos_in (& CALL_INSN_FUNCTION_USAGE (insn
),
1193 VOIDmode
, CALL_INSN_FUNCTION_USAGE (insn
));
1195 if ((GET_CODE (PATTERN (insn
)) == USE
1196 /* We mark with QImode USEs introduced by reload itself. */
1197 && (GET_MODE (insn
) == QImode
1198 || find_reg_note (insn
, REG_EQUAL
, NULL_RTX
)))
1199 || (GET_CODE (PATTERN (insn
)) == CLOBBER
1200 && (!MEM_P (XEXP (PATTERN (insn
), 0))
1201 || GET_MODE (XEXP (PATTERN (insn
), 0)) != BLKmode
1202 || (GET_CODE (XEXP (XEXP (PATTERN (insn
), 0), 0)) != SCRATCH
1203 && XEXP (XEXP (PATTERN (insn
), 0), 0)
1204 != stack_pointer_rtx
))
1205 && (!REG_P (XEXP (PATTERN (insn
), 0))
1206 || ! REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn
), 0)))))
1212 /* Some CLOBBERs may survive until here and still reference unassigned
1213 pseudos with const equivalent, which may in turn cause ICE in later
1214 passes if the reference remains in place. */
1215 if (GET_CODE (PATTERN (insn
)) == CLOBBER
)
1216 replace_pseudos_in (& XEXP (PATTERN (insn
), 0),
1217 VOIDmode
, PATTERN (insn
));
1219 /* Discard obvious no-ops, even without -O. This optimization
1220 is fast and doesn't interfere with debugging. */
1221 if (NONJUMP_INSN_P (insn
)
1222 && GET_CODE (PATTERN (insn
)) == SET
1223 && REG_P (SET_SRC (PATTERN (insn
)))
1224 && REG_P (SET_DEST (PATTERN (insn
)))
1225 && (REGNO (SET_SRC (PATTERN (insn
)))
1226 == REGNO (SET_DEST (PATTERN (insn
)))))
1232 pnote
= ®_NOTES (insn
);
1235 if (REG_NOTE_KIND (*pnote
) == REG_DEAD
1236 || REG_NOTE_KIND (*pnote
) == REG_UNUSED
1237 || REG_NOTE_KIND (*pnote
) == REG_INC
)
1238 *pnote
= XEXP (*pnote
, 1);
1240 pnote
= &XEXP (*pnote
, 1);
1244 add_auto_inc_notes (insn
, PATTERN (insn
));
1247 /* Simplify (subreg (reg)) if it appears as an operand. */
1248 cleanup_subreg_operands (insn
);
1250 /* Clean up invalid ASMs so that they don't confuse later passes.
1252 if (asm_noperands (PATTERN (insn
)) >= 0)
1254 extract_insn (insn
);
1255 if (!constrain_operands (1))
1257 error_for_asm (insn
,
1258 "%<asm%> operand has impossible constraints");
1265 /* If we are doing generic stack checking, give a warning if this
1266 function's frame size is larger than we expect. */
1267 if (flag_stack_check
== GENERIC_STACK_CHECK
)
1269 HOST_WIDE_INT size
= get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE
;
1270 static int verbose_warned
= 0;
1272 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1273 if (df_regs_ever_live_p (i
) && ! fixed_regs
[i
] && call_used_regs
[i
])
1274 size
+= UNITS_PER_WORD
;
1276 if (size
> STACK_CHECK_MAX_FRAME_SIZE
)
1278 warning (0, "frame size too large for reliable stack checking");
1279 if (! verbose_warned
)
1281 warning (0, "try reducing the number of local variables");
1287 free (temp_pseudo_reg_arr
);
1289 /* Indicate that we no longer have known memory locations or constants. */
1292 free (reg_max_ref_width
);
1293 free (reg_old_renumber
);
1294 free (pseudo_previous_regs
);
1295 free (pseudo_forbidden_regs
);
1297 CLEAR_HARD_REG_SET (used_spill_regs
);
1298 for (i
= 0; i
< n_spills
; i
++)
1299 SET_HARD_REG_BIT (used_spill_regs
, spill_regs
[i
]);
1301 /* Free all the insn_chain structures at once. */
1302 obstack_free (&reload_obstack
, reload_startobj
);
1303 unused_insn_chains
= 0;
1305 inserted
= fixup_abnormal_edges ();
1307 /* We've possibly turned single trapping insn into multiple ones. */
1308 if (cfun
->can_throw_non_call_exceptions
)
1311 blocks
= sbitmap_alloc (last_basic_block
);
1312 sbitmap_ones (blocks
);
1313 find_many_sub_basic_blocks (blocks
);
1314 sbitmap_free (blocks
);
1318 commit_edge_insertions ();
1320 /* Replacing pseudos with their memory equivalents might have
1321 created shared rtx. Subsequent passes would get confused
1322 by this, so unshare everything here. */
1323 unshare_all_rtl_again (first
);
1325 #ifdef STACK_BOUNDARY
1326 /* init_emit has set the alignment of the hard frame pointer
1327 to STACK_BOUNDARY. It is very likely no longer valid if
1328 the hard frame pointer was used for register allocation. */
1329 if (!frame_pointer_needed
)
1330 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM
) = BITS_PER_UNIT
;
1333 VEC_free (rtx_p
, heap
, substitute_stack
);
1335 gcc_assert (bitmap_empty_p (&spilled_pseudos
));
1337 reload_completed
= !failure
;
1342 /* Yet another special case. Unfortunately, reg-stack forces people to
1343 write incorrect clobbers in asm statements. These clobbers must not
1344 cause the register to appear in bad_spill_regs, otherwise we'll call
1345 fatal_insn later. We clear the corresponding regnos in the live
1346 register sets to avoid this.
1347 The whole thing is rather sick, I'm afraid. */
1350 maybe_fix_stack_asms (void)
1353 const char *constraints
[MAX_RECOG_OPERANDS
];
1354 enum machine_mode operand_mode
[MAX_RECOG_OPERANDS
];
1355 struct insn_chain
*chain
;
1357 for (chain
= reload_insn_chain
; chain
!= 0; chain
= chain
->next
)
1360 HARD_REG_SET clobbered
, allowed
;
1363 if (! INSN_P (chain
->insn
)
1364 || (noperands
= asm_noperands (PATTERN (chain
->insn
))) < 0)
1366 pat
= PATTERN (chain
->insn
);
1367 if (GET_CODE (pat
) != PARALLEL
)
1370 CLEAR_HARD_REG_SET (clobbered
);
1371 CLEAR_HARD_REG_SET (allowed
);
1373 /* First, make a mask of all stack regs that are clobbered. */
1374 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
1376 rtx t
= XVECEXP (pat
, 0, i
);
1377 if (GET_CODE (t
) == CLOBBER
&& STACK_REG_P (XEXP (t
, 0)))
1378 SET_HARD_REG_BIT (clobbered
, REGNO (XEXP (t
, 0)));
1381 /* Get the operand values and constraints out of the insn. */
1382 decode_asm_operands (pat
, recog_data
.operand
, recog_data
.operand_loc
,
1383 constraints
, operand_mode
, NULL
);
1385 /* For every operand, see what registers are allowed. */
1386 for (i
= 0; i
< noperands
; i
++)
1388 const char *p
= constraints
[i
];
1389 /* For every alternative, we compute the class of registers allowed
1390 for reloading in CLS, and merge its contents into the reg set
1392 int cls
= (int) NO_REGS
;
1398 if (c
== '\0' || c
== ',' || c
== '#')
1400 /* End of one alternative - mark the regs in the current
1401 class, and reset the class. */
1402 IOR_HARD_REG_SET (allowed
, reg_class_contents
[cls
]);
1408 } while (c
!= '\0' && c
!= ',');
1416 case '=': case '+': case '*': case '%': case '?': case '!':
1417 case '0': case '1': case '2': case '3': case '4': case '<':
1418 case '>': case 'V': case 'o': case '&': case 'E': case 'F':
1419 case 's': case 'i': case 'n': case 'X': case 'I': case 'J':
1420 case 'K': case 'L': case 'M': case 'N': case 'O': case 'P':
1421 case TARGET_MEM_CONSTRAINT
:
1425 cls
= (int) reg_class_subunion
[cls
]
1426 [(int) base_reg_class (VOIDmode
, ADDRESS
, SCRATCH
)];
1431 cls
= (int) reg_class_subunion
[cls
][(int) GENERAL_REGS
];
1435 if (EXTRA_ADDRESS_CONSTRAINT (c
, p
))
1436 cls
= (int) reg_class_subunion
[cls
]
1437 [(int) base_reg_class (VOIDmode
, ADDRESS
, SCRATCH
)];
1439 cls
= (int) reg_class_subunion
[cls
]
1440 [(int) REG_CLASS_FROM_CONSTRAINT (c
, p
)];
1442 p
+= CONSTRAINT_LEN (c
, p
);
1445 /* Those of the registers which are clobbered, but allowed by the
1446 constraints, must be usable as reload registers. So clear them
1447 out of the life information. */
1448 AND_HARD_REG_SET (allowed
, clobbered
);
1449 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1450 if (TEST_HARD_REG_BIT (allowed
, i
))
1452 CLEAR_REGNO_REG_SET (&chain
->live_throughout
, i
);
1453 CLEAR_REGNO_REG_SET (&chain
->dead_or_set
, i
);
/* NOTE(review): lossy listing fragment -- the tail of the header comment,
   the "static void" return type and the function braces are not visible
   here.  Visible statements stash the global reload state into CHAIN.  */
1460 /* Copy the global variables n_reloads and rld into the corresponding elts
1463 copy_reloads (struct insn_chain
*chain
)
/* Record how many reloads were computed for this insn.  */
1465 chain
->n_reloads
= n_reloads
;
/* Allocate CHAIN's private copy of the reload descriptors on the
   reload obstack...  */
1466 chain
->rld
= XOBNEWVEC (&reload_obstack
, struct reload
, n_reloads
);
/* ...and copy the global rld[] contents into it.  */
1467 memcpy (chain
->rld
, rld
, n_reloads
* sizeof (struct reload
));
/* Remember the obstack position so per-insn data can be freed later.  */
1468 reload_insn_firstobj
= XOBNEWVAR (&reload_obstack
, char, 0);
1471 /* Walk the chain of insns, and determine for each whether it needs reloads
1472 and/or eliminations. Build the corresponding insns_need_reload list, and
1473 set something_needs_elimination as appropriate. */
1475 calculate_needs_all_insns (int global
)
1477 struct insn_chain
**pprev_reload
= &insns_need_reload
;
1478 struct insn_chain
*chain
, *next
= 0;
1480 something_needs_elimination
= 0;
1482 reload_insn_firstobj
= XOBNEWVAR (&reload_obstack
, char, 0);
1483 for (chain
= reload_insn_chain
; chain
!= 0; chain
= next
)
1485 rtx insn
= chain
->insn
;
1489 /* Clear out the shortcuts. */
1490 chain
->n_reloads
= 0;
1491 chain
->need_elim
= 0;
1492 chain
->need_reload
= 0;
1493 chain
->need_operand_change
= 0;
1495 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
1496 include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
1497 what effects this has on the known offsets at labels. */
1499 if (LABEL_P (insn
) || JUMP_P (insn
)
1500 || (INSN_P (insn
) && REG_NOTES (insn
) != 0))
1501 set_label_offsets (insn
, insn
, 0);
1505 rtx old_body
= PATTERN (insn
);
1506 int old_code
= INSN_CODE (insn
);
1507 rtx old_notes
= REG_NOTES (insn
);
1508 int did_elimination
= 0;
1509 int operands_changed
= 0;
1510 rtx set
= single_set (insn
);
1512 /* Skip insns that only set an equivalence. */
1513 if (set
&& REG_P (SET_DEST (set
))
1514 && reg_renumber
[REGNO (SET_DEST (set
))] < 0
1515 && (reg_equiv_constant (REGNO (SET_DEST (set
)))
1516 || (reg_equiv_invariant (REGNO (SET_DEST (set
)))))
1517 && reg_equiv_init (REGNO (SET_DEST (set
))))
1520 /* If needed, eliminate any eliminable registers. */
1521 if (num_eliminable
|| num_eliminable_invariants
)
1522 did_elimination
= eliminate_regs_in_insn (insn
, 0);
1524 /* Analyze the instruction. */
1525 operands_changed
= find_reloads (insn
, 0, spill_indirect_levels
,
1526 global
, spill_reg_order
);
1528 /* If a no-op set needs more than one reload, this is likely
1529 to be something that needs input address reloads. We
1530 can't get rid of this cleanly later, and it is of no use
1531 anyway, so discard it now.
1532 We only do this when expensive_optimizations is enabled,
1533 since this complements reload inheritance / output
1534 reload deletion, and it can make debugging harder. */
1535 if (flag_expensive_optimizations
&& n_reloads
> 1)
1537 rtx set
= single_set (insn
);
1540 ((SET_SRC (set
) == SET_DEST (set
)
1541 && REG_P (SET_SRC (set
))
1542 && REGNO (SET_SRC (set
)) >= FIRST_PSEUDO_REGISTER
)
1543 || (REG_P (SET_SRC (set
)) && REG_P (SET_DEST (set
))
1544 && reg_renumber
[REGNO (SET_SRC (set
))] < 0
1545 && reg_renumber
[REGNO (SET_DEST (set
))] < 0
1546 && reg_equiv_memory_loc (REGNO (SET_SRC (set
))) != NULL
1547 && reg_equiv_memory_loc (REGNO (SET_DEST (set
))) != NULL
1548 && rtx_equal_p (reg_equiv_memory_loc (REGNO (SET_SRC (set
))),
1549 reg_equiv_memory_loc (REGNO (SET_DEST (set
)))))))
1551 if (ira_conflicts_p
)
1552 /* Inform IRA about the insn deletion. */
1553 ira_mark_memory_move_deletion (REGNO (SET_DEST (set
)),
1554 REGNO (SET_SRC (set
)));
1556 /* Delete it from the reload chain. */
1558 chain
->prev
->next
= next
;
1560 reload_insn_chain
= next
;
1562 next
->prev
= chain
->prev
;
1563 chain
->next
= unused_insn_chains
;
1564 unused_insn_chains
= chain
;
1569 update_eliminable_offsets ();
1571 /* Remember for later shortcuts which insns had any reloads or
1572 register eliminations. */
1573 chain
->need_elim
= did_elimination
;
1574 chain
->need_reload
= n_reloads
> 0;
1575 chain
->need_operand_change
= operands_changed
;
1577 /* Discard any register replacements done. */
1578 if (did_elimination
)
1580 obstack_free (&reload_obstack
, reload_insn_firstobj
);
1581 PATTERN (insn
) = old_body
;
1582 INSN_CODE (insn
) = old_code
;
1583 REG_NOTES (insn
) = old_notes
;
1584 something_needs_elimination
= 1;
1587 something_needs_operands_changed
|= operands_changed
;
1591 copy_reloads (chain
);
1592 *pprev_reload
= chain
;
1593 pprev_reload
= &chain
->next_need_reload
;
1600 /* This function is called from the register allocator to set up estimates
1601 for the cost of eliminating pseudos which have REG_EQUIV equivalences to
1602 an invariant. The structure is similar to calculate_needs_all_insns. */
1605 calculate_elim_costs_all_insns (void)
1607 int *reg_equiv_init_cost
;
1611 reg_equiv_init_cost
= XCNEWVEC (int, max_regno
);
1613 init_eliminable_invariants (get_insns (), false);
1615 set_initial_elim_offsets ();
1616 set_initial_label_offsets ();
1623 FOR_BB_INSNS (bb
, insn
)
1625 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
1626 include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
1627 what effects this has on the known offsets at labels. */
1629 if (LABEL_P (insn
) || JUMP_P (insn
)
1630 || (INSN_P (insn
) && REG_NOTES (insn
) != 0))
1631 set_label_offsets (insn
, insn
, 0);
1635 rtx set
= single_set (insn
);
1637 /* Skip insns that only set an equivalence. */
1638 if (set
&& REG_P (SET_DEST (set
))
1639 && reg_renumber
[REGNO (SET_DEST (set
))] < 0
1640 && (reg_equiv_constant (REGNO (SET_DEST (set
)))
1641 || reg_equiv_invariant (REGNO (SET_DEST (set
)))))
1643 unsigned regno
= REGNO (SET_DEST (set
));
1644 rtx init
= reg_equiv_init (regno
);
1647 rtx t
= eliminate_regs_1 (SET_SRC (set
), VOIDmode
, insn
,
1649 int cost
= set_src_cost (t
, optimize_bb_for_speed_p (bb
));
1650 int freq
= REG_FREQ_FROM_BB (bb
);
1652 reg_equiv_init_cost
[regno
] = cost
* freq
;
1656 /* If needed, eliminate any eliminable registers. */
1657 if (num_eliminable
|| num_eliminable_invariants
)
1658 elimination_costs_in_insn (insn
);
1661 update_eliminable_offsets ();
1665 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
1667 if (reg_equiv_invariant (i
))
1669 if (reg_equiv_init (i
))
1671 int cost
= reg_equiv_init_cost
[i
];
1674 "Reg %d has equivalence, initial gains %d\n", i
, cost
);
1676 ira_adjust_equiv_reg_cost (i
, cost
);
1682 "Reg %d had equivalence, but can't be eliminated\n",
1684 ira_adjust_equiv_reg_cost (i
, 0);
1689 free (reg_equiv_init_cost
);
/* NOTE(review): lossy listing fragment -- the "static int" return type,
   braces, the "int t;" declaration and the "if (t != 0) return t;" lines
   that separate the comparison keys are not visible here.  The visible
   code computes a sequence of tie-breaking keys for qsort.  */
1692 /* Comparison function for qsort to decide which of two reloads
1693 should be handled first. *P1 and *P2 are the reload numbers. */
1696 reload_reg_class_lower (const void *r1p
, const void *r2p
)
/* Decode the two reload numbers from the qsort element pointers.  */
1698 int r1
= *(const short *) r1p
, r2
= *(const short *) r2p
;
1701 /* Consider required reloads before optional ones. */
1702 t
= rld
[r1
].optional
- rld
[r2
].optional
;
1706 /* Count all solitary classes before non-solitary ones. */
1707 t
= ((reg_class_size
[(int) rld
[r2
].rclass
] == 1)
1708 - (reg_class_size
[(int) rld
[r1
].rclass
] == 1));
1712 /* Aside from solitaires, consider all multi-reg groups first. */
1713 t
= rld
[r2
].nregs
- rld
[r1
].nregs
;
1717 /* Consider reloads in order of increasing reg-class number. */
1718 t
= (int) rld
[r1
].rclass
- (int) rld
[r2
].rclass
;
1722 /* If reloads are equally urgent, sort by reload number,
1723 so that the results of qsort leave nothing to chance. */
/* NOTE(review): file-scope per-insn cost tables, each indexed by hard
   register number; reset and refilled by order_regs_for_reload below.  */
1727 /* The cost of spilling each hard reg. */
1728 static int spill_cost
[FIRST_PSEUDO_REGISTER
];
1730 /* When spilling multiple hard registers, we use SPILL_COST for the first
1731 spilled hard reg and SPILL_ADD_COST for subsequent regs. SPILL_ADD_COST
1732 only the first hard reg for a multi-reg pseudo. */
1733 static int spill_add_cost
[FIRST_PSEUDO_REGISTER
];
/* -1 means the hard reg is not currently occupied by any pseudo (see
   count_pseudo / count_spilled_pseudo).  */
1735 /* Map of hard regno to pseudo regno currently occupying the hard
1737 static int hard_regno_to_pseudo_regno
[FIRST_PSEUDO_REGISTER
];
/* NOTE(review): lossy listing fragment -- the "static void" header, the
   braces, the early "return" after the guard, and the declaration of
   "nregs" (used below) are not visible here.  */
1739 /* Update the spill cost arrays, considering that pseudo REG is live. */
1742 count_pseudo (int reg
)
/* Weight all contributions by the pseudo's use frequency.  */
1744 int freq
= REG_FREQ (reg
);
/* R is the hard reg assigned to this pseudo (negative if none).  */
1745 int r
= reg_renumber
[reg
];
/* Skip pseudos already counted, already spilled, or (when IRA is in
   use) left without a hard register.  */
1748 if (REGNO_REG_SET_P (&pseudos_counted
, reg
)
1749 || REGNO_REG_SET_P (&spilled_pseudos
, reg
)
1750 /* Ignore spilled pseudo-registers which can be here only if IRA
1752 || (ira_conflicts_p
&& r
< 0))
1755 SET_REGNO_REG_SET (&pseudos_counted
, reg
);
1757 gcc_assert (r
>= 0);
/* The first hard reg of the pseudo gets the add-cost; each hard reg it
   covers is charged the full cost and marked as occupied by REG.  */
1759 spill_add_cost
[r
] += freq
;
1760 nregs
= hard_regno_nregs
[r
][PSEUDO_REGNO_MODE (reg
)];
1763 hard_regno_to_pseudo_regno
[r
+ nregs
] = reg
;
1764 spill_cost
[r
+ nregs
] += freq
;
/* NOTE(review): lossy listing fragment -- the "static void" header, the
   braces, the loop-index declaration and the bodies of the two
   EXECUTE_IF_SET_IN_REG_SET loops are not visible in this listing.  */
1768 /* Calculate the SPILL_COST and SPILL_ADD_COST arrays and determine the
1769 contents of BAD_SPILL_REGS for the insn described by CHAIN. */
1772 order_regs_for_reload (struct insn_chain
*chain
)
1775 HARD_REG_SET used_by_pseudos
;
1776 HARD_REG_SET used_by_pseudos2
;
1777 reg_set_iterator rsi
;
/* Fixed registers can never be used as reload regs.  */
1779 COPY_HARD_REG_SET (bad_spill_regs
, fixed_reg_set
);
/* Reset the per-insn cost tables and the hard-reg occupancy map.  */
1781 memset (spill_cost
, 0, sizeof spill_cost
);
1782 memset (spill_add_cost
, 0, sizeof spill_add_cost
);
1783 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1784 hard_regno_to_pseudo_regno
[i
] = -1;
1786 /* Count number of uses of each hard reg by pseudo regs allocated to it
1787 and then order them by decreasing use. First exclude hard registers
1788 that are live in or across this insn. */
1790 REG_SET_TO_HARD_REG_SET (used_by_pseudos
, &chain
->live_throughout
);
1791 REG_SET_TO_HARD_REG_SET (used_by_pseudos2
, &chain
->dead_or_set
);
1792 IOR_HARD_REG_SET (bad_spill_regs
, used_by_pseudos
);
1793 IOR_HARD_REG_SET (bad_spill_regs
, used_by_pseudos2
);
1795 /* Now find out which pseudos are allocated to it, and update
1797 CLEAR_REG_SET (&pseudos_counted
);
/* Walk pseudos live through this insn, then pseudos set or dying in
   it; pseudos_counted prevents double counting between the two sets.  */
1799 EXECUTE_IF_SET_IN_REG_SET
1800 (&chain
->live_throughout
, FIRST_PSEUDO_REGISTER
, i
, rsi
)
1804 EXECUTE_IF_SET_IN_REG_SET
1805 (&chain
->dead_or_set
, FIRST_PSEUDO_REGISTER
, i
, rsi
)
1809 CLEAR_REG_SET (&pseudos_counted
);
/* NOTE(review): the tail of the comment introducing reload_order was lost
   in extraction; the array is sorted by reload_reg_class_lower above and
   consumed by find_reload_regs below.  */
1812 /* Vector of reload-numbers showing the order in which the reloads should
1814 static short reload_order
[MAX_RELOADS
];
1816 /* This is used to keep track of the spill regs used in one insn. */
1817 static HARD_REG_SET used_spill_regs_local
;
/* NOTE(review): lossy listing fragment -- the "static void" header, the
   braces and the early "return" taken when the guard below holds are not
   visible here.  */
1819 /* We decided to spill hard register SPILLED, which has a size of
1820 SPILLED_NREGS. Determine how pseudo REG, which is live during the insn,
1821 is affected. We will add it to SPILLED_PSEUDOS if necessary, and we will
1822 update SPILL_COST/SPILL_ADD_COST. */
1825 count_spilled_pseudo (int spilled
, int spilled_nregs
, int reg
)
1827 int freq
= REG_FREQ (reg
);
1828 int r
= reg_renumber
[reg
];
1829 int nregs
= hard_regno_nregs
[r
][PSEUDO_REGNO_MODE (reg
)];
/* Nothing to do if REG is already spilled, has no hard reg under IRA,
   or its hard regs [r, r+nregs) do not overlap the spilled range.  */
1831 /* Ignore spilled pseudo-registers which can be here only if IRA is
1833 if ((ira_conflicts_p
&& r
< 0)
1834 || REGNO_REG_SET_P (&spilled_pseudos
, reg
)
1835 || spilled
+ spilled_nregs
<= r
|| r
+ nregs
<= spilled
)
/* REG loses its hard reg: record it as spilled and back out the cost
   and occupancy contributions that count_pseudo added.  */
1838 SET_REGNO_REG_SET (&spilled_pseudos
, reg
);
1840 spill_add_cost
[r
] -= freq
;
1843 hard_regno_to_pseudo_regno
[r
+ nregs
] = -1;
1844 spill_cost
[r
+ nregs
] -= freq
;
1848 /* Find reload register to use for reload number ORDER. */
1851 find_reg (struct insn_chain
*chain
, int order
)
1853 int rnum
= reload_order
[order
];
1854 struct reload
*rl
= rld
+ rnum
;
1855 int best_cost
= INT_MAX
;
1857 unsigned int i
, j
, n
;
1859 HARD_REG_SET not_usable
;
1860 HARD_REG_SET used_by_other_reload
;
1861 reg_set_iterator rsi
;
1862 static int regno_pseudo_regs
[FIRST_PSEUDO_REGISTER
];
1863 static int best_regno_pseudo_regs
[FIRST_PSEUDO_REGISTER
];
1865 COPY_HARD_REG_SET (not_usable
, bad_spill_regs
);
1866 IOR_HARD_REG_SET (not_usable
, bad_spill_regs_global
);
1867 IOR_COMPL_HARD_REG_SET (not_usable
, reg_class_contents
[rl
->rclass
]);
1869 CLEAR_HARD_REG_SET (used_by_other_reload
);
1870 for (k
= 0; k
< order
; k
++)
1872 int other
= reload_order
[k
];
1874 if (rld
[other
].regno
>= 0 && reloads_conflict (other
, rnum
))
1875 for (j
= 0; j
< rld
[other
].nregs
; j
++)
1876 SET_HARD_REG_BIT (used_by_other_reload
, rld
[other
].regno
+ j
);
1879 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1881 #ifdef REG_ALLOC_ORDER
1882 unsigned int regno
= reg_alloc_order
[i
];
1884 unsigned int regno
= i
;
1887 if (! TEST_HARD_REG_BIT (not_usable
, regno
)
1888 && ! TEST_HARD_REG_BIT (used_by_other_reload
, regno
)
1889 && HARD_REGNO_MODE_OK (regno
, rl
->mode
))
1891 int this_cost
= spill_cost
[regno
];
1893 unsigned int this_nregs
= hard_regno_nregs
[regno
][rl
->mode
];
1895 for (j
= 1; j
< this_nregs
; j
++)
1897 this_cost
+= spill_add_cost
[regno
+ j
];
1898 if ((TEST_HARD_REG_BIT (not_usable
, regno
+ j
))
1899 || TEST_HARD_REG_BIT (used_by_other_reload
, regno
+ j
))
1905 if (ira_conflicts_p
)
1907 /* Ask IRA to find a better pseudo-register for
1909 for (n
= j
= 0; j
< this_nregs
; j
++)
1911 int r
= hard_regno_to_pseudo_regno
[regno
+ j
];
1915 if (n
== 0 || regno_pseudo_regs
[n
- 1] != r
)
1916 regno_pseudo_regs
[n
++] = r
;
1918 regno_pseudo_regs
[n
++] = -1;
1920 || ira_better_spill_reload_regno_p (regno_pseudo_regs
,
1921 best_regno_pseudo_regs
,
1928 best_regno_pseudo_regs
[j
] = regno_pseudo_regs
[j
];
1929 if (regno_pseudo_regs
[j
] < 0)
1936 if (rl
->in
&& REG_P (rl
->in
) && REGNO (rl
->in
) == regno
)
1938 if (rl
->out
&& REG_P (rl
->out
) && REGNO (rl
->out
) == regno
)
1940 if (this_cost
< best_cost
1941 /* Among registers with equal cost, prefer caller-saved ones, or
1942 use REG_ALLOC_ORDER if it is defined. */
1943 || (this_cost
== best_cost
1944 #ifdef REG_ALLOC_ORDER
1945 && (inv_reg_alloc_order
[regno
]
1946 < inv_reg_alloc_order
[best_reg
])
1948 && call_used_regs
[regno
]
1949 && ! call_used_regs
[best_reg
]
1954 best_cost
= this_cost
;
1962 fprintf (dump_file
, "Using reg %d for reload %d\n", best_reg
, rnum
);
1964 rl
->nregs
= hard_regno_nregs
[best_reg
][rl
->mode
];
1965 rl
->regno
= best_reg
;
1967 EXECUTE_IF_SET_IN_REG_SET
1968 (&chain
->live_throughout
, FIRST_PSEUDO_REGISTER
, j
, rsi
)
1970 count_spilled_pseudo (best_reg
, rl
->nregs
, j
);
1973 EXECUTE_IF_SET_IN_REG_SET
1974 (&chain
->dead_or_set
, FIRST_PSEUDO_REGISTER
, j
, rsi
)
1976 count_spilled_pseudo (best_reg
, rl
->nregs
, j
);
1979 for (i
= 0; i
< rl
->nregs
; i
++)
1981 gcc_assert (spill_cost
[best_reg
+ i
] == 0);
1982 gcc_assert (spill_add_cost
[best_reg
+ i
] == 0);
1983 gcc_assert (hard_regno_to_pseudo_regno
[best_reg
+ i
] == -1);
1984 SET_HARD_REG_BIT (used_spill_regs_local
, best_reg
+ i
);
1989 /* Find more reload regs to satisfy the remaining need of an insn, which
1991 Do it by ascending class number, since otherwise a reg
1992 might be spilled for a big class and might fail to count
1993 for a smaller class even though it belongs to that class. */
1996 find_reload_regs (struct insn_chain
*chain
)
2000 /* In order to be certain of getting the registers we need,
2001 we must sort the reloads into order of increasing register class.
2002 Then our grabbing of reload registers will parallel the process
2003 that provided the reload registers. */
2004 for (i
= 0; i
< chain
->n_reloads
; i
++)
2006 /* Show whether this reload already has a hard reg. */
2007 if (chain
->rld
[i
].reg_rtx
)
2009 int regno
= REGNO (chain
->rld
[i
].reg_rtx
);
2010 chain
->rld
[i
].regno
= regno
;
2012 = hard_regno_nregs
[regno
][GET_MODE (chain
->rld
[i
].reg_rtx
)];
2015 chain
->rld
[i
].regno
= -1;
2016 reload_order
[i
] = i
;
2019 n_reloads
= chain
->n_reloads
;
2020 memcpy (rld
, chain
->rld
, n_reloads
* sizeof (struct reload
));
2022 CLEAR_HARD_REG_SET (used_spill_regs_local
);
2025 fprintf (dump_file
, "Spilling for insn %d.\n", INSN_UID (chain
->insn
));
2027 qsort (reload_order
, n_reloads
, sizeof (short), reload_reg_class_lower
);
2029 /* Compute the order of preference for hard registers to spill. */
2031 order_regs_for_reload (chain
);
2033 for (i
= 0; i
< n_reloads
; i
++)
2035 int r
= reload_order
[i
];
2037 /* Ignore reloads that got marked inoperative. */
2038 if ((rld
[r
].out
!= 0 || rld
[r
].in
!= 0 || rld
[r
].secondary_p
)
2039 && ! rld
[r
].optional
2040 && rld
[r
].regno
== -1)
2041 if (! find_reg (chain
, i
))
2044 fprintf (dump_file
, "reload failure for reload %d\n", r
);
2045 spill_failure (chain
->insn
, rld
[r
].rclass
);
2051 COPY_HARD_REG_SET (chain
->used_spill_regs
, used_spill_regs_local
);
2052 IOR_HARD_REG_SET (used_spill_regs
, used_spill_regs_local
);
2054 memcpy (chain
->rld
, rld
, n_reloads
* sizeof (struct reload
));
/* NOTE(review): header comment, "static void" and braces were lost in
   extraction.  Visible code assigns reload registers for every insn on
   the insns_need_reload list.  */
2058 select_reload_regs (void)
2060 struct insn_chain
*chain
;
2062 /* Try to satisfy the needs for each insn. */
2063 for (chain
= insns_need_reload
; chain
!= 0;
2064 chain
= chain
->next_need_reload
)
2065 find_reload_regs (chain
);
/* NOTE(review): lossy listing fragment -- the tail of the header comment,
   the "static void" header, braces, the actual insn deletion, the "else"
   lines pairing with the two "if"s, and the outer loop advancing C past
   non-caller-save elements are not visible in this listing.  */
2068 /* Delete all insns that were inserted by emit_caller_save_insns during
2071 delete_caller_save_insns (void)
2073 struct insn_chain
*c
= reload_insn_chain
;
/* Unlink each leading caller-save element from the insn chain...  */
2077 while (c
!= 0 && c
->is_caller_save_insn
)
2079 struct insn_chain
*next
= c
->next
;
/* Keep the list head current when the first element is removed.  */
2082 if (c
== reload_insn_chain
)
2083 reload_insn_chain
= next
;
2087 next
->prev
= c
->prev
;
2089 c
->prev
->next
= next
;
/* ...and push the freed element onto the reuse list.  */
2090 c
->next
= unused_insn_chains
;
2091 unused_insn_chains
= c
;
2099 /* Handle the failure to find a register to spill.
2100 INSN should be one of the insns which needed this particular spill reg. */
2103 spill_failure (rtx insn
, enum reg_class rclass
)
/* For an asm statement, emit a user-level error tied to the asm;
   otherwise report a plain compiler error.  */
2105 if (asm_noperands (PATTERN (insn
)) >= 0)
2106 error_for_asm (insn
, "can%'t find a register in class %qs while "
2107 "reloading %<asm%>",
2108 reg_class_names
[rclass
]);
2111 error ("unable to find a register to spill in class %qs",
2112 reg_class_names
[rclass
]);
/* Dump the reloads for the offending insn to the dump file, then abort
   compilation showing the insn itself.  */
2116 fprintf (dump_file
, "\nReloads for insn # %d\n", INSN_UID (insn
));
2117 debug_reload_to_stream (dump_file
);
2119 fatal_insn ("this is the insn:", insn
);
2123 /* Delete an unneeded INSN and any previous insns who sole purpose is loading
2124 data that is dead in INSN. */
2127 delete_dead_insn (rtx insn
)
2129 rtx prev
= prev_active_insn (insn
);
2132 /* If the previous insn sets a register that dies in our insn make
2133 a note that we want to run DCE immediately after reload.
2135 We used to delete the previous insn & recurse, but that's wrong for
2136 block local equivalences. Instead of trying to figure out the exact
2137 circumstances where we can delete the potentially dead insns, just
2138 let DCE do the job. */
/* NOTE(review): prev_dest is used below but its declaration is not
   visible in this fragment — presumably declared earlier in the
   function; confirm against the full source.  */
2139 if (prev
&& GET_CODE (PATTERN (prev
)) == SET
2140 && (prev_dest
= SET_DEST (PATTERN (prev
)), REG_P (prev_dest
))
2141 && reg_mentioned_p (prev_dest
, PATTERN (insn
))
2142 && find_regno_note (insn
, REG_DEAD
, REGNO (prev_dest
))
2143 && ! side_effects_p (SET_SRC (PATTERN (prev
))))
/* Mark INSN itself deleted; the possibly-dead feeding insn is left for
   the post-reload DCE run noted above.  */
2146 SET_INSN_DELETED (insn
);
2149 /* Modify the home of pseudo-reg I.
2150 The new home is present in reg_renumber[I].
2152 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2153 or it may be -1, meaning there is none or it is not relevant.
2154 This is used so that all pseudos spilled from a given hard reg
2155 can share one stack slot. */
2158 alter_reg (int i
, int from_reg
, bool dont_share_p
)
2160 /* When outputting an inline function, this can happen
2161 for a reg that isn't actually used. */
2162 if (regno_reg_rtx
[i
] == 0)
2165 /* If the reg got changed to a MEM at rtl-generation time,
ignore it.  */
2167 if (!REG_P (regno_reg_rtx
[i
]))
2170 /* Modify the reg-rtx to contain the new hard reg
2171 number or else to contain its pseudo reg number. */
2172 SET_REGNO (regno_reg_rtx
[i
],
2173 reg_renumber
[i
] >= 0 ? reg_renumber
[i
] : i
);
2175 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2176 allocate a stack slot for it. */
2178 if (reg_renumber
[i
] < 0
2179 && REG_N_REFS (i
) > 0
2180 && reg_equiv_constant (i
) == 0
2181 && (reg_equiv_invariant (i
) == 0
2182 || reg_equiv_init (i
) == 0)
2183 && reg_equiv_memory_loc (i
) == 0)
/* inherent_size comes from the pseudo's own mode; total_size also
   leaves room for the widest (possibly paradoxical-subreg) reference
   recorded in reg_max_ref_width.  */
2186 enum machine_mode mode
= GET_MODE (regno_reg_rtx
[i
]);
2187 unsigned int inherent_size
= PSEUDO_REGNO_BYTES (i
);
2188 unsigned int inherent_align
= GET_MODE_ALIGNMENT (mode
);
2189 unsigned int total_size
= MAX (inherent_size
, reg_max_ref_width
[i
]);
2190 unsigned int min_align
= reg_max_ref_width
[i
] * BITS_PER_UNIT
;
2193 something_was_spilled
= true;
2195 if (ira_conflicts_p
)
2197 /* Mark the spill for IRA. */
2198 SET_REGNO_REG_SET (&spilled_pseudos
, i
);
/* Let IRA try to hand back a previously used slot for this pseudo.  */
2200 x
= ira_reuse_stack_slot (i
, inherent_size
, total_size
);
2206 /* Each pseudo reg has an inherent size which comes from its own mode,
2207 and a total size which provides room for paradoxical subregs
2208 which refer to the pseudo reg in wider modes.
2210 We can use a slot already allocated if it provides both
2211 enough inherent space and enough total space.
2212 Otherwise, we allocate a new slot, making sure that it has no less
2213 inherent space, and no less total space, then the previous slot. */
2214 else if (from_reg
== -1 || (!dont_share_p
&& ira_conflicts_p
))
2218 /* No known place to spill from => no slot to reuse. */
2219 x
= assign_stack_local (mode
, total_size
,
2220 min_align
> inherent_align
2221 || total_size
> inherent_size
? -1 : 0);
2225 /* Cancel the big-endian correction done in assign_stack_local.
2226 Get the address of the beginning of the slot. This is so we
2227 can do a big-endian correction unconditionally below. */
2228 if (BYTES_BIG_ENDIAN
)
2230 adjust
= inherent_size
- total_size
;
2233 = adjust_address_nv (x
, mode_for_size (total_size
2239 if (! dont_share_p
&& ira_conflicts_p
)
2240 /* Inform IRA about allocation a new stack slot. */
2241 ira_mark_new_stack_slot (stack_slot
, i
, total_size
);
2244 /* Reuse a stack slot if possible. */
2245 else if (spill_stack_slot
[from_reg
] != 0
2246 && spill_stack_slot_width
[from_reg
] >= total_size
2247 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot
[from_reg
]))
2249 && MEM_ALIGN (spill_stack_slot
[from_reg
]) >= min_align
)
2250 x
= spill_stack_slot
[from_reg
];
2252 /* Allocate a bigger slot. */
2255 /* Compute maximum size needed, both for inherent size
2256 and for total size. */
/* Widen the requested mode/size/alignment to cover the slot already
   shared by FROM_REG, so the new slot can replace it for all users.  */
2259 if (spill_stack_slot
[from_reg
])
2261 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot
[from_reg
]))
2263 mode
= GET_MODE (spill_stack_slot
[from_reg
]);
2264 if (spill_stack_slot_width
[from_reg
] > total_size
)
2265 total_size
= spill_stack_slot_width
[from_reg
];
2266 if (MEM_ALIGN (spill_stack_slot
[from_reg
]) > min_align
)
2267 min_align
= MEM_ALIGN (spill_stack_slot
[from_reg
]);
2270 /* Make a slot with that size. */
2271 x
= assign_stack_local (mode
, total_size
,
2272 min_align
> inherent_align
2273 || total_size
> inherent_size
? -1 : 0);
2276 /* Cancel the big-endian correction done in assign_stack_local.
2277 Get the address of the beginning of the slot. This is so we
2278 can do a big-endian correction unconditionally below. */
2279 if (BYTES_BIG_ENDIAN
)
2281 adjust
= GET_MODE_SIZE (mode
) - total_size
;
2284 = adjust_address_nv (x
, mode_for_size (total_size
/* Record the (possibly widened) shared slot for FROM_REG.  */
2290 spill_stack_slot
[from_reg
] = stack_slot
;
2291 spill_stack_slot_width
[from_reg
] = total_size
;
2294 /* On a big endian machine, the "address" of the slot
2295 is the address of the low part that fits its inherent mode. */
2296 if (BYTES_BIG_ENDIAN
&& inherent_size
< total_size
)
2297 adjust
+= (total_size
- inherent_size
);
2299 /* If we have any adjustment to make, or if the stack slot is the
2300 wrong mode, make a new stack slot. */
2301 x
= adjust_address_nv (x
, GET_MODE (regno_reg_rtx
[i
]), adjust
);
2303 /* Set all of the memory attributes as appropriate for a spill. */
2304 set_mem_attrs_for_spill (x
);
2306 /* Save the stack slot for later. */
2307 reg_equiv_memory_loc (i
) = x
;
2311 /* Mark the slots in regs_ever_live for the hard regs used by
2312 pseudo-reg number REGNO, accessed in MODE. */
2315 mark_home_live_1 (int regno
, enum machine_mode mode
)
/* I is the first hard register assigned to REGNO; LIM is one past the
   last hard register covered by MODE starting at I.  */
2319 i
= reg_renumber
[regno
];
2322 lim
= end_hard_regno (mode
, i
);
/* NOTE(review): the loop header guarding this statement is not visible
   in this extraction — presumably it iterates I up to LIM; confirm
   against the full source.  */
2324 df_set_regs_ever_live(i
++, true);
2327 /* Mark the slots in regs_ever_live for the hard regs
2328 used by pseudo-reg number REGNO. */
2331 mark_home_live (int regno
)
/* Only pseudos that actually received a hard register have a home to
   mark; mode comes from the pseudo itself.  */
2333 if (reg_renumber
[regno
] >= 0)
2334 mark_home_live_1 (regno
, PSEUDO_REGNO_MODE (regno
));
2337 /* This function handles the tracking of elimination offsets around branches.
2339 X is a piece of RTL being scanned.
2341 INSN is the insn that it came from, if any.
2343 INITIAL_P is nonzero if we are to set the offset to be the initial
2344 offset and zero if we are setting the offset of the label to be the
2348 set_label_offsets (rtx x
, rtx insn
, int initial_p
)
2350 enum rtx_code code
= GET_CODE (x
);
2353 struct elim_table
*p
;
2358 if (LABEL_REF_NONLOCAL_P (x
))
2363 /* ... fall through ... */
2366 /* If we know nothing about this label, set the desired offsets. Note
2367 that this sets the offset at a label to be the offset before a label
2368 if we don't know anything about the label. This is not correct for
2369 the label after a BARRIER, but is the best guess we can make. If
2370 we guessed wrong, we will suppress an elimination that might have
2371 been possible had we been able to guess correctly. */
2373 if (! offsets_known_at
[CODE_LABEL_NUMBER (x
) - first_label_num
])
2375 for (i
= 0; i
< NUM_ELIMINABLE_REGS
; i
++)
2376 offsets_at
[CODE_LABEL_NUMBER (x
) - first_label_num
][i
]
2377 = (initial_p
? reg_eliminate
[i
].initial_offset
2378 : reg_eliminate
[i
].offset
);
2379 offsets_known_at
[CODE_LABEL_NUMBER (x
) - first_label_num
] = 1;
2382 /* Otherwise, if this is the definition of a label and it is
2383 preceded by a BARRIER, set our offsets to the known offset of
2387 && (tem
= prev_nonnote_insn (insn
)) != 0
2389 set_offsets_for_label (insn
);
2391 /* If neither of the above cases is true, compare each offset
2392 with those previously recorded and suppress any eliminations
2393 where the offsets disagree. */
2395 for (i
= 0; i
< NUM_ELIMINABLE_REGS
; i
++)
2396 if (offsets_at
[CODE_LABEL_NUMBER (x
) - first_label_num
][i
]
2397 != (initial_p
? reg_eliminate
[i
].initial_offset
2398 : reg_eliminate
[i
].offset
))
2399 reg_eliminate
[i
].can_eliminate
= 0;
2404 set_label_offsets (PATTERN (insn
), insn
, initial_p
);
2406 /* ... fall through ... */
2410 /* Any labels mentioned in REG_LABEL_OPERAND notes can be branched
2411 to indirectly and hence must have all eliminations at their
2413 for (tem
= REG_NOTES (x
); tem
; tem
= XEXP (tem
, 1))
2414 if (REG_NOTE_KIND (tem
) == REG_LABEL_OPERAND
)
2415 set_label_offsets (XEXP (tem
, 0), insn
, 1);
2421 /* Each of the labels in the parallel or address vector must be
2422 at their initial offsets. We want the first field for PARALLEL
2423 and ADDR_VEC and the second field for ADDR_DIFF_VEC. */
2425 for (i
= 0; i
< (unsigned) XVECLEN (x
, code
== ADDR_DIFF_VEC
); i
++)
2426 set_label_offsets (XVECEXP (x
, code
== ADDR_DIFF_VEC
, i
),
2431 /* We only care about setting PC. If the source is not RETURN,
2432 IF_THEN_ELSE, or a label, disable any eliminations not at
2433 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2434 isn't one of those possibilities. For branches to a label,
2435 call ourselves recursively.
2437 Note that this can disable elimination unnecessarily when we have
2438 a non-local goto since it will look like a non-constant jump to
2439 someplace in the current function. This isn't a significant
2440 problem since such jumps will normally be when all elimination
2441 pairs are back to their initial offsets. */
2443 if (SET_DEST (x
) != pc_rtx
)
2446 switch (GET_CODE (SET_SRC (x
)))
2453 set_label_offsets (SET_SRC (x
), insn
, initial_p
);
2457 tem
= XEXP (SET_SRC (x
), 1);
2458 if (GET_CODE (tem
) == LABEL_REF
)
2459 set_label_offsets (XEXP (tem
, 0), insn
, initial_p
);
2460 else if (GET_CODE (tem
) != PC
&& GET_CODE (tem
) != RETURN
)
2463 tem
= XEXP (SET_SRC (x
), 2);
2464 if (GET_CODE (tem
) == LABEL_REF
)
2465 set_label_offsets (XEXP (tem
, 0), insn
, initial_p
);
2466 else if (GET_CODE (tem
) != PC
&& GET_CODE (tem
) != RETURN
)
2474 /* If we reach here, all eliminations must be at their initial
2475 offset because we are doing a jump to a variable address. */
2476 for (p
= reg_eliminate
; p
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; p
++)
2477 if (p
->offset
!= p
->initial_offset
)
2478 p
->can_eliminate
= 0;
2486 /* Called through for_each_rtx, this function examines every reg that occurs
2487 in PX and adjusts the costs for its elimination which are gathered by IRA.
2488 DATA is the insn in which PX occurs. We do not recurse into MEM
expressions.  */
2492 note_reg_elim_costly (rtx
*px
, void *data
)
2494 rtx insn
= (rtx
)data
;
/* Only pseudos that have both an init insn and an invariant equivalence
   recorded are of interest here.  */
2501 && REGNO (x
) >= FIRST_PSEUDO_REGISTER
2502 && reg_equiv_init (REGNO (x
))
2503 && reg_equiv_invariant (REGNO (x
)))
/* Price the eliminated form of the invariant and charge IRA's
   equivalence cost by that amount, scaled by block frequency.  */
2505 rtx t
= reg_equiv_invariant (REGNO (x
));
2506 rtx new_rtx
= eliminate_regs_1 (t
, Pmode
, insn
, true, true);
2507 int cost
= set_src_cost (new_rtx
, optimize_bb_for_speed_p (elim_bb
));
2508 int freq
= REG_FREQ_FROM_BB (elim_bb
);
2511 ira_adjust_equiv_reg_cost (REGNO (x
), -cost
* freq
);
2516 /* Scan X and replace any eliminable registers (such as fp) with a
2517 replacement (such as sp), plus an offset.
2519 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2520 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2521 MEM, we are allowed to replace a sum of a register and the constant zero
2522 with the register, which we cannot do outside a MEM. In addition, we need
2523 to record the fact that a register is referenced outside a MEM.
2525 If INSN is an insn, it is the insn containing X. If we replace a REG
2526 in a SET_DEST with an equivalent MEM and INSN is nonzero, write a
2527 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2528 the REG is being modified.
2530 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2531 That's used when we eliminate in expressions stored in notes.
2532 This means, do not set ref_outside_mem even if the reference
2535 If FOR_COSTS is true, we are being called before reload in order to
2536 estimate the costs of keeping registers with an equivalence unallocated.
2538 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2539 replacements done assuming all offsets are at their initial values. If
2540 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2541 encounter, return the actual location so that find_reloads will do
2542 the proper thing. */
2545 eliminate_regs_1 (rtx x
, enum machine_mode mem_mode
, rtx insn
,
2546 bool may_use_invariant
, bool for_costs
)
2548 enum rtx_code code
= GET_CODE (x
);
2549 struct elim_table
*ep
;
2556 if (! current_function_decl
)
2579 /* First handle the case where we encounter a bare register that
2580 is eliminable. Replace it with a PLUS. */
2581 if (regno
< FIRST_PSEUDO_REGISTER
)
2583 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
];
2585 if (ep
->from_rtx
== x
&& ep
->can_eliminate
)
2586 return plus_constant (ep
->to_rtx
, ep
->previous_offset
);
2589 else if (reg_renumber
&& reg_renumber
[regno
] < 0
2591 && reg_equiv_invariant (regno
))
2593 if (may_use_invariant
|| (insn
&& DEBUG_INSN_P (insn
)))
2594 return eliminate_regs_1 (copy_rtx (reg_equiv_invariant (regno
)),
2595 mem_mode
, insn
, true, for_costs
);
2596 /* There exists at least one use of REGNO that cannot be
2597 eliminated. Prevent the defining insn from being deleted. */
2598 reg_equiv_init (regno
) = NULL_RTX
;
2600 alter_reg (regno
, -1, true);
2604 /* You might think handling MINUS in a manner similar to PLUS is a
2605 good idea. It is not. It has been tried multiple times and every
2606 time the change has had to have been reverted.
2608 Other parts of reload know a PLUS is special (gen_reload for example)
2609 and require special code to handle code a reloaded PLUS operand.
2611 Also consider backends where the flags register is clobbered by a
2612 MINUS, but we can emit a PLUS that does not clobber flags (IA-32,
2613 lea instruction comes to mind). If we try to reload a MINUS, we
2614 may kill the flags register that was holding a useful value.
2616 So, please before trying to handle MINUS, consider reload as a
2617 whole instead of this little section as well as the backend issues. */
2619 /* If this is the sum of an eliminable register and a constant, rework
2621 if (REG_P (XEXP (x
, 0))
2622 && REGNO (XEXP (x
, 0)) < FIRST_PSEUDO_REGISTER
2623 && CONSTANT_P (XEXP (x
, 1)))
2625 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
];
2627 if (ep
->from_rtx
== XEXP (x
, 0) && ep
->can_eliminate
)
2629 /* The only time we want to replace a PLUS with a REG (this
2630 occurs when the constant operand of the PLUS is the negative
2631 of the offset) is when we are inside a MEM. We won't want
2632 to do so at other times because that would change the
2633 structure of the insn in a way that reload can't handle.
2634 We special-case the commonest situation in
2635 eliminate_regs_in_insn, so just replace a PLUS with a
2636 PLUS here, unless inside a MEM. */
2637 if (mem_mode
!= 0 && CONST_INT_P (XEXP (x
, 1))
2638 && INTVAL (XEXP (x
, 1)) == - ep
->previous_offset
)
2641 return gen_rtx_PLUS (Pmode
, ep
->to_rtx
,
2642 plus_constant (XEXP (x
, 1),
2643 ep
->previous_offset
));
2646 /* If the register is not eliminable, we are done since the other
2647 operand is a constant. */
2651 /* If this is part of an address, we want to bring any constant to the
2652 outermost PLUS. We will do this by doing register replacement in
2653 our operands and seeing if a constant shows up in one of them.
2655 Note that there is no risk of modifying the structure of the insn,
2656 since we only get called for its operands, thus we are either
2657 modifying the address inside a MEM, or something like an address
2658 operand of a load-address insn. */
2661 rtx new0
= eliminate_regs_1 (XEXP (x
, 0), mem_mode
, insn
, true,
2663 rtx new1
= eliminate_regs_1 (XEXP (x
, 1), mem_mode
, insn
, true,
2666 if (reg_renumber
&& (new0
!= XEXP (x
, 0) || new1
!= XEXP (x
, 1)))
2668 /* If one side is a PLUS and the other side is a pseudo that
2669 didn't get a hard register but has a reg_equiv_constant,
2670 we must replace the constant here since it may no longer
2671 be in the position of any operand. */
2672 if (GET_CODE (new0
) == PLUS
&& REG_P (new1
)
2673 && REGNO (new1
) >= FIRST_PSEUDO_REGISTER
2674 && reg_renumber
[REGNO (new1
)] < 0
2676 && reg_equiv_constant (REGNO (new1
)) != 0)
2677 new1
= reg_equiv_constant (REGNO (new1
));
2678 else if (GET_CODE (new1
) == PLUS
&& REG_P (new0
)
2679 && REGNO (new0
) >= FIRST_PSEUDO_REGISTER
2680 && reg_renumber
[REGNO (new0
)] < 0
2681 && reg_equiv_constant (REGNO (new0
)) != 0)
2682 new0
= reg_equiv_constant (REGNO (new0
));
2684 new_rtx
= form_sum (GET_MODE (x
), new0
, new1
);
2686 /* As above, if we are not inside a MEM we do not want to
2687 turn a PLUS into something else. We might try to do so here
2688 for an addition of 0 if we aren't optimizing. */
2689 if (! mem_mode
&& GET_CODE (new_rtx
) != PLUS
)
2690 return gen_rtx_PLUS (GET_MODE (x
), new_rtx
, const0_rtx
);
2698 /* If this is the product of an eliminable register and a
2699 constant, apply the distribute law and move the constant out
2700 so that we have (plus (mult ..) ..). This is needed in order
2701 to keep load-address insns valid. This case is pathological.
2702 We ignore the possibility of overflow here. */
2703 if (REG_P (XEXP (x
, 0))
2704 && REGNO (XEXP (x
, 0)) < FIRST_PSEUDO_REGISTER
2705 && CONST_INT_P (XEXP (x
, 1)))
2706 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
];
2708 if (ep
->from_rtx
== XEXP (x
, 0) && ep
->can_eliminate
)
2711 /* Refs inside notes or in DEBUG_INSNs don't count for
2713 && ! (insn
!= 0 && (GET_CODE (insn
) == EXPR_LIST
2714 || GET_CODE (insn
) == INSN_LIST
2715 || DEBUG_INSN_P (insn
))))
2716 ep
->ref_outside_mem
= 1;
2719 plus_constant (gen_rtx_MULT (Pmode
, ep
->to_rtx
, XEXP (x
, 1)),
2720 ep
->previous_offset
* INTVAL (XEXP (x
, 1)));
2723 /* ... fall through ... */
2727 /* See comments before PLUS about handling MINUS. */
2729 case DIV
: case UDIV
:
2730 case MOD
: case UMOD
:
2731 case AND
: case IOR
: case XOR
:
2732 case ROTATERT
: case ROTATE
:
2733 case ASHIFTRT
: case LSHIFTRT
: case ASHIFT
:
2735 case GE
: case GT
: case GEU
: case GTU
:
2736 case LE
: case LT
: case LEU
: case LTU
:
2738 rtx new0
= eliminate_regs_1 (XEXP (x
, 0), mem_mode
, insn
, false,
2740 rtx new1
= XEXP (x
, 1)
2741 ? eliminate_regs_1 (XEXP (x
, 1), mem_mode
, insn
, false,
2744 if (new0
!= XEXP (x
, 0) || new1
!= XEXP (x
, 1))
2745 return gen_rtx_fmt_ee (code
, GET_MODE (x
), new0
, new1
);
2750 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2753 new_rtx
= eliminate_regs_1 (XEXP (x
, 0), mem_mode
, insn
, true,
2755 if (new_rtx
!= XEXP (x
, 0))
2757 /* If this is a REG_DEAD note, it is not valid anymore.
2758 Using the eliminated version could result in creating a
2759 REG_DEAD note for the stack or frame pointer. */
2760 if (REG_NOTE_KIND (x
) == REG_DEAD
)
2762 ? eliminate_regs_1 (XEXP (x
, 1), mem_mode
, insn
, true,
2766 x
= alloc_reg_note (REG_NOTE_KIND (x
), new_rtx
, XEXP (x
, 1));
2770 /* ... fall through ... */
2773 /* Now do eliminations in the rest of the chain. If this was
2774 an EXPR_LIST, this might result in allocating more memory than is
2775 strictly needed, but it simplifies the code. */
2778 new_rtx
= eliminate_regs_1 (XEXP (x
, 1), mem_mode
, insn
, true,
2780 if (new_rtx
!= XEXP (x
, 1))
2782 gen_rtx_fmt_ee (GET_CODE (x
), GET_MODE (x
), XEXP (x
, 0), new_rtx
);
2790 /* We do not support elimination of a register that is modified.
2791 elimination_effects has already make sure that this does not
2797 /* We do not support elimination of a register that is modified.
2798 elimination_effects has already make sure that this does not
2799 happen. The only remaining case we need to consider here is
2800 that the increment value may be an eliminable register. */
2801 if (GET_CODE (XEXP (x
, 1)) == PLUS
2802 && XEXP (XEXP (x
, 1), 0) == XEXP (x
, 0))
2804 rtx new_rtx
= eliminate_regs_1 (XEXP (XEXP (x
, 1), 1), mem_mode
,
2805 insn
, true, for_costs
);
2807 if (new_rtx
!= XEXP (XEXP (x
, 1), 1))
2808 return gen_rtx_fmt_ee (code
, GET_MODE (x
), XEXP (x
, 0),
2809 gen_rtx_PLUS (GET_MODE (x
),
2810 XEXP (x
, 0), new_rtx
));
2814 case STRICT_LOW_PART
:
2816 case SIGN_EXTEND
: case ZERO_EXTEND
:
2817 case TRUNCATE
: case FLOAT_EXTEND
: case FLOAT_TRUNCATE
:
2818 case FLOAT
: case FIX
:
2819 case UNSIGNED_FIX
: case UNSIGNED_FLOAT
:
2828 new_rtx
= eliminate_regs_1 (XEXP (x
, 0), mem_mode
, insn
, false,
2830 if (new_rtx
!= XEXP (x
, 0))
2831 return gen_rtx_fmt_e (code
, GET_MODE (x
), new_rtx
);
2835 /* Similar to above processing, but preserve SUBREG_BYTE.
2836 Convert (subreg (mem)) to (mem) if not paradoxical.
2837 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2838 pseudo didn't get a hard reg, we must replace this with the
2839 eliminated version of the memory location because push_reload
2840 may do the replacement in certain circumstances. */
2841 if (REG_P (SUBREG_REG (x
))
2842 && !paradoxical_subreg_p (x
)
2844 && reg_equiv_memory_loc (REGNO (SUBREG_REG (x
))) != 0)
2846 new_rtx
= SUBREG_REG (x
);
2849 new_rtx
= eliminate_regs_1 (SUBREG_REG (x
), mem_mode
, insn
, false, for_costs
);
2851 if (new_rtx
!= SUBREG_REG (x
))
2853 int x_size
= GET_MODE_SIZE (GET_MODE (x
));
2854 int new_size
= GET_MODE_SIZE (GET_MODE (new_rtx
));
2857 && ((x_size
< new_size
2858 #ifdef WORD_REGISTER_OPERATIONS
2859 /* On these machines, combine can create rtl of the form
2860 (set (subreg:m1 (reg:m2 R) 0) ...)
2861 where m1 < m2, and expects something interesting to
2862 happen to the entire word. Moreover, it will use the
2863 (reg:m2 R) later, expecting all bits to be preserved.
2864 So if the number of words is the same, preserve the
2865 subreg so that push_reload can see it. */
2866 && ! ((x_size
- 1) / UNITS_PER_WORD
2867 == (new_size
-1 ) / UNITS_PER_WORD
)
2870 || x_size
== new_size
)
2872 return adjust_address_nv (new_rtx
, GET_MODE (x
), SUBREG_BYTE (x
));
2874 return gen_rtx_SUBREG (GET_MODE (x
), new_rtx
, SUBREG_BYTE (x
));
2880 /* Our only special processing is to pass the mode of the MEM to our
2881 recursive call and copy the flags. While we are here, handle this
2882 case more efficiently. */
2884 new_rtx
= eliminate_regs_1 (XEXP (x
, 0), GET_MODE (x
), insn
, true,
2887 && memory_address_p (GET_MODE (x
), XEXP (x
, 0))
2888 && !memory_address_p (GET_MODE (x
), new_rtx
))
2889 for_each_rtx (&XEXP (x
, 0), note_reg_elim_costly
, insn
);
2891 return replace_equiv_address_nv (x
, new_rtx
);
2894 /* Handle insn_list USE that a call to a pure function may generate. */
2895 new_rtx
= eliminate_regs_1 (XEXP (x
, 0), VOIDmode
, insn
, false,
2897 if (new_rtx
!= XEXP (x
, 0))
2898 return gen_rtx_USE (GET_MODE (x
), new_rtx
);
2903 gcc_assert (insn
&& DEBUG_INSN_P (insn
));
2913 /* Process each of our operands recursively. If any have changed, make a
2915 fmt
= GET_RTX_FORMAT (code
);
2916 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++, fmt
++)
2920 new_rtx
= eliminate_regs_1 (XEXP (x
, i
), mem_mode
, insn
, false,
2922 if (new_rtx
!= XEXP (x
, i
) && ! copied
)
2924 x
= shallow_copy_rtx (x
);
2927 XEXP (x
, i
) = new_rtx
;
2929 else if (*fmt
== 'E')
2932 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2934 new_rtx
= eliminate_regs_1 (XVECEXP (x
, i
, j
), mem_mode
, insn
, false,
2936 if (new_rtx
!= XVECEXP (x
, i
, j
) && ! copied_vec
)
2938 rtvec new_v
= gen_rtvec_v (XVECLEN (x
, i
),
2942 x
= shallow_copy_rtx (x
);
2945 XVEC (x
, i
) = new_v
;
2948 XVECEXP (x
, i
, j
) = new_rtx
;
/* Public wrapper around eliminate_regs_1: perform register elimination
   in X without permitting invariant substitution and without cost
   estimation (both flags false).  */
2957 eliminate_regs (rtx x
, enum machine_mode mem_mode
, rtx insn
)
2959 return eliminate_regs_1 (x
, mem_mode
, insn
, false, false);
2962 /* Scan rtx X for modifications of elimination target registers. Update
2963 the table of eliminables to reflect the changed state. MEM_MODE is
2964 the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM. */
2967 elimination_effects (rtx x
, enum machine_mode mem_mode
)
2969 enum rtx_code code
= GET_CODE (x
);
2970 struct elim_table
*ep
;
2995 /* First handle the case where we encounter a bare register that
2996 is eliminable. Replace it with a PLUS. */
2997 if (regno
< FIRST_PSEUDO_REGISTER
)
2999 for (ep
= reg_eliminate
; ep
< &reg_eliminate
[NUM_ELIMINABLE_REGS
];
3001 if (ep
->from_rtx
== x
&& ep
->can_eliminate
)
3004 ep
->ref_outside_mem
= 1;
3009 else if (reg_renumber
[regno
] < 0
3011 && reg_equiv_constant (regno
)
3012 && ! function_invariant_p (reg_equiv_constant (regno
)))
3013 elimination_effects (reg_equiv_constant (regno
), mem_mode
);
3022 /* If we modify the source of an elimination rule, disable it. */
3023 for (ep
= reg_eliminate
; ep
< &reg_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3024 if (ep
->from_rtx
== XEXP (x
, 0))
3025 ep
->can_eliminate
= 0;
3027 /* If we modify the target of an elimination rule by adding a constant,
3028 update its offset. If we modify the target in any other way, we'll
3029 have to disable the rule as well. */
3030 for (ep
= reg_eliminate
; ep
< &reg_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3031 if (ep
->to_rtx
== XEXP (x
, 0))
3033 int size
= GET_MODE_SIZE (mem_mode
);
3035 /* If more bytes than MEM_MODE are pushed, account for them. */
3036 #ifdef PUSH_ROUNDING
3037 if (ep
->to_rtx
== stack_pointer_rtx
)
3038 size
= PUSH_ROUNDING (size
);
3040 if (code
== PRE_DEC
|| code
== POST_DEC
)
3042 else if (code
== PRE_INC
|| code
== POST_INC
)
3044 else if (code
== PRE_MODIFY
|| code
== POST_MODIFY
)
3046 if (GET_CODE (XEXP (x
, 1)) == PLUS
3047 && XEXP (x
, 0) == XEXP (XEXP (x
, 1), 0)
3048 && CONST_INT_P (XEXP (XEXP (x
, 1), 1)))
3049 ep
->offset
-= INTVAL (XEXP (XEXP (x
, 1), 1));
3051 ep
->can_eliminate
= 0;
3055 /* These two aren't unary operators. */
3056 if (code
== POST_MODIFY
|| code
== PRE_MODIFY
)
3059 /* Fall through to generic unary operation case. */
3060 case STRICT_LOW_PART
:
3062 case SIGN_EXTEND
: case ZERO_EXTEND
:
3063 case TRUNCATE
: case FLOAT_EXTEND
: case FLOAT_TRUNCATE
:
3064 case FLOAT
: case FIX
:
3065 case UNSIGNED_FIX
: case UNSIGNED_FLOAT
:
3074 elimination_effects (XEXP (x
, 0), mem_mode
);
3078 if (REG_P (SUBREG_REG (x
))
3079 && (GET_MODE_SIZE (GET_MODE (x
))
3080 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))))
3082 && reg_equiv_memory_loc (REGNO (SUBREG_REG (x
))) != 0)
3085 elimination_effects (SUBREG_REG (x
), mem_mode
);
3089 /* If using a register that is the source of an eliminate we still
3090 think can be performed, note it cannot be performed since we don't
3091 know how this register is used. */
3092 for (ep
= reg_eliminate
; ep
< &reg_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3093 if (ep
->from_rtx
== XEXP (x
, 0))
3094 ep
->can_eliminate
= 0;
3096 elimination_effects (XEXP (x
, 0), mem_mode
);
3100 /* If clobbering a register that is the replacement register for an
3101 elimination we still think can be performed, note that it cannot
3102 be performed. Otherwise, we need not be concerned about it. */
3103 for (ep
= reg_eliminate
; ep
< &reg_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3104 if (ep
->to_rtx
== XEXP (x
, 0))
3105 ep
->can_eliminate
= 0;
3107 elimination_effects (XEXP (x
, 0), mem_mode
);
3111 /* Check for setting a register that we know about. */
3112 if (REG_P (SET_DEST (x
)))
3114 /* See if this is setting the replacement register for an
3117 If DEST is the hard frame pointer, we do nothing because we
3118 assume that all assignments to the frame pointer are for
3119 non-local gotos and are being done at a time when they are valid
3120 and do not disturb anything else. Some machines want to
3121 eliminate a fake argument pointer (or even a fake frame pointer)
3122 with either the real frame or the stack pointer. Assignments to
3123 the hard frame pointer must not prevent this elimination. */
3125 for (ep
= reg_eliminate
; ep
< &reg_eliminate
[NUM_ELIMINABLE_REGS
];
3127 if (ep
->to_rtx
== SET_DEST (x
)
3128 && SET_DEST (x
) != hard_frame_pointer_rtx
)
3130 /* If it is being incremented, adjust the offset. Otherwise,
3131 this elimination can't be done. */
3132 rtx src
= SET_SRC (x
);
3134 if (GET_CODE (src
) == PLUS
3135 && XEXP (src
, 0) == SET_DEST (x
)
3136 && CONST_INT_P (XEXP (src
, 1)))
3137 ep
->offset
-= INTVAL (XEXP (src
, 1));
3139 ep
->can_eliminate
= 0;
3143 elimination_effects (SET_DEST (x
), VOIDmode
);
3144 elimination_effects (SET_SRC (x
), VOIDmode
);
3148 /* Our only special processing is to pass the mode of the MEM to our
recursive call.  */
3150 elimination_effects (XEXP (x
, 0), GET_MODE (x
));
/* Default: recurse into all 'e' operands and 'E' vectors of X.  */
3157 fmt
= GET_RTX_FORMAT (code
);
3158 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++, fmt
++)
3161 elimination_effects (XEXP (x
, i
), mem_mode
);
3162 else if (*fmt
== 'E')
3163 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
3164 elimination_effects (XVECEXP (x
, i
, j
), mem_mode
);
3168 /* Descend through rtx X and verify that no references to eliminable registers
3169 remain. If any do remain, mark the involved register as not
3173 check_eliminable_occurrences (rtx x
)
3182 code
= GET_CODE (x
);
3184 if (code
== REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
3186 struct elim_table
*ep
;
3188 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3189 if (ep
->from_rtx
== x
)
3190 ep
->can_eliminate
= 0;
3194 fmt
= GET_RTX_FORMAT (code
);
3195 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++, fmt
++)
3198 check_eliminable_occurrences (XEXP (x
, i
));
3199 else if (*fmt
== 'E')
3202 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
3203 check_eliminable_occurrences (XVECEXP (x
, i
, j
));
3208 /* Scan INSN and eliminate all eliminable registers in it.
3210 If REPLACE is nonzero, do the replacement destructively. Also
3211 delete the insn as dead it if it is setting an eliminable register.
3213 If REPLACE is zero, do all our allocations in reload_obstack.
3215 If no eliminations were done and this insn doesn't require any elimination
3216 processing (these are not identical conditions: it might be updating sp,
3217 but not referencing fp; this needs to be seen during reload_as_needed so
3218 that the offset between fp and sp can be taken into consideration), zero
3219 is returned. Otherwise, 1 is returned. */
3222 eliminate_regs_in_insn (rtx insn
, int replace
)
3224 int icode
= recog_memoized (insn
);
3225 rtx old_body
= PATTERN (insn
);
3226 int insn_is_asm
= asm_noperands (old_body
) >= 0;
3227 rtx old_set
= single_set (insn
);
3231 rtx substed_operand
[MAX_RECOG_OPERANDS
];
3232 rtx orig_operand
[MAX_RECOG_OPERANDS
];
3233 struct elim_table
*ep
;
3234 rtx plus_src
, plus_cst_src
;
3236 if (! insn_is_asm
&& icode
< 0)
3238 gcc_assert (GET_CODE (PATTERN (insn
)) == USE
3239 || GET_CODE (PATTERN (insn
)) == CLOBBER
3240 || GET_CODE (PATTERN (insn
)) == ADDR_VEC
3241 || GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
3242 || GET_CODE (PATTERN (insn
)) == ASM_INPUT
3243 || DEBUG_INSN_P (insn
));
3244 if (DEBUG_INSN_P (insn
))
3245 INSN_VAR_LOCATION_LOC (insn
)
3246 = eliminate_regs (INSN_VAR_LOCATION_LOC (insn
), VOIDmode
, insn
);
3250 if (old_set
!= 0 && REG_P (SET_DEST (old_set
))
3251 && REGNO (SET_DEST (old_set
)) < FIRST_PSEUDO_REGISTER
)
3253 /* Check for setting an eliminable register. */
3254 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3255 if (ep
->from_rtx
== SET_DEST (old_set
) && ep
->can_eliminate
)
3257 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
3258 /* If this is setting the frame pointer register to the
3259 hardware frame pointer register and this is an elimination
3260 that will be done (tested above), this insn is really
3261 adjusting the frame pointer downward to compensate for
3262 the adjustment done before a nonlocal goto. */
3263 if (ep
->from
== FRAME_POINTER_REGNUM
3264 && ep
->to
== HARD_FRAME_POINTER_REGNUM
)
3266 rtx base
= SET_SRC (old_set
);
3267 rtx base_insn
= insn
;
3268 HOST_WIDE_INT offset
= 0;
3270 while (base
!= ep
->to_rtx
)
3272 rtx prev_insn
, prev_set
;
3274 if (GET_CODE (base
) == PLUS
3275 && CONST_INT_P (XEXP (base
, 1)))
3277 offset
+= INTVAL (XEXP (base
, 1));
3278 base
= XEXP (base
, 0);
3280 else if ((prev_insn
= prev_nonnote_insn (base_insn
)) != 0
3281 && (prev_set
= single_set (prev_insn
)) != 0
3282 && rtx_equal_p (SET_DEST (prev_set
), base
))
3284 base
= SET_SRC (prev_set
);
3285 base_insn
= prev_insn
;
3291 if (base
== ep
->to_rtx
)
3294 = plus_constant (ep
->to_rtx
, offset
- ep
->offset
);
3296 new_body
= old_body
;
3299 new_body
= copy_insn (old_body
);
3300 if (REG_NOTES (insn
))
3301 REG_NOTES (insn
) = copy_insn_1 (REG_NOTES (insn
));
3303 PATTERN (insn
) = new_body
;
3304 old_set
= single_set (insn
);
3306 /* First see if this insn remains valid when we
3307 make the change. If not, keep the INSN_CODE
3308 the same and let reload fit it up. */
3309 validate_change (insn
, &SET_SRC (old_set
), src
, 1);
3310 validate_change (insn
, &SET_DEST (old_set
),
3312 if (! apply_change_group ())
3314 SET_SRC (old_set
) = src
;
3315 SET_DEST (old_set
) = ep
->to_rtx
;
3324 /* In this case this insn isn't serving a useful purpose. We
3325 will delete it in reload_as_needed once we know that this
3326 elimination is, in fact, being done.
3328 If REPLACE isn't set, we can't delete this insn, but needn't
3329 process it since it won't be used unless something changes. */
3332 delete_dead_insn (insn
);
3340 /* We allow one special case which happens to work on all machines we
3341 currently support: a single set with the source or a REG_EQUAL
3342 note being a PLUS of an eliminable register and a constant. */
3343 plus_src
= plus_cst_src
= 0;
3344 if (old_set
&& REG_P (SET_DEST (old_set
)))
3346 if (GET_CODE (SET_SRC (old_set
)) == PLUS
)
3347 plus_src
= SET_SRC (old_set
);
3348 /* First see if the source is of the form (plus (...) CST). */
3350 && CONST_INT_P (XEXP (plus_src
, 1)))
3351 plus_cst_src
= plus_src
;
3352 else if (REG_P (SET_SRC (old_set
))
3355 /* Otherwise, see if we have a REG_EQUAL note of the form
3356 (plus (...) CST). */
3358 for (links
= REG_NOTES (insn
); links
; links
= XEXP (links
, 1))
3360 if ((REG_NOTE_KIND (links
) == REG_EQUAL
3361 || REG_NOTE_KIND (links
) == REG_EQUIV
)
3362 && GET_CODE (XEXP (links
, 0)) == PLUS
3363 && CONST_INT_P (XEXP (XEXP (links
, 0), 1)))
3365 plus_cst_src
= XEXP (links
, 0);
3371 /* Check that the first operand of the PLUS is a hard reg or
3372 the lowpart subreg of one. */
3375 rtx reg
= XEXP (plus_cst_src
, 0);
3376 if (GET_CODE (reg
) == SUBREG
&& subreg_lowpart_p (reg
))
3377 reg
= SUBREG_REG (reg
);
3379 if (!REG_P (reg
) || REGNO (reg
) >= FIRST_PSEUDO_REGISTER
)
3385 rtx reg
= XEXP (plus_cst_src
, 0);
3386 HOST_WIDE_INT offset
= INTVAL (XEXP (plus_cst_src
, 1));
3388 if (GET_CODE (reg
) == SUBREG
)
3389 reg
= SUBREG_REG (reg
);
3391 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3392 if (ep
->from_rtx
== reg
&& ep
->can_eliminate
)
3394 rtx to_rtx
= ep
->to_rtx
;
3395 offset
+= ep
->offset
;
3396 offset
= trunc_int_for_mode (offset
, GET_MODE (plus_cst_src
));
3398 if (GET_CODE (XEXP (plus_cst_src
, 0)) == SUBREG
)
3399 to_rtx
= gen_lowpart (GET_MODE (XEXP (plus_cst_src
, 0)),
3401 /* If we have a nonzero offset, and the source is already
3402 a simple REG, the following transformation would
3403 increase the cost of the insn by replacing a simple REG
3404 with (plus (reg sp) CST). So try only when we already
3405 had a PLUS before. */
3406 if (offset
== 0 || plus_src
)
3408 rtx new_src
= plus_constant (to_rtx
, offset
);
3410 new_body
= old_body
;
3413 new_body
= copy_insn (old_body
);
3414 if (REG_NOTES (insn
))
3415 REG_NOTES (insn
) = copy_insn_1 (REG_NOTES (insn
));
3417 PATTERN (insn
) = new_body
;
3418 old_set
= single_set (insn
);
3420 /* First see if this insn remains valid when we make the
3421 change. If not, try to replace the whole pattern with
3422 a simple set (this may help if the original insn was a
3423 PARALLEL that was only recognized as single_set due to
3424 REG_UNUSED notes). If this isn't valid either, keep
3425 the INSN_CODE the same and let reload fix it up. */
3426 if (!validate_change (insn
, &SET_SRC (old_set
), new_src
, 0))
3428 rtx new_pat
= gen_rtx_SET (VOIDmode
,
3429 SET_DEST (old_set
), new_src
);
3431 if (!validate_change (insn
, &PATTERN (insn
), new_pat
, 0))
3432 SET_SRC (old_set
) = new_src
;
3439 /* This can't have an effect on elimination offsets, so skip right
3445 /* Determine the effects of this insn on elimination offsets. */
3446 elimination_effects (old_body
, VOIDmode
);
3448 /* Eliminate all eliminable registers occurring in operands that
3449 can be handled by reload. */
3450 extract_insn (insn
);
3451 for (i
= 0; i
< recog_data
.n_operands
; i
++)
3453 orig_operand
[i
] = recog_data
.operand
[i
];
3454 substed_operand
[i
] = recog_data
.operand
[i
];
3456 /* For an asm statement, every operand is eliminable. */
3457 if (insn_is_asm
|| insn_data
[icode
].operand
[i
].eliminable
)
3459 bool is_set_src
, in_plus
;
3461 /* Check for setting a register that we know about. */
3462 if (recog_data
.operand_type
[i
] != OP_IN
3463 && REG_P (orig_operand
[i
]))
3465 /* If we are assigning to a register that can be eliminated, it
3466 must be as part of a PARALLEL, since the code above handles
3467 single SETs. We must indicate that we can no longer
3468 eliminate this reg. */
3469 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
];
3471 if (ep
->from_rtx
== orig_operand
[i
])
3472 ep
->can_eliminate
= 0;
3475 /* Companion to the above plus substitution, we can allow
3476 invariants as the source of a plain move. */
3479 && recog_data
.operand_loc
[i
] == &SET_SRC (old_set
))
3483 && (recog_data
.operand_loc
[i
] == &XEXP (plus_src
, 0)
3484 || recog_data
.operand_loc
[i
] == &XEXP (plus_src
, 1)))
3488 = eliminate_regs_1 (recog_data
.operand
[i
], VOIDmode
,
3489 replace
? insn
: NULL_RTX
,
3490 is_set_src
|| in_plus
, false);
3491 if (substed_operand
[i
] != orig_operand
[i
])
3493 /* Terminate the search in check_eliminable_occurrences at
3495 *recog_data
.operand_loc
[i
] = 0;
3497 /* If an output operand changed from a REG to a MEM and INSN is an
3498 insn, write a CLOBBER insn. */
3499 if (recog_data
.operand_type
[i
] != OP_IN
3500 && REG_P (orig_operand
[i
])
3501 && MEM_P (substed_operand
[i
])
3503 emit_insn_after (gen_clobber (orig_operand
[i
]), insn
);
3507 for (i
= 0; i
< recog_data
.n_dups
; i
++)
3508 *recog_data
.dup_loc
[i
]
3509 = *recog_data
.operand_loc
[(int) recog_data
.dup_num
[i
]];
3511 /* If any eliminable remain, they aren't eliminable anymore. */
3512 check_eliminable_occurrences (old_body
);
3514 /* Substitute the operands; the new values are in the substed_operand
3516 for (i
= 0; i
< recog_data
.n_operands
; i
++)
3517 *recog_data
.operand_loc
[i
] = substed_operand
[i
];
3518 for (i
= 0; i
< recog_data
.n_dups
; i
++)
3519 *recog_data
.dup_loc
[i
] = substed_operand
[(int) recog_data
.dup_num
[i
]];
3521 /* If we are replacing a body that was a (set X (plus Y Z)), try to
3522 re-recognize the insn. We do this in case we had a simple addition
3523 but now can do this as a load-address. This saves an insn in this
3525 If re-recognition fails, the old insn code number will still be used,
3526 and some register operands may have changed into PLUS expressions.
3527 These will be handled by find_reloads by loading them into a register
3532 /* If we aren't replacing things permanently and we changed something,
3533 make another copy to ensure that all the RTL is new. Otherwise
3534 things can go wrong if find_reload swaps commutative operands
3535 and one is inside RTL that has been copied while the other is not. */
3536 new_body
= old_body
;
3539 new_body
= copy_insn (old_body
);
3540 if (REG_NOTES (insn
))
3541 REG_NOTES (insn
) = copy_insn_1 (REG_NOTES (insn
));
3543 PATTERN (insn
) = new_body
;
3545 /* If we had a move insn but now we don't, rerecognize it. This will
3546 cause spurious re-recognition if the old move had a PARALLEL since
3547 the new one still will, but we can't call single_set without
3548 having put NEW_BODY into the insn and the re-recognition won't
3549 hurt in this rare case. */
3550 /* ??? Why this huge if statement - why don't we just rerecognize the
3554 && ((REG_P (SET_SRC (old_set
))
3555 && (GET_CODE (new_body
) != SET
3556 || !REG_P (SET_SRC (new_body
))))
3557 /* If this was a load from or store to memory, compare
3558 the MEM in recog_data.operand to the one in the insn.
3559 If they are not equal, then rerecognize the insn. */
3561 && ((MEM_P (SET_SRC (old_set
))
3562 && SET_SRC (old_set
) != recog_data
.operand
[1])
3563 || (MEM_P (SET_DEST (old_set
))
3564 && SET_DEST (old_set
) != recog_data
.operand
[0])))
3565 /* If this was an add insn before, rerecognize. */
3566 || GET_CODE (SET_SRC (old_set
)) == PLUS
))
3568 int new_icode
= recog (PATTERN (insn
), insn
, 0);
3570 INSN_CODE (insn
) = new_icode
;
3574 /* Restore the old body. If there were any changes to it, we made a copy
3575 of it while the changes were still in place, so we'll correctly return
3576 a modified insn below. */
3579 /* Restore the old body. */
3580 for (i
= 0; i
< recog_data
.n_operands
; i
++)
3581 /* Restoring a top-level match_parallel would clobber the new_body
3582 we installed in the insn. */
3583 if (recog_data
.operand_loc
[i
] != &PATTERN (insn
))
3584 *recog_data
.operand_loc
[i
] = orig_operand
[i
];
3585 for (i
= 0; i
< recog_data
.n_dups
; i
++)
3586 *recog_data
.dup_loc
[i
] = orig_operand
[(int) recog_data
.dup_num
[i
]];
3589 /* Update all elimination pairs to reflect the status after the current
3590 insn. The changes we make were determined by the earlier call to
3591 elimination_effects.
3593 We also detect cases where register elimination cannot be done,
3594 namely, if a register would be both changed and referenced outside a MEM
3595 in the resulting insn since such an insn is often undefined and, even if
3596 not, we cannot know what meaning will be given to it. Note that it is
3597 valid to have a register used in an address in an insn that changes it
3598 (presumably with a pre- or post-increment or decrement).
3600 If anything changes, return nonzero. */
3602 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3604 if (ep
->previous_offset
!= ep
->offset
&& ep
->ref_outside_mem
)
3605 ep
->can_eliminate
= 0;
3607 ep
->ref_outside_mem
= 0;
3609 if (ep
->previous_offset
!= ep
->offset
)
3614 /* If we changed something, perform elimination in REG_NOTES. This is
3615 needed even when REPLACE is zero because a REG_DEAD note might refer
3616 to a register that we eliminate and could cause a different number
3617 of spill registers to be needed in the final reload pass than in
3619 if (val
&& REG_NOTES (insn
) != 0)
3621 = eliminate_regs_1 (REG_NOTES (insn
), VOIDmode
, REG_NOTES (insn
), true,
3627 /* Like eliminate_regs_in_insn, but only estimate costs for the use of the
3628 register allocator. INSN is the instruction we need to examine, we perform
3629 eliminations in its operands and record cases where eliminating a reg with
3630 an invariant equivalence would add extra cost. */
3633 elimination_costs_in_insn (rtx insn
)
3635 int icode
= recog_memoized (insn
);
3636 rtx old_body
= PATTERN (insn
);
3637 int insn_is_asm
= asm_noperands (old_body
) >= 0;
3638 rtx old_set
= single_set (insn
);
3640 rtx orig_operand
[MAX_RECOG_OPERANDS
];
3641 rtx orig_dup
[MAX_RECOG_OPERANDS
];
3642 struct elim_table
*ep
;
3643 rtx plus_src
, plus_cst_src
;
3646 if (! insn_is_asm
&& icode
< 0)
3648 gcc_assert (GET_CODE (PATTERN (insn
)) == USE
3649 || GET_CODE (PATTERN (insn
)) == CLOBBER
3650 || GET_CODE (PATTERN (insn
)) == ADDR_VEC
3651 || GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
3652 || GET_CODE (PATTERN (insn
)) == ASM_INPUT
3653 || DEBUG_INSN_P (insn
));
3657 if (old_set
!= 0 && REG_P (SET_DEST (old_set
))
3658 && REGNO (SET_DEST (old_set
)) < FIRST_PSEUDO_REGISTER
)
3660 /* Check for setting an eliminable register. */
3661 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3662 if (ep
->from_rtx
== SET_DEST (old_set
) && ep
->can_eliminate
)
3666 /* We allow one special case which happens to work on all machines we
3667 currently support: a single set with the source or a REG_EQUAL
3668 note being a PLUS of an eliminable register and a constant. */
3669 plus_src
= plus_cst_src
= 0;
3671 if (old_set
&& REG_P (SET_DEST (old_set
)))
3674 if (GET_CODE (SET_SRC (old_set
)) == PLUS
)
3675 plus_src
= SET_SRC (old_set
);
3676 /* First see if the source is of the form (plus (...) CST). */
3678 && CONST_INT_P (XEXP (plus_src
, 1)))
3679 plus_cst_src
= plus_src
;
3680 else if (REG_P (SET_SRC (old_set
))
3683 /* Otherwise, see if we have a REG_EQUAL note of the form
3684 (plus (...) CST). */
3686 for (links
= REG_NOTES (insn
); links
; links
= XEXP (links
, 1))
3688 if ((REG_NOTE_KIND (links
) == REG_EQUAL
3689 || REG_NOTE_KIND (links
) == REG_EQUIV
)
3690 && GET_CODE (XEXP (links
, 0)) == PLUS
3691 && CONST_INT_P (XEXP (XEXP (links
, 0), 1)))
3693 plus_cst_src
= XEXP (links
, 0);
3700 /* Determine the effects of this insn on elimination offsets. */
3701 elimination_effects (old_body
, VOIDmode
);
3703 /* Eliminate all eliminable registers occurring in operands that
3704 can be handled by reload. */
3705 extract_insn (insn
);
3706 for (i
= 0; i
< recog_data
.n_dups
; i
++)
3707 orig_dup
[i
] = *recog_data
.dup_loc
[i
];
3709 for (i
= 0; i
< recog_data
.n_operands
; i
++)
3711 orig_operand
[i
] = recog_data
.operand
[i
];
3713 /* For an asm statement, every operand is eliminable. */
3714 if (insn_is_asm
|| insn_data
[icode
].operand
[i
].eliminable
)
3716 bool is_set_src
, in_plus
;
3718 /* Check for setting a register that we know about. */
3719 if (recog_data
.operand_type
[i
] != OP_IN
3720 && REG_P (orig_operand
[i
]))
3722 /* If we are assigning to a register that can be eliminated, it
3723 must be as part of a PARALLEL, since the code above handles
3724 single SETs. We must indicate that we can no longer
3725 eliminate this reg. */
3726 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
];
3728 if (ep
->from_rtx
== orig_operand
[i
])
3729 ep
->can_eliminate
= 0;
3732 /* Companion to the above plus substitution, we can allow
3733 invariants as the source of a plain move. */
3735 if (old_set
&& recog_data
.operand_loc
[i
] == &SET_SRC (old_set
))
3737 if (is_set_src
&& !sets_reg_p
)
3738 note_reg_elim_costly (&SET_SRC (old_set
), insn
);
3740 if (plus_src
&& sets_reg_p
3741 && (recog_data
.operand_loc
[i
] == &XEXP (plus_src
, 0)
3742 || recog_data
.operand_loc
[i
] == &XEXP (plus_src
, 1)))
3745 eliminate_regs_1 (recog_data
.operand
[i
], VOIDmode
,
3747 is_set_src
|| in_plus
, true);
3748 /* Terminate the search in check_eliminable_occurrences at
3750 *recog_data
.operand_loc
[i
] = 0;
3754 for (i
= 0; i
< recog_data
.n_dups
; i
++)
3755 *recog_data
.dup_loc
[i
]
3756 = *recog_data
.operand_loc
[(int) recog_data
.dup_num
[i
]];
3758 /* If any eliminable remain, they aren't eliminable anymore. */
3759 check_eliminable_occurrences (old_body
);
3761 /* Restore the old body. */
3762 for (i
= 0; i
< recog_data
.n_operands
; i
++)
3763 *recog_data
.operand_loc
[i
] = orig_operand
[i
];
3764 for (i
= 0; i
< recog_data
.n_dups
; i
++)
3765 *recog_data
.dup_loc
[i
] = orig_dup
[i
];
3767 /* Update all elimination pairs to reflect the status after the current
3768 insn. The changes we make were determined by the earlier call to
3769 elimination_effects. */
3771 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3773 if (ep
->previous_offset
!= ep
->offset
&& ep
->ref_outside_mem
)
3774 ep
->can_eliminate
= 0;
3776 ep
->ref_outside_mem
= 0;
3782 /* Loop through all elimination pairs.
3783 Recalculate the number not at initial offset.
3785 Compute the maximum offset (minimum offset if the stack does not
3786 grow downward) for each elimination pair. */
3789 update_eliminable_offsets (void)
3791 struct elim_table
*ep
;
3793 num_not_at_initial_offset
= 0;
3794 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3796 ep
->previous_offset
= ep
->offset
;
3797 if (ep
->can_eliminate
&& ep
->offset
!= ep
->initial_offset
)
3798 num_not_at_initial_offset
++;
3802 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3803 replacement we currently believe is valid, mark it as not eliminable if X
3804 modifies DEST in any way other than by adding a constant integer to it.
3806 If DEST is the frame pointer, we do nothing because we assume that
3807 all assignments to the hard frame pointer are nonlocal gotos and are being
3808 done at a time when they are valid and do not disturb anything else.
3809 Some machines want to eliminate a fake argument pointer with either the
3810 frame or stack pointer. Assignments to the hard frame pointer must not
3811 prevent this elimination.
3813 Called via note_stores from reload before starting its passes to scan
3814 the insns of the function. */
3817 mark_not_eliminable (rtx dest
, const_rtx x
, void *data ATTRIBUTE_UNUSED
)
3821 /* A SUBREG of a hard register here is just changing its mode. We should
3822 not see a SUBREG of an eliminable hard register, but check just in
3824 if (GET_CODE (dest
) == SUBREG
)
3825 dest
= SUBREG_REG (dest
);
3827 if (dest
== hard_frame_pointer_rtx
)
3830 for (i
= 0; i
< NUM_ELIMINABLE_REGS
; i
++)
3831 if (reg_eliminate
[i
].can_eliminate
&& dest
== reg_eliminate
[i
].to_rtx
3832 && (GET_CODE (x
) != SET
3833 || GET_CODE (SET_SRC (x
)) != PLUS
3834 || XEXP (SET_SRC (x
), 0) != dest
3835 || !CONST_INT_P (XEXP (SET_SRC (x
), 1))))
3837 reg_eliminate
[i
].can_eliminate_previous
3838 = reg_eliminate
[i
].can_eliminate
= 0;
3843 /* Verify that the initial elimination offsets did not change since the
3844 last call to set_initial_elim_offsets. This is used to catch cases
3845 where something illegal happened during reload_as_needed that could
3846 cause incorrect code to be generated if we did not check for it. */
3849 verify_initial_elim_offsets (void)
3853 if (!num_eliminable
)
3856 #ifdef ELIMINABLE_REGS
3858 struct elim_table
*ep
;
3860 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3862 INITIAL_ELIMINATION_OFFSET (ep
->from
, ep
->to
, t
);
3863 if (t
!= ep
->initial_offset
)
3868 INITIAL_FRAME_POINTER_OFFSET (t
);
3869 if (t
!= reg_eliminate
[0].initial_offset
)
3876 /* Reset all offsets on eliminable registers to their initial values. */
3879 set_initial_elim_offsets (void)
3881 struct elim_table
*ep
= reg_eliminate
;
3883 #ifdef ELIMINABLE_REGS
3884 for (; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3886 INITIAL_ELIMINATION_OFFSET (ep
->from
, ep
->to
, ep
->initial_offset
);
3887 ep
->previous_offset
= ep
->offset
= ep
->initial_offset
;
3890 INITIAL_FRAME_POINTER_OFFSET (ep
->initial_offset
);
3891 ep
->previous_offset
= ep
->offset
= ep
->initial_offset
;
3894 num_not_at_initial_offset
= 0;
3897 /* Subroutine of set_initial_label_offsets called via for_each_eh_label. */
3900 set_initial_eh_label_offset (rtx label
)
3902 set_label_offsets (label
, NULL_RTX
, 1);
3905 /* Initialize the known label offsets.
3906 Set a known offset for each forced label to be at the initial offset
3907 of each elimination. We do this because we assume that all
3908 computed jumps occur from a location where each elimination is
3909 at its initial offset.
3910 For all other labels, show that we don't know the offsets. */
3913 set_initial_label_offsets (void)
3916 memset (offsets_known_at
, 0, num_labels
);
3918 for (x
= forced_labels
; x
; x
= XEXP (x
, 1))
3920 set_label_offsets (XEXP (x
, 0), NULL_RTX
, 1);
3922 for_each_eh_label (set_initial_eh_label_offset
);
3925 /* Set all elimination offsets to the known values for the code label given
3929 set_offsets_for_label (rtx insn
)
3932 int label_nr
= CODE_LABEL_NUMBER (insn
);
3933 struct elim_table
*ep
;
3935 num_not_at_initial_offset
= 0;
3936 for (i
= 0, ep
= reg_eliminate
; i
< NUM_ELIMINABLE_REGS
; ep
++, i
++)
3938 ep
->offset
= ep
->previous_offset
3939 = offsets_at
[label_nr
- first_label_num
][i
];
3940 if (ep
->can_eliminate
&& ep
->offset
!= ep
->initial_offset
)
3941 num_not_at_initial_offset
++;
3945 /* See if anything that happened changes which eliminations are valid.
3946 For example, on the SPARC, whether or not the frame pointer can
3947 be eliminated can depend on what registers have been used. We need
3948 not check some conditions again (such as flag_omit_frame_pointer)
3949 since they can't have changed. */
3952 update_eliminables (HARD_REG_SET
*pset
)
3954 int previous_frame_pointer_needed
= frame_pointer_needed
;
3955 struct elim_table
*ep
;
3957 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3958 if ((ep
->from
== HARD_FRAME_POINTER_REGNUM
3959 && targetm
.frame_pointer_required ())
3960 #ifdef ELIMINABLE_REGS
3961 || ! targetm
.can_eliminate (ep
->from
, ep
->to
)
3964 ep
->can_eliminate
= 0;
3966 /* Look for the case where we have discovered that we can't replace
3967 register A with register B and that means that we will now be
3968 trying to replace register A with register C. This means we can
3969 no longer replace register C with register B and we need to disable
3970 such an elimination, if it exists. This occurs often with A == ap,
3971 B == sp, and C == fp. */
3973 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3975 struct elim_table
*op
;
3978 if (! ep
->can_eliminate
&& ep
->can_eliminate_previous
)
3980 /* Find the current elimination for ep->from, if there is a
3982 for (op
= reg_eliminate
;
3983 op
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; op
++)
3984 if (op
->from
== ep
->from
&& op
->can_eliminate
)
3990 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
3992 for (op
= reg_eliminate
;
3993 op
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; op
++)
3994 if (op
->from
== new_to
&& op
->to
== ep
->to
)
3995 op
->can_eliminate
= 0;
3999 /* See if any registers that we thought we could eliminate the previous
4000 time are no longer eliminable. If so, something has changed and we
4001 must spill the register. Also, recompute the number of eliminable
4002 registers and see if the frame pointer is needed; it is if there is
4003 no elimination of the frame pointer that we can perform. */
4005 frame_pointer_needed
= 1;
4006 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
4008 if (ep
->can_eliminate
4009 && ep
->from
== FRAME_POINTER_REGNUM
4010 && ep
->to
!= HARD_FRAME_POINTER_REGNUM
4011 && (! SUPPORTS_STACK_ALIGNMENT
4012 || ! crtl
->stack_realign_needed
))
4013 frame_pointer_needed
= 0;
4015 if (! ep
->can_eliminate
&& ep
->can_eliminate_previous
)
4017 ep
->can_eliminate_previous
= 0;
4018 SET_HARD_REG_BIT (*pset
, ep
->from
);
4023 /* If we didn't need a frame pointer last time, but we do now, spill
4024 the hard frame pointer. */
4025 if (frame_pointer_needed
&& ! previous_frame_pointer_needed
)
4026 SET_HARD_REG_BIT (*pset
, HARD_FRAME_POINTER_REGNUM
);
4029 /* Return true if X is used as the target register of an elimination. */
4032 elimination_target_reg_p (rtx x
)
4034 struct elim_table
*ep
;
4036 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
4037 if (ep
->to_rtx
== x
&& ep
->can_eliminate
)
4043 /* Initialize the table of registers to eliminate.
4044 Pre-condition: global flag frame_pointer_needed has been set before
4045 calling this function. */
4048 init_elim_table (void)
4050 struct elim_table
*ep
;
4051 #ifdef ELIMINABLE_REGS
4052 const struct elim_table_1
*ep1
;
4056 reg_eliminate
= XCNEWVEC (struct elim_table
, NUM_ELIMINABLE_REGS
);
4060 #ifdef ELIMINABLE_REGS
4061 for (ep
= reg_eliminate
, ep1
= reg_eliminate_1
;
4062 ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++, ep1
++)
4064 ep
->from
= ep1
->from
;
4066 ep
->can_eliminate
= ep
->can_eliminate_previous
4067 = (targetm
.can_eliminate (ep
->from
, ep
->to
)
4068 && ! (ep
->to
== STACK_POINTER_REGNUM
4069 && frame_pointer_needed
4070 && (! SUPPORTS_STACK_ALIGNMENT
4071 || ! stack_realign_fp
)));
4074 reg_eliminate
[0].from
= reg_eliminate_1
[0].from
;
4075 reg_eliminate
[0].to
= reg_eliminate_1
[0].to
;
4076 reg_eliminate
[0].can_eliminate
= reg_eliminate
[0].can_eliminate_previous
4077 = ! frame_pointer_needed
;
4080 /* Count the number of eliminable registers and build the FROM and TO
4081 REG rtx's. Note that code in gen_rtx_REG will cause, e.g.,
4082 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
4083 We depend on this. */
4084 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
4086 num_eliminable
+= ep
->can_eliminate
;
4087 ep
->from_rtx
= gen_rtx_REG (Pmode
, ep
->from
);
4088 ep
->to_rtx
= gen_rtx_REG (Pmode
, ep
->to
);
4092 /* Find all the pseudo registers that didn't get hard regs
4093 but do have known equivalent constants or memory slots.
4094 These include parameters (known equivalent to parameter slots)
4095 and cse'd or loop-moved constant memory addresses.
4097 Record constant equivalents in reg_equiv_constant
4098 so they will be substituted by find_reloads.
4099 Record memory equivalents in reg_mem_equiv so they can
4100 be substituted eventually by altering the REG-rtx's. */
4103 init_eliminable_invariants (rtx first
, bool do_subregs
)
4110 reg_max_ref_width
= XCNEWVEC (unsigned int, max_regno
);
4112 reg_max_ref_width
= NULL
;
4114 num_eliminable_invariants
= 0;
4116 first_label_num
= get_first_label_num ();
4117 num_labels
= max_label_num () - first_label_num
;
4119 /* Allocate the tables used to store offset information at labels. */
4120 offsets_known_at
= XNEWVEC (char, num_labels
);
4121 offsets_at
= (HOST_WIDE_INT (*)[NUM_ELIMINABLE_REGS
]) xmalloc (num_labels
* NUM_ELIMINABLE_REGS
* sizeof (HOST_WIDE_INT
));
4123 /* Look for REG_EQUIV notes; record what each pseudo is equivalent
4124 to. If DO_SUBREGS is true, also find all paradoxical subregs and
4125 find largest such for each pseudo. FIRST is the head of the insn
4128 for (insn
= first
; insn
; insn
= NEXT_INSN (insn
))
4130 rtx set
= single_set (insn
);
4132 /* We may introduce USEs that we want to remove at the end, so
4133 we'll mark them with QImode. Make sure there are no
4134 previously-marked insns left by say regmove. */
4135 if (INSN_P (insn
) && GET_CODE (PATTERN (insn
)) == USE
4136 && GET_MODE (insn
) != VOIDmode
)
4137 PUT_MODE (insn
, VOIDmode
);
4139 if (do_subregs
&& NONDEBUG_INSN_P (insn
))
4140 scan_paradoxical_subregs (PATTERN (insn
));
4142 if (set
!= 0 && REG_P (SET_DEST (set
)))
4144 rtx note
= find_reg_note (insn
, REG_EQUIV
, NULL_RTX
);
4150 i
= REGNO (SET_DEST (set
));
4153 if (i
<= LAST_VIRTUAL_REGISTER
)
4156 /* If flag_pic and we have constant, verify it's legitimate. */
4158 || !flag_pic
|| LEGITIMATE_PIC_OPERAND_P (x
))
4160 /* It can happen that a REG_EQUIV note contains a MEM
4161 that is not a legitimate memory operand. As later
4162 stages of reload assume that all addresses found
4163 in the reg_equiv_* arrays were originally legitimate,
4164 we ignore such REG_EQUIV notes. */
4165 if (memory_operand (x
, VOIDmode
))
4167 /* Always unshare the equivalence, so we can
4168 substitute into this insn without touching the
4170 reg_equiv_memory_loc (i
) = copy_rtx (x
);
4172 else if (function_invariant_p (x
))
4174 enum machine_mode mode
;
4176 mode
= GET_MODE (SET_DEST (set
));
4177 if (GET_CODE (x
) == PLUS
)
4179 /* This is PLUS of frame pointer and a constant,
4180 and might be shared. Unshare it. */
4181 reg_equiv_invariant (i
) = copy_rtx (x
);
4182 num_eliminable_invariants
++;
4184 else if (x
== frame_pointer_rtx
|| x
== arg_pointer_rtx
)
4186 reg_equiv_invariant (i
) = x
;
4187 num_eliminable_invariants
++;
4189 else if (targetm
.legitimate_constant_p (mode
, x
))
4190 reg_equiv_constant (i
) = x
;
4193 reg_equiv_memory_loc (i
) = force_const_mem (mode
, x
);
4194 if (! reg_equiv_memory_loc (i
))
4195 reg_equiv_init (i
) = NULL_RTX
;
4200 reg_equiv_init (i
) = NULL_RTX
;
4205 reg_equiv_init (i
) = NULL_RTX
;
4210 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
4211 if (reg_equiv_init (i
))
4213 fprintf (dump_file
, "init_insns for %u: ", i
);
4214 print_inline_rtx (dump_file
, reg_equiv_init (i
), 20);
4215 fprintf (dump_file
, "\n");
4219 /* Indicate that we no longer have known memory locations or constants.
4220 Free all data involved in tracking these. */
4223 free_reg_equiv (void)
4228 free (offsets_known_at
);
4231 offsets_known_at
= 0;
4233 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
4234 if (reg_equiv_alt_mem_list (i
))
4235 free_EXPR_LIST_list (®_equiv_alt_mem_list (i
));
4236 VEC_free (reg_equivs_t
, gc
, reg_equivs
);
4241 /* Kick all pseudos out of hard register REGNO.
4243 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
4244 because we found we can't eliminate some register. In the case, no pseudos
4245 are allowed to be in the register, even if they are only in a block that
4246 doesn't require spill registers, unlike the case when we are spilling this
4247 hard reg to produce another spill register.
4249 Return nonzero if any pseudos needed to be kicked out. */
4252 spill_hard_reg (unsigned int regno
, int cant_eliminate
)
4258 SET_HARD_REG_BIT (bad_spill_regs_global
, regno
);
4259 df_set_regs_ever_live (regno
, true);
4262 /* Spill every pseudo reg that was allocated to this reg
4263 or to something that overlaps this reg. */
4265 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
4266 if (reg_renumber
[i
] >= 0
4267 && (unsigned int) reg_renumber
[i
] <= regno
4268 && end_hard_regno (PSEUDO_REGNO_MODE (i
), reg_renumber
[i
]) > regno
)
4269 SET_REGNO_REG_SET (&spilled_pseudos
, i
);
4272 /* After find_reload_regs has been run for all insn that need reloads,
4273 and/or spill_hard_regs was called, this function is used to actually
4274 spill pseudo registers and try to reallocate them. It also sets up the
4275 spill_regs array for use by choose_reload_regs. */
/* Returns nonzero (via something_changed) when another reload pass is
   required.  NOTE(review): garbled extraction -- original line numbers are
   embedded and several lines (declarations of i/n, braces, some closers)
   are missing, so not all control flow is visible here.  */
4278 finish_spills (int global
)
4280 struct insn_chain
*chain
;
4281 int something_changed
= 0;
4283 reg_set_iterator rsi
;
4285 /* Build the spill_regs array for the function. */
4286 /* If there are some registers still to eliminate and one of the spill regs
4287 wasn't ever used before, additional stack space may have to be
4288 allocated to store this register. Thus, we may have changed the offset
4289 between the stack and frame pointers, so mark that something has changed.
4291 One might think that we need only set VAL to 1 if this is a call-used
4292 register. However, the set of registers that must be saved by the
4293 prologue is not identical to the call-used set. For example, the
4294 register used by the call insn for the return PC is a call-used register,
4295 but must be saved by the prologue. */
4298 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
4299 if (TEST_HARD_REG_BIT (used_spill_regs
, i
))
4301 spill_reg_order
[i
] = n_spills
;
4302 spill_regs
[n_spills
++] = i
;
4303 if (num_eliminable
&& ! df_regs_ever_live_p (i
))
4304 something_changed
= 1;
4305 df_set_regs_ever_live (i
, true);
4308 spill_reg_order
[i
] = -1;
4310 EXECUTE_IF_SET_IN_REG_SET (&spilled_pseudos
, FIRST_PSEUDO_REGISTER
, i
, rsi
)
4311 if (! ira_conflicts_p
|| reg_renumber
[i
] >= 0)
4313 /* Record the current hard register the pseudo is allocated to
4314 in pseudo_previous_regs so we avoid reallocating it to the
4315 same hard reg in a later pass. */
4316 gcc_assert (reg_renumber
[i
] >= 0);
4318 SET_HARD_REG_BIT (pseudo_previous_regs
[i
], reg_renumber
[i
]);
4319 /* Mark it as no longer having a hard register home. */
4320 reg_renumber
[i
] = -1;
4321 if (ira_conflicts_p
)
4322 /* Inform IRA about the change. */
4323 ira_mark_allocation_change (i
);
4324 /* We will need to scan everything again. */
4325 something_changed
= 1;
4328 /* Retry global register allocation if possible. */
4329 if (global
&& ira_conflicts_p
)
4333 memset (pseudo_forbidden_regs
, 0, max_regno
* sizeof (HARD_REG_SET
));
4334 /* For every insn that needs reloads, set the registers used as spill
4335 regs in pseudo_forbidden_regs for every pseudo live across the
4337 for (chain
= insns_need_reload
; chain
; chain
= chain
->next_need_reload
)
4339 EXECUTE_IF_SET_IN_REG_SET
4340 (&chain
->live_throughout
, FIRST_PSEUDO_REGISTER
, i
, rsi
)
4342 IOR_HARD_REG_SET (pseudo_forbidden_regs
[i
],
4343 chain
->used_spill_regs
);
4345 EXECUTE_IF_SET_IN_REG_SET
4346 (&chain
->dead_or_set
, FIRST_PSEUDO_REGISTER
, i
, rsi
)
4348 IOR_HARD_REG_SET (pseudo_forbidden_regs
[i
],
4349 chain
->used_spill_regs
);
4353 /* Retry allocating the pseudos spilled in IRA and the
4354 reload. For each reg, merge the various reg sets that
4355 indicate which hard regs can't be used, and call
4356 ira_reassign_pseudos. */
/* Collect into temp_pseudo_reg_arr the pseudos whose allocation changed
   and that currently have no hard reg; hand them to ira_reassign_pseudos
   below together with the forbidden/previous hard-reg sets.  */
4357 for (n
= 0, i
= FIRST_PSEUDO_REGISTER
; i
< (unsigned) max_regno
; i
++)
4358 if (reg_old_renumber
[i
] != reg_renumber
[i
])
4360 if (reg_renumber
[i
] < 0)
4361 temp_pseudo_reg_arr
[n
++] = i
;
4363 CLEAR_REGNO_REG_SET (&spilled_pseudos
, i
);
4365 if (ira_reassign_pseudos (temp_pseudo_reg_arr
, n
,
4366 bad_spill_regs_global
,
4367 pseudo_forbidden_regs
, pseudo_previous_regs
,
4369 something_changed
= 1;
4371 /* Fix up the register information in the insn chain.
4372 This involves deleting those of the spilled pseudos which did not get
4373 a new hard register home from the live_{before,after} sets. */
4374 for (chain
= reload_insn_chain
; chain
; chain
= chain
->next
)
4376 HARD_REG_SET used_by_pseudos
;
4377 HARD_REG_SET used_by_pseudos2
;
4379 if (! ira_conflicts_p
)
4381 /* Don't do it for IRA because IRA and the reload still can
4382 assign hard registers to the spilled pseudos on next
4383 reload iterations. */
4384 AND_COMPL_REG_SET (&chain
->live_throughout
, &spilled_pseudos
);
4385 AND_COMPL_REG_SET (&chain
->dead_or_set
, &spilled_pseudos
);
4387 /* Mark any unallocated hard regs as available for spills. That
4388 makes inheritance work somewhat better. */
4389 if (chain
->need_reload
)
4391 REG_SET_TO_HARD_REG_SET (used_by_pseudos
, &chain
->live_throughout
);
4392 REG_SET_TO_HARD_REG_SET (used_by_pseudos2
, &chain
->dead_or_set
);
4393 IOR_HARD_REG_SET (used_by_pseudos
, used_by_pseudos2
);
4395 compute_use_by_pseudos (&used_by_pseudos
, &chain
->live_throughout
);
4396 compute_use_by_pseudos (&used_by_pseudos
, &chain
->dead_or_set
);
4397 /* Value of chain->used_spill_regs from previous iteration
4398 may be not included in the value calculated here because
4399 of possible removing caller-saves insns (see function
4400 delete_caller_save_insns. */
4401 COMPL_HARD_REG_SET (chain
->used_spill_regs
, used_by_pseudos
);
4402 AND_HARD_REG_SET (chain
->used_spill_regs
, used_spill_regs
);
4406 CLEAR_REG_SET (&changed_allocation_pseudos
);
4407 /* Let alter_reg modify the reg rtx's for the modified pseudos. */
/* Pseudos whose renumbering changed are recorded and re-materialized via
   alter_reg; the dump messages below report the new location.  */
4408 for (i
= FIRST_PSEUDO_REGISTER
; i
< (unsigned)max_regno
; i
++)
4410 int regno
= reg_renumber
[i
];
4411 if (reg_old_renumber
[i
] == regno
)
4414 SET_REGNO_REG_SET (&changed_allocation_pseudos
, i
);
4416 alter_reg (i
, reg_old_renumber
[i
], false);
4417 reg_old_renumber
[i
] = regno
;
4421 fprintf (dump_file
, " Register %d now on stack.\n\n", i
);
4423 fprintf (dump_file
, " Register %d now in %d.\n\n",
4424 i
, reg_renumber
[i
]);
4428 return something_changed
;
4431 /* Find all paradoxical subregs within X and update reg_max_ref_width. */
/* Recursive rtx walk: for a paradoxical SUBREG of a REG, widen
   reg_max_ref_width for the inner register to the outer mode's size and
   mark the register's home live in that mode (mark_home_live_1); all other
   sub-rtxes are recursed into via the 'e'/'E' format letters.
   NOTE(review): garbled extraction -- the switch head and several case
   labels are missing from this view.  */
4434 scan_paradoxical_subregs (rtx x
)
4438 enum rtx_code code
= GET_CODE (x
);
4449 case CONST_VECTOR
: /* shouldn't happen, but just in case. */
4457 if (REG_P (SUBREG_REG (x
))
4458 && (GET_MODE_SIZE (GET_MODE (x
))
4459 > reg_max_ref_width
[REGNO (SUBREG_REG (x
))]))
4461 reg_max_ref_width
[REGNO (SUBREG_REG (x
))]
4462 = GET_MODE_SIZE (GET_MODE (x
));
4463 mark_home_live_1 (REGNO (SUBREG_REG (x
)), GET_MODE (x
));
4471 fmt
= GET_RTX_FORMAT (code
);
4472 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
4475 scan_paradoxical_subregs (XEXP (x
, i
));
4476 else if (fmt
[i
] == 'E')
4479 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
4480 scan_paradoxical_subregs (XVECEXP (x
, i
, j
));
4485 /* *OP_PTR and *OTHER_PTR are two operands to a conceptual reload.
4486 If *OP_PTR is a paradoxical subreg, try to remove that subreg
4487 and apply the corresponding narrowing subreg to *OTHER_PTR.
4488 Return true if the operands were changed, false otherwise. */
/* Uses gen_lowpart_common to build the narrowed counterpart of *OTHER_PTR
   in the inner mode.  NOTE(review): garbled extraction -- the early-return
   paths and the final stores through op_ptr/other_ptr are missing from
   this view.  */
4491 strip_paradoxical_subreg (rtx
*op_ptr
, rtx
*other_ptr
)
4493 rtx op
, inner
, other
, tem
;
4496 if (!paradoxical_subreg_p (op
))
4498 inner
= SUBREG_REG (op
);
4501 tem
= gen_lowpart_common (GET_MODE (inner
), other
);
4505 /* If the lowpart operation turned a hard register into a subreg,
4506 rather than simplifying it to another hard register, then the
4507 mode change cannot be properly represented. For example, OTHER
4508 might be valid in its current mode, but not in the new one. */
4509 if (GET_CODE (tem
) == SUBREG
4511 && HARD_REGISTER_P (other
))
4519 /* A subroutine of reload_as_needed. If INSN has a REG_EH_REGION note,
4520 examine all of the reload insns between PREV and NEXT exclusive, and
4521 annotate all that may trap. */
/* The note is removed when INSN itself can no longer throw; otherwise it
   is propagated forward over the freshly emitted reload insns via
   copy_reg_eh_region_note_forward.  NOTE(review): garbled extraction --
   the early return for a missing note is not visible here.  */
4524 fixup_eh_region_note (rtx insn
, rtx prev
, rtx next
)
4526 rtx note
= find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
);
4529 if (!insn_could_throw_p (insn
))
4530 remove_note (insn
, note
);
4531 copy_reg_eh_region_note_forward (note
, NEXT_INSN (prev
), next
);
/* NOTE(review): garbled extraction of the reload_as_needed driver --
   original source line numbers are embedded as bare tokens and many lines
   (braces, local declarations, some comment closers) are missing from this
   view.  The comments below annotate only what is visible.  */
4534 /* Reload pseudo-registers into hard regs around each insn as needed.
4535 Additional register load insns are output before the insn that needs it
4536 and perhaps store insns after insns that modify the reloaded pseudo reg.
4538 reg_last_reload_reg and reg_reloaded_contents keep track of
4539 which registers are already available in reload registers.
4540 We update these for the reloads that we perform,
4541 as the insns are scanned. */
4544 reload_as_needed (int live_known
)
4546 struct insn_chain
*chain
;
4547 #if defined (AUTO_INC_DEC)
4552 memset (spill_reg_rtx
, 0, sizeof spill_reg_rtx
);
4553 memset (spill_reg_store
, 0, sizeof spill_reg_store
);
4554 reg_last_reload_reg
= XCNEWVEC (rtx
, max_regno
);
4555 INIT_REG_SET (®_has_output_reload
);
4556 CLEAR_HARD_REG_SET (reg_reloaded_valid
);
4557 CLEAR_HARD_REG_SET (reg_reloaded_call_part_clobbered
);
4559 set_initial_elim_offsets ();
4561 /* Generate a marker insn that we will move around. */
4562 marker
= emit_note (NOTE_INSN_DELETED
);
4563 unlink_insn_chain (marker
, marker
);
4565 for (chain
= reload_insn_chain
; chain
; chain
= chain
->next
)
4568 rtx insn
= chain
->insn
;
4569 rtx old_next
= NEXT_INSN (insn
);
4571 rtx old_prev
= PREV_INSN (insn
);
4574 /* If we pass a label, copy the offsets from the label information
4575 into the current offsets of each elimination. */
4577 set_offsets_for_label (insn
);
4579 else if (INSN_P (insn
))
4581 regset_head regs_to_forget
;
4582 INIT_REG_SET (®s_to_forget
);
4583 note_stores (PATTERN (insn
), forget_old_reloads_1
, ®s_to_forget
);
4585 /* If this is a USE and CLOBBER of a MEM, ensure that any
4586 references to eliminable registers have been removed. */
4588 if ((GET_CODE (PATTERN (insn
)) == USE
4589 || GET_CODE (PATTERN (insn
)) == CLOBBER
)
4590 && MEM_P (XEXP (PATTERN (insn
), 0)))
4591 XEXP (XEXP (PATTERN (insn
), 0), 0)
4592 = eliminate_regs (XEXP (XEXP (PATTERN (insn
), 0), 0),
4593 GET_MODE (XEXP (PATTERN (insn
), 0)),
4596 /* If we need to do register elimination processing, do so.
4597 This might delete the insn, in which case we are done. */
4598 if ((num_eliminable
|| num_eliminable_invariants
) && chain
->need_elim
)
4600 eliminate_regs_in_insn (insn
, 1);
4603 update_eliminable_offsets ();
4604 CLEAR_REG_SET (®s_to_forget
);
4609 /* If need_elim is nonzero but need_reload is zero, one might think
4610 that we could simply set n_reloads to 0. However, find_reloads
4611 could have done some manipulation of the insn (such as swapping
4612 commutative operands), and these manipulations are lost during
4613 the first pass for every insn that needs register elimination.
4614 So the actions of find_reloads must be redone here. */
4616 if (! chain
->need_elim
&& ! chain
->need_reload
4617 && ! chain
->need_operand_change
)
4619 /* First find the pseudo regs that must be reloaded for this insn.
4620 This info is returned in the tables reload_... (see reload.h).
4621 Also modify the body of INSN by substituting RELOAD
4622 rtx's for those pseudo regs. */
4625 CLEAR_REG_SET (®_has_output_reload
);
4626 CLEAR_HARD_REG_SET (reg_is_output_reload
);
4628 find_reloads (insn
, 1, spill_indirect_levels
, live_known
,
4634 rtx next
= NEXT_INSN (insn
);
4637 /* ??? PREV can get deleted by reload inheritance.
4638 Work around this by emitting a marker note. */
4639 prev
= PREV_INSN (insn
);
4640 reorder_insns_nobb (marker
, marker
, prev
);
4642 /* Now compute which reload regs to reload them into. Perhaps
4643 reusing reload regs from previous insns, or else output
4644 load insns to reload them. Maybe output store insns too.
4645 Record the choices of reload reg in reload_reg_rtx. */
4646 choose_reload_regs (chain
);
4648 /* Generate the insns to reload operands into or out of
4649 their reload regs. */
4650 emit_reload_insns (chain
);
4652 /* Substitute the chosen reload regs from reload_reg_rtx
4653 into the insn's body (or perhaps into the bodies of other
4654 load and store insn that we just made for reloading
4655 and that we moved the structure into). */
4656 subst_reloads (insn
);
4658 prev
= PREV_INSN (marker
);
4659 unlink_insn_chain (marker
, marker
);
4661 /* Adjust the exception region notes for loads and stores. */
4662 if (cfun
->can_throw_non_call_exceptions
&& !CALL_P (insn
))
4663 fixup_eh_region_note (insn
, prev
, next
);
4665 /* Adjust the location of REG_ARGS_SIZE. */
4666 p
= find_reg_note (insn
, REG_ARGS_SIZE
, NULL_RTX
);
4669 remove_note (insn
, p
);
4670 fixup_args_size_notes (prev
, PREV_INSN (next
),
4671 INTVAL (XEXP (p
, 0)));
4674 /* If this was an ASM, make sure that all the reload insns
4675 we have generated are valid. If not, give an error
4677 if (asm_noperands (PATTERN (insn
)) >= 0)
4678 for (p
= NEXT_INSN (prev
); p
!= next
; p
= NEXT_INSN (p
))
4679 if (p
!= insn
&& INSN_P (p
)
4680 && GET_CODE (PATTERN (p
)) != USE
4681 && (recog_memoized (p
) < 0
4682 || (extract_insn (p
), ! constrain_operands (1))))
4684 error_for_asm (insn
,
4685 "%<asm%> operand requires "
4686 "impossible reload");
4691 if (num_eliminable
&& chain
->need_elim
)
4692 update_eliminable_offsets ();
4694 /* Any previously reloaded spilled pseudo reg, stored in this insn,
4695 is no longer validly lying around to save a future reload.
4696 Note that this does not detect pseudos that were reloaded
4697 for this insn in order to be stored in
4698 (obeying register constraints). That is correct; such reload
4699 registers ARE still valid. */
4700 forget_marked_reloads (®s_to_forget
);
4701 CLEAR_REG_SET (®s_to_forget
);
4703 /* There may have been CLOBBER insns placed after INSN. So scan
4704 between INSN and NEXT and use them to forget old reloads. */
4705 for (x
= NEXT_INSN (insn
); x
!= old_next
; x
= NEXT_INSN (x
))
4706 if (NONJUMP_INSN_P (x
) && GET_CODE (PATTERN (x
)) == CLOBBER
)
4707 note_stores (PATTERN (x
), forget_old_reloads_1
, NULL
);
4710 /* Likewise for regs altered by auto-increment in this insn.
4711 REG_INC notes have been changed by reloading:
4712 find_reloads_address_1 records substitutions for them,
4713 which have been performed by subst_reloads above. */
/* The loop below walks rld[] backwards, handling POST_INC/POST_DEC and
   PRE_INC/PRE_DEC reloads so that inheritance stays consistent with the
   incremented/decremented pseudo.  */
4714 for (i
= n_reloads
- 1; i
>= 0; i
--)
4716 rtx in_reg
= rld
[i
].in_reg
;
4719 enum rtx_code code
= GET_CODE (in_reg
);
4720 /* PRE_INC / PRE_DEC will have the reload register ending up
4721 with the same value as the stack slot, but that doesn't
4722 hold true for POST_INC / POST_DEC. Either we have to
4723 convert the memory access to a true POST_INC / POST_DEC,
4724 or we can't use the reload register for inheritance. */
4725 if ((code
== POST_INC
|| code
== POST_DEC
)
4726 && TEST_HARD_REG_BIT (reg_reloaded_valid
,
4727 REGNO (rld
[i
].reg_rtx
))
4728 /* Make sure it is the inc/dec pseudo, and not
4729 some other (e.g. output operand) pseudo. */
4730 && ((unsigned) reg_reloaded_contents
[REGNO (rld
[i
].reg_rtx
)]
4731 == REGNO (XEXP (in_reg
, 0))))
4734 rtx reload_reg
= rld
[i
].reg_rtx
;
4735 enum machine_mode mode
= GET_MODE (reload_reg
);
4739 for (p
= PREV_INSN (old_next
); p
!= prev
; p
= PREV_INSN (p
))
4741 /* We really want to ignore REG_INC notes here, so
4742 use PATTERN (p) as argument to reg_set_p . */
4743 if (reg_set_p (reload_reg
, PATTERN (p
)))
4745 n
= count_occurrences (PATTERN (p
), reload_reg
, 0);
4751 = gen_rtx_fmt_e (code
, mode
, reload_reg
);
4753 validate_replace_rtx_group (reload_reg
,
4755 n
= verify_changes (0);
4757 /* We must also verify that the constraints
4758 are met after the replacement. Make sure
4759 extract_insn is only called for an insn
4760 where the replacements were found to be
4765 n
= constrain_operands (1);
4768 /* If the constraints were not met, then
4769 undo the replacement, else confirm it. */
4773 confirm_change_group ();
4779 add_reg_note (p
, REG_INC
, reload_reg
);
4780 /* Mark this as having an output reload so that the
4781 REG_INC processing code below won't invalidate
4782 the reload for inheritance. */
4783 SET_HARD_REG_BIT (reg_is_output_reload
,
4784 REGNO (reload_reg
));
4785 SET_REGNO_REG_SET (®_has_output_reload
,
4786 REGNO (XEXP (in_reg
, 0)));
4789 forget_old_reloads_1 (XEXP (in_reg
, 0), NULL_RTX
,
4792 else if ((code
== PRE_INC
|| code
== PRE_DEC
)
4793 && TEST_HARD_REG_BIT (reg_reloaded_valid
,
4794 REGNO (rld
[i
].reg_rtx
))
4795 /* Make sure it is the inc/dec pseudo, and not
4796 some other (e.g. output operand) pseudo. */
4797 && ((unsigned) reg_reloaded_contents
[REGNO (rld
[i
].reg_rtx
)]
4798 == REGNO (XEXP (in_reg
, 0))))
4800 SET_HARD_REG_BIT (reg_is_output_reload
,
4801 REGNO (rld
[i
].reg_rtx
));
4802 SET_REGNO_REG_SET (®_has_output_reload
,
4803 REGNO (XEXP (in_reg
, 0)));
4805 else if (code
== PRE_INC
|| code
== PRE_DEC
4806 || code
== POST_INC
|| code
== POST_DEC
)
4808 int in_regno
= REGNO (XEXP (in_reg
, 0));
4810 if (reg_last_reload_reg
[in_regno
] != NULL_RTX
)
4813 bool forget_p
= true;
4815 in_hard_regno
= REGNO (reg_last_reload_reg
[in_regno
]);
4816 if (TEST_HARD_REG_BIT (reg_reloaded_valid
,
4819 for (x
= old_prev
? NEXT_INSN (old_prev
) : insn
;
4822 if (x
== reg_reloaded_insn
[in_hard_regno
])
4828 /* If for some reasons, we didn't set up
4829 reg_last_reload_reg in this insn,
4830 invalidate inheritance from previous
4831 insns for the incremented/decremented
4832 register. Such registers will be not in
4833 reg_has_output_reload. Invalidate it
4834 also if the corresponding element in
4835 reg_reloaded_insn is also
4838 forget_old_reloads_1 (XEXP (in_reg
, 0),
4844 /* If a pseudo that got a hard register is auto-incremented,
4845 we must purge records of copying it into pseudos without
4847 for (x
= REG_NOTES (insn
); x
; x
= XEXP (x
, 1))
4848 if (REG_NOTE_KIND (x
) == REG_INC
)
4850 /* See if this pseudo reg was reloaded in this insn.
4851 If so, its last-reload info is still valid
4852 because it is based on this insn's reload. */
4853 for (i
= 0; i
< n_reloads
; i
++)
4854 if (rld
[i
].out
== XEXP (x
, 0))
4858 forget_old_reloads_1 (XEXP (x
, 0), NULL_RTX
, NULL
);
4862 /* A reload reg's contents are unknown after a label. */
4864 CLEAR_HARD_REG_SET (reg_reloaded_valid
);
4866 /* Don't assume a reload reg is still good after a call insn
4867 if it is a call-used reg, or if it contains a value that will
4868 be partially clobbered by the call. */
4869 else if (CALL_P (insn
))
4871 AND_COMPL_HARD_REG_SET (reg_reloaded_valid
, call_used_reg_set
);
4872 AND_COMPL_HARD_REG_SET (reg_reloaded_valid
, reg_reloaded_call_part_clobbered
);
4874 /* If this is a call to a setjmp-type function, we must not
4875 reuse any reload reg contents across the call; that will
4876 just be clobbered by other uses of the register in later
4877 code, before the longjmp. */
4878 if (find_reg_note (insn
, REG_SETJMP
, NULL_RTX
))
4879 CLEAR_HARD_REG_SET (reg_reloaded_valid
);
4884 free (reg_last_reload_reg
);
4885 CLEAR_REG_SET (®_has_output_reload
);
4888 /* Discard all record of any value reloaded from X,
4889 or reloaded in X from someplace else;
4890 unless X is an output reload reg of the current insn.
4892 X may be a hard reg (the reload reg)
4893 or it may be a pseudo reg that was reloaded from.
4895 When DATA is non-NULL just mark the registers in regset
4896 to be forgotten later. */
/* With DATA == NULL the invalidation happens immediately
   (reg_reloaded_valid / spill_reg_store / reg_last_reload_reg cleared
   below); with DATA non-NULL the regnos are only recorded in the regset
   for forget_marked_reloads to process later.  NOTE(review): garbled
   extraction -- the regno/nr computation for the pseudo case and several
   conditional heads are missing from this view.  */
4899 forget_old_reloads_1 (rtx x
, const_rtx ignored ATTRIBUTE_UNUSED
,
4904 regset regs
= (regset
) data
;
4906 /* note_stores does give us subregs of hard regs,
4907 subreg_regno_offset requires a hard reg. */
4908 while (GET_CODE (x
) == SUBREG
)
4910 /* We ignore the subreg offset when calculating the regno,
4911 because we are using the entire underlying hard register
4921 if (regno
>= FIRST_PSEUDO_REGISTER
)
4927 nr
= hard_regno_nregs
[regno
][GET_MODE (x
)];
4928 /* Storing into a spilled-reg invalidates its contents.
4929 This can happen if a block-local pseudo is allocated to that reg
4930 and it wasn't spilled because this block's total need is 0.
4931 Then some insn might have an optional reload and use this reg. */
4933 for (i
= 0; i
< nr
; i
++)
4934 /* But don't do this if the reg actually serves as an output
4935 reload reg in the current instruction. */
4937 || ! TEST_HARD_REG_BIT (reg_is_output_reload
, regno
+ i
))
4939 CLEAR_HARD_REG_BIT (reg_reloaded_valid
, regno
+ i
);
4940 spill_reg_store
[regno
+ i
] = 0;
4946 SET_REGNO_REG_SET (regs
, regno
+ nr
);
4949 /* Since value of X has changed,
4950 forget any value previously copied from it. */
4953 /* But don't forget a copy if this is the output reload
4954 that establishes the copy's validity. */
4956 || !REGNO_REG_SET_P (®_has_output_reload
, regno
+ nr
))
4957 reg_last_reload_reg
[regno
+ nr
] = 0;
4961 /* Forget the reloads marked in regset by previous function. */
/* Companion to forget_old_reloads_1: performs the deferred invalidation
   for every regno recorded in REGS, skipping registers that serve as an
   output reload in the current insn.  */
4963 forget_marked_reloads (regset regs
)
4966 reg_set_iterator rsi
;
4967 EXECUTE_IF_SET_IN_REG_SET (regs
, 0, reg
, rsi
)
4969 if (reg
< FIRST_PSEUDO_REGISTER
4970 /* But don't do this if the reg actually serves as an output
4971 reload reg in the current instruction. */
4973 || ! TEST_HARD_REG_BIT (reg_is_output_reload
, reg
)))
4975 CLEAR_HARD_REG_BIT (reg_reloaded_valid
, reg
);
4976 spill_reg_store
[reg
] = 0;
4979 || !REGNO_REG_SET_P (®_has_output_reload
, reg
))
4980 reg_last_reload_reg
[reg
] = 0;
/* Per-insn bookkeeping state: one HARD_REG_SET per reload type (and per
   operand for the operand-indexed kinds), consulted by the availability
   predicates below (reload_reg_free_p etc.).  */
4984 /* The following HARD_REG_SETs indicate when each hard register is
4985 used for a reload of various parts of the current insn. */
4987 /* If reg is unavailable for all reloads. */
4988 static HARD_REG_SET reload_reg_unavailable
;
4989 /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4990 static HARD_REG_SET reload_reg_used
;
4991 /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4992 static HARD_REG_SET reload_reg_used_in_input_addr
[MAX_RECOG_OPERANDS
];
4993 /* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I. */
4994 static HARD_REG_SET reload_reg_used_in_inpaddr_addr
[MAX_RECOG_OPERANDS
];
4995 /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4996 static HARD_REG_SET reload_reg_used_in_output_addr
[MAX_RECOG_OPERANDS
];
4997 /* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I. */
4998 static HARD_REG_SET reload_reg_used_in_outaddr_addr
[MAX_RECOG_OPERANDS
];
4999 /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
5000 static HARD_REG_SET reload_reg_used_in_input
[MAX_RECOG_OPERANDS
];
5001 /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
5002 static HARD_REG_SET reload_reg_used_in_output
[MAX_RECOG_OPERANDS
];
5003 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
5004 static HARD_REG_SET reload_reg_used_in_op_addr
;
5005 /* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
5006 static HARD_REG_SET reload_reg_used_in_op_addr_reload
;
5007 /* If reg is in use for a RELOAD_FOR_INSN reload. */
5008 static HARD_REG_SET reload_reg_used_in_insn
;
5009 /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
5010 static HARD_REG_SET reload_reg_used_in_other_addr
;
5012 /* If reg is in use as a reload reg for any sort of reload. */
5013 static HARD_REG_SET reload_reg_used_at_all
;
5015 /* If reg is use as an inherited reload. We just mark the first register
5017 static HARD_REG_SET reload_reg_used_for_inherit
;
5019 /* Records which hard regs are used in any way, either as explicit use or
5020 by being allocated to a pseudo during any point of the current insn. */
5021 static HARD_REG_SET reg_used_in_insn
;
/* mark_reload_reg_in_use: set the bit(s) for REGNO (widened to MODE's full
   span by add_to_hard_reg_set) in the reload_reg_used_* set selected by
   TYPE/OPNUM, and unconditionally in reload_reg_used_at_all at the end.
   NOTE(review): garbled extraction -- the switch head, break statements
   and the closing of the leading comment are missing from this view, so
   no further comments are inserted inside the body.  */
5023 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
5024 TYPE. MODE is used to indicate how many consecutive regs are
5028 mark_reload_reg_in_use (unsigned int regno
, int opnum
, enum reload_type type
,
5029 enum machine_mode mode
)
5034 add_to_hard_reg_set (&reload_reg_used
, mode
, regno
);
5037 case RELOAD_FOR_INPUT_ADDRESS
:
5038 add_to_hard_reg_set (&reload_reg_used_in_input_addr
[opnum
], mode
, regno
);
5041 case RELOAD_FOR_INPADDR_ADDRESS
:
5042 add_to_hard_reg_set (&reload_reg_used_in_inpaddr_addr
[opnum
], mode
, regno
);
5045 case RELOAD_FOR_OUTPUT_ADDRESS
:
5046 add_to_hard_reg_set (&reload_reg_used_in_output_addr
[opnum
], mode
, regno
);
5049 case RELOAD_FOR_OUTADDR_ADDRESS
:
5050 add_to_hard_reg_set (&reload_reg_used_in_outaddr_addr
[opnum
], mode
, regno
);
5053 case RELOAD_FOR_OPERAND_ADDRESS
:
5054 add_to_hard_reg_set (&reload_reg_used_in_op_addr
, mode
, regno
);
5057 case RELOAD_FOR_OPADDR_ADDR
:
5058 add_to_hard_reg_set (&reload_reg_used_in_op_addr_reload
, mode
, regno
);
5061 case RELOAD_FOR_OTHER_ADDRESS
:
5062 add_to_hard_reg_set (&reload_reg_used_in_other_addr
, mode
, regno
);
5065 case RELOAD_FOR_INPUT
:
5066 add_to_hard_reg_set (&reload_reg_used_in_input
[opnum
], mode
, regno
);
5069 case RELOAD_FOR_OUTPUT
:
5070 add_to_hard_reg_set (&reload_reg_used_in_output
[opnum
], mode
, regno
);
5073 case RELOAD_FOR_INSN
:
5074 add_to_hard_reg_set (&reload_reg_used_in_insn
, mode
, regno
);
5078 add_to_hard_reg_set (&reload_reg_used_at_all
, mode
, regno
);
5081 /* Similarly, but show REGNO is no longer in use for a reload. */
5084 clear_reload_reg_in_use (unsigned int regno
, int opnum
,
5085 enum reload_type type
, enum machine_mode mode
)
5087 unsigned int nregs
= hard_regno_nregs
[regno
][mode
];
5088 unsigned int start_regno
, end_regno
, r
;
5090 /* A complication is that for some reload types, inheritance might
5091 allow multiple reloads of the same types to share a reload register.
5092 We set check_opnum if we have to check only reloads with the same
5093 operand number, and check_any if we have to check all reloads. */
5094 int check_opnum
= 0;
5096 HARD_REG_SET
*used_in_set
;
5101 used_in_set
= &reload_reg_used
;
5104 case RELOAD_FOR_INPUT_ADDRESS
:
5105 used_in_set
= &reload_reg_used_in_input_addr
[opnum
];
5108 case RELOAD_FOR_INPADDR_ADDRESS
:
5110 used_in_set
= &reload_reg_used_in_inpaddr_addr
[opnum
];
5113 case RELOAD_FOR_OUTPUT_ADDRESS
:
5114 used_in_set
= &reload_reg_used_in_output_addr
[opnum
];
5117 case RELOAD_FOR_OUTADDR_ADDRESS
:
5119 used_in_set
= &reload_reg_used_in_outaddr_addr
[opnum
];
5122 case RELOAD_FOR_OPERAND_ADDRESS
:
5123 used_in_set
= &reload_reg_used_in_op_addr
;
5126 case RELOAD_FOR_OPADDR_ADDR
:
5128 used_in_set
= &reload_reg_used_in_op_addr_reload
;
5131 case RELOAD_FOR_OTHER_ADDRESS
:
5132 used_in_set
= &reload_reg_used_in_other_addr
;
5136 case RELOAD_FOR_INPUT
:
5137 used_in_set
= &reload_reg_used_in_input
[opnum
];
5140 case RELOAD_FOR_OUTPUT
:
5141 used_in_set
= &reload_reg_used_in_output
[opnum
];
5144 case RELOAD_FOR_INSN
:
5145 used_in_set
= &reload_reg_used_in_insn
;
5150 /* We resolve conflicts with remaining reloads of the same type by
5151 excluding the intervals of reload registers by them from the
5152 interval of freed reload registers. Since we only keep track of
5153 one set of interval bounds, we might have to exclude somewhat
5154 more than what would be necessary if we used a HARD_REG_SET here.
5155 But this should only happen very infrequently, so there should
5156 be no reason to worry about it. */
5158 start_regno
= regno
;
5159 end_regno
= regno
+ nregs
;
5160 if (check_opnum
|| check_any
)
5162 for (i
= n_reloads
- 1; i
>= 0; i
--)
5164 if (rld
[i
].when_needed
== type
5165 && (check_any
|| rld
[i
].opnum
== opnum
)
5168 unsigned int conflict_start
= true_regnum (rld
[i
].reg_rtx
);
5169 unsigned int conflict_end
5170 = end_hard_regno (rld
[i
].mode
, conflict_start
);
5172 /* If there is an overlap with the first to-be-freed register,
5173 adjust the interval start. */
5174 if (conflict_start
<= start_regno
&& conflict_end
> start_regno
)
5175 start_regno
= conflict_end
;
5176 /* Otherwise, if there is a conflict with one of the other
5177 to-be-freed registers, adjust the interval end. */
5178 if (conflict_start
> start_regno
&& conflict_start
< end_regno
)
5179 end_regno
= conflict_start
;
5184 for (r
= start_regno
; r
< end_regno
; r
++)
5185 CLEAR_HARD_REG_BIT (*used_in_set
, r
);
5188 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
5189 specified by OPNUM and TYPE. */
/* Pure predicate over the reload_reg_used_* sets: decides whether REGNO
   is free for a reload of kind TYPE for operand OPNUM, according to the
   relative lifetimes of the reload types within a single insn.
   NOTE(review): garbled extraction -- the switch head, return statements
   of several branches and the closing of the function are missing from
   this view.  */
5192 reload_reg_free_p (unsigned int regno
, int opnum
, enum reload_type type
)
5196 /* In use for a RELOAD_OTHER means it's not available for anything. */
5197 if (TEST_HARD_REG_BIT (reload_reg_used
, regno
)
5198 || TEST_HARD_REG_BIT (reload_reg_unavailable
, regno
))
5204 /* In use for anything means we can't use it for RELOAD_OTHER. */
5205 if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr
, regno
)
5206 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr
, regno
)
5207 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload
, regno
)
5208 || TEST_HARD_REG_BIT (reload_reg_used_in_insn
, regno
))
5211 for (i
= 0; i
< reload_n_operands
; i
++)
5212 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr
[i
], regno
)
5213 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr
[i
], regno
)
5214 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr
[i
], regno
)
5215 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr
[i
], regno
)
5216 || TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
)
5217 || TEST_HARD_REG_BIT (reload_reg_used_in_output
[i
], regno
))
5222 case RELOAD_FOR_INPUT
:
5223 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn
, regno
)
5224 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr
, regno
))
5227 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload
, regno
))
5230 /* If it is used for some other input, can't use it. */
5231 for (i
= 0; i
< reload_n_operands
; i
++)
5232 if (TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
))
5235 /* If it is used in a later operand's address, can't use it. */
5236 for (i
= opnum
+ 1; i
< reload_n_operands
; i
++)
5237 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr
[i
], regno
)
5238 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr
[i
], regno
))
5243 case RELOAD_FOR_INPUT_ADDRESS
:
5244 /* Can't use a register if it is used for an input address for this
5245 operand or used as an input in an earlier one. */
5246 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr
[opnum
], regno
)
5247 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr
[opnum
], regno
))
5250 for (i
= 0; i
< opnum
; i
++)
5251 if (TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
))
5256 case RELOAD_FOR_INPADDR_ADDRESS
:
5257 /* Can't use a register if it is used for an input address
5258 for this operand or used as an input in an earlier
5260 if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr
[opnum
], regno
))
5263 for (i
= 0; i
< opnum
; i
++)
5264 if (TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
))
5269 case RELOAD_FOR_OUTPUT_ADDRESS
:
5270 /* Can't use a register if it is used for an output address for this
5271 operand or used as an output in this or a later operand. Note
5272 that multiple output operands are emitted in reverse order, so
5273 the conflicting ones are those with lower indices. */
5274 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr
[opnum
], regno
))
5277 for (i
= 0; i
<= opnum
; i
++)
5278 if (TEST_HARD_REG_BIT (reload_reg_used_in_output
[i
], regno
))
5283 case RELOAD_FOR_OUTADDR_ADDRESS
:
5284 /* Can't use a register if it is used for an output address
5285 for this operand or used as an output in this or a
5286 later operand. Note that multiple output operands are
5287 emitted in reverse order, so the conflicting ones are
5288 those with lower indices. */
5289 if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr
[opnum
], regno
))
5292 for (i
= 0; i
<= opnum
; i
++)
5293 if (TEST_HARD_REG_BIT (reload_reg_used_in_output
[i
], regno
))
5298 case RELOAD_FOR_OPERAND_ADDRESS
:
5299 for (i
= 0; i
< reload_n_operands
; i
++)
5300 if (TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
))
5303 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn
, regno
)
5304 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr
, regno
));
5306 case RELOAD_FOR_OPADDR_ADDR
:
5307 for (i
= 0; i
< reload_n_operands
; i
++)
5308 if (TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
))
5311 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload
, regno
));
5313 case RELOAD_FOR_OUTPUT
:
5314 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
5315 outputs, or an operand address for this or an earlier output.
5316 Note that multiple output operands are emitted in reverse order,
5317 so the conflicting ones are those with higher indices. */
5318 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn
, regno
))
5321 for (i
= 0; i
< reload_n_operands
; i
++)
5322 if (TEST_HARD_REG_BIT (reload_reg_used_in_output
[i
], regno
))
5325 for (i
= opnum
; i
< reload_n_operands
; i
++)
5326 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr
[i
], regno
)
5327 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr
[i
], regno
))
5332 case RELOAD_FOR_INSN
:
5333 for (i
= 0; i
< reload_n_operands
; i
++)
5334 if (TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
)
5335 || TEST_HARD_REG_BIT (reload_reg_used_in_output
[i
], regno
))
5338 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn
, regno
)
5339 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr
, regno
));
5341 case RELOAD_FOR_OTHER_ADDRESS
:
5342 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr
, regno
);
5349 /* Return 1 if the value in reload reg REGNO, as used by a reload
5350 needed for the part of the insn specified by OPNUM and TYPE,
5351 is still available in REGNO at the end of the insn.
5353 We can assume that the reload reg was already tested for availability
5354 at the time it is needed, and we should not check this again,
5355 in case the reg has already been marked in use. */
5358 reload_reg_reaches_end_p (unsigned int regno
, int opnum
, enum reload_type type
)
5365 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
5366 its value must reach the end. */
5369 /* If this use is for part of the insn,
5370 its value reaches if no subsequent part uses the same register.
5371 Just like the above function, don't try to do this with lots
5374 case RELOAD_FOR_OTHER_ADDRESS
:
5375 /* Here we check for everything else, since these don't conflict
5376 with anything else and everything comes later. */
5378 for (i
= 0; i
< reload_n_operands
; i
++)
5379 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr
[i
], regno
)
5380 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr
[i
], regno
)
5381 || TEST_HARD_REG_BIT (reload_reg_used_in_output
[i
], regno
)
5382 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr
[i
], regno
)
5383 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr
[i
], regno
)
5384 || TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
))
5387 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr
, regno
)
5388 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload
, regno
)
5389 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn
, regno
)
5390 && ! TEST_HARD_REG_BIT (reload_reg_used
, regno
));
5392 case RELOAD_FOR_INPUT_ADDRESS
:
5393 case RELOAD_FOR_INPADDR_ADDRESS
:
5394 /* Similar, except that we check only for this and subsequent inputs
5395 and the address of only subsequent inputs and we do not need
5396 to check for RELOAD_OTHER objects since they are known not to
5399 for (i
= opnum
; i
< reload_n_operands
; i
++)
5400 if (TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
))
5403 for (i
= opnum
+ 1; i
< reload_n_operands
; i
++)
5404 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr
[i
], regno
)
5405 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr
[i
], regno
))
5408 for (i
= 0; i
< reload_n_operands
; i
++)
5409 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr
[i
], regno
)
5410 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr
[i
], regno
)
5411 || TEST_HARD_REG_BIT (reload_reg_used_in_output
[i
], regno
))
5414 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload
, regno
))
5417 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr
, regno
)
5418 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn
, regno
)
5419 && !TEST_HARD_REG_BIT (reload_reg_used
, regno
));
5421 case RELOAD_FOR_INPUT
:
5422 /* Similar to input address, except we start at the next operand for
5423 both input and input address and we do not check for
5424 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
5427 for (i
= opnum
+ 1; i
< reload_n_operands
; i
++)
5428 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr
[i
], regno
)
5429 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr
[i
], regno
)
5430 || TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
))
5433 /* ... fall through ... */
5435 case RELOAD_FOR_OPERAND_ADDRESS
:
5436 /* Check outputs and their addresses. */
5438 for (i
= 0; i
< reload_n_operands
; i
++)
5439 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr
[i
], regno
)
5440 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr
[i
], regno
)
5441 || TEST_HARD_REG_BIT (reload_reg_used_in_output
[i
], regno
))
5444 return (!TEST_HARD_REG_BIT (reload_reg_used
, regno
));
5446 case RELOAD_FOR_OPADDR_ADDR
:
5447 for (i
= 0; i
< reload_n_operands
; i
++)
5448 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr
[i
], regno
)
5449 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr
[i
], regno
)
5450 || TEST_HARD_REG_BIT (reload_reg_used_in_output
[i
], regno
))
5453 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr
, regno
)
5454 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn
, regno
)
5455 && !TEST_HARD_REG_BIT (reload_reg_used
, regno
));
5457 case RELOAD_FOR_INSN
:
5458 /* These conflict with other outputs with RELOAD_OTHER. So
5459 we need only check for output addresses. */
5461 opnum
= reload_n_operands
;
5463 /* ... fall through ... */
5465 case RELOAD_FOR_OUTPUT
:
5466 case RELOAD_FOR_OUTPUT_ADDRESS
:
5467 case RELOAD_FOR_OUTADDR_ADDRESS
:
5468 /* We already know these can't conflict with a later output. So the
5469 only thing to check are later output addresses.
5470 Note that multiple output operands are emitted in reverse order,
5471 so the conflicting ones are those with lower indices. */
5472 for (i
= 0; i
< opnum
; i
++)
5473 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr
[i
], regno
)
5474 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr
[i
], regno
))
5484 /* Like reload_reg_reaches_end_p, but check that the condition holds for
5485 every register in the range [REGNO, REGNO + NREGS). */
5488 reload_regs_reach_end_p (unsigned int regno
, int nregs
,
5489 int opnum
, enum reload_type type
)
5493 for (i
= 0; i
< nregs
; i
++)
5494 if (!reload_reg_reaches_end_p (regno
+ i
, opnum
, type
))
5500 /* Returns whether R1 and R2 are uniquely chained: the value of one
5501 is used by the other, and that value is not used by any other
5502 reload for this insn. This is used to partially undo the decision
5503 made in find_reloads when in the case of multiple
5504 RELOAD_FOR_OPERAND_ADDRESS reloads it converts all
5505 RELOAD_FOR_OPADDR_ADDR reloads into RELOAD_FOR_OPERAND_ADDRESS
5506 reloads. This code tries to avoid the conflict created by that
5507 change. It might be cleaner to explicitly keep track of which
5508 RELOAD_FOR_OPADDR_ADDR reload is associated with which
5509 RELOAD_FOR_OPERAND_ADDRESS reload, rather than to try to detect
5510 this after the fact. */
5512 reloads_unique_chain_p (int r1
, int r2
)
5516 /* We only check input reloads. */
5517 if (! rld
[r1
].in
|| ! rld
[r2
].in
)
5520 /* Avoid anything with output reloads. */
5521 if (rld
[r1
].out
|| rld
[r2
].out
)
5524 /* "chained" means one reload is a component of the other reload,
5525 not the same as the other reload. */
5526 if (rld
[r1
].opnum
!= rld
[r2
].opnum
5527 || rtx_equal_p (rld
[r1
].in
, rld
[r2
].in
)
5528 || rld
[r1
].optional
|| rld
[r2
].optional
5529 || ! (reg_mentioned_p (rld
[r1
].in
, rld
[r2
].in
)
5530 || reg_mentioned_p (rld
[r2
].in
, rld
[r1
].in
)))
5533 for (i
= 0; i
< n_reloads
; i
++)
5534 /* Look for input reloads that aren't our two */
5535 if (i
!= r1
&& i
!= r2
&& rld
[i
].in
)
5537 /* If our reload is mentioned at all, it isn't a simple chain. */
5538 if (reg_mentioned_p (rld
[r1
].in
, rld
[i
].in
))
5544 /* The recursive function change all occurrences of WHAT in *WHERE
5547 substitute (rtx
*where
, const_rtx what
, rtx repl
)
5556 if (*where
== what
|| rtx_equal_p (*where
, what
))
5558 /* Record the location of the changed rtx. */
5559 VEC_safe_push (rtx_p
, heap
, substitute_stack
, where
);
5564 code
= GET_CODE (*where
);
5565 fmt
= GET_RTX_FORMAT (code
);
5566 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
5572 for (j
= XVECLEN (*where
, i
) - 1; j
>= 0; j
--)
5573 substitute (&XVECEXP (*where
, i
, j
), what
, repl
);
5575 else if (fmt
[i
] == 'e')
5576 substitute (&XEXP (*where
, i
), what
, repl
);
5580 /* The function returns TRUE if chain of reload R1 and R2 (in any
5581 order) can be evaluated without usage of intermediate register for
5582 the reload containing another reload. It is important to see
5583 gen_reload to understand what the function is trying to do. As an
5584 example, let us have reload chain
5587 r1: <something> + const
5589 and reload R2 got reload reg HR. The function returns true if
5590 there is a correct insn HR = HR + <something>. Otherwise,
5591 gen_reload will use intermediate register (and this is the reload
5592 reg for R1) to reload <something>.
5594 We need this function to find a conflict for chain reloads. In our
5595 example, if HR = HR + <something> is incorrect insn, then we cannot
5596 use HR as a reload register for R2. If we do use it then we get a
5605 gen_reload_chain_without_interm_reg_p (int r1
, int r2
)
5607 /* Assume other cases in gen_reload are not possible for
5608 chain reloads or do need an intermediate hard registers. */
5612 rtx last
= get_last_insn ();
5614 /* Make r2 a component of r1. */
5615 if (reg_mentioned_p (rld
[r1
].in
, rld
[r2
].in
))
5621 gcc_assert (reg_mentioned_p (rld
[r2
].in
, rld
[r1
].in
));
5622 regno
= rld
[r1
].regno
>= 0 ? rld
[r1
].regno
: rld
[r2
].regno
;
5623 gcc_assert (regno
>= 0);
5624 out
= gen_rtx_REG (rld
[r1
].mode
, regno
);
5626 substitute (&in
, rld
[r2
].in
, gen_rtx_REG (rld
[r2
].mode
, regno
));
5628 /* If IN is a paradoxical SUBREG, remove it and try to put the
5629 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
5630 strip_paradoxical_subreg (&in
, &out
);
5632 if (GET_CODE (in
) == PLUS
5633 && (REG_P (XEXP (in
, 0))
5634 || GET_CODE (XEXP (in
, 0)) == SUBREG
5635 || MEM_P (XEXP (in
, 0)))
5636 && (REG_P (XEXP (in
, 1))
5637 || GET_CODE (XEXP (in
, 1)) == SUBREG
5638 || CONSTANT_P (XEXP (in
, 1))
5639 || MEM_P (XEXP (in
, 1))))
5641 insn
= emit_insn (gen_rtx_SET (VOIDmode
, out
, in
));
5642 code
= recog_memoized (insn
);
5647 extract_insn (insn
);
5648 /* We want constrain operands to treat this insn strictly in
5649 its validity determination, i.e., the way it would after
5650 reload has completed. */
5651 result
= constrain_operands (1);
5654 delete_insns_since (last
);
5657 /* Restore the original value at each changed address within R1. */
5658 while (!VEC_empty (rtx_p
, substitute_stack
))
5660 rtx
*where
= VEC_pop (rtx_p
, substitute_stack
);
5661 *where
= rld
[r2
].in
;
5667 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
5670 This function uses the same algorithm as reload_reg_free_p above. */
5673 reloads_conflict (int r1
, int r2
)
5675 enum reload_type r1_type
= rld
[r1
].when_needed
;
5676 enum reload_type r2_type
= rld
[r2
].when_needed
;
5677 int r1_opnum
= rld
[r1
].opnum
;
5678 int r2_opnum
= rld
[r2
].opnum
;
5680 /* RELOAD_OTHER conflicts with everything. */
5681 if (r2_type
== RELOAD_OTHER
)
5684 /* Otherwise, check conflicts differently for each type. */
5688 case RELOAD_FOR_INPUT
:
5689 return (r2_type
== RELOAD_FOR_INSN
5690 || r2_type
== RELOAD_FOR_OPERAND_ADDRESS
5691 || r2_type
== RELOAD_FOR_OPADDR_ADDR
5692 || r2_type
== RELOAD_FOR_INPUT
5693 || ((r2_type
== RELOAD_FOR_INPUT_ADDRESS
5694 || r2_type
== RELOAD_FOR_INPADDR_ADDRESS
)
5695 && r2_opnum
> r1_opnum
));
5697 case RELOAD_FOR_INPUT_ADDRESS
:
5698 return ((r2_type
== RELOAD_FOR_INPUT_ADDRESS
&& r1_opnum
== r2_opnum
)
5699 || (r2_type
== RELOAD_FOR_INPUT
&& r2_opnum
< r1_opnum
));
5701 case RELOAD_FOR_INPADDR_ADDRESS
:
5702 return ((r2_type
== RELOAD_FOR_INPADDR_ADDRESS
&& r1_opnum
== r2_opnum
)
5703 || (r2_type
== RELOAD_FOR_INPUT
&& r2_opnum
< r1_opnum
));
5705 case RELOAD_FOR_OUTPUT_ADDRESS
:
5706 return ((r2_type
== RELOAD_FOR_OUTPUT_ADDRESS
&& r2_opnum
== r1_opnum
)
5707 || (r2_type
== RELOAD_FOR_OUTPUT
&& r2_opnum
<= r1_opnum
));
5709 case RELOAD_FOR_OUTADDR_ADDRESS
:
5710 return ((r2_type
== RELOAD_FOR_OUTADDR_ADDRESS
&& r2_opnum
== r1_opnum
)
5711 || (r2_type
== RELOAD_FOR_OUTPUT
&& r2_opnum
<= r1_opnum
));
5713 case RELOAD_FOR_OPERAND_ADDRESS
:
5714 return (r2_type
== RELOAD_FOR_INPUT
|| r2_type
== RELOAD_FOR_INSN
5715 || (r2_type
== RELOAD_FOR_OPERAND_ADDRESS
5716 && (!reloads_unique_chain_p (r1
, r2
)
5717 || !gen_reload_chain_without_interm_reg_p (r1
, r2
))));
5719 case RELOAD_FOR_OPADDR_ADDR
:
5720 return (r2_type
== RELOAD_FOR_INPUT
5721 || r2_type
== RELOAD_FOR_OPADDR_ADDR
);
5723 case RELOAD_FOR_OUTPUT
:
5724 return (r2_type
== RELOAD_FOR_INSN
|| r2_type
== RELOAD_FOR_OUTPUT
5725 || ((r2_type
== RELOAD_FOR_OUTPUT_ADDRESS
5726 || r2_type
== RELOAD_FOR_OUTADDR_ADDRESS
)
5727 && r2_opnum
>= r1_opnum
));
5729 case RELOAD_FOR_INSN
:
5730 return (r2_type
== RELOAD_FOR_INPUT
|| r2_type
== RELOAD_FOR_OUTPUT
5731 || r2_type
== RELOAD_FOR_INSN
5732 || r2_type
== RELOAD_FOR_OPERAND_ADDRESS
);
5734 case RELOAD_FOR_OTHER_ADDRESS
:
5735 return r2_type
== RELOAD_FOR_OTHER_ADDRESS
;
5745 /* Indexed by reload number, 1 if incoming value
5746 inherited from previous insns. */
5747 static char reload_inherited
[MAX_RELOADS
];
5749 /* For an inherited reload, this is the insn the reload was inherited from,
5750 if we know it. Otherwise, this is 0. */
5751 static rtx reload_inheritance_insn
[MAX_RELOADS
];
5753 /* If nonzero, this is a place to get the value of the reload,
5754 rather than using reload_in. */
5755 static rtx reload_override_in
[MAX_RELOADS
];
5757 /* For each reload, the hard register number of the register used,
5758 or -1 if we did not need a register for this reload. */
5759 static int reload_spill_index
[MAX_RELOADS
];
5761 /* Index X is the value of rld[X].reg_rtx, adjusted for the input mode. */
5762 static rtx reload_reg_rtx_for_input
[MAX_RELOADS
];
5764 /* Index X is the value of rld[X].reg_rtx, adjusted for the output mode. */
5765 static rtx reload_reg_rtx_for_output
[MAX_RELOADS
];
5767 /* Subroutine of free_for_value_p, used to check a single register.
5768 START_REGNO is the starting regno of the full reload register
5769 (possibly comprising multiple hard registers) that we are considering. */
5772 reload_reg_free_for_value_p (int start_regno
, int regno
, int opnum
,
5773 enum reload_type type
, rtx value
, rtx out
,
5774 int reloadnum
, int ignore_address_reloads
)
5777 /* Set if we see an input reload that must not share its reload register
5778 with any new earlyclobber, but might otherwise share the reload
5779 register with an output or input-output reload. */
5780 int check_earlyclobber
= 0;
5784 if (TEST_HARD_REG_BIT (reload_reg_unavailable
, regno
))
5787 if (out
== const0_rtx
)
5793 /* We use some pseudo 'time' value to check if the lifetimes of the
5794 new register use would overlap with the one of a previous reload
5795 that is not read-only or uses a different value.
5796 The 'time' used doesn't have to be linear in any shape or form, just
5798 Some reload types use different 'buckets' for each operand.
5799 So there are MAX_RECOG_OPERANDS different time values for each
5801 We compute TIME1 as the time when the register for the prospective
5802 new reload ceases to be live, and TIME2 for each existing
5803 reload as the time when that the reload register of that reload
5805 Where there is little to be gained by exact lifetime calculations,
5806 we just make conservative assumptions, i.e. a longer lifetime;
5807 this is done in the 'default:' cases. */
5810 case RELOAD_FOR_OTHER_ADDRESS
:
5811 /* RELOAD_FOR_OTHER_ADDRESS conflicts with RELOAD_OTHER reloads. */
5812 time1
= copy
? 0 : 1;
5815 time1
= copy
? 1 : MAX_RECOG_OPERANDS
* 5 + 5;
5817 /* For each input, we may have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
5818 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT. By adding 0 / 1 / 2 ,
5819 respectively, to the time values for these, we get distinct time
5820 values. To get distinct time values for each operand, we have to
5821 multiply opnum by at least three. We round that up to four because
5822 multiply by four is often cheaper. */
5823 case RELOAD_FOR_INPADDR_ADDRESS
:
5824 time1
= opnum
* 4 + 2;
5826 case RELOAD_FOR_INPUT_ADDRESS
:
5827 time1
= opnum
* 4 + 3;
5829 case RELOAD_FOR_INPUT
:
5830 /* All RELOAD_FOR_INPUT reloads remain live till the instruction
5831 executes (inclusive). */
5832 time1
= copy
? opnum
* 4 + 4 : MAX_RECOG_OPERANDS
* 4 + 3;
5834 case RELOAD_FOR_OPADDR_ADDR
:
5836 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
5837 time1
= MAX_RECOG_OPERANDS
* 4 + 1;
5839 case RELOAD_FOR_OPERAND_ADDRESS
:
5840 /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
5842 time1
= copy
? MAX_RECOG_OPERANDS
* 4 + 2 : MAX_RECOG_OPERANDS
* 4 + 3;
5844 case RELOAD_FOR_OUTADDR_ADDRESS
:
5845 time1
= MAX_RECOG_OPERANDS
* 4 + 4 + opnum
;
5847 case RELOAD_FOR_OUTPUT_ADDRESS
:
5848 time1
= MAX_RECOG_OPERANDS
* 4 + 5 + opnum
;
5851 time1
= MAX_RECOG_OPERANDS
* 5 + 5;
5854 for (i
= 0; i
< n_reloads
; i
++)
5856 rtx reg
= rld
[i
].reg_rtx
;
5857 if (reg
&& REG_P (reg
)
5858 && ((unsigned) regno
- true_regnum (reg
)
5859 <= hard_regno_nregs
[REGNO (reg
)][GET_MODE (reg
)] - (unsigned) 1)
5862 rtx other_input
= rld
[i
].in
;
5864 /* If the other reload loads the same input value, that
5865 will not cause a conflict only if it's loading it into
5866 the same register. */
5867 if (true_regnum (reg
) != start_regno
)
5868 other_input
= NULL_RTX
;
5869 if (! other_input
|| ! rtx_equal_p (other_input
, value
)
5870 || rld
[i
].out
|| out
)
5873 switch (rld
[i
].when_needed
)
5875 case RELOAD_FOR_OTHER_ADDRESS
:
5878 case RELOAD_FOR_INPADDR_ADDRESS
:
5879 /* find_reloads makes sure that a
5880 RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is only used
5881 by at most one - the first -
5882 RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS . If the
5883 address reload is inherited, the address address reload
5884 goes away, so we can ignore this conflict. */
5885 if (type
== RELOAD_FOR_INPUT_ADDRESS
&& reloadnum
== i
+ 1
5886 && ignore_address_reloads
5887 /* Unless the RELOAD_FOR_INPUT is an auto_inc expression.
5888 Then the address address is still needed to store
5889 back the new address. */
5890 && ! rld
[reloadnum
].out
)
5892 /* Likewise, if a RELOAD_FOR_INPUT can inherit a value, its
5893 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS
5895 if (type
== RELOAD_FOR_INPUT
&& opnum
== rld
[i
].opnum
5896 && ignore_address_reloads
5897 /* Unless we are reloading an auto_inc expression. */
5898 && ! rld
[reloadnum
].out
)
5900 time2
= rld
[i
].opnum
* 4 + 2;
5902 case RELOAD_FOR_INPUT_ADDRESS
:
5903 if (type
== RELOAD_FOR_INPUT
&& opnum
== rld
[i
].opnum
5904 && ignore_address_reloads
5905 && ! rld
[reloadnum
].out
)
5907 time2
= rld
[i
].opnum
* 4 + 3;
5909 case RELOAD_FOR_INPUT
:
5910 time2
= rld
[i
].opnum
* 4 + 4;
5911 check_earlyclobber
= 1;
5913 /* rld[i].opnum * 4 + 4 <= (MAX_RECOG_OPERAND - 1) * 4 + 4
5914 == MAX_RECOG_OPERAND * 4 */
5915 case RELOAD_FOR_OPADDR_ADDR
:
5916 if (type
== RELOAD_FOR_OPERAND_ADDRESS
&& reloadnum
== i
+ 1
5917 && ignore_address_reloads
5918 && ! rld
[reloadnum
].out
)
5920 time2
= MAX_RECOG_OPERANDS
* 4 + 1;
5922 case RELOAD_FOR_OPERAND_ADDRESS
:
5923 time2
= MAX_RECOG_OPERANDS
* 4 + 2;
5924 check_earlyclobber
= 1;
5926 case RELOAD_FOR_INSN
:
5927 time2
= MAX_RECOG_OPERANDS
* 4 + 3;
5929 case RELOAD_FOR_OUTPUT
:
5930 /* All RELOAD_FOR_OUTPUT reloads become live just after the
5931 instruction is executed. */
5932 time2
= MAX_RECOG_OPERANDS
* 4 + 4;
5934 /* The first RELOAD_FOR_OUTADDR_ADDRESS reload conflicts with
5935 the RELOAD_FOR_OUTPUT reloads, so assign it the same time
5937 case RELOAD_FOR_OUTADDR_ADDRESS
:
5938 if (type
== RELOAD_FOR_OUTPUT_ADDRESS
&& reloadnum
== i
+ 1
5939 && ignore_address_reloads
5940 && ! rld
[reloadnum
].out
)
5942 time2
= MAX_RECOG_OPERANDS
* 4 + 4 + rld
[i
].opnum
;
5944 case RELOAD_FOR_OUTPUT_ADDRESS
:
5945 time2
= MAX_RECOG_OPERANDS
* 4 + 5 + rld
[i
].opnum
;
5948 /* If there is no conflict in the input part, handle this
5949 like an output reload. */
5950 if (! rld
[i
].in
|| rtx_equal_p (other_input
, value
))
5952 time2
= MAX_RECOG_OPERANDS
* 4 + 4;
5953 /* Earlyclobbered outputs must conflict with inputs. */
5954 if (earlyclobber_operand_p (rld
[i
].out
))
5955 time2
= MAX_RECOG_OPERANDS
* 4 + 3;
5960 /* RELOAD_OTHER might be live beyond instruction execution,
5961 but this is not obvious when we set time2 = 1. So check
5962 here if there might be a problem with the new reload
5963 clobbering the register used by the RELOAD_OTHER. */
5971 && (! rld
[i
].in
|| rld
[i
].out
5972 || ! rtx_equal_p (other_input
, value
)))
5973 || (out
&& rld
[reloadnum
].out_reg
5974 && time2
>= MAX_RECOG_OPERANDS
* 4 + 3))
5980 /* Earlyclobbered outputs must conflict with inputs. */
5981 if (check_earlyclobber
&& out
&& earlyclobber_operand_p (out
))
5987 /* Return 1 if the value in reload reg REGNO, as used by a reload
5988 needed for the part of the insn specified by OPNUM and TYPE,
5989 may be used to load VALUE into it.
5991 MODE is the mode in which the register is used, this is needed to
5992 determine how many hard regs to test.
5994 Other read-only reloads with the same value do not conflict
5995 unless OUT is nonzero and these other reloads have to live while
5996 output reloads live.
5997 If OUT is CONST0_RTX, this is a special case: it means that the
5998 test should not be for using register REGNO as reload register, but
5999 for copying from register REGNO into the reload register.
6001 RELOADNUM is the number of the reload we want to load this value for;
6002 a reload does not conflict with itself.
6004 When IGNORE_ADDRESS_RELOADS is set, we can not have conflicts with
6005 reloads that load an address for the very reload we are considering.
6007 The caller has to make sure that there is no conflict with the return
6011 free_for_value_p (int regno
, enum machine_mode mode
, int opnum
,
6012 enum reload_type type
, rtx value
, rtx out
, int reloadnum
,
6013 int ignore_address_reloads
)
6015 int nregs
= hard_regno_nregs
[regno
][mode
];
6017 if (! reload_reg_free_for_value_p (regno
, regno
+ nregs
, opnum
, type
,
6018 value
, out
, reloadnum
,
6019 ignore_address_reloads
))
6024 /* Return nonzero if the rtx X is invariant over the current function. */
6025 /* ??? Actually, the places where we use this expect exactly what is
6026 tested here, and not everything that is function invariant. In
6027 particular, the frame pointer and arg pointer are special cased;
6028 pic_offset_table_rtx is not, and we must not spill these things to
6032 function_invariant_p (const_rtx x
)
6036 if (x
== frame_pointer_rtx
|| x
== arg_pointer_rtx
)
6038 if (GET_CODE (x
) == PLUS
6039 && (XEXP (x
, 0) == frame_pointer_rtx
|| XEXP (x
, 0) == arg_pointer_rtx
)
6040 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
6045 /* Determine whether the reload reg X overlaps any rtx'es used for
6046 overriding inheritance. Return nonzero if so. */
6049 conflicts_with_override (rtx x
)
6052 for (i
= 0; i
< n_reloads
; i
++)
6053 if (reload_override_in
[i
]
6054 && reg_overlap_mentioned_p (x
, reload_override_in
[i
]))
6059 /* Give an error message saying we failed to find a reload for INSN,
6060 and clear out reload R. */
6062 failed_reload (rtx insn
, int r
)
6064 if (asm_noperands (PATTERN (insn
)) < 0)
6065 /* It's the compiler's fault. */
6066 fatal_insn ("could not find a spill register", insn
);
6068 /* It's the user's fault; the operand's mode and constraint
6069 don't match. Disable this reload so we don't crash in final. */
6070 error_for_asm (insn
,
6071 "%<asm%> operand constraint incompatible with operand size");
6075 rld
[r
].optional
= 1;
6076 rld
[r
].secondary_p
= 1;
6079 /* I is the index in SPILL_REG_RTX of the reload register we are to allocate
6080 for reload R. If it's valid, get an rtx for it. Return nonzero if
6083 set_reload_reg (int i
, int r
)
6085 /* regno is 'set but not used' if HARD_REGNO_MODE_OK doesn't use its first
6087 int regno ATTRIBUTE_UNUSED
;
6088 rtx reg
= spill_reg_rtx
[i
];
6090 if (reg
== 0 || GET_MODE (reg
) != rld
[r
].mode
)
6091 spill_reg_rtx
[i
] = reg
6092 = gen_rtx_REG (rld
[r
].mode
, spill_regs
[i
]);
6094 regno
= true_regnum (reg
);
6096 /* Detect when the reload reg can't hold the reload mode.
6097 This used to be one `if', but Sequent compiler can't handle that. */
6098 if (HARD_REGNO_MODE_OK (regno
, rld
[r
].mode
))
6100 enum machine_mode test_mode
= VOIDmode
;
6102 test_mode
= GET_MODE (rld
[r
].in
);
6103 /* If rld[r].in has VOIDmode, it means we will load it
6104 in whatever mode the reload reg has: to wit, rld[r].mode.
6105 We have already tested that for validity. */
6106 /* Aside from that, we need to test that the expressions
6107 to reload from or into have modes which are valid for this
6108 reload register. Otherwise the reload insns would be invalid. */
6109 if (! (rld
[r
].in
!= 0 && test_mode
!= VOIDmode
6110 && ! HARD_REGNO_MODE_OK (regno
, test_mode
)))
6111 if (! (rld
[r
].out
!= 0
6112 && ! HARD_REGNO_MODE_OK (regno
, GET_MODE (rld
[r
].out
))))
6114 /* The reg is OK. */
6117 /* Mark as in use for this insn the reload regs we use
6119 mark_reload_reg_in_use (spill_regs
[i
], rld
[r
].opnum
,
6120 rld
[r
].when_needed
, rld
[r
].mode
);
6122 rld
[r
].reg_rtx
= reg
;
6123 reload_spill_index
[r
] = spill_regs
[i
];
6130 /* Find a spill register to use as a reload register for reload R.
6131 LAST_RELOAD is nonzero if this is the last reload for the insn being
6134 Set rld[R].reg_rtx to the register allocated.
6136 We return 1 if successful, or 0 if we couldn't find a spill reg and
6137 we didn't change anything. */
6140 allocate_reload_reg (struct insn_chain
*chain ATTRIBUTE_UNUSED
, int r
,
6145 /* If we put this reload ahead, thinking it is a group,
6146 then insist on finding a group. Otherwise we can grab a
6147 reg that some other reload needs.
6148 (That can happen when we have a 68000 DATA_OR_FP_REG
6149 which is a group of data regs or one fp reg.)
6150 We need not be so restrictive if there are no more reloads
6153 ??? Really it would be nicer to have smarter handling
6154 for that kind of reg class, where a problem like this is normal.
6155 Perhaps those classes should be avoided for reloading
6156 by use of more alternatives. */
6158 int force_group
= rld
[r
].nregs
> 1 && ! last_reload
;
6160 /* If we want a single register and haven't yet found one,
6161 take any reg in the right class and not in use.
6162 If we want a consecutive group, here is where we look for it.
6164 We use three passes so we can first look for reload regs to
6165 reuse, which are already in use for other reloads in this insn,
6166 and only then use additional registers which are not "bad", then
6167 finally any register.
6169 I think that maximizing reuse is needed to make sure we don't
6170 run out of reload regs. Suppose we have three reloads, and
6171 reloads A and B can share regs. These need two regs.
6172 Suppose A and B are given different regs.
6173 That leaves none for C. */
6174 for (pass
= 0; pass
< 3; pass
++)
6176 /* I is the index in spill_regs.
6177 We advance it round-robin between insns to use all spill regs
6178 equally, so that inherited reloads have a chance
6179 of leapfrogging each other. */
6183 for (count
= 0; count
< n_spills
; count
++)
6185 int rclass
= (int) rld
[r
].rclass
;
6191 regnum
= spill_regs
[i
];
6193 if ((reload_reg_free_p (regnum
, rld
[r
].opnum
,
6196 /* We check reload_reg_used to make sure we
6197 don't clobber the return register. */
6198 && ! TEST_HARD_REG_BIT (reload_reg_used
, regnum
)
6199 && free_for_value_p (regnum
, rld
[r
].mode
, rld
[r
].opnum
,
6200 rld
[r
].when_needed
, rld
[r
].in
,
6202 && TEST_HARD_REG_BIT (reg_class_contents
[rclass
], regnum
)
6203 && HARD_REGNO_MODE_OK (regnum
, rld
[r
].mode
)
6204 /* Look first for regs to share, then for unshared. But
6205 don't share regs used for inherited reloads; they are
6206 the ones we want to preserve. */
6208 || (TEST_HARD_REG_BIT (reload_reg_used_at_all
,
6210 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit
,
6213 int nr
= hard_regno_nregs
[regnum
][rld
[r
].mode
];
6215 /* During the second pass we want to avoid reload registers
6216 which are "bad" for this reload. */
6218 && ira_bad_reload_regno (regnum
, rld
[r
].in
, rld
[r
].out
))
6221 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
6222 (on 68000) got us two FP regs. If NR is 1,
6223 we would reject both of them. */
6226 /* If we need only one reg, we have already won. */
6229 /* But reject a single reg if we demand a group. */
6234 /* Otherwise check that as many consecutive regs as we need
6235 are available here. */
6238 int regno
= regnum
+ nr
- 1;
6239 if (!(TEST_HARD_REG_BIT (reg_class_contents
[rclass
], regno
)
6240 && spill_reg_order
[regno
] >= 0
6241 && reload_reg_free_p (regno
, rld
[r
].opnum
,
6242 rld
[r
].when_needed
)))
6251 /* If we found something on the current pass, omit later passes. */
6252 if (count
< n_spills
)
6256 /* We should have found a spill register by now. */
6257 if (count
>= n_spills
)
6260 /* I is the index in SPILL_REG_RTX of the reload register we are to
6261 allocate. Get an rtx for it and find its register number. */
6263 return set_reload_reg (i
, r
);
6266 /* Initialize all the tables needed to allocate reload registers.
6267 CHAIN is the insn currently being processed; SAVE_RELOAD_REG_RTX
6268 is the array we use to restore the reg_rtx field for every reload. */
6271 choose_reload_regs_init (struct insn_chain
*chain
, rtx
*save_reload_reg_rtx
)
6275 for (i
= 0; i
< n_reloads
; i
++)
6276 rld
[i
].reg_rtx
= save_reload_reg_rtx
[i
];
6278 memset (reload_inherited
, 0, MAX_RELOADS
);
6279 memset (reload_inheritance_insn
, 0, MAX_RELOADS
* sizeof (rtx
));
6280 memset (reload_override_in
, 0, MAX_RELOADS
* sizeof (rtx
));
6282 CLEAR_HARD_REG_SET (reload_reg_used
);
6283 CLEAR_HARD_REG_SET (reload_reg_used_at_all
);
6284 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr
);
6285 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload
);
6286 CLEAR_HARD_REG_SET (reload_reg_used_in_insn
);
6287 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr
);
6289 CLEAR_HARD_REG_SET (reg_used_in_insn
);
6292 REG_SET_TO_HARD_REG_SET (tmp
, &chain
->live_throughout
);
6293 IOR_HARD_REG_SET (reg_used_in_insn
, tmp
);
6294 REG_SET_TO_HARD_REG_SET (tmp
, &chain
->dead_or_set
);
6295 IOR_HARD_REG_SET (reg_used_in_insn
, tmp
);
6296 compute_use_by_pseudos (®_used_in_insn
, &chain
->live_throughout
);
6297 compute_use_by_pseudos (®_used_in_insn
, &chain
->dead_or_set
);
6300 for (i
= 0; i
< reload_n_operands
; i
++)
6302 CLEAR_HARD_REG_SET (reload_reg_used_in_output
[i
]);
6303 CLEAR_HARD_REG_SET (reload_reg_used_in_input
[i
]);
6304 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr
[i
]);
6305 CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr
[i
]);
6306 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr
[i
]);
6307 CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr
[i
]);
6310 COMPL_HARD_REG_SET (reload_reg_unavailable
, chain
->used_spill_regs
);
6312 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit
);
6314 for (i
= 0; i
< n_reloads
; i
++)
6315 /* If we have already decided to use a certain register,
6316 don't use it in another way. */
6318 mark_reload_reg_in_use (REGNO (rld
[i
].reg_rtx
), rld
[i
].opnum
,
6319 rld
[i
].when_needed
, rld
[i
].mode
);
6322 /* Assign hard reg targets for the pseudo-registers we must reload
6323 into hard regs for this insn.
6324 Also output the instructions to copy them in and out of the hard regs.
6326 For machines with register classes, we are responsible for
6327 finding a reload reg in the proper class. */
6330 choose_reload_regs (struct insn_chain
*chain
)
6332 rtx insn
= chain
->insn
;
6334 unsigned int max_group_size
= 1;
6335 enum reg_class group_class
= NO_REGS
;
6336 int pass
, win
, inheritance
;
6338 rtx save_reload_reg_rtx
[MAX_RELOADS
];
6340 /* In order to be certain of getting the registers we need,
6341 we must sort the reloads into order of increasing register class.
6342 Then our grabbing of reload registers will parallel the process
6343 that provided the reload registers.
6345 Also note whether any of the reloads wants a consecutive group of regs.
6346 If so, record the maximum size of the group desired and what
6347 register class contains all the groups needed by this insn. */
6349 for (j
= 0; j
< n_reloads
; j
++)
6351 reload_order
[j
] = j
;
6352 if (rld
[j
].reg_rtx
!= NULL_RTX
)
6354 gcc_assert (REG_P (rld
[j
].reg_rtx
)
6355 && HARD_REGISTER_P (rld
[j
].reg_rtx
));
6356 reload_spill_index
[j
] = REGNO (rld
[j
].reg_rtx
);
6359 reload_spill_index
[j
] = -1;
6361 if (rld
[j
].nregs
> 1)
6363 max_group_size
= MAX (rld
[j
].nregs
, max_group_size
);
6365 = reg_class_superunion
[(int) rld
[j
].rclass
][(int) group_class
];
6368 save_reload_reg_rtx
[j
] = rld
[j
].reg_rtx
;
6372 qsort (reload_order
, n_reloads
, sizeof (short), reload_reg_class_lower
);
6374 /* If -O, try first with inheritance, then turning it off.
6375 If not -O, don't do inheritance.
6376 Using inheritance when not optimizing leads to paradoxes
6377 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
6378 because one side of the comparison might be inherited. */
6380 for (inheritance
= optimize
> 0; inheritance
>= 0; inheritance
--)
6382 choose_reload_regs_init (chain
, save_reload_reg_rtx
);
6384 /* Process the reloads in order of preference just found.
6385 Beyond this point, subregs can be found in reload_reg_rtx.
6387 This used to look for an existing reloaded home for all of the
6388 reloads, and only then perform any new reloads. But that could lose
6389 if the reloads were done out of reg-class order because a later
6390 reload with a looser constraint might have an old home in a register
6391 needed by an earlier reload with a tighter constraint.
6393 To solve this, we make two passes over the reloads, in the order
6394 described above. In the first pass we try to inherit a reload
6395 from a previous insn. If there is a later reload that needs a
6396 class that is a proper subset of the class being processed, we must
6397 also allocate a spill register during the first pass.
6399 Then make a second pass over the reloads to allocate any reloads
6400 that haven't been given registers yet. */
6402 for (j
= 0; j
< n_reloads
; j
++)
6404 int r
= reload_order
[j
];
6405 rtx search_equiv
= NULL_RTX
;
6407 /* Ignore reloads that got marked inoperative. */
6408 if (rld
[r
].out
== 0 && rld
[r
].in
== 0
6409 && ! rld
[r
].secondary_p
)
6412 /* If find_reloads chose to use reload_in or reload_out as a reload
6413 register, we don't need to chose one. Otherwise, try even if it
6414 found one since we might save an insn if we find the value lying
6416 Try also when reload_in is a pseudo without a hard reg. */
6417 if (rld
[r
].in
!= 0 && rld
[r
].reg_rtx
!= 0
6418 && (rtx_equal_p (rld
[r
].in
, rld
[r
].reg_rtx
)
6419 || (rtx_equal_p (rld
[r
].out
, rld
[r
].reg_rtx
)
6420 && !MEM_P (rld
[r
].in
)
6421 && true_regnum (rld
[r
].in
) < FIRST_PSEUDO_REGISTER
)))
6424 #if 0 /* No longer needed for correct operation.
6425 It might give better code, or might not; worth an experiment? */
6426 /* If this is an optional reload, we can't inherit from earlier insns
6427 until we are sure that any non-optional reloads have been allocated.
6428 The following code takes advantage of the fact that optional reloads
6429 are at the end of reload_order. */
6430 if (rld
[r
].optional
!= 0)
6431 for (i
= 0; i
< j
; i
++)
6432 if ((rld
[reload_order
[i
]].out
!= 0
6433 || rld
[reload_order
[i
]].in
!= 0
6434 || rld
[reload_order
[i
]].secondary_p
)
6435 && ! rld
[reload_order
[i
]].optional
6436 && rld
[reload_order
[i
]].reg_rtx
== 0)
6437 allocate_reload_reg (chain
, reload_order
[i
], 0);
6440 /* First see if this pseudo is already available as reloaded
6441 for a previous insn. We cannot try to inherit for reloads
6442 that are smaller than the maximum number of registers needed
6443 for groups unless the register we would allocate cannot be used
6446 We could check here to see if this is a secondary reload for
6447 an object that is already in a register of the desired class.
6448 This would avoid the need for the secondary reload register.
6449 But this is complex because we can't easily determine what
6450 objects might want to be loaded via this reload. So let a
6451 register be allocated here. In `emit_reload_insns' we suppress
6452 one of the loads in the case described above. */
6458 enum machine_mode mode
= VOIDmode
;
6462 else if (REG_P (rld
[r
].in
))
6464 regno
= REGNO (rld
[r
].in
);
6465 mode
= GET_MODE (rld
[r
].in
);
6467 else if (REG_P (rld
[r
].in_reg
))
6469 regno
= REGNO (rld
[r
].in_reg
);
6470 mode
= GET_MODE (rld
[r
].in_reg
);
6472 else if (GET_CODE (rld
[r
].in_reg
) == SUBREG
6473 && REG_P (SUBREG_REG (rld
[r
].in_reg
)))
6475 regno
= REGNO (SUBREG_REG (rld
[r
].in_reg
));
6476 if (regno
< FIRST_PSEUDO_REGISTER
)
6477 regno
= subreg_regno (rld
[r
].in_reg
);
6479 byte
= SUBREG_BYTE (rld
[r
].in_reg
);
6480 mode
= GET_MODE (rld
[r
].in_reg
);
6483 else if (GET_RTX_CLASS (GET_CODE (rld
[r
].in_reg
)) == RTX_AUTOINC
6484 && REG_P (XEXP (rld
[r
].in_reg
, 0)))
6486 regno
= REGNO (XEXP (rld
[r
].in_reg
, 0));
6487 mode
= GET_MODE (XEXP (rld
[r
].in_reg
, 0));
6488 rld
[r
].out
= rld
[r
].in
;
6492 /* This won't work, since REGNO can be a pseudo reg number.
6493 Also, it takes much more hair to keep track of all the things
6494 that can invalidate an inherited reload of part of a pseudoreg. */
6495 else if (GET_CODE (rld
[r
].in
) == SUBREG
6496 && REG_P (SUBREG_REG (rld
[r
].in
)))
6497 regno
= subreg_regno (rld
[r
].in
);
6501 && reg_last_reload_reg
[regno
] != 0
6502 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg
[regno
]))
6503 >= GET_MODE_SIZE (mode
) + byte
)
6504 #ifdef CANNOT_CHANGE_MODE_CLASS
6505 /* Verify that the register it's in can be used in
6507 && !REG_CANNOT_CHANGE_MODE_P (REGNO (reg_last_reload_reg
[regno
]),
6508 GET_MODE (reg_last_reload_reg
[regno
]),
6513 enum reg_class rclass
= rld
[r
].rclass
, last_class
;
6514 rtx last_reg
= reg_last_reload_reg
[regno
];
6516 i
= REGNO (last_reg
);
6517 i
+= subreg_regno_offset (i
, GET_MODE (last_reg
), byte
, mode
);
6518 last_class
= REGNO_REG_CLASS (i
);
6520 if (reg_reloaded_contents
[i
] == regno
6521 && TEST_HARD_REG_BIT (reg_reloaded_valid
, i
)
6522 && HARD_REGNO_MODE_OK (i
, rld
[r
].mode
)
6523 && (TEST_HARD_REG_BIT (reg_class_contents
[(int) rclass
], i
)
6524 /* Even if we can't use this register as a reload
6525 register, we might use it for reload_override_in,
6526 if copying it to the desired class is cheap
6528 || ((register_move_cost (mode
, last_class
, rclass
)
6529 < memory_move_cost (mode
, rclass
, true))
6530 && (secondary_reload_class (1, rclass
, mode
,
6533 #ifdef SECONDARY_MEMORY_NEEDED
6534 && ! SECONDARY_MEMORY_NEEDED (last_class
, rclass
,
6539 && (rld
[r
].nregs
== max_group_size
6540 || ! TEST_HARD_REG_BIT (reg_class_contents
[(int) group_class
],
6542 && free_for_value_p (i
, rld
[r
].mode
, rld
[r
].opnum
,
6543 rld
[r
].when_needed
, rld
[r
].in
,
6546 /* If a group is needed, verify that all the subsequent
6547 registers still have their values intact. */
6548 int nr
= hard_regno_nregs
[i
][rld
[r
].mode
];
6551 for (k
= 1; k
< nr
; k
++)
6552 if (reg_reloaded_contents
[i
+ k
] != regno
6553 || ! TEST_HARD_REG_BIT (reg_reloaded_valid
, i
+ k
))
6561 last_reg
= (GET_MODE (last_reg
) == mode
6562 ? last_reg
: gen_rtx_REG (mode
, i
));
6565 for (k
= 0; k
< nr
; k
++)
6566 bad_for_class
|= ! TEST_HARD_REG_BIT (reg_class_contents
[(int) rld
[r
].rclass
],
6569 /* We found a register that contains the
6570 value we need. If this register is the
6571 same as an `earlyclobber' operand of the
6572 current insn, just mark it as a place to
6573 reload from since we can't use it as the
6574 reload register itself. */
6576 for (i1
= 0; i1
< n_earlyclobbers
; i1
++)
6577 if (reg_overlap_mentioned_for_reload_p
6578 (reg_last_reload_reg
[regno
],
6579 reload_earlyclobbers
[i1
]))
6582 if (i1
!= n_earlyclobbers
6583 || ! (free_for_value_p (i
, rld
[r
].mode
,
6585 rld
[r
].when_needed
, rld
[r
].in
,
6587 /* Don't use it if we'd clobber a pseudo reg. */
6588 || (TEST_HARD_REG_BIT (reg_used_in_insn
, i
)
6590 && ! TEST_HARD_REG_BIT (reg_reloaded_dead
, i
))
6591 /* Don't clobber the frame pointer. */
6592 || (i
== HARD_FRAME_POINTER_REGNUM
6593 && frame_pointer_needed
6595 /* Don't really use the inherited spill reg
6596 if we need it wider than we've got it. */
6597 || (GET_MODE_SIZE (rld
[r
].mode
)
6598 > GET_MODE_SIZE (mode
))
6601 /* If find_reloads chose reload_out as reload
6602 register, stay with it - that leaves the
6603 inherited register for subsequent reloads. */
6604 || (rld
[r
].out
&& rld
[r
].reg_rtx
6605 && rtx_equal_p (rld
[r
].out
, rld
[r
].reg_rtx
)))
6607 if (! rld
[r
].optional
)
6609 reload_override_in
[r
] = last_reg
;
6610 reload_inheritance_insn
[r
]
6611 = reg_reloaded_insn
[i
];
6617 /* We can use this as a reload reg. */
6618 /* Mark the register as in use for this part of
6620 mark_reload_reg_in_use (i
,
6624 rld
[r
].reg_rtx
= last_reg
;
6625 reload_inherited
[r
] = 1;
6626 reload_inheritance_insn
[r
]
6627 = reg_reloaded_insn
[i
];
6628 reload_spill_index
[r
] = i
;
6629 for (k
= 0; k
< nr
; k
++)
6630 SET_HARD_REG_BIT (reload_reg_used_for_inherit
,
6638 /* Here's another way to see if the value is already lying around. */
6641 && ! reload_inherited
[r
]
6643 && (CONSTANT_P (rld
[r
].in
)
6644 || GET_CODE (rld
[r
].in
) == PLUS
6645 || REG_P (rld
[r
].in
)
6646 || MEM_P (rld
[r
].in
))
6647 && (rld
[r
].nregs
== max_group_size
6648 || ! reg_classes_intersect_p (rld
[r
].rclass
, group_class
)))
6649 search_equiv
= rld
[r
].in
;
6654 = find_equiv_reg (search_equiv
, insn
, rld
[r
].rclass
,
6655 -1, NULL
, 0, rld
[r
].mode
);
6661 regno
= REGNO (equiv
);
6664 /* This must be a SUBREG of a hard register.
6665 Make a new REG since this might be used in an
6666 address and not all machines support SUBREGs
6668 gcc_assert (GET_CODE (equiv
) == SUBREG
);
6669 regno
= subreg_regno (equiv
);
6670 equiv
= gen_rtx_REG (rld
[r
].mode
, regno
);
6671 /* If we choose EQUIV as the reload register, but the
6672 loop below decides to cancel the inheritance, we'll
6673 end up reloading EQUIV in rld[r].mode, not the mode
6674 it had originally. That isn't safe when EQUIV isn't
6675 available as a spill register since its value might
6676 still be live at this point. */
6677 for (i
= regno
; i
< regno
+ (int) rld
[r
].nregs
; i
++)
6678 if (TEST_HARD_REG_BIT (reload_reg_unavailable
, i
))
6683 /* If we found a spill reg, reject it unless it is free
6684 and of the desired class. */
6688 int bad_for_class
= 0;
6689 int max_regno
= regno
+ rld
[r
].nregs
;
6691 for (i
= regno
; i
< max_regno
; i
++)
6693 regs_used
|= TEST_HARD_REG_BIT (reload_reg_used_at_all
,
6695 bad_for_class
|= ! TEST_HARD_REG_BIT (reg_class_contents
[(int) rld
[r
].rclass
],
6700 && ! free_for_value_p (regno
, rld
[r
].mode
,
6701 rld
[r
].opnum
, rld
[r
].when_needed
,
6702 rld
[r
].in
, rld
[r
].out
, r
, 1))
6707 if (equiv
!= 0 && ! HARD_REGNO_MODE_OK (regno
, rld
[r
].mode
))
6710 /* We found a register that contains the value we need.
6711 If this register is the same as an `earlyclobber' operand
6712 of the current insn, just mark it as a place to reload from
6713 since we can't use it as the reload register itself. */
6716 for (i
= 0; i
< n_earlyclobbers
; i
++)
6717 if (reg_overlap_mentioned_for_reload_p (equiv
,
6718 reload_earlyclobbers
[i
]))
6720 if (! rld
[r
].optional
)
6721 reload_override_in
[r
] = equiv
;
6726 /* If the equiv register we have found is explicitly clobbered
6727 in the current insn, it depends on the reload type if we
6728 can use it, use it for reload_override_in, or not at all.
6729 In particular, we then can't use EQUIV for a
6730 RELOAD_FOR_OUTPUT_ADDRESS reload. */
6734 if (regno_clobbered_p (regno
, insn
, rld
[r
].mode
, 2))
6735 switch (rld
[r
].when_needed
)
6737 case RELOAD_FOR_OTHER_ADDRESS
:
6738 case RELOAD_FOR_INPADDR_ADDRESS
:
6739 case RELOAD_FOR_INPUT_ADDRESS
:
6740 case RELOAD_FOR_OPADDR_ADDR
:
6743 case RELOAD_FOR_INPUT
:
6744 case RELOAD_FOR_OPERAND_ADDRESS
:
6745 if (! rld
[r
].optional
)
6746 reload_override_in
[r
] = equiv
;
6752 else if (regno_clobbered_p (regno
, insn
, rld
[r
].mode
, 1))
6753 switch (rld
[r
].when_needed
)
6755 case RELOAD_FOR_OTHER_ADDRESS
:
6756 case RELOAD_FOR_INPADDR_ADDRESS
:
6757 case RELOAD_FOR_INPUT_ADDRESS
:
6758 case RELOAD_FOR_OPADDR_ADDR
:
6759 case RELOAD_FOR_OPERAND_ADDRESS
:
6760 case RELOAD_FOR_INPUT
:
6763 if (! rld
[r
].optional
)
6764 reload_override_in
[r
] = equiv
;
6772 /* If we found an equivalent reg, say no code need be generated
6773 to load it, and use it as our reload reg. */
6775 && (regno
!= HARD_FRAME_POINTER_REGNUM
6776 || !frame_pointer_needed
))
6778 int nr
= hard_regno_nregs
[regno
][rld
[r
].mode
];
6780 rld
[r
].reg_rtx
= equiv
;
6781 reload_spill_index
[r
] = regno
;
6782 reload_inherited
[r
] = 1;
6784 /* If reg_reloaded_valid is not set for this register,
6785 there might be a stale spill_reg_store lying around.
6786 We must clear it, since otherwise emit_reload_insns
6787 might delete the store. */
6788 if (! TEST_HARD_REG_BIT (reg_reloaded_valid
, regno
))
6789 spill_reg_store
[regno
] = NULL_RTX
;
6790 /* If any of the hard registers in EQUIV are spill
6791 registers, mark them as in use for this insn. */
6792 for (k
= 0; k
< nr
; k
++)
6794 i
= spill_reg_order
[regno
+ k
];
6797 mark_reload_reg_in_use (regno
, rld
[r
].opnum
,
6800 SET_HARD_REG_BIT (reload_reg_used_for_inherit
,
6807 /* If we found a register to use already, or if this is an optional
6808 reload, we are done. */
6809 if (rld
[r
].reg_rtx
!= 0 || rld
[r
].optional
!= 0)
6813 /* No longer needed for correct operation. Might or might
6814 not give better code on the average. Want to experiment? */
6816 /* See if there is a later reload that has a class different from our
6817 class that intersects our class or that requires less register
6818 than our reload. If so, we must allocate a register to this
6819 reload now, since that reload might inherit a previous reload
6820 and take the only available register in our class. Don't do this
6821 for optional reloads since they will force all previous reloads
6822 to be allocated. Also don't do this for reloads that have been
6825 for (i
= j
+ 1; i
< n_reloads
; i
++)
6827 int s
= reload_order
[i
];
6829 if ((rld
[s
].in
== 0 && rld
[s
].out
== 0
6830 && ! rld
[s
].secondary_p
)
6834 if ((rld
[s
].rclass
!= rld
[r
].rclass
6835 && reg_classes_intersect_p (rld
[r
].rclass
,
6837 || rld
[s
].nregs
< rld
[r
].nregs
)
6844 allocate_reload_reg (chain
, r
, j
== n_reloads
- 1);
6848 /* Now allocate reload registers for anything non-optional that
6849 didn't get one yet. */
6850 for (j
= 0; j
< n_reloads
; j
++)
6852 int r
= reload_order
[j
];
6854 /* Ignore reloads that got marked inoperative. */
6855 if (rld
[r
].out
== 0 && rld
[r
].in
== 0 && ! rld
[r
].secondary_p
)
6858 /* Skip reloads that already have a register allocated or are
6860 if (rld
[r
].reg_rtx
!= 0 || rld
[r
].optional
)
6863 if (! allocate_reload_reg (chain
, r
, j
== n_reloads
- 1))
6867 /* If that loop got all the way, we have won. */
6874 /* Loop around and try without any inheritance. */
6879 /* First undo everything done by the failed attempt
6880 to allocate with inheritance. */
6881 choose_reload_regs_init (chain
, save_reload_reg_rtx
);
6883 /* Some sanity tests to verify that the reloads found in the first
6884 pass are identical to the ones we have now. */
6885 gcc_assert (chain
->n_reloads
== n_reloads
);
6887 for (i
= 0; i
< n_reloads
; i
++)
6889 if (chain
->rld
[i
].regno
< 0 || chain
->rld
[i
].reg_rtx
!= 0)
6891 gcc_assert (chain
->rld
[i
].when_needed
== rld
[i
].when_needed
);
6892 for (j
= 0; j
< n_spills
; j
++)
6893 if (spill_regs
[j
] == chain
->rld
[i
].regno
)
6894 if (! set_reload_reg (j
, i
))
6895 failed_reload (chain
->insn
, i
);
6899 /* If we thought we could inherit a reload, because it seemed that
6900 nothing else wanted the same reload register earlier in the insn,
6901 verify that assumption, now that all reloads have been assigned.
6902 Likewise for reloads where reload_override_in has been set. */
6904 /* If doing expensive optimizations, do one preliminary pass that doesn't
6905 cancel any inheritance, but removes reloads that have been needed only
6906 for reloads that we know can be inherited. */
6907 for (pass
= flag_expensive_optimizations
; pass
>= 0; pass
--)
6909 for (j
= 0; j
< n_reloads
; j
++)
6911 int r
= reload_order
[j
];
6913 if (reload_inherited
[r
] && rld
[r
].reg_rtx
)
6914 check_reg
= rld
[r
].reg_rtx
;
6915 else if (reload_override_in
[r
]
6916 && (REG_P (reload_override_in
[r
])
6917 || GET_CODE (reload_override_in
[r
]) == SUBREG
))
6918 check_reg
= reload_override_in
[r
];
6921 if (! free_for_value_p (true_regnum (check_reg
), rld
[r
].mode
,
6922 rld
[r
].opnum
, rld
[r
].when_needed
, rld
[r
].in
,
6923 (reload_inherited
[r
]
6924 ? rld
[r
].out
: const0_rtx
),
6929 reload_inherited
[r
] = 0;
6930 reload_override_in
[r
] = 0;
6932 /* If we can inherit a RELOAD_FOR_INPUT, or can use a
6933 reload_override_in, then we do not need its related
6934 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
6935 likewise for other reload types.
6936 We handle this by removing a reload when its only replacement
6937 is mentioned in reload_in of the reload we are going to inherit.
6938 A special case are auto_inc expressions; even if the input is
6939 inherited, we still need the address for the output. We can
6940 recognize them because they have RELOAD_OUT set to RELOAD_IN.
6941 If we succeeded removing some reload and we are doing a preliminary
6942 pass just to remove such reloads, make another pass, since the
6943 removal of one reload might allow us to inherit another one. */
6945 && rld
[r
].out
!= rld
[r
].in
6946 && remove_address_replacements (rld
[r
].in
) && pass
)
6951 /* Now that reload_override_in is known valid,
6952 actually override reload_in. */
6953 for (j
= 0; j
< n_reloads
; j
++)
6954 if (reload_override_in
[j
])
6955 rld
[j
].in
= reload_override_in
[j
];
6957 /* If this reload won't be done because it has been canceled or is
6958 optional and not inherited, clear reload_reg_rtx so other
6959 routines (such as subst_reloads) don't get confused. */
6960 for (j
= 0; j
< n_reloads
; j
++)
6961 if (rld
[j
].reg_rtx
!= 0
6962 && ((rld
[j
].optional
&& ! reload_inherited
[j
])
6963 || (rld
[j
].in
== 0 && rld
[j
].out
== 0
6964 && ! rld
[j
].secondary_p
)))
6966 int regno
= true_regnum (rld
[j
].reg_rtx
);
6968 if (spill_reg_order
[regno
] >= 0)
6969 clear_reload_reg_in_use (regno
, rld
[j
].opnum
,
6970 rld
[j
].when_needed
, rld
[j
].mode
);
6972 reload_spill_index
[j
] = -1;
6975 /* Record which pseudos and which spill regs have output reloads. */
6976 for (j
= 0; j
< n_reloads
; j
++)
6978 int r
= reload_order
[j
];
6980 i
= reload_spill_index
[r
];
6982 /* I is nonneg if this reload uses a register.
6983 If rld[r].reg_rtx is 0, this is an optional reload
6984 that we opted to ignore. */
6985 if (rld
[r
].out_reg
!= 0 && REG_P (rld
[r
].out_reg
)
6986 && rld
[r
].reg_rtx
!= 0)
6988 int nregno
= REGNO (rld
[r
].out_reg
);
6991 if (nregno
< FIRST_PSEUDO_REGISTER
)
6992 nr
= hard_regno_nregs
[nregno
][rld
[r
].mode
];
6995 SET_REGNO_REG_SET (®_has_output_reload
,
6999 add_to_hard_reg_set (®_is_output_reload
, rld
[r
].mode
, i
);
7001 gcc_assert (rld
[r
].when_needed
== RELOAD_OTHER
7002 || rld
[r
].when_needed
== RELOAD_FOR_OUTPUT
7003 || rld
[r
].when_needed
== RELOAD_FOR_INSN
);
7008 /* Deallocate the reload register for reload R.  This is called from
7009 remove_address_replacements. */
/* NOTE(review): fragmentary extraction -- the function's storage
   class/return type, its braces, the early return for the no-register
   case, and the statement clearing rld[r].reg_rtx are not visible in
   this chunk.  Confirm against the full file before editing.  */
7012 deallocate_reload_reg (int r
)
/* If reload R was never assigned a register, there is nothing to
   release (the dropped line presumably returns here -- verify).  */
7016 if (! rld
[r
].reg_rtx
)
/* Hard register number actually holding the reload value;
   true_regnum looks through SUBREGs.  */
7018 regno
= true_regnum (rld
[r
].reg_rtx
);
/* If that register is a spill register, drop the in-use marks that
   were recorded for this reload's opnum / when_needed / mode.  */
7020 if (spill_reg_order
[regno
] >= 0)
7021 clear_reload_reg_in_use (regno
, rld
[r
].opnum
, rld
[r
].when_needed
,
/* Mark reload R as no longer occupying any spill register.  */
7023 reload_spill_index
[r
] = -1;
7026 /* These arrays are filled by emit_reload_insns and its subroutines. */
/* Each variable below accumulates an insn sequence for one category of
   reload; emit_input_reload_insns selects the destination via its
   switch on rl->when_needed (see the `where = &...' assignments later
   in this chunk).  The arrays are indexed by operand number.  */
/* RELOAD_FOR_INPUT reloads, per operand.  */
7027 static rtx input_reload_insns
[MAX_RECOG_OPERANDS
];
/* RELOAD_FOR_OTHER_ADDRESS reloads.  */
7028 static rtx other_input_address_reload_insns
= 0;
/* Input reloads not tied to a particular operand (selected by the
   switch case preceding the `&other_input_reload_insns' assignment;
   that case label is not visible here -- presumably RELOAD_OTHER).  */
7029 static rtx other_input_reload_insns
= 0;
/* RELOAD_FOR_INPUT_ADDRESS reloads, per operand.  */
7030 static rtx input_address_reload_insns
[MAX_RECOG_OPERANDS
];
/* RELOAD_FOR_INPADDR_ADDRESS reloads, per operand.  */
7031 static rtx inpaddr_address_reload_insns
[MAX_RECOG_OPERANDS
];
/* Output reloads, per operand -- used by emit_output_reload_insns,
   which is not fully visible in this chunk.  */
7032 static rtx output_reload_insns
[MAX_RECOG_OPERANDS
];
/* RELOAD_FOR_OUTPUT_ADDRESS reloads, per operand.  */
7033 static rtx output_address_reload_insns
[MAX_RECOG_OPERANDS
];
/* RELOAD_FOR_OUTADDR_ADDRESS reloads, per operand.  */
7034 static rtx outaddr_address_reload_insns
[MAX_RECOG_OPERANDS
];
/* RELOAD_FOR_OPERAND_ADDRESS reloads.  */
7035 static rtx operand_reload_insns
= 0;
/* RELOAD_FOR_OPADDR_ADDR reloads.  */
7036 static rtx other_operand_reload_insns
= 0;
/* Remaining output reloads, per operand -- presumably RELOAD_OTHER
   outputs; the selecting code is outside this chunk.  */
7037 static rtx other_output_reload_insns
[MAX_RECOG_OPERANDS
];
7039 /* Values to be put in spill_reg_store are put here first. */
7040 static rtx new_spill_reg_store
[FIRST_PSEUDO_REGISTER
];
/* Hard regs whose reload contents died in this insn -- set/consumed by
   code outside this chunk; semantics not verifiable from here.  */
7041 static HARD_REG_SET reg_reloaded_died
;
7043 /* Check if *RELOAD_REG is suitable as an intermediate or scratch register
7044 of class NEW_CLASS with mode NEW_MODE. Or alternatively, if alt_reload_reg
7045 is nonzero, if that is suitable. On success, change *RELOAD_REG to the
7046 adjusted register, and return true. Otherwise, return false. */
/* NOTE(review): fragmentary extraction -- the return type, braces, the
   `continue' statements that the if-tests below presumably guard, the
   store back into *RELOAD_REG and both return statements are not
   visible in this chunk.  */
7048 reload_adjust_reg_for_temp (rtx
*reload_reg
, rtx alt_reload_reg
,
7049 enum reg_class new_class
,
7050 enum machine_mode new_mode
)
/* Try *RELOAD_REG first; if it is rejected, try ALT_RELOAD_REG once
   (the second iteration zeroes alt_reload_reg so the loop ends).  */
7055 for (reg
= *reload_reg
; reg
; reg
= alt_reload_reg
, alt_reload_reg
= 0)
7057 unsigned regno
= REGNO (reg
);
/* The candidate must belong to the requested register class.  */
7059 if (!TEST_HARD_REG_BIT (reg_class_contents
[(int) new_class
], regno
))
/* If the candidate is in the wrong mode, it is only usable when the
   hard reg supports NEW_MODE and the mode change does not require
   more hard registers than the candidate occupies now.  */
7061 if (GET_MODE (reg
) != new_mode
)
7063 if (!HARD_REGNO_MODE_OK (regno
, new_mode
))
7065 if (hard_regno_nregs
[regno
][new_mode
]
7066 > hard_regno_nregs
[regno
][GET_MODE (reg
)])
/* Re-express the candidate register in NEW_MODE.  */
7068 reg
= reload_adjust_reg_for_mode (reg
, new_mode
);
7076 /* Check if *RELOAD_REG is suitable as a scratch register for the reload
7077 pattern with insn_code ICODE, or alternatively, if alt_reload_reg is
7078 nonzero, if that is suitable. On success, change *RELOAD_REG to the
7079 adjusted register, and return true. Otherwise, return false. */
/* NOTE(review): fragmentary extraction -- return type and braces are
   not visible in this chunk.  */
7081 reload_adjust_reg_for_icode (rtx
*reload_reg
, rtx alt_reload_reg
,
7082 enum insn_code icode
)
/* Derive the register class and the mode that pattern ICODE demands
   for its scratch operand (operand 2 of the pattern).  */
7085 enum reg_class new_class
= scratch_reload_class (icode
);
7086 enum machine_mode new_mode
= insn_data
[(int) icode
].operand
[2].mode
;
/* Delegate the suitability test and any mode adjustment.  */
7088 return reload_adjust_reg_for_temp (reload_reg
, alt_reload_reg
,
7089 new_class
, new_mode
);
7092 /* Generate insns to perform reload RL, which is for the insn in CHAIN and
7093 has the number J. OLD contains the value to be used as input. */
7096 emit_input_reload_insns (struct insn_chain
*chain
, struct reload
*rl
,
7099 rtx insn
= chain
->insn
;
7101 rtx oldequiv_reg
= 0;
7104 enum machine_mode mode
;
7107 /* delete_output_reload is only invoked properly if old contains
7108 the original pseudo register. Since this is replaced with a
7109 hard reg when RELOAD_OVERRIDE_IN is set, see if we can
7110 find the pseudo in RELOAD_IN_REG. */
7111 if (reload_override_in
[j
]
7112 && REG_P (rl
->in_reg
))
7119 else if (REG_P (oldequiv
))
7120 oldequiv_reg
= oldequiv
;
7121 else if (GET_CODE (oldequiv
) == SUBREG
)
7122 oldequiv_reg
= SUBREG_REG (oldequiv
);
7124 reloadreg
= reload_reg_rtx_for_input
[j
];
7125 mode
= GET_MODE (reloadreg
);
7127 /* If we are reloading from a register that was recently stored in
7128 with an output-reload, see if we can prove there was
7129 actually no need to store the old value in it. */
7131 if (optimize
&& REG_P (oldequiv
)
7132 && REGNO (oldequiv
) < FIRST_PSEUDO_REGISTER
7133 && spill_reg_store
[REGNO (oldequiv
)]
7135 && (dead_or_set_p (insn
, spill_reg_stored_to
[REGNO (oldequiv
)])
7136 || rtx_equal_p (spill_reg_stored_to
[REGNO (oldequiv
)],
7138 delete_output_reload (insn
, j
, REGNO (oldequiv
), reloadreg
);
7140 /* Encapsulate OLDEQUIV into the reload mode, then load RELOADREG from
7143 while (GET_CODE (oldequiv
) == SUBREG
&& GET_MODE (oldequiv
) != mode
)
7144 oldequiv
= SUBREG_REG (oldequiv
);
7145 if (GET_MODE (oldequiv
) != VOIDmode
7146 && mode
!= GET_MODE (oldequiv
))
7147 oldequiv
= gen_lowpart_SUBREG (mode
, oldequiv
);
7149 /* Switch to the right place to emit the reload insns. */
7150 switch (rl
->when_needed
)
7153 where
= &other_input_reload_insns
;
7155 case RELOAD_FOR_INPUT
:
7156 where
= &input_reload_insns
[rl
->opnum
];
7158 case RELOAD_FOR_INPUT_ADDRESS
:
7159 where
= &input_address_reload_insns
[rl
->opnum
];
7161 case RELOAD_FOR_INPADDR_ADDRESS
:
7162 where
= &inpaddr_address_reload_insns
[rl
->opnum
];
7164 case RELOAD_FOR_OUTPUT_ADDRESS
:
7165 where
= &output_address_reload_insns
[rl
->opnum
];
7167 case RELOAD_FOR_OUTADDR_ADDRESS
:
7168 where
= &outaddr_address_reload_insns
[rl
->opnum
];
7170 case RELOAD_FOR_OPERAND_ADDRESS
:
7171 where
= &operand_reload_insns
;
7173 case RELOAD_FOR_OPADDR_ADDR
:
7174 where
= &other_operand_reload_insns
;
7176 case RELOAD_FOR_OTHER_ADDRESS
:
7177 where
= &other_input_address_reload_insns
;
7183 push_to_sequence (*where
);
7185 /* Auto-increment addresses must be reloaded in a special way. */
7186 if (rl
->out
&& ! rl
->out_reg
)
7188 /* We are not going to bother supporting the case where a
7189 incremented register can't be copied directly from
7190 OLDEQUIV since this seems highly unlikely. */
7191 gcc_assert (rl
->secondary_in_reload
< 0);
7193 if (reload_inherited
[j
])
7194 oldequiv
= reloadreg
;
7196 old
= XEXP (rl
->in_reg
, 0);
7198 /* Prevent normal processing of this reload. */
7200 /* Output a special code sequence for this case, and forget about
7201 spill reg information. */
7202 new_spill_reg_store
[REGNO (reloadreg
)] = NULL
;
7203 inc_for_reload (reloadreg
, oldequiv
, rl
->out
, rl
->inc
);
7206 /* If we are reloading a pseudo-register that was set by the previous
7207 insn, see if we can get rid of that pseudo-register entirely
7208 by redirecting the previous insn into our reload register. */
7210 else if (optimize
&& REG_P (old
)
7211 && REGNO (old
) >= FIRST_PSEUDO_REGISTER
7212 && dead_or_set_p (insn
, old
)
7213 /* This is unsafe if some other reload
7214 uses the same reg first. */
7215 && ! conflicts_with_override (reloadreg
)
7216 && free_for_value_p (REGNO (reloadreg
), rl
->mode
, rl
->opnum
,
7217 rl
->when_needed
, old
, rl
->out
, j
, 0))
7219 rtx temp
= PREV_INSN (insn
);
7220 while (temp
&& (NOTE_P (temp
) || DEBUG_INSN_P (temp
)))
7221 temp
= PREV_INSN (temp
);
7223 && NONJUMP_INSN_P (temp
)
7224 && GET_CODE (PATTERN (temp
)) == SET
7225 && SET_DEST (PATTERN (temp
)) == old
7226 /* Make sure we can access insn_operand_constraint. */
7227 && asm_noperands (PATTERN (temp
)) < 0
7228 /* This is unsafe if operand occurs more than once in current
7229 insn. Perhaps some occurrences aren't reloaded. */
7230 && count_occurrences (PATTERN (insn
), old
, 0) == 1)
7232 rtx old
= SET_DEST (PATTERN (temp
));
7233 /* Store into the reload register instead of the pseudo. */
7234 SET_DEST (PATTERN (temp
)) = reloadreg
;
7236 /* Verify that resulting insn is valid. */
7237 extract_insn (temp
);
7238 if (constrain_operands (1))
7240 /* If the previous insn is an output reload, the source is
7241 a reload register, and its spill_reg_store entry will
7242 contain the previous destination. This is now
7244 if (REG_P (SET_SRC (PATTERN (temp
)))
7245 && REGNO (SET_SRC (PATTERN (temp
))) < FIRST_PSEUDO_REGISTER
)
7247 spill_reg_store
[REGNO (SET_SRC (PATTERN (temp
)))] = 0;
7248 spill_reg_stored_to
[REGNO (SET_SRC (PATTERN (temp
)))] = 0;
7251 /* If these are the only uses of the pseudo reg,
7252 pretend for GDB it lives in the reload reg we used. */
7253 if (REG_N_DEATHS (REGNO (old
)) == 1
7254 && REG_N_SETS (REGNO (old
)) == 1)
7256 reg_renumber
[REGNO (old
)] = REGNO (reloadreg
);
7257 if (ira_conflicts_p
)
7258 /* Inform IRA about the change. */
7259 ira_mark_allocation_change (REGNO (old
));
7260 alter_reg (REGNO (old
), -1, false);
7264 /* Adjust any debug insns between temp and insn. */
7265 while ((temp
= NEXT_INSN (temp
)) != insn
)
7266 if (DEBUG_INSN_P (temp
))
7267 replace_rtx (PATTERN (temp
), old
, reloadreg
);
7269 gcc_assert (NOTE_P (temp
));
7273 SET_DEST (PATTERN (temp
)) = old
;
7278 /* We can't do that, so output an insn to load RELOADREG. */
7280 /* If we have a secondary reload, pick up the secondary register
7281 and icode, if any. If OLDEQUIV and OLD are different or
7282 if this is an in-out reload, recompute whether or not we
7283 still need a secondary register and what the icode should
7284 be. If we still need a secondary register and the class or
7285 icode is different, go back to reloading from OLD if using
7286 OLDEQUIV means that we got the wrong type of register. We
7287 cannot have different class or icode due to an in-out reload
7288 because we don't make such reloads when both the input and
7289 output need secondary reload registers. */
7291 if (! special
&& rl
->secondary_in_reload
>= 0)
7293 rtx second_reload_reg
= 0;
7294 rtx third_reload_reg
= 0;
7295 int secondary_reload
= rl
->secondary_in_reload
;
7296 rtx real_oldequiv
= oldequiv
;
7299 enum insn_code icode
;
7300 enum insn_code tertiary_icode
= CODE_FOR_nothing
;
7302 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
7303 and similarly for OLD.
7304 See comments in get_secondary_reload in reload.c. */
7305 /* If it is a pseudo that cannot be replaced with its
7306 equivalent MEM, we must fall back to reload_in, which
7307 will have all the necessary substitutions registered.
7308 Likewise for a pseudo that can't be replaced with its
7309 equivalent constant.
7311 Take extra care for subregs of such pseudos. Note that
7312 we cannot use reg_equiv_mem in this case because it is
7313 not in the right mode. */
7316 if (GET_CODE (tmp
) == SUBREG
)
7317 tmp
= SUBREG_REG (tmp
);
7319 && REGNO (tmp
) >= FIRST_PSEUDO_REGISTER
7320 && (reg_equiv_memory_loc (REGNO (tmp
)) != 0
7321 || reg_equiv_constant (REGNO (tmp
)) != 0))
7323 if (! reg_equiv_mem (REGNO (tmp
))
7324 || num_not_at_initial_offset
7325 || GET_CODE (oldequiv
) == SUBREG
)
7326 real_oldequiv
= rl
->in
;
7328 real_oldequiv
= reg_equiv_mem (REGNO (tmp
));
7332 if (GET_CODE (tmp
) == SUBREG
)
7333 tmp
= SUBREG_REG (tmp
);
7335 && REGNO (tmp
) >= FIRST_PSEUDO_REGISTER
7336 && (reg_equiv_memory_loc (REGNO (tmp
)) != 0
7337 || reg_equiv_constant (REGNO (tmp
)) != 0))
7339 if (! reg_equiv_mem (REGNO (tmp
))
7340 || num_not_at_initial_offset
7341 || GET_CODE (old
) == SUBREG
)
7344 real_old
= reg_equiv_mem (REGNO (tmp
));
7347 second_reload_reg
= rld
[secondary_reload
].reg_rtx
;
7348 if (rld
[secondary_reload
].secondary_in_reload
>= 0)
7350 int tertiary_reload
= rld
[secondary_reload
].secondary_in_reload
;
7352 third_reload_reg
= rld
[tertiary_reload
].reg_rtx
;
7353 tertiary_icode
= rld
[secondary_reload
].secondary_in_icode
;
7354 /* We'd have to add more code for quartary reloads. */
7355 gcc_assert (rld
[tertiary_reload
].secondary_in_reload
< 0);
7357 icode
= rl
->secondary_in_icode
;
7359 if ((old
!= oldequiv
&& ! rtx_equal_p (old
, oldequiv
))
7360 || (rl
->in
!= 0 && rl
->out
!= 0))
7362 secondary_reload_info sri
, sri2
;
7363 enum reg_class new_class
, new_t_class
;
7365 sri
.icode
= CODE_FOR_nothing
;
7366 sri
.prev_sri
= NULL
;
7368 = (enum reg_class
) targetm
.secondary_reload (1, real_oldequiv
,
7372 if (new_class
== NO_REGS
&& sri
.icode
== CODE_FOR_nothing
)
7373 second_reload_reg
= 0;
7374 else if (new_class
== NO_REGS
)
7376 if (reload_adjust_reg_for_icode (&second_reload_reg
,
7378 (enum insn_code
) sri
.icode
))
7380 icode
= (enum insn_code
) sri
.icode
;
7381 third_reload_reg
= 0;
7386 real_oldequiv
= real_old
;
7389 else if (sri
.icode
!= CODE_FOR_nothing
)
7390 /* We currently lack a way to express this in reloads. */
7394 sri2
.icode
= CODE_FOR_nothing
;
7395 sri2
.prev_sri
= &sri
;
7397 = (enum reg_class
) targetm
.secondary_reload (1, real_oldequiv
,
7400 if (new_t_class
== NO_REGS
&& sri2
.icode
== CODE_FOR_nothing
)
7402 if (reload_adjust_reg_for_temp (&second_reload_reg
,
7406 third_reload_reg
= 0;
7407 tertiary_icode
= (enum insn_code
) sri2
.icode
;
7412 real_oldequiv
= real_old
;
7415 else if (new_t_class
== NO_REGS
&& sri2
.icode
!= CODE_FOR_nothing
)
7417 rtx intermediate
= second_reload_reg
;
7419 if (reload_adjust_reg_for_temp (&intermediate
, NULL
,
7421 && reload_adjust_reg_for_icode (&third_reload_reg
, NULL
,
7425 second_reload_reg
= intermediate
;
7426 tertiary_icode
= (enum insn_code
) sri2
.icode
;
7431 real_oldequiv
= real_old
;
7434 else if (new_t_class
!= NO_REGS
&& sri2
.icode
== CODE_FOR_nothing
)
7436 rtx intermediate
= second_reload_reg
;
7438 if (reload_adjust_reg_for_temp (&intermediate
, NULL
,
7440 && reload_adjust_reg_for_temp (&third_reload_reg
, NULL
,
7443 second_reload_reg
= intermediate
;
7444 tertiary_icode
= (enum insn_code
) sri2
.icode
;
7449 real_oldequiv
= real_old
;
7454 /* This could be handled more intelligently too. */
7456 real_oldequiv
= real_old
;
7461 /* If we still need a secondary reload register, check
7462 to see if it is being used as a scratch or intermediate
7463 register and generate code appropriately. If we need
7464 a scratch register, use REAL_OLDEQUIV since the form of
7465 the insn may depend on the actual address if it is
7468 if (second_reload_reg
)
7470 if (icode
!= CODE_FOR_nothing
)
7472 /* We'd have to add extra code to handle this case. */
7473 gcc_assert (!third_reload_reg
);
7475 emit_insn (GEN_FCN (icode
) (reloadreg
, real_oldequiv
,
7476 second_reload_reg
));
7481 /* See if we need a scratch register to load the
7482 intermediate register (a tertiary reload). */
7483 if (tertiary_icode
!= CODE_FOR_nothing
)
7485 emit_insn ((GEN_FCN (tertiary_icode
)
7486 (second_reload_reg
, real_oldequiv
,
7487 third_reload_reg
)));
7489 else if (third_reload_reg
)
7491 gen_reload (third_reload_reg
, real_oldequiv
,
7494 gen_reload (second_reload_reg
, third_reload_reg
,
7499 gen_reload (second_reload_reg
, real_oldequiv
,
7503 oldequiv
= second_reload_reg
;
7508 if (! special
&& ! rtx_equal_p (reloadreg
, oldequiv
))
7510 rtx real_oldequiv
= oldequiv
;
7512 if ((REG_P (oldequiv
)
7513 && REGNO (oldequiv
) >= FIRST_PSEUDO_REGISTER
7514 && (reg_equiv_memory_loc (REGNO (oldequiv
)) != 0
7515 || reg_equiv_constant (REGNO (oldequiv
)) != 0))
7516 || (GET_CODE (oldequiv
) == SUBREG
7517 && REG_P (SUBREG_REG (oldequiv
))
7518 && (REGNO (SUBREG_REG (oldequiv
))
7519 >= FIRST_PSEUDO_REGISTER
)
7520 && ((reg_equiv_memory_loc (REGNO (SUBREG_REG (oldequiv
))) != 0)
7521 || (reg_equiv_constant (REGNO (SUBREG_REG (oldequiv
))) != 0)))
7522 || (CONSTANT_P (oldequiv
)
7523 && (targetm
.preferred_reload_class (oldequiv
,
7524 REGNO_REG_CLASS (REGNO (reloadreg
)))
7526 real_oldequiv
= rl
->in
;
7527 gen_reload (reloadreg
, real_oldequiv
, rl
->opnum
,
7531 if (cfun
->can_throw_non_call_exceptions
)
7532 copy_reg_eh_region_note_forward (insn
, get_insns (), NULL
);
7534 /* End this sequence. */
7535 *where
= get_insns ();
7538 /* Update reload_override_in so that delete_address_reloads_1
7539 can see the actual register usage. */
7541 reload_override_in
[j
] = oldequiv
;
/* NOTE(review): damaged extraction of GCC reload1.c — each original line
   is fragmented over several physical lines, original line numbers are
   fused into the text, and interior lines (braces, else-arms, parts of
   conditions) are missing wherever the embedded numbers jump.  Code left
   byte-identical; only review comments added.  */
7544 /* Generate insns to for the output reload RL, which is for the insn described
7545 by CHAIN and has the number J. */
7547 emit_output_reload_insns (struct insn_chain
*chain
, struct reload
*rl
,
7551 rtx insn
= chain
->insn
;
7554 enum machine_mode mode
;
7558 if (rl
->when_needed
== RELOAD_OTHER
)
/* Collect the emitted insns into the per-operand output-reload sequence.  */
7561 push_to_sequence (output_reload_insns
[rl
->opnum
]);
7563 rl_reg_rtx
= reload_reg_rtx_for_output
[j
];
7564 mode
= GET_MODE (rl_reg_rtx
);
7566 reloadreg
= rl_reg_rtx
;
7568 /* If we need two reload regs, set RELOADREG to the intermediate
7569 one, since it will be stored into OLD. We might need a secondary
7570 register only for an input reload, so check again here. */
7572 if (rl
->secondary_out_reload
>= 0)
7575 int secondary_reload
= rl
->secondary_out_reload
;
7576 int tertiary_reload
= rld
[secondary_reload
].secondary_out_reload
;
7578 if (REG_P (old
) && REGNO (old
) >= FIRST_PSEUDO_REGISTER
7579 && reg_equiv_mem (REGNO (old
)) != 0)
7580 real_old
= reg_equiv_mem (REGNO (old
));
7582 if (secondary_reload_class (0, rl
->rclass
, mode
, real_old
) != NO_REGS
)
7584 rtx second_reloadreg
= reloadreg
;
7585 reloadreg
= rld
[secondary_reload
].reg_rtx
;
7587 /* See if RELOADREG is to be used as a scratch register
7588 or as an intermediate register. */
7589 if (rl
->secondary_out_icode
!= CODE_FOR_nothing
)
7591 /* We'd have to add extra code to handle this case. */
7592 gcc_assert (tertiary_reload
< 0);
7594 emit_insn ((GEN_FCN (rl
->secondary_out_icode
)
7595 (real_old
, second_reloadreg
, reloadreg
)));
7600 /* See if we need both a scratch and intermediate reload
7603 enum insn_code tertiary_icode
7604 = rld
[secondary_reload
].secondary_out_icode
;
7606 /* We'd have to add more code for quartary reloads. */
7607 gcc_assert (tertiary_reload
< 0
7608 || rld
[tertiary_reload
].secondary_out_reload
< 0);
7610 if (GET_MODE (reloadreg
) != mode
)
7611 reloadreg
= reload_adjust_reg_for_mode (reloadreg
, mode
);
7613 if (tertiary_icode
!= CODE_FOR_nothing
)
7615 rtx third_reloadreg
= rld
[tertiary_reload
].reg_rtx
;
7617 /* Copy primary reload reg to secondary reload reg.
7618 (Note that these have been swapped above, then
7619 secondary reload reg to OLD using our insn.) */
7621 /* If REAL_OLD is a paradoxical SUBREG, remove it
7622 and try to put the opposite SUBREG on
7624 strip_paradoxical_subreg (&real_old
, &reloadreg
);
7626 gen_reload (reloadreg
, second_reloadreg
,
7627 rl
->opnum
, rl
->when_needed
);
7628 emit_insn ((GEN_FCN (tertiary_icode
)
7629 (real_old
, reloadreg
, third_reloadreg
)));
7635 /* Copy between the reload regs here and then to
7638 gen_reload (reloadreg
, second_reloadreg
,
7639 rl
->opnum
, rl
->when_needed
);
7640 if (tertiary_reload
>= 0)
7642 rtx third_reloadreg
= rld
[tertiary_reload
].reg_rtx
;
7644 gen_reload (third_reloadreg
, reloadreg
,
7645 rl
->opnum
, rl
->when_needed
);
7646 reloadreg
= third_reloadreg
;
7653 /* Output the last reload insn. */
7658 /* Don't output the last reload if OLD is not the dest of
7659 INSN and is in the src and is clobbered by INSN. */
7660 if (! flag_expensive_optimizations
7662 || !(set
= single_set (insn
))
7663 || rtx_equal_p (old
, SET_DEST (set
))
7664 || !reg_mentioned_p (old
, SET_SRC (set
))
7665 || !((REGNO (old
) < FIRST_PSEUDO_REGISTER
)
7666 && regno_clobbered_p (REGNO (old
), insn
, rl
->mode
, 0)))
7667 gen_reload (old
, reloadreg
, rl
->opnum
,
7671 /* Look at all insns we emitted, just to be safe. */
7672 for (p
= get_insns (); p
; p
= NEXT_INSN (p
))
7675 rtx pat
= PATTERN (p
);
7677 /* If this output reload doesn't come from a spill reg,
7678 clear any memory of reloaded copies of the pseudo reg.
7679 If this output reload comes from a spill reg,
7680 reg_has_output_reload will make this do nothing. */
7681 note_stores (pat
, forget_old_reloads_1
, NULL
);
7683 if (reg_mentioned_p (rl_reg_rtx
, pat
))
7685 rtx set
= single_set (insn
);
7686 if (reload_spill_index
[j
] < 0
7688 && SET_SRC (set
) == rl_reg_rtx
)
7690 int src
= REGNO (SET_SRC (set
));
7692 reload_spill_index
[j
] = src
;
7693 SET_HARD_REG_BIT (reg_is_output_reload
, src
);
7694 if (find_regno_note (insn
, REG_DEAD
, src
))
7695 SET_HARD_REG_BIT (reg_reloaded_died
, src
);
7697 if (HARD_REGISTER_P (rl_reg_rtx
))
7699 int s
= rl
->secondary_out_reload
;
7700 set
= single_set (p
);
7701 /* If this reload copies only to the secondary reload
7702 register, the secondary reload does the actual
7704 if (s
>= 0 && set
== NULL_RTX
)
7705 /* We can't tell what function the secondary reload
7706 has and where the actual store to the pseudo is
7707 made; leave new_spill_reg_store alone. */
7710 && SET_SRC (set
) == rl_reg_rtx
7711 && SET_DEST (set
) == rld
[s
].reg_rtx
)
7713 /* Usually the next instruction will be the
7714 secondary reload insn; if we can confirm
7715 that it is, setting new_spill_reg_store to
7716 that insn will allow an extra optimization. */
7717 rtx s_reg
= rld
[s
].reg_rtx
;
7718 rtx next
= NEXT_INSN (p
);
7719 rld
[s
].out
= rl
->out
;
7720 rld
[s
].out_reg
= rl
->out_reg
;
7721 set
= single_set (next
);
7722 if (set
&& SET_SRC (set
) == s_reg
7723 && ! new_spill_reg_store
[REGNO (s_reg
)])
7725 SET_HARD_REG_BIT (reg_is_output_reload
,
7727 new_spill_reg_store
[REGNO (s_reg
)] = next
;
7731 new_spill_reg_store
[REGNO (rl_reg_rtx
)] = p
;
/* End the sequence: RELOAD_OTHER output reloads are chained onto the
   per-operand other_output list; ordinary ones onto output_reload_insns.  */
7736 if (rl
->when_needed
== RELOAD_OTHER
)
7738 emit_insn (other_output_reload_insns
[rl
->opnum
]);
7739 other_output_reload_insns
[rl
->opnum
] = get_insns ();
7742 output_reload_insns
[rl
->opnum
] = get_insns ();
7744 if (cfun
->can_throw_non_call_exceptions
)
7745 copy_reg_eh_region_note_forward (insn
, get_insns (), NULL
);
/* NOTE(review): this listing is a damaged extraction of GCC's reload pass
   (reload1.c).  Each original source line is split across several physical
   lines, the original line numbers (e.g. "7753") are fused into the text,
   and lines holding only braces/conditions were dropped (the embedded
   numbers are not contiguous).  Code is left byte-identical below; only
   review comments are added.  Reconstruct from upstream reload1.c.  */
/* Per the original header comment: perform the input reload RL (reload
   number J) for the insn described by CHAIN.  */
7750 /* Do input reloading for reload RL, which is for the insn described by CHAIN
7751 and has the number J. */
7753 do_input_reload (struct insn_chain
*chain
, struct reload
*rl
, int j
)
/* Locals: the insn being reloaded, the value to reload (OLD — rl->in_reg
   when rl->in is a MEM, else rl->in), and the chosen reload register.  */
7755 rtx insn
= chain
->insn
;
7756 rtx old
= (rl
->in
&& MEM_P (rl
->in
)
7757 ? rl
->in_reg
: rl
->in
);
7758 rtx reg_rtx
= rl
->reg_rtx
;
7762 enum machine_mode mode
;
7764 /* Determine the mode to reload in.
7765 This is very tricky because we have three to choose from.
7766 There is the mode the insn operand wants (rl->inmode).
7767 There is the mode of the reload register RELOADREG.
7768 There is the intrinsic mode of the operand, which we could find
7769 by stripping some SUBREGs.
7770 It turns out that RELOADREG's mode is irrelevant:
7771 we can change that arbitrarily.
7773 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
7774 then the reload reg may not support QImode moves, so use SImode.
7775 If foo is in memory due to spilling a pseudo reg, this is safe,
7776 because the QImode value is in the least significant part of a
7777 slot big enough for a SImode. If foo is some other sort of
7778 memory reference, then it is impossible to reload this case,
7779 so previous passes had better make sure this never happens.
7781 Then consider a one-word union which has SImode and one of its
7782 members is a float, being fetched as (SUBREG:SF union:SI).
7783 We must fetch that as SFmode because we could be loading into
7784 a float-only register. In this case OLD's mode is correct.
7786 Consider an immediate integer: it has VOIDmode. Here we need
7787 to get a mode from something else.
7789 In some cases, there is a fourth mode, the operand's
7790 containing mode. If the insn specifies a containing mode for
7791 this operand, it overrides all others.
7793 I am not sure whether the algorithm here is always right,
7794 but it does the right things in those cases. */
7796 mode
= GET_MODE (old
);
/* NOTE(review): the fallback taken when MODE is VOIDmode (original line
   ~7798) was dropped by the extraction — presumably mode = rl->inmode;
   confirm against upstream.  */
7797 if (mode
== VOIDmode
)
7800 /* We cannot use gen_lowpart_common since it can do the wrong thing
7801 when REG_RTX has a multi-word mode. Note that REG_RTX must
7802 always be a REG here. */
7803 if (GET_MODE (reg_rtx
) != mode
)
7804 reg_rtx
= reload_adjust_reg_for_mode (reg_rtx
, mode
);
/* Record the (possibly mode-adjusted) reload register for reload J.  */
7806 reload_reg_rtx_for_input
[j
] = reg_rtx
;
7809 /* AUTO_INC reloads need to be handled even if inherited. We got an
7810 AUTO_INC reload if reload_out is set but reload_out_reg isn't. */
/* NOTE(review): the first conjunct of this condition (original line ~7808)
   is missing from the extraction.  */
7811 && (! reload_inherited
[j
] || (rl
->out
&& ! rl
->out_reg
))
7812 && ! rtx_equal_p (reg_rtx
, old
)
7814 emit_input_reload_insns (chain
, rld
+ j
, old
, j
);
7816 /* When inheriting a wider reload, we have a MEM in rl->in,
7817 e.g. inheriting a SImode output reload for
7818 (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10))) */
7819 if (optimize
&& reload_inherited
[j
] && rl
->in
7821 && MEM_P (rl
->in_reg
)
7822 && reload_spill_index
[j
] >= 0
7823 && TEST_HARD_REG_BIT (reg_reloaded_valid
, reload_spill_index
[j
]))
7824 rl
->in
= regno_reg_rtx
[reg_reloaded_contents
[reload_spill_index
[j
]]];
7826 /* If we are reloading a register that was recently stored in with an
7827 output-reload, see if we can prove there was
7828 actually no need to store the old value in it. */
/* NOTE(review): the leading conjuncts of this condition (original lines
   ~7829-7835) were dropped by the extraction.  */
7831 && (reload_inherited
[j
] || reload_override_in
[j
])
7834 && spill_reg_store
[REGNO (reg_rtx
)] != 0
7836 /* There doesn't seem to be any reason to restrict this to pseudos
7837 and doing so loses in the case where we are copying from a
7838 register of the wrong class. */
7841 /* The insn might have already some references to stackslots
7842 replaced by MEMs, while reload_out_reg still names the
7844 && (dead_or_set_p (insn
, spill_reg_stored_to
[REGNO (reg_rtx
)])
7845 || rtx_equal_p (spill_reg_stored_to
[REGNO (reg_rtx
)], rl
->out_reg
)))
7846 delete_output_reload (insn
, j
, REGNO (reg_rtx
), reg_rtx
);
/* NOTE(review): damaged extraction — original lines are fragmented and
   some interior lines (braces, parts of conditions) are missing; see the
   non-contiguous embedded line numbers.  Code left byte-identical; only
   review comments added.  */
7849 /* Do output reloading for reload RL, which is for the insn described by
7850 CHAIN and has the number J.
7851 ??? At some point we need to support handling output reloads of
7852 JUMP_INSNs or insns that set cc0. */
7854 do_output_reload (struct insn_chain
*chain
, struct reload
*rl
, int j
)
7857 rtx insn
= chain
->insn
;
7858 /* If this is an output reload that stores something that is
7859 not loaded in this same reload, see if we can eliminate a previous
7861 rtx pseudo
= rl
->out_reg
;
7862 rtx reg_rtx
= rl
->reg_rtx
;
7864 if (rl
->out
&& reg_rtx
)
7866 enum machine_mode mode
;
7868 /* Determine the mode to reload in.
7869 See comments above (for input reloading). */
7870 mode
= GET_MODE (rl
->out
);
7871 if (mode
== VOIDmode
)
7873 /* VOIDmode should never happen for an output. */
7874 if (asm_noperands (PATTERN (insn
)) < 0)
7875 /* It's the compiler's fault. */
7876 fatal_insn ("VOIDmode on an output", insn
);
/* Non-fatal diagnostic for a user asm with a constant output operand.  */
7877 error_for_asm (insn
, "output operand is constant in %<asm%>");
7878 /* Prevent crash--use something we know is valid. */
/* NOTE(review): the assignment of a valid MODE (original line ~7879)
   appears to be missing here — confirm against upstream reload1.c.  */
7880 rl
->out
= gen_rtx_REG (mode
, REGNO (reg_rtx
));
7882 if (GET_MODE (reg_rtx
) != mode
)
7883 reg_rtx
= reload_adjust_reg_for_mode (reg_rtx
, mode
);
/* Record the chosen output reload register for reload J.  */
7885 reload_reg_rtx_for_output
[j
] = reg_rtx
;
/* NOTE(review): the leading conjuncts of this condition (original lines
   ~7887-7889) were dropped by the extraction.  */
7890 && ! rtx_equal_p (rl
->in_reg
, pseudo
)
7891 && REGNO (pseudo
) >= FIRST_PSEUDO_REGISTER
7892 && reg_last_reload_reg
[REGNO (pseudo
)])
7894 int pseudo_no
= REGNO (pseudo
);
7895 int last_regno
= REGNO (reg_last_reload_reg
[pseudo_no
]);
7897 /* We don't need to test full validity of last_regno for
7898 inherit here; we only want to know if the store actually
7899 matches the pseudo. */
7900 if (TEST_HARD_REG_BIT (reg_reloaded_valid
, last_regno
)
7901 && reg_reloaded_contents
[last_regno
] == pseudo_no
7902 && spill_reg_store
[last_regno
]
7903 && rtx_equal_p (pseudo
, spill_reg_stored_to
[last_regno
]))
7904 delete_output_reload (insn
, j
, last_regno
, reg_rtx
);
/* NOTE(review): surrounding condition (original lines ~7906-7909)
   missing from the extraction.  */
7910 || rtx_equal_p (old
, reg_rtx
))
7913 /* An output operand that dies right away does need a reload,
7914 but need not be copied from it. Show the new location in the
7916 if ((REG_P (old
) || GET_CODE (old
) == SCRATCH
)
7917 && (note
= find_reg_note (insn
, REG_UNUSED
, old
)) != 0)
7919 XEXP (note
, 0) = reg_rtx
;
7922 /* Likewise for a SUBREG of an operand that dies. */
7923 else if (GET_CODE (old
) == SUBREG
7924 && REG_P (SUBREG_REG (old
))
7925 && 0 != (note
= find_reg_note (insn
, REG_UNUSED
,
7928 XEXP (note
, 0) = gen_lowpart_common (GET_MODE (old
), reg_rtx
);
7931 else if (GET_CODE (old
) == SCRATCH
)
7932 /* If we aren't optimizing, there won't be a REG_UNUSED note,
7933 but we don't want to make an output reload. */
7936 /* If is a JUMP_INSN, we can't support output reloads yet. */
7937 gcc_assert (NONJUMP_INSN_P (insn
));
/* All checks passed: actually emit the insns for this output reload.  */
7939 emit_output_reload_insns (chain
, rld
+ j
, j
);
/* NOTE(review): damaged extraction — the function's return-type line and
   the #else/non-CANNOT_CHANGE_MODE_CLASS branch (which presumably returns
   true — confirm upstream) were dropped.  Code left byte-identical.  */
7942 /* A reload copies values of MODE from register SRC to register DEST.
7943 Return true if it can be treated for inheritance purposes like a
7944 group of reloads, each one reloading a single hard register. The
7945 caller has already checked that (reg:MODE SRC) and (reg:MODE DEST)
7946 occupy the same number of hard registers. */
7949 inherit_piecemeal_p (int dest ATTRIBUTE_UNUSED
,
7950 int src ATTRIBUTE_UNUSED
,
7951 enum machine_mode mode ATTRIBUTE_UNUSED
)
/* Piecemeal inheritance is only restricted on targets where changing a
   register's mode can be invalid (CANNOT_CHANGE_MODE_CLASS).  */
7953 #ifdef CANNOT_CHANGE_MODE_CLASS
7954 return (!REG_CANNOT_CHANGE_MODE_P (dest
, mode
, reg_raw_mode
[dest
])
7955 && !REG_CANNOT_CHANGE_MODE_P (src
, mode
, reg_raw_mode
[src
]));
/* NOTE(review): damaged extraction of GCC reload1.c — lines are
   fragmented, original line numbers are fused into the text, and interior
   lines are missing wherever the embedded numbers jump.  Several
   occurrences of "®_has_output_reload" below look like mojibake for
   "&reg_has_output_reload" (HTML entity "&reg;" mis-decoded) — confirm
   against upstream reload1.c.  Code left byte-identical; only review
   comments added.  */
7961 /* Output insns to reload values in and out of the chosen reload regs. */
7964 emit_reload_insns (struct insn_chain
*chain
)
7966 rtx insn
= chain
->insn
;
7970 CLEAR_HARD_REG_SET (reg_reloaded_died
);
/* Reset all per-operand pending-insn sequences before emitting.  */
7972 for (j
= 0; j
< reload_n_operands
; j
++)
7973 input_reload_insns
[j
] = input_address_reload_insns
[j
]
7974 = inpaddr_address_reload_insns
[j
]
7975 = output_reload_insns
[j
] = output_address_reload_insns
[j
]
7976 = outaddr_address_reload_insns
[j
]
7977 = other_output_reload_insns
[j
] = 0;
7978 other_input_address_reload_insns
= 0;
7979 other_input_reload_insns
= 0;
7980 operand_reload_insns
= 0;
7981 other_operand_reload_insns
= 0;
7983 /* Dump reloads into the dump file. */
7986 fprintf (dump_file
, "\nReloads for insn # %d\n", INSN_UID (insn
));
7987 debug_reload_to_stream (dump_file
);
7990 /* Now output the instructions to copy the data into and out of the
7991 reload registers. Do these in the order that the reloads were reported,
7992 since reloads of base and index registers precede reloads of operands
7993 and the operands may need the base and index registers reloaded. */
7995 for (j
= 0; j
< n_reloads
; j
++)
7997 if (rld
[j
].reg_rtx
&& HARD_REGISTER_P (rld
[j
].reg_rtx
))
8001 for (i
= REGNO (rld
[j
].reg_rtx
); i
< END_REGNO (rld
[j
].reg_rtx
); i
++)
8002 new_spill_reg_store
[i
] = 0;
/* Delegate the actual emission to the input/output helpers.  */
8005 do_input_reload (chain
, rld
+ j
, j
);
8006 do_output_reload (chain
, rld
+ j
, j
);
8009 /* Now write all the insns we made for reloads in the order expected by
8010 the allocation functions. Prior to the insn being reloaded, we write
8011 the following reloads:
8013 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
8015 RELOAD_OTHER reloads.
8017 For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
8018 by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
8019 RELOAD_FOR_INPUT reload for the operand.
8021 RELOAD_FOR_OPADDR_ADDRS reloads.
8023 RELOAD_FOR_OPERAND_ADDRESS reloads.
8025 After the insn being reloaded, we write the following:
8027 For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
8028 by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
8029 RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
8030 reloads for the operand. The RELOAD_OTHER output reloads are
8031 output in descending order by reload number. */
8033 emit_insn_before (other_input_address_reload_insns
, insn
);
8034 emit_insn_before (other_input_reload_insns
, insn
);
8036 for (j
= 0; j
< reload_n_operands
; j
++)
8038 emit_insn_before (inpaddr_address_reload_insns
[j
], insn
);
8039 emit_insn_before (input_address_reload_insns
[j
], insn
);
8040 emit_insn_before (input_reload_insns
[j
], insn
);
8043 emit_insn_before (other_operand_reload_insns
, insn
);
8044 emit_insn_before (operand_reload_insns
, insn
);
8046 for (j
= 0; j
< reload_n_operands
; j
++)
8048 rtx x
= emit_insn_after (outaddr_address_reload_insns
[j
], insn
);
8049 x
= emit_insn_after (output_address_reload_insns
[j
], x
);
8050 x
= emit_insn_after (output_reload_insns
[j
], x
);
8051 emit_insn_after (other_output_reload_insns
[j
], x
);
8054 /* For all the spill regs newly reloaded in this instruction,
8055 record what they were reloaded from, so subsequent instructions
8056 can inherit the reloads.
8058 Update spill_reg_store for the reloads of this insn.
8059 Copy the elements that were updated in the loop above. */
8061 for (j
= 0; j
< n_reloads
; j
++)
8063 int r
= reload_order
[j
];
8064 int i
= reload_spill_index
[r
];
8066 /* If this is a non-inherited input reload from a pseudo, we must
8067 clear any memory of a previous store to the same pseudo. Only do
8068 something if there will not be an output reload for the pseudo
8070 if (rld
[r
].in_reg
!= 0
8071 && ! (reload_inherited
[r
] || reload_override_in
[r
]))
8073 rtx reg
= rld
[r
].in_reg
;
8075 if (GET_CODE (reg
) == SUBREG
)
8076 reg
= SUBREG_REG (reg
);
8079 && REGNO (reg
) >= FIRST_PSEUDO_REGISTER
8080 && !REGNO_REG_SET_P (®_has_output_reload
, REGNO (reg
)))
8082 int nregno
= REGNO (reg
);
8084 if (reg_last_reload_reg
[nregno
])
8086 int last_regno
= REGNO (reg_last_reload_reg
[nregno
]);
8088 if (reg_reloaded_contents
[last_regno
] == nregno
)
8089 spill_reg_store
[last_regno
] = 0;
8094 /* I is nonneg if this reload used a register.
8095 If rld[r].reg_rtx is 0, this is an optional reload
8096 that we opted to ignore. */
8098 if (i
>= 0 && rld
[r
].reg_rtx
!= 0)
8100 int nr
= hard_regno_nregs
[i
][GET_MODE (rld
[r
].reg_rtx
)];
8103 /* For a multi register reload, we need to check if all or part
8104 of the value lives to the end. */
8105 for (k
= 0; k
< nr
; k
++)
8106 if (reload_reg_reaches_end_p (i
+ k
, rld
[r
].opnum
,
8107 rld
[r
].when_needed
))
8108 CLEAR_HARD_REG_BIT (reg_reloaded_valid
, i
+ k
);
8110 /* Maybe the spill reg contains a copy of reload_out. */
8112 && (REG_P (rld
[r
].out
)
8114 ? REG_P (rld
[r
].out_reg
)
8115 /* The reload value is an auto-modification of
8116 some kind. For PRE_INC, POST_INC, PRE_DEC
8117 and POST_DEC, we record an equivalence
8118 between the reload register and the operand
8119 on the optimistic assumption that we can make
8120 the equivalence hold. reload_as_needed must
8121 then either make it hold or invalidate the
8124 PRE_MODIFY and POST_MODIFY addresses are reloaded
8125 somewhat differently, and allowing them here leads
8127 : (GET_CODE (rld
[r
].out
) != POST_MODIFY
8128 && GET_CODE (rld
[r
].out
) != PRE_MODIFY
))))
8131 enum machine_mode mode
;
8134 reg
= reload_reg_rtx_for_output
[r
];
8135 mode
= GET_MODE (reg
);
8136 regno
= REGNO (reg
);
8137 nregs
= hard_regno_nregs
[regno
][mode
];
8138 if (reload_regs_reach_end_p (regno
, nregs
, rld
[r
].opnum
,
8139 rld
[r
].when_needed
))
8141 rtx out
= (REG_P (rld
[r
].out
)
8145 /* AUTO_INC */ : XEXP (rld
[r
].in_reg
, 0));
8146 int out_regno
= REGNO (out
);
8147 int out_nregs
= (!HARD_REGISTER_NUM_P (out_regno
) ? 1
8148 : hard_regno_nregs
[out_regno
][mode
]);
/* Record where the spill reg got its value so later insns can inherit.  */
8151 spill_reg_store
[regno
] = new_spill_reg_store
[regno
];
8152 spill_reg_stored_to
[regno
] = out
;
8153 reg_last_reload_reg
[out_regno
] = reg
;
8155 piecemeal
= (HARD_REGISTER_NUM_P (out_regno
)
8156 && nregs
== out_nregs
8157 && inherit_piecemeal_p (out_regno
, regno
, mode
));
8159 /* If OUT_REGNO is a hard register, it may occupy more than
8160 one register. If it does, say what is in the
8161 rest of the registers assuming that both registers
8162 agree on how many words the object takes. If not,
8163 invalidate the subsequent registers. */
8165 if (HARD_REGISTER_NUM_P (out_regno
))
8166 for (k
= 1; k
< out_nregs
; k
++)
8167 reg_last_reload_reg
[out_regno
+ k
]
8168 = (piecemeal
? regno_reg_rtx
[regno
+ k
] : 0);
8170 /* Now do the inverse operation. */
8171 for (k
= 0; k
< nregs
; k
++)
8173 CLEAR_HARD_REG_BIT (reg_reloaded_dead
, regno
+ k
);
8174 reg_reloaded_contents
[regno
+ k
]
8175 = (!HARD_REGISTER_NUM_P (out_regno
) || !piecemeal
8178 reg_reloaded_insn
[regno
+ k
] = insn
;
8179 SET_HARD_REG_BIT (reg_reloaded_valid
, regno
+ k
);
8180 if (HARD_REGNO_CALL_PART_CLOBBERED (regno
+ k
, mode
))
8181 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered
,
8184 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered
,
8189 /* Maybe the spill reg contains a copy of reload_in. Only do
8190 something if there will not be an output reload for
8191 the register being reloaded. */
8192 else if (rld
[r
].out_reg
== 0
8194 && ((REG_P (rld
[r
].in
)
8195 && !HARD_REGISTER_P (rld
[r
].in
)
8196 && !REGNO_REG_SET_P (®_has_output_reload
,
8198 || (REG_P (rld
[r
].in_reg
)
8199 && !REGNO_REG_SET_P (®_has_output_reload
,
8200 REGNO (rld
[r
].in_reg
))))
8201 && !reg_set_p (reload_reg_rtx_for_input
[r
], PATTERN (insn
)))
8204 enum machine_mode mode
;
8207 reg
= reload_reg_rtx_for_input
[r
];
8208 mode
= GET_MODE (reg
);
8209 regno
= REGNO (reg
);
8210 nregs
= hard_regno_nregs
[regno
][mode
];
8211 if (reload_regs_reach_end_p (regno
, nregs
, rld
[r
].opnum
,
8212 rld
[r
].when_needed
))
8219 if (REG_P (rld
[r
].in
)
8220 && REGNO (rld
[r
].in
) >= FIRST_PSEUDO_REGISTER
)
8222 else if (REG_P (rld
[r
].in_reg
))
8225 in
= XEXP (rld
[r
].in_reg
, 0);
8226 in_regno
= REGNO (in
);
8228 in_nregs
= (!HARD_REGISTER_NUM_P (in_regno
) ? 1
8229 : hard_regno_nregs
[in_regno
][mode
]);
8231 reg_last_reload_reg
[in_regno
] = reg
;
8233 piecemeal
= (HARD_REGISTER_NUM_P (in_regno
)
8234 && nregs
== in_nregs
8235 && inherit_piecemeal_p (regno
, in_regno
, mode
));
8237 if (HARD_REGISTER_NUM_P (in_regno
))
8238 for (k
= 1; k
< in_nregs
; k
++)
8239 reg_last_reload_reg
[in_regno
+ k
]
8240 = (piecemeal
? regno_reg_rtx
[regno
+ k
] : 0);
8242 /* Unless we inherited this reload, show we haven't
8243 recently done a store.
8244 Previous stores of inherited auto_inc expressions
8245 also have to be discarded. */
8246 if (! reload_inherited
[r
]
8247 || (rld
[r
].out
&& ! rld
[r
].out_reg
))
8248 spill_reg_store
[regno
] = 0;
8250 for (k
= 0; k
< nregs
; k
++)
8252 CLEAR_HARD_REG_BIT (reg_reloaded_dead
, regno
+ k
);
8253 reg_reloaded_contents
[regno
+ k
]
8254 = (!HARD_REGISTER_NUM_P (in_regno
) || !piecemeal
8257 reg_reloaded_insn
[regno
+ k
] = insn
;
8258 SET_HARD_REG_BIT (reg_reloaded_valid
, regno
+ k
);
8259 if (HARD_REGNO_CALL_PART_CLOBBERED (regno
+ k
, mode
))
8260 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered
,
8263 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered
,
8270 /* The following if-statement was #if 0'd in 1.34 (or before...).
8271 It's reenabled in 1.35 because supposedly nothing else
8272 deals with this problem. */
8274 /* If a register gets output-reloaded from a non-spill register,
8275 that invalidates any previous reloaded copy of it.
8276 But forget_old_reloads_1 won't get to see it, because
8277 it thinks only about the original insn. So invalidate it here.
8278 Also do the same thing for RELOAD_OTHER constraints where the
8279 output is discarded. */
8281 && ((rld
[r
].out
!= 0
8282 && (REG_P (rld
[r
].out
)
8283 || (MEM_P (rld
[r
].out
)
8284 && REG_P (rld
[r
].out_reg
))))
8285 || (rld
[r
].out
== 0 && rld
[r
].out_reg
8286 && REG_P (rld
[r
].out_reg
))))
8288 rtx out
= ((rld
[r
].out
&& REG_P (rld
[r
].out
))
8289 ? rld
[r
].out
: rld
[r
].out_reg
);
8290 int out_regno
= REGNO (out
);
8291 enum machine_mode mode
= GET_MODE (out
);
8293 /* REG_RTX is now set or clobbered by the main instruction.
8294 As the comment above explains, forget_old_reloads_1 only
8295 sees the original instruction, and there is no guarantee
8296 that the original instruction also clobbered REG_RTX.
8297 For example, if find_reloads sees that the input side of
8298 a matched operand pair dies in this instruction, it may
8299 use the input register as the reload register.
8301 Calling forget_old_reloads_1 is a waste of effort if
8302 REG_RTX is also the output register.
8304 If we know that REG_RTX holds the value of a pseudo
8305 register, the code after the call will record that fact. */
8306 if (rld
[r
].reg_rtx
&& rld
[r
].reg_rtx
!= out
)
8307 forget_old_reloads_1 (rld
[r
].reg_rtx
, NULL_RTX
, NULL
);
8309 if (!HARD_REGISTER_NUM_P (out_regno
))
8311 rtx src_reg
, store_insn
= NULL_RTX
;
8313 reg_last_reload_reg
[out_regno
] = 0;
8315 /* If we can find a hard register that is stored, record
8316 the storing insn so that we may delete this insn with
8317 delete_output_reload. */
8318 src_reg
= reload_reg_rtx_for_output
[r
];
8320 /* If this is an optional reload, try to find the source reg
8321 from an input reload. */
8324 rtx set
= single_set (insn
);
8325 if (set
&& SET_DEST (set
) == rld
[r
].out
)
8329 src_reg
= SET_SRC (set
);
8331 for (k
= 0; k
< n_reloads
; k
++)
8333 if (rld
[k
].in
== src_reg
)
8335 src_reg
= reload_reg_rtx_for_input
[k
];
8342 store_insn
= new_spill_reg_store
[REGNO (src_reg
)];
8343 if (src_reg
&& REG_P (src_reg
)
8344 && REGNO (src_reg
) < FIRST_PSEUDO_REGISTER
)
8346 int src_regno
, src_nregs
, k
;
8349 gcc_assert (GET_MODE (src_reg
) == mode
);
8350 src_regno
= REGNO (src_reg
);
8351 src_nregs
= hard_regno_nregs
[src_regno
][mode
];
8352 /* The place where to find a death note varies with
8353 PRESERVE_DEATH_INFO_REGNO_P . The condition is not
8354 necessarily checked exactly in the code that moves
8355 notes, so just check both locations. */
8356 note
= find_regno_note (insn
, REG_DEAD
, src_regno
);
8357 if (! note
&& store_insn
)
8358 note
= find_regno_note (store_insn
, REG_DEAD
, src_regno
);
8359 for (k
= 0; k
< src_nregs
; k
++)
8361 spill_reg_store
[src_regno
+ k
] = store_insn
;
8362 spill_reg_stored_to
[src_regno
+ k
] = out
;
8363 reg_reloaded_contents
[src_regno
+ k
] = out_regno
;
8364 reg_reloaded_insn
[src_regno
+ k
] = store_insn
;
8365 CLEAR_HARD_REG_BIT (reg_reloaded_dead
, src_regno
+ k
);
8366 SET_HARD_REG_BIT (reg_reloaded_valid
, src_regno
+ k
);
8367 if (HARD_REGNO_CALL_PART_CLOBBERED (src_regno
+ k
,
8369 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered
,
8372 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered
,
8374 SET_HARD_REG_BIT (reg_is_output_reload
, src_regno
+ k
);
8376 SET_HARD_REG_BIT (reg_reloaded_died
, src_regno
);
8378 CLEAR_HARD_REG_BIT (reg_reloaded_died
, src_regno
);
8380 reg_last_reload_reg
[out_regno
] = src_reg
;
8381 /* We have to set reg_has_output_reload here, or else
8382 forget_old_reloads_1 will clear reg_last_reload_reg
8384 SET_REGNO_REG_SET (®_has_output_reload
,
8390 int k
, out_nregs
= hard_regno_nregs
[out_regno
][mode
];
8392 for (k
= 0; k
< out_nregs
; k
++)
8393 reg_last_reload_reg
[out_regno
+ k
] = 0;
/* Registers that died in output reloads are dead from here on.  */
8397 IOR_HARD_REG_SET (reg_reloaded_dead
, reg_reloaded_died
);
/* NOTE(review): damaged extraction — several interior lines are missing
   (e.g. the declaration of CODE, the branch on the recog result, and the
   returns at original lines ~8411-8421).  Code left byte-identical.  */
8400 /* Go through the motions to emit INSN and test if it is strictly valid.
8401 Return the emitted insn if valid, else return NULL. */
8404 emit_insn_if_valid_for_reload (rtx insn
)
/* Remember the insn stream position so everything emitted here can be
   deleted again if the insn turns out not to be valid.  */
8406 rtx last
= get_last_insn ();
8409 insn
= emit_insn (insn
);
8410 code
= recog_memoized (insn
);
8414 extract_insn (insn
);
8415 /* We want constrain operands to treat this insn strictly in its
8416 validity determination, i.e., the way it would after reload has
8418 if (constrain_operands (1))
/* Invalid: roll back everything emitted since LAST.  */
8422 delete_insns_since (last
);
8426 /* Emit code to perform a reload from IN (which may be a reload register) to
8427 OUT (which may also be a reload register). IN or OUT is from operand
8428 OPNUM with reload type TYPE.
8430 Returns first insn emitted. */
8433 gen_reload (rtx out
, rtx in
, int opnum
, enum reload_type type
)
8435 rtx last
= get_last_insn ();
8438 /* If IN is a paradoxical SUBREG, remove it and try to put the
8439 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
8440 if (!strip_paradoxical_subreg (&in
, &out
))
8441 strip_paradoxical_subreg (&out
, &in
);
8443 /* How to do this reload can get quite tricky. Normally, we are being
8444 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
8445 register that didn't get a hard register. In that case we can just
8446 call emit_move_insn.
8448 We can also be asked to reload a PLUS that adds a register or a MEM to
8449 another register, constant or MEM. This can occur during frame pointer
8450 elimination and while reloading addresses. This case is handled by
8451 trying to emit a single insn to perform the add. If it is not valid,
8452 we use a two insn sequence.
8454 Or we can be asked to reload an unary operand that was a fragment of
8455 an addressing mode, into a register. If it isn't recognized as-is,
8456 we try making the unop operand and the reload-register the same:
8457 (set reg:X (unop:X expr:Y))
8458 -> (set reg:Y expr:Y) (set reg:X (unop:X reg:Y)).
8460 Finally, we could be called to handle an 'o' constraint by putting
8461 an address into a register. In that case, we first try to do this
8462 with a named pattern of "reload_load_address". If no such pattern
8463 exists, we just emit a SET insn and hope for the best (it will normally
8464 be valid on machines that use 'o').
8466 This entire process is made complex because reload will never
8467 process the insns we generate here and so we must ensure that
8468 they will fit their constraints and also by the fact that parts of
8469 IN might be being reloaded separately and replaced with spill registers.
8470 Because of this, we are, in some sense, just guessing the right approach
8471 here. The one listed above seems to work.
8473 ??? At some point, this whole thing needs to be rethought. */
8475 if (GET_CODE (in
) == PLUS
8476 && (REG_P (XEXP (in
, 0))
8477 || GET_CODE (XEXP (in
, 0)) == SUBREG
8478 || MEM_P (XEXP (in
, 0)))
8479 && (REG_P (XEXP (in
, 1))
8480 || GET_CODE (XEXP (in
, 1)) == SUBREG
8481 || CONSTANT_P (XEXP (in
, 1))
8482 || MEM_P (XEXP (in
, 1))))
8484 /* We need to compute the sum of a register or a MEM and another
8485 register, constant, or MEM, and put it into the reload
8486 register. The best possible way of doing this is if the machine
8487 has a three-operand ADD insn that accepts the required operands.
8489 The simplest approach is to try to generate such an insn and see if it
8490 is recognized and matches its constraints. If so, it can be used.
8492 It might be better not to actually emit the insn unless it is valid,
8493 but we need to pass the insn as an operand to `recog' and
8494 `extract_insn' and it is simpler to emit and then delete the insn if
8495 not valid than to dummy things up. */
8497 rtx op0
, op1
, tem
, insn
;
8498 enum insn_code code
;
8500 op0
= find_replacement (&XEXP (in
, 0));
8501 op1
= find_replacement (&XEXP (in
, 1));
8503 /* Since constraint checking is strict, commutativity won't be
8504 checked, so we need to do that here to avoid spurious failure
8505 if the add instruction is two-address and the second operand
8506 of the add is the same as the reload reg, which is frequently
8507 the case. If the insn would be A = B + A, rearrange it so
8508 it will be A = A + B as constrain_operands expects. */
8510 if (REG_P (XEXP (in
, 1))
8511 && REGNO (out
) == REGNO (XEXP (in
, 1)))
8512 tem
= op0
, op0
= op1
, op1
= tem
;
8514 if (op0
!= XEXP (in
, 0) || op1
!= XEXP (in
, 1))
8515 in
= gen_rtx_PLUS (GET_MODE (in
), op0
, op1
);
8517 insn
= emit_insn_if_valid_for_reload (gen_rtx_SET (VOIDmode
, out
, in
));
8521 /* If that failed, we must use a conservative two-insn sequence.
8523 Use a move to copy one operand into the reload register. Prefer
8524 to reload a constant, MEM or pseudo since the move patterns can
8525 handle an arbitrary operand. If OP1 is not a constant, MEM or
8526 pseudo and OP1 is not a valid operand for an add instruction, then
8529 After reloading one of the operands into the reload register, add
8530 the reload register to the output register.
8532 If there is another way to do this for a specific machine, a
8533 DEFINE_PEEPHOLE should be specified that recognizes the sequence
8536 code
= optab_handler (add_optab
, GET_MODE (out
));
8538 if (CONSTANT_P (op1
) || MEM_P (op1
) || GET_CODE (op1
) == SUBREG
8540 && REGNO (op1
) >= FIRST_PSEUDO_REGISTER
)
8541 || (code
!= CODE_FOR_nothing
8542 && !insn_operand_matches (code
, 2, op1
)))
8543 tem
= op0
, op0
= op1
, op1
= tem
;
8545 gen_reload (out
, op0
, opnum
, type
);
8547 /* If OP0 and OP1 are the same, we can use OUT for OP1.
8548 This fixes a problem on the 32K where the stack pointer cannot
8549 be used as an operand of an add insn. */
8551 if (rtx_equal_p (op0
, op1
))
8554 insn
= emit_insn_if_valid_for_reload (gen_add2_insn (out
, op1
));
8557 /* Add a REG_EQUIV note so that find_equiv_reg can find it. */
8558 set_unique_reg_note (insn
, REG_EQUIV
, in
);
8562 /* If that failed, copy the address register to the reload register.
8563 Then add the constant to the reload register. */
8565 gcc_assert (!reg_overlap_mentioned_p (out
, op0
));
8566 gen_reload (out
, op1
, opnum
, type
);
8567 insn
= emit_insn (gen_add2_insn (out
, op0
));
8568 set_unique_reg_note (insn
, REG_EQUIV
, in
);
8571 #ifdef SECONDARY_MEMORY_NEEDED
8572 /* If we need a memory location to do the move, do it that way. */
8573 else if ((REG_P (in
)
8574 || (GET_CODE (in
) == SUBREG
&& REG_P (SUBREG_REG (in
))))
8575 && reg_or_subregno (in
) < FIRST_PSEUDO_REGISTER
8577 || (GET_CODE (out
) == SUBREG
&& REG_P (SUBREG_REG (out
))))
8578 && reg_or_subregno (out
) < FIRST_PSEUDO_REGISTER
8579 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (reg_or_subregno (in
)),
8580 REGNO_REG_CLASS (reg_or_subregno (out
)),
8583 /* Get the memory to use and rewrite both registers to its mode. */
8584 rtx loc
= get_secondary_mem (in
, GET_MODE (out
), opnum
, type
);
8586 if (GET_MODE (loc
) != GET_MODE (out
))
8587 out
= gen_rtx_REG (GET_MODE (loc
), REGNO (out
));
8589 if (GET_MODE (loc
) != GET_MODE (in
))
8590 in
= gen_rtx_REG (GET_MODE (loc
), REGNO (in
));
8592 gen_reload (loc
, in
, opnum
, type
);
8593 gen_reload (out
, loc
, opnum
, type
);
8596 else if (REG_P (out
) && UNARY_P (in
))
8603 op1
= find_replacement (&XEXP (in
, 0));
8604 if (op1
!= XEXP (in
, 0))
8605 in
= gen_rtx_fmt_e (GET_CODE (in
), GET_MODE (in
), op1
);
8607 /* First, try a plain SET. */
8608 set
= emit_insn_if_valid_for_reload (gen_rtx_SET (VOIDmode
, out
, in
));
8612 /* If that failed, move the inner operand to the reload
8613 register, and try the same unop with the inner expression
8614 replaced with the reload register. */
8616 if (GET_MODE (op1
) != GET_MODE (out
))
8617 out_moded
= gen_rtx_REG (GET_MODE (op1
), REGNO (out
));
8621 gen_reload (out_moded
, op1
, opnum
, type
);
8624 = gen_rtx_SET (VOIDmode
, out
,
8625 gen_rtx_fmt_e (GET_CODE (in
), GET_MODE (in
),
8627 insn
= emit_insn_if_valid_for_reload (insn
);
8630 set_unique_reg_note (insn
, REG_EQUIV
, in
);
8634 fatal_insn ("failure trying to reload:", set
);
8636 /* If IN is a simple operand, use gen_move_insn. */
8637 else if (OBJECT_P (in
) || GET_CODE (in
) == SUBREG
)
8639 tem
= emit_insn (gen_move_insn (out
, in
));
8640 /* IN may contain a LABEL_REF, if so add a REG_LABEL_OPERAND note. */
8641 mark_jump_label (in
, tem
, 0);
8644 #ifdef HAVE_reload_load_address
8645 else if (HAVE_reload_load_address
)
8646 emit_insn (gen_reload_load_address (out
, in
));
8649 /* Otherwise, just write (set OUT IN) and hope for the best. */
8651 emit_insn (gen_rtx_SET (VOIDmode
, out
, in
));
8653 /* Return the first insn emitted.
8654 We can not just return get_last_insn, because there may have
8655 been multiple instructions emitted. Also note that gen_move_insn may
8656 emit more than one insn itself, so we can not assume that there is one
8657 insn emitted per emit_insn_before call. */
8659 return last
? NEXT_INSN (last
) : get_insns ();
8662 /* Delete a previously made output-reload whose result we now believe
8663 is not needed. First we double-check.
8665 INSN is the insn now being processed.
8666 LAST_RELOAD_REG is the hard register number for which we want to delete
8667 the last output reload.
8668 J is the reload-number that originally used REG. The caller has made
8669 certain that reload J doesn't use REG any longer for input.
8670 NEW_RELOAD_REG is reload register that reload J is using for REG. */
8673 delete_output_reload (rtx insn
, int j
, int last_reload_reg
, rtx new_reload_reg
)
8675 rtx output_reload_insn
= spill_reg_store
[last_reload_reg
];
8676 rtx reg
= spill_reg_stored_to
[last_reload_reg
];
8679 int n_inherited
= 0;
8685 /* It is possible that this reload has been only used to set another reload
8686 we eliminated earlier and thus deleted this instruction too. */
8687 if (INSN_DELETED_P (output_reload_insn
))
8690 /* Get the raw pseudo-register referred to. */
8692 while (GET_CODE (reg
) == SUBREG
)
8693 reg
= SUBREG_REG (reg
);
8694 substed
= reg_equiv_memory_loc (REGNO (reg
));
8696 /* This is unsafe if the operand occurs more often in the current
8697 insn than it is inherited. */
8698 for (k
= n_reloads
- 1; k
>= 0; k
--)
8700 rtx reg2
= rld
[k
].in
;
8703 if (MEM_P (reg2
) || reload_override_in
[k
])
8704 reg2
= rld
[k
].in_reg
;
8706 if (rld
[k
].out
&& ! rld
[k
].out_reg
)
8707 reg2
= XEXP (rld
[k
].in_reg
, 0);
8709 while (GET_CODE (reg2
) == SUBREG
)
8710 reg2
= SUBREG_REG (reg2
);
8711 if (rtx_equal_p (reg2
, reg
))
8713 if (reload_inherited
[k
] || reload_override_in
[k
] || k
== j
)
8719 n_occurrences
= count_occurrences (PATTERN (insn
), reg
, 0);
8720 if (CALL_P (insn
) && CALL_INSN_FUNCTION_USAGE (insn
))
8721 n_occurrences
+= count_occurrences (CALL_INSN_FUNCTION_USAGE (insn
),
8724 n_occurrences
+= count_occurrences (PATTERN (insn
),
8725 eliminate_regs (substed
, VOIDmode
,
8727 for (i1
= reg_equiv_alt_mem_list (REGNO (reg
)); i1
; i1
= XEXP (i1
, 1))
8729 gcc_assert (!rtx_equal_p (XEXP (i1
, 0), substed
));
8730 n_occurrences
+= count_occurrences (PATTERN (insn
), XEXP (i1
, 0), 0);
8732 if (n_occurrences
> n_inherited
)
8735 regno
= REGNO (reg
);
8736 if (regno
>= FIRST_PSEUDO_REGISTER
)
8739 nregs
= hard_regno_nregs
[regno
][GET_MODE (reg
)];
8741 /* If the pseudo-reg we are reloading is no longer referenced
8742 anywhere between the store into it and here,
8743 and we're within the same basic block, then the value can only
8744 pass through the reload reg and end up here.
8745 Otherwise, give up--return. */
8746 for (i1
= NEXT_INSN (output_reload_insn
);
8747 i1
!= insn
; i1
= NEXT_INSN (i1
))
8749 if (NOTE_INSN_BASIC_BLOCK_P (i1
))
8751 if ((NONJUMP_INSN_P (i1
) || CALL_P (i1
))
8752 && refers_to_regno_p (regno
, regno
+ nregs
, PATTERN (i1
), NULL
))
8754 /* If this is USE in front of INSN, we only have to check that
8755 there are no more references than accounted for by inheritance. */
8756 while (NONJUMP_INSN_P (i1
) && GET_CODE (PATTERN (i1
)) == USE
)
8758 n_occurrences
+= rtx_equal_p (reg
, XEXP (PATTERN (i1
), 0)) != 0;
8759 i1
= NEXT_INSN (i1
);
8761 if (n_occurrences
<= n_inherited
&& i1
== insn
)
8767 /* We will be deleting the insn. Remove the spill reg information. */
8768 for (k
= hard_regno_nregs
[last_reload_reg
][GET_MODE (reg
)]; k
-- > 0; )
8770 spill_reg_store
[last_reload_reg
+ k
] = 0;
8771 spill_reg_stored_to
[last_reload_reg
+ k
] = 0;
8774 /* The caller has already checked that REG dies or is set in INSN.
8775 It has also checked that we are optimizing, and thus some
8776 inaccuracies in the debugging information are acceptable.
8777 So we could just delete output_reload_insn. But in some cases
8778 we can improve the debugging information without sacrificing
8779 optimization - maybe even improving the code: See if the pseudo
8780 reg has been completely replaced with reload regs. If so, delete
8781 the store insn and forget we had a stack slot for the pseudo. */
8782 if (rld
[j
].out
!= rld
[j
].in
8783 && REG_N_DEATHS (REGNO (reg
)) == 1
8784 && REG_N_SETS (REGNO (reg
)) == 1
8785 && REG_BASIC_BLOCK (REGNO (reg
)) >= NUM_FIXED_BLOCKS
8786 && find_regno_note (insn
, REG_DEAD
, REGNO (reg
)))
8790 /* We know that it was used only between here and the beginning of
8791 the current basic block. (We also know that the last use before
8792 INSN was the output reload we are thinking of deleting, but never
8793 mind that.) Search that range; see if any ref remains. */
8794 for (i2
= PREV_INSN (insn
); i2
; i2
= PREV_INSN (i2
))
8796 rtx set
= single_set (i2
);
8798 /* Uses which just store in the pseudo don't count,
8799 since if they are the only uses, they are dead. */
8800 if (set
!= 0 && SET_DEST (set
) == reg
)
8805 if ((NONJUMP_INSN_P (i2
) || CALL_P (i2
))
8806 && reg_mentioned_p (reg
, PATTERN (i2
)))
8808 /* Some other ref remains; just delete the output reload we
8810 delete_address_reloads (output_reload_insn
, insn
);
8811 delete_insn (output_reload_insn
);
8816 /* Delete the now-dead stores into this pseudo. Note that this
8817 loop also takes care of deleting output_reload_insn. */
8818 for (i2
= PREV_INSN (insn
); i2
; i2
= PREV_INSN (i2
))
8820 rtx set
= single_set (i2
);
8822 if (set
!= 0 && SET_DEST (set
) == reg
)
8824 delete_address_reloads (i2
, insn
);
8832 /* For the debugging info, say the pseudo lives in this reload reg. */
8833 reg_renumber
[REGNO (reg
)] = REGNO (new_reload_reg
);
8834 if (ira_conflicts_p
)
8835 /* Inform IRA about the change. */
8836 ira_mark_allocation_change (REGNO (reg
));
8837 alter_reg (REGNO (reg
), -1, false);
8841 delete_address_reloads (output_reload_insn
, insn
);
8842 delete_insn (output_reload_insn
);
8846 /* We are going to delete DEAD_INSN. Recursively delete loads of
8847 reload registers used in DEAD_INSN that are not used till CURRENT_INSN.
8848 CURRENT_INSN is being reloaded, so we have to check its reloads too. */
8850 delete_address_reloads (rtx dead_insn
, rtx current_insn
)
8852 rtx set
= single_set (dead_insn
);
8853 rtx set2
, dst
, prev
, next
;
8856 rtx dst
= SET_DEST (set
);
8858 delete_address_reloads_1 (dead_insn
, XEXP (dst
, 0), current_insn
);
8860 /* If we deleted the store from a reloaded post_{in,de}c expression,
8861 we can delete the matching adds. */
8862 prev
= PREV_INSN (dead_insn
);
8863 next
= NEXT_INSN (dead_insn
);
8864 if (! prev
|| ! next
)
8866 set
= single_set (next
);
8867 set2
= single_set (prev
);
8869 || GET_CODE (SET_SRC (set
)) != PLUS
|| GET_CODE (SET_SRC (set2
)) != PLUS
8870 || !CONST_INT_P (XEXP (SET_SRC (set
), 1))
8871 || !CONST_INT_P (XEXP (SET_SRC (set2
), 1)))
8873 dst
= SET_DEST (set
);
8874 if (! rtx_equal_p (dst
, SET_DEST (set2
))
8875 || ! rtx_equal_p (dst
, XEXP (SET_SRC (set
), 0))
8876 || ! rtx_equal_p (dst
, XEXP (SET_SRC (set2
), 0))
8877 || (INTVAL (XEXP (SET_SRC (set
), 1))
8878 != -INTVAL (XEXP (SET_SRC (set2
), 1))))
8880 delete_related_insns (prev
);
8881 delete_related_insns (next
);
8884 /* Subfunction of delete_address_reloads: process registers found in X. */
8886 delete_address_reloads_1 (rtx dead_insn
, rtx x
, rtx current_insn
)
8888 rtx prev
, set
, dst
, i2
;
8890 enum rtx_code code
= GET_CODE (x
);
8894 const char *fmt
= GET_RTX_FORMAT (code
);
8895 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
8898 delete_address_reloads_1 (dead_insn
, XEXP (x
, i
), current_insn
);
8899 else if (fmt
[i
] == 'E')
8901 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
8902 delete_address_reloads_1 (dead_insn
, XVECEXP (x
, i
, j
),
8909 if (spill_reg_order
[REGNO (x
)] < 0)
8912 /* Scan backwards for the insn that sets x. This might be a way back due
8914 for (prev
= PREV_INSN (dead_insn
); prev
; prev
= PREV_INSN (prev
))
8916 code
= GET_CODE (prev
);
8917 if (code
== CODE_LABEL
|| code
== JUMP_INSN
)
8921 if (reg_set_p (x
, PATTERN (prev
)))
8923 if (reg_referenced_p (x
, PATTERN (prev
)))
8926 if (! prev
|| INSN_UID (prev
) < reload_first_uid
)
8928 /* Check that PREV only sets the reload register. */
8929 set
= single_set (prev
);
8932 dst
= SET_DEST (set
);
8934 || ! rtx_equal_p (dst
, x
))
8936 if (! reg_set_p (dst
, PATTERN (dead_insn
)))
8938 /* Check if DST was used in a later insn -
8939 it might have been inherited. */
8940 for (i2
= NEXT_INSN (dead_insn
); i2
; i2
= NEXT_INSN (i2
))
8946 if (reg_referenced_p (dst
, PATTERN (i2
)))
8948 /* If there is a reference to the register in the current insn,
8949 it might be loaded in a non-inherited reload. If no other
8950 reload uses it, that means the register is set before
8952 if (i2
== current_insn
)
8954 for (j
= n_reloads
- 1; j
>= 0; j
--)
8955 if ((rld
[j
].reg_rtx
== dst
&& reload_inherited
[j
])
8956 || reload_override_in
[j
] == dst
)
8958 for (j
= n_reloads
- 1; j
>= 0; j
--)
8959 if (rld
[j
].in
&& rld
[j
].reg_rtx
== dst
)
8968 /* If DST is still live at CURRENT_INSN, check if it is used for
8969 any reload. Note that even if CURRENT_INSN sets DST, we still
8970 have to check the reloads. */
8971 if (i2
== current_insn
)
8973 for (j
= n_reloads
- 1; j
>= 0; j
--)
8974 if ((rld
[j
].reg_rtx
== dst
&& reload_inherited
[j
])
8975 || reload_override_in
[j
] == dst
)
8977 /* ??? We can't finish the loop here, because dst might be
8978 allocated to a pseudo in this block if no reload in this
8979 block needs any of the classes containing DST - see
8980 spill_hard_reg. There is no easy way to tell this, so we
8981 have to scan till the end of the basic block. */
8983 if (reg_set_p (dst
, PATTERN (i2
)))
8987 delete_address_reloads_1 (prev
, SET_SRC (set
), current_insn
);
8988 reg_reloaded_contents
[REGNO (dst
)] = -1;
8992 /* Output reload-insns to reload VALUE into RELOADREG.
8993 VALUE is an autoincrement or autodecrement RTX whose operand
8994 is a register or memory location;
8995 so reloading involves incrementing that location.
8996 IN is either identical to VALUE, or some cheaper place to reload from.
8998 INC_AMOUNT is the number to increment or decrement by (always positive).
8999 This cannot be deduced from VALUE. */
9002 inc_for_reload (rtx reloadreg
, rtx in
, rtx value
, int inc_amount
)
9004 /* REG or MEM to be copied and incremented. */
9005 rtx incloc
= find_replacement (&XEXP (value
, 0));
9006 /* Nonzero if increment after copying. */
9007 int post
= (GET_CODE (value
) == POST_DEC
|| GET_CODE (value
) == POST_INC
9008 || GET_CODE (value
) == POST_MODIFY
);
9013 rtx real_in
= in
== value
? incloc
: in
;
9015 /* No hard register is equivalent to this register after
9016 inc/dec operation. If REG_LAST_RELOAD_REG were nonzero,
9017 we could inc/dec that register as well (maybe even using it for
9018 the source), but I'm not sure it's worth worrying about. */
9020 reg_last_reload_reg
[REGNO (incloc
)] = 0;
9022 if (GET_CODE (value
) == PRE_MODIFY
|| GET_CODE (value
) == POST_MODIFY
)
9024 gcc_assert (GET_CODE (XEXP (value
, 1)) == PLUS
);
9025 inc
= find_replacement (&XEXP (XEXP (value
, 1), 1));
9029 if (GET_CODE (value
) == PRE_DEC
|| GET_CODE (value
) == POST_DEC
)
9030 inc_amount
= -inc_amount
;
9032 inc
= GEN_INT (inc_amount
);
9035 /* If this is post-increment, first copy the location to the reload reg. */
9036 if (post
&& real_in
!= reloadreg
)
9037 emit_insn (gen_move_insn (reloadreg
, real_in
));
9041 /* See if we can directly increment INCLOC. Use a method similar to
9042 that in gen_reload. */
9044 last
= get_last_insn ();
9045 add_insn
= emit_insn (gen_rtx_SET (VOIDmode
, incloc
,
9046 gen_rtx_PLUS (GET_MODE (incloc
),
9049 code
= recog_memoized (add_insn
);
9052 extract_insn (add_insn
);
9053 if (constrain_operands (1))
9055 /* If this is a pre-increment and we have incremented the value
9056 where it lives, copy the incremented value to RELOADREG to
9057 be used as an address. */
9060 emit_insn (gen_move_insn (reloadreg
, incloc
));
9064 delete_insns_since (last
);
9067 /* If couldn't do the increment directly, must increment in RELOADREG.
9068 The way we do this depends on whether this is pre- or post-increment.
9069 For pre-increment, copy INCLOC to the reload register, increment it
9070 there, then save back. */
9074 if (in
!= reloadreg
)
9075 emit_insn (gen_move_insn (reloadreg
, real_in
));
9076 emit_insn (gen_add2_insn (reloadreg
, inc
));
9077 emit_insn (gen_move_insn (incloc
, reloadreg
));
9082 Because this might be a jump insn or a compare, and because RELOADREG
9083 may not be available after the insn in an input reload, we must do
9084 the incrementation before the insn being reloaded for.
9086 We have already copied IN to RELOADREG. Increment the copy in
9087 RELOADREG, save that back, then decrement RELOADREG so it has
9088 the original value. */
9090 emit_insn (gen_add2_insn (reloadreg
, inc
));
9091 emit_insn (gen_move_insn (incloc
, reloadreg
));
9092 if (CONST_INT_P (inc
))
9093 emit_insn (gen_add2_insn (reloadreg
, GEN_INT (-INTVAL (inc
))));
9095 emit_insn (gen_sub2_insn (reloadreg
, inc
));
9101 add_auto_inc_notes (rtx insn
, rtx x
)
9103 enum rtx_code code
= GET_CODE (x
);
9107 if (code
== MEM
&& auto_inc_p (XEXP (x
, 0)))
9109 add_reg_note (insn
, REG_INC
, XEXP (XEXP (x
, 0), 0));
9113 /* Scan all the operand sub-expressions. */
9114 fmt
= GET_RTX_FORMAT (code
);
9115 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
9118 add_auto_inc_notes (insn
, XEXP (x
, i
));
9119 else if (fmt
[i
] == 'E')
9120 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
9121 add_auto_inc_notes (insn
, XVECEXP (x
, i
, j
));