gcc/reload1.c
1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
27 #include "machmode.h"
28 #include "hard-reg-set.h"
29 #include "rtl.h"
30 #include "tm_p.h"
31 #include "obstack.h"
32 #include "insn-config.h"
33 #include "flags.h"
34 #include "function.h"
35 #include "expr.h"
36 #include "optabs.h"
37 #include "regs.h"
38 #include "basic-block.h"
39 #include "reload.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "real.h"
43 #include "toplev.h"
44 #include "except.h"
45 #include "tree.h"
47 /* This file contains the reload pass of the compiler, which is
48 run after register allocation has been done. It checks that
49 each insn is valid (operands required to be in registers really
50 are in registers of the proper class) and fixes up invalid ones
51 by copying values temporarily into registers for the insns
52 that need them.
54 The results of register allocation are described by the vector
55 reg_renumber; the insns still contain pseudo regs, but reg_renumber
56 can be used to find which hard reg, if any, a pseudo reg is in.
58 The technique we always use is to free up a few hard regs that are
59 called ``reload regs'', and for each place where a pseudo reg
60 must be in a hard reg, copy it temporarily into one of the reload regs.
62 Reload regs are allocated locally for every instruction that needs
63 reloads. When there are pseudos which are allocated to a register that
64 has been chosen as a reload reg, such pseudos must be ``spilled''.
65 This means that they go to other hard regs, or to stack slots if no other
66 available hard regs can be found. Spilling can invalidate more
67 insns, requiring additional reloads, so we must keep checking
68 until the process stabilizes.
70 For machines with different classes of registers, we must keep track
71 of the register class needed for each reload, and make sure that
72 we allocate enough reload registers of each class.
74 The file reload.c contains the code that checks one insn for
75 validity and reports the reloads that it needs. This file
76 is in charge of scanning the entire rtl code, accumulating the
77 reload needs, spilling, assigning reload registers to use for
78 fixing up each insn, and generating the new insns to copy values
79 into the reload registers. */
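/* As a rough illustration (hypothetical register numbers, and assuming a
   machine whose add insn cannot operate directly on memory): if pseudo 123
   did not get a hard register and is equivalent to a stack slot, an insn
   such as

	(set (reg:SI 123) (plus:SI (reg:SI 123) (const_int 1)))

   cannot simply have the pseudo replaced by its memory equivalent, so
   reload copies the value through a reload reg, say hard reg 0:

	(set (reg:SI 0) (mem:SI (plus:SI (reg:SI 6) (const_int -8))))
	(set (reg:SI 0) (plus:SI (reg:SI 0) (const_int 1)))
	(set (mem:SI (plus:SI (reg:SI 6) (const_int -8))) (reg:SI 0))  */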
81 /* During reload_as_needed, element N contains a REG rtx for the hard reg
82 into which reg N has been reloaded (perhaps for a previous insn). */
83 static rtx *reg_last_reload_reg;
85 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
86 for an output reload that stores into reg N. */
87 static char *reg_has_output_reload;
89 /* Indicates which hard regs are reload-registers for an output reload
90 in the current insn. */
91 static HARD_REG_SET reg_is_output_reload;
93 /* Element N is the constant value to which pseudo reg N is equivalent,
94 or zero if pseudo reg N is not equivalent to a constant.
95 find_reloads looks at this in order to replace pseudo reg N
96 with the constant it stands for. */
97 rtx *reg_equiv_constant;
99 /* Element N is a memory location to which pseudo reg N is equivalent,
100 prior to any register elimination (such as frame pointer to stack
101 pointer). Depending on whether or not it is a valid address, this value
102 is transferred to either reg_equiv_address or reg_equiv_mem. */
103 rtx *reg_equiv_memory_loc;
105 /* We allocate reg_equiv_memory_loc inside a varray so that the garbage
106 collector can keep track of what is inside. */
107 varray_type reg_equiv_memory_loc_varray;
109 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
110 This is used when the address is not valid as a memory address
111 (because its displacement is too big for the machine.) */
112 rtx *reg_equiv_address;
114 /* Element N is the memory slot to which pseudo reg N is equivalent,
115 or zero if pseudo reg N is not equivalent to a memory slot. */
116 rtx *reg_equiv_mem;
118 /* Widest width in which each pseudo reg is referred to (via subreg). */
119 static unsigned int *reg_max_ref_width;
121 /* Element N is the list of insns that initialized reg N from its equivalent
122 constant or memory slot. */
123 static rtx *reg_equiv_init;
125 /* Vector to remember old contents of reg_renumber before spilling. */
126 static short *reg_old_renumber;
128 /* During reload_as_needed, element N contains the last pseudo regno reloaded
129 into hard register N. If that pseudo reg occupied more than one register,
130 reg_reloaded_contents points to that pseudo for each spill register in
131 use; all of these must remain set for an inheritance to occur. */
132 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
134 /* During reload_as_needed, element N contains the insn for which
135 hard register N was last used. Its contents are significant only
136 when reg_reloaded_valid is set for this register. */
137 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
139 /* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid. */
140 static HARD_REG_SET reg_reloaded_valid;
141 /* Indicate if the register was dead at the end of the reload.
142 This is only valid if reg_reloaded_contents is set and valid. */
143 static HARD_REG_SET reg_reloaded_dead;
145 /* Indicate whether the register's current value is one that is not
146 safe to retain across a call, even for registers that are normally
147 call-saved. */
148 static HARD_REG_SET reg_reloaded_call_part_clobbered;
150 /* Number of spill-regs so far; number of valid elements of spill_regs. */
151 static int n_spills;
153 /* In parallel with spill_regs, contains REG rtx's for those regs.
154 Holds the last rtx used for any given reg, or 0 if it has never
155 been used for spilling yet. This rtx is reused, provided it has
156 the proper mode. */
157 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
159 /* In parallel with spill_regs, contains nonzero for a spill reg
160 that was stored after the last time it was used.
161 The precise value is the insn generated to do the store. */
162 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
164 /* This is the register that was stored with spill_reg_store. This is a
165 copy of reload_out / reload_out_reg when the value was stored; if
166 reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg. */
167 static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER];
169 /* This table is the inverse mapping of spill_regs:
170 indexed by hard reg number,
171 it contains the position of that reg in spill_regs,
172 or -1 for something that is not in spill_regs.
174 ?!? This is no longer accurate. */
175 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
177 /* This reg set indicates registers that can't be used as spill registers for
178 the currently processed insn. These are the hard registers which are live
179 during the insn, but not allocated to pseudos, as well as fixed
180 registers. */
181 static HARD_REG_SET bad_spill_regs;
183 /* These are the hard registers that can't be used as spill register for any
184 insn. This includes registers used for user variables and registers that
185 we can't eliminate. A register that appears in this set also can't be used
186 to retry register allocation. */
187 static HARD_REG_SET bad_spill_regs_global;
189 /* Describes order of use of registers for reloading
190 of spilled pseudo-registers. `n_spills' is the number of
191 elements that are actually valid; new ones are added at the end.
193 Both spill_regs and spill_reg_order are used on two occasions:
194 once during find_reload_regs, where they keep track of the spill registers
195 for a single insn, but also during reload_as_needed where they show all
196 the registers ever used by reload. For the latter case, the information
197 is calculated during finish_spills. */
198 static short spill_regs[FIRST_PSEUDO_REGISTER];
200 /* This vector of reg sets indicates, for each pseudo, which hard registers
201 may not be used for retrying global allocation because the register was
202 formerly spilled from one of them. If we allowed reallocating a pseudo to
203 a register that it was already allocated to, reload might not
204 terminate. */
205 static HARD_REG_SET *pseudo_previous_regs;
207 /* This vector of reg sets indicates, for each pseudo, which hard
208 registers may not be used for retrying global allocation because they
209 are used as spill registers during one of the insns in which the
210 pseudo is live. */
211 static HARD_REG_SET *pseudo_forbidden_regs;
213 /* All hard regs that have been used as spill registers for any insn are
214 marked in this set. */
215 static HARD_REG_SET used_spill_regs;
217 /* Index of last register assigned as a spill register. We allocate in
218 a round-robin fashion. */
219 static int last_spill_reg;
221 /* Nonzero if indirect addressing is supported on the machine; this means
222 that spilling (REG n) does not require reloading it into a register in
223 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
224 value indicates the level of indirect addressing supported, e.g., two
225 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
226 a hard register. */
227 static char spill_indirect_levels;
229 /* Nonzero if indirect addressing is supported when the innermost MEM is
230 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
231 which these are valid is the same as spill_indirect_levels, above. */
232 char indirect_symref_ok;
234 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
235 char double_reg_address_ok;
237 /* Record the stack slot for each spilled hard register. */
238 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
240 /* Width allocated so far for that stack slot. */
241 static unsigned int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
243 /* Record which pseudos needed to be spilled. */
244 static regset_head spilled_pseudos;
246 /* Used for communication between order_regs_for_reload and count_pseudo.
247 Used to avoid counting one pseudo twice. */
248 static regset_head pseudos_counted;
250 /* First uid used by insns created by reload in this function.
251 Used in find_equiv_reg. */
252 int reload_first_uid;
254 /* Flag set by local-alloc or global-alloc if anything is live in
255 a call-clobbered reg across calls. */
256 int caller_save_needed;
258 /* Set to 1 while reload_as_needed is operating.
259 Required by some machines to handle any generated moves differently. */
260 int reload_in_progress = 0;
262 /* These arrays record the insn_code of insns that may be needed to
263 perform input and output reloads of special objects. They provide a
264 place to pass a scratch register. */
265 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
266 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
268 /* This obstack is used for allocation of rtl during register elimination.
269 The allocated storage can be freed once find_reloads has processed the
270 insn. */
271 struct obstack reload_obstack;
273 /* Points to the beginning of the reload_obstack. All insn_chain structures
274 are allocated first. */
275 char *reload_startobj;
277 /* The point after all insn_chain structures. Used to quickly deallocate
278 memory allocated in copy_reloads during calculate_needs_all_insns. */
279 char *reload_firstobj;
281 /* This points before all local rtl generated by register elimination.
282 Used to quickly free all memory after processing one insn. */
283 static char *reload_insn_firstobj;
285 /* List of insn_chain instructions, one for every insn that reload needs to
286 examine. */
287 struct insn_chain *reload_insn_chain;
289 /* List of all insns needing reloads. */
290 static struct insn_chain *insns_need_reload;
292 /* This structure is used to record information about register eliminations.
293 Each array entry describes one possible way of eliminating a register
294 in favor of another. If there is more than one way of eliminating a
295 particular register, the most preferred should be specified first. */
297 struct elim_table
299 int from; /* Register number to be eliminated. */
300 int to; /* Register number used as replacement. */
301 HOST_WIDE_INT initial_offset; /* Initial difference between values. */
302 int can_eliminate; /* Nonzero if this elimination can be done. */
303 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
304 insns made by reload. */
305 HOST_WIDE_INT offset; /* Current offset between the two regs. */
306 HOST_WIDE_INT previous_offset;/* Offset at end of previous insn. */
307 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
308 rtx from_rtx; /* REG rtx for the register to be eliminated.
309 We cannot simply compare the number since
310 we might then spuriously replace a hard
311 register corresponding to a pseudo
312 assigned to the reg to be eliminated. */
313 rtx to_rtx; /* REG rtx for the replacement. */
316 static struct elim_table *reg_eliminate = 0;
318 /* This is an intermediate structure to initialize the table. It has
319 exactly the members provided by ELIMINABLE_REGS. */
320 static const struct elim_table_1
322 const int from;
323 const int to;
324 } reg_eliminate_1[] =
326 /* If a set of eliminable registers was specified, define the table from it.
327 Otherwise, default to the normal case of the frame pointer being
328 replaced by the stack pointer. */
330 #ifdef ELIMINABLE_REGS
331 ELIMINABLE_REGS;
332 #else
333 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
334 #endif
336 #define NUM_ELIMINABLE_REGS ARRAY_SIZE (reg_eliminate_1)
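/* For concreteness, a target's ELIMINABLE_REGS is an initializer list of
   {from, to} pairs, with the most preferred elimination for each register
   listed first.  A typical definition (sketched roughly after the i386
   port; shown only as an illustration) looks like:

	#define ELIMINABLE_REGS					\
	{{ ARG_POINTER_REGNUM, STACK_POINTER_REGNUM},		\
	 { ARG_POINTER_REGNUM, HARD_FRAME_POINTER_REGNUM},	\
	 { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM},		\
	 { FRAME_POINTER_REGNUM, HARD_FRAME_POINTER_REGNUM}}  */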
338 /* Record the number of pending eliminations that have an offset not equal
339 to their initial offset. If nonzero, we use a new copy of each
340 replacement result in any insns encountered. */
341 int num_not_at_initial_offset;
343 /* Count the number of registers that we may be able to eliminate. */
344 static int num_eliminable;
345 /* And the number of registers that are equivalent to a constant that
346 can be eliminated to frame_pointer / arg_pointer + constant. */
347 static int num_eliminable_invariants;
349 /* For each label, we record the offset of each elimination. If we reach
350 a label by more than one path and an offset differs, we cannot do the
351 elimination. This information is indexed by the difference of the
352 number of the label and the first label number. We can't offset the
353 pointer itself as this can cause problems on machines with segmented
354 memory. The first table is an array of flags that records whether we
355 have yet encountered a label and the second table is an array of arrays,
356 one entry in the latter array for each elimination. */
358 static int first_label_num;
359 static char *offsets_known_at;
360 static HOST_WIDE_INT (*offsets_at)[NUM_ELIMINABLE_REGS];
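/* A sketch of the access pattern these tables are meant for (the real
   uses are in set_label_offsets and set_offsets_for_label; LABEL, OFFSET
   and I here are hypothetical names):

	if (offsets_known_at[CODE_LABEL_NUMBER (label) - first_label_num])
	  offset = offsets_at[CODE_LABEL_NUMBER (label) - first_label_num][i];

   where I ranges over the NUM_ELIMINABLE_REGS eliminations.  */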
362 /* Number of labels in the current function. */
364 static int num_labels;
366 static void replace_pseudos_in (rtx *, enum machine_mode, rtx);
367 static void maybe_fix_stack_asms (void);
368 static void copy_reloads (struct insn_chain *);
369 static void calculate_needs_all_insns (int);
370 static int find_reg (struct insn_chain *, int);
371 static void find_reload_regs (struct insn_chain *);
372 static void select_reload_regs (void);
373 static void delete_caller_save_insns (void);
375 static void spill_failure (rtx, enum reg_class);
376 static void count_spilled_pseudo (int, int, int);
377 static void delete_dead_insn (rtx);
378 static void alter_reg (int, int);
379 static void set_label_offsets (rtx, rtx, int);
380 static void check_eliminable_occurrences (rtx);
381 static void elimination_effects (rtx, enum machine_mode);
382 static int eliminate_regs_in_insn (rtx, int);
383 static void update_eliminable_offsets (void);
384 static void mark_not_eliminable (rtx, rtx, void *);
385 static void set_initial_elim_offsets (void);
386 static void verify_initial_elim_offsets (void);
387 static void set_initial_label_offsets (void);
388 static void set_offsets_for_label (rtx);
389 static void init_elim_table (void);
390 static void update_eliminables (HARD_REG_SET *);
391 static void spill_hard_reg (unsigned int, int);
392 static int finish_spills (int);
393 static void scan_paradoxical_subregs (rtx);
394 static void count_pseudo (int);
395 static void order_regs_for_reload (struct insn_chain *);
396 static void reload_as_needed (int);
397 static void forget_old_reloads_1 (rtx, rtx, void *);
398 static int reload_reg_class_lower (const void *, const void *);
399 static void mark_reload_reg_in_use (unsigned int, int, enum reload_type,
400 enum machine_mode);
401 static void clear_reload_reg_in_use (unsigned int, int, enum reload_type,
402 enum machine_mode);
403 static int reload_reg_free_p (unsigned int, int, enum reload_type);
404 static int reload_reg_free_for_value_p (int, int, int, enum reload_type,
405 rtx, rtx, int, int);
406 static int free_for_value_p (int, enum machine_mode, int, enum reload_type,
407 rtx, rtx, int, int);
408 static int function_invariant_p (rtx);
409 static int reload_reg_reaches_end_p (unsigned int, int, enum reload_type);
410 static int allocate_reload_reg (struct insn_chain *, int, int);
411 static int conflicts_with_override (rtx);
412 static void failed_reload (rtx, int);
413 static int set_reload_reg (int, int);
414 static void choose_reload_regs_init (struct insn_chain *, rtx *);
415 static void choose_reload_regs (struct insn_chain *);
416 static void merge_assigned_reloads (rtx);
417 static void emit_input_reload_insns (struct insn_chain *, struct reload *,
418 rtx, int);
419 static void emit_output_reload_insns (struct insn_chain *, struct reload *,
420 int);
421 static void do_input_reload (struct insn_chain *, struct reload *, int);
422 static void do_output_reload (struct insn_chain *, struct reload *, int);
423 static bool inherit_piecemeal_p (int, int);
424 static void emit_reload_insns (struct insn_chain *);
425 static void delete_output_reload (rtx, int, int);
426 static void delete_address_reloads (rtx, rtx);
427 static void delete_address_reloads_1 (rtx, rtx, rtx);
428 static rtx inc_for_reload (rtx, rtx, rtx, int);
429 #ifdef AUTO_INC_DEC
430 static void add_auto_inc_notes (rtx, rtx);
431 #endif
432 static void copy_eh_notes (rtx, rtx);
433 static int reloads_conflict (int, int);
434 static rtx gen_reload (rtx, rtx, int, enum reload_type);
436 /* Initialize the reload pass once per compilation. */
438 void
439 init_reload (void)
441 int i;
443 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
444 Set spill_indirect_levels to the number of levels such addressing is
445 permitted, zero if it is not permitted at all. */
447 rtx tem
448 = gen_rtx_MEM (Pmode,
449 gen_rtx_PLUS (Pmode,
450 gen_rtx_REG (Pmode,
451 LAST_VIRTUAL_REGISTER + 1),
452 GEN_INT (4)));
453 spill_indirect_levels = 0;
455 while (memory_address_p (QImode, tem))
457 spill_indirect_levels++;
458 tem = gen_rtx_MEM (Pmode, tem);
461 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
463 tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
464 indirect_symref_ok = memory_address_p (QImode, tem);
466 /* See if reg+reg is a valid (and offsettable) address. */
468 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
470 tem = gen_rtx_PLUS (Pmode,
471 gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
472 gen_rtx_REG (Pmode, i));
474 /* This way, we make sure that reg+reg is an offsettable address. */
475 tem = plus_constant (tem, 4);
477 if (memory_address_p (QImode, tem))
479 double_reg_address_ok = 1;
480 break;
484 /* Initialize obstack for our rtl allocation. */
485 gcc_obstack_init (&reload_obstack);
486 reload_startobj = obstack_alloc (&reload_obstack, 0);
488 INIT_REG_SET (&spilled_pseudos);
489 INIT_REG_SET (&pseudos_counted);
490 VARRAY_RTX_INIT (reg_equiv_memory_loc_varray, 0, "reg_equiv_memory_loc");
493 /* List of insn chains that are currently unused. */
494 static struct insn_chain *unused_insn_chains = 0;
496 /* Allocate an empty insn_chain structure. */
497 struct insn_chain *
498 new_insn_chain (void)
500 struct insn_chain *c;
502 if (unused_insn_chains == 0)
504 c = obstack_alloc (&reload_obstack, sizeof (struct insn_chain));
505 INIT_REG_SET (&c->live_throughout);
506 INIT_REG_SET (&c->dead_or_set);
508 else
510 c = unused_insn_chains;
511 unused_insn_chains = c->next;
513 c->is_caller_save_insn = 0;
514 c->need_operand_change = 0;
515 c->need_reload = 0;
516 c->need_elim = 0;
517 return c;
520 /* Small utility function to set all regs in hard reg set TO which are
521 allocated to pseudos in regset FROM. */
523 void
524 compute_use_by_pseudos (HARD_REG_SET *to, regset from)
526 unsigned int regno;
527 reg_set_iterator rsi;
529 EXECUTE_IF_SET_IN_REG_SET (from, FIRST_PSEUDO_REGISTER, regno, rsi)
531 int r = reg_renumber[regno];
532 int nregs;
534 if (r < 0)
536 /* reload_combine uses the information from
537 BASIC_BLOCK->global_live_at_start, which might still
538 contain registers that have not actually been allocated
539 since they have an equivalence. */
540 gcc_assert (reload_completed);
542 else
544 nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (regno)];
545 while (nregs-- > 0)
546 SET_HARD_REG_BIT (*to, r + nregs);
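/* A hypothetical caller, folding the hard regs occupied by the pseudos
   live through an insn_chain's insn into a working register set:

	HARD_REG_SET used;
	CLEAR_HARD_REG_SET (used);
	compute_use_by_pseudos (&used, &chain->live_throughout);  */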
551 /* Replace all pseudos found in LOC with their corresponding
552 equivalences. */
554 static void
555 replace_pseudos_in (rtx *loc, enum machine_mode mem_mode, rtx usage)
557 rtx x = *loc;
558 enum rtx_code code;
559 const char *fmt;
560 int i, j;
562 if (! x)
563 return;
565 code = GET_CODE (x);
566 if (code == REG)
568 unsigned int regno = REGNO (x);
570 if (regno < FIRST_PSEUDO_REGISTER)
571 return;
573 x = eliminate_regs (x, mem_mode, usage);
574 if (x != *loc)
576 *loc = x;
577 replace_pseudos_in (loc, mem_mode, usage);
578 return;
581 if (reg_equiv_constant[regno])
582 *loc = reg_equiv_constant[regno];
583 else if (reg_equiv_mem[regno])
584 *loc = reg_equiv_mem[regno];
585 else if (reg_equiv_address[regno])
586 *loc = gen_rtx_MEM (GET_MODE (x), reg_equiv_address[regno]);
587 else
589 gcc_assert (!REG_P (regno_reg_rtx[regno])
590 || REGNO (regno_reg_rtx[regno]) != regno);
591 *loc = regno_reg_rtx[regno];
594 return;
596 else if (code == MEM)
598 replace_pseudos_in (& XEXP (x, 0), GET_MODE (x), usage);
599 return;
602 /* Process each of our operands recursively. */
603 fmt = GET_RTX_FORMAT (code);
604 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
605 if (*fmt == 'e')
606 replace_pseudos_in (&XEXP (x, i), mem_mode, usage);
607 else if (*fmt == 'E')
608 for (j = 0; j < XVECLEN (x, i); j++)
609 replace_pseudos_in (& XVECEXP (x, i, j), mem_mode, usage);
613 /* Global variables used by reload and its subroutines. */
615 /* Set during calculate_needs if an insn needs register elimination. */
616 static int something_needs_elimination;
617 /* Set during calculate_needs if an insn needs an operand changed. */
618 int something_needs_operands_changed;
620 /* Nonzero means we couldn't get enough spill regs. */
621 static int failure;
623 /* Main entry point for the reload pass.
625 FIRST is the first insn of the function being compiled.
627 GLOBAL nonzero means we were called from global_alloc
628 and should attempt to reallocate any pseudoregs that we
629 displace from hard regs we will use for reloads.
630 If GLOBAL is zero, we do not have enough information to do that,
631 so any pseudo reg that is spilled must go to the stack.
633 Return value is nonzero if reload failed
634 and we must not do any more for this function. */
637 reload (rtx first, int global)
639 int i;
640 rtx insn;
641 struct elim_table *ep;
642 basic_block bb;
644 /* Make sure even insns with volatile mem refs are recognizable. */
645 init_recog ();
647 failure = 0;
649 reload_firstobj = obstack_alloc (&reload_obstack, 0);
651 /* Make sure that the last insn in the chain
652 is not something that needs reloading. */
653 emit_note (NOTE_INSN_DELETED);
655 /* Enable find_equiv_reg to distinguish insns made by reload. */
656 reload_first_uid = get_max_uid ();
658 #ifdef SECONDARY_MEMORY_NEEDED
659 /* Initialize the secondary memory table. */
660 clear_secondary_mem ();
661 #endif
663 /* We don't have a stack slot for any spill reg yet. */
664 memset (spill_stack_slot, 0, sizeof spill_stack_slot);
665 memset (spill_stack_slot_width, 0, sizeof spill_stack_slot_width);
667 /* Initialize the save area information for caller-save, in case some
668 are needed. */
669 init_save_areas ();
671 /* Compute which hard registers are now in use
672 as homes for pseudo registers.
673 This is done here rather than (e.g.) in global_alloc
674 because this point is reached even if not optimizing. */
675 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
676 mark_home_live (i);
678 /* A function that receives a nonlocal goto must save all call-saved
679 registers. */
680 if (current_function_has_nonlocal_label)
681 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
682 if (! call_used_regs[i] && ! fixed_regs[i] && ! LOCAL_REGNO (i))
683 regs_ever_live[i] = 1;
685 /* Find all the pseudo registers that didn't get hard regs
686 but do have known equivalent constants or memory slots.
687 These include parameters (known equivalent to parameter slots)
688 and cse'd or loop-moved constant memory addresses.
690 Record constant equivalents in reg_equiv_constant
691 so they will be substituted by find_reloads.
692 Record memory equivalents in reg_equiv_memory_loc so they can
693 be substituted eventually by altering the REG-rtx's. */
695 reg_equiv_constant = xcalloc (max_regno, sizeof (rtx));
696 reg_equiv_mem = xcalloc (max_regno, sizeof (rtx));
697 reg_equiv_init = xcalloc (max_regno, sizeof (rtx));
698 reg_equiv_address = xcalloc (max_regno, sizeof (rtx));
699 reg_max_ref_width = xcalloc (max_regno, sizeof (int));
700 reg_old_renumber = xcalloc (max_regno, sizeof (short));
701 memcpy (reg_old_renumber, reg_renumber, max_regno * sizeof (short));
702 pseudo_forbidden_regs = xmalloc (max_regno * sizeof (HARD_REG_SET));
703 pseudo_previous_regs = xcalloc (max_regno, sizeof (HARD_REG_SET));
705 CLEAR_HARD_REG_SET (bad_spill_regs_global);
707 /* Look for REG_EQUIV notes; record what each pseudo is equivalent
708 to. Also find all paradoxical subregs and find largest such for
709 each pseudo. */
711 num_eliminable_invariants = 0;
712 for (insn = first; insn; insn = NEXT_INSN (insn))
714 rtx set = single_set (insn);
716 /* We may introduce USEs that we want to remove at the end, so
717 we'll mark them with QImode. Make sure there are no
718 previously-marked insns left by, say, regmove. */
719 if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
720 && GET_MODE (insn) != VOIDmode)
721 PUT_MODE (insn, VOIDmode);
723 if (set != 0 && REG_P (SET_DEST (set)))
725 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
726 if (note
727 && (! function_invariant_p (XEXP (note, 0))
728 || ! flag_pic
729 /* A function invariant is often CONSTANT_P but may
730 include a register. We promise to only pass
731 CONSTANT_P objects to LEGITIMATE_PIC_OPERAND_P. */
732 || (CONSTANT_P (XEXP (note, 0))
733 && LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))))
735 rtx x = XEXP (note, 0);
736 i = REGNO (SET_DEST (set));
737 if (i > LAST_VIRTUAL_REGISTER)
739 /* It can happen that a REG_EQUIV note contains a MEM
740 that is not a legitimate memory operand. As later
741 stages of reload assume that all addresses found
742 in the reg_equiv_* arrays were originally legitimate,
743 we ignore such REG_EQUIV notes. */
744 if (memory_operand (x, VOIDmode))
746 /* Always unshare the equivalence, so we can
747 substitute into this insn without touching the
748 equivalence. */
749 reg_equiv_memory_loc[i] = copy_rtx (x);
751 else if (function_invariant_p (x))
753 if (GET_CODE (x) == PLUS)
755 /* This is PLUS of frame pointer and a constant,
756 and might be shared. Unshare it. */
757 reg_equiv_constant[i] = copy_rtx (x);
758 num_eliminable_invariants++;
760 else if (x == frame_pointer_rtx
761 || x == arg_pointer_rtx)
763 reg_equiv_constant[i] = x;
764 num_eliminable_invariants++;
766 else if (LEGITIMATE_CONSTANT_P (x))
767 reg_equiv_constant[i] = x;
768 else
770 reg_equiv_memory_loc[i]
771 = force_const_mem (GET_MODE (SET_DEST (set)), x);
772 if (!reg_equiv_memory_loc[i])
773 continue;
776 else
777 continue;
779 /* If this register is being made equivalent to a MEM
780 and the MEM is not SET_SRC, the equivalencing insn
781 is one with the MEM as a SET_DEST and it occurs later.
782 So don't mark this insn now. */
783 if (!MEM_P (x)
784 || rtx_equal_p (SET_SRC (set), x))
785 reg_equiv_init[i]
786 = gen_rtx_INSN_LIST (VOIDmode, insn, reg_equiv_init[i]);
791 /* If this insn is setting a MEM from a register equivalent to it,
792 this is the equivalencing insn. */
793 else if (set && MEM_P (SET_DEST (set))
794 && REG_P (SET_SRC (set))
795 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
796 && rtx_equal_p (SET_DEST (set),
797 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
798 reg_equiv_init[REGNO (SET_SRC (set))]
799 = gen_rtx_INSN_LIST (VOIDmode, insn,
800 reg_equiv_init[REGNO (SET_SRC (set))]);
802 if (INSN_P (insn))
803 scan_paradoxical_subregs (PATTERN (insn));
806 init_elim_table ();
808 first_label_num = get_first_label_num ();
809 num_labels = max_label_num () - first_label_num;
811 /* Allocate the tables used to store offset information at labels. */
812 /* We used to use alloca here, but the size of what it would try to
813 allocate would occasionally cause it to exceed the stack limit and
814 cause a core dump. */
815 offsets_known_at = xmalloc (num_labels);
816 offsets_at = xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (HOST_WIDE_INT));
818 /* Alter each pseudo-reg rtx to contain its hard reg number.
819 Assign stack slots to the pseudos that lack hard regs or equivalents.
820 Do not touch virtual registers. */
822 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
823 alter_reg (i, -1);
825 /* If we have some registers we think can be eliminated, scan all insns to
826 see if there is an insn that sets one of these registers to something
827 other than itself plus a constant. If so, the register cannot be
828 eliminated. Doing this scan here eliminates an extra pass through the
829 main reload loop in the most common case where register elimination
830 cannot be done. */
831 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
832 if (INSN_P (insn))
833 note_stores (PATTERN (insn), mark_not_eliminable, NULL);
835 maybe_fix_stack_asms ();
837 insns_need_reload = 0;
838 something_needs_elimination = 0;
840 /* Initialize to -1, which means take the first spill register. */
841 last_spill_reg = -1;
843 /* Spill any hard regs that we know we can't eliminate. */
844 CLEAR_HARD_REG_SET (used_spill_regs);
845 /* There can be multiple ways to eliminate a register;
846 they should be listed adjacently.
847 Elimination for any register fails only if all possible ways fail. */
848 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; )
850 int from = ep->from;
851 int can_eliminate = 0;
854 can_eliminate |= ep->can_eliminate;
855 ep++;
857 while (ep < &reg_eliminate[NUM_ELIMINABLE_REGS] && ep->from == from);
858 if (! can_eliminate)
859 spill_hard_reg (from, 1);
862 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
863 if (frame_pointer_needed)
864 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, 1);
865 #endif
866 finish_spills (global);
868 /* From now on, we may need to generate moves differently. We may also
869 allow modifications of insns which cause them to not be recognized.
870 Any such modifications will be cleaned up during reload itself. */
871 reload_in_progress = 1;
873 /* This loop scans the entire function each go-round
874 and repeats until one repetition spills no additional hard regs. */
875 for (;;)
877 int something_changed;
878 int did_spill;
880 HOST_WIDE_INT starting_frame_size;
882 /* Round size of stack frame to stack_alignment_needed. This must be done
883 here because the stack size may be a part of the offset computation
884 for register elimination, and there might have been new stack slots
885 created in the last iteration of this loop. */
886 if (cfun->stack_alignment_needed)
887 assign_stack_local (BLKmode, 0, cfun->stack_alignment_needed);
889 starting_frame_size = get_frame_size ();
891 set_initial_elim_offsets ();
892 set_initial_label_offsets ();
894 /* For each pseudo register that has an equivalent location defined,
895 try to eliminate any eliminable registers (such as the frame pointer)
896 assuming initial offsets for the replacement register, which
897 is the normal case.
899 If the resulting location is directly addressable, substitute
900 the MEM we just got directly for the old REG.
902 If it is not addressable but is a constant or the sum of a hard reg
903 and constant, it is probably not addressable because the constant is
904 out of range, in that case record the address; we will generate
905 hairy code to compute the address in a register each time it is
906 needed. Similarly if it is a hard register, but one that is not
907 valid as an address register.
909 If the location is not addressable, but does not have one of the
910 above forms, assign a stack slot. We have to do this to avoid the
911 potential of producing lots of reloads if, e.g., a location involves
912 a pseudo that didn't get a hard register and has an equivalent memory
913 location that also involves a pseudo that didn't get a hard register.
915 Perhaps at some point we will improve reload_when_needed handling
916 so this problem goes away. But that's very hairy. */
918 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
919 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
921 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
923 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
924 XEXP (x, 0)))
925 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
926 else if (CONSTANT_P (XEXP (x, 0))
927 || (REG_P (XEXP (x, 0))
928 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
929 || (GET_CODE (XEXP (x, 0)) == PLUS
930 && REG_P (XEXP (XEXP (x, 0), 0))
931 && (REGNO (XEXP (XEXP (x, 0), 0))
932 < FIRST_PSEUDO_REGISTER)
933 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
934 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
935 else
937 /* Make a new stack slot. Then indicate that something
938 changed so we go back and recompute offsets for
939 eliminable registers because the allocation of memory
940 below might change some offset. reg_equiv_{mem,address}
941 will be set up for this pseudo on the next pass around
942 the loop. */
943 reg_equiv_memory_loc[i] = 0;
944 reg_equiv_init[i] = 0;
945 alter_reg (i, -1);
949 if (caller_save_needed)
950 setup_save_areas ();
952 /* If we allocated another stack slot, redo elimination bookkeeping. */
953 if (starting_frame_size != get_frame_size ())
954 continue;
956 if (caller_save_needed)
958 save_call_clobbered_regs ();
959 /* That might have allocated new insn_chain structures. */
960 reload_firstobj = obstack_alloc (&reload_obstack, 0);
963 calculate_needs_all_insns (global);
965 CLEAR_REG_SET (&spilled_pseudos);
966 did_spill = 0;
968 something_changed = 0;
970 /* If we allocated any new memory locations, make another pass
971 since it might have changed elimination offsets. */
972 if (starting_frame_size != get_frame_size ())
973 something_changed = 1;
976 HARD_REG_SET to_spill;
977 CLEAR_HARD_REG_SET (to_spill);
978 update_eliminables (&to_spill);
979 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
980 if (TEST_HARD_REG_BIT (to_spill, i))
982 spill_hard_reg (i, 1);
983 did_spill = 1;
985 /* Regardless of the state of spills, if we previously had
986 a register that we thought we could eliminate, but now can
987 not eliminate, we must run another pass.
989 Consider pseudos which have an entry in reg_equiv_* which
990 reference an eliminable register. We must make another pass
991 to update reg_equiv_* so that we do not substitute in the
992 old value from when we thought the elimination could be
993 performed. */
994 something_changed = 1;
998 select_reload_regs ();
999 if (failure)
1000 goto failed;
1002 if (insns_need_reload != 0 || did_spill)
1003 something_changed |= finish_spills (global);
1005 if (! something_changed)
1006 break;
1008 if (caller_save_needed)
1009 delete_caller_save_insns ();
1011 obstack_free (&reload_obstack, reload_firstobj);
1014 /* If global-alloc was run, notify it of any register eliminations we have
1015 done. */
1016 if (global)
1017 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1018 if (ep->can_eliminate)
1019 mark_elimination (ep->from, ep->to);
1021 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1022 If that insn didn't set the register (i.e., it copied the register to
1023 memory), just delete that insn instead of the equivalencing insn plus
1024 anything now dead. If we call delete_dead_insn on that insn, we may
1025 delete the insn that actually sets the register if the register dies
1026 there and that is incorrect. */
1028 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1030 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0)
1032 rtx list;
1033 for (list = reg_equiv_init[i]; list; list = XEXP (list, 1))
1035 rtx equiv_insn = XEXP (list, 0);
1037 /* If we already deleted the insn or if it may trap, we can't
1038 delete it. The latter case shouldn't happen, but can
1039 if an insn has a variable address, gets a REG_EH_REGION
1040 note added to it, and then gets converted into a load
1041 from a constant address. */
1042 if (NOTE_P (equiv_insn)
1043 || can_throw_internal (equiv_insn))
1045 else if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn)))
1046 delete_dead_insn (equiv_insn);
1047 else
1048 SET_INSN_DELETED (equiv_insn);
1053 /* Use the reload registers where necessary
1054 by generating move instructions to move the must-be-register
1055 values into or out of the reload registers. */
1057 if (insns_need_reload != 0 || something_needs_elimination
1058 || something_needs_operands_changed)
1060 HOST_WIDE_INT old_frame_size = get_frame_size ();
1062 reload_as_needed (global);
1064 gcc_assert (old_frame_size == get_frame_size ());
1066 if (num_eliminable)
1067 verify_initial_elim_offsets ();
1070 /* If we were able to eliminate the frame pointer, show that it is no
1071 longer live at the start of any basic block. If it is live by
1072 virtue of being in a pseudo, that pseudo will be marked live
1073 and hence the frame pointer will be known to be live via that
1074 pseudo. */
1076 if (! frame_pointer_needed)
1077 FOR_EACH_BB (bb)
1078 CLEAR_REGNO_REG_SET (bb->global_live_at_start,
1079 HARD_FRAME_POINTER_REGNUM);
1081 /* Come here (with failure set nonzero) if we can't get enough spill regs
1082 and we decide not to abort about it. */
1083 failed:
1085 CLEAR_REG_SET (&spilled_pseudos);
1086 reload_in_progress = 0;
1088 /* Now eliminate all pseudo regs by modifying them into
1089 their equivalent memory references.
1090 The REG-rtx's for the pseudos are modified in place,
1091 so all insns that used to refer to them now refer to memory.
1093 For a reg that has a reg_equiv_address, all those insns
1094 were changed by reloading so that no insns refer to it any longer;
1095 but the DECL_RTL of a variable decl may refer to it,
1096 and if so this causes the debugging info to mention the variable. */
1098 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1100 rtx addr = 0;
1102 if (reg_equiv_mem[i])
1103 addr = XEXP (reg_equiv_mem[i], 0);
1105 if (reg_equiv_address[i])
1106 addr = reg_equiv_address[i];
1108 if (addr)
1110 if (reg_renumber[i] < 0)
1112 rtx reg = regno_reg_rtx[i];
1114 REG_USERVAR_P (reg) = 0;
1115 PUT_CODE (reg, MEM);
1116 XEXP (reg, 0) = addr;
1117 if (reg_equiv_memory_loc[i])
1118 MEM_COPY_ATTRIBUTES (reg, reg_equiv_memory_loc[i]);
1119 else
1121 MEM_IN_STRUCT_P (reg) = MEM_SCALAR_P (reg) = 0;
1122 MEM_ATTRS (reg) = 0;
1125 else if (reg_equiv_mem[i])
1126 XEXP (reg_equiv_mem[i], 0) = addr;
1130 /* We must set reload_completed now since the cleanup_subreg_operands call
1131 below will re-recognize each insn and reload may have generated insns
1132 which are only valid during and after reload. */
1133 reload_completed = 1;
1135 /* Make a pass over all the insns and delete all USEs which we inserted
1136 only to tag a REG_EQUAL note on them. Remove all REG_DEAD and REG_UNUSED
1137 notes. Delete all CLOBBER insns, except those that refer to the return
1138 value and the special mem:BLK CLOBBERs added to prevent the scheduler
1139 from misarranging variable-array code, and simplify (subreg (reg))
1140 operands. Also remove all REG_RETVAL and REG_LIBCALL notes since they
1141 are no longer useful or accurate. Strip and regenerate REG_INC notes
1142 that may have been moved around. */
1144 for (insn = first; insn; insn = NEXT_INSN (insn))
1145 if (INSN_P (insn))
1147 rtx *pnote;
1149 if (CALL_P (insn))
1150 replace_pseudos_in (& CALL_INSN_FUNCTION_USAGE (insn),
1151 VOIDmode, CALL_INSN_FUNCTION_USAGE (insn));
1153 if ((GET_CODE (PATTERN (insn)) == USE
1154 /* We mark with QImode USEs introduced by reload itself. */
1155 && (GET_MODE (insn) == QImode
1156 || find_reg_note (insn, REG_EQUAL, NULL_RTX)))
1157 || (GET_CODE (PATTERN (insn)) == CLOBBER
1158 && (!MEM_P (XEXP (PATTERN (insn), 0))
1159 || GET_MODE (XEXP (PATTERN (insn), 0)) != BLKmode
1160 || (GET_CODE (XEXP (XEXP (PATTERN (insn), 0), 0)) != SCRATCH
1161 && XEXP (XEXP (PATTERN (insn), 0), 0)
1162 != stack_pointer_rtx))
1163 && (!REG_P (XEXP (PATTERN (insn), 0))
1164 || ! REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))))
1166 delete_insn (insn);
1167 continue;
1170 /* Some CLOBBERs may survive until here and still reference unassigned
1171 pseudos with const equivalent, which may in turn cause ICE in later
1172 passes if the reference remains in place. */
1173 if (GET_CODE (PATTERN (insn)) == CLOBBER)
1174 replace_pseudos_in (& XEXP (PATTERN (insn), 0),
1175 VOIDmode, PATTERN (insn));
1177 pnote = &REG_NOTES (insn);
1178 while (*pnote != 0)
1180 if (REG_NOTE_KIND (*pnote) == REG_DEAD
1181 || REG_NOTE_KIND (*pnote) == REG_UNUSED
1182 || REG_NOTE_KIND (*pnote) == REG_INC
1183 || REG_NOTE_KIND (*pnote) == REG_RETVAL
1184 || REG_NOTE_KIND (*pnote) == REG_LIBCALL)
1185 *pnote = XEXP (*pnote, 1);
1186 else
1187 pnote = &XEXP (*pnote, 1);
1190 #ifdef AUTO_INC_DEC
1191 add_auto_inc_notes (insn, PATTERN (insn));
1192 #endif
1194 /* And simplify (subreg (reg)) if it appears as an operand. */
1195 cleanup_subreg_operands (insn);
1198 /* If we are doing stack checking, give a warning if this function's
1199 frame size is larger than we expect. */
1200 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
1202 HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
1203 static int verbose_warned = 0;
1205 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1206 if (regs_ever_live[i] && ! fixed_regs[i] && call_used_regs[i])
1207 size += UNITS_PER_WORD;
1209 if (size > STACK_CHECK_MAX_FRAME_SIZE)
1211 warning ("frame size too large for reliable stack checking");
1212 if (! verbose_warned)
1214 warning ("try reducing the number of local variables");
1215 verbose_warned = 1;
1220 /* Indicate that we no longer have known memory locations or constants. */
1221 if (reg_equiv_constant)
1222 free (reg_equiv_constant);
1223 reg_equiv_constant = 0;
1224 VARRAY_GROW (reg_equiv_memory_loc_varray, 0);
1225 reg_equiv_memory_loc = 0;
1227 if (offsets_known_at)
1228 free (offsets_known_at);
1229 if (offsets_at)
1230 free (offsets_at);
1232 free (reg_equiv_mem);
1233 free (reg_equiv_init);
1234 free (reg_equiv_address);
1235 free (reg_max_ref_width);
1236 free (reg_old_renumber);
1237 free (pseudo_previous_regs);
1238 free (pseudo_forbidden_regs);
1240 CLEAR_HARD_REG_SET (used_spill_regs);
1241 for (i = 0; i < n_spills; i++)
1242 SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
1244 /* Free all the insn_chain structures at once. */
1245 obstack_free (&reload_obstack, reload_startobj);
1246 unused_insn_chains = 0;
1247 fixup_abnormal_edges ();
1249 /* Replacing pseudos with their memory equivalents might have
1250 created shared rtx. Subsequent passes would get confused
1251 by this, so unshare everything here. */
1252 unshare_all_rtl_again (first);
1254 #ifdef STACK_BOUNDARY
1255 /* init_emit has set the alignment of the hard frame pointer
1256 to STACK_BOUNDARY. It is very likely no longer valid if
1257 the hard frame pointer was used for register allocation. */
1258 if (!frame_pointer_needed)
1259 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = BITS_PER_UNIT;
1260 #endif
1262 return failure;
1265 /* Yet another special case. Unfortunately, reg-stack forces people to
1266 write incorrect clobbers in asm statements. These clobbers must not
1267 cause the register to appear in bad_spill_regs, otherwise we'll call
1268 fatal_insn later. We clear the corresponding regnos in the live
1269 register sets to avoid this.
1270 The whole thing is rather sick, I'm afraid. */
1272 static void
1273 maybe_fix_stack_asms (void)
1275 #ifdef STACK_REGS
1276 const char *constraints[MAX_RECOG_OPERANDS];
1277 enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
1278 struct insn_chain *chain;
1280 for (chain = reload_insn_chain; chain != 0; chain = chain->next)
1282 int i, noperands;
1283 HARD_REG_SET clobbered, allowed;
1284 rtx pat;
1286 if (! INSN_P (chain->insn)
1287 || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
1288 continue;
1289 pat = PATTERN (chain->insn);
1290 if (GET_CODE (pat) != PARALLEL)
1291 continue;
1293 CLEAR_HARD_REG_SET (clobbered);
1294 CLEAR_HARD_REG_SET (allowed);
1296 /* First, make a mask of all stack regs that are clobbered. */
1297 for (i = 0; i < XVECLEN (pat, 0); i++)
1299 rtx t = XVECEXP (pat, 0, i);
1300 if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
1301 SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
1304 /* Get the operand values and constraints out of the insn. */
1305 decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc,
1306 constraints, operand_mode);
1308 /* For every operand, see what registers are allowed. */
1309 for (i = 0; i < noperands; i++)
1311 const char *p = constraints[i];
1312 /* For every alternative, we compute the class of registers allowed
1313 for reloading in CLS, and merge its contents into the reg set
1314 ALLOWED. */
1315 int cls = (int) NO_REGS;
1317 for (;;)
1319 char c = *p;
1321 if (c == '\0' || c == ',' || c == '#')
1323 /* End of one alternative - mark the regs in the current
1324 class, and reset the class. */
1325 IOR_HARD_REG_SET (allowed, reg_class_contents[cls]);
1326 cls = NO_REGS;
1327 p++;
1328 if (c == '#')
1329 do {
1330 c = *p++;
1331 } while (c != '\0' && c != ',');
1332 if (c == '\0')
1333 break;
1334 continue;
1337 switch (c)
1339 case '=': case '+': case '*': case '%': case '?': case '!':
1340 case '0': case '1': case '2': case '3': case '4': case 'm':
1341 case '<': case '>': case 'V': case 'o': case '&': case 'E':
1342 case 'F': case 's': case 'i': case 'n': case 'X': case 'I':
1343 case 'J': case 'K': case 'L': case 'M': case 'N': case 'O':
1344 case 'P':
1345 break;
1347 case 'p':
1348 cls = (int) reg_class_subunion[cls]
1349 [(int) MODE_BASE_REG_CLASS (VOIDmode)];
1350 break;
1352 case 'g':
1353 case 'r':
1354 cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
1355 break;
1357 default:
1358 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
1359 cls = (int) reg_class_subunion[cls]
1360 [(int) MODE_BASE_REG_CLASS (VOIDmode)];
1361 else
1362 cls = (int) reg_class_subunion[cls]
1363 [(int) REG_CLASS_FROM_CONSTRAINT (c, p)];
1365 p += CONSTRAINT_LEN (c, p);
1368 /* Those of the registers which are clobbered, but allowed by the
1369 constraints, must be usable as reload registers. So clear them
1370 out of the life information. */
1371 AND_HARD_REG_SET (allowed, clobbered);
1372 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1373 if (TEST_HARD_REG_BIT (allowed, i))
1375 CLEAR_REGNO_REG_SET (&chain->live_throughout, i);
1376 CLEAR_REGNO_REG_SET (&chain->dead_or_set, i);
1380 #endif
1383 /* Copy the global variables n_reloads and rld into the corresponding elts
1384 of CHAIN. */
1385 static void
1386 copy_reloads (struct insn_chain *chain)
1388 chain->n_reloads = n_reloads;
1389 chain->rld = obstack_alloc (&reload_obstack,
1390 n_reloads * sizeof (struct reload));
1391 memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1392 reload_insn_firstobj = obstack_alloc (&reload_obstack, 0);
1395 /* Walk the chain of insns, and determine for each whether it needs reloads
1396 and/or eliminations. Build the corresponding insns_need_reload list, and
1397 set something_needs_elimination as appropriate. */
1398 static void
1399 calculate_needs_all_insns (int global)
1401 struct insn_chain **pprev_reload = &insns_need_reload;
1402 struct insn_chain *chain, *next = 0;
1404 something_needs_elimination = 0;
1406 reload_insn_firstobj = obstack_alloc (&reload_obstack, 0);
1407 for (chain = reload_insn_chain; chain != 0; chain = next)
1409 rtx insn = chain->insn;
1411 next = chain->next;
1413 /* Clear out the shortcuts. */
1414 chain->n_reloads = 0;
1415 chain->need_elim = 0;
1416 chain->need_reload = 0;
1417 chain->need_operand_change = 0;
1419 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
1420 include REG_LABEL), we need to see what effects this has on the
1421 known offsets at labels. */
1423 if (LABEL_P (insn) || JUMP_P (insn)
1424 || (INSN_P (insn) && REG_NOTES (insn) != 0))
1425 set_label_offsets (insn, insn, 0);
1427 if (INSN_P (insn))
1429 rtx old_body = PATTERN (insn);
1430 int old_code = INSN_CODE (insn);
1431 rtx old_notes = REG_NOTES (insn);
1432 int did_elimination = 0;
1433 int operands_changed = 0;
1434 rtx set = single_set (insn);
1436 /* Skip insns that only set an equivalence. */
1437 if (set && REG_P (SET_DEST (set))
1438 && reg_renumber[REGNO (SET_DEST (set))] < 0
1439 && reg_equiv_constant[REGNO (SET_DEST (set))])
1440 continue;
1442 /* If needed, eliminate any eliminable registers. */
1443 if (num_eliminable || num_eliminable_invariants)
1444 did_elimination = eliminate_regs_in_insn (insn, 0);
1446 /* Analyze the instruction. */
1447 operands_changed = find_reloads (insn, 0, spill_indirect_levels,
1448 global, spill_reg_order);
1450 /* If a no-op set needs more than one reload, this is likely
1451 to be something that needs input address reloads. We
1452 can't get rid of this cleanly later, and it is of no use
1453 anyway, so discard it now.
1454 We only do this when expensive_optimizations is enabled,
1455 since this complements reload inheritance / output
1456 reload deletion, and it can make debugging harder. */
1457 if (flag_expensive_optimizations && n_reloads > 1)
1459 rtx set = single_set (insn);
1460 if (set
1461 && SET_SRC (set) == SET_DEST (set)
1462 && REG_P (SET_SRC (set))
1463 && REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER)
1465 delete_insn (insn);
1466 /* Delete it from the reload chain. */
1467 if (chain->prev)
1468 chain->prev->next = next;
1469 else
1470 reload_insn_chain = next;
1471 if (next)
1472 next->prev = chain->prev;
1473 chain->next = unused_insn_chains;
1474 unused_insn_chains = chain;
1475 continue;
1478 if (num_eliminable)
1479 update_eliminable_offsets ();
1481 /* Remember for later shortcuts which insns had any reloads or
1482 register eliminations. */
1483 chain->need_elim = did_elimination;
1484 chain->need_reload = n_reloads > 0;
1485 chain->need_operand_change = operands_changed;
1487 /* Discard any register replacements done. */
1488 if (did_elimination)
1490 obstack_free (&reload_obstack, reload_insn_firstobj);
1491 PATTERN (insn) = old_body;
1492 INSN_CODE (insn) = old_code;
1493 REG_NOTES (insn) = old_notes;
1494 something_needs_elimination = 1;
1497 something_needs_operands_changed |= operands_changed;
1499 if (n_reloads != 0)
1501 copy_reloads (chain);
1502 *pprev_reload = chain;
1503 pprev_reload = &chain->next_need_reload;
1507 *pprev_reload = 0;
1510 /* Comparison function for qsort to decide which of two reloads
1511 should be handled first. *P1 and *P2 are the reload numbers. */
1513 static int
1514 reload_reg_class_lower (const void *r1p, const void *r2p)
1516 int r1 = *(const short *) r1p, r2 = *(const short *) r2p;
1517 int t;
1519 /* Consider required reloads before optional ones. */
1520 t = rld[r1].optional - rld[r2].optional;
1521 if (t != 0)
1522 return t;
1524 /* Count all solitary classes before non-solitary ones. */
1525 t = ((reg_class_size[(int) rld[r2].class] == 1)
1526 - (reg_class_size[(int) rld[r1].class] == 1));
1527 if (t != 0)
1528 return t;
1530 /* Aside from solitaires, consider all multi-reg groups first. */
1531 t = rld[r2].nregs - rld[r1].nregs;
1532 if (t != 0)
1533 return t;
1535 /* Consider reloads in order of increasing reg-class number. */
1536 t = (int) rld[r1].class - (int) rld[r2].class;
1537 if (t != 0)
1538 return t;
1540 /* If reloads are equally urgent, sort by reload number,
1541 so that the results of qsort leave nothing to chance. */
1542 return r1 - r2;
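/* The comparator above is intended for qsort over the reload_order
   array; roughly (the real call sits in find_reload_regs, below):

	qsort (reload_order, n_reloads, sizeof (short),
	       reload_reg_class_lower);  */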
1545 /* The cost of spilling each hard reg. */
1546 static int spill_cost[FIRST_PSEUDO_REGISTER];
1548 /* When spilling multiple hard registers, we use SPILL_COST for the first
1549 spilled hard reg and SPILL_ADD_COST for subsequent regs. SPILL_ADD_COST
1550 is only accumulated on the first hard reg of a multi-reg pseudo. */
1551 static int spill_add_cost[FIRST_PSEUDO_REGISTER];
1553 /* Update the spill cost arrays, considering that pseudo REG is live. */
1555 static void
1556 count_pseudo (int reg)
1558 int freq = REG_FREQ (reg);
1559 int r = reg_renumber[reg];
1560 int nregs;
1562 if (REGNO_REG_SET_P (&pseudos_counted, reg)
1563 || REGNO_REG_SET_P (&spilled_pseudos, reg))
1564 return;
1566 SET_REGNO_REG_SET (&pseudos_counted, reg);
1568 gcc_assert (r >= 0);
1570 spill_add_cost[r] += freq;
1572 nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
1573 while (nregs-- > 0)
1574 spill_cost[r + nregs] += freq;
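/* Worked example with made-up numbers: if pseudo 200 has REG_FREQ 7,
   reg_renumber[200] == 3, and its mode spans two hard regs, then the
   code above adds 7 to spill_add_cost[3] and 7 to each of spill_cost[3]
   and spill_cost[4].  When find_reg later prices a two-reg group starting
   at reg 3, it sums spill_cost[3] + spill_add_cost[4], so this pseudo is
   charged once (7) rather than once per hard reg.  */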
1577 /* Calculate the SPILL_COST and SPILL_ADD_COST arrays and determine the
1578 contents of BAD_SPILL_REGS for the insn described by CHAIN. */
1580 static void
1581 order_regs_for_reload (struct insn_chain *chain)
1583 unsigned i;
1584 HARD_REG_SET used_by_pseudos;
1585 HARD_REG_SET used_by_pseudos2;
1586 reg_set_iterator rsi;
1588 COPY_HARD_REG_SET (bad_spill_regs, fixed_reg_set);
1590 memset (spill_cost, 0, sizeof spill_cost);
1591 memset (spill_add_cost, 0, sizeof spill_add_cost);
1593 /* Count number of uses of each hard reg by pseudo regs allocated to it
1594 and then order them by decreasing use. First exclude hard registers
1595 that are live in or across this insn. */
1597 REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
1598 REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
1599 IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos);
1600 IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos2);
1602 /* Now count the pseudos that are live in or across this insn, and
1603 update the spill cost arrays for the hard regs they occupy. */
1604 CLEAR_REG_SET (&pseudos_counted);
1606 EXECUTE_IF_SET_IN_REG_SET
1607 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
1609 count_pseudo (i);
1611 EXECUTE_IF_SET_IN_REG_SET
1612 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
1614 count_pseudo (i);
1616 CLEAR_REG_SET (&pseudos_counted);
1619 /* Vector of reload-numbers showing the order in which the reloads should
1620 be processed. */
1621 static short reload_order[MAX_RELOADS];
1623 /* This is used to keep track of the spill regs used in one insn. */
1624 static HARD_REG_SET used_spill_regs_local;
1626 /* We decided to spill hard register SPILLED, which has a size of
1627 SPILLED_NREGS. Determine how pseudo REG, which is live during the insn,
1628 is affected. We will add it to SPILLED_PSEUDOS if necessary, and we will
1629 update SPILL_COST/SPILL_ADD_COST. */
1631 static void
1632 count_spilled_pseudo (int spilled, int spilled_nregs, int reg)
1634 int r = reg_renumber[reg];
1635 int nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
1637 if (REGNO_REG_SET_P (&spilled_pseudos, reg)
1638 || spilled + spilled_nregs <= r || r + nregs <= spilled)
1639 return;
1641 SET_REGNO_REG_SET (&spilled_pseudos, reg);
1643 spill_add_cost[r] -= REG_FREQ (reg);
1644 while (nregs-- > 0)
1645 spill_cost[r + nregs] -= REG_FREQ (reg);
1648 /* Find reload register to use for reload number ORDER. */
1650 static int
1651 find_reg (struct insn_chain *chain, int order)
1653 int rnum = reload_order[order];
1654 struct reload *rl = rld + rnum;
1655 int best_cost = INT_MAX;
1656 int best_reg = -1;
1657 unsigned int i, j;
1658 int k;
1659 HARD_REG_SET not_usable;
1660 HARD_REG_SET used_by_other_reload;
1661 reg_set_iterator rsi;
1663 COPY_HARD_REG_SET (not_usable, bad_spill_regs);
1664 IOR_HARD_REG_SET (not_usable, bad_spill_regs_global);
1665 IOR_COMPL_HARD_REG_SET (not_usable, reg_class_contents[rl->class]);
1667 CLEAR_HARD_REG_SET (used_by_other_reload);
1668 for (k = 0; k < order; k++)
1670 int other = reload_order[k];
1672 if (rld[other].regno >= 0 && reloads_conflict (other, rnum))
1673 for (j = 0; j < rld[other].nregs; j++)
1674 SET_HARD_REG_BIT (used_by_other_reload, rld[other].regno + j);
1677 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1679 unsigned int regno = i;
1681 if (! TEST_HARD_REG_BIT (not_usable, regno)
1682 && ! TEST_HARD_REG_BIT (used_by_other_reload, regno)
1683 && HARD_REGNO_MODE_OK (regno, rl->mode))
1685 int this_cost = spill_cost[regno];
1686 int ok = 1;
1687 unsigned int this_nregs = hard_regno_nregs[regno][rl->mode];
1689 for (j = 1; j < this_nregs; j++)
1691 this_cost += spill_add_cost[regno + j];
1692 if ((TEST_HARD_REG_BIT (not_usable, regno + j))
1693 || TEST_HARD_REG_BIT (used_by_other_reload, regno + j))
1694 ok = 0;
1696 if (! ok)
1697 continue;
1698 if (rl->in && REG_P (rl->in) && REGNO (rl->in) == regno)
1699 this_cost--;
1700 if (rl->out && REG_P (rl->out) && REGNO (rl->out) == regno)
1701 this_cost--;
1702 if (this_cost < best_cost
1703 /* Among registers with equal cost, prefer caller-saved ones, or
1704 use REG_ALLOC_ORDER if it is defined. */
1705 || (this_cost == best_cost
1706 #ifdef REG_ALLOC_ORDER
1707 && (inv_reg_alloc_order[regno]
1708 < inv_reg_alloc_order[best_reg])
1709 #else
1710 && call_used_regs[regno]
1711 && ! call_used_regs[best_reg]
1712 #endif
1715 best_reg = regno;
1716 best_cost = this_cost;
1720 if (best_reg == -1)
1721 return 0;
1723 if (dump_file)
1724 fprintf (dump_file, "Using reg %d for reload %d\n", best_reg, rnum);
1726 rl->nregs = hard_regno_nregs[best_reg][rl->mode];
1727 rl->regno = best_reg;
1729 EXECUTE_IF_SET_IN_REG_SET
1730 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, j, rsi)
1732 count_spilled_pseudo (best_reg, rl->nregs, j);
1735 EXECUTE_IF_SET_IN_REG_SET
1736 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, j, rsi)
1738 count_spilled_pseudo (best_reg, rl->nregs, j);
1741 for (i = 0; i < rl->nregs; i++)
1743 gcc_assert (spill_cost[best_reg + i] == 0);
1744 gcc_assert (spill_add_cost[best_reg + i] == 0);
1745 SET_HARD_REG_BIT (used_spill_regs_local, best_reg + i);
1747 return 1;
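/* Illustrative cost computation (hypothetical registers): for a
   two-register reload trying hard regs 6 and 7, the candidate cost is

       this_cost = spill_cost[6] + spill_add_cost[7];

   minus one if reg 6 is itself the reload's input or output register.
   The candidate is rejected outright if any of its regs is in
   NOT_USABLE or already claimed by a conflicting reload chosen earlier
   in RELOAD_ORDER.  */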
1750 /* Find more reload regs to satisfy the remaining need of an insn, which
1751 is given by CHAIN.
1752 Do it by ascending class number, since otherwise a reg
1753 might be spilled for a big class and might fail to count
1754 for a smaller class even though it belongs to that class. */
1756 static void
1757 find_reload_regs (struct insn_chain *chain)
1759 int i;
1761 /* In order to be certain of getting the registers we need,
1762 we must sort the reloads into order of increasing register class.
1763 Then our grabbing of reload registers will parallel the process
1764 that provided the reload registers. */
1765 for (i = 0; i < chain->n_reloads; i++)
1767 /* Show whether this reload already has a hard reg. */
1768 if (chain->rld[i].reg_rtx)
1770 int regno = REGNO (chain->rld[i].reg_rtx);
1771 chain->rld[i].regno = regno;
1772 chain->rld[i].nregs
1773 = hard_regno_nregs[regno][GET_MODE (chain->rld[i].reg_rtx)];
1775 else
1776 chain->rld[i].regno = -1;
1777 reload_order[i] = i;
1780 n_reloads = chain->n_reloads;
1781 memcpy (rld, chain->rld, n_reloads * sizeof (struct reload));
1783 CLEAR_HARD_REG_SET (used_spill_regs_local);
1785 if (dump_file)
1786 fprintf (dump_file, "Spilling for insn %d.\n", INSN_UID (chain->insn));
1788 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
1790 /* Compute the order of preference for hard registers to spill. */
1792 order_regs_for_reload (chain);
1794 for (i = 0; i < n_reloads; i++)
1796 int r = reload_order[i];
1798 /* Ignore reloads that got marked inoperative. */
1799 if ((rld[r].out != 0 || rld[r].in != 0 || rld[r].secondary_p)
1800 && ! rld[r].optional
1801 && rld[r].regno == -1)
1802 if (! find_reg (chain, i))
1804 spill_failure (chain->insn, rld[r].class);
1805 failure = 1;
1806 return;
1810 COPY_HARD_REG_SET (chain->used_spill_regs, used_spill_regs_local);
1811 IOR_HARD_REG_SET (used_spill_regs, used_spill_regs_local);
1813 memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1816 static void
1817 select_reload_regs (void)
1819 struct insn_chain *chain;
1821 /* Try to satisfy the needs for each insn. */
1822 for (chain = insns_need_reload; chain != 0;
1823 chain = chain->next_need_reload)
1824 find_reload_regs (chain);
1827 /* Delete all insns that were inserted by emit_caller_save_insns during
1828 this iteration. */
1829 static void
1830 delete_caller_save_insns (void)
1832 struct insn_chain *c = reload_insn_chain;
1834 while (c != 0)
1836 while (c != 0 && c->is_caller_save_insn)
1838 struct insn_chain *next = c->next;
1839 rtx insn = c->insn;
1841 if (c == reload_insn_chain)
1842 reload_insn_chain = next;
1843 delete_insn (insn);
1845 if (next)
1846 next->prev = c->prev;
1847 if (c->prev)
1848 c->prev->next = next;
1849 c->next = unused_insn_chains;
1850 unused_insn_chains = c;
1851 c = next;
1853 if (c != 0)
1854 c = c->next;
1858 /* Handle the failure to find a register to spill.
1859 INSN should be one of the insns which needed this particular spill reg. */
1861 static void
1862 spill_failure (rtx insn, enum reg_class class)
1864 static const char *const reg_class_names[] = REG_CLASS_NAMES;
1865 if (asm_noperands (PATTERN (insn)) >= 0)
1866 error_for_asm (insn, "can't find a register in class %qs while "
1867 "reloading %<asm%>",
1868 reg_class_names[class]);
1869 else
1871 error ("unable to find a register to spill in class %qs",
1872 reg_class_names[class]);
1873 fatal_insn ("this is the insn:", insn);
1877 /* Delete an unneeded INSN and any previous insns whose sole purpose is loading
1878 data that is dead in INSN. */
1880 static void
1881 delete_dead_insn (rtx insn)
1883 rtx prev = prev_real_insn (insn);
1884 rtx prev_dest;
1886 /* If the previous insn sets a register that dies in our insn, delete it
1887 too. */
1888 if (prev && GET_CODE (PATTERN (prev)) == SET
1889 && (prev_dest = SET_DEST (PATTERN (prev)), REG_P (prev_dest))
1890 && reg_mentioned_p (prev_dest, PATTERN (insn))
1891 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
1892 && ! side_effects_p (SET_SRC (PATTERN (prev))))
1893 delete_dead_insn (prev);
1895 SET_INSN_DELETED (insn);
1898 /* Modify the home of pseudo-reg I.
1899 The new home is present in reg_renumber[I].
1901 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
1902 or it may be -1, meaning there is none or it is not relevant.
1903 This is used so that all pseudos spilled from a given hard reg
1904 can share one stack slot. */
1906 static void
1907 alter_reg (int i, int from_reg)
1909 /* When outputting an inline function, this can happen
1910 for a reg that isn't actually used. */
1911 if (regno_reg_rtx[i] == 0)
1912 return;
1914 /* If the reg got changed to a MEM at rtl-generation time,
1915 ignore it. */
1916 if (!REG_P (regno_reg_rtx[i]))
1917 return;
1919 /* Modify the reg-rtx to contain the new hard reg
1920 number or else to contain its pseudo reg number. */
1921 REGNO (regno_reg_rtx[i])
1922 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
1924 /* If we have a pseudo that is needed but has no hard reg or equivalent,
1925 allocate a stack slot for it. */
1927 if (reg_renumber[i] < 0
1928 && REG_N_REFS (i) > 0
1929 && reg_equiv_constant[i] == 0
1930 && reg_equiv_memory_loc[i] == 0)
1932 rtx x;
1933 unsigned int inherent_size = PSEUDO_REGNO_BYTES (i);
1934 unsigned int total_size = MAX (inherent_size, reg_max_ref_width[i]);
1935 int adjust = 0;
1937 /* Each pseudo reg has an inherent size which comes from its own mode,
1938 and a total size which provides room for paradoxical subregs
1939 which refer to the pseudo reg in wider modes.
1941 We can use a slot already allocated if it provides both
1942 enough inherent space and enough total space.
1943 Otherwise, we allocate a new slot, making sure that it has no less
1944 inherent space, and no less total space, than the previous slot. */
1945 if (from_reg == -1)
1947 /* No known place to spill from => no slot to reuse. */
1948 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size,
1949 inherent_size == total_size ? 0 : -1);
1950 if (BYTES_BIG_ENDIAN)
1951 /* Cancel the big-endian correction done in assign_stack_local.
1952 Get the address of the beginning of the slot.
1953 This is so we can do a big-endian correction unconditionally
1954 below. */
1955 adjust = inherent_size - total_size;
1957 /* Nothing can alias this slot except this pseudo. */
1958 set_mem_alias_set (x, new_alias_set ());
1961 /* Reuse a stack slot if possible. */
1962 else if (spill_stack_slot[from_reg] != 0
1963 && spill_stack_slot_width[from_reg] >= total_size
1964 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
1965 >= inherent_size))
1966 x = spill_stack_slot[from_reg];
1968 /* Allocate a bigger slot. */
1969 else
1971 /* Compute maximum size needed, both for inherent size
1972 and for total size. */
1973 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
1974 rtx stack_slot;
1976 if (spill_stack_slot[from_reg])
1978 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
1979 > inherent_size)
1980 mode = GET_MODE (spill_stack_slot[from_reg]);
1981 if (spill_stack_slot_width[from_reg] > total_size)
1982 total_size = spill_stack_slot_width[from_reg];
1985 /* Make a slot with that size. */
1986 x = assign_stack_local (mode, total_size,
1987 inherent_size == total_size ? 0 : -1);
1988 stack_slot = x;
1990 /* All pseudos mapped to this slot can alias each other. */
1991 if (spill_stack_slot[from_reg])
1992 set_mem_alias_set (x, MEM_ALIAS_SET (spill_stack_slot[from_reg]));
1993 else
1994 set_mem_alias_set (x, new_alias_set ());
1996 if (BYTES_BIG_ENDIAN)
1998 /* Cancel the big-endian correction done in assign_stack_local.
1999 Get the address of the beginning of the slot.
2000 This is so we can do a big-endian correction unconditionally
2001 below. */
2002 adjust = GET_MODE_SIZE (mode) - total_size;
2003 if (adjust)
2004 stack_slot
2005 = adjust_address_nv (x, mode_for_size (total_size
2006 * BITS_PER_UNIT,
2007 MODE_INT, 1),
2008 adjust);
2011 spill_stack_slot[from_reg] = stack_slot;
2012 spill_stack_slot_width[from_reg] = total_size;
2015 /* On a big endian machine, the "address" of the slot
2016 is the address of the low part that fits its inherent mode. */
2017 if (BYTES_BIG_ENDIAN && inherent_size < total_size)
2018 adjust += (total_size - inherent_size);
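/* Worked example (hypothetical sizes, big-endian target): a pseudo with
   inherent_size == 4 that reuses an existing 8-byte slot reaches this
   point with ADJUST == 0, and the line above then makes ADJUST 4, so
   the address computed below points at the low-order 4 bytes of the
   slot.  For a freshly allocated slot the "cancel" assignment above
   makes the two corrections net out, since assign_stack_local already
   applied the same big-endian adjustment.  */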
2020 /* If we have any adjustment to make, or if the stack slot is the
2021 wrong mode, make a new stack slot. */
2022 x = adjust_address_nv (x, GET_MODE (regno_reg_rtx[i]), adjust);
2024 /* If we have a decl for the original register, set it for the
2025 memory. If this is a shared MEM, make a copy. */
2026 if (REG_EXPR (regno_reg_rtx[i])
2027 && DECL_P (REG_EXPR (regno_reg_rtx[i])))
2029 rtx decl = DECL_RTL_IF_SET (REG_EXPR (regno_reg_rtx[i]));
2031 /* We can do this only for the DECL's home pseudo, not for
2032 any copies of it, since otherwise when the stack slot
2033 is reused, nonoverlapping_memrefs_p might think they
2034 cannot overlap. */
2035 if (decl && REG_P (decl) && REGNO (decl) == (unsigned) i)
2037 if (from_reg != -1 && spill_stack_slot[from_reg] == x)
2038 x = copy_rtx (x);
2040 set_mem_attrs_from_reg (x, regno_reg_rtx[i]);
2044 /* Save the stack slot for later. */
2045 reg_equiv_memory_loc[i] = x;
2049 /* Mark the slots in regs_ever_live for the hard regs
2050 used by pseudo-reg number REGNO. */
2052 void
2053 mark_home_live (int regno)
2055 int i, lim;
2057 i = reg_renumber[regno];
2058 if (i < 0)
2059 return;
2060 lim = i + hard_regno_nregs[i][PSEUDO_REGNO_MODE (regno)];
2061 while (i < lim)
2062 regs_ever_live[i++] = 1;
2065 /* This function handles the tracking of elimination offsets around branches.
2067 X is a piece of RTL being scanned.
2069 INSN is the insn that it came from, if any.
2071 INITIAL_P is nonzero if we are to set the offset to be the initial
2072 offset and zero if we are setting the offset of the label to be the
2073 current offset. */
2075 static void
2076 set_label_offsets (rtx x, rtx insn, int initial_p)
2078 enum rtx_code code = GET_CODE (x);
2079 rtx tem;
2080 unsigned int i;
2081 struct elim_table *p;
2083 switch (code)
2085 case LABEL_REF:
2086 if (LABEL_REF_NONLOCAL_P (x))
2087 return;
2089 x = XEXP (x, 0);
2091 /* ... fall through ... */
2093 case CODE_LABEL:
2094 /* If we know nothing about this label, set the desired offsets. Note
2095 that this sets the offset at a label to be the offset before a label
2096 if we don't know anything about the label. This is not correct for
2097 the label after a BARRIER, but is the best guess we can make. If
2098 we guessed wrong, we will suppress an elimination that might have
2099 been possible had we been able to guess correctly. */
2101 if (! offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num])
2103 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2104 offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
2105 = (initial_p ? reg_eliminate[i].initial_offset
2106 : reg_eliminate[i].offset);
2107 offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num] = 1;
2110 /* Otherwise, if this is the definition of a label and it is
2111 preceded by a BARRIER, set our offsets to the known offset of
2112 that label. */
2114 else if (x == insn
2115 && (tem = prev_nonnote_insn (insn)) != 0
2116 && BARRIER_P (tem))
2117 set_offsets_for_label (insn);
2118 else
2119 /* If neither of the above cases is true, compare each offset
2120 with those previously recorded and suppress any eliminations
2121 where the offsets disagree. */
2123 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2124 if (offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
2125 != (initial_p ? reg_eliminate[i].initial_offset
2126 : reg_eliminate[i].offset))
2127 reg_eliminate[i].can_eliminate = 0;
2129 return;
2131 case JUMP_INSN:
2132 set_label_offsets (PATTERN (insn), insn, initial_p);
2134 /* ... fall through ... */
2136 case INSN:
2137 case CALL_INSN:
2138 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2139 and hence must have all eliminations at their initial offsets. */
2140 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2141 if (REG_NOTE_KIND (tem) == REG_LABEL)
2142 set_label_offsets (XEXP (tem, 0), insn, 1);
2143 return;
2145 case PARALLEL:
2146 case ADDR_VEC:
2147 case ADDR_DIFF_VEC:
2148 /* Each of the labels in the parallel or address vector must be
2149 at their initial offsets. We want the first field for PARALLEL
2150 and ADDR_VEC and the second field for ADDR_DIFF_VEC. */
2152 for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2153 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2154 insn, initial_p);
2155 return;
2157 case SET:
2158 /* We only care about setting PC. If the source is not RETURN,
2159 IF_THEN_ELSE, or a label, disable any eliminations not at
2160 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2161 isn't one of those possibilities. For branches to a label,
2162 call ourselves recursively.
2164 Note that this can disable elimination unnecessarily when we have
2165 a non-local goto since it will look like a non-constant jump to
2166 someplace in the current function. This isn't a significant
2167 problem since such jumps will normally be when all elimination
2168 pairs are back to their initial offsets. */
2170 if (SET_DEST (x) != pc_rtx)
2171 return;
2173 switch (GET_CODE (SET_SRC (x)))
2175 case PC:
2176 case RETURN:
2177 return;
2179 case LABEL_REF:
2180 set_label_offsets (SET_SRC (x), insn, initial_p);
2181 return;
2183 case IF_THEN_ELSE:
2184 tem = XEXP (SET_SRC (x), 1);
2185 if (GET_CODE (tem) == LABEL_REF)
2186 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2187 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2188 break;
2190 tem = XEXP (SET_SRC (x), 2);
2191 if (GET_CODE (tem) == LABEL_REF)
2192 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2193 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2194 break;
2195 return;
2197 default:
2198 break;
2201 /* If we reach here, all eliminations must be at their initial
2202 offset because we are doing a jump to a variable address. */
2203 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2204 if (p->offset != p->initial_offset)
2205 p->can_eliminate = 0;
2206 break;
2208 default:
2209 break;
2213 /* Scan X and replace any eliminable registers (such as fp) with a
2214 replacement (such as sp), plus an offset.
2216 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2217 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2218 MEM, we are allowed to replace a sum of a register and the constant zero
2219 with the register, which we cannot do outside a MEM. In addition, we need
2220 to record the fact that a register is referenced outside a MEM.
2222 If INSN is an insn, it is the insn containing X. If we replace a REG
2223 in a SET_DEST with an equivalent MEM and INSN is nonzero, write a
2224 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2225 the REG is being modified.
2227 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2228 That's used when we eliminate in expressions stored in notes.
2229 This means, do not set ref_outside_mem even if the reference
2230 is outside of MEMs.
2232 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
2233 replacements done assuming all offsets are at their initial values. If
2234 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2235 encounter, return the actual location so that find_reloads will do
2236 the proper thing. */
2238 rtx
2239 eliminate_regs (rtx x, enum machine_mode mem_mode, rtx insn)
2241 enum rtx_code code = GET_CODE (x);
2242 struct elim_table *ep;
2243 int regno;
2244 rtx new;
2245 int i, j;
2246 const char *fmt;
2247 int copied = 0;
2249 if (! current_function_decl)
2250 return x;
2252 switch (code)
2254 case CONST_INT:
2255 case CONST_DOUBLE:
2256 case CONST_VECTOR:
2257 case CONST:
2258 case SYMBOL_REF:
2259 case CODE_LABEL:
2260 case PC:
2261 case CC0:
2262 case ASM_INPUT:
2263 case ADDR_VEC:
2264 case ADDR_DIFF_VEC:
2265 case RETURN:
2266 return x;
2268 case REG:
2269 regno = REGNO (x);
2271 /* First handle the case where we encounter a bare register that
2272 is eliminable. Replace it with a PLUS. */
2273 if (regno < FIRST_PSEUDO_REGISTER)
2275 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2276 ep++)
2277 if (ep->from_rtx == x && ep->can_eliminate)
2278 return plus_constant (ep->to_rtx, ep->previous_offset);
2281 else if (reg_renumber && reg_renumber[regno] < 0
2282 && reg_equiv_constant && reg_equiv_constant[regno]
2283 && ! CONSTANT_P (reg_equiv_constant[regno]))
2284 return eliminate_regs (copy_rtx (reg_equiv_constant[regno]),
2285 mem_mode, insn);
2286 return x;
2288 /* You might think handling MINUS in a manner similar to PLUS is a
2289 good idea. It is not. It has been tried multiple times and every
2290 time the change has had to have been reverted.
2292 Other parts of reload know a PLUS is special (gen_reload for example)
2293 and require special code to handle a reloaded PLUS operand.
2295 Also consider backends where the flags register is clobbered by a
2296 MINUS, but we can emit a PLUS that does not clobber flags (IA-32,
2297 lea instruction comes to mind). If we try to reload a MINUS, we
2298 may kill the flags register that was holding a useful value.
2300 So, please before trying to handle MINUS, consider reload as a
2301 whole instead of this little section as well as the backend issues. */
2302 case PLUS:
2303 /* If this is the sum of an eliminable register and a constant, rework
2304 the sum. */
2305 if (REG_P (XEXP (x, 0))
2306 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2307 && CONSTANT_P (XEXP (x, 1)))
2309 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2310 ep++)
2311 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2313 /* The only time we want to replace a PLUS with a REG (this
2314 occurs when the constant operand of the PLUS is the negative
2315 of the offset) is when we are inside a MEM. We won't want
2316 to do so at other times because that would change the
2317 structure of the insn in a way that reload can't handle.
2318 We special-case the commonest situation in
2319 eliminate_regs_in_insn, so just replace a PLUS with a
2320 PLUS here, unless inside a MEM. */
2321 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
2322 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2323 return ep->to_rtx;
2324 else
2325 return gen_rtx_PLUS (Pmode, ep->to_rtx,
2326 plus_constant (XEXP (x, 1),
2327 ep->previous_offset));
2330 /* If the register is not eliminable, we are done since the other
2331 operand is a constant. */
2332 return x;
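/* Illustrative example (hypothetical elimination fp -> sp with a
   previous offset of 16): inside a MEM, (plus fp -16) is rewritten to
   plain sp because the constant exactly cancels the offset; anywhere
   else, or for any other constant C, (plus fp C) becomes
   (plus sp (C + 16)), preserving the overall shape of the rtx.  */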
2335 /* If this is part of an address, we want to bring any constant to the
2336 outermost PLUS. We will do this by doing register replacement in
2337 our operands and seeing if a constant shows up in one of them.
2339 Note that there is no risk of modifying the structure of the insn,
2340 since we only get called for its operands, thus we are either
2341 modifying the address inside a MEM, or something like an address
2342 operand of a load-address insn. */
2345 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2346 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2348 if (reg_renumber && (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)))
2350 /* If one side is a PLUS and the other side is a pseudo that
2351 didn't get a hard register but has a reg_equiv_constant,
2352 we must replace the constant here since it may no longer
2353 be in the position of any operand. */
2354 if (GET_CODE (new0) == PLUS && REG_P (new1)
2355 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2356 && reg_renumber[REGNO (new1)] < 0
2357 && reg_equiv_constant != 0
2358 && reg_equiv_constant[REGNO (new1)] != 0)
2359 new1 = reg_equiv_constant[REGNO (new1)];
2360 else if (GET_CODE (new1) == PLUS && REG_P (new0)
2361 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2362 && reg_renumber[REGNO (new0)] < 0
2363 && reg_equiv_constant[REGNO (new0)] != 0)
2364 new0 = reg_equiv_constant[REGNO (new0)];
2366 new = form_sum (new0, new1);
2368 /* As above, if we are not inside a MEM we do not want to
2369 turn a PLUS into something else. We might try to do so here
2370 for an addition of 0 if we aren't optimizing. */
2371 if (! mem_mode && GET_CODE (new) != PLUS)
2372 return gen_rtx_PLUS (GET_MODE (x), new, const0_rtx);
2373 else
2374 return new;
2377 return x;
2379 case MULT:
2380 /* If this is the product of an eliminable register and a
2381 constant, apply the distributive law and move the constant out
2382 so that we have (plus (mult ..) ..). This is needed in order
2383 to keep load-address insns valid. This case is pathological.
2384 We ignore the possibility of overflow here. */
2385 if (REG_P (XEXP (x, 0))
2386 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2387 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2388 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2389 ep++)
2390 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2392 if (! mem_mode
2393 /* Refs inside notes don't count for this purpose. */
2394 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2395 || GET_CODE (insn) == INSN_LIST)))
2396 ep->ref_outside_mem = 1;
2398 return
2399 plus_constant (gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
2400 ep->previous_offset * INTVAL (XEXP (x, 1)));
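/* Worked example (hypothetical elimination fp -> sp with a previous
   offset of 16): (mult fp 4) becomes (plus (mult sp 4) 64); the offset
   is scaled by the constant factor so the distributed form computes
   the same address.  */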
2403 /* ... fall through ... */
2405 case CALL:
2406 case COMPARE:
2407 /* See comments before PLUS about handling MINUS. */
2408 case MINUS:
2409 case DIV: case UDIV:
2410 case MOD: case UMOD:
2411 case AND: case IOR: case XOR:
2412 case ROTATERT: case ROTATE:
2413 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2414 case NE: case EQ:
2415 case GE: case GT: case GEU: case GTU:
2416 case LE: case LT: case LEU: case LTU:
2418 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2419 rtx new1
2420 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;
2422 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2423 return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
2425 return x;
2427 case EXPR_LIST:
2428 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2429 if (XEXP (x, 0))
2431 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2432 if (new != XEXP (x, 0))
2434 /* If this is a REG_DEAD note, it is not valid anymore.
2435 Using the eliminated version could result in creating a
2436 REG_DEAD note for the stack or frame pointer. */
2437 if (GET_MODE (x) == REG_DEAD)
2438 return (XEXP (x, 1)
2439 ? eliminate_regs (XEXP (x, 1), mem_mode, insn)
2440 : NULL_RTX);
2442 x = gen_rtx_EXPR_LIST (REG_NOTE_KIND (x), new, XEXP (x, 1));
2446 /* ... fall through ... */
2448 case INSN_LIST:
2449 /* Now do eliminations in the rest of the chain. If this was
2450 an EXPR_LIST, this might result in allocating more memory than is
2451 strictly needed, but it simplifies the code. */
2452 if (XEXP (x, 1))
2454 new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2455 if (new != XEXP (x, 1))
2456 return
2457 gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
2459 return x;
2461 case PRE_INC:
2462 case POST_INC:
2463 case PRE_DEC:
2464 case POST_DEC:
2465 case STRICT_LOW_PART:
2466 case NEG: case NOT:
2467 case SIGN_EXTEND: case ZERO_EXTEND:
2468 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2469 case FLOAT: case FIX:
2470 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2471 case ABS:
2472 case SQRT:
2473 case FFS:
2474 case CLZ:
2475 case CTZ:
2476 case POPCOUNT:
2477 case PARITY:
2478 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2479 if (new != XEXP (x, 0))
2480 return gen_rtx_fmt_e (code, GET_MODE (x), new);
2481 return x;
2483 case SUBREG:
2484 /* Similar to above processing, but preserve SUBREG_BYTE.
2485 Convert (subreg (mem)) to (mem) if not paradoxical.
2486 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2487 pseudo didn't get a hard reg, we must replace this with the
2488 eliminated version of the memory location because push_reload
2489 may do the replacement in certain circumstances. */
2490 if (REG_P (SUBREG_REG (x))
2491 && (GET_MODE_SIZE (GET_MODE (x))
2492 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2493 && reg_equiv_memory_loc != 0
2494 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2496 new = SUBREG_REG (x);
2498 else
2499 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);
2501 if (new != SUBREG_REG (x))
2503 int x_size = GET_MODE_SIZE (GET_MODE (x));
2504 int new_size = GET_MODE_SIZE (GET_MODE (new));
2506 if (MEM_P (new)
2507 && ((x_size < new_size
2508 #ifdef WORD_REGISTER_OPERATIONS
2509 /* On these machines, combine can create rtl of the form
2510 (set (subreg:m1 (reg:m2 R) 0) ...)
2511 where m1 < m2, and expects something interesting to
2512 happen to the entire word. Moreover, it will use the
2513 (reg:m2 R) later, expecting all bits to be preserved.
2514 So if the number of words is the same, preserve the
2515 subreg so that push_reload can see it. */
2516 && ! ((x_size - 1) / UNITS_PER_WORD
2517 == (new_size - 1) / UNITS_PER_WORD)
2518 #endif
2520 || x_size == new_size)
2522 return adjust_address_nv (new, GET_MODE (x), SUBREG_BYTE (x));
2523 else
2524 return gen_rtx_SUBREG (GET_MODE (x), new, SUBREG_BYTE (x));
2527 return x;
2529 case MEM:
2530 /* Our only special processing is to pass the mode of the MEM to our
2531 recursive call and copy the flags. While we are here, handle this
2532 case more efficiently. */
2533 return
2534 replace_equiv_address_nv (x,
2535 eliminate_regs (XEXP (x, 0),
2536 GET_MODE (x), insn));
2538 case USE:
2539 /* Handle insn_list USE that a call to a pure function may generate. */
2540 new = eliminate_regs (XEXP (x, 0), 0, insn);
2541 if (new != XEXP (x, 0))
2542 return gen_rtx_USE (GET_MODE (x), new);
2543 return x;
2545 case CLOBBER:
2546 case ASM_OPERANDS:
2547 case SET:
2548 gcc_unreachable ();
2550 default:
2551 break;
2554 /* Process each of our operands recursively. If any have changed, make a
2555 copy of the rtx. */
2556 fmt = GET_RTX_FORMAT (code);
2557 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2559 if (*fmt == 'e')
2561 new = eliminate_regs (XEXP (x, i), mem_mode, insn);
2562 if (new != XEXP (x, i) && ! copied)
2564 rtx new_x = rtx_alloc (code);
2565 memcpy (new_x, x, RTX_SIZE (code));
2566 x = new_x;
2567 copied = 1;
2569 XEXP (x, i) = new;
2571 else if (*fmt == 'E')
2573 int copied_vec = 0;
2574 for (j = 0; j < XVECLEN (x, i); j++)
2576 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
2577 if (new != XVECEXP (x, i, j) && ! copied_vec)
2579 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
2580 XVEC (x, i)->elem);
2581 if (! copied)
2583 rtx new_x = rtx_alloc (code);
2584 memcpy (new_x, x, RTX_SIZE (code));
2585 x = new_x;
2586 copied = 1;
2588 XVEC (x, i) = new_v;
2589 copied_vec = 1;
2591 XVECEXP (x, i, j) = new;
2596 return x;
2599 /* Scan rtx X for modifications of elimination target registers. Update
2600 the table of eliminables to reflect the changed state. MEM_MODE is
2601 the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM. */
2603 static void
2604 elimination_effects (rtx x, enum machine_mode mem_mode)
2606 enum rtx_code code = GET_CODE (x);
2607 struct elim_table *ep;
2608 int regno;
2609 int i, j;
2610 const char *fmt;
2612 switch (code)
2614 case CONST_INT:
2615 case CONST_DOUBLE:
2616 case CONST_VECTOR:
2617 case CONST:
2618 case SYMBOL_REF:
2619 case CODE_LABEL:
2620 case PC:
2621 case CC0:
2622 case ASM_INPUT:
2623 case ADDR_VEC:
2624 case ADDR_DIFF_VEC:
2625 case RETURN:
2626 return;
2628 case REG:
2629 regno = REGNO (x);
2631 /* First handle the case where we encounter a bare register that
2632 is eliminable. Replace it with a PLUS. */
2633 if (regno < FIRST_PSEUDO_REGISTER)
2635 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2636 ep++)
2637 if (ep->from_rtx == x && ep->can_eliminate)
2639 if (! mem_mode)
2640 ep->ref_outside_mem = 1;
2641 return;
2645 else if (reg_renumber[regno] < 0 && reg_equiv_constant
2646 && reg_equiv_constant[regno]
2647 && ! function_invariant_p (reg_equiv_constant[regno]))
2648 elimination_effects (reg_equiv_constant[regno], mem_mode);
2649 return;
2651 case PRE_INC:
2652 case POST_INC:
2653 case PRE_DEC:
2654 case POST_DEC:
2655 case POST_MODIFY:
2656 case PRE_MODIFY:
2657 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2658 if (ep->to_rtx == XEXP (x, 0))
2660 int size = GET_MODE_SIZE (mem_mode);
2662 /* If more bytes than MEM_MODE are pushed, account for them. */
2663 #ifdef PUSH_ROUNDING
2664 if (ep->to_rtx == stack_pointer_rtx)
2665 size = PUSH_ROUNDING (size);
2666 #endif
2667 if (code == PRE_DEC || code == POST_DEC)
2668 ep->offset += size;
2669 else if (code == PRE_INC || code == POST_INC)
2670 ep->offset -= size;
2671 else if ((code == PRE_MODIFY || code == POST_MODIFY)
2672 && GET_CODE (XEXP (x, 1)) == PLUS
2673 && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
2674 && CONSTANT_P (XEXP (XEXP (x, 1), 1)))
2675 ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1));
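/* Illustrative example (hypothetical 4-byte MEM_MODE with the stack
   pointer as the replacement register): a (pre_dec sp) push moves sp
   down by 4 (or by PUSH_ROUNDING (4) where that applies), so the
   elimination offset grows by that amount; a (post_inc sp) shrinks it
   by the same amount, and (pre_modify sp (plus sp -12)) adjusts it
   by +12.  */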
2678 /* These two aren't unary operators. */
2679 if (code == POST_MODIFY || code == PRE_MODIFY)
2680 break;
2682 /* Fall through to generic unary operation case. */
2683 case STRICT_LOW_PART:
2684 case NEG: case NOT:
2685 case SIGN_EXTEND: case ZERO_EXTEND:
2686 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2687 case FLOAT: case FIX:
2688 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2689 case ABS:
2690 case SQRT:
2691 case FFS:
2692 case CLZ:
2693 case CTZ:
2694 case POPCOUNT:
2695 case PARITY:
2696 elimination_effects (XEXP (x, 0), mem_mode);
2697 return;
2699 case SUBREG:
2700 if (REG_P (SUBREG_REG (x))
2701 && (GET_MODE_SIZE (GET_MODE (x))
2702 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2703 && reg_equiv_memory_loc != 0
2704 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2705 return;
2707 elimination_effects (SUBREG_REG (x), mem_mode);
2708 return;
2710 case USE:
2711 /* If using a register that is the source of an elimination we still
2712 think can be performed, note it cannot be performed since we don't
2713 know how this register is used. */
2714 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2715 if (ep->from_rtx == XEXP (x, 0))
2716 ep->can_eliminate = 0;
2718 elimination_effects (XEXP (x, 0), mem_mode);
2719 return;
2721 case CLOBBER:
2722 /* If clobbering a register that is the replacement register for an
2723 elimination we still think can be performed, note that it cannot
2724 be performed. Otherwise, we need not be concerned about it. */
2725 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2726 if (ep->to_rtx == XEXP (x, 0))
2727 ep->can_eliminate = 0;
2729 elimination_effects (XEXP (x, 0), mem_mode);
2730 return;
2732 case SET:
2733 /* Check for setting a register that we know about. */
2734 if (REG_P (SET_DEST (x)))
2736 /* See if this is setting the replacement register for an
2737 elimination.
2739 If DEST is the hard frame pointer, we do nothing because we
2740 assume that all assignments to the frame pointer are for
2741 non-local gotos and are being done at a time when they are valid
2742 and do not disturb anything else. Some machines want to
2743 eliminate a fake argument pointer (or even a fake frame pointer)
2744 with either the real frame or the stack pointer. Assignments to
2745 the hard frame pointer must not prevent this elimination. */
2747 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2748 ep++)
2749 if (ep->to_rtx == SET_DEST (x)
2750 && SET_DEST (x) != hard_frame_pointer_rtx)
2752 /* If it is being incremented, adjust the offset. Otherwise,
2753 this elimination can't be done. */
2754 rtx src = SET_SRC (x);
2756 if (GET_CODE (src) == PLUS
2757 && XEXP (src, 0) == SET_DEST (x)
2758 && GET_CODE (XEXP (src, 1)) == CONST_INT)
2759 ep->offset -= INTVAL (XEXP (src, 1));
2760 else
2761 ep->can_eliminate = 0;
2765 elimination_effects (SET_DEST (x), 0);
2766 elimination_effects (SET_SRC (x), 0);
2767 return;
2769 case MEM:
2770 /* Our only special processing is to pass the mode of the MEM to our
2771 recursive call. */
2772 elimination_effects (XEXP (x, 0), GET_MODE (x));
2773 return;
2775 default:
2776 break;
2779 fmt = GET_RTX_FORMAT (code);
2780 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2782 if (*fmt == 'e')
2783 elimination_effects (XEXP (x, i), mem_mode);
2784 else if (*fmt == 'E')
2785 for (j = 0; j < XVECLEN (x, i); j++)
2786 elimination_effects (XVECEXP (x, i, j), mem_mode);
2790 /* Descend through rtx X and verify that no references to eliminable registers
2791 remain. If any do remain, mark the involved register as not
2792 eliminable. */
2794 static void
2795 check_eliminable_occurrences (rtx x)
2797 const char *fmt;
2798 int i;
2799 enum rtx_code code;
2801 if (x == 0)
2802 return;
2804 code = GET_CODE (x);
2806 if (code == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
2808 struct elim_table *ep;
2810 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2811 if (ep->from_rtx == x)
2812 ep->can_eliminate = 0;
2813 return;
2816 fmt = GET_RTX_FORMAT (code);
2817 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2819 if (*fmt == 'e')
2820 check_eliminable_occurrences (XEXP (x, i));
2821 else if (*fmt == 'E')
2823 int j;
2824 for (j = 0; j < XVECLEN (x, i); j++)
2825 check_eliminable_occurrences (XVECEXP (x, i, j));
2830 /* Scan INSN and eliminate all eliminable registers in it.
2832 If REPLACE is nonzero, do the replacement destructively. Also
2833 delete the insn as dead if it is setting an eliminable register.
2835 If REPLACE is zero, do all our allocations in reload_obstack.
2837 If no eliminations were done and this insn doesn't require any elimination
2838 processing (these are not identical conditions: it might be updating sp,
2839 but not referencing fp; this needs to be seen during reload_as_needed so
2840 that the offset between fp and sp can be taken into consideration), zero
2841 is returned. Otherwise, 1 is returned. */
2843 static int
2844 eliminate_regs_in_insn (rtx insn, int replace)
2846 int icode = recog_memoized (insn);
2847 rtx old_body = PATTERN (insn);
2848 int insn_is_asm = asm_noperands (old_body) >= 0;
2849 rtx old_set = single_set (insn);
2850 rtx new_body;
2851 int val = 0;
2852 int i;
2853 rtx substed_operand[MAX_RECOG_OPERANDS];
2854 rtx orig_operand[MAX_RECOG_OPERANDS];
2855 struct elim_table *ep;
2856 rtx plus_src;
2858 if (! insn_is_asm && icode < 0)
2860 gcc_assert (GET_CODE (PATTERN (insn)) == USE
2861 || GET_CODE (PATTERN (insn)) == CLOBBER
2862 || GET_CODE (PATTERN (insn)) == ADDR_VEC
2863 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
2864 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
2865 return 0;
2868 if (old_set != 0 && REG_P (SET_DEST (old_set))
2869 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
2871 /* Check for setting an eliminable register. */
2872 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2873 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
2875 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2876 /* If this is setting the frame pointer register to the
2877 hardware frame pointer register and this is an elimination
2878 that will be done (tested above), this insn is really
2879 adjusting the frame pointer downward to compensate for
2880 the adjustment done before a nonlocal goto. */
2881 if (ep->from == FRAME_POINTER_REGNUM
2882 && ep->to == HARD_FRAME_POINTER_REGNUM)
2884 rtx base = SET_SRC (old_set);
2885 rtx base_insn = insn;
2886 HOST_WIDE_INT offset = 0;
2888 while (base != ep->to_rtx)
2890 rtx prev_insn, prev_set;
2892 if (GET_CODE (base) == PLUS
2893 && GET_CODE (XEXP (base, 1)) == CONST_INT)
2895 offset += INTVAL (XEXP (base, 1));
2896 base = XEXP (base, 0);
2898 else if ((prev_insn = prev_nonnote_insn (base_insn)) != 0
2899 && (prev_set = single_set (prev_insn)) != 0
2900 && rtx_equal_p (SET_DEST (prev_set), base))
2902 base = SET_SRC (prev_set);
2903 base_insn = prev_insn;
2905 else
2906 break;
2909 if (base == ep->to_rtx)
2911 rtx src
2912 = plus_constant (ep->to_rtx, offset - ep->offset);
2914 new_body = old_body;
2915 if (! replace)
2917 new_body = copy_insn (old_body);
2918 if (REG_NOTES (insn))
2919 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
2921 PATTERN (insn) = new_body;
2922 old_set = single_set (insn);
2924 /* First see if this insn remains valid when we
2925 make the change. If not, keep the INSN_CODE
2926 the same and let reload fix it up. */
2927 validate_change (insn, &SET_SRC (old_set), src, 1);
2928 validate_change (insn, &SET_DEST (old_set),
2929 ep->to_rtx, 1);
2930 if (! apply_change_group ())
2932 SET_SRC (old_set) = src;
2933 SET_DEST (old_set) = ep->to_rtx;
2936 val = 1;
2937 goto done;
2940 #endif
2942 /* In this case this insn isn't serving a useful purpose. We
2943 will delete it in reload_as_needed once we know that this
2944 elimination is, in fact, being done.
2946 If REPLACE isn't set, we can't delete this insn, but needn't
2947 process it since it won't be used unless something changes. */
2948 if (replace)
2950 delete_dead_insn (insn);
2951 return 1;
2953 val = 1;
2954 goto done;
2958 /* We allow one special case which happens to work on all machines we
2959 currently support: a single set with the source or a REG_EQUAL
2960 note being a PLUS of an eliminable register and a constant. */
2961 plus_src = 0;
2962 if (old_set && REG_P (SET_DEST (old_set)))
2964 /* First see if the source is of the form (plus (reg) CST). */
2965 if (GET_CODE (SET_SRC (old_set)) == PLUS
2966 && REG_P (XEXP (SET_SRC (old_set), 0))
2967 && GET_CODE (XEXP (SET_SRC (old_set), 1)) == CONST_INT
2968 && REGNO (XEXP (SET_SRC (old_set), 0)) < FIRST_PSEUDO_REGISTER)
2969 plus_src = SET_SRC (old_set);
2970 else if (REG_P (SET_SRC (old_set)))
2972 /* Otherwise, see if we have a REG_EQUAL note of the form
2973 (plus (reg) CST). */
2974 rtx links;
2975 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
2977 if (REG_NOTE_KIND (links) == REG_EQUAL
2978 && GET_CODE (XEXP (links, 0)) == PLUS
2979 && REG_P (XEXP (XEXP (links, 0), 0))
2980 && GET_CODE (XEXP (XEXP (links, 0), 1)) == CONST_INT
2981 && REGNO (XEXP (XEXP (links, 0), 0)) < FIRST_PSEUDO_REGISTER)
2983 plus_src = XEXP (links, 0);
2984 break;
2989 if (plus_src)
2991 rtx reg = XEXP (plus_src, 0);
2992 HOST_WIDE_INT offset = INTVAL (XEXP (plus_src, 1));
2994 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2995 if (ep->from_rtx == reg && ep->can_eliminate)
2997 offset += ep->offset;
2999 if (offset == 0)
3001 int num_clobbers;
3002 /* We assume here that if we need a PARALLEL with
3003 CLOBBERs for this assignment, we can do with the
3004 MATCH_SCRATCHes that add_clobbers allocates.
3005 There's not much we can do if that doesn't work. */
3006 PATTERN (insn) = gen_rtx_SET (VOIDmode,
3007 SET_DEST (old_set),
3008 ep->to_rtx);
3009 num_clobbers = 0;
3010 INSN_CODE (insn) = recog (PATTERN (insn), insn, &num_clobbers);
3011 if (num_clobbers)
3013 rtvec vec = rtvec_alloc (num_clobbers + 1);
3015 vec->elem[0] = PATTERN (insn);
3016 PATTERN (insn) = gen_rtx_PARALLEL (VOIDmode, vec);
3017 add_clobbers (PATTERN (insn), INSN_CODE (insn));
3019 gcc_assert (INSN_CODE (insn) >= 0);
3021 /* If we have a nonzero offset, and the source is already
3022 a simple REG, the following transformation would
3023 increase the cost of the insn by replacing a simple REG
3024 with (plus (reg sp) CST). So try only when plus_src
3025 comes from old_set proper, not REG_NOTES. */
3026 else if (SET_SRC (old_set) == plus_src)
3028 new_body = old_body;
3029 if (! replace)
3031 new_body = copy_insn (old_body);
3032 if (REG_NOTES (insn))
3033 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3035 PATTERN (insn) = new_body;
3036 old_set = single_set (insn);
3038 XEXP (SET_SRC (old_set), 0) = ep->to_rtx;
3039 XEXP (SET_SRC (old_set), 1) = GEN_INT (offset);
3041 else
3042 break;
3044 val = 1;
3045 /* This can't have an effect on elimination offsets, so skip right
3046 to the end. */
3047 goto done;
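/* Worked example (hypothetical elimination fp -> sp with a current
   offset of 16): (set r10 (plus fp -16)) collapses to (set r10 sp) and
   is re-recognized, adding any CLOBBERs the new pattern requires,
   whereas (set r10 (plus fp 8)) is rewritten in place as
   (set r10 (plus sp 24)); the in-place rewrite is only done when the
   PLUS came from the SET itself rather than from a REG_EQUAL note.  */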
3051 /* Determine the effects of this insn on elimination offsets. */
3052 elimination_effects (old_body, 0);
3054 /* Eliminate all eliminable registers occurring in operands that
3055 can be handled by reload. */
3056 extract_insn (insn);
3057 for (i = 0; i < recog_data.n_operands; i++)
3059 orig_operand[i] = recog_data.operand[i];
3060 substed_operand[i] = recog_data.operand[i];
3062 /* For an asm statement, every operand is eliminable. */
3063 if (insn_is_asm || insn_data[icode].operand[i].eliminable)
3065 /* Check for setting a register that we know about. */
3066 if (recog_data.operand_type[i] != OP_IN
3067 && REG_P (orig_operand[i]))
3069 /* If we are assigning to a register that can be eliminated, it
3070 must be as part of a PARALLEL, since the code above handles
3071 single SETs. We must indicate that we can no longer
3072 eliminate this reg. */
3073 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3074 ep++)
3075 if (ep->from_rtx == orig_operand[i])
3076 ep->can_eliminate = 0;
3079 substed_operand[i] = eliminate_regs (recog_data.operand[i], 0,
3080 replace ? insn : NULL_RTX);
3081 if (substed_operand[i] != orig_operand[i])
3082 val = 1;
3083 /* Terminate the search in check_eliminable_occurrences at
3084 this point. */
3085 *recog_data.operand_loc[i] = 0;
3087 /* If an output operand changed from a REG to a MEM and INSN is an
3088 insn, write a CLOBBER insn. */
3089 if (recog_data.operand_type[i] != OP_IN
3090 && REG_P (orig_operand[i])
3091 && MEM_P (substed_operand[i])
3092 && replace)
3093 emit_insn_after (gen_rtx_CLOBBER (VOIDmode, orig_operand[i]),
3094 insn);
3098 for (i = 0; i < recog_data.n_dups; i++)
3099 *recog_data.dup_loc[i]
3100 = *recog_data.operand_loc[(int) recog_data.dup_num[i]];
3102 /* If any references to eliminable registers remain, those registers aren't eliminable anymore. */
3103 check_eliminable_occurrences (old_body);
3105 /* Substitute the operands; the new values are in the substed_operand
3106 array. */
3107 for (i = 0; i < recog_data.n_operands; i++)
3108 *recog_data.operand_loc[i] = substed_operand[i];
3109 for (i = 0; i < recog_data.n_dups; i++)
3110 *recog_data.dup_loc[i] = substed_operand[(int) recog_data.dup_num[i]];
3112 /* If we are replacing a body that was a (set X (plus Y Z)), try to
3113 re-recognize the insn. We do this in case we had a simple addition
3114 but now can do this as a load-address. This saves an insn in this
3115 common case.
3116 If re-recognition fails, the old insn code number will still be used,
3117 and some register operands may have changed into PLUS expressions.
3118 These will be handled by find_reloads by loading them into a register
3119 again. */
3121 if (val)
3123 /* If we aren't replacing things permanently and we changed something,
3124 make another copy to ensure that all the RTL is new. Otherwise
3125 things can go wrong if find_reloads swaps commutative operands
3126 and one is inside RTL that has been copied while the other is not. */
3127 new_body = old_body;
3128 if (! replace)
3130 new_body = copy_insn (old_body);
3131 if (REG_NOTES (insn))
3132 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3134 PATTERN (insn) = new_body;
3136 /* If we had a move insn but now we don't, rerecognize it. This will
3137 cause spurious re-recognition if the old move had a PARALLEL since
3138 the new one still will, but we can't call single_set without
3139 having put NEW_BODY into the insn and the re-recognition won't
3140 hurt in this rare case. */
3141 /* ??? Why this huge if statement - why don't we just rerecognize the
3142 thing always? */
3143 if (! insn_is_asm
3144 && old_set != 0
3145 && ((REG_P (SET_SRC (old_set))
3146 && (GET_CODE (new_body) != SET
3147 || !REG_P (SET_SRC (new_body))))
3148 /* If this was a load from or store to memory, compare
3149 the MEM in recog_data.operand to the one in the insn.
3150 If they are not equal, then rerecognize the insn. */
3151 || (old_set != 0
3152 && ((MEM_P (SET_SRC (old_set))
3153 && SET_SRC (old_set) != recog_data.operand[1])
3154 || (MEM_P (SET_DEST (old_set))
3155 && SET_DEST (old_set) != recog_data.operand[0])))
3156 /* If this was an add insn before, rerecognize. */
3157 || GET_CODE (SET_SRC (old_set)) == PLUS))
3159 int new_icode = recog (PATTERN (insn), insn, 0);
3160 if (new_icode < 0)
3161 INSN_CODE (insn) = icode;
3165 /* Restore the old body. If there were any changes to it, we made a copy
3166 of it while the changes were still in place, so we'll correctly return
3167 a modified insn below. */
3168 if (! replace)
3170 /* Restore the old body. */
3171 for (i = 0; i < recog_data.n_operands; i++)
3172 *recog_data.operand_loc[i] = orig_operand[i];
3173 for (i = 0; i < recog_data.n_dups; i++)
3174 *recog_data.dup_loc[i] = orig_operand[(int) recog_data.dup_num[i]];
3177 /* Update all elimination pairs to reflect the status after the current
3178 insn. The changes we make were determined by the earlier call to
3179 elimination_effects.
3181 We also detect cases where register elimination cannot be done,
3182 namely, if a register would be both changed and referenced outside a MEM
3183 in the resulting insn since such an insn is often undefined and, even if
3184 not, we cannot know what meaning will be given to it. Note that it is
3185 valid to have a register used in an address in an insn that changes it
3186 (presumably with a pre- or post-increment or decrement).
3188 If anything changes, return nonzero. */
3190 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3192 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3193 ep->can_eliminate = 0;
3195 ep->ref_outside_mem = 0;
3197 if (ep->previous_offset != ep->offset)
3198 val = 1;
3201 done:
3202 /* If we changed something, perform elimination in REG_NOTES. This is
3203 needed even when REPLACE is zero because a REG_DEAD note might refer
3204 to a register that we eliminate and could cause a different number
3205 of spill registers to be needed in the final reload pass than in
3206 the pre-passes. */
3207 if (val && REG_NOTES (insn) != 0)
3208 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));
3210 return val;
3213 /* Loop through all elimination pairs.
3214 Recalculate the number not at initial offset.
3216 Compute the maximum offset (minimum offset if the stack does not
3217 grow downward) for each elimination pair. */
3219 static void
3220 update_eliminable_offsets (void)
3222 struct elim_table *ep;
3224 num_not_at_initial_offset = 0;
3225 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3227 ep->previous_offset = ep->offset;
3228 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3229 num_not_at_initial_offset++;
3233 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3234 replacement we currently believe is valid, mark it as not eliminable if X
3235 modifies DEST in any way other than by adding a constant integer to it.
3237 If DEST is the frame pointer, we do nothing because we assume that
3238 all assignments to the hard frame pointer are nonlocal gotos and are being
3239 done at a time when they are valid and do not disturb anything else.
3240 Some machines want to eliminate a fake argument pointer with either the
3241 frame or stack pointer. Assignments to the hard frame pointer must not
3242 prevent this elimination.
3244 Called via note_stores from reload before starting its passes to scan
3245 the insns of the function. */
3247 static void
3248 mark_not_eliminable (rtx dest, rtx x, void *data ATTRIBUTE_UNUSED)
3250 unsigned int i;
3252 /* A SUBREG of a hard register here is just changing its mode. We should
3253 not see a SUBREG of an eliminable hard register, but check just in
3254 case. */
3255 if (GET_CODE (dest) == SUBREG)
3256 dest = SUBREG_REG (dest);
3258 if (dest == hard_frame_pointer_rtx)
3259 return;
3261 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3262 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3263 && (GET_CODE (x) != SET
3264 || GET_CODE (SET_SRC (x)) != PLUS
3265 || XEXP (SET_SRC (x), 0) != dest
3266 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3268 reg_eliminate[i].can_eliminate_previous
3269 = reg_eliminate[i].can_eliminate = 0;
3270 num_eliminable--;
3274 /* Verify that the initial elimination offsets did not change since the
3275 last call to set_initial_elim_offsets. This is used to catch cases
3276 where something illegal happened during reload_as_needed that could
3277 cause incorrect code to be generated if we did not check for it. */
3279 static void
3280 verify_initial_elim_offsets (void)
3282 HOST_WIDE_INT t;
3284 #ifdef ELIMINABLE_REGS
3285 struct elim_table *ep;
3287 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3289 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t);
3290 gcc_assert (t == ep->initial_offset);
3292 #else
3293 INITIAL_FRAME_POINTER_OFFSET (t);
3294 gcc_assert (t == reg_eliminate[0].initial_offset);
3295 #endif
3298 /* Reset all offsets on eliminable registers to their initial values. */
3300 static void
3301 set_initial_elim_offsets (void)
3303 struct elim_table *ep = reg_eliminate;
3305 #ifdef ELIMINABLE_REGS
3306 for (; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3308 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
3309 ep->previous_offset = ep->offset = ep->initial_offset;
3311 #else
3312 INITIAL_FRAME_POINTER_OFFSET (ep->initial_offset);
3313 ep->previous_offset = ep->offset = ep->initial_offset;
3314 #endif
3316 num_not_at_initial_offset = 0;
3319 /* Subroutine of set_initial_label_offsets called via for_each_eh_label. */
3321 static void
3322 set_initial_eh_label_offset (rtx label)
3324 set_label_offsets (label, NULL_RTX, 1);
3327 /* Initialize the known label offsets.
3328 Set a known offset for each forced label to be at the initial offset
3329 of each elimination. We do this because we assume that all
3330 computed jumps occur from a location where each elimination is
3331 at its initial offset.
3332 For all other labels, show that we don't know the offsets. */
3334 static void
3335 set_initial_label_offsets (void)
3337 rtx x;
3338 memset (offsets_known_at, 0, num_labels);
3340 for (x = forced_labels; x; x = XEXP (x, 1))
3341 if (XEXP (x, 0))
3342 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
3344 for_each_eh_label (set_initial_eh_label_offset);
3347 /* Set all elimination offsets to the known values for the code label given
3348 by INSN. */
3350 static void
3351 set_offsets_for_label (rtx insn)
3353 unsigned int i;
3354 int label_nr = CODE_LABEL_NUMBER (insn);
3355 struct elim_table *ep;
3357 num_not_at_initial_offset = 0;
3358 for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++)
3360 ep->offset = ep->previous_offset
3361 = offsets_at[label_nr - first_label_num][i];
3362 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3363 num_not_at_initial_offset++;
3367 /* See if anything that happened changes which eliminations are valid.
3368 For example, on the SPARC, whether or not the frame pointer can
3369 be eliminated can depend on what registers have been used. We need
3370 not check some conditions again (such as flag_omit_frame_pointer)
3371 since they can't have changed. */
3373 static void
3374 update_eliminables (HARD_REG_SET *pset)
3376 int previous_frame_pointer_needed = frame_pointer_needed;
3377 struct elim_table *ep;
3379 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3380 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
3381 #ifdef ELIMINABLE_REGS
3382 || ! CAN_ELIMINATE (ep->from, ep->to)
3383 #endif
3385 ep->can_eliminate = 0;
3387 /* Look for the case where we have discovered that we can't replace
3388 register A with register B and that means that we will now be
3389 trying to replace register A with register C. This means we can
3390 no longer replace register C with register B and we need to disable
3391 such an elimination, if it exists. This occurs often with A == ap,
3392 B == sp, and C == fp. */
3394 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3396 struct elim_table *op;
3397 int new_to = -1;
3399 if (! ep->can_eliminate && ep->can_eliminate_previous)
3401 /* Find the current elimination for ep->from, if there is a
3402 new one. */
3403 for (op = reg_eliminate;
3404 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3405 if (op->from == ep->from && op->can_eliminate)
3407 new_to = op->to;
3408 break;
3411 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
3412 disable it. */
3413 for (op = reg_eliminate;
3414 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3415 if (op->from == new_to && op->to == ep->to)
3416 op->can_eliminate = 0;
3420 /* See if any registers that we thought we could eliminate the previous
3421 time are no longer eliminable. If so, something has changed and we
3422 must spill the register. Also, recompute the number of eliminable
3423 registers and see if the frame pointer is needed; it is if there is
3424 no elimination of the frame pointer that we can perform. */
3426 frame_pointer_needed = 1;
3427 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3429 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
3430 && ep->to != HARD_FRAME_POINTER_REGNUM)
3431 frame_pointer_needed = 0;
3433 if (! ep->can_eliminate && ep->can_eliminate_previous)
3435 ep->can_eliminate_previous = 0;
3436 SET_HARD_REG_BIT (*pset, ep->from);
3437 num_eliminable--;
3441 /* If we didn't need a frame pointer last time, but we do now, spill
3442 the hard frame pointer. */
3443 if (frame_pointer_needed && ! previous_frame_pointer_needed)
3444 SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM);
3447 /* Initialize the table of registers to eliminate. */
3449 static void
3450 init_elim_table (void)
3452 struct elim_table *ep;
3453 #ifdef ELIMINABLE_REGS
3454 const struct elim_table_1 *ep1;
3455 #endif
3457 if (!reg_eliminate)
3458 reg_eliminate = xcalloc (sizeof (struct elim_table), NUM_ELIMINABLE_REGS);
3460 /* Does this function require a frame pointer? */
3462 frame_pointer_needed = (! flag_omit_frame_pointer
3463 /* ?? If EXIT_IGNORE_STACK is set, we will not save
3464 and restore sp for alloca. So we can't eliminate
3465 the frame pointer in that case. At some point,
3466 we should improve this by emitting the
3467 sp-adjusting insns for this case. */
3468 || (current_function_calls_alloca
3469 && EXIT_IGNORE_STACK)
3470 || FRAME_POINTER_REQUIRED);
3472 num_eliminable = 0;
3474 #ifdef ELIMINABLE_REGS
3475 for (ep = reg_eliminate, ep1 = reg_eliminate_1;
3476 ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++, ep1++)
3478 ep->from = ep1->from;
3479 ep->to = ep1->to;
3480 ep->can_eliminate = ep->can_eliminate_previous
3481 = (CAN_ELIMINATE (ep->from, ep->to)
3482 && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
3484 #else
3485 reg_eliminate[0].from = reg_eliminate_1[0].from;
3486 reg_eliminate[0].to = reg_eliminate_1[0].to;
3487 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
3488 = ! frame_pointer_needed;
3489 #endif
3491 /* Count the number of eliminable registers and build the FROM and TO
3492 REG rtx's. Note that code in gen_rtx_REG will cause, e.g.,
3493 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
3494 We depend on this. */
3495 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3497 num_eliminable += ep->can_eliminate;
3498 ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
3499 ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
3503 /* Kick all pseudos out of hard register REGNO.
3505 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3506 because we found we can't eliminate some register. In that case, no pseudos
3507 are allowed to be in the register, even if they are only in a block that
3508 doesn't require spill registers, unlike the case when we are spilling this
3509 hard reg to produce another spill register.
3511 Any pseudos that needed to be kicked out are recorded in spilled_pseudos. */
3513 static void
3514 spill_hard_reg (unsigned int regno, int cant_eliminate)
3516 int i;
3518 if (cant_eliminate)
3520 SET_HARD_REG_BIT (bad_spill_regs_global, regno);
3521 regs_ever_live[regno] = 1;
3524 /* Spill every pseudo reg that was allocated to this reg
3525 or to something that overlaps this reg. */
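/* A concrete overlap case (hypothetical register numbers, 32-bit target):
   a DImode pseudo allocated to hard reg 4 also occupies hard reg 5, so
   spilling hard reg 5 must spill that pseudo too.  The test below is
   reg_renumber[i] <= regno < reg_renumber[i] + nregs, with nregs taken
   from hard_regno_nregs for the pseudo's mode.  */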
3527 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3528 if (reg_renumber[i] >= 0
3529 && (unsigned int) reg_renumber[i] <= regno
3530 && ((unsigned int) reg_renumber[i]
3531 + hard_regno_nregs[(unsigned int) reg_renumber[i]]
3532 [PSEUDO_REGNO_MODE (i)]
3533 > regno))
3534 SET_REGNO_REG_SET (&spilled_pseudos, i);
3537 /* After find_reload_regs has been run for all insns that need reloads,
3538 and/or spill_hard_regs was called, this function is used to actually
3539 spill pseudo registers and try to reallocate them. It also sets up the
3540 spill_regs array for use by choose_reload_regs. */
3542 static int
3543 finish_spills (int global)
3545 struct insn_chain *chain;
3546 int something_changed = 0;
3547 unsigned i;
3548 reg_set_iterator rsi;
3550 /* Build the spill_regs array for the function. */
3551 /* If there are some registers still to eliminate and one of the spill regs
3552 wasn't ever used before, additional stack space may have to be
3553 allocated to store this register. Thus, we may have changed the offset
3554 between the stack and frame pointers, so mark that something has changed.
3556 One might think that we need only mark that something changed if this is a call-used
3557 register. However, the set of registers that must be saved by the
3558 prologue is not identical to the call-used set. For example, the
3559 register used by the call insn for the return PC is a call-used register,
3560 but must be saved by the prologue. */
3562 n_spills = 0;
3563 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3564 if (TEST_HARD_REG_BIT (used_spill_regs, i))
3566 spill_reg_order[i] = n_spills;
3567 spill_regs[n_spills++] = i;
3568 if (num_eliminable && ! regs_ever_live[i])
3569 something_changed = 1;
3570 regs_ever_live[i] = 1;
3572 else
3573 spill_reg_order[i] = -1;
3575 EXECUTE_IF_SET_IN_REG_SET (&spilled_pseudos, FIRST_PSEUDO_REGISTER, i, rsi)
3577 /* Record the current hard register the pseudo is allocated to in
3578 pseudo_previous_regs so we avoid reallocating it to the same
3579 hard reg in a later pass. */
3580 gcc_assert (reg_renumber[i] >= 0);
3582 SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]);
3583 /* Mark it as no longer having a hard register home. */
3584 reg_renumber[i] = -1;
3585 /* We will need to scan everything again. */
3586 something_changed = 1;
3589 /* Retry global register allocation if possible. */
3590 if (global)
3592 memset (pseudo_forbidden_regs, 0, max_regno * sizeof (HARD_REG_SET));
3593 /* For every insn that needs reloads, set the registers used as spill
3594 regs in pseudo_forbidden_regs for every pseudo live across the
3595 insn. */
3596 for (chain = insns_need_reload; chain; chain = chain->next_need_reload)
3598 EXECUTE_IF_SET_IN_REG_SET
3599 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
3601 IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
3602 chain->used_spill_regs);
3604 EXECUTE_IF_SET_IN_REG_SET
3605 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
3607 IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
3608 chain->used_spill_regs);
3612 /* Retry allocating the spilled pseudos. For each reg, merge the
3613 various reg sets that indicate which hard regs can't be used,
3614 and call retry_global_alloc.
3615 We change spill_pseudos here to only contain pseudos that did not
3616 get a new hard register. */
3617 for (i = FIRST_PSEUDO_REGISTER; i < (unsigned)max_regno; i++)
3618 if (reg_old_renumber[i] != reg_renumber[i])
3620 HARD_REG_SET forbidden;
3621 COPY_HARD_REG_SET (forbidden, bad_spill_regs_global);
3622 IOR_HARD_REG_SET (forbidden, pseudo_forbidden_regs[i]);
3623 IOR_HARD_REG_SET (forbidden, pseudo_previous_regs[i]);
3624 retry_global_alloc (i, forbidden);
3625 if (reg_renumber[i] >= 0)
3626 CLEAR_REGNO_REG_SET (&spilled_pseudos, i);
3630 /* Fix up the register information in the insn chain.
3631 This involves deleting those of the spilled pseudos which did not get
3632 a new hard register home from the live_throughout and dead_or_set sets.
3633 for (chain = reload_insn_chain; chain; chain = chain->next)
3635 HARD_REG_SET used_by_pseudos;
3636 HARD_REG_SET used_by_pseudos2;
3638 AND_COMPL_REG_SET (&chain->live_throughout, &spilled_pseudos);
3639 AND_COMPL_REG_SET (&chain->dead_or_set, &spilled_pseudos);
3641 /* Mark any unallocated hard regs as available for spills. That
3642 makes inheritance work somewhat better. */
3643 if (chain->need_reload)
3645 REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
3646 REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
3647 IOR_HARD_REG_SET (used_by_pseudos, used_by_pseudos2);
3649 /* Save the old value for the sanity test below. */
3650 COPY_HARD_REG_SET (used_by_pseudos2, chain->used_spill_regs);
3652 compute_use_by_pseudos (&used_by_pseudos, &chain->live_throughout);
3653 compute_use_by_pseudos (&used_by_pseudos, &chain->dead_or_set);
3654 COMPL_HARD_REG_SET (chain->used_spill_regs, used_by_pseudos);
3655 AND_HARD_REG_SET (chain->used_spill_regs, used_spill_regs);
3657 /* Make sure we only enlarge the set. */
3658 GO_IF_HARD_REG_SUBSET (used_by_pseudos2, chain->used_spill_regs, ok);
3659 gcc_unreachable ();
3660 ok:;
3664 /* Let alter_reg modify the reg rtx's for the modified pseudos. */
3665 for (i = FIRST_PSEUDO_REGISTER; i < (unsigned)max_regno; i++)
3667 int regno = reg_renumber[i];
3668 if (reg_old_renumber[i] == regno)
3669 continue;
3671 alter_reg (i, reg_old_renumber[i]);
3672 reg_old_renumber[i] = regno;
3673 if (dump_file)
3675 if (regno == -1)
3676 fprintf (dump_file, " Register %d now on stack.\n\n", i);
3677 else
3678 fprintf (dump_file, " Register %d now in %d.\n\n",
3679 i, reg_renumber[i]);
3683 return something_changed;
3686 /* Find all paradoxical subregs within X and update reg_max_ref_width. */
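/* Illustrative example (the pseudo number is made up): on a 32-bit target,
   (subreg:DI (reg:SI 70) 0) is paradoxical because the outer DImode (8 bytes)
   is wider than the inner SImode (4 bytes); scanning it records
   reg_max_ref_width[70] = 8, so a wide enough stack slot can be chosen if
   pseudo 70 is later spilled.  */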
3688 static void
3689 scan_paradoxical_subregs (rtx x)
3691 int i;
3692 const char *fmt;
3693 enum rtx_code code = GET_CODE (x);
3695 switch (code)
3697 case REG:
3698 case CONST_INT:
3699 case CONST:
3700 case SYMBOL_REF:
3701 case LABEL_REF:
3702 case CONST_DOUBLE:
3703 case CONST_VECTOR: /* shouldn't happen, but just in case. */
3704 case CC0:
3705 case PC:
3706 case USE:
3707 case CLOBBER:
3708 return;
3710 case SUBREG:
3711 if (REG_P (SUBREG_REG (x))
3712 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3713 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3714 = GET_MODE_SIZE (GET_MODE (x));
3715 return;
3717 default:
3718 break;
3721 fmt = GET_RTX_FORMAT (code);
3722 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3724 if (fmt[i] == 'e')
3725 scan_paradoxical_subregs (XEXP (x, i));
3726 else if (fmt[i] == 'E')
3728 int j;
3729 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3730 scan_paradoxical_subregs (XVECEXP (x, i, j));
3735 /* Reload pseudo-registers into hard regs around each insn as needed.
3736 Additional register load insns are output before the insn that needs it
3737 and perhaps store insns after insns that modify the reloaded pseudo reg.
3739 reg_last_reload_reg and reg_reloaded_contents keep track of
3740 which registers are already available in reload registers.
3741 We update these for the reloads that we perform,
3742 as the insns are scanned. */
3744 static void
3745 reload_as_needed (int live_known)
3747 struct insn_chain *chain;
3748 #if defined (AUTO_INC_DEC)
3749 int i;
3750 #endif
3751 rtx x;
3753 memset (spill_reg_rtx, 0, sizeof spill_reg_rtx);
3754 memset (spill_reg_store, 0, sizeof spill_reg_store);
3755 reg_last_reload_reg = xcalloc (max_regno, sizeof (rtx));
3756 reg_has_output_reload = xmalloc (max_regno);
3757 CLEAR_HARD_REG_SET (reg_reloaded_valid);
3758 CLEAR_HARD_REG_SET (reg_reloaded_call_part_clobbered);
3760 set_initial_elim_offsets ();
3762 for (chain = reload_insn_chain; chain; chain = chain->next)
3764 rtx prev = 0;
3765 rtx insn = chain->insn;
3766 rtx old_next = NEXT_INSN (insn);
3768 /* If we pass a label, copy the offsets from the label information
3769 into the current offsets of each elimination. */
3770 if (LABEL_P (insn))
3771 set_offsets_for_label (insn);
3773 else if (INSN_P (insn))
3775 rtx oldpat = copy_rtx (PATTERN (insn));
3777 /* If this is a USE or CLOBBER of a MEM, ensure that any
3778 references to eliminable registers have been removed. */
3780 if ((GET_CODE (PATTERN (insn)) == USE
3781 || GET_CODE (PATTERN (insn)) == CLOBBER)
3782 && MEM_P (XEXP (PATTERN (insn), 0)))
3783 XEXP (XEXP (PATTERN (insn), 0), 0)
3784 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
3785 GET_MODE (XEXP (PATTERN (insn), 0)),
3786 NULL_RTX);
3788 /* If we need to do register elimination processing, do so.
3789 This might delete the insn, in which case we are done. */
3790 if ((num_eliminable || num_eliminable_invariants) && chain->need_elim)
3792 eliminate_regs_in_insn (insn, 1);
3793 if (NOTE_P (insn))
3795 update_eliminable_offsets ();
3796 continue;
3800 /* If need_elim is nonzero but need_reload is zero, one might think
3801 that we could simply set n_reloads to 0. However, find_reloads
3802 could have done some manipulation of the insn (such as swapping
3803 commutative operands), and these manipulations are lost during
3804 the first pass for every insn that needs register elimination.
3805 So the actions of find_reloads must be redone here. */
3807 if (! chain->need_elim && ! chain->need_reload
3808 && ! chain->need_operand_change)
3809 n_reloads = 0;
3810 /* First find the pseudo regs that must be reloaded for this insn.
3811 This info is returned in the tables reload_... (see reload.h).
3812 Also modify the body of INSN by substituting RELOAD
3813 rtx's for those pseudo regs. */
3814 else
3816 memset (reg_has_output_reload, 0, max_regno);
3817 CLEAR_HARD_REG_SET (reg_is_output_reload);
3819 find_reloads (insn, 1, spill_indirect_levels, live_known,
3820 spill_reg_order);
3823 if (n_reloads > 0)
3825 rtx next = NEXT_INSN (insn);
3826 rtx p;
3828 prev = PREV_INSN (insn);
3830 /* Now compute which reload regs to reload them into. Perhaps
3831 reusing reload regs from previous insns, or else output
3832 load insns to reload them. Maybe output store insns too.
3833 Record the choices of reload reg in reload_reg_rtx. */
3834 choose_reload_regs (chain);
3836 /* Merge any reloads that we didn't combine for fear of
3837 increasing the number of spill registers needed but now
3838 discover can be safely merged. */
3839 if (SMALL_REGISTER_CLASSES)
3840 merge_assigned_reloads (insn);
3842 /* Generate the insns to reload operands into or out of
3843 their reload regs. */
3844 emit_reload_insns (chain);
3846 /* Substitute the chosen reload regs from reload_reg_rtx
3847 into the insn's body (or perhaps into the bodies of other
3848 load and store insns that we just made for reloading
3849 and that we moved the structure into). */
3850 subst_reloads (insn);
3852 /* If this was an ASM, make sure that all the reload insns
3853 we have generated are valid. If not, give an error
3854 and delete them. */
3856 if (asm_noperands (PATTERN (insn)) >= 0)
3857 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
3858 if (p != insn && INSN_P (p)
3859 && GET_CODE (PATTERN (p)) != USE
3860 && (recog_memoized (p) < 0
3861 || (extract_insn (p), ! constrain_operands (1))))
3863 error_for_asm (insn,
3864 "%<asm%> operand requires "
3865 "impossible reload");
3866 delete_insn (p);
3870 if (num_eliminable && chain->need_elim)
3871 update_eliminable_offsets ();
3873 /* Any previously reloaded spilled pseudo reg, stored in this insn,
3874 is no longer validly lying around to save a future reload.
3875 Note that this does not detect pseudos that were reloaded
3876 for this insn in order to be stored
3877 (obeying register constraints). That is correct; such reload
3878 registers ARE still valid. */
3879 note_stores (oldpat, forget_old_reloads_1, NULL);
3881 /* There may have been CLOBBER insns placed after INSN. So scan
3882 between INSN and OLD_NEXT and use them to forget old reloads.
3883 for (x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
3884 if (NONJUMP_INSN_P (x) && GET_CODE (PATTERN (x)) == CLOBBER)
3885 note_stores (PATTERN (x), forget_old_reloads_1, NULL);
3887 #ifdef AUTO_INC_DEC
3888 /* Likewise for regs altered by auto-increment in this insn.
3889 REG_INC notes have been changed by reloading:
3890 find_reloads_address_1 records substitutions for them,
3891 which have been performed by subst_reloads above. */
3892 for (i = n_reloads - 1; i >= 0; i--)
3894 rtx in_reg = rld[i].in_reg;
3895 if (in_reg)
3897 enum rtx_code code = GET_CODE (in_reg);
3898 /* PRE_INC / PRE_DEC will have the reload register ending up
3899 with the same value as the stack slot, but that doesn't
3900 hold true for POST_INC / POST_DEC. Either we have to
3901 convert the memory access to a true POST_INC / POST_DEC,
3902 or we can't use the reload register for inheritance. */
3903 if ((code == POST_INC || code == POST_DEC)
3904 && TEST_HARD_REG_BIT (reg_reloaded_valid,
3905 REGNO (rld[i].reg_rtx))
3906 /* Make sure it is the inc/dec pseudo, and not
3907 some other (e.g. output operand) pseudo. */
3908 && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
3909 == REGNO (XEXP (in_reg, 0))))
3912 rtx reload_reg = rld[i].reg_rtx;
3913 enum machine_mode mode = GET_MODE (reload_reg);
3914 int n = 0;
3915 rtx p;
3917 for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p))
3919 /* We really want to ignore REG_INC notes here, so
3920 use PATTERN (p) as argument to reg_set_p . */
3921 if (reg_set_p (reload_reg, PATTERN (p)))
3922 break;
3923 n = count_occurrences (PATTERN (p), reload_reg, 0);
3924 if (! n)
3925 continue;
3926 if (n == 1)
3928 n = validate_replace_rtx (reload_reg,
3929 gen_rtx_fmt_e (code,
3930 mode,
3931 reload_reg), p);
3934 /* We must also verify that the constraints
3935 are met after the replacement. */
3936 extract_insn (p);
3937 if (n)
3938 n = constrain_operands (1);
3939 else
3940 break;
3942 /* If the constraints were not met, then
3943 undo the replacement. */
3944 if (!n)
3946 validate_replace_rtx (gen_rtx_fmt_e (code,
3947 mode,
3948 reload_reg),
3949 reload_reg, p);
3950 break;
3954 break;
3956 if (n == 1)
3958 REG_NOTES (p)
3959 = gen_rtx_EXPR_LIST (REG_INC, reload_reg,
3960 REG_NOTES (p));
3961 /* Mark this as having an output reload so that the
3962 REG_INC processing code below won't invalidate
3963 the reload for inheritance. */
3964 SET_HARD_REG_BIT (reg_is_output_reload,
3965 REGNO (reload_reg));
3966 reg_has_output_reload[REGNO (XEXP (in_reg, 0))] = 1;
3968 else
3969 forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX,
3970 NULL);
3972 else if ((code == PRE_INC || code == PRE_DEC)
3973 && TEST_HARD_REG_BIT (reg_reloaded_valid,
3974 REGNO (rld[i].reg_rtx))
3975 /* Make sure it is the inc/dec pseudo, and not
3976 some other (e.g. output operand) pseudo. */
3977 && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
3978 == REGNO (XEXP (in_reg, 0))))
3980 SET_HARD_REG_BIT (reg_is_output_reload,
3981 REGNO (rld[i].reg_rtx));
3982 reg_has_output_reload[REGNO (XEXP (in_reg, 0))] = 1;
3986 /* If a pseudo that got a hard register is auto-incremented,
3987 we must purge records of copying it into pseudos without
3988 hard registers. */
3989 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
3990 if (REG_NOTE_KIND (x) == REG_INC)
3992 /* See if this pseudo reg was reloaded in this insn.
3993 If so, its last-reload info is still valid
3994 because it is based on this insn's reload. */
3995 for (i = 0; i < n_reloads; i++)
3996 if (rld[i].out == XEXP (x, 0))
3997 break;
3999 if (i == n_reloads)
4000 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX, NULL);
4002 #endif
4004 /* A reload reg's contents are unknown after a label. */
4005 if (LABEL_P (insn))
4006 CLEAR_HARD_REG_SET (reg_reloaded_valid);
4008 /* Don't assume a reload reg is still good after a call insn
4009 if it is a call-used reg, or if it contains a value that will
4010 be partially clobbered by the call. */
4011 else if (CALL_P (insn))
4013 AND_COMPL_HARD_REG_SET (reg_reloaded_valid, call_used_reg_set);
4014 AND_COMPL_HARD_REG_SET (reg_reloaded_valid, reg_reloaded_call_part_clobbered);
4018 /* Clean up. */
4019 free (reg_last_reload_reg);
4020 free (reg_has_output_reload);
4023 /* Discard all record of any value reloaded from X,
4024 or reloaded in X from someplace else;
4025 unless X is an output reload reg of the current insn.
4027 X may be a hard reg (the reload reg)
4028 or it may be a pseudo reg that was reloaded from. */
4030 static void
4031 forget_old_reloads_1 (rtx x, rtx ignored ATTRIBUTE_UNUSED,
4032 void *data ATTRIBUTE_UNUSED)
4034 unsigned int regno;
4035 unsigned int nr;
4037 /* note_stores does give us subregs of hard regs;
4038 subreg_regno_offset will abort if it is not a hard reg. */
4039 while (GET_CODE (x) == SUBREG)
4041 /* We ignore the subreg offset when calculating the regno,
4042 because we are using the entire underlying hard register
4043 below. */
4044 x = SUBREG_REG (x);
4047 if (!REG_P (x))
4048 return;
4050 regno = REGNO (x);
4052 if (regno >= FIRST_PSEUDO_REGISTER)
4053 nr = 1;
4054 else
4056 unsigned int i;
4058 nr = hard_regno_nregs[regno][GET_MODE (x)];
4059 /* Storing into a spilled-reg invalidates its contents.
4060 This can happen if a block-local pseudo is allocated to that reg
4061 and it wasn't spilled because this block's total need is 0.
4062 Then some insn might have an optional reload and use this reg. */
4063 for (i = 0; i < nr; i++)
4064 /* But don't do this if the reg actually serves as an output
4065 reload reg in the current instruction. */
4066 if (n_reloads == 0
4067 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
4069 CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
4070 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered, regno + i);
4071 spill_reg_store[regno + i] = 0;
4075 /* Since value of X has changed,
4076 forget any value previously copied from it. */
4078 while (nr-- > 0)
4079 /* But don't forget a copy if this is the output reload
4080 that establishes the copy's validity. */
4081 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
4082 reg_last_reload_reg[regno + nr] = 0;
4085 /* The following HARD_REG_SETs indicate when each hard register is
4086 used for a reload of various parts of the current insn. */
4088 /* If reg is unavailable for all reloads. */
4089 static HARD_REG_SET reload_reg_unavailable;
4090 /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4091 static HARD_REG_SET reload_reg_used;
4092 /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4093 static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4094 /* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I. */
4095 static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
4096 /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4097 static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4098 /* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I. */
4099 static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
4100 /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4101 static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4102 /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4103 static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4104 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4105 static HARD_REG_SET reload_reg_used_in_op_addr;
4106 /* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4107 static HARD_REG_SET reload_reg_used_in_op_addr_reload;
4108 /* If reg is in use for a RELOAD_FOR_INSN reload. */
4109 static HARD_REG_SET reload_reg_used_in_insn;
4110 /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4111 static HARD_REG_SET reload_reg_used_in_other_addr;
4113 /* If reg is in use as a reload reg for any sort of reload. */
4114 static HARD_REG_SET reload_reg_used_at_all;
4116 /* If reg is in use as an inherited reload. We just mark the first register
4117 in the group. */
4118 static HARD_REG_SET reload_reg_used_for_inherit;
4120 /* Records which hard regs are used in any way, either as explicit use or
4121 by being allocated to a pseudo during any point of the current insn. */
4122 static HARD_REG_SET reg_used_in_insn;
4124 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4125 TYPE. MODE is used to indicate how many consecutive regs are
4126 actually used. */
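/* For instance (hypothetical values; assume hard_regno_nregs[4][DImode] is 2
   on the target): marking regno 4 for a RELOAD_FOR_INPUT reload of operand 0
   in DImode sets bits 4 and 5 in reload_reg_used_in_input[0] and in
   reload_reg_used_at_all.  */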
4128 static void
4129 mark_reload_reg_in_use (unsigned int regno, int opnum, enum reload_type type,
4130 enum machine_mode mode)
4132 unsigned int nregs = hard_regno_nregs[regno][mode];
4133 unsigned int i;
4135 for (i = regno; i < nregs + regno; i++)
4137 switch (type)
4139 case RELOAD_OTHER:
4140 SET_HARD_REG_BIT (reload_reg_used, i);
4141 break;
4143 case RELOAD_FOR_INPUT_ADDRESS:
4144 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4145 break;
4147 case RELOAD_FOR_INPADDR_ADDRESS:
4148 SET_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4149 break;
4151 case RELOAD_FOR_OUTPUT_ADDRESS:
4152 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4153 break;
4155 case RELOAD_FOR_OUTADDR_ADDRESS:
4156 SET_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4157 break;
4159 case RELOAD_FOR_OPERAND_ADDRESS:
4160 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4161 break;
4163 case RELOAD_FOR_OPADDR_ADDR:
4164 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4165 break;
4167 case RELOAD_FOR_OTHER_ADDRESS:
4168 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4169 break;
4171 case RELOAD_FOR_INPUT:
4172 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4173 break;
4175 case RELOAD_FOR_OUTPUT:
4176 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4177 break;
4179 case RELOAD_FOR_INSN:
4180 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
4181 break;
4184 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4188 /* Similarly, but show REGNO is no longer in use for a reload. */
4190 static void
4191 clear_reload_reg_in_use (unsigned int regno, int opnum,
4192 enum reload_type type, enum machine_mode mode)
4194 unsigned int nregs = hard_regno_nregs[regno][mode];
4195 unsigned int start_regno, end_regno, r;
4196 int i;
4197 /* A complication is that for some reload types, inheritance might
4198 allow multiple reloads of the same types to share a reload register.
4199 We set check_opnum if we have to check only reloads with the same
4200 operand number, and check_any if we have to check all reloads. */
4201 int check_opnum = 0;
4202 int check_any = 0;
4203 HARD_REG_SET *used_in_set;
4205 switch (type)
4207 case RELOAD_OTHER:
4208 used_in_set = &reload_reg_used;
4209 break;
4211 case RELOAD_FOR_INPUT_ADDRESS:
4212 used_in_set = &reload_reg_used_in_input_addr[opnum];
4213 break;
4215 case RELOAD_FOR_INPADDR_ADDRESS:
4216 check_opnum = 1;
4217 used_in_set = &reload_reg_used_in_inpaddr_addr[opnum];
4218 break;
4220 case RELOAD_FOR_OUTPUT_ADDRESS:
4221 used_in_set = &reload_reg_used_in_output_addr[opnum];
4222 break;
4224 case RELOAD_FOR_OUTADDR_ADDRESS:
4225 check_opnum = 1;
4226 used_in_set = &reload_reg_used_in_outaddr_addr[opnum];
4227 break;
4229 case RELOAD_FOR_OPERAND_ADDRESS:
4230 used_in_set = &reload_reg_used_in_op_addr;
4231 break;
4233 case RELOAD_FOR_OPADDR_ADDR:
4234 check_any = 1;
4235 used_in_set = &reload_reg_used_in_op_addr_reload;
4236 break;
4238 case RELOAD_FOR_OTHER_ADDRESS:
4239 used_in_set = &reload_reg_used_in_other_addr;
4240 check_any = 1;
4241 break;
4243 case RELOAD_FOR_INPUT:
4244 used_in_set = &reload_reg_used_in_input[opnum];
4245 break;
4247 case RELOAD_FOR_OUTPUT:
4248 used_in_set = &reload_reg_used_in_output[opnum];
4249 break;
4251 case RELOAD_FOR_INSN:
4252 used_in_set = &reload_reg_used_in_insn;
4253 break;
4254 default:
4255 gcc_unreachable ();
4257 /* We resolve conflicts with remaining reloads of the same type by
4258 excluding the intervals of reload registers used by them from the
4259 interval of freed reload registers. Since we only keep track of
4260 one set of interval bounds, we might have to exclude somewhat
4261 more than what would be necessary if we used a HARD_REG_SET here.
4262 But this should only happen very infrequently, so there should
4263 be no reason to worry about it. */
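/* Worked example (register numbers are made up): freeing regs 4..7 gives
   start_regno = 4, end_regno = 8.  A remaining reload of the same type
   occupying regs 4..5 raises start_regno to 6, so only regs 6 and 7 are
   cleared; one occupying regs 6..7 instead lowers end_regno to 6, so only
   regs 4 and 5 are cleared.  */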
4265 start_regno = regno;
4266 end_regno = regno + nregs;
4267 if (check_opnum || check_any)
4269 for (i = n_reloads - 1; i >= 0; i--)
4271 if (rld[i].when_needed == type
4272 && (check_any || rld[i].opnum == opnum)
4273 && rld[i].reg_rtx)
4275 unsigned int conflict_start = true_regnum (rld[i].reg_rtx);
4276 unsigned int conflict_end
4277 = (conflict_start
4278 + hard_regno_nregs[conflict_start][rld[i].mode]);
4280 /* If there is an overlap with the first to-be-freed register,
4281 adjust the interval start. */
4282 if (conflict_start <= start_regno && conflict_end > start_regno)
4283 start_regno = conflict_end;
4284 /* Otherwise, if there is a conflict with one of the other
4285 to-be-freed registers, adjust the interval end. */
4286 if (conflict_start > start_regno && conflict_start < end_regno)
4287 end_regno = conflict_start;
4292 for (r = start_regno; r < end_regno; r++)
4293 CLEAR_HARD_REG_BIT (*used_in_set, r);
4296 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
4297 specified by OPNUM and TYPE. */
4299 static int
4300 reload_reg_free_p (unsigned int regno, int opnum, enum reload_type type)
4302 int i;
4304 /* In use for a RELOAD_OTHER means it's not available for anything. */
4305 if (TEST_HARD_REG_BIT (reload_reg_used, regno)
4306 || TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
4307 return 0;
4309 switch (type)
4311 case RELOAD_OTHER:
4312 /* In use for anything means we can't use it for RELOAD_OTHER. */
4313 if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4314 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4315 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
4316 || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4317 return 0;
4319 for (i = 0; i < reload_n_operands; i++)
4320 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4321 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4322 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4323 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4324 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4325 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4326 return 0;
4328 return 1;
4330 case RELOAD_FOR_INPUT:
4331 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4332 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4333 return 0;
4335 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4336 return 0;
4338 /* If it is used for some other input, can't use it. */
4339 for (i = 0; i < reload_n_operands; i++)
4340 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4341 return 0;
4343 /* If it is used in a later operand's address, can't use it. */
4344 for (i = opnum + 1; i < reload_n_operands; i++)
4345 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4346 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4347 return 0;
4349 return 1;
4351 case RELOAD_FOR_INPUT_ADDRESS:
4352 /* Can't use a register if it is used for an input address for this
4353 operand or used as an input in an earlier one. */
4354 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
4355 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4356 return 0;
4358 for (i = 0; i < opnum; i++)
4359 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4360 return 0;
4362 return 1;
4364 case RELOAD_FOR_INPADDR_ADDRESS:
4365 /* Can't use a register if it is used for an input address
4366 for this operand or used as an input in an earlier
4367 one. */
4368 if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4369 return 0;
4371 for (i = 0; i < opnum; i++)
4372 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4373 return 0;
4375 return 1;
4377 case RELOAD_FOR_OUTPUT_ADDRESS:
4378 /* Can't use a register if it is used for an output address for this
4379 operand or used as an output in this or a later operand. Note
4380 that multiple output operands are emitted in reverse order, so
4381 the conflicting ones are those with lower indices. */
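/* For example (operand numbers are illustrative): with output operands 0, 1
   and 2, the stores are emitted in the order 2, 1, 0.  An output-address reg
   for operand 1 is needed when operand 1 is stored, so it may share a reg
   with operand 2's output reload, which has already been emitted by then,
   but not with those of operands 1 or 0; hence the loop below runs over
   i = 0 .. opnum.  */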
4382 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4383 return 0;
4385 for (i = 0; i <= opnum; i++)
4386 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4387 return 0;
4389 return 1;
4391 case RELOAD_FOR_OUTADDR_ADDRESS:
4392 /* Can't use a register if it is used for an output address
4393 for this operand or used as an output in this or a
4394 later operand. Note that multiple output operands are
4395 emitted in reverse order, so the conflicting ones are
4396 those with lower indices. */
4397 if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4398 return 0;
4400 for (i = 0; i <= opnum; i++)
4401 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4402 return 0;
4404 return 1;
4406 case RELOAD_FOR_OPERAND_ADDRESS:
4407 for (i = 0; i < reload_n_operands; i++)
4408 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4409 return 0;
4411 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4412 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4414 case RELOAD_FOR_OPADDR_ADDR:
4415 for (i = 0; i < reload_n_operands; i++)
4416 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4417 return 0;
4419 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
4421 case RELOAD_FOR_OUTPUT:
4422 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4423 outputs, or an operand address for this or an earlier output.
4424 Note that multiple output operands are emitted in reverse order,
4425 so the conflicting ones are those with higher indices. */
4426 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4427 return 0;
4429 for (i = 0; i < reload_n_operands; i++)
4430 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4431 return 0;
4433 for (i = opnum; i < reload_n_operands; i++)
4434 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4435 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4436 return 0;
4438 return 1;
4440 case RELOAD_FOR_INSN:
4441 for (i = 0; i < reload_n_operands; i++)
4442 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4443 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4444 return 0;
4446 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4447 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4449 case RELOAD_FOR_OTHER_ADDRESS:
4450 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4452 default:
4453 gcc_unreachable ();
4457 /* Return 1 if the value in reload reg REGNO, as used by a reload
4458 needed for the part of the insn specified by OPNUM and TYPE,
4459 is still available in REGNO at the end of the insn.
4461 We can assume that the reload reg was already tested for availability
4462 at the time it is needed, and we should not check this again,
4463 in case the reg has already been marked in use. */
4465 static int
4466 reload_reg_reaches_end_p (unsigned int regno, int opnum, enum reload_type type)
4468 int i;
4470 switch (type)
4472 case RELOAD_OTHER:
4473 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4474 its value must reach the end. */
4475 return 1;
4477 /* If this use is for part of the insn,
4478 its value reaches if no subsequent part uses the same register.
4479 Just like the above function, don't try to do this with lots
4480 of fallthroughs. */
4482 case RELOAD_FOR_OTHER_ADDRESS:
4483 /* Here we check for everything else, since these don't conflict
4484 with anything else and everything comes later. */
4486 for (i = 0; i < reload_n_operands; i++)
4487 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4488 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4489 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4490 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4491 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4492 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4493 return 0;
4495 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4496 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
4497 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4498 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4500 case RELOAD_FOR_INPUT_ADDRESS:
4501 case RELOAD_FOR_INPADDR_ADDRESS:
4502 /* Similar, except that we check only for this and subsequent inputs
4503 and the address of only subsequent inputs and we do not need
4504 to check for RELOAD_OTHER objects since they are known not to
4505 conflict. */
4507 for (i = opnum; i < reload_n_operands; i++)
4508 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4509 return 0;
4511 for (i = opnum + 1; i < reload_n_operands; i++)
4512 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4513 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4514 return 0;
4516 for (i = 0; i < reload_n_operands; i++)
4517 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4518 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4519 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4520 return 0;
4522 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4523 return 0;
4525 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4526 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4527 && !TEST_HARD_REG_BIT (reload_reg_used, regno));
4529 case RELOAD_FOR_INPUT:
4530 /* Similar to input address, except we start at the next operand for
4531 both input and input address and we do not check for
4532 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4533 would conflict. */
4535 for (i = opnum + 1; i < reload_n_operands; i++)
4536 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4537 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4538 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4539 return 0;
4541 /* ... fall through ... */
4543 case RELOAD_FOR_OPERAND_ADDRESS:
4544 /* Check outputs and their addresses. */
4546 for (i = 0; i < reload_n_operands; i++)
4547 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4548 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4549 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4550 return 0;
4552 return (!TEST_HARD_REG_BIT (reload_reg_used, regno));
4554 case RELOAD_FOR_OPADDR_ADDR:
4555 for (i = 0; i < reload_n_operands; i++)
4556 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4557 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4558 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4559 return 0;
4561 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4562 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4563 && !TEST_HARD_REG_BIT (reload_reg_used, regno));
4565 case RELOAD_FOR_INSN:
4566 /* These conflict with other outputs with RELOAD_OTHER. So
4567 we need only check for output addresses. */
4569 opnum = reload_n_operands;
4571 /* ... fall through ... */
4573 case RELOAD_FOR_OUTPUT:
4574 case RELOAD_FOR_OUTPUT_ADDRESS:
4575 case RELOAD_FOR_OUTADDR_ADDRESS:
4576 /* We already know these can't conflict with a later output. So the
4577 only thing to check are later output addresses.
4578 Note that multiple output operands are emitted in reverse order,
4579 so the conflicting ones are those with lower indices. */
4580 for (i = 0; i < opnum; i++)
4581 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4582 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4583 return 0;
4585 return 1;
4587 default:
4588 gcc_unreachable ();
4592 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
4593 Return 0 otherwise.
4595 This function uses the same algorithm as reload_reg_free_p above. */
4597 static int
4598 reloads_conflict (int r1, int r2)
4600 enum reload_type r1_type = rld[r1].when_needed;
4601 enum reload_type r2_type = rld[r2].when_needed;
4602 int r1_opnum = rld[r1].opnum;
4603 int r2_opnum = rld[r2].opnum;
4605 /* RELOAD_OTHER conflicts with everything. */
4606 if (r2_type == RELOAD_OTHER)
4607 return 1;
4609 /* Otherwise, check conflicts differently for each type. */
4611 switch (r1_type)
4613 case RELOAD_FOR_INPUT:
4614 return (r2_type == RELOAD_FOR_INSN
4615 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
4616 || r2_type == RELOAD_FOR_OPADDR_ADDR
4617 || r2_type == RELOAD_FOR_INPUT
4618 || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
4619 || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
4620 && r2_opnum > r1_opnum));
4622 case RELOAD_FOR_INPUT_ADDRESS:
4623 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
4624 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4626 case RELOAD_FOR_INPADDR_ADDRESS:
4627 return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
4628 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4630 case RELOAD_FOR_OUTPUT_ADDRESS:
4631 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
4632 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
4634 case RELOAD_FOR_OUTADDR_ADDRESS:
4635 return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
4636 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
4638 case RELOAD_FOR_OPERAND_ADDRESS:
4639 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
4640 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4642 case RELOAD_FOR_OPADDR_ADDR:
4643 return (r2_type == RELOAD_FOR_INPUT
4644 || r2_type == RELOAD_FOR_OPADDR_ADDR);
4646 case RELOAD_FOR_OUTPUT:
4647 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
4648 || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
4649 || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
4650 && r2_opnum >= r1_opnum));
4652 case RELOAD_FOR_INSN:
4653 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
4654 || r2_type == RELOAD_FOR_INSN
4655 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4657 case RELOAD_FOR_OTHER_ADDRESS:
4658 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
4660 case RELOAD_OTHER:
4661 return 1;
4663 default:
4664 gcc_unreachable ();
4668 /* Indexed by reload number, 1 if incoming value
4669 inherited from previous insns. */
4670 char reload_inherited[MAX_RELOADS];
4672 /* For an inherited reload, this is the insn the reload was inherited from,
4673 if we know it. Otherwise, this is 0. */
4674 rtx reload_inheritance_insn[MAX_RELOADS];
4676 /* If nonzero, this is a place to get the value of the reload,
4677 rather than using reload_in. */
4678 rtx reload_override_in[MAX_RELOADS];
4680 /* For each reload, the hard register number of the register used,
4681 or -1 if we did not need a register for this reload. */
4682 int reload_spill_index[MAX_RELOADS];
4684 /* Subroutine of free_for_value_p, used to check a single register.
4685 START_REGNO is the starting regno of the full reload register
4686 (possibly comprising multiple hard registers) that we are considering. */
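/* For example (hypothetical hard register numbers): if the candidate reload
   register is a DImode value occupying hard regs 4 and 5 on a 32-bit target,
   free_for_value_p below calls this routine once per hard reg, with
   START_REGNO == 4 both times and REGNO equal to 4 and to 5.  */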
4688 static int
4689 reload_reg_free_for_value_p (int start_regno, int regno, int opnum,
4690 enum reload_type type, rtx value, rtx out,
4691 int reloadnum, int ignore_address_reloads)
4693 int time1;
4694 /* Set if we see an input reload that must not share its reload register
4695 with any new earlyclobber, but might otherwise share the reload
4696 register with an output or input-output reload. */
4697 int check_earlyclobber = 0;
4698 int i;
4699 int copy = 0;
4701 if (TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
4702 return 0;
4704 if (out == const0_rtx)
4706 copy = 1;
4707 out = NULL_RTX;
4710 /* We use some pseudo 'time' value to check if the lifetimes of the
4711 new register use would overlap with that of a previous reload
4712 that is not read-only or uses a different value.
4713 The 'time' used doesn't have to be linear in any shape or form, just
4714 monotonic.
4715 Some reload types use different 'buckets' for each operand.
4716 So there are MAX_RECOG_OPERANDS different time values for each
4717 such reload type.
4718 We compute TIME1 as the time when the register for the prospective
4719 new reload ceases to be live, and TIME2 for each existing
4720 reload as the time when the reload register of that reload
4721 becomes live.
4722 Where there is little to be gained by exact lifetime calculations,
4723 we just make conservative assumptions, i.e. a longer lifetime;
4724 this is done in the 'default:' cases. */
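/* A small worked instance of the numbering (operand number 2 is arbitrary):
   the TIME2 buckets for operand 2 are 2*4 + 2 = 10 for its
   RELOAD_FOR_INPADDR_ADDRESS reload, 2*4 + 3 = 11 for RELOAD_FOR_INPUT_ADDRESS
   and 2*4 + 4 = 12 for RELOAD_FOR_INPUT, all smaller than the
   MAX_RECOG_OPERANDS * 4 + 1 and later values used for the operand-address,
   insn and output buckets, so per-operand input lifetimes never collide with
   them by accident.  */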
4725 switch (type)
4727 case RELOAD_FOR_OTHER_ADDRESS:
4728 /* RELOAD_FOR_OTHER_ADDRESS conflicts with RELOAD_OTHER reloads. */
4729 time1 = copy ? 0 : 1;
4730 break;
4731 case RELOAD_OTHER:
4732 time1 = copy ? 1 : MAX_RECOG_OPERANDS * 5 + 5;
4733 break;
4734 /* For each input, we may have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
4735 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT. By adding 0 / 1 / 2 ,
4736 respectively, to the time values for these, we get distinct time
4737 values. To get distinct time values for each operand, we have to
4738 multiply opnum by at least three. We round that up to four because
4739 multiply by four is often cheaper. */
4740 case RELOAD_FOR_INPADDR_ADDRESS:
4741 time1 = opnum * 4 + 2;
4742 break;
4743 case RELOAD_FOR_INPUT_ADDRESS:
4744 time1 = opnum * 4 + 3;
4745 break;
4746 case RELOAD_FOR_INPUT:
4747 /* All RELOAD_FOR_INPUT reloads remain live till the instruction
4748 executes (inclusive). */
4749 time1 = copy ? opnum * 4 + 4 : MAX_RECOG_OPERANDS * 4 + 3;
4750 break;
4751 case RELOAD_FOR_OPADDR_ADDR:
4752 /* opnum * 4 + 4
4753 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
4754 time1 = MAX_RECOG_OPERANDS * 4 + 1;
4755 break;
4756 case RELOAD_FOR_OPERAND_ADDRESS:
4757 /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
4758 is executed. */
4759 time1 = copy ? MAX_RECOG_OPERANDS * 4 + 2 : MAX_RECOG_OPERANDS * 4 + 3;
4760 break;
4761 case RELOAD_FOR_OUTADDR_ADDRESS:
4762 time1 = MAX_RECOG_OPERANDS * 4 + 4 + opnum;
4763 break;
4764 case RELOAD_FOR_OUTPUT_ADDRESS:
4765 time1 = MAX_RECOG_OPERANDS * 4 + 5 + opnum;
4766 break;
4767 default:
4768 time1 = MAX_RECOG_OPERANDS * 5 + 5;
4771 for (i = 0; i < n_reloads; i++)
4773 rtx reg = rld[i].reg_rtx;
4774 if (reg && REG_P (reg)
4775 && ((unsigned) regno - true_regnum (reg)
4776 <= hard_regno_nregs[REGNO (reg)][GET_MODE (reg)] - (unsigned) 1)
4777 && i != reloadnum)
4779 rtx other_input = rld[i].in;
4781 /* If the other reload loads the same input value, that
4782 will not cause a conflict only if it's loading it into
4783 the same register. */
4784 if (true_regnum (reg) != start_regno)
4785 other_input = NULL_RTX;
4786 if (! other_input || ! rtx_equal_p (other_input, value)
4787 || rld[i].out || out)
4789 int time2;
4790 switch (rld[i].when_needed)
4792 case RELOAD_FOR_OTHER_ADDRESS:
4793 time2 = 0;
4794 break;
4795 case RELOAD_FOR_INPADDR_ADDRESS:
4796 /* find_reloads makes sure that a
4797 RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is only used
4798 by at most one - the first -
4799 RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS . If the
4800 address reload is inherited, the address address reload
4801 goes away, so we can ignore this conflict. */
4802 if (type == RELOAD_FOR_INPUT_ADDRESS && reloadnum == i + 1
4803 && ignore_address_reloads
4804 /* Unless the RELOAD_FOR_INPUT is an auto_inc expression.
4805 Then the address address is still needed to store
4806 back the new address. */
4807 && ! rld[reloadnum].out)
4808 continue;
4809 /* Likewise, if a RELOAD_FOR_INPUT can inherit a value, its
4810 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS
4811 reloads go away. */
4812 if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
4813 && ignore_address_reloads
4814 /* Unless we are reloading an auto_inc expression. */
4815 && ! rld[reloadnum].out)
4816 continue;
4817 time2 = rld[i].opnum * 4 + 2;
4818 break;
4819 case RELOAD_FOR_INPUT_ADDRESS:
4820 if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
4821 && ignore_address_reloads
4822 && ! rld[reloadnum].out)
4823 continue;
4824 time2 = rld[i].opnum * 4 + 3;
4825 break;
4826 case RELOAD_FOR_INPUT:
4827 time2 = rld[i].opnum * 4 + 4;
4828 check_earlyclobber = 1;
4829 break;
4830 /* rld[i].opnum * 4 + 4 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4
4831 == MAX_RECOG_OPERANDS * 4 */
4832 case RELOAD_FOR_OPADDR_ADDR:
4833 if (type == RELOAD_FOR_OPERAND_ADDRESS && reloadnum == i + 1
4834 && ignore_address_reloads
4835 && ! rld[reloadnum].out)
4836 continue;
4837 time2 = MAX_RECOG_OPERANDS * 4 + 1;
4838 break;
4839 case RELOAD_FOR_OPERAND_ADDRESS:
4840 time2 = MAX_RECOG_OPERANDS * 4 + 2;
4841 check_earlyclobber = 1;
4842 break;
4843 case RELOAD_FOR_INSN:
4844 time2 = MAX_RECOG_OPERANDS * 4 + 3;
4845 break;
4846 case RELOAD_FOR_OUTPUT:
4847 /* All RELOAD_FOR_OUTPUT reloads become live just after the
4848 instruction is executed. */
4849 time2 = MAX_RECOG_OPERANDS * 4 + 4;
4850 break;
4851 /* The first RELOAD_FOR_OUTADDR_ADDRESS reload conflicts with
4852 the RELOAD_FOR_OUTPUT reloads, so assign it the same time
4853 value. */
4854 case RELOAD_FOR_OUTADDR_ADDRESS:
4855 if (type == RELOAD_FOR_OUTPUT_ADDRESS && reloadnum == i + 1
4856 && ignore_address_reloads
4857 && ! rld[reloadnum].out)
4858 continue;
4859 time2 = MAX_RECOG_OPERANDS * 4 + 4 + rld[i].opnum;
4860 break;
4861 case RELOAD_FOR_OUTPUT_ADDRESS:
4862 time2 = MAX_RECOG_OPERANDS * 4 + 5 + rld[i].opnum;
4863 break;
4864 case RELOAD_OTHER:
4865 /* If there is no conflict in the input part, handle this
4866 like an output reload. */
4867 if (! rld[i].in || rtx_equal_p (other_input, value))
4869 time2 = MAX_RECOG_OPERANDS * 4 + 4;
4870 /* Earlyclobbered outputs must conflict with inputs. */
4871 if (earlyclobber_operand_p (rld[i].out))
4872 time2 = MAX_RECOG_OPERANDS * 4 + 3;
4874 break;
4876 time2 = 1;
4877 /* RELOAD_OTHER might be live beyond instruction execution,
4878 but this is not obvious when we set time2 = 1. So check
4879 here if there might be a problem with the new reload
4880 clobbering the register used by the RELOAD_OTHER. */
4881 if (out)
4882 return 0;
4883 break;
4884 default:
4885 return 0;
4887 if ((time1 >= time2
4888 && (! rld[i].in || rld[i].out
4889 || ! rtx_equal_p (other_input, value)))
4890 || (out && rld[reloadnum].out_reg
4891 && time2 >= MAX_RECOG_OPERANDS * 4 + 3))
4892 return 0;
4897 /* Earlyclobbered outputs must conflict with inputs. */
4898 if (check_earlyclobber && out && earlyclobber_operand_p (out))
4899 return 0;
4901 return 1;
4904 /* Return 1 if the value in reload reg REGNO, as used by a reload
4905 needed for the part of the insn specified by OPNUM and TYPE,
4906 may be used to load VALUE into it.
4908 MODE is the mode in which the register is used, this is needed to
4909 determine how many hard regs to test.
4911 Other read-only reloads with the same value do not conflict
4912 unless OUT is nonzero and these other reloads have to live while
4913 output reloads live.
4914 If OUT is CONST0_RTX, this is a special case: it means that the
4915 test should not be for using register REGNO as reload register, but
4916 for copying from register REGNO into the reload register.
4918 RELOADNUM is the number of the reload we want to load this value for;
4919 a reload does not conflict with itself.
4921 When IGNORE_ADDRESS_RELOADS is set, we cannot have conflicts with
4922 reloads that load an address for the very reload we are considering.
4924 The caller has to make sure that there is no conflict with the return
4925 register. */
4927 static int
4928 free_for_value_p (int regno, enum machine_mode mode, int opnum,
4929 enum reload_type type, rtx value, rtx out, int reloadnum,
4930 int ignore_address_reloads)
4932 int nregs = hard_regno_nregs[regno][mode];
4933 while (nregs-- > 0)
4934 if (! reload_reg_free_for_value_p (regno, regno + nregs, opnum, type,
4935 value, out, reloadnum,
4936 ignore_address_reloads))
4937 return 0;
4938 return 1;
4941 /* Return nonzero if the rtx X is invariant over the current function. */
4942 /* ??? Actually, the places where we use this expect exactly what
4943 * is tested here, and not everything that is function invariant. In
4944 * particular, the frame pointer and arg pointer are special cased;
4945 * pic_offset_table_rtx is not, and this will cause aborts when we
4946 * go to spill these things to memory. */
4948 static int
4949 function_invariant_p (rtx x)
4951 if (CONSTANT_P (x))
4952 return 1;
4953 if (x == frame_pointer_rtx || x == arg_pointer_rtx)
4954 return 1;
4955 if (GET_CODE (x) == PLUS
4956 && (XEXP (x, 0) == frame_pointer_rtx || XEXP (x, 0) == arg_pointer_rtx)
4957 && CONSTANT_P (XEXP (x, 1)))
4958 return 1;
4959 return 0;
4962 /* Determine whether the reload reg X overlaps any rtx'es used for
4963 overriding inheritance. Return nonzero if so. */
4965 static int
4966 conflicts_with_override (rtx x)
4968 int i;
4969 for (i = 0; i < n_reloads; i++)
4970 if (reload_override_in[i]
4971 && reg_overlap_mentioned_p (x, reload_override_in[i]))
4972 return 1;
4973 return 0;
4976 /* Give an error message saying we failed to find a reload for INSN,
4977 and clear out reload R. */
4978 static void
4979 failed_reload (rtx insn, int r)
4981 if (asm_noperands (PATTERN (insn)) < 0)
4982 /* It's the compiler's fault. */
4983 fatal_insn ("could not find a spill register", insn);
4985 /* It's the user's fault; the operand's mode and constraint
4986 don't match. Disable this reload so we don't crash in final. */
4987 error_for_asm (insn,
4988 "%<asm%> operand constraint incompatible with operand size");
4989 rld[r].in = 0;
4990 rld[r].out = 0;
4991 rld[r].reg_rtx = 0;
4992 rld[r].optional = 1;
4993 rld[r].secondary_p = 1;
4996 /* I is the index in SPILL_REG_RTX of the reload register we are to allocate
4997 for reload R. If it's valid, get an rtx for it. Return nonzero if
4998 successful. */
4999 static int
5000 set_reload_reg (int i, int r)
5002 int regno;
5003 rtx reg = spill_reg_rtx[i];
5005 if (reg == 0 || GET_MODE (reg) != rld[r].mode)
5006 spill_reg_rtx[i] = reg
5007 = gen_rtx_REG (rld[r].mode, spill_regs[i]);
5009 regno = true_regnum (reg);
5011 /* Detect when the reload reg can't hold the reload mode.
5012 This used to be one `if', but the Sequent compiler can't handle that. */
5013 if (HARD_REGNO_MODE_OK (regno, rld[r].mode))
5015 enum machine_mode test_mode = VOIDmode;
5016 if (rld[r].in)
5017 test_mode = GET_MODE (rld[r].in);
5018 /* If rld[r].in has VOIDmode, it means we will load it
5019 in whatever mode the reload reg has: to wit, rld[r].mode.
5020 We have already tested that for validity. */
5021 /* Aside from that, we need to test that the expressions
5022 to reload from or into have modes which are valid for this
5023 reload register. Otherwise the reload insns would be invalid. */
5024 if (! (rld[r].in != 0 && test_mode != VOIDmode
5025 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
5026 if (! (rld[r].out != 0
5027 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (rld[r].out))))
5029 /* The reg is OK. */
5030 last_spill_reg = i;
5032 /* Mark as in use for this insn the reload regs we use
5033 for this. */
5034 mark_reload_reg_in_use (spill_regs[i], rld[r].opnum,
5035 rld[r].when_needed, rld[r].mode);
5037 rld[r].reg_rtx = reg;
5038 reload_spill_index[r] = spill_regs[i];
5039 return 1;
5042 return 0;
5045 /* Find a spill register to use as a reload register for reload R.
5046 LAST_RELOAD is nonzero if this is the last reload for the insn being
5047 processed.
5049 Set rld[R].reg_rtx to the register allocated.
5051 We return 1 if successful, or 0 if we couldn't find a spill reg and
5052 we didn't change anything. */
5054 static int
5055 allocate_reload_reg (struct insn_chain *chain ATTRIBUTE_UNUSED, int r,
5056 int last_reload)
5058 int i, pass, count;
5060 /* If we put this reload ahead, thinking it is a group,
5061 then insist on finding a group. Otherwise we can grab a
5062 reg that some other reload needs.
5063 (That can happen when we have a 68000 DATA_OR_FP_REG
5064 which is a group of data regs or one fp reg.)
5065 We need not be so restrictive if there are no more reloads
5066 for this insn.
5068 ??? Really it would be nicer to have smarter handling
5069 for that kind of reg class, where a problem like this is normal.
5070 Perhaps those classes should be avoided for reloading
5071 by use of more alternatives. */
5073 int force_group = rld[r].nregs > 1 && ! last_reload;
5075 /* If we want a single register and haven't yet found one,
5076 take any reg in the right class and not in use.
5077 If we want a consecutive group, here is where we look for it.
5079 We use two passes so we can first look for reload regs to
5080 reuse, which are already in use for other reloads in this insn,
5081 and only then use additional registers.
5082 I think that maximizing reuse is needed to make sure we don't
5083 run out of reload regs. Suppose we have three reloads, and
5084 reloads A and B can share regs. These need two regs.
5085 Suppose A and B are given different regs.
5086 That leaves none for C. */
5087 for (pass = 0; pass < 2; pass++)
5089 /* I is the index in spill_regs.
5090 We advance it round-robin between insns to use all spill regs
5091 equally, so that inherited reloads have a chance
5092 of leapfrogging each other. */
5094 i = last_spill_reg;
5096 for (count = 0; count < n_spills; count++)
5098 int class = (int) rld[r].class;
5099 int regnum;
5101 i++;
5102 if (i >= n_spills)
5103 i -= n_spills;
5104 regnum = spill_regs[i];
5106 if ((reload_reg_free_p (regnum, rld[r].opnum,
5107 rld[r].when_needed)
5108 || (rld[r].in
5109 /* We check reload_reg_used to make sure we
5110 don't clobber the return register. */
5111 && ! TEST_HARD_REG_BIT (reload_reg_used, regnum)
5112 && free_for_value_p (regnum, rld[r].mode, rld[r].opnum,
5113 rld[r].when_needed, rld[r].in,
5114 rld[r].out, r, 1)))
5115 && TEST_HARD_REG_BIT (reg_class_contents[class], regnum)
5116 && HARD_REGNO_MODE_OK (regnum, rld[r].mode)
5117 /* Look first for regs to share, then for unshared. But
5118 don't share regs used for inherited reloads; they are
5119 the ones we want to preserve. */
5120 && (pass
5121 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
5122 regnum)
5123 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
5124 regnum))))
5126 int nr = hard_regno_nregs[regnum][rld[r].mode];
5127 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
5128 (on 68000) got us two FP regs. If NR is 1,
5129 we would reject both of them. */
5130 if (force_group)
5131 nr = rld[r].nregs;
5132 /* If we need only one reg, we have already won. */
5133 if (nr == 1)
5135 /* But reject a single reg if we demand a group. */
5136 if (force_group)
5137 continue;
5138 break;
5140 /* Otherwise check that as many consecutive regs as we need
5141 are available here. */
5142 while (nr > 1)
5144 int regno = regnum + nr - 1;
5145 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
5146 && spill_reg_order[regno] >= 0
5147 && reload_reg_free_p (regno, rld[r].opnum,
5148 rld[r].when_needed)))
5149 break;
5150 nr--;
5152 if (nr == 1)
5153 break;
5157 /* If we found something on pass 1, omit pass 2. */
5158 if (count < n_spills)
5159 break;
5162 /* We should have found a spill register by now. */
5163 if (count >= n_spills)
5164 return 0;
5166 /* I is the index in SPILL_REG_RTX of the reload register we are to
5167 allocate. Get an rtx for it and find its register number. */
5169 return set_reload_reg (i, r);
5172 /* Initialize all the tables needed to allocate reload registers.
5173 CHAIN is the insn currently being processed; SAVE_RELOAD_REG_RTX
5174 is the array we use to restore the reg_rtx field for every reload. */
5176 static void
5177 choose_reload_regs_init (struct insn_chain *chain, rtx *save_reload_reg_rtx)
5179 int i;
5181 for (i = 0; i < n_reloads; i++)
5182 rld[i].reg_rtx = save_reload_reg_rtx[i];
5184 memset (reload_inherited, 0, MAX_RELOADS);
5185 memset (reload_inheritance_insn, 0, MAX_RELOADS * sizeof (rtx));
5186 memset (reload_override_in, 0, MAX_RELOADS * sizeof (rtx));
5188 CLEAR_HARD_REG_SET (reload_reg_used);
5189 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
5190 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
5191 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
5192 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
5193 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
5195 CLEAR_HARD_REG_SET (reg_used_in_insn);
5197 HARD_REG_SET tmp;
5198 REG_SET_TO_HARD_REG_SET (tmp, &chain->live_throughout);
5199 IOR_HARD_REG_SET (reg_used_in_insn, tmp);
5200 REG_SET_TO_HARD_REG_SET (tmp, &chain->dead_or_set);
5201 IOR_HARD_REG_SET (reg_used_in_insn, tmp);
5202 compute_use_by_pseudos (&reg_used_in_insn, &chain->live_throughout);
5203 compute_use_by_pseudos (&reg_used_in_insn, &chain->dead_or_set);
5206 for (i = 0; i < reload_n_operands; i++)
5208 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
5209 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
5210 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
5211 CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
5212 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
5213 CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
5216 COMPL_HARD_REG_SET (reload_reg_unavailable, chain->used_spill_regs);
5218 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5220 for (i = 0; i < n_reloads; i++)
5221 /* If we have already decided to use a certain register,
5222 don't use it in another way. */
5223 if (rld[i].reg_rtx)
5224 mark_reload_reg_in_use (REGNO (rld[i].reg_rtx), rld[i].opnum,
5225 rld[i].when_needed, rld[i].mode);
5228 /* Assign hard reg targets for the pseudo-registers we must reload
5229 into hard regs for this insn.
5230 Also output the instructions to copy them in and out of the hard regs.
5232 For machines with register classes, we are responsible for
5233 finding a reload reg in the proper class. */
5235 static void
5236 choose_reload_regs (struct insn_chain *chain)
5238 rtx insn = chain->insn;
5239 int i, j;
5240 unsigned int max_group_size = 1;
5241 enum reg_class group_class = NO_REGS;
5242 int pass, win, inheritance;
5244 rtx save_reload_reg_rtx[MAX_RELOADS];
5246 /* In order to be certain of getting the registers we need,
5247 we must sort the reloads into order of increasing register class.
5248 Then our grabbing of reload registers will parallel the process
5249 that provided the reload registers.
5251 Also note whether any of the reloads wants a consecutive group of regs.
5252 If so, record the maximum size of the group desired and what
5253 register class contains all the groups needed by this insn. */
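/* For instance (hypothetical classes): if one reload wants a two-register
   group in FLOAT_REGS and another wants a two-register group in
   GENERAL_REGS, MAX_GROUP_SIZE ends up as 2 and GROUP_CLASS becomes the
   smallest class containing both, as computed by reg_class_superunion.  */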
5255 for (j = 0; j < n_reloads; j++)
5257 reload_order[j] = j;
5258 reload_spill_index[j] = -1;
5260 if (rld[j].nregs > 1)
5262 max_group_size = MAX (rld[j].nregs, max_group_size);
5263 group_class
5264 = reg_class_superunion[(int) rld[j].class][(int) group_class];
5267 save_reload_reg_rtx[j] = rld[j].reg_rtx;
5270 if (n_reloads > 1)
5271 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
5273 /* If -O, try first with inheritance, then turning it off.
5274 If not -O, don't do inheritance.
5275 Using inheritance when not optimizing leads to paradoxes
5276 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5277 because one side of the comparison might be inherited. */
5278 win = 0;
5279 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
5281 choose_reload_regs_init (chain, save_reload_reg_rtx);
5283 /* Process the reloads in order of preference just found.
5284 Beyond this point, subregs can be found in reload_reg_rtx.
5286 This used to look for an existing reloaded home for all of the
5287 reloads, and only then perform any new reloads. But that could lose
5288 if the reloads were done out of reg-class order because a later
5289 reload with a looser constraint might have an old home in a register
5290 needed by an earlier reload with a tighter constraint.
5292 To solve this, we make two passes over the reloads, in the order
5293 described above. In the first pass we try to inherit a reload
5294 from a previous insn. If there is a later reload that needs a
5295 class that is a proper subset of the class being processed, we must
5296 also allocate a spill register during the first pass.
5298 Then make a second pass over the reloads to allocate any reloads
5299 that haven't been given registers yet. */
5301 for (j = 0; j < n_reloads; j++)
5303 int r = reload_order[j];
5304 rtx search_equiv = NULL_RTX;
5306 /* Ignore reloads that got marked inoperative. */
5307 if (rld[r].out == 0 && rld[r].in == 0
5308 && ! rld[r].secondary_p)
5309 continue;
5311 /* If find_reloads chose to use reload_in or reload_out as a reload
5312 register, we don't need to choose one. Otherwise, try even if it
5313 found one since we might save an insn if we find the value lying
5314 around.
5315 Try also when reload_in is a pseudo without a hard reg. */
5316 if (rld[r].in != 0 && rld[r].reg_rtx != 0
5317 && (rtx_equal_p (rld[r].in, rld[r].reg_rtx)
5318 || (rtx_equal_p (rld[r].out, rld[r].reg_rtx)
5319 && !MEM_P (rld[r].in)
5320 && true_regnum (rld[r].in) < FIRST_PSEUDO_REGISTER)))
5321 continue;
5323 #if 0 /* No longer needed for correct operation.
5324 It might give better code, or might not; worth an experiment? */
5325 /* If this is an optional reload, we can't inherit from earlier insns
5326 until we are sure that any non-optional reloads have been allocated.
5327 The following code takes advantage of the fact that optional reloads
5328 are at the end of reload_order. */
5329 if (rld[r].optional != 0)
5330 for (i = 0; i < j; i++)
5331 if ((rld[reload_order[i]].out != 0
5332 || rld[reload_order[i]].in != 0
5333 || rld[reload_order[i]].secondary_p)
5334 && ! rld[reload_order[i]].optional
5335 && rld[reload_order[i]].reg_rtx == 0)
5336 allocate_reload_reg (chain, reload_order[i], 0);
5337 #endif
5339 /* First see if this pseudo is already available as reloaded
5340 for a previous insn. We cannot try to inherit for reloads
5341 that are smaller than the maximum number of registers needed
5342 for groups unless the register we would allocate cannot be used
5343 for the groups.
5345 We could check here to see if this is a secondary reload for
5346 an object that is already in a register of the desired class.
5347 This would avoid the need for the secondary reload register.
5348 But this is complex because we can't easily determine what
5349 objects might want to be loaded via this reload. So let a
5350 register be allocated here. In `emit_reload_insns' we suppress
5351 one of the loads in the case described above. */
5353 if (inheritance)
5355 int byte = 0;
5356 int regno = -1;
5357 enum machine_mode mode = VOIDmode;
5359 if (rld[r].in == 0)
5361 else if (REG_P (rld[r].in))
5363 regno = REGNO (rld[r].in);
5364 mode = GET_MODE (rld[r].in);
5366 else if (REG_P (rld[r].in_reg))
5368 regno = REGNO (rld[r].in_reg);
5369 mode = GET_MODE (rld[r].in_reg);
5371 else if (GET_CODE (rld[r].in_reg) == SUBREG
5372 && REG_P (SUBREG_REG (rld[r].in_reg)))
5374 byte = SUBREG_BYTE (rld[r].in_reg);
5375 regno = REGNO (SUBREG_REG (rld[r].in_reg));
5376 if (regno < FIRST_PSEUDO_REGISTER)
5377 regno = subreg_regno (rld[r].in_reg);
5378 mode = GET_MODE (rld[r].in_reg);
5380 #ifdef AUTO_INC_DEC
5381 else if ((GET_CODE (rld[r].in_reg) == PRE_INC
5382 || GET_CODE (rld[r].in_reg) == PRE_DEC
5383 || GET_CODE (rld[r].in_reg) == POST_INC
5384 || GET_CODE (rld[r].in_reg) == POST_DEC)
5385 && REG_P (XEXP (rld[r].in_reg, 0)))
5387 regno = REGNO (XEXP (rld[r].in_reg, 0));
5388 mode = GET_MODE (XEXP (rld[r].in_reg, 0));
5389 rld[r].out = rld[r].in;
5391 #endif
5392 #if 0
5393 /* This won't work, since REGNO can be a pseudo reg number.
5394 Also, it takes much more hair to keep track of all the things
5395 that can invalidate an inherited reload of part of a pseudoreg. */
5396 else if (GET_CODE (rld[r].in) == SUBREG
5397 && REG_P (SUBREG_REG (rld[r].in)))
5398 regno = subreg_regno (rld[r].in);
5399 #endif
5401 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
5403 enum reg_class class = rld[r].class, last_class;
5404 rtx last_reg = reg_last_reload_reg[regno];
5405 enum machine_mode need_mode;
5407 i = REGNO (last_reg);
5408 i += subreg_regno_offset (i, GET_MODE (last_reg), byte, mode);
5409 last_class = REGNO_REG_CLASS (i);
5411 if (byte == 0)
5412 need_mode = mode;
5413 else
5414 need_mode
5415 = smallest_mode_for_size (GET_MODE_SIZE (mode) + byte,
5416 GET_MODE_CLASS (mode));
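/* Worked example (assuming a typical target where HImode is two bytes):
   inheriting (subreg:HI (reg:SI ...) 2) gives MODE == HImode and BYTE == 2,
   so NEED_MODE is smallest_mode_for_size (2 + 2, MODE_INT), i.e. SImode;
   the previously reloaded register must be at least that wide.  */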
5418 if (
5419 #ifdef CANNOT_CHANGE_MODE_CLASS
5420 (!REG_CANNOT_CHANGE_MODE_P (i, GET_MODE (last_reg),
5421 need_mode)
5423 #endif
5424 (GET_MODE_SIZE (GET_MODE (last_reg))
5425 >= GET_MODE_SIZE (need_mode))
5426 #ifdef CANNOT_CHANGE_MODE_CLASS
5428 #endif
5429 && reg_reloaded_contents[i] == regno
5430 && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
5431 && HARD_REGNO_MODE_OK (i, rld[r].mode)
5432 && (TEST_HARD_REG_BIT (reg_class_contents[(int) class], i)
5433 /* Even if we can't use this register as a reload
5434 register, we might use it for reload_override_in,
5435 if copying it to the desired class is cheap
5436 enough. */
5437 || ((REGISTER_MOVE_COST (mode, last_class, class)
5438 < MEMORY_MOVE_COST (mode, class, 1))
5439 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5440 && (SECONDARY_INPUT_RELOAD_CLASS (class, mode,
5441 last_reg)
5442 == NO_REGS)
5443 #endif
5444 #ifdef SECONDARY_MEMORY_NEEDED
5445 && ! SECONDARY_MEMORY_NEEDED (last_class, class,
5446 mode)
5447 #endif
5450 && (rld[r].nregs == max_group_size
5451 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
5453 && free_for_value_p (i, rld[r].mode, rld[r].opnum,
5454 rld[r].when_needed, rld[r].in,
5455 const0_rtx, r, 1))
5457 /* If a group is needed, verify that all the subsequent
5458 registers still have their values intact. */
5459 int nr = hard_regno_nregs[i][rld[r].mode];
5460 int k;
5462 for (k = 1; k < nr; k++)
5463 if (reg_reloaded_contents[i + k] != regno
5464 || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))
5465 break;
5467 if (k == nr)
5469 int i1;
5470 int bad_for_class;
5472 last_reg = (GET_MODE (last_reg) == mode
5473 ? last_reg : gen_rtx_REG (mode, i));
5475 bad_for_class = 0;
5476 for (k = 0; k < nr; k++)
5477 bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].class],
5478 i+k);
5480 /* We found a register that contains the
5481 value we need. If this register is the
5482 same as an `earlyclobber' operand of the
5483 current insn, just mark it as a place to
5484 reload from since we can't use it as the
5485 reload register itself. */
5487 for (i1 = 0; i1 < n_earlyclobbers; i1++)
5488 if (reg_overlap_mentioned_for_reload_p
5489 (reg_last_reload_reg[regno],
5490 reload_earlyclobbers[i1]))
5491 break;
5493 if (i1 != n_earlyclobbers
5494 || ! (free_for_value_p (i, rld[r].mode,
5495 rld[r].opnum,
5496 rld[r].when_needed, rld[r].in,
5497 rld[r].out, r, 1))
5498 /* Don't use it if we'd clobber a pseudo reg. */
5499 || (TEST_HARD_REG_BIT (reg_used_in_insn, i)
5500 && rld[r].out
5501 && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
5502 /* Don't clobber the frame pointer. */
5503 || (i == HARD_FRAME_POINTER_REGNUM
5504 && frame_pointer_needed
5505 && rld[r].out)
5506 /* Don't really use the inherited spill reg
5507 if we need it wider than we've got it. */
5508 || (GET_MODE_SIZE (rld[r].mode)
5509 > GET_MODE_SIZE (mode))
5510 || bad_for_class
5512 /* If find_reloads chose reload_out as reload
5513 register, stay with it - that leaves the
5514 inherited register for subsequent reloads. */
5515 || (rld[r].out && rld[r].reg_rtx
5516 && rtx_equal_p (rld[r].out, rld[r].reg_rtx)))
5518 if (! rld[r].optional)
5520 reload_override_in[r] = last_reg;
5521 reload_inheritance_insn[r]
5522 = reg_reloaded_insn[i];
5525 else
5527 int k;
5528 /* We can use this as a reload reg. */
5529 /* Mark the register as in use for this part of
5530 the insn. */
5531 mark_reload_reg_in_use (i,
5532 rld[r].opnum,
5533 rld[r].when_needed,
5534 rld[r].mode);
5535 rld[r].reg_rtx = last_reg;
5536 reload_inherited[r] = 1;
5537 reload_inheritance_insn[r]
5538 = reg_reloaded_insn[i];
5539 reload_spill_index[r] = i;
5540 for (k = 0; k < nr; k++)
5541 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5542 i + k);
5549 /* Here's another way to see if the value is already lying around. */
5550 if (inheritance
5551 && rld[r].in != 0
5552 && ! reload_inherited[r]
5553 && rld[r].out == 0
5554 && (CONSTANT_P (rld[r].in)
5555 || GET_CODE (rld[r].in) == PLUS
5556 || REG_P (rld[r].in)
5557 || MEM_P (rld[r].in))
5558 && (rld[r].nregs == max_group_size
5559 || ! reg_classes_intersect_p (rld[r].class, group_class)))
5560 search_equiv = rld[r].in;
5561 /* If this is an output reload from a simple move insn, see
5562 whether an equivalence for the input is available. */
5563 else if (inheritance && rld[r].in == 0 && rld[r].out != 0)
5565 rtx set = single_set (insn);
5567 if (set
5568 && rtx_equal_p (rld[r].out, SET_DEST (set))
5569 && CONSTANT_P (SET_SRC (set)))
5570 search_equiv = SET_SRC (set);
5573 if (search_equiv)
5575 rtx equiv
5576 = find_equiv_reg (search_equiv, insn, rld[r].class,
5577 -1, NULL, 0, rld[r].mode);
5578 int regno = 0;
5580 if (equiv != 0)
5582 if (REG_P (equiv))
5583 regno = REGNO (equiv);
5584 else
5586 /* This must be a SUBREG of a hard register.
5587 Make a new REG since this might be used in an
5588 address and not all machines support SUBREGs
5589 there. */
5590 gcc_assert (GET_CODE (equiv) == SUBREG);
5591 regno = subreg_regno (equiv);
5592 equiv = gen_rtx_REG (rld[r].mode, regno);
5596 /* If we found a spill reg, reject it unless it is free
5597 and of the desired class. */
5598 if (equiv != 0)
5600 int regs_used = 0;
5601 int bad_for_class = 0;
5602 int max_regno = regno + rld[r].nregs;
5604 for (i = regno; i < max_regno; i++)
5606 regs_used |= TEST_HARD_REG_BIT (reload_reg_used_at_all,
5608 bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].class],
5612 if ((regs_used
5613 && ! free_for_value_p (regno, rld[r].mode,
5614 rld[r].opnum, rld[r].when_needed,
5615 rld[r].in, rld[r].out, r, 1))
5616 || bad_for_class)
5617 equiv = 0;
5620 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, rld[r].mode))
5621 equiv = 0;
5623 /* We found a register that contains the value we need.
5624 If this register is the same as an `earlyclobber' operand
5625 of the current insn, just mark it as a place to reload from
5626 since we can't use it as the reload register itself. */
5628 if (equiv != 0)
5629 for (i = 0; i < n_earlyclobbers; i++)
5630 if (reg_overlap_mentioned_for_reload_p (equiv,
5631 reload_earlyclobbers[i]))
5633 if (! rld[r].optional)
5634 reload_override_in[r] = equiv;
5635 equiv = 0;
5636 break;
5639 /* If the equiv register we have found is explicitly clobbered
5640 in the current insn, the reload type determines whether we
5641 can use it, use it only for reload_override_in, or not at all.
5642 In particular, we then can't use EQUIV for a
5643 RELOAD_FOR_OUTPUT_ADDRESS reload. */
5645 if (equiv != 0)
5647 if (regno_clobbered_p (regno, insn, rld[r].mode, 0))
5648 switch (rld[r].when_needed)
5650 case RELOAD_FOR_OTHER_ADDRESS:
5651 case RELOAD_FOR_INPADDR_ADDRESS:
5652 case RELOAD_FOR_INPUT_ADDRESS:
5653 case RELOAD_FOR_OPADDR_ADDR:
5654 break;
5655 case RELOAD_OTHER:
5656 case RELOAD_FOR_INPUT:
5657 case RELOAD_FOR_OPERAND_ADDRESS:
5658 if (! rld[r].optional)
5659 reload_override_in[r] = equiv;
5660 /* Fall through. */
5661 default:
5662 equiv = 0;
5663 break;
5665 else if (regno_clobbered_p (regno, insn, rld[r].mode, 1))
5666 switch (rld[r].when_needed)
5668 case RELOAD_FOR_OTHER_ADDRESS:
5669 case RELOAD_FOR_INPADDR_ADDRESS:
5670 case RELOAD_FOR_INPUT_ADDRESS:
5671 case RELOAD_FOR_OPADDR_ADDR:
5672 case RELOAD_FOR_OPERAND_ADDRESS:
5673 case RELOAD_FOR_INPUT:
5674 break;
5675 case RELOAD_OTHER:
5676 if (! rld[r].optional)
5677 reload_override_in[r] = equiv;
5678 /* Fall through. */
5679 default:
5680 equiv = 0;
5681 break;
5685 /* If we found an equivalent reg, say no code need be generated
5686 to load it, and use it as our reload reg. */
5687 if (equiv != 0
5688 && (regno != HARD_FRAME_POINTER_REGNUM
5689 || !frame_pointer_needed))
5691 int nr = hard_regno_nregs[regno][rld[r].mode];
5692 int k;
5693 rld[r].reg_rtx = equiv;
5694 reload_inherited[r] = 1;
5696 /* If reg_reloaded_valid is not set for this register,
5697 there might be a stale spill_reg_store lying around.
5698 We must clear it, since otherwise emit_reload_insns
5699 might delete the store. */
5700 if (! TEST_HARD_REG_BIT (reg_reloaded_valid, regno))
5701 spill_reg_store[regno] = NULL_RTX;
5702 /* If any of the hard registers in EQUIV are spill
5703 registers, mark them as in use for this insn. */
5704 for (k = 0; k < nr; k++)
5706 i = spill_reg_order[regno + k];
5707 if (i >= 0)
5709 mark_reload_reg_in_use (regno, rld[r].opnum,
5710 rld[r].when_needed,
5711 rld[r].mode);
5712 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5713 regno + k);
5719 /* If we found a register to use already, or if this is an optional
5720 reload, we are done. */
5721 if (rld[r].reg_rtx != 0 || rld[r].optional != 0)
5722 continue;
5724 #if 0
5725 /* No longer needed for correct operation. Might or might
5726 not give better code on average. Want to experiment? */
5728 /* See if there is a later reload that has a class different from our
5729 class that intersects our class or that requires less register
5730 than our reload. If so, we must allocate a register to this
5731 reload now, since that reload might inherit a previous reload
5732 and take the only available register in our class. Don't do this
5733 for optional reloads since they will force all previous reloads
5734 to be allocated. Also don't do this for reloads that have been
5735 turned off. */
5737 for (i = j + 1; i < n_reloads; i++)
5739 int s = reload_order[i];
5741 if ((rld[s].in == 0 && rld[s].out == 0
5742 && ! rld[s].secondary_p)
5743 || rld[s].optional)
5744 continue;
5746 if ((rld[s].class != rld[r].class
5747 && reg_classes_intersect_p (rld[r].class,
5748 rld[s].class))
5749 || rld[s].nregs < rld[r].nregs)
5750 break;
5753 if (i == n_reloads)
5754 continue;
5756 allocate_reload_reg (chain, r, j == n_reloads - 1);
5757 #endif
5760 /* Now allocate reload registers for anything non-optional that
5761 didn't get one yet. */
5762 for (j = 0; j < n_reloads; j++)
5764 int r = reload_order[j];
5766 /* Ignore reloads that got marked inoperative. */
5767 if (rld[r].out == 0 && rld[r].in == 0 && ! rld[r].secondary_p)
5768 continue;
5770 /* Skip reloads that already have a register allocated or are
5771 optional. */
5772 if (rld[r].reg_rtx != 0 || rld[r].optional)
5773 continue;
5775 if (! allocate_reload_reg (chain, r, j == n_reloads - 1))
5776 break;
5779 /* If that loop got all the way, we have won. */
5780 if (j == n_reloads)
5782 win = 1;
5783 break;
5786 /* Loop around and try without any inheritance. */
5789 if (! win)
5791 /* First undo everything done by the failed attempt
5792 to allocate with inheritance. */
5793 choose_reload_regs_init (chain, save_reload_reg_rtx);
5795 /* Some sanity tests to verify that the reloads found in the first
5796 pass are identical to the ones we have now. */
5797 gcc_assert (chain->n_reloads == n_reloads);
5799 for (i = 0; i < n_reloads; i++)
5801 if (chain->rld[i].regno < 0 || chain->rld[i].reg_rtx != 0)
5802 continue;
5803 gcc_assert (chain->rld[i].when_needed == rld[i].when_needed);
5804 for (j = 0; j < n_spills; j++)
5805 if (spill_regs[j] == chain->rld[i].regno)
5806 if (! set_reload_reg (j, i))
5807 failed_reload (chain->insn, i);
5811 /* If we thought we could inherit a reload, because it seemed that
5812 nothing else wanted the same reload register earlier in the insn,
5813 verify that assumption, now that all reloads have been assigned.
5814 Likewise for reloads where reload_override_in has been set. */
5816 /* If doing expensive optimizations, do one preliminary pass that doesn't
5817 cancel any inheritance, but removes reloads that have been needed only
5818 for reloads that we know can be inherited. */
5819 for (pass = flag_expensive_optimizations; pass >= 0; pass--)
5821 for (j = 0; j < n_reloads; j++)
5823 int r = reload_order[j];
5824 rtx check_reg;
5825 if (reload_inherited[r] && rld[r].reg_rtx)
5826 check_reg = rld[r].reg_rtx;
5827 else if (reload_override_in[r]
5828 && (REG_P (reload_override_in[r])
5829 || GET_CODE (reload_override_in[r]) == SUBREG))
5830 check_reg = reload_override_in[r];
5831 else
5832 continue;
5833 if (! free_for_value_p (true_regnum (check_reg), rld[r].mode,
5834 rld[r].opnum, rld[r].when_needed, rld[r].in,
5835 (reload_inherited[r]
5836 ? rld[r].out : const0_rtx),
5837 r, 1))
5839 if (pass)
5840 continue;
5841 reload_inherited[r] = 0;
5842 reload_override_in[r] = 0;
5844 /* If we can inherit a RELOAD_FOR_INPUT, or can use a
5845 reload_override_in, then we do not need its related
5846 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
5847 likewise for other reload types.
5848 We handle this by removing a reload when its only replacement
5849 is mentioned in reload_in of the reload we are going to inherit.
5850 Auto_inc expressions are a special case; even if the input is
5851 inherited, we still need the address for the output. We can
5852 recognize them because they have RELOAD_OUT set to RELOAD_IN.
5853 If we succeeded removing some reload and we are doing a preliminary
5854 pass just to remove such reloads, make another pass, since the
5855 removal of one reload might allow us to inherit another one. */
5856 else if (rld[r].in
5857 && rld[r].out != rld[r].in
5858 && remove_address_replacements (rld[r].in) && pass)
5859 pass = 2;
5863 /* Now that reload_override_in is known valid,
5864 actually override reload_in. */
5865 for (j = 0; j < n_reloads; j++)
5866 if (reload_override_in[j])
5867 rld[j].in = reload_override_in[j];
5869 /* If this reload won't be done because it has been canceled or is
5870 optional and not inherited, clear reload_reg_rtx so other
5871 routines (such as subst_reloads) don't get confused. */
5872 for (j = 0; j < n_reloads; j++)
5873 if (rld[j].reg_rtx != 0
5874 && ((rld[j].optional && ! reload_inherited[j])
5875 || (rld[j].in == 0 && rld[j].out == 0
5876 && ! rld[j].secondary_p)))
5878 int regno = true_regnum (rld[j].reg_rtx);
5880 if (spill_reg_order[regno] >= 0)
5881 clear_reload_reg_in_use (regno, rld[j].opnum,
5882 rld[j].when_needed, rld[j].mode);
5883 rld[j].reg_rtx = 0;
5884 reload_spill_index[j] = -1;
5887 /* Record which pseudos and which spill regs have output reloads. */
5888 for (j = 0; j < n_reloads; j++)
5890 int r = reload_order[j];
5892 i = reload_spill_index[r];
5894 /* I is nonneg if this reload uses a register.
5895 If rld[r].reg_rtx is 0, this is an optional reload
5896 that we opted to ignore. */
5897 if (rld[r].out_reg != 0 && REG_P (rld[r].out_reg)
5898 && rld[r].reg_rtx != 0)
5900 int nregno = REGNO (rld[r].out_reg);
5901 int nr = 1;
5903 if (nregno < FIRST_PSEUDO_REGISTER)
5904 nr = hard_regno_nregs[nregno][rld[r].mode];
5906 while (--nr >= 0)
5907 reg_has_output_reload[nregno + nr] = 1;
5909 if (i >= 0)
5911 nr = hard_regno_nregs[i][rld[r].mode];
5912 while (--nr >= 0)
5913 SET_HARD_REG_BIT (reg_is_output_reload, i + nr);
5916 gcc_assert (rld[r].when_needed == RELOAD_OTHER
5917 || rld[r].when_needed == RELOAD_FOR_OUTPUT
5918 || rld[r].when_needed == RELOAD_FOR_INSN);
5923 /* Deallocate the reload register for reload R. This is called from
5924 remove_address_replacements. */
5926 void
5927 deallocate_reload_reg (int r)
5929 int regno;
5931 if (! rld[r].reg_rtx)
5932 return;
5933 regno = true_regnum (rld[r].reg_rtx);
5934 rld[r].reg_rtx = 0;
5935 if (spill_reg_order[regno] >= 0)
5936 clear_reload_reg_in_use (regno, rld[r].opnum, rld[r].when_needed,
5937 rld[r].mode);
5938 reload_spill_index[r] = -1;
5941 /* If SMALL_REGISTER_CLASSES is nonzero, we may not have merged two
5942 reloads of the same item for fear that we might not have enough reload
5943 registers. However, normally they will get the same reload register
5944 and hence actually need not be loaded twice.
5946 Here we check for the most common case of this phenomenon: when we have
5947 a number of reloads for the same object, each of which was allocated
5948 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
5949 reload, and is not modified in the insn itself. If we find such,
5950 merge all the reloads and set the resulting reload to RELOAD_OTHER.
5951 This will not increase the number of spill registers needed and will
5952 prevent redundant code. */
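/* As a hypothetical example: an insn that uses the same pseudo both as an
   operand and inside a memory address may get two separate input reloads
   of that pseudo; if both were assigned the same spill register, the second
   load is redundant, and merging them into a single RELOAD_OTHER reload
   lets one load serve both uses.  */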
5954 static void
5955 merge_assigned_reloads (rtx insn)
5957 int i, j;
5959 /* Scan all the reloads looking for ones that only load values and
5960 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
5961 assigned and not modified by INSN. */
5963 for (i = 0; i < n_reloads; i++)
5965 int conflicting_input = 0;
5966 int max_input_address_opnum = -1;
5967 int min_conflicting_input_opnum = MAX_RECOG_OPERANDS;
5969 if (rld[i].in == 0 || rld[i].when_needed == RELOAD_OTHER
5970 || rld[i].out != 0 || rld[i].reg_rtx == 0
5971 || reg_set_p (rld[i].reg_rtx, insn))
5972 continue;
5974 /* Look at all other reloads. Ensure that the only use of this
5975 reload_reg_rtx is in a reload that just loads the same value
5976 as we do. Note that any secondary reloads must be of the identical
5977 class since the values, modes, and result registers are the
5978 same, so we need not do anything with any secondary reloads. */
5980 for (j = 0; j < n_reloads; j++)
5982 if (i == j || rld[j].reg_rtx == 0
5983 || ! reg_overlap_mentioned_p (rld[j].reg_rtx,
5984 rld[i].reg_rtx))
5985 continue;
5987 if (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
5988 && rld[j].opnum > max_input_address_opnum)
5989 max_input_address_opnum = rld[j].opnum;
5991 /* If the reload regs aren't exactly the same (e.g., different modes)
5992 or if the values are different, we can't merge this reload.
5993 But if it is an input reload, we might still merge
5994 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_OTHER_ADDRESS reloads. */
5996 if (! rtx_equal_p (rld[i].reg_rtx, rld[j].reg_rtx)
5997 || rld[j].out != 0 || rld[j].in == 0
5998 || ! rtx_equal_p (rld[i].in, rld[j].in))
6000 if (rld[j].when_needed != RELOAD_FOR_INPUT
6001 || ((rld[i].when_needed != RELOAD_FOR_INPUT_ADDRESS
6002 || rld[i].opnum > rld[j].opnum)
6003 && rld[i].when_needed != RELOAD_FOR_OTHER_ADDRESS))
6004 break;
6005 conflicting_input = 1;
6006 if (min_conflicting_input_opnum > rld[j].opnum)
6007 min_conflicting_input_opnum = rld[j].opnum;
6011 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
6012 we, in fact, found any matching reloads. */
6014 if (j == n_reloads
6015 && max_input_address_opnum <= min_conflicting_input_opnum)
6017 for (j = 0; j < n_reloads; j++)
6018 if (i != j && rld[j].reg_rtx != 0
6019 && rtx_equal_p (rld[i].reg_rtx, rld[j].reg_rtx)
6020 && (! conflicting_input
6021 || rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6022 || rld[j].when_needed == RELOAD_FOR_OTHER_ADDRESS))
6024 rld[i].when_needed = RELOAD_OTHER;
6025 rld[j].in = 0;
6026 reload_spill_index[j] = -1;
6027 transfer_replacements (i, j);
6030 /* If this is now RELOAD_OTHER, look for any reloads that load
6031 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
6032 if they were for inputs, RELOAD_OTHER for outputs. Note that
6033 this test is equivalent to looking for reloads for this operand
6034 number. */
6035 /* We must take special care when there are two or more reloads to
6036 be merged and a RELOAD_FOR_OUTPUT_ADDRESS reload that loads the
6037 same value or a part of it; we must not change its type if there
6038 is a conflicting input. */
6040 if (rld[i].when_needed == RELOAD_OTHER)
6041 for (j = 0; j < n_reloads; j++)
6042 if (rld[j].in != 0
6043 && rld[j].when_needed != RELOAD_OTHER
6044 && rld[j].when_needed != RELOAD_FOR_OTHER_ADDRESS
6045 && (! conflicting_input
6046 || rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6047 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
6048 && reg_overlap_mentioned_for_reload_p (rld[j].in,
6049 rld[i].in))
6051 int k;
6053 rld[j].when_needed
6054 = ((rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6055 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
6056 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER);
6058 /* Check to see if we accidentally converted two reloads
6059 that use the same reload register with different inputs
6060 to the same type. If so, the resulting code won't work,
6061 so abort. */
6062 if (rld[j].reg_rtx)
6063 for (k = 0; k < j; k++)
6064 gcc_assert (rld[k].in == 0 || rld[k].reg_rtx == 0
6065 || rld[k].when_needed != rld[j].when_needed
6066 || !rtx_equal_p (rld[k].reg_rtx,
6067 rld[j].reg_rtx)
6068 || rtx_equal_p (rld[k].in,
6069 rld[j].in));
6075 /* These arrays are filled by emit_reload_insns and its subroutines. */
6076 static rtx input_reload_insns[MAX_RECOG_OPERANDS];
6077 static rtx other_input_address_reload_insns = 0;
6078 static rtx other_input_reload_insns = 0;
6079 static rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
6080 static rtx inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6081 static rtx output_reload_insns[MAX_RECOG_OPERANDS];
6082 static rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
6083 static rtx outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6084 static rtx operand_reload_insns = 0;
6085 static rtx other_operand_reload_insns = 0;
6086 static rtx other_output_reload_insns[MAX_RECOG_OPERANDS];
6088 /* Values to be put in spill_reg_store are put here first. */
6089 static rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
6090 static HARD_REG_SET reg_reloaded_died;
6092 /* Generate insns to perform reload RL, which is for the insn in CHAIN and
6093 has the number J. OLD contains the value to be used as input. */
6095 static void
6096 emit_input_reload_insns (struct insn_chain *chain, struct reload *rl,
6097 rtx old, int j)
6099 rtx insn = chain->insn;
6100 rtx reloadreg = rl->reg_rtx;
6101 rtx oldequiv_reg = 0;
6102 rtx oldequiv = 0;
6103 int special = 0;
6104 enum machine_mode mode;
6105 rtx *where;
6107 /* Determine the mode to reload in.
6108 This is very tricky because we have three to choose from.
6109 There is the mode the insn operand wants (rl->inmode).
6110 There is the mode of the reload register RELOADREG.
6111 There is the intrinsic mode of the operand, which we could find
6112 by stripping some SUBREGs.
6113 It turns out that RELOADREG's mode is irrelevant:
6114 we can change that arbitrarily.
6116 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
6117 then the reload reg may not support QImode moves, so use SImode.
6118 If foo is in memory due to spilling a pseudo reg, this is safe,
6119 because the QImode value is in the least significant part of a
6120 slot big enough for a SImode. If foo is some other sort of
6121 memory reference, then it is impossible to reload this case,
6122 so previous passes had better make sure this never happens.
6124 Then consider a one-word union which has SImode and one of its
6125 members is a float, being fetched as (SUBREG:SF union:SI).
6126 We must fetch that as SFmode because we could be loading into
6127 a float-only register. In this case OLD's mode is correct.
6129 Consider an immediate integer: it has VOIDmode. Here we need
6130 to get a mode from something else.
6132 In some cases, there is a fourth mode, the operand's
6133 containing mode. If the insn specifies a containing mode for
6134 this operand, it overrides all others.
6136 I am not sure whether the algorithm here is always right,
6137 but it does the right things in those cases. */
6139 mode = GET_MODE (old);
6140 if (mode == VOIDmode)
6141 mode = rl->inmode;
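/* E.g. for an immediate operand such as (const_int 5), GET_MODE (old) is
   VOIDmode, so the reload is emitted in rl->inmode, the mode the insn
   operand demands.  */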
6143 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6144 /* If we need a secondary register for this operation, see if
6145 the value is already in a register in that class. Don't
6146 do this if the secondary register will be used as a scratch
6147 register. */
6149 if (rl->secondary_in_reload >= 0
6150 && rl->secondary_in_icode == CODE_FOR_nothing
6151 && optimize)
6152 oldequiv
6153 = find_equiv_reg (old, insn,
6154 rld[rl->secondary_in_reload].class,
6155 -1, NULL, 0, mode);
6156 #endif
6158 /* If reloading from memory, see if there is a register
6159 that already holds the same value. If so, reload from there.
6160 We can pass 0 as the reload_reg_p argument because
6161 any other reload has either already been emitted,
6162 in which case find_equiv_reg will see the reload-insn,
6163 or has yet to be emitted, in which case it doesn't matter
6164 because we will use this equiv reg right away. */
6166 if (oldequiv == 0 && optimize
6167 && (MEM_P (old)
6168 || (REG_P (old)
6169 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6170 && reg_renumber[REGNO (old)] < 0)))
6171 oldequiv = find_equiv_reg (old, insn, ALL_REGS, -1, NULL, 0, mode);
6173 if (oldequiv)
6175 unsigned int regno = true_regnum (oldequiv);
6177 /* Don't use OLDEQUIV if any other reload changes it at an
6178 earlier stage of this insn or at this stage. */
6179 if (! free_for_value_p (regno, rl->mode, rl->opnum, rl->when_needed,
6180 rl->in, const0_rtx, j, 0))
6181 oldequiv = 0;
6183 /* If it is no cheaper to copy from OLDEQUIV into the
6184 reload register than it would be to move from memory,
6185 don't use it. Likewise, if we need a secondary register
6186 or memory. */
6188 if (oldequiv != 0
6189 && (((enum reg_class) REGNO_REG_CLASS (regno) != rl->class
6190 && (REGISTER_MOVE_COST (mode, REGNO_REG_CLASS (regno),
6191 rl->class)
6192 >= MEMORY_MOVE_COST (mode, rl->class, 1)))
6193 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6194 || (SECONDARY_INPUT_RELOAD_CLASS (rl->class,
6195 mode, oldequiv)
6196 != NO_REGS)
6197 #endif
6198 #ifdef SECONDARY_MEMORY_NEEDED
6199 || SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (regno),
6200 rl->class,
6201 mode)
6202 #endif
6204 oldequiv = 0;
6207 /* delete_output_reload is only invoked properly if old contains
6208 the original pseudo register. Since this is replaced with a
6209 hard reg when RELOAD_OVERRIDE_IN is set, see if we can
6210 find the pseudo in RELOAD_IN_REG. */
6211 if (oldequiv == 0
6212 && reload_override_in[j]
6213 && REG_P (rl->in_reg))
6215 oldequiv = old;
6216 old = rl->in_reg;
6218 if (oldequiv == 0)
6219 oldequiv = old;
6220 else if (REG_P (oldequiv))
6221 oldequiv_reg = oldequiv;
6222 else if (GET_CODE (oldequiv) == SUBREG)
6223 oldequiv_reg = SUBREG_REG (oldequiv);
6225 /* If we are reloading from a register that was recently stored in
6226 with an output-reload, see if we can prove there was
6227 actually no need to store the old value in it. */
6229 if (optimize && REG_P (oldequiv)
6230 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6231 && spill_reg_store[REGNO (oldequiv)]
6232 && REG_P (old)
6233 && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (oldequiv)])
6234 || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
6235 rl->out_reg)))
6236 delete_output_reload (insn, j, REGNO (oldequiv));
6238 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
6239 then load RELOADREG from OLDEQUIV. Note that we cannot use
6240 gen_lowpart_common since it can do the wrong thing when
6241 RELOADREG has a multi-word mode. Note that RELOADREG
6242 must always be a REG here. */
6244 if (GET_MODE (reloadreg) != mode)
6245 reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);
6246 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
6247 oldequiv = SUBREG_REG (oldequiv);
6248 if (GET_MODE (oldequiv) != VOIDmode
6249 && mode != GET_MODE (oldequiv))
6250 oldequiv = gen_lowpart_SUBREG (mode, oldequiv);
6252 /* Switch to the right place to emit the reload insns. */
6253 switch (rl->when_needed)
6255 case RELOAD_OTHER:
6256 where = &other_input_reload_insns;
6257 break;
6258 case RELOAD_FOR_INPUT:
6259 where = &input_reload_insns[rl->opnum];
6260 break;
6261 case RELOAD_FOR_INPUT_ADDRESS:
6262 where = &input_address_reload_insns[rl->opnum];
6263 break;
6264 case RELOAD_FOR_INPADDR_ADDRESS:
6265 where = &inpaddr_address_reload_insns[rl->opnum];
6266 break;
6267 case RELOAD_FOR_OUTPUT_ADDRESS:
6268 where = &output_address_reload_insns[rl->opnum];
6269 break;
6270 case RELOAD_FOR_OUTADDR_ADDRESS:
6271 where = &outaddr_address_reload_insns[rl->opnum];
6272 break;
6273 case RELOAD_FOR_OPERAND_ADDRESS:
6274 where = &operand_reload_insns;
6275 break;
6276 case RELOAD_FOR_OPADDR_ADDR:
6277 where = &other_operand_reload_insns;
6278 break;
6279 case RELOAD_FOR_OTHER_ADDRESS:
6280 where = &other_input_address_reload_insns;
6281 break;
6282 default:
6283 gcc_unreachable ();
6286 push_to_sequence (*where);
6288 /* Auto-increment addresses must be reloaded in a special way. */
6289 if (rl->out && ! rl->out_reg)
6291 /* We are not going to bother supporting the case where an
6292 incremented register can't be copied directly from
6293 OLDEQUIV since this seems highly unlikely. */
6294 gcc_assert (rl->secondary_in_reload < 0);
6296 if (reload_inherited[j])
6297 oldequiv = reloadreg;
6299 old = XEXP (rl->in_reg, 0);
6301 if (optimize && REG_P (oldequiv)
6302 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6303 && spill_reg_store[REGNO (oldequiv)]
6304 && REG_P (old)
6305 && (dead_or_set_p (insn,
6306 spill_reg_stored_to[REGNO (oldequiv)])
6307 || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
6308 old)))
6309 delete_output_reload (insn, j, REGNO (oldequiv));
6311 /* Prevent normal processing of this reload. */
6312 special = 1;
6313 /* Output a special code sequence for this case. */
6314 new_spill_reg_store[REGNO (reloadreg)]
6315 = inc_for_reload (reloadreg, oldequiv, rl->out,
6316 rl->inc);
6319 /* If we are reloading a pseudo-register that was set by the previous
6320 insn, see if we can get rid of that pseudo-register entirely
6321 by redirecting the previous insn into our reload register. */
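/* For instance (hypothetical RTL): if the previous insn was
   (set (reg:SI 120) (plus:SI ...)) and pseudo 120 dies in the current insn,
   we can make that insn set the reload register instead and let pseudo 120
   disappear entirely, provided the rewritten insn still satisfies its
   constraints.  */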
6323 else if (optimize && REG_P (old)
6324 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6325 && dead_or_set_p (insn, old)
6326 /* This is unsafe if some other reload
6327 uses the same reg first. */
6328 && ! conflicts_with_override (reloadreg)
6329 && free_for_value_p (REGNO (reloadreg), rl->mode, rl->opnum,
6330 rl->when_needed, old, rl->out, j, 0))
6332 rtx temp = PREV_INSN (insn);
6333 while (temp && NOTE_P (temp))
6334 temp = PREV_INSN (temp);
6335 if (temp
6336 && NONJUMP_INSN_P (temp)
6337 && GET_CODE (PATTERN (temp)) == SET
6338 && SET_DEST (PATTERN (temp)) == old
6339 /* Make sure we can access insn_operand_constraint. */
6340 && asm_noperands (PATTERN (temp)) < 0
6341 /* This is unsafe if operand occurs more than once in current
6342 insn. Perhaps some occurrences aren't reloaded. */
6343 && count_occurrences (PATTERN (insn), old, 0) == 1)
6345 rtx old = SET_DEST (PATTERN (temp));
6346 /* Store into the reload register instead of the pseudo. */
6347 SET_DEST (PATTERN (temp)) = reloadreg;
6349 /* Verify that resulting insn is valid. */
6350 extract_insn (temp);
6351 if (constrain_operands (1))
6353 /* If the previous insn is an output reload, the source is
6354 a reload register, and its spill_reg_store entry will
6355 contain the previous destination. This is now
6356 invalid. */
6357 if (REG_P (SET_SRC (PATTERN (temp)))
6358 && REGNO (SET_SRC (PATTERN (temp))) < FIRST_PSEUDO_REGISTER)
6360 spill_reg_store[REGNO (SET_SRC (PATTERN (temp)))] = 0;
6361 spill_reg_stored_to[REGNO (SET_SRC (PATTERN (temp)))] = 0;
6364 /* If these are the only uses of the pseudo reg,
6365 pretend for GDB it lives in the reload reg we used. */
6366 if (REG_N_DEATHS (REGNO (old)) == 1
6367 && REG_N_SETS (REGNO (old)) == 1)
6369 reg_renumber[REGNO (old)] = REGNO (rl->reg_rtx);
6370 alter_reg (REGNO (old), -1);
6372 special = 1;
6374 else
6376 SET_DEST (PATTERN (temp)) = old;
6381 /* We can't do that, so output an insn to load RELOADREG. */
6383 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6384 /* If we have a secondary reload, pick up the secondary register
6385 and icode, if any. If OLDEQUIV and OLD are different or
6386 if this is an in-out reload, recompute whether or not we
6387 still need a secondary register and what the icode should
6388 be. If we still need a secondary register and the class or
6389 icode is different, go back to reloading from OLD if using
6390 OLDEQUIV means that we got the wrong type of register. We
6391 cannot have different class or icode due to an in-out reload
6392 because we don't make such reloads when both the input and
6393 output need secondary reload registers. */
6395 if (! special && rl->secondary_in_reload >= 0)
6397 rtx second_reload_reg = 0;
6398 int secondary_reload = rl->secondary_in_reload;
6399 rtx real_oldequiv = oldequiv;
6400 rtx real_old = old;
6401 rtx tmp;
6402 enum insn_code icode;
6404 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
6405 and similarly for OLD.
6406 See comments in get_secondary_reload in reload.c. */
6407 /* If it is a pseudo that cannot be replaced with its
6408 equivalent MEM, we must fall back to reload_in, which
6409 will have all the necessary substitutions registered.
6410 Likewise for a pseudo that can't be replaced with its
6411 equivalent constant.
6413 Take extra care for subregs of such pseudos. Note that
6414 we cannot use reg_equiv_mem in this case because it is
6415 not in the right mode. */
6417 tmp = oldequiv;
6418 if (GET_CODE (tmp) == SUBREG)
6419 tmp = SUBREG_REG (tmp);
6420 if (REG_P (tmp)
6421 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
6422 && (reg_equiv_memory_loc[REGNO (tmp)] != 0
6423 || reg_equiv_constant[REGNO (tmp)] != 0))
6425 if (! reg_equiv_mem[REGNO (tmp)]
6426 || num_not_at_initial_offset
6427 || GET_CODE (oldequiv) == SUBREG)
6428 real_oldequiv = rl->in;
6429 else
6430 real_oldequiv = reg_equiv_mem[REGNO (tmp)];
6433 tmp = old;
6434 if (GET_CODE (tmp) == SUBREG)
6435 tmp = SUBREG_REG (tmp);
6436 if (REG_P (tmp)
6437 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
6438 && (reg_equiv_memory_loc[REGNO (tmp)] != 0
6439 || reg_equiv_constant[REGNO (tmp)] != 0))
6441 if (! reg_equiv_mem[REGNO (tmp)]
6442 || num_not_at_initial_offset
6443 || GET_CODE (old) == SUBREG)
6444 real_old = rl->in;
6445 else
6446 real_old = reg_equiv_mem[REGNO (tmp)];
6449 second_reload_reg = rld[secondary_reload].reg_rtx;
6450 icode = rl->secondary_in_icode;
6452 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
6453 || (rl->in != 0 && rl->out != 0))
6455 enum reg_class new_class
6456 = SECONDARY_INPUT_RELOAD_CLASS (rl->class,
6457 mode, real_oldequiv);
6459 if (new_class == NO_REGS)
6460 second_reload_reg = 0;
6461 else
6463 enum insn_code new_icode;
6464 enum machine_mode new_mode;
6466 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
6467 REGNO (second_reload_reg)))
6468 oldequiv = old, real_oldequiv = real_old;
6469 else
6471 new_icode = reload_in_optab[(int) mode];
6472 if (new_icode != CODE_FOR_nothing
6473 && ((insn_data[(int) new_icode].operand[0].predicate
6474 && ! ((*insn_data[(int) new_icode].operand[0].predicate)
6475 (reloadreg, mode)))
6476 || (insn_data[(int) new_icode].operand[1].predicate
6477 && ! ((*insn_data[(int) new_icode].operand[1].predicate)
6478 (real_oldequiv, mode)))))
6479 new_icode = CODE_FOR_nothing;
6481 if (new_icode == CODE_FOR_nothing)
6482 new_mode = mode;
6483 else
6484 new_mode = insn_data[(int) new_icode].operand[2].mode;
6486 if (GET_MODE (second_reload_reg) != new_mode)
6488 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
6489 new_mode))
6490 oldequiv = old, real_oldequiv = real_old;
6491 else
6492 second_reload_reg
6493 = reload_adjust_reg_for_mode (second_reload_reg,
6494 new_mode);
6500 /* If we still need a secondary reload register, check
6501 to see if it is being used as a scratch or intermediate
6502 register and generate code appropriately. If we need
6503 a scratch register, use REAL_OLDEQUIV since the form of
6504 the insn may depend on the actual address if it is
6505 a MEM. */
6507 if (second_reload_reg)
6509 if (icode != CODE_FOR_nothing)
6511 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
6512 second_reload_reg));
6513 special = 1;
6515 else
6517 /* See if we need a scratch register to load the
6518 intermediate register (a tertiary reload). */
6519 enum insn_code tertiary_icode
6520 = rld[secondary_reload].secondary_in_icode;
6522 if (tertiary_icode != CODE_FOR_nothing)
6524 rtx third_reload_reg
6525 = rld[rld[secondary_reload].secondary_in_reload].reg_rtx;
6527 emit_insn ((GEN_FCN (tertiary_icode)
6528 (second_reload_reg, real_oldequiv,
6529 third_reload_reg)));
6531 else
6532 gen_reload (second_reload_reg, real_oldequiv,
6533 rl->opnum,
6534 rl->when_needed);
6536 oldequiv = second_reload_reg;
6540 #endif
6542 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
6544 rtx real_oldequiv = oldequiv;
6546 if ((REG_P (oldequiv)
6547 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
6548 && (reg_equiv_memory_loc[REGNO (oldequiv)] != 0
6549 || reg_equiv_constant[REGNO (oldequiv)] != 0))
6550 || (GET_CODE (oldequiv) == SUBREG
6551 && REG_P (SUBREG_REG (oldequiv))
6552 && (REGNO (SUBREG_REG (oldequiv))
6553 >= FIRST_PSEUDO_REGISTER)
6554 && ((reg_equiv_memory_loc
6555 [REGNO (SUBREG_REG (oldequiv))] != 0)
6556 || (reg_equiv_constant
6557 [REGNO (SUBREG_REG (oldequiv))] != 0)))
6558 || (CONSTANT_P (oldequiv)
6559 && (PREFERRED_RELOAD_CLASS (oldequiv,
6560 REGNO_REG_CLASS (REGNO (reloadreg)))
6561 == NO_REGS)))
6562 real_oldequiv = rl->in;
6563 gen_reload (reloadreg, real_oldequiv, rl->opnum,
6564 rl->when_needed);
6567 if (flag_non_call_exceptions)
6568 copy_eh_notes (insn, get_insns ());
6570 /* End this sequence. */
6571 *where = get_insns ();
6572 end_sequence ();
6574 /* Update reload_override_in so that delete_address_reloads_1
6575 can see the actual register usage. */
6576 if (oldequiv_reg)
6577 reload_override_in[j] = oldequiv;
6580 /* Generate insns for the output reload RL, which is for the insn described
6581 by CHAIN and has the number J. */
6582 static void
6583 emit_output_reload_insns (struct insn_chain *chain, struct reload *rl,
6584 int j)
6586 rtx reloadreg = rl->reg_rtx;
6587 rtx insn = chain->insn;
6588 int special = 0;
6589 rtx old = rl->out;
6590 enum machine_mode mode = GET_MODE (old);
6591 rtx p;
6593 if (rl->when_needed == RELOAD_OTHER)
6594 start_sequence ();
6595 else
6596 push_to_sequence (output_reload_insns[rl->opnum]);
6598 /* Determine the mode to reload in.
6599 See comments above (for input reloading). */
6601 if (mode == VOIDmode)
6603 /* VOIDmode should never happen for an output. */
6604 if (asm_noperands (PATTERN (insn)) < 0)
6605 /* It's the compiler's fault. */
6606 fatal_insn ("VOIDmode on an output", insn);
6607 error_for_asm (insn, "output operand is constant in %<asm%>");
6608 /* Prevent crash--use something we know is valid. */
6609 mode = word_mode;
6610 old = gen_rtx_REG (mode, REGNO (reloadreg));
6613 if (GET_MODE (reloadreg) != mode)
6614 reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);
6616 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6618 /* If we need two reload regs, set RELOADREG to the intermediate
6619 one, since it will be stored into OLD. We might need a secondary
6620 register only for an input reload, so check again here. */
6622 if (rl->secondary_out_reload >= 0)
6624 rtx real_old = old;
6626 if (REG_P (old) && REGNO (old) >= FIRST_PSEUDO_REGISTER
6627 && reg_equiv_mem[REGNO (old)] != 0)
6628 real_old = reg_equiv_mem[REGNO (old)];
6630 if ((SECONDARY_OUTPUT_RELOAD_CLASS (rl->class,
6631 mode, real_old)
6632 != NO_REGS))
6634 rtx second_reloadreg = reloadreg;
6635 reloadreg = rld[rl->secondary_out_reload].reg_rtx;
6637 /* See if RELOADREG is to be used as a scratch register
6638 or as an intermediate register. */
6639 if (rl->secondary_out_icode != CODE_FOR_nothing)
6641 emit_insn ((GEN_FCN (rl->secondary_out_icode)
6642 (real_old, second_reloadreg, reloadreg)));
6643 special = 1;
6645 else
6647 /* See if we need both a scratch and intermediate reload
6648 register. */
6650 int secondary_reload = rl->secondary_out_reload;
6651 enum insn_code tertiary_icode
6652 = rld[secondary_reload].secondary_out_icode;
6654 if (GET_MODE (reloadreg) != mode)
6655 reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);
6657 if (tertiary_icode != CODE_FOR_nothing)
6659 rtx third_reloadreg
6660 = rld[rld[secondary_reload].secondary_out_reload].reg_rtx;
6661 rtx tem;
6663 /* Copy the primary reload reg to the secondary reload reg
6664 (note that these have been swapped above), then the
6665 secondary reload reg to OLD using our insn. */
6667 /* If REAL_OLD is a paradoxical SUBREG, remove it
6668 and try to put the opposite SUBREG on
6669 RELOADREG. */
6670 if (GET_CODE (real_old) == SUBREG
6671 && (GET_MODE_SIZE (GET_MODE (real_old))
6672 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
6673 && 0 != (tem = gen_lowpart_common
6674 (GET_MODE (SUBREG_REG (real_old)),
6675 reloadreg)))
6676 real_old = SUBREG_REG (real_old), reloadreg = tem;
6678 gen_reload (reloadreg, second_reloadreg,
6679 rl->opnum, rl->when_needed);
6680 emit_insn ((GEN_FCN (tertiary_icode)
6681 (real_old, reloadreg, third_reloadreg)));
6682 special = 1;
6685 else
6686 /* Copy between the reload regs here and then to
6687 OUT later. */
6689 gen_reload (reloadreg, second_reloadreg,
6690 rl->opnum, rl->when_needed);
6694 #endif
6696 /* Output the last reload insn. */
6697 if (! special)
6699 rtx set;
6701 /* Don't output the last reload if OLD is not the dest of
6702 INSN, appears in its src, and is clobbered by INSN. */
6703 if (! flag_expensive_optimizations
6704 || !REG_P (old)
6705 || !(set = single_set (insn))
6706 || rtx_equal_p (old, SET_DEST (set))
6707 || !reg_mentioned_p (old, SET_SRC (set))
6708 || !((REGNO (old) < FIRST_PSEUDO_REGISTER)
6709 && regno_clobbered_p (REGNO (old), insn, rl->mode, 0)))
6710 gen_reload (old, reloadreg, rl->opnum,
6711 rl->when_needed);
6714 /* Look at all insns we emitted, just to be safe. */
6715 for (p = get_insns (); p; p = NEXT_INSN (p))
6716 if (INSN_P (p))
6718 rtx pat = PATTERN (p);
6720 /* If this output reload doesn't come from a spill reg,
6721 clear any memory of reloaded copies of the pseudo reg.
6722 If this output reload comes from a spill reg,
6723 reg_has_output_reload will make this do nothing. */
6724 note_stores (pat, forget_old_reloads_1, NULL);
6726 if (reg_mentioned_p (rl->reg_rtx, pat))
6728 rtx set = single_set (insn);
6729 if (reload_spill_index[j] < 0
6730 && set
6731 && SET_SRC (set) == rl->reg_rtx)
6733 int src = REGNO (SET_SRC (set));
6735 reload_spill_index[j] = src;
6736 SET_HARD_REG_BIT (reg_is_output_reload, src);
6737 if (find_regno_note (insn, REG_DEAD, src))
6738 SET_HARD_REG_BIT (reg_reloaded_died, src);
6740 if (REGNO (rl->reg_rtx) < FIRST_PSEUDO_REGISTER)
6742 int s = rl->secondary_out_reload;
6743 set = single_set (p);
6744 /* If this reload copies only to the secondary reload
6745 register, the secondary reload does the actual
6746 store. */
6747 if (s >= 0 && set == NULL_RTX)
6748 /* We can't tell what function the secondary reload
6749 has and where the actual store to the pseudo is
6750 made; leave new_spill_reg_store alone. */
6752 else if (s >= 0
6753 && SET_SRC (set) == rl->reg_rtx
6754 && SET_DEST (set) == rld[s].reg_rtx)
6756 /* Usually the next instruction will be the
6757 secondary reload insn; if we can confirm
6758 that it is, setting new_spill_reg_store to
6759 that insn will allow an extra optimization. */
6760 rtx s_reg = rld[s].reg_rtx;
6761 rtx next = NEXT_INSN (p);
6762 rld[s].out = rl->out;
6763 rld[s].out_reg = rl->out_reg;
6764 set = single_set (next);
6765 if (set && SET_SRC (set) == s_reg
6766 && ! new_spill_reg_store[REGNO (s_reg)])
6768 SET_HARD_REG_BIT (reg_is_output_reload,
6769 REGNO (s_reg));
6770 new_spill_reg_store[REGNO (s_reg)] = next;
6773 else
6774 new_spill_reg_store[REGNO (rl->reg_rtx)] = p;
6779 if (rl->when_needed == RELOAD_OTHER)
6781 emit_insn (other_output_reload_insns[rl->opnum]);
6782 other_output_reload_insns[rl->opnum] = get_insns ();
6784 else
6785 output_reload_insns[rl->opnum] = get_insns ();
6787 if (flag_non_call_exceptions)
6788 copy_eh_notes (insn, get_insns ());
6790 end_sequence ();
6793 /* Do input reloading for reload RL, which is for the insn described by CHAIN
6794 and has the number J. */
6795 static void
6796 do_input_reload (struct insn_chain *chain, struct reload *rl, int j)
6798 rtx insn = chain->insn;
6799 rtx old = (rl->in && MEM_P (rl->in)
6800 ? rl->in_reg : rl->in);
6802 if (old != 0
6803 /* AUTO_INC reloads need to be handled even if inherited. We got an
6804 AUTO_INC reload if reload_out is set but reload_out_reg isn't. */
6805 && (! reload_inherited[j] || (rl->out && ! rl->out_reg))
6806 && ! rtx_equal_p (rl->reg_rtx, old)
6807 && rl->reg_rtx != 0)
6808 emit_input_reload_insns (chain, rld + j, old, j);
6810 /* When inheriting a wider reload, we have a MEM in rl->in,
6811 e.g. inheriting a SImode output reload for
6812 (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10))) */
6813 if (optimize && reload_inherited[j] && rl->in
6814 && MEM_P (rl->in)
6815 && MEM_P (rl->in_reg)
6816 && reload_spill_index[j] >= 0
6817 && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
6818 rl->in = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];
6820 /* If we are reloading a register that was recently stored in with an
6821 output-reload, see if we can prove there was
6822 actually no need to store the old value in it. */
6824 if (optimize
6825 /* Only attempt this for input reloads; for RELOAD_OTHER we miss
6826 that there may be multiple uses of the previous output reload.
6827 Restricting to RELOAD_FOR_INPUT is mostly paranoia. */
6828 && rl->when_needed == RELOAD_FOR_INPUT
6829 && (reload_inherited[j] || reload_override_in[j])
6830 && rl->reg_rtx
6831 && REG_P (rl->reg_rtx)
6832 && spill_reg_store[REGNO (rl->reg_rtx)] != 0
6833 #if 0
6834 /* There doesn't seem to be any reason to restrict this to pseudos
6835 and doing so loses in the case where we are copying from a
6836 register of the wrong class. */
6837 && (REGNO (spill_reg_stored_to[REGNO (rl->reg_rtx)])
6838 >= FIRST_PSEUDO_REGISTER)
6839 #endif
6840 /* The insn might already have some references to stack slots
6841 replaced by MEMs, while reload_out_reg still names the
6842 original pseudo. */
6843 && (dead_or_set_p (insn,
6844 spill_reg_stored_to[REGNO (rl->reg_rtx)])
6845 || rtx_equal_p (spill_reg_stored_to[REGNO (rl->reg_rtx)],
6846 rl->out_reg)))
6847 delete_output_reload (insn, j, REGNO (rl->reg_rtx));
6850 /* Do output reloading for reload RL, which is for the insn described by
6851 CHAIN and has the number J.
6852 ??? At some point we need to support handling output reloads of
6853 JUMP_INSNs or insns that set cc0. */
6854 static void
6855 do_output_reload (struct insn_chain *chain, struct reload *rl, int j)
6857 rtx note, old;
6858 rtx insn = chain->insn;
6859 /* If this is an output reload that stores something that is
6860 not loaded in this same reload, see if we can eliminate a previous
6861 store. */
6862 rtx pseudo = rl->out_reg;
6864 if (pseudo
6865 && optimize
6866 && REG_P (pseudo)
6867 && ! rtx_equal_p (rl->in_reg, pseudo)
6868 && REGNO (pseudo) >= FIRST_PSEUDO_REGISTER
6869 && reg_last_reload_reg[REGNO (pseudo)])
6871 int pseudo_no = REGNO (pseudo);
6872 int last_regno = REGNO (reg_last_reload_reg[pseudo_no]);
6874 /* We don't need to test full validity of last_regno for
6875 inherit here; we only want to know if the store actually
6876 matches the pseudo. */
6877 if (TEST_HARD_REG_BIT (reg_reloaded_valid, last_regno)
6878 && reg_reloaded_contents[last_regno] == pseudo_no
6879 && spill_reg_store[last_regno]
6880 && rtx_equal_p (pseudo, spill_reg_stored_to[last_regno]))
6881 delete_output_reload (insn, j, last_regno);
6884 old = rl->out_reg;
6885 if (old == 0
6886 || rl->reg_rtx == old
6887 || rl->reg_rtx == 0)
6888 return;
6890 /* An output operand that dies right away does need a reload,
6891 but need not be copied from it. Show the new location in the
6892 REG_UNUSED note. */
6893 if ((REG_P (old) || GET_CODE (old) == SCRATCH)
6894 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
6896 XEXP (note, 0) = rl->reg_rtx;
6897 return;
6899 /* Likewise for a SUBREG of an operand that dies. */
6900 else if (GET_CODE (old) == SUBREG
6901 && REG_P (SUBREG_REG (old))
6902 && 0 != (note = find_reg_note (insn, REG_UNUSED,
6903 SUBREG_REG (old))))
6905 XEXP (note, 0) = gen_lowpart_common (GET_MODE (old),
6906 rl->reg_rtx);
6907 return;
6909 else if (GET_CODE (old) == SCRATCH)
6910 /* If we aren't optimizing, there won't be a REG_UNUSED note,
6911 but we don't want to make an output reload. */
6912 return;
6914 /* If this is a JUMP_INSN, we can't support output reloads yet. */
6915 gcc_assert (!JUMP_P (insn));
6917 emit_output_reload_insns (chain, rld + j, j);
6920 /* Reload number R reloads from or to a group of hard registers starting at
6921 register REGNO. Return true if it can be treated for inheritance purposes
6922 like a group of reloads, each one reloading a single hard register.
6923 The caller has already checked that the spill register and REGNO use
6924 the same number of registers to store the reload value. */
6926 static bool
6927 inherit_piecemeal_p (int r ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED)
6929 #ifdef CANNOT_CHANGE_MODE_CLASS
6930 return (!REG_CANNOT_CHANGE_MODE_P (reload_spill_index[r],
6931 GET_MODE (rld[r].reg_rtx),
6932 reg_raw_mode[reload_spill_index[r]])
6933 && !REG_CANNOT_CHANGE_MODE_P (regno,
6934 GET_MODE (rld[r].reg_rtx),
6935 reg_raw_mode[regno]));
6936 #else
6937 return true;
6938 #endif
6941 /* Output insns to reload values in and out of the chosen reload regs. */
6943 static void
6944 emit_reload_insns (struct insn_chain *chain)
6946 rtx insn = chain->insn;
6948 int j;
6950 CLEAR_HARD_REG_SET (reg_reloaded_died);
6952 for (j = 0; j < reload_n_operands; j++)
6953 input_reload_insns[j] = input_address_reload_insns[j]
6954 = inpaddr_address_reload_insns[j]
6955 = output_reload_insns[j] = output_address_reload_insns[j]
6956 = outaddr_address_reload_insns[j]
6957 = other_output_reload_insns[j] = 0;
6958 other_input_address_reload_insns = 0;
6959 other_input_reload_insns = 0;
6960 operand_reload_insns = 0;
6961 other_operand_reload_insns = 0;
6963 /* Dump reloads into the dump file. */
6964 if (dump_file)
6966 fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
6967 debug_reload_to_stream (dump_file);
6970 /* Now output the instructions to copy the data into and out of the
6971 reload registers. Do these in the order that the reloads were reported,
6972 since reloads of base and index registers precede reloads of operands
6973 and the operands may need the base and index registers reloaded. */
6975 for (j = 0; j < n_reloads; j++)
6977 if (rld[j].reg_rtx
6978 && REGNO (rld[j].reg_rtx) < FIRST_PSEUDO_REGISTER)
6979 new_spill_reg_store[REGNO (rld[j].reg_rtx)] = 0;
6981 do_input_reload (chain, rld + j, j);
6982 do_output_reload (chain, rld + j, j);
6985 /* Now write all the insns we made for reloads in the order expected by
6986 the allocation functions. Prior to the insn being reloaded, we write
6987 the following reloads:
6989 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
6991 RELOAD_OTHER reloads.
6993 For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
6994 by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
6995 RELOAD_FOR_INPUT reload for the operand.
6997 RELOAD_FOR_OPADDR_ADDRS reloads.
6999 RELOAD_FOR_OPERAND_ADDRESS reloads.
7001 After the insn being reloaded, we write the following:
7003 For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
7004 by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
7005 RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
7006 reloads for the operand. The RELOAD_OTHER output reloads are
7007 output in descending order by reload number. */
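/* A rough illustration (not part of the original source): for an insn whose
   operand 1 is an input needing an address reload and whose operand 0 is an
   output, and assuming no other reload categories are used, the emitted
   stream would look like

       inpaddr_address_reload_insns[1]   (address of the input address)
       input_address_reload_insns[1]     (address of operand 1)
       input_reload_insns[1]             (operand 1 itself)
       operand_reload_insns              (operand-address reloads)
       INSN
       outaddr_address_reload_insns[0]
       output_address_reload_insns[0]
       output_reload_insns[0]            (store the result back)  */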
7009 emit_insn_before (other_input_address_reload_insns, insn);
7010 emit_insn_before (other_input_reload_insns, insn);
7012 for (j = 0; j < reload_n_operands; j++)
7014 emit_insn_before (inpaddr_address_reload_insns[j], insn);
7015 emit_insn_before (input_address_reload_insns[j], insn);
7016 emit_insn_before (input_reload_insns[j], insn);
7019 emit_insn_before (other_operand_reload_insns, insn);
7020 emit_insn_before (operand_reload_insns, insn);
7022 for (j = 0; j < reload_n_operands; j++)
7024 rtx x = emit_insn_after (outaddr_address_reload_insns[j], insn);
7025 x = emit_insn_after (output_address_reload_insns[j], x);
7026 x = emit_insn_after (output_reload_insns[j], x);
7027 emit_insn_after (other_output_reload_insns[j], x);
7030 /* For all the spill regs newly reloaded in this instruction,
7031 record what they were reloaded from, so subsequent instructions
7032 can inherit the reloads.
7034 Update spill_reg_store for the reloads of this insn.
7035 Copy the elements that were updated in the loop above. */
7037 for (j = 0; j < n_reloads; j++)
7039 int r = reload_order[j];
7040 int i = reload_spill_index[r];
7042 /* If this is a non-inherited input reload from a pseudo, we must
7043 clear any memory of a previous store to the same pseudo. Only do
7044 something if there will not be an output reload for the pseudo
7045 being reloaded. */
7046 if (rld[r].in_reg != 0
7047 && ! (reload_inherited[r] || reload_override_in[r]))
7049 rtx reg = rld[r].in_reg;
7051 if (GET_CODE (reg) == SUBREG)
7052 reg = SUBREG_REG (reg);
7054 if (REG_P (reg)
7055 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
7056 && ! reg_has_output_reload[REGNO (reg)])
7058 int nregno = REGNO (reg);
7060 if (reg_last_reload_reg[nregno])
7062 int last_regno = REGNO (reg_last_reload_reg[nregno]);
7064 if (reg_reloaded_contents[last_regno] == nregno)
7065 spill_reg_store[last_regno] = 0;
7070 /* I is nonneg if this reload used a register.
7071 If rld[r].reg_rtx is 0, this is an optional reload
7072 that we opted to ignore. */
7074 if (i >= 0 && rld[r].reg_rtx != 0)
7076 int nr = hard_regno_nregs[i][GET_MODE (rld[r].reg_rtx)];
7077 int k;
7078 int part_reaches_end = 0;
7079 int all_reaches_end = 1;
7081 /* For a multi-register reload, we need to check whether all or part
7082 of the value lives to the end. */
7083 for (k = 0; k < nr; k++)
7085 if (reload_reg_reaches_end_p (i + k, rld[r].opnum,
7086 rld[r].when_needed))
7087 part_reaches_end = 1;
7088 else
7089 all_reaches_end = 0;
7092 /* Ignore reloads that don't reach the end of the insn in their
7093 entirety. */
7094 if (all_reaches_end)
7096 /* First, clear out memory of what used to be in this spill reg.
7097 If consecutive registers are used, clear them all. */
7099 for (k = 0; k < nr; k++)
7101 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
7102 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered, i + k);
7105 /* Maybe the spill reg contains a copy of reload_out. */
7106 if (rld[r].out != 0
7107 && (REG_P (rld[r].out)
7108 #ifdef AUTO_INC_DEC
7109 || ! rld[r].out_reg
7110 #endif
7111 || REG_P (rld[r].out_reg)))
7113 rtx out = (REG_P (rld[r].out)
7114 ? rld[r].out
7115 : rld[r].out_reg
7116 ? rld[r].out_reg
7117 /* AUTO_INC */ : XEXP (rld[r].in_reg, 0));
7118 int nregno = REGNO (out);
7119 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
7120 : hard_regno_nregs[nregno]
7121 [GET_MODE (rld[r].reg_rtx)]);
7122 bool piecemeal;
7124 spill_reg_store[i] = new_spill_reg_store[i];
7125 spill_reg_stored_to[i] = out;
7126 reg_last_reload_reg[nregno] = rld[r].reg_rtx;
7128 piecemeal = (nregno < FIRST_PSEUDO_REGISTER
7129 && nr == nnr
7130 && inherit_piecemeal_p (r, nregno));
7132 /* If NREGNO is a hard register, it may occupy more than
7133 one register. If it does, say what is in the
7134 rest of the registers assuming that both registers
7135 agree on how many words the object takes. If not,
7136 invalidate the subsequent registers. */
7138 if (nregno < FIRST_PSEUDO_REGISTER)
7139 for (k = 1; k < nnr; k++)
7140 reg_last_reload_reg[nregno + k]
7141 = (piecemeal
7142 ? regno_reg_rtx[REGNO (rld[r].reg_rtx) + k]
7143 : 0);
7145 /* Now do the inverse operation. */
7146 for (k = 0; k < nr; k++)
7148 CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k);
7149 reg_reloaded_contents[i + k]
7150 = (nregno >= FIRST_PSEUDO_REGISTER || !piecemeal
7151 ? nregno
7152 : nregno + k);
7153 reg_reloaded_insn[i + k] = insn;
7154 SET_HARD_REG_BIT (reg_reloaded_valid, i + k);
7155 if (HARD_REGNO_CALL_PART_CLOBBERED (i + k, GET_MODE (out)))
7156 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered, i + k);
7160 /* Maybe the spill reg contains a copy of reload_in. Only do
7161 something if there will not be an output reload for
7162 the register being reloaded. */
7163 else if (rld[r].out_reg == 0
7164 && rld[r].in != 0
7165 && ((REG_P (rld[r].in)
7166 && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER
7167 && ! reg_has_output_reload[REGNO (rld[r].in)])
7168 || (REG_P (rld[r].in_reg)
7169 && ! reg_has_output_reload[REGNO (rld[r].in_reg)]))
7170 && ! reg_set_p (rld[r].reg_rtx, PATTERN (insn)))
7172 int nregno;
7173 int nnr;
7174 rtx in;
7175 bool piecemeal;
7177 if (REG_P (rld[r].in)
7178 && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER)
7179 in = rld[r].in;
7180 else if (REG_P (rld[r].in_reg))
7181 in = rld[r].in_reg;
7182 else
7183 in = XEXP (rld[r].in_reg, 0);
7184 nregno = REGNO (in);
7186 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
7187 : hard_regno_nregs[nregno]
7188 [GET_MODE (rld[r].reg_rtx)]);
7190 reg_last_reload_reg[nregno] = rld[r].reg_rtx;
7192 piecemeal = (nregno < FIRST_PSEUDO_REGISTER
7193 && nr == nnr
7194 && inherit_piecemeal_p (r, nregno));
7196 if (nregno < FIRST_PSEUDO_REGISTER)
7197 for (k = 1; k < nnr; k++)
7198 reg_last_reload_reg[nregno + k]
7199 = (piecemeal
7200 ? regno_reg_rtx[REGNO (rld[r].reg_rtx) + k]
7201 : 0);
7203 /* Unless we inherited this reload, show we haven't
7204 recently done a store.
7205 Previous stores of inherited auto_inc expressions
7206 also have to be discarded. */
7207 if (! reload_inherited[r]
7208 || (rld[r].out && ! rld[r].out_reg))
7209 spill_reg_store[i] = 0;
7211 for (k = 0; k < nr; k++)
7213 CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k);
7214 reg_reloaded_contents[i + k]
7215 = (nregno >= FIRST_PSEUDO_REGISTER || !piecemeal
7216 ? nregno
7217 : nregno + k);
7218 reg_reloaded_insn[i + k] = insn;
7219 SET_HARD_REG_BIT (reg_reloaded_valid, i + k);
7220 if (HARD_REGNO_CALL_PART_CLOBBERED (i + k, GET_MODE (in)))
7221 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered, i + k);
7226 /* However, if part of the reload reaches the end, then we must
7227 invalidate the old info for the part that survives to the end. */
7228 else if (part_reaches_end)
7230 for (k = 0; k < nr; k++)
7231 if (reload_reg_reaches_end_p (i + k,
7232 rld[r].opnum,
7233 rld[r].when_needed))
7234 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
7238 /* The following if-statement was #if 0'd in 1.34 (or before...).
7239 It's reenabled in 1.35 because supposedly nothing else
7240 deals with this problem. */
7242 /* If a register gets output-reloaded from a non-spill register,
7243 that invalidates any previous reloaded copy of it.
7244 But forget_old_reloads_1 won't get to see it, because
7245 it thinks only about the original insn. So invalidate it here. */
7246 if (i < 0 && rld[r].out != 0
7247 && (REG_P (rld[r].out)
7248 || (MEM_P (rld[r].out)
7249 && REG_P (rld[r].out_reg))))
7251 rtx out = (REG_P (rld[r].out)
7252 ? rld[r].out : rld[r].out_reg);
7253 int nregno = REGNO (out);
7254 if (nregno >= FIRST_PSEUDO_REGISTER)
7256 rtx src_reg, store_insn = NULL_RTX;
7258 reg_last_reload_reg[nregno] = 0;
7260 /* If we can find a hard register that is stored, record
7261 the storing insn so that we may delete this insn with
7262 delete_output_reload. */
7263 src_reg = rld[r].reg_rtx;
7265 /* If this is an optional reload, try to find the source reg
7266 from an input reload. */
7267 if (! src_reg)
7269 rtx set = single_set (insn);
7270 if (set && SET_DEST (set) == rld[r].out)
7272 int k;
7274 src_reg = SET_SRC (set);
7275 store_insn = insn;
7276 for (k = 0; k < n_reloads; k++)
7278 if (rld[k].in == src_reg)
7280 src_reg = rld[k].reg_rtx;
7281 break;
7286 else
7287 store_insn = new_spill_reg_store[REGNO (src_reg)];
7288 if (src_reg && REG_P (src_reg)
7289 && REGNO (src_reg) < FIRST_PSEUDO_REGISTER)
7291 int src_regno = REGNO (src_reg);
7292 int nr = hard_regno_nregs[src_regno][rld[r].mode];
7293 /* Where to find a death note varies with
7294 PRESERVE_DEATH_INFO_REGNO_P. The condition is not
7295 necessarily checked exactly in the code that moves
7296 notes, so just check both locations. */
7297 rtx note = find_regno_note (insn, REG_DEAD, src_regno);
7298 if (! note && store_insn)
7299 note = find_regno_note (store_insn, REG_DEAD, src_regno);
7300 while (nr-- > 0)
7302 spill_reg_store[src_regno + nr] = store_insn;
7303 spill_reg_stored_to[src_regno + nr] = out;
7304 reg_reloaded_contents[src_regno + nr] = nregno;
7305 reg_reloaded_insn[src_regno + nr] = store_insn;
7306 CLEAR_HARD_REG_BIT (reg_reloaded_dead, src_regno + nr);
7307 SET_HARD_REG_BIT (reg_reloaded_valid, src_regno + nr);
7308 if (HARD_REGNO_CALL_PART_CLOBBERED (src_regno + nr,
7309 GET_MODE (src_reg)))
7310 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
7311 src_regno + nr);
7312 SET_HARD_REG_BIT (reg_is_output_reload, src_regno + nr);
7313 if (note)
7314 SET_HARD_REG_BIT (reg_reloaded_died, src_regno);
7315 else
7316 CLEAR_HARD_REG_BIT (reg_reloaded_died, src_regno);
7318 reg_last_reload_reg[nregno] = src_reg;
7319 /* We have to set reg_has_output_reload here, or else
7320 forget_old_reloads_1 will clear reg_last_reload_reg
7321 right away. */
7322 reg_has_output_reload[nregno] = 1;
7325 else
7327 int num_regs = hard_regno_nregs[nregno][GET_MODE (rld[r].out)];
7329 while (num_regs-- > 0)
7330 reg_last_reload_reg[nregno + num_regs] = 0;
7334 IOR_HARD_REG_SET (reg_reloaded_dead, reg_reloaded_died);
7337 /* Emit code to perform a reload from IN (which may be a reload register) to
7338 OUT (which may also be a reload register). IN or OUT is from operand
7339 OPNUM with reload type TYPE.
7341 Returns first insn emitted. */
7343 static rtx
7344 gen_reload (rtx out, rtx in, int opnum, enum reload_type type)
7346 rtx last = get_last_insn ();
7347 rtx tem;
7349 /* If IN is a paradoxical SUBREG, remove it and try to put the
7350 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
7351 if (GET_CODE (in) == SUBREG
7352 && (GET_MODE_SIZE (GET_MODE (in))
7353 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
7354 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
7355 in = SUBREG_REG (in), out = tem;
7356 else if (GET_CODE (out) == SUBREG
7357 && (GET_MODE_SIZE (GET_MODE (out))
7358 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
7359 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
7360 out = SUBREG_REG (out), in = tem;
7362 /* How to do this reload can get quite tricky. Normally, we are being
7363 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
7364 register that didn't get a hard register. In that case we can just
7365 call emit_move_insn.
7367 We can also be asked to reload a PLUS that adds a register or a MEM to
7368 another register, constant or MEM. This can occur during frame pointer
7369 elimination and while reloading addresses. This case is handled by
7370 trying to emit a single insn to perform the add. If it is not valid,
7371 we use a two insn sequence.
7373 Finally, we could be called to handle an 'o' constraint by putting
7374 an address into a register. In that case, we first try to do this
7375 with a named pattern of "reload_load_address". If no such pattern
7376 exists, we just emit a SET insn and hope for the best (it will normally
7377 be valid on machines that use 'o').
7379 This entire process is made complex because reload will never
7380 process the insns we generate here and so we must ensure that
7381 they will fit their constraints and also by the fact that parts of
7382 IN might be being reloaded separately and replaced with spill registers.
7383 Because of this, we are, in some sense, just guessing the right approach
7384 here. The one listed above seems to work.
7386 ??? At some point, this whole thing needs to be rethought. */
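/* A minimal sketch of the PLUS case handled below (hypothetical registers,
   not part of the original source).  During frame pointer elimination we
   may be asked to reload

       (set (reg:SI r_reload) (plus:SI (reg:SI fp) (const_int 12)))

   If the target recognizes this as a single add and it satisfies its
   constraints, that one insn is used.  Otherwise the fallback further down
   emits

       (set (reg:SI r_reload) (const_int 12))
       (set (reg:SI r_reload) (plus:SI (reg:SI r_reload) (reg:SI fp)))

   i.e. move one operand into the reload register, then add the other.  */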
7388 if (GET_CODE (in) == PLUS
7389 && (REG_P (XEXP (in, 0))
7390 || GET_CODE (XEXP (in, 0)) == SUBREG
7391 || MEM_P (XEXP (in, 0)))
7392 && (REG_P (XEXP (in, 1))
7393 || GET_CODE (XEXP (in, 1)) == SUBREG
7394 || CONSTANT_P (XEXP (in, 1))
7395 || MEM_P (XEXP (in, 1))))
7397 /* We need to compute the sum of a register or a MEM and another
7398 register, constant, or MEM, and put it into the reload
7399 register. The best possible way of doing this is if the machine
7400 has a three-operand ADD insn that accepts the required operands.
7402 The simplest approach is to try to generate such an insn and see if it
7403 is recognized and matches its constraints. If so, it can be used.
7405 It might be better not to actually emit the insn unless it is valid,
7406 but we need to pass the insn as an operand to `recog' and
7407 `extract_insn' and it is simpler to emit and then delete the insn if
7408 not valid than to dummy things up. */
7410 rtx op0, op1, tem, insn;
7411 int code;
7413 op0 = find_replacement (&XEXP (in, 0));
7414 op1 = find_replacement (&XEXP (in, 1));
7416 /* Since constraint checking is strict, commutativity won't be
7417 checked, so we need to do that here to avoid spurious failure
7418 if the add instruction is two-address and the second operand
7419 of the add is the same as the reload reg, which is frequently
7420 the case. If the insn would be A = B + A, rearrange it so
7421 it will be A = A + B as constrain_operands expects. */
7423 if (REG_P (XEXP (in, 1))
7424 && REGNO (out) == REGNO (XEXP (in, 1)))
7425 tem = op0, op0 = op1, op1 = tem;
7427 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
7428 in = gen_rtx_PLUS (GET_MODE (in), op0, op1);
7430 insn = emit_insn (gen_rtx_SET (VOIDmode, out, in));
7431 code = recog_memoized (insn);
7433 if (code >= 0)
7435 extract_insn (insn);
7436 /* We want constrain_operands to treat this insn strictly in
7437 its validity determination, i.e., the way it would after reload
7438 has completed. */
7439 if (constrain_operands (1))
7440 return insn;
7443 delete_insns_since (last);
7445 /* If that failed, we must use a conservative two-insn sequence.
7447 Use a move to copy one operand into the reload register. Prefer
7448 to reload a constant, MEM or pseudo since the move patterns can
7449 handle an arbitrary operand. If OP1 is not a constant, MEM or
7450 pseudo and OP1 is not a valid operand for an add instruction, then
7451 reload OP1.
7453 After reloading one of the operands into the reload register, add
7454 the reload register to the output register.
7456 If there is another way to do this for a specific machine, a
7457 DEFINE_PEEPHOLE should be specified that recognizes the sequence
7458 we emit below. */
7460 code = (int) add_optab->handlers[(int) GET_MODE (out)].insn_code;
7462 if (CONSTANT_P (op1) || MEM_P (op1) || GET_CODE (op1) == SUBREG
7463 || (REG_P (op1)
7464 && REGNO (op1) >= FIRST_PSEUDO_REGISTER)
7465 || (code != CODE_FOR_nothing
7466 && ! ((*insn_data[code].operand[2].predicate)
7467 (op1, insn_data[code].operand[2].mode))))
7468 tem = op0, op0 = op1, op1 = tem;
7470 gen_reload (out, op0, opnum, type);
7472 /* If OP0 and OP1 are the same, we can use OUT for OP1.
7473 This fixes a problem on the 32K where the stack pointer cannot
7474 be used as an operand of an add insn. */
7476 if (rtx_equal_p (op0, op1))
7477 op1 = out;
7479 insn = emit_insn (gen_add2_insn (out, op1));
7481 /* If that failed, copy the address register to the reload register.
7482 Then add the constant to the reload register. */
7484 code = recog_memoized (insn);
7486 if (code >= 0)
7488 extract_insn (insn);
7489 /* We want constrain_operands to treat this insn strictly in
7490 its validity determination, i.e., the way it would after reload
7491 has completed. */
7492 if (constrain_operands (1))
7494 /* Add a REG_EQUIV note so that find_equiv_reg can find it. */
7495 REG_NOTES (insn)
7496 = gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
7497 return insn;
7501 delete_insns_since (last);
7503 gen_reload (out, op1, opnum, type);
7504 insn = emit_insn (gen_add2_insn (out, op0));
7505 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
7508 #ifdef SECONDARY_MEMORY_NEEDED
7509 /* If we need a memory location to do the move, do it that way. */
7510 else if ((REG_P (in) || GET_CODE (in) == SUBREG)
7511 && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER
7512 && (REG_P (out) || GET_CODE (out) == SUBREG)
7513 && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
7514 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (reg_or_subregno (in)),
7515 REGNO_REG_CLASS (reg_or_subregno (out)),
7516 GET_MODE (out)))
7518 /* Get the memory to use and rewrite both registers to its mode. */
7519 rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
7521 if (GET_MODE (loc) != GET_MODE (out))
7522 out = gen_rtx_REG (GET_MODE (loc), REGNO (out));
7524 if (GET_MODE (loc) != GET_MODE (in))
7525 in = gen_rtx_REG (GET_MODE (loc), REGNO (in));
7527 gen_reload (loc, in, opnum, type);
7528 gen_reload (out, loc, opnum, type);
7530 #endif
7532 /* If IN is a simple operand, use gen_move_insn. */
7533 else if (OBJECT_P (in) || GET_CODE (in) == SUBREG)
7534 emit_insn (gen_move_insn (out, in));
7536 #ifdef HAVE_reload_load_address
7537 else if (HAVE_reload_load_address)
7538 emit_insn (gen_reload_load_address (out, in));
7539 #endif
7541 /* Otherwise, just write (set OUT IN) and hope for the best. */
7542 else
7543 emit_insn (gen_rtx_SET (VOIDmode, out, in));
7545 /* Return the first insn emitted.
7546 We cannot just return get_last_insn, because there may have
7547 been multiple instructions emitted. Also note that gen_move_insn may
7548 emit more than one insn itself, so we cannot assume that there is one
7549 insn emitted per emit_insn_before call. */
7551 return last ? NEXT_INSN (last) : get_insns ();
7554 /* Delete a previously made output-reload whose result we now believe
7555 is not needed. First we double-check.
7557 INSN is the insn now being processed.
7558 LAST_RELOAD_REG is the hard register number for which we want to delete
7559 the last output reload.
7560 J is the reload-number that originally used REG. The caller has made
7561 certain that reload J doesn't use REG any longer for input. */
7563 static void
7564 delete_output_reload (rtx insn, int j, int last_reload_reg)
7566 rtx output_reload_insn = spill_reg_store[last_reload_reg];
7567 rtx reg = spill_reg_stored_to[last_reload_reg];
7568 int k;
7569 int n_occurrences;
7570 int n_inherited = 0;
7571 rtx i1;
7572 rtx substed;
7574 /* It is possible that this reload was only used to set another reload
7575 that we eliminated earlier, thus deleting this instruction too. */
7576 if (INSN_DELETED_P (output_reload_insn))
7577 return;
7579 /* Get the raw pseudo-register referred to. */
7581 while (GET_CODE (reg) == SUBREG)
7582 reg = SUBREG_REG (reg);
7583 substed = reg_equiv_memory_loc[REGNO (reg)];
7585 /* This is unsafe if the operand occurs more often in the current
7586 insn than it is inherited. */
7587 for (k = n_reloads - 1; k >= 0; k--)
7589 rtx reg2 = rld[k].in;
7590 if (! reg2)
7591 continue;
7592 if (MEM_P (reg2) || reload_override_in[k])
7593 reg2 = rld[k].in_reg;
7594 #ifdef AUTO_INC_DEC
7595 if (rld[k].out && ! rld[k].out_reg)
7596 reg2 = XEXP (rld[k].in_reg, 0);
7597 #endif
7598 while (GET_CODE (reg2) == SUBREG)
7599 reg2 = SUBREG_REG (reg2);
7600 if (rtx_equal_p (reg2, reg))
7602 if (reload_inherited[k] || reload_override_in[k] || k == j)
7604 n_inherited++;
7605 reg2 = rld[k].out_reg;
7606 if (! reg2)
7607 continue;
7608 while (GET_CODE (reg2) == SUBREG)
7609 reg2 = XEXP (reg2, 0);
7610 if (rtx_equal_p (reg2, reg))
7611 n_inherited++;
7613 else
7614 return;
7617 n_occurrences = count_occurrences (PATTERN (insn), reg, 0);
7618 if (substed)
7619 n_occurrences += count_occurrences (PATTERN (insn),
7620 eliminate_regs (substed, 0,
7621 NULL_RTX), 0);
7622 if (n_occurrences > n_inherited)
7623 return;
7625 /* If the pseudo-reg we are reloading is no longer referenced
7626 anywhere between the store into it and here,
7627 and we're within the same basic block, then the value can only
7628 pass through the reload reg and end up here.
7629 Otherwise, give up--return. */
7630 for (i1 = NEXT_INSN (output_reload_insn);
7631 i1 != insn; i1 = NEXT_INSN (i1))
7633 if (NOTE_INSN_BASIC_BLOCK_P (i1))
7634 return;
7635 if ((NONJUMP_INSN_P (i1) || CALL_P (i1))
7636 && reg_mentioned_p (reg, PATTERN (i1)))
7638 /* If this is a USE in front of INSN, we only have to check that
7639 there are no more references than accounted for by inheritance. */
7640 while (NONJUMP_INSN_P (i1) && GET_CODE (PATTERN (i1)) == USE)
7642 n_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
7643 i1 = NEXT_INSN (i1);
7645 if (n_occurrences <= n_inherited && i1 == insn)
7646 break;
7647 return;
7651 /* We will be deleting the insn. Remove the spill reg information. */
7652 for (k = hard_regno_nregs[last_reload_reg][GET_MODE (reg)]; k-- > 0; )
7654 spill_reg_store[last_reload_reg + k] = 0;
7655 spill_reg_stored_to[last_reload_reg + k] = 0;
7658 /* The caller has already checked that REG dies or is set in INSN.
7659 It has also checked that we are optimizing, and thus some
7660 inaccuracies in the debugging information are acceptable.
7661 So we could just delete output_reload_insn. But in some cases
7662 we can improve the debugging information without sacrificing
7663 optimization - maybe even improving the code: See if the pseudo
7664 reg has been completely replaced with reload regs. If so, delete
7665 the store insn and forget we had a stack slot for the pseudo. */
7666 if (rld[j].out != rld[j].in
7667 && REG_N_DEATHS (REGNO (reg)) == 1
7668 && REG_N_SETS (REGNO (reg)) == 1
7669 && REG_BASIC_BLOCK (REGNO (reg)) >= 0
7670 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
7672 rtx i2;
7674 /* We know that it was used only between here and the beginning of
7675 the current basic block. (We also know that the last use before
7676 INSN was the output reload we are thinking of deleting, but never
7677 mind that.) Search that range; see if any ref remains. */
7678 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7680 rtx set = single_set (i2);
7682 /* Uses which just store in the pseudo don't count,
7683 since if they are the only uses, they are dead. */
7684 if (set != 0 && SET_DEST (set) == reg)
7685 continue;
7686 if (LABEL_P (i2)
7687 || JUMP_P (i2))
7688 break;
7689 if ((NONJUMP_INSN_P (i2) || CALL_P (i2))
7690 && reg_mentioned_p (reg, PATTERN (i2)))
7692 /* Some other ref remains; just delete the output reload we
7693 know to be dead. */
7694 delete_address_reloads (output_reload_insn, insn);
7695 delete_insn (output_reload_insn);
7696 return;
7700 /* Delete the now-dead stores into this pseudo. Note that this
7701 loop also takes care of deleting output_reload_insn. */
7702 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7704 rtx set = single_set (i2);
7706 if (set != 0 && SET_DEST (set) == reg)
7708 delete_address_reloads (i2, insn);
7709 delete_insn (i2);
7711 if (LABEL_P (i2)
7712 || JUMP_P (i2))
7713 break;
7716 /* For the debugging info, say the pseudo lives in this reload reg. */
7717 reg_renumber[REGNO (reg)] = REGNO (rld[j].reg_rtx);
7718 alter_reg (REGNO (reg), -1);
7720 else
7722 delete_address_reloads (output_reload_insn, insn);
7723 delete_insn (output_reload_insn);
7727 /* We are going to delete DEAD_INSN. Recursively delete loads of
7728 reload registers used in DEAD_INSN that are not used till CURRENT_INSN.
7729 CURRENT_INSN is being reloaded, so we have to check its reloads too. */
7730 static void
7731 delete_address_reloads (rtx dead_insn, rtx current_insn)
7733 rtx set = single_set (dead_insn);
7734 rtx set2, dst, prev, next;
7735 if (set)
7737 rtx dst = SET_DEST (set);
7738 if (MEM_P (dst))
7739 delete_address_reloads_1 (dead_insn, XEXP (dst, 0), current_insn);
7741 /* If we deleted the store from a reloaded post_{in,de}c expression,
7742 we can delete the matching adds. */
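/* Hypothetical example of the pattern matched below (not from the original
   source): a reloaded post_inc store may have been emitted as

       (set (reg rR) (plus (reg rR) (const_int 4)))     <- PREV
       (set <location> (reg rR))                        <- DEAD_INSN
       (set (reg rR) (plus (reg rR) (const_int -4)))    <- NEXT

   Once DEAD_INSN is gone, the increment and the cancelling decrement in
   PREV and NEXT are useless and are deleted as well.  */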
7743 prev = PREV_INSN (dead_insn);
7744 next = NEXT_INSN (dead_insn);
7745 if (! prev || ! next)
7746 return;
7747 set = single_set (next);
7748 set2 = single_set (prev);
7749 if (! set || ! set2
7750 || GET_CODE (SET_SRC (set)) != PLUS || GET_CODE (SET_SRC (set2)) != PLUS
7751 || GET_CODE (XEXP (SET_SRC (set), 1)) != CONST_INT
7752 || GET_CODE (XEXP (SET_SRC (set2), 1)) != CONST_INT)
7753 return;
7754 dst = SET_DEST (set);
7755 if (! rtx_equal_p (dst, SET_DEST (set2))
7756 || ! rtx_equal_p (dst, XEXP (SET_SRC (set), 0))
7757 || ! rtx_equal_p (dst, XEXP (SET_SRC (set2), 0))
7758 || (INTVAL (XEXP (SET_SRC (set), 1))
7759 != -INTVAL (XEXP (SET_SRC (set2), 1))))
7760 return;
7761 delete_related_insns (prev);
7762 delete_related_insns (next);
7765 /* Subfunction of delete_address_reloads: process registers found in X. */
7766 static void
7767 delete_address_reloads_1 (rtx dead_insn, rtx x, rtx current_insn)
7769 rtx prev, set, dst, i2;
7770 int i, j;
7771 enum rtx_code code = GET_CODE (x);
7773 if (code != REG)
7775 const char *fmt = GET_RTX_FORMAT (code);
7776 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7778 if (fmt[i] == 'e')
7779 delete_address_reloads_1 (dead_insn, XEXP (x, i), current_insn);
7780 else if (fmt[i] == 'E')
7782 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7783 delete_address_reloads_1 (dead_insn, XVECEXP (x, i, j),
7784 current_insn);
7787 return;
7790 if (spill_reg_order[REGNO (x)] < 0)
7791 return;
7793 /* Scan backwards for the insn that sets x. This might be far back, due
7794 to inheritance. */
7795 for (prev = PREV_INSN (dead_insn); prev; prev = PREV_INSN (prev))
7797 code = GET_CODE (prev);
7798 if (code == CODE_LABEL || code == JUMP_INSN)
7799 return;
7800 if (!INSN_P (prev))
7801 continue;
7802 if (reg_set_p (x, PATTERN (prev)))
7803 break;
7804 if (reg_referenced_p (x, PATTERN (prev)))
7805 return;
7807 if (! prev || INSN_UID (prev) < reload_first_uid)
7808 return;
7809 /* Check that PREV only sets the reload register. */
7810 set = single_set (prev);
7811 if (! set)
7812 return;
7813 dst = SET_DEST (set);
7814 if (!REG_P (dst)
7815 || ! rtx_equal_p (dst, x))
7816 return;
7817 if (! reg_set_p (dst, PATTERN (dead_insn)))
7819 /* Check if DST was used in a later insn -
7820 it might have been inherited. */
7821 for (i2 = NEXT_INSN (dead_insn); i2; i2 = NEXT_INSN (i2))
7823 if (LABEL_P (i2))
7824 break;
7825 if (! INSN_P (i2))
7826 continue;
7827 if (reg_referenced_p (dst, PATTERN (i2)))
7829 /* If there is a reference to the register in the current insn,
7830 it might be loaded in a non-inherited reload. If no other
7831 reload uses it, that means the register is set before
7832 referenced. */
7833 if (i2 == current_insn)
7835 for (j = n_reloads - 1; j >= 0; j--)
7836 if ((rld[j].reg_rtx == dst && reload_inherited[j])
7837 || reload_override_in[j] == dst)
7838 return;
7839 for (j = n_reloads - 1; j >= 0; j--)
7840 if (rld[j].in && rld[j].reg_rtx == dst)
7841 break;
7842 if (j >= 0)
7843 break;
7845 return;
7847 if (JUMP_P (i2))
7848 break;
7849 /* If DST is still live at CURRENT_INSN, check if it is used for
7850 any reload. Note that even if CURRENT_INSN sets DST, we still
7851 have to check the reloads. */
7852 if (i2 == current_insn)
7854 for (j = n_reloads - 1; j >= 0; j--)
7855 if ((rld[j].reg_rtx == dst && reload_inherited[j])
7856 || reload_override_in[j] == dst)
7857 return;
7858 /* ??? We can't finish the loop here, because dst might be
7859 allocated to a pseudo in this block if no reload in this
7860 block needs any of the classes containing DST - see
7861 spill_hard_reg. There is no easy way to tell this, so we
7862 have to scan till the end of the basic block. */
7864 if (reg_set_p (dst, PATTERN (i2)))
7865 break;
7868 delete_address_reloads_1 (prev, SET_SRC (set), current_insn);
7869 reg_reloaded_contents[REGNO (dst)] = -1;
7870 delete_insn (prev);
7873 /* Output reload-insns to reload VALUE into RELOADREG.
7874 VALUE is an autoincrement or autodecrement RTX whose operand
7875 is a register or memory location;
7876 so reloading involves incrementing that location.
7877 IN is either identical to VALUE, or some cheaper place to reload from.
7879 INC_AMOUNT is the number to increment or decrement by (always positive).
7880 This cannot be deduced from VALUE.
7882 Return the instruction that stores into RELOADREG. */
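/* A rough, hypothetical illustration of the two strategies used below (not
   part of the original source), for reloading an address that increments
   register rX by 4 when the in-place add cannot be used directly:

       pre-increment:   reloadreg = rX; reloadreg += 4; rX = reloadreg;
       post-increment:  reloadreg = rX; reloadreg += 4; rX = reloadreg;
                        reloadreg -= 4;

   so that for a post-increment RELOADREG ends up holding the value from
   before the increment, which is what the address needs.  */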
7884 static rtx
7885 inc_for_reload (rtx reloadreg, rtx in, rtx value, int inc_amount)
7887 /* REG or MEM to be copied and incremented. */
7888 rtx incloc = XEXP (value, 0);
7889 /* Nonzero if increment after copying. */
7890 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
7891 rtx last;
7892 rtx inc;
7893 rtx add_insn;
7894 int code;
7895 rtx store;
7896 rtx real_in = in == value ? XEXP (in, 0) : in;
7898 /* No hard register is equivalent to this register after
7899 inc/dec operation. If REG_LAST_RELOAD_REG were nonzero,
7900 we could inc/dec that register as well (maybe even using it for
7901 the source), but I'm not sure it's worth worrying about. */
7902 if (REG_P (incloc))
7903 reg_last_reload_reg[REGNO (incloc)] = 0;
7905 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
7906 inc_amount = -inc_amount;
7908 inc = GEN_INT (inc_amount);
7910 /* If this is post-increment, first copy the location to the reload reg. */
7911 if (post && real_in != reloadreg)
7912 emit_insn (gen_move_insn (reloadreg, real_in));
7914 if (in == value)
7916 /* See if we can directly increment INCLOC. Use a method similar to
7917 that in gen_reload. */
7919 last = get_last_insn ();
7920 add_insn = emit_insn (gen_rtx_SET (VOIDmode, incloc,
7921 gen_rtx_PLUS (GET_MODE (incloc),
7922 incloc, inc)));
7924 code = recog_memoized (add_insn);
7925 if (code >= 0)
7927 extract_insn (add_insn);
7928 if (constrain_operands (1))
7930 /* If this is a pre-increment and we have incremented the value
7931 where it lives, copy the incremented value to RELOADREG to
7932 be used as an address. */
7934 if (! post)
7935 emit_insn (gen_move_insn (reloadreg, incloc));
7937 return add_insn;
7940 delete_insns_since (last);
7943 /* If we couldn't do the increment directly, we must increment in RELOADREG.
7944 The way we do this depends on whether this is pre- or post-increment.
7945 For pre-increment, copy INCLOC to the reload register, increment it
7946 there, then save back. */
7948 if (! post)
7950 if (in != reloadreg)
7951 emit_insn (gen_move_insn (reloadreg, real_in));
7952 emit_insn (gen_add2_insn (reloadreg, inc));
7953 store = emit_insn (gen_move_insn (incloc, reloadreg));
7955 else
7957 /* Postincrement.
7958 Because this might be a jump insn or a compare, and because RELOADREG
7959 may not be available after the insn in an input reload, we must do
7960 the incrementation before the insn being reloaded for.
7962 We have already copied IN to RELOADREG. Increment the copy in
7963 RELOADREG, save that back, then decrement RELOADREG so it has
7964 the original value. */
7966 emit_insn (gen_add2_insn (reloadreg, inc));
7967 store = emit_insn (gen_move_insn (incloc, reloadreg));
7968 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
7971 return store;
7974 #ifdef AUTO_INC_DEC
7975 static void
7976 add_auto_inc_notes (rtx insn, rtx x)
7978 enum rtx_code code = GET_CODE (x);
7979 const char *fmt;
7980 int i, j;
7982 if (code == MEM && auto_inc_p (XEXP (x, 0)))
7984 REG_NOTES (insn)
7985 = gen_rtx_EXPR_LIST (REG_INC, XEXP (XEXP (x, 0), 0), REG_NOTES (insn));
7986 return;
7989 /* Scan all the operand sub-expressions. */
7990 fmt = GET_RTX_FORMAT (code);
7991 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7993 if (fmt[i] == 'e')
7994 add_auto_inc_notes (insn, XEXP (x, i));
7995 else if (fmt[i] == 'E')
7996 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7997 add_auto_inc_notes (insn, XVECEXP (x, i, j));
8000 #endif
8002 /* Copy EH notes from an insn to its reloads. */
8003 static void
8004 copy_eh_notes (rtx insn, rtx x)
8006 rtx eh_note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
8007 if (eh_note)
8009 for (; x != 0; x = NEXT_INSN (x))
8011 if (may_trap_p (PATTERN (x)))
8012 REG_NOTES (x)
8013 = gen_rtx_EXPR_LIST (REG_EH_REGION, XEXP (eh_note, 0),
8014 REG_NOTES (x));
8019 /* This is used by the reload pass, which emits some instructions after
8020 abnormal calls, moving the basic block end, when in fact it wants to emit
8021 them on the edge. Look for abnormal call edges, search backward for the
8022 proper call and fix the damage.
8024 Instructions that throw exceptions internally are handled similarly. */
8025 void
8026 fixup_abnormal_edges (void)
8028 bool inserted = false;
8029 basic_block bb;
8031 FOR_EACH_BB (bb)
8033 edge e;
8034 edge_iterator ei;
8036 /* Look for cases we are interested in - calls or instructions causing
8037 exceptions. */
8038 FOR_EACH_EDGE (e, ei, bb->succs)
8040 if (e->flags & EDGE_ABNORMAL_CALL)
8041 break;
8042 if ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
8043 == (EDGE_ABNORMAL | EDGE_EH))
8044 break;
8046 if (e && !CALL_P (BB_END (bb))
8047 && !can_throw_internal (BB_END (bb)))
8049 rtx insn = BB_END (bb), stop = NEXT_INSN (BB_END (bb));
8050 rtx next;
8051 FOR_EACH_EDGE (e, ei, bb->succs)
8052 if (e->flags & EDGE_FALLTHRU)
8053 break;
8054 /* Get past the new insns generated. Allow notes, as the insns may
8055 already have been deleted. */
8056 while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
8057 && !can_throw_internal (insn)
8058 && insn != BB_HEAD (bb))
8059 insn = PREV_INSN (insn);
8060 gcc_assert (CALL_P (insn) || can_throw_internal (insn));
8061 BB_END (bb) = insn;
8062 inserted = true;
8063 insn = NEXT_INSN (insn);
8064 while (insn && insn != stop)
8066 next = NEXT_INSN (insn);
8067 if (INSN_P (insn))
8069 delete_insn (insn);
8071 /* Sometimes there's still the return value USE.
8072 If it's placed after a trapping call (i.e. that
8073 call is the last insn anyway), we have no fallthru
8074 edge. Simply delete this use and don't try to insert
8075 on the non-existent edge. */
8076 if (GET_CODE (PATTERN (insn)) != USE)
8078 /* We're not deleting it, we're moving it. */
8079 INSN_DELETED_P (insn) = 0;
8080 PREV_INSN (insn) = NULL_RTX;
8081 NEXT_INSN (insn) = NULL_RTX;
8083 insert_insn_on_edge (insn, e);
8086 insn = next;
8090 /* We've possibly turned a single trapping insn into multiple ones. */
8091 if (flag_non_call_exceptions)
8093 sbitmap blocks;
8094 blocks = sbitmap_alloc (last_basic_block);
8095 sbitmap_ones (blocks);
8096 find_many_sub_basic_blocks (blocks);
8098 if (inserted)
8099 commit_edge_insertions ();