gcc/reload1.c
1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
27 #include "machmode.h"
28 #include "hard-reg-set.h"
29 #include "rtl.h"
30 #include "tm_p.h"
31 #include "obstack.h"
32 #include "insn-config.h"
33 #include "flags.h"
34 #include "function.h"
35 #include "expr.h"
36 #include "optabs.h"
37 #include "regs.h"
38 #include "addresses.h"
39 #include "basic-block.h"
40 #include "reload.h"
41 #include "recog.h"
42 #include "output.h"
43 #include "real.h"
44 #include "toplev.h"
45 #include "except.h"
46 #include "tree.h"
47 #include "df.h"
48 #include "target.h"
49 #include "dse.h"
51 /* This file contains the reload pass of the compiler, which is
52 run after register allocation has been done. It checks that
53 each insn is valid (operands required to be in registers really
54 are in registers of the proper class) and fixes up invalid ones
55 by copying values temporarily into registers for the insns
56 that need them.
58 The results of register allocation are described by the vector
59 reg_renumber; the insns still contain pseudo regs, but reg_renumber
60 can be used to find which hard reg, if any, a pseudo reg is in.
62 The technique we always use is to free up a few hard regs that are
63 called ``reload regs'', and for each place where a pseudo reg
64 must be in a hard reg, copy it temporarily into one of the reload regs.
66 Reload regs are allocated locally for every instruction that needs
67 reloads. When there are pseudos which are allocated to a register that
68 has been chosen as a reload reg, such pseudos must be ``spilled''.
69 This means that they go to other hard regs, or to stack slots if no other
70 available hard regs can be found. Spilling can invalidate more
71 insns, requiring additional need for reloads, so we must keep checking
72 until the process stabilizes.
74 For machines with different classes of registers, we must keep track
75 of the register class needed for each reload, and make sure that
76 we allocate enough reload registers of each class.
78 The file reload.c contains the code that checks one insn for
79 validity and reports the reloads that it needs. This file
80 is in charge of scanning the entire rtl code, accumulating the
81 reload needs, spilling, assigning reload registers to use for
82 fixing up each insn, and generating the new insns to copy values
83 into the reload registers. */
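/* As an illustrative sketch only (the real control flow is in reload (),
   below, and is more involved): calculate_needs_all_insns runs find_reloads
   on every insn, select_reload_regs picks reload registers and may spill
   more pseudos, and the whole thing repeats until nothing changes; only
   then does reload_as_needed emit the actual copies:

       for (;;)
         {
           set_initial_elim_offsets ();
           calculate_needs_all_insns (global);
           select_reload_regs ();
           if (!something_changed)
             break;
         }
       reload_as_needed (global);  */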
85 /* During reload_as_needed, element N contains a REG rtx for the hard reg
86 into which reg N has been reloaded (perhaps for a previous insn). */
87 static rtx *reg_last_reload_reg;
89 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
90 for an output reload that stores into reg N. */
91 static regset_head reg_has_output_reload;
93 /* Indicates which hard regs are reload-registers for an output reload
94 in the current insn. */
95 static HARD_REG_SET reg_is_output_reload;
97 /* Element N is the constant value to which pseudo reg N is equivalent,
98 or zero if pseudo reg N is not equivalent to a constant.
99 find_reloads looks at this in order to replace pseudo reg N
100 with the constant it stands for. */
101 rtx *reg_equiv_constant;
103 /* Element N is an invariant value to which pseudo reg N is equivalent.
104 eliminate_regs_in_insn uses this to replace pseudos in particular
105 contexts. */
106 rtx *reg_equiv_invariant;
108 /* Element N is a memory location to which pseudo reg N is equivalent,
109 prior to any register elimination (such as frame pointer to stack
110 pointer). Depending on whether or not it is a valid address, this value
111 is transferred to either reg_equiv_address or reg_equiv_mem. */
112 rtx *reg_equiv_memory_loc;
114 /* We allocate reg_equiv_memory_loc inside a GC-tracked VEC so that the
115 garbage collector can keep track of what is inside. */
116 VEC(rtx,gc) *reg_equiv_memory_loc_vec;
118 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
119 This is used when the address is not valid as a memory address
120 (because its displacement is too big for the machine.) */
121 rtx *reg_equiv_address;
123 /* Element N is the memory slot to which pseudo reg N is equivalent,
124 or zero if pseudo reg N is not equivalent to a memory slot. */
125 rtx *reg_equiv_mem;
127 /* Element N is an EXPR_LIST of REG_EQUIVs containing MEMs with
128 alternate representations of the location of pseudo reg N. */
129 rtx *reg_equiv_alt_mem_list;
131 /* Widest width in which each pseudo reg is referred to (via subreg). */
132 static unsigned int *reg_max_ref_width;
134 /* Element N is the list of insns that initialized reg N from its equivalent
135 constant or memory slot. */
136 rtx *reg_equiv_init;
137 int reg_equiv_init_size;
139 /* Vector to remember old contents of reg_renumber before spilling. */
140 static short *reg_old_renumber;
142 /* During reload_as_needed, element N contains the last pseudo regno reloaded
143 into hard register N. If that pseudo reg occupied more than one register,
144 reg_reloaded_contents points to that pseudo for each spill register in
145 use; all of these must remain set for an inheritance to occur. */
146 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
148 /* During reload_as_needed, element N contains the insn for which
149 hard register N was last used. Its contents are significant only
150 when reg_reloaded_valid is set for this register. */
151 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
153 /* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid. */
154 static HARD_REG_SET reg_reloaded_valid;
155 /* Indicate if the register was dead at the end of the reload.
156 This is only valid if reg_reloaded_contents is set and valid. */
157 static HARD_REG_SET reg_reloaded_dead;
159 /* Indicate whether the register's current value is one that is not
160 safe to retain across a call, even for registers that are normally
161 call-saved. */
162 static HARD_REG_SET reg_reloaded_call_part_clobbered;
164 /* Number of spill-regs so far; number of valid elements of spill_regs. */
165 static int n_spills;
167 /* In parallel with spill_regs, contains REG rtx's for those regs.
168 Holds the last rtx used for any given reg, or 0 if it has never
169 been used for spilling yet. This rtx is reused, provided it has
170 the proper mode. */
171 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
173 /* In parallel with spill_regs, contains nonzero for a spill reg
174 that was stored after the last time it was used.
175 The precise value is the insn generated to do the store. */
176 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
178 /* This is the register that was stored with spill_reg_store. This is a
179 copy of reload_out / reload_out_reg when the value was stored; if
180 reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg. */
181 static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER];
183 /* This table is the inverse mapping of spill_regs:
184 indexed by hard reg number,
185 it contains the position of that reg in spill_regs,
186 or -1 for something that is not in spill_regs.
188 ?!? This is no longer accurate. */
189 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
191 /* This reg set indicates registers that can't be used as spill registers for
192 the currently processed insn. These are the hard registers which are live
193 during the insn, but not allocated to pseudos, as well as fixed
194 registers. */
195 static HARD_REG_SET bad_spill_regs;
197 /* These are the hard registers that can't be used as spill register for any
198 insn. This includes registers used for user variables and registers that
199 we can't eliminate. A register that appears in this set also can't be used
200 to retry register allocation. */
201 static HARD_REG_SET bad_spill_regs_global;
203 /* Describes order of use of registers for reloading
204 of spilled pseudo-registers. `n_spills' is the number of
205 elements that are actually valid; new ones are added at the end.
207 Both spill_regs and spill_reg_order are used on two occasions:
208 once during find_reload_regs, where they keep track of the spill registers
209 for a single insn, but also during reload_as_needed where they show all
210 the registers ever used by reload. For the latter case, the information
211 is calculated during finish_spills. */
212 static short spill_regs[FIRST_PSEUDO_REGISTER];
214 /* This vector of reg sets indicates, for each pseudo, which hard registers
215 may not be used for retrying global allocation because the register was
216 formerly spilled from one of them. If we allowed reallocating a pseudo to
217 a register that it was already allocated to, reload might not
218 terminate. */
219 static HARD_REG_SET *pseudo_previous_regs;
221 /* This vector of reg sets indicates, for each pseudo, which hard
222 registers may not be used for retrying global allocation because they
223 are used as spill registers during one of the insns in which the
224 pseudo is live. */
225 static HARD_REG_SET *pseudo_forbidden_regs;
227 /* All hard regs that have been used as spill registers for any insn are
228 marked in this set. */
229 static HARD_REG_SET used_spill_regs;
231 /* Index of last register assigned as a spill register. We allocate in
232 a round-robin fashion. */
233 static int last_spill_reg;
235 /* Nonzero if indirect addressing is supported on the machine; this means
236 that spilling (REG n) does not require reloading it into a register in
237 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
238 value indicates the level of indirect addressing supported, e.g., two
239 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
240 a hard register. */
241 static char spill_indirect_levels;
243 /* Nonzero if indirect addressing is supported when the innermost MEM is
244 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
245 which these are valid is the same as spill_indirect_levels, above. */
246 char indirect_symref_ok;
248 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
249 char double_reg_address_ok;
251 /* Record the stack slot for each spilled hard register. */
252 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
254 /* Width allocated so far for that stack slot. */
255 static unsigned int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
257 /* Record which pseudos needed to be spilled. */
258 static regset_head spilled_pseudos;
260 /* Used for communication between order_regs_for_reload and count_pseudo.
261 Used to avoid counting one pseudo twice. */
262 static regset_head pseudos_counted;
264 /* First uid used by insns created by reload in this function.
265 Used in find_equiv_reg. */
266 int reload_first_uid;
268 /* Flag set by local-alloc or global-alloc if anything is live in
269 a call-clobbered reg across calls. */
270 int caller_save_needed;
272 /* Set to 1 while reload_as_needed is operating.
273 Required by some machines to handle any generated moves differently. */
274 int reload_in_progress = 0;
276 /* These arrays record the insn_code of insns that may be needed to
277 perform input and output reloads of special objects. They provide a
278 place to pass a scratch register. */
279 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
280 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
282 /* This obstack is used for allocation of rtl during register elimination.
283 The allocated storage can be freed once find_reloads has processed the
284 insn. */
285 static struct obstack reload_obstack;
287 /* Points to the beginning of the reload_obstack. All insn_chain structures
288 are allocated first. */
289 static char *reload_startobj;
291 /* The point after all insn_chain structures. Used to quickly deallocate
292 memory allocated in copy_reloads during calculate_needs_all_insns. */
293 static char *reload_firstobj;
295 /* This points before all local rtl generated by register elimination.
296 Used to quickly free all memory after processing one insn. */
297 static char *reload_insn_firstobj;
299 /* List of insn_chain instructions, one for every insn that reload needs to
300 examine. */
301 struct insn_chain *reload_insn_chain;
303 /* List of all insns needing reloads. */
304 static struct insn_chain *insns_need_reload;
306 /* This structure is used to record information about register eliminations.
307 Each array entry describes one possible way of eliminating a register
308 in favor of another. If there is more than one way of eliminating a
309 particular register, the most preferred should be specified first. */
311 struct elim_table
313 int from; /* Register number to be eliminated. */
314 int to; /* Register number used as replacement. */
315 HOST_WIDE_INT initial_offset; /* Initial difference between values. */
316 int can_eliminate; /* Nonzero if this elimination can be done. */
317 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
318 insns made by reload. */
319 HOST_WIDE_INT offset; /* Current offset between the two regs. */
320 HOST_WIDE_INT previous_offset;/* Offset at end of previous insn. */
321 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
322 rtx from_rtx; /* REG rtx for the register to be eliminated.
323 We cannot simply compare the number since
324 we might then spuriously replace a hard
325 register corresponding to a pseudo
326 assigned to the reg to be eliminated. */
327 rtx to_rtx; /* REG rtx for the replacement. */
330 static struct elim_table *reg_eliminate = 0;
332 /* This is an intermediate structure to initialize the table. It has
333 exactly the members provided by ELIMINABLE_REGS. */
334 static const struct elim_table_1
336 const int from;
337 const int to;
338 } reg_eliminate_1[] =
340 /* If a set of eliminable registers was specified, define the table from it.
341 Otherwise, default to the normal case of the frame pointer being
342 replaced by the stack pointer. */
344 #ifdef ELIMINABLE_REGS
345 ELIMINABLE_REGS;
346 #else
347 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
348 #endif
350 #define NUM_ELIMINABLE_REGS ARRAY_SIZE (reg_eliminate_1)
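/* For illustration (a sketch, not quoted from any particular port): a target
   normally provides ELIMINABLE_REGS in its target header as an initializer
   of {from, to} pairs, with the most preferred elimination for a register
   listed first, e.g.

       #define ELIMINABLE_REGS                               \
         {{ ARG_POINTER_REGNUM,   STACK_POINTER_REGNUM },    \
          { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM }}

   reg_eliminate_1 above simply picks up those pairs; init_elim_table later
   expands them into full elim_table entries. */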
352 /* Record the number of pending eliminations that have an offset not equal
353 to their initial offset. If nonzero, we use a new copy of each
354 replacement result in any insns encountered. */
355 int num_not_at_initial_offset;
357 /* Count the number of registers that we may be able to eliminate. */
358 static int num_eliminable;
359 /* And the number of registers that are equivalent to a constant that
360 can be eliminated to frame_pointer / arg_pointer + constant. */
361 static int num_eliminable_invariants;
363 /* For each label, we record the offset of each elimination. If we reach
364 a label by more than one path and an offset differs, we cannot do the
365 elimination. This information is indexed by the difference of the
366 number of the label and the first label number. We can't offset the
367 pointer itself as this can cause problems on machines with segmented
368 memory. The first table is an array of flags that records whether we
369 have yet encountered a label and the second table is an array of arrays,
370 one entry in the latter array for each elimination. */
372 static int first_label_num;
373 static char *offsets_known_at;
374 static HOST_WIDE_INT (*offsets_at)[NUM_ELIMINABLE_REGS];
376 /* Number of labels in the current function. */
378 static int num_labels;
380 static void replace_pseudos_in (rtx *, enum machine_mode, rtx);
381 static void maybe_fix_stack_asms (void);
382 static void copy_reloads (struct insn_chain *);
383 static void calculate_needs_all_insns (int);
384 static int find_reg (struct insn_chain *, int);
385 static void find_reload_regs (struct insn_chain *);
386 static void select_reload_regs (void);
387 static void delete_caller_save_insns (void);
389 static void spill_failure (rtx, enum reg_class);
390 static void count_spilled_pseudo (int, int, int);
391 static void delete_dead_insn (rtx);
392 static void alter_reg (int, int);
393 static void set_label_offsets (rtx, rtx, int);
394 static void check_eliminable_occurrences (rtx);
395 static void elimination_effects (rtx, enum machine_mode);
396 static int eliminate_regs_in_insn (rtx, int);
397 static void update_eliminable_offsets (void);
398 static void mark_not_eliminable (rtx, const_rtx, void *);
399 static void set_initial_elim_offsets (void);
400 static bool verify_initial_elim_offsets (void);
401 static void set_initial_label_offsets (void);
402 static void set_offsets_for_label (rtx);
403 static void init_elim_table (void);
404 static void update_eliminables (HARD_REG_SET *);
405 static void spill_hard_reg (unsigned int, int);
406 static int finish_spills (int);
407 static void scan_paradoxical_subregs (rtx);
408 static void count_pseudo (int);
409 static void order_regs_for_reload (struct insn_chain *);
410 static void reload_as_needed (int);
411 static void forget_old_reloads_1 (rtx, const_rtx, void *);
412 static void forget_marked_reloads (regset);
413 static int reload_reg_class_lower (const void *, const void *);
414 static void mark_reload_reg_in_use (unsigned int, int, enum reload_type,
415 enum machine_mode);
416 static void clear_reload_reg_in_use (unsigned int, int, enum reload_type,
417 enum machine_mode);
418 static int reload_reg_free_p (unsigned int, int, enum reload_type);
419 static int reload_reg_free_for_value_p (int, int, int, enum reload_type,
420 rtx, rtx, int, int);
421 static int free_for_value_p (int, enum machine_mode, int, enum reload_type,
422 rtx, rtx, int, int);
423 static int reload_reg_reaches_end_p (unsigned int, int, enum reload_type);
424 static int allocate_reload_reg (struct insn_chain *, int, int);
425 static int conflicts_with_override (rtx);
426 static void failed_reload (rtx, int);
427 static int set_reload_reg (int, int);
428 static void choose_reload_regs_init (struct insn_chain *, rtx *);
429 static void choose_reload_regs (struct insn_chain *);
430 static void merge_assigned_reloads (rtx);
431 static void emit_input_reload_insns (struct insn_chain *, struct reload *,
432 rtx, int);
433 static void emit_output_reload_insns (struct insn_chain *, struct reload *,
434 int);
435 static void do_input_reload (struct insn_chain *, struct reload *, int);
436 static void do_output_reload (struct insn_chain *, struct reload *, int);
437 static bool inherit_piecemeal_p (int, int);
438 static void emit_reload_insns (struct insn_chain *);
439 static void delete_output_reload (rtx, int, int);
440 static void delete_address_reloads (rtx, rtx);
441 static void delete_address_reloads_1 (rtx, rtx, rtx);
442 static rtx inc_for_reload (rtx, rtx, rtx, int);
443 #ifdef AUTO_INC_DEC
444 static void add_auto_inc_notes (rtx, rtx);
445 #endif
446 static void copy_eh_notes (rtx, rtx);
447 static int reloads_conflict (int, int);
448 static rtx gen_reload (rtx, rtx, int, enum reload_type);
449 static rtx emit_insn_if_valid_for_reload (rtx);
451 /* Initialize the reload pass. This is called at the beginning of compilation
452 and may be called again if the target is reinitialized. */
454 void
455 init_reload (void)
457 int i;
459 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
460 Set spill_indirect_levels to the number of levels such addressing is
461 permitted, zero if it is not permitted at all. */
463 rtx tem
464 = gen_rtx_MEM (Pmode,
465 gen_rtx_PLUS (Pmode,
466 gen_rtx_REG (Pmode,
467 LAST_VIRTUAL_REGISTER + 1),
468 GEN_INT (4)));
469 spill_indirect_levels = 0;
471 while (memory_address_p (QImode, tem))
473 spill_indirect_levels++;
474 tem = gen_rtx_MEM (Pmode, tem);
477 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
479 tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
480 indirect_symref_ok = memory_address_p (QImode, tem);
482 /* See if reg+reg is a valid (and offsettable) address. */
484 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
486 tem = gen_rtx_PLUS (Pmode,
487 gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
488 gen_rtx_REG (Pmode, i));
490 /* This way, we make sure that reg+reg is an offsettable address. */
491 tem = plus_constant (tem, 4);
493 if (memory_address_p (QImode, tem))
495 double_reg_address_ok = 1;
496 break;
500 /* Initialize obstack for our rtl allocation. */
501 gcc_obstack_init (&reload_obstack);
502 reload_startobj = obstack_alloc (&reload_obstack, 0);
504 INIT_REG_SET (&spilled_pseudos);
505 INIT_REG_SET (&pseudos_counted);
508 /* List of insn chains that are currently unused. */
509 static struct insn_chain *unused_insn_chains = 0;
511 /* Allocate an empty insn_chain structure. */
512 struct insn_chain *
513 new_insn_chain (void)
515 struct insn_chain *c;
517 if (unused_insn_chains == 0)
519 c = obstack_alloc (&reload_obstack, sizeof (struct insn_chain));
520 INIT_REG_SET (&c->live_throughout);
521 INIT_REG_SET (&c->dead_or_set);
523 else
525 c = unused_insn_chains;
526 unused_insn_chains = c->next;
528 c->is_caller_save_insn = 0;
529 c->need_operand_change = 0;
530 c->need_reload = 0;
531 c->need_elim = 0;
532 return c;
535 /* Small utility function to set all regs in hard reg set TO which are
536 allocated to pseudos in regset FROM. */
538 void
539 compute_use_by_pseudos (HARD_REG_SET *to, regset from)
541 unsigned int regno;
542 reg_set_iterator rsi;
544 EXECUTE_IF_SET_IN_REG_SET (from, FIRST_PSEUDO_REGISTER, regno, rsi)
546 int r = reg_renumber[regno];
548 if (r < 0)
550 /* reload_combine uses the information from
551 DF_RA_LIVE_IN (BASIC_BLOCK), which might still
552 contain registers that have not actually been allocated
553 since they have an equivalence. */
554 gcc_assert (reload_completed);
556 else
557 add_to_hard_reg_set (to, PSEUDO_REGNO_MODE (regno), r);
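/* Illustrative use (a sketch of the typical calling pattern, not a quote of
   any one caller): starting from a cleared set, accumulate the hard regs
   occupied by the pseudos that are live around an insn_chain:

       HARD_REG_SET used_by_pseudos;
       CLEAR_HARD_REG_SET (used_by_pseudos);
       compute_use_by_pseudos (&used_by_pseudos, &chain->live_throughout);
       compute_use_by_pseudos (&used_by_pseudos, &chain->dead_or_set);  */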
561 /* Replace all pseudos found in LOC with their corresponding
562 equivalences. */
564 static void
565 replace_pseudos_in (rtx *loc, enum machine_mode mem_mode, rtx usage)
567 rtx x = *loc;
568 enum rtx_code code;
569 const char *fmt;
570 int i, j;
572 if (! x)
573 return;
575 code = GET_CODE (x);
576 if (code == REG)
578 unsigned int regno = REGNO (x);
580 if (regno < FIRST_PSEUDO_REGISTER)
581 return;
583 x = eliminate_regs (x, mem_mode, usage);
584 if (x != *loc)
586 *loc = x;
587 replace_pseudos_in (loc, mem_mode, usage);
588 return;
591 if (reg_equiv_constant[regno])
592 *loc = reg_equiv_constant[regno];
593 else if (reg_equiv_mem[regno])
594 *loc = reg_equiv_mem[regno];
595 else if (reg_equiv_address[regno])
596 *loc = gen_rtx_MEM (GET_MODE (x), reg_equiv_address[regno]);
597 else
599 gcc_assert (!REG_P (regno_reg_rtx[regno])
600 || REGNO (regno_reg_rtx[regno]) != regno);
601 *loc = regno_reg_rtx[regno];
604 return;
606 else if (code == MEM)
608 replace_pseudos_in (& XEXP (x, 0), GET_MODE (x), usage);
609 return;
612 /* Process each of our operands recursively. */
613 fmt = GET_RTX_FORMAT (code);
614 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
615 if (*fmt == 'e')
616 replace_pseudos_in (&XEXP (x, i), mem_mode, usage);
617 else if (*fmt == 'E')
618 for (j = 0; j < XVECLEN (x, i); j++)
619 replace_pseudos_in (& XVECEXP (x, i, j), mem_mode, usage);
622 /* Determine if the current function has an exception receiver block
623 that reaches the exit block via non-exceptional edges */
625 static bool
626 has_nonexceptional_receiver (void)
628 edge e;
629 edge_iterator ei;
630 basic_block *tos, *worklist, bb;
632 /* If we're not optimizing, then just err on the safe side. */
633 if (!optimize)
634 return true;
636 /* First determine which blocks can reach exit via normal paths. */
637 tos = worklist = xmalloc (sizeof (basic_block) * (n_basic_blocks + 1));
639 FOR_EACH_BB (bb)
640 bb->flags &= ~BB_REACHABLE;
642 /* Place the exit block on our worklist. */
643 EXIT_BLOCK_PTR->flags |= BB_REACHABLE;
644 *tos++ = EXIT_BLOCK_PTR;
646 /* Iterate: find everything reachable from what we've already seen. */
647 while (tos != worklist)
649 bb = *--tos;
651 FOR_EACH_EDGE (e, ei, bb->preds)
652 if (!(e->flags & EDGE_ABNORMAL))
654 basic_block src = e->src;
656 if (!(src->flags & BB_REACHABLE))
658 src->flags |= BB_REACHABLE;
659 *tos++ = src;
663 free (worklist);
665 /* Now see if there's a reachable block with an exceptional incoming
666 edge. */
667 FOR_EACH_BB (bb)
668 if (bb->flags & BB_REACHABLE)
669 FOR_EACH_EDGE (e, ei, bb->preds)
670 if (e->flags & EDGE_ABNORMAL)
671 return true;
673 /* No exceptional block reached exit unexceptionally. */
674 return false;
678 /* Global variables used by reload and its subroutines. */
680 /* Set during calculate_needs if an insn needs register elimination. */
681 static int something_needs_elimination;
682 /* Set during calculate_needs if an insn needs an operand changed. */
683 static int something_needs_operands_changed;
685 /* Nonzero means we couldn't get enough spill regs. */
686 static int failure;
688 /* Main entry point for the reload pass.
690 FIRST is the first insn of the function being compiled.
692 GLOBAL nonzero means we were called from global_alloc
693 and should attempt to reallocate any pseudoregs that we
694 displace from hard regs we will use for reloads.
695 If GLOBAL is zero, we do not have enough information to do that,
696 so any pseudo reg that is spilled must go to the stack.
698 Return value is nonzero if reload failed
699 and we must not do any more for this function. */
702 reload (rtx first, int global)
704 int i;
705 rtx insn;
706 struct elim_table *ep;
707 basic_block bb;
709 /* Make sure even insns with volatile mem refs are recognizable. */
710 init_recog ();
712 failure = 0;
714 reload_firstobj = obstack_alloc (&reload_obstack, 0);
716 /* Make sure that the last insn in the chain
717 is not something that needs reloading. */
718 emit_note (NOTE_INSN_DELETED);
720 /* Enable find_equiv_reg to distinguish insns made by reload. */
721 reload_first_uid = get_max_uid ();
723 #ifdef SECONDARY_MEMORY_NEEDED
724 /* Initialize the secondary memory table. */
725 clear_secondary_mem ();
726 #endif
728 /* We don't have a stack slot for any spill reg yet. */
729 memset (spill_stack_slot, 0, sizeof spill_stack_slot);
730 memset (spill_stack_slot_width, 0, sizeof spill_stack_slot_width);
732 /* Initialize the save area information for caller-save, in case some
733 are needed. */
734 init_save_areas ();
736 /* Compute which hard registers are now in use
737 as homes for pseudo registers.
738 This is done here rather than (eg) in global_alloc
739 because this point is reached even if not optimizing. */
740 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
741 mark_home_live (i);
743 /* A function that has a nonlocal label that can reach the exit
744 block via non-exceptional paths must save all call-saved
745 registers. */
746 if (current_function_calls_unwind_init
747 || (current_function_has_nonlocal_label
748 && has_nonexceptional_receiver ()))
749 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
750 if (! call_used_regs[i] && ! fixed_regs[i] && ! LOCAL_REGNO (i))
751 df_set_regs_ever_live (i, true);
753 /* Find all the pseudo registers that didn't get hard regs
754 but do have known equivalent constants or memory slots.
755 These include parameters (known equivalent to parameter slots)
756 and cse'd or loop-moved constant memory addresses.
758 Record constant equivalents in reg_equiv_constant
759 so they will be substituted by find_reloads.
760 Record memory equivalents in reg_equiv_memory_loc so they can
761 be substituted eventually by altering the REG-rtx's. */
763 reg_equiv_constant = XCNEWVEC (rtx, max_regno);
764 reg_equiv_invariant = XCNEWVEC (rtx, max_regno);
765 reg_equiv_mem = XCNEWVEC (rtx, max_regno);
766 reg_equiv_alt_mem_list = XCNEWVEC (rtx, max_regno);
767 reg_equiv_address = XCNEWVEC (rtx, max_regno);
768 reg_max_ref_width = XCNEWVEC (unsigned int, max_regno);
769 reg_old_renumber = XCNEWVEC (short, max_regno);
770 memcpy (reg_old_renumber, reg_renumber, max_regno * sizeof (short));
771 pseudo_forbidden_regs = XNEWVEC (HARD_REG_SET, max_regno);
772 pseudo_previous_regs = XCNEWVEC (HARD_REG_SET, max_regno);
774 CLEAR_HARD_REG_SET (bad_spill_regs_global);
776 /* Look for REG_EQUIV notes; record what each pseudo is equivalent
777 to. Also find all paradoxical subregs and find largest such for
778 each pseudo. */
780 num_eliminable_invariants = 0;
781 for (insn = first; insn; insn = NEXT_INSN (insn))
783 rtx set = single_set (insn);
785 /* We may introduce USEs that we want to remove at the end, so
786 we'll mark them with QImode. Make sure there are no
787 previously-marked insns left by say regmove. */
788 if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
789 && GET_MODE (insn) != VOIDmode)
790 PUT_MODE (insn, VOIDmode);
792 if (INSN_P (insn))
793 scan_paradoxical_subregs (PATTERN (insn));
795 if (set != 0 && REG_P (SET_DEST (set)))
797 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
798 rtx x;
800 if (! note)
801 continue;
803 i = REGNO (SET_DEST (set));
804 x = XEXP (note, 0);
806 if (i <= LAST_VIRTUAL_REGISTER)
807 continue;
809 if (! function_invariant_p (x)
810 || ! flag_pic
811 /* A function invariant is often CONSTANT_P but may
812 include a register. We promise to only pass
813 CONSTANT_P objects to LEGITIMATE_PIC_OPERAND_P. */
814 || (CONSTANT_P (x)
815 && LEGITIMATE_PIC_OPERAND_P (x)))
817 /* It can happen that a REG_EQUIV note contains a MEM
818 that is not a legitimate memory operand. As later
819 stages of reload assume that all addresses found
820 in the reg_equiv_* arrays were originally legitimate,
821 we ignore such REG_EQUIV notes. */
822 if (memory_operand (x, VOIDmode))
824 /* Always unshare the equivalence, so we can
825 substitute into this insn without touching the
826 equivalence. */
827 reg_equiv_memory_loc[i] = copy_rtx (x);
829 else if (function_invariant_p (x))
831 if (GET_CODE (x) == PLUS)
833 /* This is PLUS of frame pointer and a constant,
834 and might be shared. Unshare it. */
835 reg_equiv_invariant[i] = copy_rtx (x);
836 num_eliminable_invariants++;
838 else if (x == frame_pointer_rtx || x == arg_pointer_rtx)
840 reg_equiv_invariant[i] = x;
841 num_eliminable_invariants++;
843 else if (LEGITIMATE_CONSTANT_P (x))
844 reg_equiv_constant[i] = x;
845 else
847 reg_equiv_memory_loc[i]
848 = force_const_mem (GET_MODE (SET_DEST (set)), x);
849 if (! reg_equiv_memory_loc[i])
850 reg_equiv_init[i] = NULL_RTX;
853 else
855 reg_equiv_init[i] = NULL_RTX;
856 continue;
859 else
860 reg_equiv_init[i] = NULL_RTX;
864 if (dump_file)
865 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
866 if (reg_equiv_init[i])
868 fprintf (dump_file, "init_insns for %u: ", i);
869 print_inline_rtx (dump_file, reg_equiv_init[i], 20);
870 fprintf (dump_file, "\n");
873 init_elim_table ();
875 first_label_num = get_first_label_num ();
876 num_labels = max_label_num () - first_label_num;
878 /* Allocate the tables used to store offset information at labels. */
879 /* We used to use alloca here, but the size of what it would try to
880 allocate would occasionally cause it to exceed the stack limit and
881 cause a core dump. */
882 offsets_known_at = XNEWVEC (char, num_labels);
883 offsets_at = (HOST_WIDE_INT (*)[NUM_ELIMINABLE_REGS]) xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (HOST_WIDE_INT));
885 /* Alter each pseudo-reg rtx to contain its hard reg number.
886 Assign stack slots to the pseudos that lack hard regs or equivalents.
887 Do not touch virtual registers. */
889 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
890 alter_reg (i, -1);
892 /* If we have some registers we think can be eliminated, scan all insns to
893 see if there is an insn that sets one of these registers to something
894 other than itself plus a constant. If so, the register cannot be
895 eliminated. Doing this scan here eliminates an extra pass through the
896 main reload loop in the most common case where register elimination
897 cannot be done. */
898 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
899 if (INSN_P (insn))
900 note_stores (PATTERN (insn), mark_not_eliminable, NULL);
902 maybe_fix_stack_asms ();
904 insns_need_reload = 0;
905 something_needs_elimination = 0;
907 /* Initialize to -1, which means take the first spill register. */
908 last_spill_reg = -1;
910 /* Spill any hard regs that we know we can't eliminate. */
911 CLEAR_HARD_REG_SET (used_spill_regs);
912 /* There can be multiple ways to eliminate a register;
913 they should be listed adjacently.
914 Elimination for any register fails only if all possible ways fail. */
915 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; )
917 int from = ep->from;
918 int can_eliminate = 0;
921 can_eliminate |= ep->can_eliminate;
922 ep++;
924 while (ep < &reg_eliminate[NUM_ELIMINABLE_REGS] && ep->from == from);
925 if (! can_eliminate)
926 spill_hard_reg (from, 1);
929 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
930 if (frame_pointer_needed)
931 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, 1);
932 #endif
933 finish_spills (global);
935 /* From now on, we may need to generate moves differently. We may also
936 allow modifications of insns which cause them to not be recognized.
937 Any such modifications will be cleaned up during reload itself. */
938 reload_in_progress = 1;
940 /* This loop scans the entire function each go-round
941 and repeats until one repetition spills no additional hard regs. */
942 for (;;)
944 int something_changed;
945 int did_spill;
946 HOST_WIDE_INT starting_frame_size;
948 starting_frame_size = get_frame_size ();
950 set_initial_elim_offsets ();
951 set_initial_label_offsets ();
953 /* For each pseudo register that has an equivalent location defined,
954 try to eliminate any eliminable registers (such as the frame pointer)
955 assuming initial offsets for the replacement register, which
956 is the normal case.
958 If the resulting location is directly addressable, substitute
959 the MEM we just got directly for the old REG.
961 If it is not addressable but is a constant or the sum of a hard reg
962 and constant, it is probably not addressable because the constant is
963 out of range, in that case record the address; we will generate
964 hairy code to compute the address in a register each time it is
965 needed. Similarly if it is a hard register, but one that is not
966 valid as an address register.
968 If the location is not addressable, but does not have one of the
969 above forms, assign a stack slot. We have to do this to avoid the
970 potential of producing lots of reloads if, e.g., a location involves
971 a pseudo that didn't get a hard register and has an equivalent memory
972 location that also involves a pseudo that didn't get a hard register.
974 Perhaps at some point we will improve reload_when_needed handling
975 so this problem goes away. But that's very hairy. */
977 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
978 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
980 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
982 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
983 XEXP (x, 0)))
984 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
985 else if (CONSTANT_P (XEXP (x, 0))
986 || (REG_P (XEXP (x, 0))
987 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
988 || (GET_CODE (XEXP (x, 0)) == PLUS
989 && REG_P (XEXP (XEXP (x, 0), 0))
990 && (REGNO (XEXP (XEXP (x, 0), 0))
991 < FIRST_PSEUDO_REGISTER)
992 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
993 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
994 else
996 /* Make a new stack slot. Then indicate that something
997 changed so we go back and recompute offsets for
998 eliminable registers because the allocation of memory
999 below might change some offset. reg_equiv_{mem,address}
1000 will be set up for this pseudo on the next pass around
1001 the loop. */
1002 reg_equiv_memory_loc[i] = 0;
1003 reg_equiv_init[i] = 0;
1004 alter_reg (i, -1);
1008 if (caller_save_needed)
1009 setup_save_areas ();
1011 /* If we allocated another stack slot, redo elimination bookkeeping. */
1012 if (starting_frame_size != get_frame_size ())
1013 continue;
1014 if (starting_frame_size && cfun->stack_alignment_needed)
1016 /* If we have a stack frame, we must align it now. The
1017 stack size may be a part of the offset computation for
1018 register elimination. So if this changes the stack size,
1019 then repeat the elimination bookkeeping. We don't
1020 realign when there is no stack, as that will cause a
1021 stack frame when none is needed should
1022 STARTING_FRAME_OFFSET not be already aligned to
1023 STACK_BOUNDARY. */
1024 assign_stack_local (BLKmode, 0, cfun->stack_alignment_needed);
1025 if (starting_frame_size != get_frame_size ())
1026 continue;
1029 if (caller_save_needed)
1031 save_call_clobbered_regs ();
1032 /* That might have allocated new insn_chain structures. */
1033 reload_firstobj = obstack_alloc (&reload_obstack, 0);
1036 calculate_needs_all_insns (global);
1038 CLEAR_REG_SET (&spilled_pseudos);
1039 did_spill = 0;
1041 something_changed = 0;
1043 /* If we allocated any new memory locations, make another pass
1044 since it might have changed elimination offsets. */
1045 if (starting_frame_size != get_frame_size ())
1046 something_changed = 1;
1048 /* Even if the frame size remained the same, we might still have
1049 changed elimination offsets, e.g. if find_reloads called
1050 force_const_mem requiring the back end to allocate a constant
1051 pool base register that needs to be saved on the stack. */
1052 else if (!verify_initial_elim_offsets ())
1053 something_changed = 1;
1056 HARD_REG_SET to_spill;
1057 CLEAR_HARD_REG_SET (to_spill);
1058 update_eliminables (&to_spill);
1059 AND_COMPL_HARD_REG_SET (used_spill_regs, to_spill);
1061 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1062 if (TEST_HARD_REG_BIT (to_spill, i))
1064 spill_hard_reg (i, 1);
1065 did_spill = 1;
1067 /* Regardless of the state of spills, if we previously had
1068 a register that we thought we could eliminate, but now can
1069 not eliminate, we must run another pass.
1071 Consider pseudos which have an entry in reg_equiv_* which
1072 reference an eliminable register. We must make another pass
1073 to update reg_equiv_* so that we do not substitute in the
1074 old value from when we thought the elimination could be
1075 performed. */
1076 something_changed = 1;
1080 select_reload_regs ();
1081 if (failure)
1082 goto failed;
1084 if (insns_need_reload != 0 || did_spill)
1085 something_changed |= finish_spills (global);
1087 if (! something_changed)
1088 break;
1090 if (caller_save_needed)
1091 delete_caller_save_insns ();
1093 obstack_free (&reload_obstack, reload_firstobj);
1096 /* If global-alloc was run, notify it of any register eliminations we have
1097 done. */
1098 if (global)
1099 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1100 if (ep->can_eliminate)
1101 mark_elimination (ep->from, ep->to);
1103 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1104 If that insn didn't set the register (i.e., it copied the register to
1105 memory), just delete that insn instead of the equivalencing insn plus
1106 anything now dead. If we call delete_dead_insn on that insn, we may
1107 delete the insn that actually sets the register if the register dies
1108 there and that is incorrect. */
1110 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1112 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0)
1114 rtx list;
1115 for (list = reg_equiv_init[i]; list; list = XEXP (list, 1))
1117 rtx equiv_insn = XEXP (list, 0);
1119 /* If we already deleted the insn or if it may trap, we can't
1120 delete it. The latter case shouldn't happen, but can
1121 if an insn has a variable address, gets a REG_EH_REGION
1122 note added to it, and then gets converted into a load
1123 from a constant address. */
1124 if (NOTE_P (equiv_insn)
1125 || can_throw_internal (equiv_insn))
1127 else if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn)))
1128 delete_dead_insn (equiv_insn);
1129 else
1130 SET_INSN_DELETED (equiv_insn);
1135 /* Use the reload registers where necessary
1136 by generating move instructions to move the must-be-register
1137 values into or out of the reload registers. */
1139 if (insns_need_reload != 0 || something_needs_elimination
1140 || something_needs_operands_changed)
1142 HOST_WIDE_INT old_frame_size = get_frame_size ();
1144 reload_as_needed (global);
1146 gcc_assert (old_frame_size == get_frame_size ());
1148 gcc_assert (verify_initial_elim_offsets ());
1151 /* If we were able to eliminate the frame pointer, show that it is no
1152 longer live at the start of any basic block. If it is live by
1153 virtue of being in a pseudo, that pseudo will be marked live
1154 and hence the frame pointer will be known to be live via that
1155 pseudo. */
1157 if (! frame_pointer_needed)
1158 FOR_EACH_BB (bb)
1160 bitmap_clear_bit (df_get_live_in (bb), HARD_FRAME_POINTER_REGNUM);
1161 bitmap_clear_bit (df_get_live_top (bb), HARD_FRAME_POINTER_REGNUM);
1164 /* Come here (with failure set nonzero) if we can't get enough spill
1165 regs. */
1166 failed:
1168 CLEAR_REG_SET (&spilled_pseudos);
1169 reload_in_progress = 0;
1171 /* Now eliminate all pseudo regs by modifying them into
1172 their equivalent memory references.
1173 The REG-rtx's for the pseudos are modified in place,
1174 so all insns that used to refer to them now refer to memory.
1176 For a reg that has a reg_equiv_address, all those insns
1177 were changed by reloading so that no insns refer to it any longer;
1178 but the DECL_RTL of a variable decl may refer to it,
1179 and if so this causes the debugging info to mention the variable. */
1181 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1183 rtx addr = 0;
1185 if (reg_equiv_mem[i])
1186 addr = XEXP (reg_equiv_mem[i], 0);
1188 if (reg_equiv_address[i])
1189 addr = reg_equiv_address[i];
1191 if (addr)
1193 if (reg_renumber[i] < 0)
1195 rtx reg = regno_reg_rtx[i];
1197 REG_USERVAR_P (reg) = 0;
1198 PUT_CODE (reg, MEM);
1199 XEXP (reg, 0) = addr;
1200 if (reg_equiv_memory_loc[i])
1201 MEM_COPY_ATTRIBUTES (reg, reg_equiv_memory_loc[i]);
1202 else
1204 MEM_IN_STRUCT_P (reg) = MEM_SCALAR_P (reg) = 0;
1205 MEM_ATTRS (reg) = 0;
1207 MEM_NOTRAP_P (reg) = 1;
1209 else if (reg_equiv_mem[i])
1210 XEXP (reg_equiv_mem[i], 0) = addr;
1214 /* We must set reload_completed now since the cleanup_subreg_operands call
1215 below will re-recognize each insn and reload may have generated insns
1216 which are only valid during and after reload. */
1217 reload_completed = 1;
1219 /* Make a pass over all the insns and delete all USEs which we inserted
1220 only to tag a REG_EQUAL note on them. Remove all REG_DEAD and REG_UNUSED
1221 notes. Delete all CLOBBER insns, except those that refer to the return
1222 value and the special mem:BLK CLOBBERs added to prevent the scheduler
1223 from misarranging variable-array code, and simplify (subreg (reg))
1224 operands. Also remove all REG_RETVAL and REG_LIBCALL notes since they
1225 are no longer useful or accurate. Strip and regenerate REG_INC notes
1226 that may have been moved around. */
1228 for (insn = first; insn; insn = NEXT_INSN (insn))
1229 if (INSN_P (insn))
1231 rtx *pnote;
1233 if (CALL_P (insn))
1234 replace_pseudos_in (& CALL_INSN_FUNCTION_USAGE (insn),
1235 VOIDmode, CALL_INSN_FUNCTION_USAGE (insn));
1237 if ((GET_CODE (PATTERN (insn)) == USE
1238 /* We mark with QImode USEs introduced by reload itself. */
1239 && (GET_MODE (insn) == QImode
1240 || find_reg_note (insn, REG_EQUAL, NULL_RTX)))
1241 || (GET_CODE (PATTERN (insn)) == CLOBBER
1242 && (!MEM_P (XEXP (PATTERN (insn), 0))
1243 || GET_MODE (XEXP (PATTERN (insn), 0)) != BLKmode
1244 || (GET_CODE (XEXP (XEXP (PATTERN (insn), 0), 0)) != SCRATCH
1245 && XEXP (XEXP (PATTERN (insn), 0), 0)
1246 != stack_pointer_rtx))
1247 && (!REG_P (XEXP (PATTERN (insn), 0))
1248 || ! REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))))
1250 delete_insn (insn);
1251 continue;
1254 /* Some CLOBBERs may survive until here and still reference unassigned
1255 pseudos with const equivalent, which may in turn cause ICE in later
1256 passes if the reference remains in place. */
1257 if (GET_CODE (PATTERN (insn)) == CLOBBER)
1258 replace_pseudos_in (& XEXP (PATTERN (insn), 0),
1259 VOIDmode, PATTERN (insn));
1261 /* Discard obvious no-ops, even without -O. This optimization
1262 is fast and doesn't interfere with debugging. */
1263 if (NONJUMP_INSN_P (insn)
1264 && GET_CODE (PATTERN (insn)) == SET
1265 && REG_P (SET_SRC (PATTERN (insn)))
1266 && REG_P (SET_DEST (PATTERN (insn)))
1267 && (REGNO (SET_SRC (PATTERN (insn)))
1268 == REGNO (SET_DEST (PATTERN (insn)))))
1270 delete_insn (insn);
1271 continue;
1274 pnote = &REG_NOTES (insn);
1275 while (*pnote != 0)
1277 if (REG_NOTE_KIND (*pnote) == REG_DEAD
1278 || REG_NOTE_KIND (*pnote) == REG_UNUSED
1279 || REG_NOTE_KIND (*pnote) == REG_INC
1280 || REG_NOTE_KIND (*pnote) == REG_RETVAL
1281 || REG_NOTE_KIND (*pnote) == REG_LIBCALL_ID
1282 || REG_NOTE_KIND (*pnote) == REG_LIBCALL)
1283 *pnote = XEXP (*pnote, 1);
1284 else
1285 pnote = &XEXP (*pnote, 1);
1288 #ifdef AUTO_INC_DEC
1289 add_auto_inc_notes (insn, PATTERN (insn));
1290 #endif
1292 /* Simplify (subreg (reg)) if it appears as an operand. */
1293 cleanup_subreg_operands (insn);
1295 /* Clean up invalid ASMs so that they don't confuse later passes.
1296 See PR 21299. */
1297 if (asm_noperands (PATTERN (insn)) >= 0)
1299 extract_insn (insn);
1300 if (!constrain_operands (1))
1302 error_for_asm (insn,
1303 "%<asm%> operand has impossible constraints");
1304 delete_insn (insn);
1305 continue;
1310 /* If we are doing stack checking, give a warning if this function's
1311 frame size is larger than we expect. */
1312 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
1314 HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
1315 static int verbose_warned = 0;
1317 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1318 if (df_regs_ever_live_p (i) && ! fixed_regs[i] && call_used_regs[i])
1319 size += UNITS_PER_WORD;
1321 if (size > STACK_CHECK_MAX_FRAME_SIZE)
1323 warning (0, "frame size too large for reliable stack checking");
1324 if (! verbose_warned)
1326 warning (0, "try reducing the number of local variables");
1327 verbose_warned = 1;
1332 /* Indicate that we no longer have known memory locations or constants. */
1333 if (reg_equiv_constant)
1334 free (reg_equiv_constant);
1335 if (reg_equiv_invariant)
1336 free (reg_equiv_invariant);
1337 reg_equiv_constant = 0;
1338 reg_equiv_invariant = 0;
1339 VEC_free (rtx, gc, reg_equiv_memory_loc_vec);
1340 reg_equiv_memory_loc = 0;
1342 if (offsets_known_at)
1343 free (offsets_known_at);
1344 if (offsets_at)
1345 free (offsets_at);
1347 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1348 if (reg_equiv_alt_mem_list[i])
1349 free_EXPR_LIST_list (&reg_equiv_alt_mem_list[i]);
1350 free (reg_equiv_alt_mem_list);
1352 free (reg_equiv_mem);
1353 reg_equiv_init = 0;
1354 free (reg_equiv_address);
1355 free (reg_max_ref_width);
1356 free (reg_old_renumber);
1357 free (pseudo_previous_regs);
1358 free (pseudo_forbidden_regs);
1360 CLEAR_HARD_REG_SET (used_spill_regs);
1361 for (i = 0; i < n_spills; i++)
1362 SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
1364 /* Free all the insn_chain structures at once. */
1365 obstack_free (&reload_obstack, reload_startobj);
1366 unused_insn_chains = 0;
1367 fixup_abnormal_edges ();
1369 /* Replacing pseudos with their memory equivalents might have
1370 created shared rtx. Subsequent passes would get confused
1371 by this, so unshare everything here. */
1372 unshare_all_rtl_again (first);
1374 #ifdef STACK_BOUNDARY
1375 /* init_emit has set the alignment of the hard frame pointer
1376 to STACK_BOUNDARY. It is very likely no longer valid if
1377 the hard frame pointer was used for register allocation. */
1378 if (!frame_pointer_needed)
1379 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = BITS_PER_UNIT;
1380 #endif
1382 return failure;
1385 /* Yet another special case. Unfortunately, reg-stack forces people to
1386 write incorrect clobbers in asm statements. These clobbers must not
1387 cause the register to appear in bad_spill_regs, otherwise we'll call
1388 fatal_insn later. We clear the corresponding regnos in the live
1389 register sets to avoid this.
1390 The whole thing is rather sick, I'm afraid. */
1392 static void
1393 maybe_fix_stack_asms (void)
1395 #ifdef STACK_REGS
1396 const char *constraints[MAX_RECOG_OPERANDS];
1397 enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
1398 struct insn_chain *chain;
1400 for (chain = reload_insn_chain; chain != 0; chain = chain->next)
1402 int i, noperands;
1403 HARD_REG_SET clobbered, allowed;
1404 rtx pat;
1406 if (! INSN_P (chain->insn)
1407 || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
1408 continue;
1409 pat = PATTERN (chain->insn);
1410 if (GET_CODE (pat) != PARALLEL)
1411 continue;
1413 CLEAR_HARD_REG_SET (clobbered);
1414 CLEAR_HARD_REG_SET (allowed);
1416 /* First, make a mask of all stack regs that are clobbered. */
1417 for (i = 0; i < XVECLEN (pat, 0); i++)
1419 rtx t = XVECEXP (pat, 0, i);
1420 if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
1421 SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
1424 /* Get the operand values and constraints out of the insn. */
1425 decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc,
1426 constraints, operand_mode, NULL);
1428 /* For every operand, see what registers are allowed. */
1429 for (i = 0; i < noperands; i++)
1431 const char *p = constraints[i];
1432 /* For every alternative, we compute the class of registers allowed
1433 for reloading in CLS, and merge its contents into the reg set
1434 ALLOWED. */
1435 int cls = (int) NO_REGS;
1437 for (;;)
1439 char c = *p;
1441 if (c == '\0' || c == ',' || c == '#')
1443 /* End of one alternative - mark the regs in the current
1444 class, and reset the class. */
1445 IOR_HARD_REG_SET (allowed, reg_class_contents[cls]);
1446 cls = NO_REGS;
1447 p++;
1448 if (c == '#')
1449 do {
1450 c = *p++;
1451 } while (c != '\0' && c != ',');
1452 if (c == '\0')
1453 break;
1454 continue;
1457 switch (c)
1459 case '=': case '+': case '*': case '%': case '?': case '!':
1460 case '0': case '1': case '2': case '3': case '4': case 'm':
1461 case '<': case '>': case 'V': case 'o': case '&': case 'E':
1462 case 'F': case 's': case 'i': case 'n': case 'X': case 'I':
1463 case 'J': case 'K': case 'L': case 'M': case 'N': case 'O':
1464 case 'P':
1465 break;
1467 case 'p':
1468 cls = (int) reg_class_subunion[cls]
1469 [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
1470 break;
1472 case 'g':
1473 case 'r':
1474 cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
1475 break;
1477 default:
1478 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
1479 cls = (int) reg_class_subunion[cls]
1480 [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
1481 else
1482 cls = (int) reg_class_subunion[cls]
1483 [(int) REG_CLASS_FROM_CONSTRAINT (c, p)];
1485 p += CONSTRAINT_LEN (c, p);
1488 /* Those of the registers which are clobbered, but allowed by the
1489 constraints, must be usable as reload registers. So clear them
1490 out of the life information. */
1491 AND_HARD_REG_SET (allowed, clobbered);
1492 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1493 if (TEST_HARD_REG_BIT (allowed, i))
1495 CLEAR_REGNO_REG_SET (&chain->live_throughout, i);
1496 CLEAR_REGNO_REG_SET (&chain->dead_or_set, i);
1500 #endif
1503 /* Copy the global variables n_reloads and rld into the corresponding elts
1504 of CHAIN. */
1505 static void
1506 copy_reloads (struct insn_chain *chain)
1508 chain->n_reloads = n_reloads;
1509 chain->rld = obstack_alloc (&reload_obstack,
1510 n_reloads * sizeof (struct reload));
1511 memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1512 reload_insn_firstobj = obstack_alloc (&reload_obstack, 0);
1515 /* Walk the chain of insns, and determine for each whether it needs reloads
1516 and/or eliminations. Build the corresponding insns_need_reload list, and
1517 set something_needs_elimination as appropriate. */
1518 static void
1519 calculate_needs_all_insns (int global)
1521 struct insn_chain **pprev_reload = &insns_need_reload;
1522 struct insn_chain *chain, *next = 0;
1524 something_needs_elimination = 0;
1526 reload_insn_firstobj = obstack_alloc (&reload_obstack, 0);
1527 for (chain = reload_insn_chain; chain != 0; chain = next)
1529 rtx insn = chain->insn;
1531 next = chain->next;
1533 /* Clear out the shortcuts. */
1534 chain->n_reloads = 0;
1535 chain->need_elim = 0;
1536 chain->need_reload = 0;
1537 chain->need_operand_change = 0;
1539 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
1540 include REG_LABEL), we need to see what effects this has on the
1541 known offsets at labels. */
1543 if (LABEL_P (insn) || JUMP_P (insn)
1544 || (INSN_P (insn) && REG_NOTES (insn) != 0))
1545 set_label_offsets (insn, insn, 0);
1547 if (INSN_P (insn))
1549 rtx old_body = PATTERN (insn);
1550 int old_code = INSN_CODE (insn);
1551 rtx old_notes = REG_NOTES (insn);
1552 int did_elimination = 0;
1553 int operands_changed = 0;
1554 rtx set = single_set (insn);
1556 /* Skip insns that only set an equivalence. */
1557 if (set && REG_P (SET_DEST (set))
1558 && reg_renumber[REGNO (SET_DEST (set))] < 0
1559 && (reg_equiv_constant[REGNO (SET_DEST (set))]
1560 || (reg_equiv_invariant[REGNO (SET_DEST (set))]))
1561 && reg_equiv_init[REGNO (SET_DEST (set))])
1562 continue;
1564 /* If needed, eliminate any eliminable registers. */
1565 if (num_eliminable || num_eliminable_invariants)
1566 did_elimination = eliminate_regs_in_insn (insn, 0);
1568 /* Analyze the instruction. */
1569 operands_changed = find_reloads (insn, 0, spill_indirect_levels,
1570 global, spill_reg_order);
1572 /* If a no-op set needs more than one reload, this is likely
1573 to be something that needs input address reloads. We
1574 can't get rid of this cleanly later, and it is of no use
1575 anyway, so discard it now.
1576 We only do this when expensive_optimizations is enabled,
1577 since this complements reload inheritance / output
1578 reload deletion, and it can make debugging harder. */
1579 if (flag_expensive_optimizations && n_reloads > 1)
1581 rtx set = single_set (insn);
1582 if (set
1583 && SET_SRC (set) == SET_DEST (set)
1584 && REG_P (SET_SRC (set))
1585 && REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER)
1587 delete_insn (insn);
1588 /* Delete it from the reload chain. */
1589 if (chain->prev)
1590 chain->prev->next = next;
1591 else
1592 reload_insn_chain = next;
1593 if (next)
1594 next->prev = chain->prev;
1595 chain->next = unused_insn_chains;
1596 unused_insn_chains = chain;
1597 continue;
1600 if (num_eliminable)
1601 update_eliminable_offsets ();
1603 /* Remember for later shortcuts which insns had any reloads or
1604 register eliminations. */
1605 chain->need_elim = did_elimination;
1606 chain->need_reload = n_reloads > 0;
1607 chain->need_operand_change = operands_changed;
1609 /* Discard any register replacements done. */
1610 if (did_elimination)
1612 obstack_free (&reload_obstack, reload_insn_firstobj);
1613 PATTERN (insn) = old_body;
1614 INSN_CODE (insn) = old_code;
1615 REG_NOTES (insn) = old_notes;
1616 something_needs_elimination = 1;
1619 something_needs_operands_changed |= operands_changed;
1621 if (n_reloads != 0)
1623 copy_reloads (chain);
1624 *pprev_reload = chain;
1625 pprev_reload = &chain->next_need_reload;
1629 *pprev_reload = 0;
1632 /* Comparison function for qsort to decide which of two reloads
1633 should be handled first. *R1P and *R2P are the reload numbers. */
1635 static int
1636 reload_reg_class_lower (const void *r1p, const void *r2p)
1638 int r1 = *(const short *) r1p, r2 = *(const short *) r2p;
1639 int t;
1641 /* Consider required reloads before optional ones. */
1642 t = rld[r1].optional - rld[r2].optional;
1643 if (t != 0)
1644 return t;
1646 /* Count all solitary classes before non-solitary ones. */
1647 t = ((reg_class_size[(int) rld[r2].class] == 1)
1648 - (reg_class_size[(int) rld[r1].class] == 1));
1649 if (t != 0)
1650 return t;
1652 /* Aside from solitaires, consider all multi-reg groups first. */
1653 t = rld[r2].nregs - rld[r1].nregs;
1654 if (t != 0)
1655 return t;
1657 /* Consider reloads in order of increasing reg-class number. */
1658 t = (int) rld[r1].class - (int) rld[r2].class;
1659 if (t != 0)
1660 return t;
1662 /* If reloads are equally urgent, sort by reload number,
1663 so that the results of qsort leave nothing to chance. */
1664 return r1 - r2;
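/* Illustrative example of the ordering produced by the comparator above
   (the reload descriptions are assumptions invented for exposition).
   Given

     rld[0]: optional, class GENERAL_REGS, 1 reg
     rld[1]: required, class GENERAL_REGS, 2 regs
     rld[2]: required, a solitary class (reg_class_size == 1), 1 reg

   sorting reload_order with reload_reg_class_lower yields 2, 1, 0:
   required reloads come first, solitary classes before others, wider
   groups before narrower ones, and the reload number breaks any
   remaining tie so the qsort result is deterministic.  */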
1667 /* The cost of spilling each hard reg. */
1668 static int spill_cost[FIRST_PSEUDO_REGISTER];
1670 /* When spilling multiple hard registers, we use SPILL_COST for the first
1671 spilled hard reg and SPILL_ADD_COST for subsequent regs. SPILL_ADD_COST is
1672 charged only against the first hard reg of a multi-reg pseudo. */
1673 static int spill_add_cost[FIRST_PSEUDO_REGISTER];
1675 /* Update the spill cost arrays, considering that pseudo REG is live. */
1677 static void
1678 count_pseudo (int reg)
1680 int freq = REG_FREQ (reg);
1681 int r = reg_renumber[reg];
1682 int nregs;
1684 if (REGNO_REG_SET_P (&pseudos_counted, reg)
1685 || REGNO_REG_SET_P (&spilled_pseudos, reg))
1686 return;
1688 SET_REGNO_REG_SET (&pseudos_counted, reg);
1690 gcc_assert (r >= 0);
1692 spill_add_cost[r] += freq;
1694 nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
1695 while (nregs-- > 0)
1696 spill_cost[r + nregs] += freq;
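/* Worked example for the updates above (the pseudo, frequency and hard
   regs are assumptions for illustration only).  If pseudo 70 has
   REG_FREQ 10 and reg_renumber[70] == 4 in a two-register mode, then
   count_pseudo performs

     spill_add_cost[4] += 10;
     spill_cost[5] += 10;
     spill_cost[4] += 10;

   so every hard reg the pseudo occupies is charged the base cost, while
   the additional cost is charged only against its first hard reg.  */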
1699 /* Calculate the SPILL_COST and SPILL_ADD_COST arrays and determine the
1700 contents of BAD_SPILL_REGS for the insn described by CHAIN. */
1702 static void
1703 order_regs_for_reload (struct insn_chain *chain)
1705 unsigned i;
1706 HARD_REG_SET used_by_pseudos;
1707 HARD_REG_SET used_by_pseudos2;
1708 reg_set_iterator rsi;
1710 COPY_HARD_REG_SET (bad_spill_regs, fixed_reg_set);
1712 memset (spill_cost, 0, sizeof spill_cost);
1713 memset (spill_add_cost, 0, sizeof spill_add_cost);
1715 /* Count number of uses of each hard reg by pseudo regs allocated to it
1716 and then order them by decreasing use. First exclude hard registers
1717 that are live in or across this insn. */
1719 REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
1720 REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
1721 IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos);
1722 IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos2);
1724 /* Now find out which pseudos are allocated to these hard registers,
1725 and update the spill costs accordingly. */
1726 CLEAR_REG_SET (&pseudos_counted);
1728 EXECUTE_IF_SET_IN_REG_SET
1729 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
1731 count_pseudo (i);
1733 EXECUTE_IF_SET_IN_REG_SET
1734 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
1736 count_pseudo (i);
1738 CLEAR_REG_SET (&pseudos_counted);
1741 /* Vector of reload-numbers showing the order in which the reloads should
1742 be processed. */
1743 static short reload_order[MAX_RELOADS];
1745 /* This is used to keep track of the spill regs used in one insn. */
1746 static HARD_REG_SET used_spill_regs_local;
1748 /* We decided to spill hard register SPILLED, which has a size of
1749 SPILLED_NREGS. Determine how pseudo REG, which is live during the insn,
1750 is affected. We will add it to SPILLED_PSEUDOS if necessary, and we will
1751 update SPILL_COST/SPILL_ADD_COST. */
1753 static void
1754 count_spilled_pseudo (int spilled, int spilled_nregs, int reg)
1756 int r = reg_renumber[reg];
1757 int nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
1759 if (REGNO_REG_SET_P (&spilled_pseudos, reg)
1760 || spilled + spilled_nregs <= r || r + nregs <= spilled)
1761 return;
1763 SET_REGNO_REG_SET (&spilled_pseudos, reg);
1765 spill_add_cost[r] -= REG_FREQ (reg);
1766 while (nregs-- > 0)
1767 spill_cost[r + nregs] -= REG_FREQ (reg);
1770 /* Find reload register to use for reload number ORDER. */
1772 static int
1773 find_reg (struct insn_chain *chain, int order)
1775 int rnum = reload_order[order];
1776 struct reload *rl = rld + rnum;
1777 int best_cost = INT_MAX;
1778 int best_reg = -1;
1779 unsigned int i, j;
1780 int k;
1781 HARD_REG_SET not_usable;
1782 HARD_REG_SET used_by_other_reload;
1783 reg_set_iterator rsi;
1785 COPY_HARD_REG_SET (not_usable, bad_spill_regs);
1786 IOR_HARD_REG_SET (not_usable, bad_spill_regs_global);
1787 IOR_COMPL_HARD_REG_SET (not_usable, reg_class_contents[rl->class]);
1789 CLEAR_HARD_REG_SET (used_by_other_reload);
1790 for (k = 0; k < order; k++)
1792 int other = reload_order[k];
1794 if (rld[other].regno >= 0 && reloads_conflict (other, rnum))
1795 for (j = 0; j < rld[other].nregs; j++)
1796 SET_HARD_REG_BIT (used_by_other_reload, rld[other].regno + j);
1799 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1801 unsigned int regno = i;
1803 if (! TEST_HARD_REG_BIT (not_usable, regno)
1804 && ! TEST_HARD_REG_BIT (used_by_other_reload, regno)
1805 && HARD_REGNO_MODE_OK (regno, rl->mode))
1807 int this_cost = spill_cost[regno];
1808 int ok = 1;
1809 unsigned int this_nregs = hard_regno_nregs[regno][rl->mode];
1811 for (j = 1; j < this_nregs; j++)
1813 this_cost += spill_add_cost[regno + j];
1814 if ((TEST_HARD_REG_BIT (not_usable, regno + j))
1815 || TEST_HARD_REG_BIT (used_by_other_reload, regno + j))
1816 ok = 0;
1818 if (! ok)
1819 continue;
1820 if (rl->in && REG_P (rl->in) && REGNO (rl->in) == regno)
1821 this_cost--;
1822 if (rl->out && REG_P (rl->out) && REGNO (rl->out) == regno)
1823 this_cost--;
1824 if (this_cost < best_cost
1825 /* Among registers with equal cost, prefer caller-saved ones, or
1826 use REG_ALLOC_ORDER if it is defined. */
1827 || (this_cost == best_cost
1828 #ifdef REG_ALLOC_ORDER
1829 && (inv_reg_alloc_order[regno]
1830 < inv_reg_alloc_order[best_reg])
1831 #else
1832 && call_used_regs[regno]
1833 && ! call_used_regs[best_reg]
1834 #endif
1837 best_reg = regno;
1838 best_cost = this_cost;
1842 if (best_reg == -1)
1843 return 0;
1845 if (dump_file)
1846 fprintf (dump_file, "Using reg %d for reload %d\n", best_reg, rnum);
1848 rl->nregs = hard_regno_nregs[best_reg][rl->mode];
1849 rl->regno = best_reg;
1851 EXECUTE_IF_SET_IN_REG_SET
1852 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, j, rsi)
1854 count_spilled_pseudo (best_reg, rl->nregs, j);
1857 EXECUTE_IF_SET_IN_REG_SET
1858 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, j, rsi)
1860 count_spilled_pseudo (best_reg, rl->nregs, j);
1863 for (i = 0; i < rl->nregs; i++)
1865 gcc_assert (spill_cost[best_reg + i] == 0);
1866 gcc_assert (spill_add_cost[best_reg + i] == 0);
1867 SET_HARD_REG_BIT (used_spill_regs_local, best_reg + i);
1869 return 1;
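/* Sketch of the cost comparison used above (the numbers are assumptions
   chosen for exposition).  For a reload needing two hard regs starting
   at REGNO the loop computes

     this_cost = spill_cost[REGNO] + spill_add_cost[REGNO + 1];

   and then subtracts one for each of the reload's in and out values
   that already sits in REGNO.  With spill_cost = {10, 3, 0, ...} and
   spill_add_cost cleared, a single-register reload prefers hard reg 2
   (cost 0) over 1 (cost 3) and 0 (cost 10); ties fall back to
   REG_ALLOC_ORDER or to preferring call-used registers.  */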
1872 /* Find more reload regs to satisfy the remaining need of an insn, which
1873 is given by CHAIN.
1874 Do it by ascending class number, since otherwise a reg
1875 might be spilled for a big class and might fail to count
1876 for a smaller class even though it belongs to that class. */
1878 static void
1879 find_reload_regs (struct insn_chain *chain)
1881 int i;
1883 /* In order to be certain of getting the registers we need,
1884 we must sort the reloads into order of increasing register class.
1885 Then our grabbing of reload registers will parallel the process
1886 that provided the reload registers. */
1887 for (i = 0; i < chain->n_reloads; i++)
1889 /* Show whether this reload already has a hard reg. */
1890 if (chain->rld[i].reg_rtx)
1892 int regno = REGNO (chain->rld[i].reg_rtx);
1893 chain->rld[i].regno = regno;
1894 chain->rld[i].nregs
1895 = hard_regno_nregs[regno][GET_MODE (chain->rld[i].reg_rtx)];
1897 else
1898 chain->rld[i].regno = -1;
1899 reload_order[i] = i;
1902 n_reloads = chain->n_reloads;
1903 memcpy (rld, chain->rld, n_reloads * sizeof (struct reload));
1905 CLEAR_HARD_REG_SET (used_spill_regs_local);
1907 if (dump_file)
1908 fprintf (dump_file, "Spilling for insn %d.\n", INSN_UID (chain->insn));
1910 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
1912 /* Compute the order of preference for hard registers to spill. */
1914 order_regs_for_reload (chain);
1916 for (i = 0; i < n_reloads; i++)
1918 int r = reload_order[i];
1920 /* Ignore reloads that got marked inoperative. */
1921 if ((rld[r].out != 0 || rld[r].in != 0 || rld[r].secondary_p)
1922 && ! rld[r].optional
1923 && rld[r].regno == -1)
1924 if (! find_reg (chain, i))
1926 if (dump_file)
1927 fprintf (dump_file, "reload failure for reload %d\n", r);
1928 spill_failure (chain->insn, rld[r].class);
1929 failure = 1;
1930 return;
1934 COPY_HARD_REG_SET (chain->used_spill_regs, used_spill_regs_local);
1935 IOR_HARD_REG_SET (used_spill_regs, used_spill_regs_local);
1937 memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1940 static void
1941 select_reload_regs (void)
1943 struct insn_chain *chain;
1945 /* Try to satisfy the needs for each insn. */
1946 for (chain = insns_need_reload; chain != 0;
1947 chain = chain->next_need_reload)
1948 find_reload_regs (chain);
1951 /* Delete all insns that were inserted by emit_caller_save_insns during
1952 this iteration. */
1953 static void
1954 delete_caller_save_insns (void)
1956 struct insn_chain *c = reload_insn_chain;
1958 while (c != 0)
1960 while (c != 0 && c->is_caller_save_insn)
1962 struct insn_chain *next = c->next;
1963 rtx insn = c->insn;
1965 if (c == reload_insn_chain)
1966 reload_insn_chain = next;
1967 delete_insn (insn);
1969 if (next)
1970 next->prev = c->prev;
1971 if (c->prev)
1972 c->prev->next = next;
1973 c->next = unused_insn_chains;
1974 unused_insn_chains = c;
1975 c = next;
1977 if (c != 0)
1978 c = c->next;
1982 /* Handle the failure to find a register to spill.
1983 INSN should be one of the insns which needed this particular spill reg. */
1985 static void
1986 spill_failure (rtx insn, enum reg_class class)
1988 if (asm_noperands (PATTERN (insn)) >= 0)
1989 error_for_asm (insn, "can't find a register in class %qs while "
1990 "reloading %<asm%>",
1991 reg_class_names[class]);
1992 else
1994 error ("unable to find a register to spill in class %qs",
1995 reg_class_names[class]);
1997 if (dump_file)
1999 fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
2000 debug_reload_to_stream (dump_file);
2002 fatal_insn ("this is the insn:", insn);
2006 /* Delete an unneeded INSN and any previous insns whose sole purpose is loading
2007 data that is dead in INSN. */
2009 static void
2010 delete_dead_insn (rtx insn)
2012 rtx prev = prev_real_insn (insn);
2013 rtx prev_dest;
2015 /* If the previous insn sets a register that dies in our insn, delete it
2016 too. */
2017 if (prev && GET_CODE (PATTERN (prev)) == SET
2018 && (prev_dest = SET_DEST (PATTERN (prev)), REG_P (prev_dest))
2019 && reg_mentioned_p (prev_dest, PATTERN (insn))
2020 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
2021 && ! side_effects_p (SET_SRC (PATTERN (prev))))
2022 delete_dead_insn (prev);
2024 SET_INSN_DELETED (insn);
2027 /* Modify the home of pseudo-reg I.
2028 The new home is present in reg_renumber[I].
2030 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2031 or it may be -1, meaning there is none or it is not relevant.
2032 This is used so that all pseudos spilled from a given hard reg
2033 can share one stack slot. */
2035 static void
2036 alter_reg (int i, int from_reg)
2038 /* When outputting an inline function, this can happen
2039 for a reg that isn't actually used. */
2040 if (regno_reg_rtx[i] == 0)
2041 return;
2043 /* If the reg got changed to a MEM at rtl-generation time,
2044 ignore it. */
2045 if (!REG_P (regno_reg_rtx[i]))
2046 return;
2048 /* Modify the reg-rtx to contain the new hard reg
2049 number or else to contain its pseudo reg number. */
2050 SET_REGNO (regno_reg_rtx[i],
2051 reg_renumber[i] >= 0 ? reg_renumber[i] : i);
2053 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2054 allocate a stack slot for it. */
2056 if (reg_renumber[i] < 0
2057 && REG_N_REFS (i) > 0
2058 && reg_equiv_constant[i] == 0
2059 && (reg_equiv_invariant[i] == 0 || reg_equiv_init[i] == 0)
2060 && reg_equiv_memory_loc[i] == 0)
2062 rtx x;
2063 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2064 unsigned int inherent_size = PSEUDO_REGNO_BYTES (i);
2065 unsigned int inherent_align = GET_MODE_ALIGNMENT (mode);
2066 unsigned int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2067 unsigned int min_align = reg_max_ref_width[i] * BITS_PER_UNIT;
2068 int adjust = 0;
2070 /* Each pseudo reg has an inherent size which comes from its own mode,
2071 and a total size which provides room for paradoxical subregs
2072 which refer to the pseudo reg in wider modes.
2074 We can use a slot already allocated if it provides both
2075 enough inherent space and enough total space.
2076 Otherwise, we allocate a new slot, making sure that it has no less
2077 inherent space, and no less total space, than the previous slot. */
2078 if (from_reg == -1)
2080 alias_set_type alias_set = new_alias_set ();
2082 /* No known place to spill from => no slot to reuse. */
2083 x = assign_stack_local (mode, total_size,
2084 min_align > inherent_align
2085 || total_size > inherent_size ? -1 : 0);
2086 if (BYTES_BIG_ENDIAN)
2087 /* Cancel the big-endian correction done in assign_stack_local.
2088 Get the address of the beginning of the slot.
2089 This is so we can do a big-endian correction unconditionally
2090 below. */
2091 adjust = inherent_size - total_size;
2093 /* Nothing can alias this slot except this pseudo. */
2094 set_mem_alias_set (x, alias_set);
2095 dse_record_singleton_alias_set (alias_set, mode);
2098 /* Reuse a stack slot if possible. */
2099 else if (spill_stack_slot[from_reg] != 0
2100 && spill_stack_slot_width[from_reg] >= total_size
2101 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2102 >= inherent_size)
2103 && MEM_ALIGN (spill_stack_slot[from_reg]) >= min_align)
2104 x = spill_stack_slot[from_reg];
2105 /* Allocate a bigger slot. */
2106 else
2108 /* Compute maximum size needed, both for inherent size
2109 and for total size. */
2110 rtx stack_slot;
2112 if (spill_stack_slot[from_reg])
2114 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2115 > inherent_size)
2116 mode = GET_MODE (spill_stack_slot[from_reg]);
2117 if (spill_stack_slot_width[from_reg] > total_size)
2118 total_size = spill_stack_slot_width[from_reg];
2119 if (MEM_ALIGN (spill_stack_slot[from_reg]) > min_align)
2120 min_align = MEM_ALIGN (spill_stack_slot[from_reg]);
2123 /* Make a slot with that size. */
2124 x = assign_stack_local (mode, total_size,
2125 min_align > inherent_align
2126 || total_size > inherent_size ? -1 : 0);
2127 stack_slot = x;
2129 /* All pseudos mapped to this slot can alias each other. */
2130 if (spill_stack_slot[from_reg])
2132 alias_set_type alias_set
2133 = MEM_ALIAS_SET (spill_stack_slot[from_reg]);
2134 set_mem_alias_set (x, alias_set);
2135 dse_invalidate_singleton_alias_set (alias_set);
2137 else
2139 alias_set_type alias_set = new_alias_set ();
2140 set_mem_alias_set (x, alias_set);
2141 dse_record_singleton_alias_set (alias_set, mode);
2144 if (BYTES_BIG_ENDIAN)
2146 /* Cancel the big-endian correction done in assign_stack_local.
2147 Get the address of the beginning of the slot.
2148 This is so we can do a big-endian correction unconditionally
2149 below. */
2150 adjust = GET_MODE_SIZE (mode) - total_size;
2151 if (adjust)
2152 stack_slot
2153 = adjust_address_nv (x, mode_for_size (total_size
2154 * BITS_PER_UNIT,
2155 MODE_INT, 1),
2156 adjust);
2159 spill_stack_slot[from_reg] = stack_slot;
2160 spill_stack_slot_width[from_reg] = total_size;
2163 /* On a big endian machine, the "address" of the slot
2164 is the address of the low part that fits its inherent mode. */
2165 if (BYTES_BIG_ENDIAN && inherent_size < total_size)
2166 adjust += (total_size - inherent_size);
2168 /* If we have any adjustment to make, or if the stack slot is the
2169 wrong mode, make a new stack slot. */
2170 x = adjust_address_nv (x, GET_MODE (regno_reg_rtx[i]), adjust);
2172 /* If we have a decl for the original register, set it for the
2173 memory. If this is a shared MEM, make a copy. */
2174 if (REG_EXPR (regno_reg_rtx[i])
2175 && DECL_P (REG_EXPR (regno_reg_rtx[i])))
2177 rtx decl = DECL_RTL_IF_SET (REG_EXPR (regno_reg_rtx[i]));
2179 /* We can do this only for the DECL's home pseudo, not for
2180 any copies of it, since otherwise when the stack slot
2181 is reused, nonoverlapping_memrefs_p might think they
2182 cannot overlap. */
2183 if (decl && REG_P (decl) && REGNO (decl) == (unsigned) i)
2185 if (from_reg != -1 && spill_stack_slot[from_reg] == x)
2186 x = copy_rtx (x);
2188 set_mem_attrs_from_reg (x, regno_reg_rtx[i]);
2192 /* Save the stack slot for later. */
2193 reg_equiv_memory_loc[i] = x;
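/* Worked example for the slot sizing above (the modes and widths are
   assumptions chosen for illustration).  A pseudo of mode HImode has
   inherent_size == 2; if some insn refers to it through a paradoxical
   (subreg:SI ...), reg_max_ref_width records 4, so total_size == 4 and
   the stack slot must provide all four bytes.  On a big-endian target
   the address is additionally adjusted so that, as noted above, the
   slot's address is that of the low part which fits the inherent
   mode.  */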
2197 /* Mark the slots in regs_ever_live for the hard regs used by
2198 pseudo-reg number REGNO, accessed in MODE. */
2200 static void
2201 mark_home_live_1 (int regno, enum machine_mode mode)
2203 int i, lim;
2205 i = reg_renumber[regno];
2206 if (i < 0)
2207 return;
2208 lim = end_hard_regno (mode, i);
2209 while (i < lim)
2210 df_set_regs_ever_live(i++, true);
2213 /* Mark the slots in regs_ever_live for the hard regs
2214 used by pseudo-reg number REGNO. */
2216 void
2217 mark_home_live (int regno)
2219 if (reg_renumber[regno] >= 0)
2220 mark_home_live_1 (regno, PSEUDO_REGNO_MODE (regno));
2223 /* This function handles the tracking of elimination offsets around branches.
2225 X is a piece of RTL being scanned.
2227 INSN is the insn that it came from, if any.
2229 INITIAL_P is nonzero if we are to set the offset to be the initial
2230 offset and zero if we are setting the offset of the label to be the
2231 current offset. */
2233 static void
2234 set_label_offsets (rtx x, rtx insn, int initial_p)
2236 enum rtx_code code = GET_CODE (x);
2237 rtx tem;
2238 unsigned int i;
2239 struct elim_table *p;
2241 switch (code)
2243 case LABEL_REF:
2244 if (LABEL_REF_NONLOCAL_P (x))
2245 return;
2247 x = XEXP (x, 0);
2249 /* ... fall through ... */
2251 case CODE_LABEL:
2252 /* If we know nothing about this label, set the desired offsets. Note
2253 that this sets the offset at a label to be the offset before a label
2254 if we don't know anything about the label. This is not correct for
2255 the label after a BARRIER, but is the best guess we can make. If
2256 we guessed wrong, we will suppress an elimination that might have
2257 been possible had we been able to guess correctly. */
2259 if (! offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num])
2261 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2262 offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
2263 = (initial_p ? reg_eliminate[i].initial_offset
2264 : reg_eliminate[i].offset);
2265 offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num] = 1;
2268 /* Otherwise, if this is the definition of a label and it is
2269 preceded by a BARRIER, set our offsets to the known offset of
2270 that label. */
2272 else if (x == insn
2273 && (tem = prev_nonnote_insn (insn)) != 0
2274 && BARRIER_P (tem))
2275 set_offsets_for_label (insn);
2276 else
2277 /* If neither of the above cases is true, compare each offset
2278 with those previously recorded and suppress any eliminations
2279 where the offsets disagree. */
2281 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2282 if (offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
2283 != (initial_p ? reg_eliminate[i].initial_offset
2284 : reg_eliminate[i].offset))
2285 reg_eliminate[i].can_eliminate = 0;
2287 return;
2289 case JUMP_INSN:
2290 set_label_offsets (PATTERN (insn), insn, initial_p);
2292 /* ... fall through ... */
2294 case INSN:
2295 case CALL_INSN:
2296 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2297 and hence must have all eliminations at their initial offsets. */
2298 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2299 if (REG_NOTE_KIND (tem) == REG_LABEL)
2300 set_label_offsets (XEXP (tem, 0), insn, 1);
2301 return;
2303 case PARALLEL:
2304 case ADDR_VEC:
2305 case ADDR_DIFF_VEC:
2306 /* Each of the labels in the parallel or address vector must be
2307 at their initial offsets. We want the first field for PARALLEL
2308 and ADDR_VEC and the second field for ADDR_DIFF_VEC. */
2310 for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2311 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2312 insn, initial_p);
2313 return;
2315 case SET:
2316 /* We only care about setting PC. If the source is not RETURN,
2317 IF_THEN_ELSE, or a label, disable any eliminations not at
2318 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2319 isn't one of those possibilities. For branches to a label,
2320 call ourselves recursively.
2322 Note that this can disable elimination unnecessarily when we have
2323 a non-local goto since it will look like a non-constant jump to
2324 someplace in the current function. This isn't a significant
2325 problem since such jumps will normally be when all elimination
2326 pairs are back to their initial offsets. */
2328 if (SET_DEST (x) != pc_rtx)
2329 return;
2331 switch (GET_CODE (SET_SRC (x)))
2333 case PC:
2334 case RETURN:
2335 return;
2337 case LABEL_REF:
2338 set_label_offsets (SET_SRC (x), insn, initial_p);
2339 return;
2341 case IF_THEN_ELSE:
2342 tem = XEXP (SET_SRC (x), 1);
2343 if (GET_CODE (tem) == LABEL_REF)
2344 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2345 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2346 break;
2348 tem = XEXP (SET_SRC (x), 2);
2349 if (GET_CODE (tem) == LABEL_REF)
2350 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2351 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2352 break;
2353 return;
2355 default:
2356 break;
2359 /* If we reach here, all eliminations must be at their initial
2360 offset because we are doing a jump to a variable address. */
2361 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2362 if (p->offset != p->initial_offset)
2363 p->can_eliminate = 0;
2364 break;
2366 default:
2367 break;
2371 /* Scan X and replace any eliminable registers (such as fp) with a
2372 replacement (such as sp), plus an offset.
2374 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2375 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2376 MEM, we are allowed to replace a sum of a register and the constant zero
2377 with the register, which we cannot do outside a MEM. In addition, we need
2378 to record the fact that a register is referenced outside a MEM.
2380 If INSN is an insn, it is the insn containing X. If we replace a REG
2381 in a SET_DEST with an equivalent MEM and INSN is nonzero, write a
2382 CLOBBER of the pseudo after INSN so find_equiv_reg will know that
2383 the REG is being modified.
2385 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2386 That's used when we eliminate in expressions stored in notes.
2387 This means, do not set ref_outside_mem even if the reference
2388 is outside of MEMs.
2390 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
2391 replacements done assuming all offsets are at their initial values. If
2392 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2393 encounter, return the actual location so that find_reloads will do
2394 the proper thing. */
2396 static rtx
2397 eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
2398 bool may_use_invariant)
2400 enum rtx_code code = GET_CODE (x);
2401 struct elim_table *ep;
2402 int regno;
2403 rtx new;
2404 int i, j;
2405 const char *fmt;
2406 int copied = 0;
2408 if (! current_function_decl)
2409 return x;
2411 switch (code)
2413 case CONST_INT:
2414 case CONST_DOUBLE:
2415 case CONST_FIXED:
2416 case CONST_VECTOR:
2417 case CONST:
2418 case SYMBOL_REF:
2419 case CODE_LABEL:
2420 case PC:
2421 case CC0:
2422 case ASM_INPUT:
2423 case ADDR_VEC:
2424 case ADDR_DIFF_VEC:
2425 case RETURN:
2426 return x;
2428 case REG:
2429 regno = REGNO (x);
2431 /* First handle the case where we encounter a bare register that
2432 is eliminable. Replace it with a PLUS. */
2433 if (regno < FIRST_PSEUDO_REGISTER)
2435 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2436 ep++)
2437 if (ep->from_rtx == x && ep->can_eliminate)
2438 return plus_constant (ep->to_rtx, ep->previous_offset);
2441 else if (reg_renumber && reg_renumber[regno] < 0
2442 && reg_equiv_invariant && reg_equiv_invariant[regno])
2444 if (may_use_invariant)
2445 return eliminate_regs_1 (copy_rtx (reg_equiv_invariant[regno]),
2446 mem_mode, insn, true);
2447 /* There exists at least one use of REGNO that cannot be
2448 eliminated. Prevent the defining insn from being deleted. */
2449 reg_equiv_init[regno] = NULL_RTX;
2450 alter_reg (regno, -1);
2452 return x;
2454 /* You might think handling MINUS in a manner similar to PLUS is a
2455 good idea. It is not. It has been tried multiple times and every
2456 time the change has had to be reverted.
2458 Other parts of reload know a PLUS is special (gen_reload for example)
2459 and require special code to handle a reloaded PLUS operand.
2461 Also consider backends where the flags register is clobbered by a
2462 MINUS, but we can emit a PLUS that does not clobber flags (IA-32,
2463 lea instruction comes to mind). If we try to reload a MINUS, we
2464 may kill the flags register that was holding a useful value.
2466 So, please before trying to handle MINUS, consider reload as a
2467 whole instead of this little section as well as the backend issues. */
2468 case PLUS:
2469 /* If this is the sum of an eliminable register and a constant, rework
2470 the sum. */
2471 if (REG_P (XEXP (x, 0))
2472 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2473 && CONSTANT_P (XEXP (x, 1)))
2475 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2476 ep++)
2477 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2479 /* The only time we want to replace a PLUS with a REG (this
2480 occurs when the constant operand of the PLUS is the negative
2481 of the offset) is when we are inside a MEM. We won't want
2482 to do so at other times because that would change the
2483 structure of the insn in a way that reload can't handle.
2484 We special-case the commonest situation in
2485 eliminate_regs_in_insn, so just replace a PLUS with a
2486 PLUS here, unless inside a MEM. */
2487 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
2488 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2489 return ep->to_rtx;
2490 else
2491 return gen_rtx_PLUS (Pmode, ep->to_rtx,
2492 plus_constant (XEXP (x, 1),
2493 ep->previous_offset));
2496 /* If the register is not eliminable, we are done since the other
2497 operand is a constant. */
2498 return x;
2501 /* If this is part of an address, we want to bring any constant to the
2502 outermost PLUS. We will do this by doing register replacement in
2503 our operands and seeing if a constant shows up in one of them.
2505 Note that there is no risk of modifying the structure of the insn,
2506 since we only get called for its operands, thus we are either
2507 modifying the address inside a MEM, or something like an address
2508 operand of a load-address insn. */
2511 rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true);
2512 rtx new1 = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true);
2514 if (reg_renumber && (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)))
2516 /* If one side is a PLUS and the other side is a pseudo that
2517 didn't get a hard register but has a reg_equiv_constant,
2518 we must replace the constant here since it may no longer
2519 be in the position of any operand. */
2520 if (GET_CODE (new0) == PLUS && REG_P (new1)
2521 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2522 && reg_renumber[REGNO (new1)] < 0
2523 && reg_equiv_constant != 0
2524 && reg_equiv_constant[REGNO (new1)] != 0)
2525 new1 = reg_equiv_constant[REGNO (new1)];
2526 else if (GET_CODE (new1) == PLUS && REG_P (new0)
2527 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2528 && reg_renumber[REGNO (new0)] < 0
2529 && reg_equiv_constant[REGNO (new0)] != 0)
2530 new0 = reg_equiv_constant[REGNO (new0)];
2532 new = form_sum (new0, new1);
2534 /* As above, if we are not inside a MEM we do not want to
2535 turn a PLUS into something else. We might try to do so here
2536 for an addition of 0 if we aren't optimizing. */
2537 if (! mem_mode && GET_CODE (new) != PLUS)
2538 return gen_rtx_PLUS (GET_MODE (x), new, const0_rtx);
2539 else
2540 return new;
2543 return x;
2545 case MULT:
2546 /* If this is the product of an eliminable register and a
2547 constant, apply the distributive law and move the constant out
2548 so that we have (plus (mult ..) ..). This is needed in order
2549 to keep load-address insns valid. This case is pathological.
2550 We ignore the possibility of overflow here. */
2551 if (REG_P (XEXP (x, 0))
2552 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2553 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2554 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2555 ep++)
2556 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2558 if (! mem_mode
2559 /* Refs inside notes don't count for this purpose. */
2560 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2561 || GET_CODE (insn) == INSN_LIST)))
2562 ep->ref_outside_mem = 1;
2564 return
2565 plus_constant (gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
2566 ep->previous_offset * INTVAL (XEXP (x, 1)));
2569 /* ... fall through ... */
2571 case CALL:
2572 case COMPARE:
2573 /* See comments before PLUS about handling MINUS. */
2574 case MINUS:
2575 case DIV: case UDIV:
2576 case MOD: case UMOD:
2577 case AND: case IOR: case XOR:
2578 case ROTATERT: case ROTATE:
2579 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2580 case NE: case EQ:
2581 case GE: case GT: case GEU: case GTU:
2582 case LE: case LT: case LEU: case LTU:
2584 rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false);
2585 rtx new1 = XEXP (x, 1)
2586 ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, false) : 0;
2588 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2589 return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
2591 return x;
2593 case EXPR_LIST:
2594 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2595 if (XEXP (x, 0))
2597 new = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true);
2598 if (new != XEXP (x, 0))
2600 /* If this is a REG_DEAD note, it is not valid anymore.
2601 Using the eliminated version could result in creating a
2602 REG_DEAD note for the stack or frame pointer. */
2603 if (GET_MODE (x) == REG_DEAD)
2604 return (XEXP (x, 1)
2605 ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true)
2606 : NULL_RTX);
2608 x = gen_rtx_EXPR_LIST (REG_NOTE_KIND (x), new, XEXP (x, 1));
2612 /* ... fall through ... */
2614 case INSN_LIST:
2615 /* Now do eliminations in the rest of the chain. If this was
2616 an EXPR_LIST, this might result in allocating more memory than is
2617 strictly needed, but it simplifies the code. */
2618 if (XEXP (x, 1))
2620 new = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true);
2621 if (new != XEXP (x, 1))
2622 return
2623 gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
2625 return x;
2627 case PRE_INC:
2628 case POST_INC:
2629 case PRE_DEC:
2630 case POST_DEC:
2631 /* We do not support elimination of a register that is modified.
2632 elimination_effects has already made sure that this does not
2633 happen. */
2634 return x;
2636 case PRE_MODIFY:
2637 case POST_MODIFY:
2638 /* We do not support elimination of a register that is modified.
2639 elimination_effects has already made sure that this does not
2640 happen. The only remaining case we need to consider here is
2641 that the increment value may be an eliminable register. */
2642 if (GET_CODE (XEXP (x, 1)) == PLUS
2643 && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
2645 rtx new = eliminate_regs_1 (XEXP (XEXP (x, 1), 1), mem_mode,
2646 insn, true);
2648 if (new != XEXP (XEXP (x, 1), 1))
2649 return gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (x, 0),
2650 gen_rtx_PLUS (GET_MODE (x),
2651 XEXP (x, 0), new));
2653 return x;
2655 case STRICT_LOW_PART:
2656 case NEG: case NOT:
2657 case SIGN_EXTEND: case ZERO_EXTEND:
2658 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2659 case FLOAT: case FIX:
2660 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2661 case ABS:
2662 case SQRT:
2663 case FFS:
2664 case CLZ:
2665 case CTZ:
2666 case POPCOUNT:
2667 case PARITY:
2668 case BSWAP:
2669 new = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false);
2670 if (new != XEXP (x, 0))
2671 return gen_rtx_fmt_e (code, GET_MODE (x), new);
2672 return x;
2674 case SUBREG:
2675 /* Similar to above processing, but preserve SUBREG_BYTE.
2676 Convert (subreg (mem)) to (mem) if not paradoxical.
2677 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2678 pseudo didn't get a hard reg, we must replace this with the
2679 eliminated version of the memory location because push_reload
2680 may do the replacement in certain circumstances. */
2681 if (REG_P (SUBREG_REG (x))
2682 && (GET_MODE_SIZE (GET_MODE (x))
2683 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2684 && reg_equiv_memory_loc != 0
2685 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2687 new = SUBREG_REG (x);
2689 else
2690 new = eliminate_regs_1 (SUBREG_REG (x), mem_mode, insn, false);
2692 if (new != SUBREG_REG (x))
2694 int x_size = GET_MODE_SIZE (GET_MODE (x));
2695 int new_size = GET_MODE_SIZE (GET_MODE (new));
2697 if (MEM_P (new)
2698 && ((x_size < new_size
2699 #ifdef WORD_REGISTER_OPERATIONS
2700 /* On these machines, combine can create rtl of the form
2701 (set (subreg:m1 (reg:m2 R) 0) ...)
2702 where m1 < m2, and expects something interesting to
2703 happen to the entire word. Moreover, it will use the
2704 (reg:m2 R) later, expecting all bits to be preserved.
2705 So if the number of words is the same, preserve the
2706 subreg so that push_reload can see it. */
2707 && ! ((x_size - 1) / UNITS_PER_WORD
2708 == (new_size - 1) / UNITS_PER_WORD)
2709 #endif
2711 || x_size == new_size)
2713 return adjust_address_nv (new, GET_MODE (x), SUBREG_BYTE (x));
2714 else
2715 return gen_rtx_SUBREG (GET_MODE (x), new, SUBREG_BYTE (x));
2718 return x;
2720 case MEM:
2721 /* Our only special processing is to pass the mode of the MEM to our
2722 recursive call and copy the flags. While we are here, handle this
2723 case more efficiently. */
2724 return
2725 replace_equiv_address_nv (x,
2726 eliminate_regs_1 (XEXP (x, 0), GET_MODE (x),
2727 insn, true));
2729 case USE:
2730 /* Handle insn_list USE that a call to a pure function may generate. */
2731 new = eliminate_regs_1 (XEXP (x, 0), 0, insn, false);
2732 if (new != XEXP (x, 0))
2733 return gen_rtx_USE (GET_MODE (x), new);
2734 return x;
2736 case CLOBBER:
2737 case ASM_OPERANDS:
2738 case SET:
2739 gcc_unreachable ();
2741 default:
2742 break;
2745 /* Process each of our operands recursively. If any have changed, make a
2746 copy of the rtx. */
2747 fmt = GET_RTX_FORMAT (code);
2748 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2750 if (*fmt == 'e')
2752 new = eliminate_regs_1 (XEXP (x, i), mem_mode, insn, false);
2753 if (new != XEXP (x, i) && ! copied)
2755 x = shallow_copy_rtx (x);
2756 copied = 1;
2758 XEXP (x, i) = new;
2760 else if (*fmt == 'E')
2762 int copied_vec = 0;
2763 for (j = 0; j < XVECLEN (x, i); j++)
2765 new = eliminate_regs_1 (XVECEXP (x, i, j), mem_mode, insn, false);
2766 if (new != XVECEXP (x, i, j) && ! copied_vec)
2768 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
2769 XVEC (x, i)->elem);
2770 if (! copied)
2772 x = shallow_copy_rtx (x);
2773 copied = 1;
2775 XVEC (x, i) = new_v;
2776 copied_vec = 1;
2778 XVECEXP (x, i, j) = new;
2783 return x;
2787 eliminate_regs (rtx x, enum machine_mode mem_mode, rtx insn)
2789 return eliminate_regs_1 (x, mem_mode, insn, false);
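/* Illustrative example of the replacement performed above (register
   numbers and the offset are assumptions for exposition).  With an
   active frame-pointer to stack-pointer elimination whose current
   offset is 16, a reference such as

     (mem:SI (plus:SI (reg:SI fp) (const_int 8)))

   is rewritten as

     (mem:SI (plus:SI (reg:SI sp) (const_int 24)))

   while a bare (reg:SI fp) encountered outside a MEM becomes
   (plus:SI (reg:SI sp) (const_int 16)); as the PLUS case explains, the
   sum is only collapsed to a bare register when we are inside a MEM.  */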
2792 /* Scan rtx X for modifications of elimination target registers. Update
2793 the table of eliminables to reflect the changed state. MEM_MODE is
2794 the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM. */
2796 static void
2797 elimination_effects (rtx x, enum machine_mode mem_mode)
2799 enum rtx_code code = GET_CODE (x);
2800 struct elim_table *ep;
2801 int regno;
2802 int i, j;
2803 const char *fmt;
2805 switch (code)
2807 case CONST_INT:
2808 case CONST_DOUBLE:
2809 case CONST_FIXED:
2810 case CONST_VECTOR:
2811 case CONST:
2812 case SYMBOL_REF:
2813 case CODE_LABEL:
2814 case PC:
2815 case CC0:
2816 case ASM_INPUT:
2817 case ADDR_VEC:
2818 case ADDR_DIFF_VEC:
2819 case RETURN:
2820 return;
2822 case REG:
2823 regno = REGNO (x);
2825 /* First handle the case where we encounter a bare register that
2826 is eliminable; all we need do here is note any reference outside a MEM. */
2827 if (regno < FIRST_PSEUDO_REGISTER)
2829 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2830 ep++)
2831 if (ep->from_rtx == x && ep->can_eliminate)
2833 if (! mem_mode)
2834 ep->ref_outside_mem = 1;
2835 return;
2839 else if (reg_renumber[regno] < 0 && reg_equiv_constant
2840 && reg_equiv_constant[regno]
2841 && ! function_invariant_p (reg_equiv_constant[regno]))
2842 elimination_effects (reg_equiv_constant[regno], mem_mode);
2843 return;
2845 case PRE_INC:
2846 case POST_INC:
2847 case PRE_DEC:
2848 case POST_DEC:
2849 case POST_MODIFY:
2850 case PRE_MODIFY:
2851 /* If we modify the source of an elimination rule, disable it. */
2852 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2853 if (ep->from_rtx == XEXP (x, 0))
2854 ep->can_eliminate = 0;
2856 /* If we modify the target of an elimination rule by adding a constant,
2857 update its offset. If we modify the target in any other way, we'll
2858 have to disable the rule as well. */
2859 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2860 if (ep->to_rtx == XEXP (x, 0))
2862 int size = GET_MODE_SIZE (mem_mode);
2864 /* If more bytes than MEM_MODE are pushed, account for them. */
2865 #ifdef PUSH_ROUNDING
2866 if (ep->to_rtx == stack_pointer_rtx)
2867 size = PUSH_ROUNDING (size);
2868 #endif
2869 if (code == PRE_DEC || code == POST_DEC)
2870 ep->offset += size;
2871 else if (code == PRE_INC || code == POST_INC)
2872 ep->offset -= size;
2873 else if (code == PRE_MODIFY || code == POST_MODIFY)
2875 if (GET_CODE (XEXP (x, 1)) == PLUS
2876 && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
2877 && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
2878 ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1));
2879 else
2880 ep->can_eliminate = 0;
2884 /* These two aren't unary operators. */
2885 if (code == POST_MODIFY || code == PRE_MODIFY)
2886 break;
2888 /* Fall through to generic unary operation case. */
2889 case STRICT_LOW_PART:
2890 case NEG: case NOT:
2891 case SIGN_EXTEND: case ZERO_EXTEND:
2892 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2893 case FLOAT: case FIX:
2894 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2895 case ABS:
2896 case SQRT:
2897 case FFS:
2898 case CLZ:
2899 case CTZ:
2900 case POPCOUNT:
2901 case PARITY:
2902 case BSWAP:
2903 elimination_effects (XEXP (x, 0), mem_mode);
2904 return;
2906 case SUBREG:
2907 if (REG_P (SUBREG_REG (x))
2908 && (GET_MODE_SIZE (GET_MODE (x))
2909 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2910 && reg_equiv_memory_loc != 0
2911 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2912 return;
2914 elimination_effects (SUBREG_REG (x), mem_mode);
2915 return;
2917 case USE:
2918 /* If using a register that is the source of an elimination we still
2919 think can be performed, note it cannot be performed since we don't
2920 know how this register is used. */
2921 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2922 if (ep->from_rtx == XEXP (x, 0))
2923 ep->can_eliminate = 0;
2925 elimination_effects (XEXP (x, 0), mem_mode);
2926 return;
2928 case CLOBBER:
2929 /* If clobbering a register that is the replacement register for an
2930 elimination we still think can be performed, note that it cannot
2931 be performed. Otherwise, we need not be concerned about it. */
2932 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2933 if (ep->to_rtx == XEXP (x, 0))
2934 ep->can_eliminate = 0;
2936 elimination_effects (XEXP (x, 0), mem_mode);
2937 return;
2939 case SET:
2940 /* Check for setting a register that we know about. */
2941 if (REG_P (SET_DEST (x)))
2943 /* See if this is setting the replacement register for an
2944 elimination.
2946 If DEST is the hard frame pointer, we do nothing because we
2947 assume that all assignments to the frame pointer are for
2948 non-local gotos and are being done at a time when they are valid
2949 and do not disturb anything else. Some machines want to
2950 eliminate a fake argument pointer (or even a fake frame pointer)
2951 with either the real frame or the stack pointer. Assignments to
2952 the hard frame pointer must not prevent this elimination. */
2954 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2955 ep++)
2956 if (ep->to_rtx == SET_DEST (x)
2957 && SET_DEST (x) != hard_frame_pointer_rtx)
2959 /* If it is being incremented, adjust the offset. Otherwise,
2960 this elimination can't be done. */
2961 rtx src = SET_SRC (x);
2963 if (GET_CODE (src) == PLUS
2964 && XEXP (src, 0) == SET_DEST (x)
2965 && GET_CODE (XEXP (src, 1)) == CONST_INT)
2966 ep->offset -= INTVAL (XEXP (src, 1));
2967 else
2968 ep->can_eliminate = 0;
2972 elimination_effects (SET_DEST (x), 0);
2973 elimination_effects (SET_SRC (x), 0);
2974 return;
2976 case MEM:
2977 /* Our only special processing is to pass the mode of the MEM to our
2978 recursive call. */
2979 elimination_effects (XEXP (x, 0), GET_MODE (x));
2980 return;
2982 default:
2983 break;
2986 fmt = GET_RTX_FORMAT (code);
2987 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2989 if (*fmt == 'e')
2990 elimination_effects (XEXP (x, i), mem_mode);
2991 else if (*fmt == 'E')
2992 for (j = 0; j < XVECLEN (x, i); j++)
2993 elimination_effects (XVECEXP (x, i, j), mem_mode);
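/* Example of an offset update handled above (the mode is an assumption
   for illustration).  A push such as

     (set (mem:SI (pre_dec:SI (reg:SI sp))) ...)

   reaches the PRE_DEC case with mem_mode == SImode, so each elimination
   whose to_rtx is the stack pointer has its offset increased by
   GET_MODE_SIZE (SImode), rounded by PUSH_ROUNDING where that macro is
   defined, keeping the recorded distance between the eliminated
   register and sp correct after the push.  */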
2997 /* Descend through rtx X and verify that no references to eliminable registers
2998 remain. If any do remain, mark the involved register as not
2999 eliminable. */
3001 static void
3002 check_eliminable_occurrences (rtx x)
3004 const char *fmt;
3005 int i;
3006 enum rtx_code code;
3008 if (x == 0)
3009 return;
3011 code = GET_CODE (x);
3013 if (code == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3015 struct elim_table *ep;
3017 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3018 if (ep->from_rtx == x)
3019 ep->can_eliminate = 0;
3020 return;
3023 fmt = GET_RTX_FORMAT (code);
3024 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3026 if (*fmt == 'e')
3027 check_eliminable_occurrences (XEXP (x, i));
3028 else if (*fmt == 'E')
3030 int j;
3031 for (j = 0; j < XVECLEN (x, i); j++)
3032 check_eliminable_occurrences (XVECEXP (x, i, j));
3037 /* Scan INSN and eliminate all eliminable registers in it.
3039 If REPLACE is nonzero, do the replacement destructively. Also
3040 delete the insn as dead if it is setting an eliminable register.
3042 If REPLACE is zero, do all our allocations in reload_obstack.
3044 If no eliminations were done and this insn doesn't require any elimination
3045 processing (these are not identical conditions: it might be updating sp,
3046 but not referencing fp; this needs to be seen during reload_as_needed so
3047 that the offset between fp and sp can be taken into consideration), zero
3048 is returned. Otherwise, 1 is returned. */
3050 static int
3051 eliminate_regs_in_insn (rtx insn, int replace)
3053 int icode = recog_memoized (insn);
3054 rtx old_body = PATTERN (insn);
3055 int insn_is_asm = asm_noperands (old_body) >= 0;
3056 rtx old_set = single_set (insn);
3057 rtx new_body;
3058 int val = 0;
3059 int i;
3060 rtx substed_operand[MAX_RECOG_OPERANDS];
3061 rtx orig_operand[MAX_RECOG_OPERANDS];
3062 struct elim_table *ep;
3063 rtx plus_src, plus_cst_src;
3065 if (! insn_is_asm && icode < 0)
3067 gcc_assert (GET_CODE (PATTERN (insn)) == USE
3068 || GET_CODE (PATTERN (insn)) == CLOBBER
3069 || GET_CODE (PATTERN (insn)) == ADDR_VEC
3070 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
3071 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
3072 return 0;
3075 if (old_set != 0 && REG_P (SET_DEST (old_set))
3076 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3078 /* Check for setting an eliminable register. */
3079 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3080 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3082 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3083 /* If this is setting the frame pointer register to the
3084 hardware frame pointer register and this is an elimination
3085 that will be done (tested above), this insn is really
3086 adjusting the frame pointer downward to compensate for
3087 the adjustment done before a nonlocal goto. */
3088 if (ep->from == FRAME_POINTER_REGNUM
3089 && ep->to == HARD_FRAME_POINTER_REGNUM)
3091 rtx base = SET_SRC (old_set);
3092 rtx base_insn = insn;
3093 HOST_WIDE_INT offset = 0;
3095 while (base != ep->to_rtx)
3097 rtx prev_insn, prev_set;
3099 if (GET_CODE (base) == PLUS
3100 && GET_CODE (XEXP (base, 1)) == CONST_INT)
3102 offset += INTVAL (XEXP (base, 1));
3103 base = XEXP (base, 0);
3105 else if ((prev_insn = prev_nonnote_insn (base_insn)) != 0
3106 && (prev_set = single_set (prev_insn)) != 0
3107 && rtx_equal_p (SET_DEST (prev_set), base))
3109 base = SET_SRC (prev_set);
3110 base_insn = prev_insn;
3112 else
3113 break;
3116 if (base == ep->to_rtx)
3118 rtx src
3119 = plus_constant (ep->to_rtx, offset - ep->offset);
3121 new_body = old_body;
3122 if (! replace)
3124 new_body = copy_insn (old_body);
3125 if (REG_NOTES (insn))
3126 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3128 PATTERN (insn) = new_body;
3129 old_set = single_set (insn);
3131 /* First see if this insn remains valid when we
3132 make the change. If not, keep the INSN_CODE
3133 the same and let reload fix it up. */
3134 validate_change (insn, &SET_SRC (old_set), src, 1);
3135 validate_change (insn, &SET_DEST (old_set),
3136 ep->to_rtx, 1);
3137 if (! apply_change_group ())
3139 SET_SRC (old_set) = src;
3140 SET_DEST (old_set) = ep->to_rtx;
3143 val = 1;
3144 goto done;
3147 #endif
3149 /* In this case this insn isn't serving a useful purpose. We
3150 will delete it in reload_as_needed once we know that this
3151 elimination is, in fact, being done.
3153 If REPLACE isn't set, we can't delete this insn, but needn't
3154 process it since it won't be used unless something changes. */
3155 if (replace)
3157 delete_dead_insn (insn);
3158 return 1;
3160 val = 1;
3161 goto done;
3165 /* We allow one special case which happens to work on all machines we
3166 currently support: a single set with the source or a REG_EQUAL
3167 note being a PLUS of an eliminable register and a constant. */
3168 plus_src = plus_cst_src = 0;
3169 if (old_set && REG_P (SET_DEST (old_set)))
3171 if (GET_CODE (SET_SRC (old_set)) == PLUS)
3172 plus_src = SET_SRC (old_set);
3173 /* First see if the source is of the form (plus (...) CST). */
3174 if (plus_src
3175 && GET_CODE (XEXP (plus_src, 1)) == CONST_INT)
3176 plus_cst_src = plus_src;
3177 else if (REG_P (SET_SRC (old_set))
3178 || plus_src)
3180 /* Otherwise, see if we have a REG_EQUAL note of the form
3181 (plus (...) CST). */
3182 rtx links;
3183 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
3185 if ((REG_NOTE_KIND (links) == REG_EQUAL
3186 || REG_NOTE_KIND (links) == REG_EQUIV)
3187 && GET_CODE (XEXP (links, 0)) == PLUS
3188 && GET_CODE (XEXP (XEXP (links, 0), 1)) == CONST_INT)
3190 plus_cst_src = XEXP (links, 0);
3191 break;
3196 /* Check that the first operand of the PLUS is a hard reg or
3197 the lowpart subreg of one. */
3198 if (plus_cst_src)
3200 rtx reg = XEXP (plus_cst_src, 0);
3201 if (GET_CODE (reg) == SUBREG && subreg_lowpart_p (reg))
3202 reg = SUBREG_REG (reg);
3204 if (!REG_P (reg) || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
3205 plus_cst_src = 0;
3208 if (plus_cst_src)
3210 rtx reg = XEXP (plus_cst_src, 0);
3211 HOST_WIDE_INT offset = INTVAL (XEXP (plus_cst_src, 1));
3213 if (GET_CODE (reg) == SUBREG)
3214 reg = SUBREG_REG (reg);
3216 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3217 if (ep->from_rtx == reg && ep->can_eliminate)
3219 rtx to_rtx = ep->to_rtx;
3220 offset += ep->offset;
3221 offset = trunc_int_for_mode (offset, GET_MODE (reg));
3223 if (GET_CODE (XEXP (plus_cst_src, 0)) == SUBREG)
3224 to_rtx = gen_lowpart (GET_MODE (XEXP (plus_cst_src, 0)),
3225 to_rtx);
3226 /* If we have a nonzero offset, and the source is already
3227 a simple REG, the following transformation would
3228 increase the cost of the insn by replacing a simple REG
3229 with (plus (reg sp) CST). So try only when we already
3230 had a PLUS before. */
3231 if (offset == 0 || plus_src)
3233 rtx new_src = plus_constant (to_rtx, offset);
3235 new_body = old_body;
3236 if (! replace)
3238 new_body = copy_insn (old_body);
3239 if (REG_NOTES (insn))
3240 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3242 PATTERN (insn) = new_body;
3243 old_set = single_set (insn);
3245 /* First see if this insn remains valid when we make the
3246 change. If not, try to replace the whole pattern with
3247 a simple set (this may help if the original insn was a
3248 PARALLEL that was only recognized as single_set due to
3249 REG_UNUSED notes). If this isn't valid either, keep
3250 the INSN_CODE the same and let reload fix it up. */
3251 if (!validate_change (insn, &SET_SRC (old_set), new_src, 0))
3253 rtx new_pat = gen_rtx_SET (VOIDmode,
3254 SET_DEST (old_set), new_src);
3256 if (!validate_change (insn, &PATTERN (insn), new_pat, 0))
3257 SET_SRC (old_set) = new_src;
3260 else
3261 break;
3263 val = 1;
3264 /* This can't have an effect on elimination offsets, so skip right
3265 to the end. */
3266 goto done;
3270 /* Determine the effects of this insn on elimination offsets. */
3271 elimination_effects (old_body, 0);
3273 /* Eliminate all eliminable registers occurring in operands that
3274 can be handled by reload. */
3275 extract_insn (insn);
3276 for (i = 0; i < recog_data.n_operands; i++)
3278 orig_operand[i] = recog_data.operand[i];
3279 substed_operand[i] = recog_data.operand[i];
3281 /* For an asm statement, every operand is eliminable. */
3282 if (insn_is_asm || insn_data[icode].operand[i].eliminable)
3284 bool is_set_src, in_plus;
3286 /* Check for setting a register that we know about. */
3287 if (recog_data.operand_type[i] != OP_IN
3288 && REG_P (orig_operand[i]))
3290 /* If we are assigning to a register that can be eliminated, it
3291 must be as part of a PARALLEL, since the code above handles
3292 single SETs. We must indicate that we can no longer
3293 eliminate this reg. */
3294 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3295 ep++)
3296 if (ep->from_rtx == orig_operand[i])
3297 ep->can_eliminate = 0;
3300 /* Companion to the above plus substitution, we can allow
3301 invariants as the source of a plain move. */
3302 is_set_src = false;
3303 if (old_set && recog_data.operand_loc[i] == &SET_SRC (old_set))
3304 is_set_src = true;
3305 in_plus = false;
3306 if (plus_src
3307 && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
3308 || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
3309 in_plus = true;
3311 substed_operand[i]
3312 = eliminate_regs_1 (recog_data.operand[i], 0,
3313 replace ? insn : NULL_RTX,
3314 is_set_src || in_plus);
3315 if (substed_operand[i] != orig_operand[i])
3316 val = 1;
3317 /* Terminate the search in check_eliminable_occurrences at
3318 this point. */
3319 *recog_data.operand_loc[i] = 0;
3321 /* If an output operand changed from a REG to a MEM and INSN is an
3322 insn, write a CLOBBER insn. */
3323 if (recog_data.operand_type[i] != OP_IN
3324 && REG_P (orig_operand[i])
3325 && MEM_P (substed_operand[i])
3326 && replace)
3327 emit_insn_after (gen_rtx_CLOBBER (VOIDmode, orig_operand[i]),
3328 insn);
3332 for (i = 0; i < recog_data.n_dups; i++)
3333 *recog_data.dup_loc[i]
3334 = *recog_data.operand_loc[(int) recog_data.dup_num[i]];
3336 /* If any eliminable registers remain, they aren't eliminable anymore. */
3337 check_eliminable_occurrences (old_body);
3339 /* Substitute the operands; the new values are in the substed_operand
3340 array. */
3341 for (i = 0; i < recog_data.n_operands; i++)
3342 *recog_data.operand_loc[i] = substed_operand[i];
3343 for (i = 0; i < recog_data.n_dups; i++)
3344 *recog_data.dup_loc[i] = substed_operand[(int) recog_data.dup_num[i]];
3346 /* If we are replacing a body that was a (set X (plus Y Z)), try to
3347 re-recognize the insn. We do this in case we had a simple addition
3348 but now can do this as a load-address. This saves an insn in this
3349 common case.
3350 If re-recognition fails, the old insn code number will still be used,
3351 and some register operands may have changed into PLUS expressions.
3352 These will be handled by find_reloads by loading them into a register
3353 again. */
3355 if (val)
3357 /* If we aren't replacing things permanently and we changed something,
3358 make another copy to ensure that all the RTL is new. Otherwise
3359 things can go wrong if find_reloads swaps commutative operands
3360 and one is inside RTL that has been copied while the other is not. */
3361 new_body = old_body;
3362 if (! replace)
3364 new_body = copy_insn (old_body);
3365 if (REG_NOTES (insn))
3366 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3368 PATTERN (insn) = new_body;
3370 /* If we had a move insn but now we don't, rerecognize it. This will
3371 cause spurious re-recognition if the old move had a PARALLEL since
3372 the new one still will, but we can't call single_set without
3373 having put NEW_BODY into the insn and the re-recognition won't
3374 hurt in this rare case. */
3375 /* ??? Why this huge if statement - why don't we just rerecognize the
3376 thing always? */
3377 if (! insn_is_asm
3378 && old_set != 0
3379 && ((REG_P (SET_SRC (old_set))
3380 && (GET_CODE (new_body) != SET
3381 || !REG_P (SET_SRC (new_body))))
3382 /* If this was a load from or store to memory, compare
3383 the MEM in recog_data.operand to the one in the insn.
3384 If they are not equal, then rerecognize the insn. */
3385 || (old_set != 0
3386 && ((MEM_P (SET_SRC (old_set))
3387 && SET_SRC (old_set) != recog_data.operand[1])
3388 || (MEM_P (SET_DEST (old_set))
3389 && SET_DEST (old_set) != recog_data.operand[0])))
3390 /* If this was an add insn before, rerecognize. */
3391 || GET_CODE (SET_SRC (old_set)) == PLUS))
3393 int new_icode = recog (PATTERN (insn), insn, 0);
3394 if (new_icode >= 0)
3395 INSN_CODE (insn) = new_icode;
3399 /* Restore the old body. If there were any changes to it, we made a copy
3400 of it while the changes were still in place, so we'll correctly return
3401 a modified insn below. */
3402 if (! replace)
3404 /* Restore the old body. */
3405 for (i = 0; i < recog_data.n_operands; i++)
3406 *recog_data.operand_loc[i] = orig_operand[i];
3407 for (i = 0; i < recog_data.n_dups; i++)
3408 *recog_data.dup_loc[i] = orig_operand[(int) recog_data.dup_num[i]];
3411 /* Update all elimination pairs to reflect the status after the current
3412 insn. The changes we make were determined by the earlier call to
3413 elimination_effects.
3415 We also detect cases where register elimination cannot be done,
3416 namely, if a register would be both changed and referenced outside a MEM
3417 in the resulting insn since such an insn is often undefined and, even if
3418 not, we cannot know what meaning will be given to it. Note that it is
3419 valid to have a register used in an address in an insn that changes it
3420 (presumably with a pre- or post-increment or decrement).
3422 If anything changes, return nonzero. */
3424 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3426 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3427 ep->can_eliminate = 0;
3429 ep->ref_outside_mem = 0;
3431 if (ep->previous_offset != ep->offset)
3432 val = 1;
3435 done:
3436 /* If we changed something, perform elimination in REG_NOTES. This is
3437 needed even when REPLACE is zero because a REG_DEAD note might refer
3438 to a register that we eliminate and could cause a different number
3439 of spill registers to be needed in the final reload pass than in
3440 the pre-passes. */
3441 if (val && REG_NOTES (insn) != 0)
3442 REG_NOTES (insn)
3443 = eliminate_regs_1 (REG_NOTES (insn), 0, REG_NOTES (insn), true);
3445 return val;
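/* Example of the single-set special case handled near the top of this
   function (register numbers and the offset are assumptions).  With an
   fp-to-sp elimination at offset 16, the insn

     (set (reg:SI 80) (plus:SI (reg:SI fp) (const_int 4)))

   has plus_cst_src set, so its source is rewritten to

     (plus:SI (reg:SI sp) (const_int 20))

   and re-validated.  If the target does not accept the new source, the
   whole pattern is retried as a simple SET; failing that, the old
   INSN_CODE is kept and find_reloads repairs the operands later.  */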
3448 /* Loop through all elimination pairs.
3449 Recalculate the number not at initial offset.
3451 Compute the maximum offset (minimum offset if the stack does not
3452 grow downward) for each elimination pair. */
3454 static void
3455 update_eliminable_offsets (void)
3457 struct elim_table *ep;
3459 num_not_at_initial_offset = 0;
3460 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3462 ep->previous_offset = ep->offset;
3463 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3464 num_not_at_initial_offset++;
3468 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3469 replacement we currently believe is valid, mark it as not eliminable if X
3470 modifies DEST in any way other than by adding a constant integer to it.
3472 If DEST is the hard frame pointer, we do nothing because we assume that
3473 all assignments to the hard frame pointer are nonlocal gotos and are being
3474 done at a time when they are valid and do not disturb anything else.
3475 Some machines want to eliminate a fake argument pointer with either the
3476 frame or stack pointer. Assignments to the hard frame pointer must not
3477 prevent this elimination.
3479 Called via note_stores from reload before starting its passes to scan
3480 the insns of the function. */
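/* As an illustration (the rtl below is made up, not taken from any
   particular target): a store such as
     (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int -16)))
   only adds a constant to an elimination target and leaves the elimination
   usable, whereas
     (set (reg:SI sp) (reg:SI 3))
   modifies the target unpredictably and disables every elimination whose
   target is that register.  */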
3482 static void
3483 mark_not_eliminable (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
3485 unsigned int i;
3487 /* A SUBREG of a hard register here is just changing its mode. We should
3488 not see a SUBREG of an eliminable hard register, but check just in
3489 case. */
3490 if (GET_CODE (dest) == SUBREG)
3491 dest = SUBREG_REG (dest);
3493 if (dest == hard_frame_pointer_rtx)
3494 return;
3496 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3497 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3498 && (GET_CODE (x) != SET
3499 || GET_CODE (SET_SRC (x)) != PLUS
3500 || XEXP (SET_SRC (x), 0) != dest
3501 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3503 reg_eliminate[i].can_eliminate_previous
3504 = reg_eliminate[i].can_eliminate = 0;
3505 num_eliminable--;
3509 /* Verify that the initial elimination offsets did not change since the
3510 last call to set_initial_elim_offsets. This is used to catch cases
3511 where something illegal happened during reload_as_needed that could
3512 cause incorrect code to be generated if we did not check for it. */
3514 static bool
3515 verify_initial_elim_offsets (void)
3517 HOST_WIDE_INT t;
3519 if (!num_eliminable)
3520 return true;
3522 #ifdef ELIMINABLE_REGS
3524 struct elim_table *ep;
3526 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3528 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t);
3529 if (t != ep->initial_offset)
3530 return false;
3533 #else
3534 INITIAL_FRAME_POINTER_OFFSET (t);
3535 if (t != reg_eliminate[0].initial_offset)
3536 return false;
3537 #endif
3539 return true;
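/* A sketch of how a port supplies these offsets (hypothetical, for
   illustration only):

     #define INITIAL_ELIMINATION_OFFSET(FROM, TO, OFFSET) \
       (OFFSET) = my_port_initial_elimination_offset ((FROM), (TO))

   where the helper (a made-up name) would return the compile-time distance
   between the two registers at function entry, typically the size of the
   local frame plus any saved-register area.  */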
3542 /* Reset all offsets on eliminable registers to their initial values. */
3544 static void
3545 set_initial_elim_offsets (void)
3547 struct elim_table *ep = reg_eliminate;
3549 #ifdef ELIMINABLE_REGS
3550 for (; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3552 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
3553 ep->previous_offset = ep->offset = ep->initial_offset;
3555 #else
3556 INITIAL_FRAME_POINTER_OFFSET (ep->initial_offset);
3557 ep->previous_offset = ep->offset = ep->initial_offset;
3558 #endif
3560 num_not_at_initial_offset = 0;
3563 /* Subroutine of set_initial_label_offsets called via for_each_eh_label. */
3565 static void
3566 set_initial_eh_label_offset (rtx label)
3568 set_label_offsets (label, NULL_RTX, 1);
3571 /* Initialize the known label offsets.
3572 Set a known offset for each forced label to be at the initial offset
3573 of each elimination. We do this because we assume that all
3574 computed jumps occur from a location where each elimination is
3575 at its initial offset.
3576 For all other labels, show that we don't know the offsets. */
3578 static void
3579 set_initial_label_offsets (void)
3581 rtx x;
3582 memset (offsets_known_at, 0, num_labels);
3584 for (x = forced_labels; x; x = XEXP (x, 1))
3585 if (XEXP (x, 0))
3586 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
3588 for_each_eh_label (set_initial_eh_label_offset);
3591 /* Set all elimination offsets to the known values for the code label given
3592 by INSN. */
3594 static void
3595 set_offsets_for_label (rtx insn)
3597 unsigned int i;
3598 int label_nr = CODE_LABEL_NUMBER (insn);
3599 struct elim_table *ep;
3601 num_not_at_initial_offset = 0;
3602 for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++)
3604 ep->offset = ep->previous_offset
3605 = offsets_at[label_nr - first_label_num][i];
3606 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3607 num_not_at_initial_offset++;
3611 /* See if anything that happened changes which eliminations are valid.
3612 For example, on the SPARC, whether or not the frame pointer can
3613 be eliminated can depend on what registers have been used. We need
3614 not check some conditions again (such as flag_omit_frame_pointer)
3615 since they can't have changed. */
3617 static void
3618 update_eliminables (HARD_REG_SET *pset)
3620 int previous_frame_pointer_needed = frame_pointer_needed;
3621 struct elim_table *ep;
3623 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3624 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
3625 #ifdef ELIMINABLE_REGS
3626 || ! CAN_ELIMINATE (ep->from, ep->to)
3627 #endif
3629 ep->can_eliminate = 0;
3631 /* Look for the case where we have discovered that we can't replace
3632 register A with register B and that means that we will now be
3633 trying to replace register A with register C. This means we can
3634 no longer replace register C with register B and we need to disable
3635 such an elimination, if it exists. This occurs often with A == ap,
3636 B == sp, and C == fp. */
3638 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3640 struct elim_table *op;
3641 int new_to = -1;
3643 if (! ep->can_eliminate && ep->can_eliminate_previous)
3645 /* Find the current elimination for ep->from, if there is a
3646 new one. */
3647 for (op = reg_eliminate;
3648 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3649 if (op->from == ep->from && op->can_eliminate)
3651 new_to = op->to;
3652 break;
3655 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
3656 disable it. */
3657 for (op = reg_eliminate;
3658 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3659 if (op->from == new_to && op->to == ep->to)
3660 op->can_eliminate = 0;
3664 /* See if any registers that we thought we could eliminate the previous
3665 time are no longer eliminable. If so, something has changed and we
3666 must spill the register. Also, recompute the number of eliminable
3667 registers and see if the frame pointer is needed; it is if there is
3668 no elimination of the frame pointer that we can perform. */
3670 frame_pointer_needed = 1;
3671 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3673 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
3674 && ep->to != HARD_FRAME_POINTER_REGNUM)
3675 frame_pointer_needed = 0;
3677 if (! ep->can_eliminate && ep->can_eliminate_previous)
3679 ep->can_eliminate_previous = 0;
3680 SET_HARD_REG_BIT (*pset, ep->from);
3681 num_eliminable--;
3685 /* If we didn't need a frame pointer last time, but we do now, spill
3686 the hard frame pointer. */
3687 if (frame_pointer_needed && ! previous_frame_pointer_needed)
3688 SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM);
3691 /* Return true if X is used as the target register of an elimination. */
3693 bool
3694 elimination_target_reg_p (rtx x)
3696 struct elim_table *ep;
3698 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3699 if (ep->to_rtx == x && ep->can_eliminate)
3700 return true;
3702 return false;
3705 /* Initialize the table of registers to eliminate. */
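/* On most ports the raw data comes from the ELIMINABLE_REGS target macro;
   a typical (purely illustrative) definition is

     #define ELIMINABLE_REGS \
     {{ ARG_POINTER_REGNUM,   STACK_POINTER_REGNUM }, \
      { ARG_POINTER_REGNUM,   FRAME_POINTER_REGNUM }, \
      { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM }}

   i.e. an array of {from, to} pairs.  reg_eliminate_1 is initialized from
   it, and the function below copies that data into reg_eliminate.  */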
3707 static void
3708 init_elim_table (void)
3710 struct elim_table *ep;
3711 #ifdef ELIMINABLE_REGS
3712 const struct elim_table_1 *ep1;
3713 #endif
3715 if (!reg_eliminate)
3716 reg_eliminate = xcalloc (sizeof (struct elim_table), NUM_ELIMINABLE_REGS);
3718 /* Does this function require a frame pointer? */
3720 frame_pointer_needed = (! flag_omit_frame_pointer
3721 /* ?? If EXIT_IGNORE_STACK is set, we will not save
3722 and restore sp for alloca. So we can't eliminate
3723 the frame pointer in that case. At some point,
3724 we should improve this by emitting the
3725 sp-adjusting insns for this case. */
3726 || (current_function_calls_alloca
3727 && EXIT_IGNORE_STACK)
3728 || current_function_accesses_prior_frames
3729 || FRAME_POINTER_REQUIRED);
3731 num_eliminable = 0;
3733 #ifdef ELIMINABLE_REGS
3734 for (ep = reg_eliminate, ep1 = reg_eliminate_1;
3735 ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++, ep1++)
3737 ep->from = ep1->from;
3738 ep->to = ep1->to;
3739 ep->can_eliminate = ep->can_eliminate_previous
3740 = (CAN_ELIMINATE (ep->from, ep->to)
3741 && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
3743 #else
3744 reg_eliminate[0].from = reg_eliminate_1[0].from;
3745 reg_eliminate[0].to = reg_eliminate_1[0].to;
3746 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
3747 = ! frame_pointer_needed;
3748 #endif
3750 /* Count the number of eliminable registers and build the FROM and TO
3751 REG rtx's. Note that code in gen_rtx_REG will cause, e.g.,
3752 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
3753 We depend on this. */
3754 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3756 num_eliminable += ep->can_eliminate;
3757 ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
3758 ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
3762 /* Kick all pseudos out of hard register REGNO.
3764 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3765 because we found we can't eliminate some register. In that case, no pseudos
3766 are allowed to be in the register, even if they are only in a block that
3767 doesn't require spill registers, unlike the case when we are spilling this
3768 hard reg to produce another spill register.
3770 Pseudos that are kicked out are recorded in spilled_pseudos. */
3772 static void
3773 spill_hard_reg (unsigned int regno, int cant_eliminate)
3775 int i;
3777 if (cant_eliminate)
3779 SET_HARD_REG_BIT (bad_spill_regs_global, regno);
3780 df_set_regs_ever_live (regno, true);
3783 /* Spill every pseudo reg that was allocated to this reg
3784 or to something that overlaps this reg. */
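/* The overlap test below works on whole hard-register blocks: a pseudo
   allocated to hard reg R in mode M occupies regs R .. end_hard_regno (M, R)
   - 1.  For instance (illustrative numbers only), a DImode pseudo placed in
   hard reg 3 on a 32-bit target occupies regs 3 and 4, so spilling hard
   reg 4 must also spill that pseudo.  */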
3786 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3787 if (reg_renumber[i] >= 0
3788 && (unsigned int) reg_renumber[i] <= regno
3789 && end_hard_regno (PSEUDO_REGNO_MODE (i), reg_renumber[i]) > regno)
3790 SET_REGNO_REG_SET (&spilled_pseudos, i);
3793 /* After find_reload_regs has been run for all insns that need reloads,
3794 and/or spill_hard_regs was called, this function is used to actually
3795 spill pseudo registers and try to reallocate them. It also sets up the
3796 spill_regs array for use by choose_reload_regs. */
3798 static int
3799 finish_spills (int global)
3801 struct insn_chain *chain;
3802 int something_changed = 0;
3803 unsigned i;
3804 reg_set_iterator rsi;
3806 /* Build the spill_regs array for the function. */
3807 /* If there are some registers still to eliminate and one of the spill regs
3808 wasn't ever used before, additional stack space may have to be
3809 allocated to store this register. Thus, we may have changed the offset
3810 between the stack and frame pointers, so mark that something has changed.
3812 One might think that we need only set SOMETHING_CHANGED if this is a call-used
3813 register. However, the set of registers that must be saved by the
3814 prologue is not identical to the call-used set. For example, the
3815 register used by the call insn for the return PC is a call-used register,
3816 but must be saved by the prologue. */
3818 n_spills = 0;
3819 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3820 if (TEST_HARD_REG_BIT (used_spill_regs, i))
3822 spill_reg_order[i] = n_spills;
3823 spill_regs[n_spills++] = i;
3824 if (num_eliminable && ! df_regs_ever_live_p (i))
3825 something_changed = 1;
3826 df_set_regs_ever_live (i, true);
3828 else
3829 spill_reg_order[i] = -1;
3831 EXECUTE_IF_SET_IN_REG_SET (&spilled_pseudos, FIRST_PSEUDO_REGISTER, i, rsi)
3833 /* Record the current hard register the pseudo is allocated to in
3834 pseudo_previous_regs so we avoid reallocating it to the same
3835 hard reg in a later pass. */
3836 gcc_assert (reg_renumber[i] >= 0);
3838 SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]);
3839 /* Mark it as no longer having a hard register home. */
3840 reg_renumber[i] = -1;
3841 /* We will need to scan everything again. */
3842 something_changed = 1;
3845 /* Retry global register allocation if possible. */
3846 if (global)
3848 memset (pseudo_forbidden_regs, 0, max_regno * sizeof (HARD_REG_SET));
3849 /* For every insn that needs reloads, set the registers used as spill
3850 regs in pseudo_forbidden_regs for every pseudo live across the
3851 insn. */
3852 for (chain = insns_need_reload; chain; chain = chain->next_need_reload)
3854 EXECUTE_IF_SET_IN_REG_SET
3855 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
3857 IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
3858 chain->used_spill_regs);
3860 EXECUTE_IF_SET_IN_REG_SET
3861 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
3863 IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
3864 chain->used_spill_regs);
3868 /* Retry allocating the spilled pseudos. For each reg, merge the
3869 various reg sets that indicate which hard regs can't be used,
3870 and call retry_global_alloc.
3871 We change spilled_pseudos here to only contain pseudos that did not
3872 get a new hard register. */
3873 for (i = FIRST_PSEUDO_REGISTER; i < (unsigned)max_regno; i++)
3874 if (reg_old_renumber[i] != reg_renumber[i])
3876 HARD_REG_SET forbidden;
3877 COPY_HARD_REG_SET (forbidden, bad_spill_regs_global);
3878 IOR_HARD_REG_SET (forbidden, pseudo_forbidden_regs[i]);
3879 IOR_HARD_REG_SET (forbidden, pseudo_previous_regs[i]);
3880 retry_global_alloc (i, forbidden);
3881 if (reg_renumber[i] >= 0)
3882 CLEAR_REGNO_REG_SET (&spilled_pseudos, i);
3886 /* Fix up the register information in the insn chain.
3887 This involves deleting those of the spilled pseudos which did not get
3888 a new hard register home from the live_throughout and dead_or_set sets. */
3889 for (chain = reload_insn_chain; chain; chain = chain->next)
3891 HARD_REG_SET used_by_pseudos;
3892 HARD_REG_SET used_by_pseudos2;
3894 AND_COMPL_REG_SET (&chain->live_throughout, &spilled_pseudos);
3895 AND_COMPL_REG_SET (&chain->dead_or_set, &spilled_pseudos);
3897 /* Mark any unallocated hard regs as available for spills. That
3898 makes inheritance work somewhat better. */
3899 if (chain->need_reload)
3901 REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
3902 REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
3903 IOR_HARD_REG_SET (used_by_pseudos, used_by_pseudos2);
3905 /* Save the old value for the sanity test below. */
3906 COPY_HARD_REG_SET (used_by_pseudos2, chain->used_spill_regs);
3908 compute_use_by_pseudos (&used_by_pseudos, &chain->live_throughout);
3909 compute_use_by_pseudos (&used_by_pseudos, &chain->dead_or_set);
3910 COMPL_HARD_REG_SET (chain->used_spill_regs, used_by_pseudos);
3911 AND_HARD_REG_SET (chain->used_spill_regs, used_spill_regs);
3913 /* Make sure we only enlarge the set. */
3914 gcc_assert (hard_reg_set_subset_p (used_by_pseudos2,
3915 chain->used_spill_regs));
3919 /* Let alter_reg modify the reg rtx's for the modified pseudos. */
3920 for (i = FIRST_PSEUDO_REGISTER; i < (unsigned)max_regno; i++)
3922 int regno = reg_renumber[i];
3923 if (reg_old_renumber[i] == regno)
3924 continue;
3926 alter_reg (i, reg_old_renumber[i]);
3927 reg_old_renumber[i] = regno;
3928 if (dump_file)
3930 if (regno == -1)
3931 fprintf (dump_file, " Register %d now on stack.\n\n", i);
3932 else
3933 fprintf (dump_file, " Register %d now in %d.\n\n",
3934 i, reg_renumber[i]);
3938 return something_changed;
3941 /* Find all paradoxical subregs within X and update reg_max_ref_width. */
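/* A paradoxical subreg is one whose outer mode is wider than the mode of
   the inner register, e.g. (subreg:DI (reg:SI 65) 0) on a 32-bit target
   (the register number is just an example).  Seeing one means the pseudo
   may be referenced with a width of 8 bytes here, so reg_max_ref_width is
   raised so that any stack slot later given to it is at least that wide.  */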
3943 static void
3944 scan_paradoxical_subregs (rtx x)
3946 int i;
3947 const char *fmt;
3948 enum rtx_code code = GET_CODE (x);
3950 switch (code)
3952 case REG:
3953 case CONST_INT:
3954 case CONST:
3955 case SYMBOL_REF:
3956 case LABEL_REF:
3957 case CONST_DOUBLE:
3958 case CONST_FIXED:
3959 case CONST_VECTOR: /* shouldn't happen, but just in case. */
3960 case CC0:
3961 case PC:
3962 case USE:
3963 case CLOBBER:
3964 return;
3966 case SUBREG:
3967 if (REG_P (SUBREG_REG (x))
3968 && (GET_MODE_SIZE (GET_MODE (x))
3969 > reg_max_ref_width[REGNO (SUBREG_REG (x))]))
3971 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3972 = GET_MODE_SIZE (GET_MODE (x));
3973 mark_home_live_1 (REGNO (SUBREG_REG (x)), GET_MODE (x));
3975 return;
3977 default:
3978 break;
3981 fmt = GET_RTX_FORMAT (code);
3982 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3984 if (fmt[i] == 'e')
3985 scan_paradoxical_subregs (XEXP (x, i));
3986 else if (fmt[i] == 'E')
3988 int j;
3989 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3990 scan_paradoxical_subregs (XVECEXP (x, i, j));
3995 /* A subroutine of reload_as_needed. If INSN has a REG_EH_REGION note,
3996 examine all of the reload insns between PREV and NEXT exclusive, and
3997 annotate all that may trap. */
3999 static void
4000 fixup_eh_region_note (rtx insn, rtx prev, rtx next)
4002 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
4003 unsigned int trap_count;
4004 rtx i;
4006 if (note == NULL)
4007 return;
4009 if (may_trap_p (PATTERN (insn)))
4010 trap_count = 1;
4011 else
4013 remove_note (insn, note);
4014 trap_count = 0;
4017 for (i = NEXT_INSN (prev); i != next; i = NEXT_INSN (i))
4018 if (INSN_P (i) && i != insn && may_trap_p (PATTERN (i)))
4020 trap_count++;
4021 REG_NOTES (i)
4022 = gen_rtx_EXPR_LIST (REG_EH_REGION, XEXP (note, 0), REG_NOTES (i));
4026 /* Reload pseudo-registers into hard regs around each insn as needed.
4027 Additional register load insns are output before the insn that needs them
4028 and perhaps store insns after insns that modify the reloaded pseudo reg.
4030 reg_last_reload_reg and reg_reloaded_contents keep track of
4031 which registers are already available in reload registers.
4032 We update these for the reloads that we perform,
4033 as the insns are scanned. */
4035 static void
4036 reload_as_needed (int live_known)
4038 struct insn_chain *chain;
4039 #if defined (AUTO_INC_DEC)
4040 int i;
4041 #endif
4042 rtx x;
4044 memset (spill_reg_rtx, 0, sizeof spill_reg_rtx);
4045 memset (spill_reg_store, 0, sizeof spill_reg_store);
4046 reg_last_reload_reg = XCNEWVEC (rtx, max_regno);
4047 INIT_REG_SET (&reg_has_output_reload);
4048 CLEAR_HARD_REG_SET (reg_reloaded_valid);
4049 CLEAR_HARD_REG_SET (reg_reloaded_call_part_clobbered);
4051 set_initial_elim_offsets ();
4053 for (chain = reload_insn_chain; chain; chain = chain->next)
4055 rtx prev = 0;
4056 rtx insn = chain->insn;
4057 rtx old_next = NEXT_INSN (insn);
4059 /* If we pass a label, copy the offsets from the label information
4060 into the current offsets of each elimination. */
4061 if (LABEL_P (insn))
4062 set_offsets_for_label (insn);
4064 else if (INSN_P (insn))
4066 regset_head regs_to_forget;
4067 INIT_REG_SET (&regs_to_forget);
4068 note_stores (PATTERN (insn), forget_old_reloads_1, &regs_to_forget);
4070 /* If this is a USE or CLOBBER of a MEM, ensure that any
4071 references to eliminable registers have been removed. */
4073 if ((GET_CODE (PATTERN (insn)) == USE
4074 || GET_CODE (PATTERN (insn)) == CLOBBER)
4075 && MEM_P (XEXP (PATTERN (insn), 0)))
4076 XEXP (XEXP (PATTERN (insn), 0), 0)
4077 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
4078 GET_MODE (XEXP (PATTERN (insn), 0)),
4079 NULL_RTX);
4081 /* If we need to do register elimination processing, do so.
4082 This might delete the insn, in which case we are done. */
4083 if ((num_eliminable || num_eliminable_invariants) && chain->need_elim)
4085 eliminate_regs_in_insn (insn, 1);
4086 if (NOTE_P (insn))
4088 update_eliminable_offsets ();
4089 CLEAR_REG_SET (&regs_to_forget);
4090 continue;
4094 /* If need_elim is nonzero but need_reload is zero, one might think
4095 that we could simply set n_reloads to 0. However, find_reloads
4096 could have done some manipulation of the insn (such as swapping
4097 commutative operands), and these manipulations are lost during
4098 the first pass for every insn that needs register elimination.
4099 So the actions of find_reloads must be redone here. */
4101 if (! chain->need_elim && ! chain->need_reload
4102 && ! chain->need_operand_change)
4103 n_reloads = 0;
4104 /* First find the pseudo regs that must be reloaded for this insn.
4105 This info is returned in the tables reload_... (see reload.h).
4106 Also modify the body of INSN by substituting RELOAD
4107 rtx's for those pseudo regs. */
4108 else
4110 CLEAR_REG_SET (&reg_has_output_reload);
4111 CLEAR_HARD_REG_SET (reg_is_output_reload);
4113 find_reloads (insn, 1, spill_indirect_levels, live_known,
4114 spill_reg_order);
4117 if (n_reloads > 0)
4119 rtx next = NEXT_INSN (insn);
4120 rtx p;
4122 prev = PREV_INSN (insn);
4124 /* Now compute which reload regs to reload them into. Perhaps
4125 reusing reload regs from previous insns, or else output
4126 load insns to reload them. Maybe output store insns too.
4127 Record the choices of reload reg in reload_reg_rtx. */
4128 choose_reload_regs (chain);
4130 /* Merge any reloads that we didn't combine for fear of
4131 increasing the number of spill registers needed but now
4132 discover can be safely merged. */
4133 if (SMALL_REGISTER_CLASSES)
4134 merge_assigned_reloads (insn);
4136 /* Generate the insns to reload operands into or out of
4137 their reload regs. */
4138 emit_reload_insns (chain);
4140 /* Substitute the chosen reload regs from reload_reg_rtx
4141 into the insn's body (or perhaps into the bodies of other
4142 load and store insns that we just made for reloading
4143 and that we moved the structure into). */
4144 subst_reloads (insn);
4146 /* Adjust the exception region notes for loads and stores. */
4147 if (flag_non_call_exceptions && !CALL_P (insn))
4148 fixup_eh_region_note (insn, prev, next);
4150 /* If this was an ASM, make sure that all the reload insns
4151 we have generated are valid. If not, give an error
4152 and delete them. */
4153 if (asm_noperands (PATTERN (insn)) >= 0)
4154 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
4155 if (p != insn && INSN_P (p)
4156 && GET_CODE (PATTERN (p)) != USE
4157 && (recog_memoized (p) < 0
4158 || (extract_insn (p), ! constrain_operands (1))))
4160 error_for_asm (insn,
4161 "%<asm%> operand requires "
4162 "impossible reload");
4163 delete_insn (p);
4167 if (num_eliminable && chain->need_elim)
4168 update_eliminable_offsets ();
4170 /* Any previously reloaded spilled pseudo reg, stored in this insn,
4171 is no longer validly lying around to save a future reload.
4172 Note that this does not detect pseudos that were reloaded
4173 for this insn in order to have their values stored
4174 (obeying register constraints). That is correct; such reload
4175 registers ARE still valid. */
4176 forget_marked_reloads (&regs_to_forget);
4177 CLEAR_REG_SET (&regs_to_forget);
4179 /* There may have been CLOBBER insns placed after INSN. So scan
4180 between INSN and NEXT and use them to forget old reloads. */
4181 for (x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
4182 if (NONJUMP_INSN_P (x) && GET_CODE (PATTERN (x)) == CLOBBER)
4183 note_stores (PATTERN (x), forget_old_reloads_1, NULL);
4185 #ifdef AUTO_INC_DEC
4186 /* Likewise for regs altered by auto-increment in this insn.
4187 REG_INC notes have been changed by reloading:
4188 find_reloads_address_1 records substitutions for them,
4189 which have been performed by subst_reloads above. */
4190 for (i = n_reloads - 1; i >= 0; i--)
4192 rtx in_reg = rld[i].in_reg;
4193 if (in_reg)
4195 enum rtx_code code = GET_CODE (in_reg);
4196 /* PRE_INC / PRE_DEC will have the reload register ending up
4197 with the same value as the stack slot, but that doesn't
4198 hold true for POST_INC / POST_DEC. Either we have to
4199 convert the memory access to a true POST_INC / POST_DEC,
4200 or we can't use the reload register for inheritance. */
4201 if ((code == POST_INC || code == POST_DEC)
4202 && TEST_HARD_REG_BIT (reg_reloaded_valid,
4203 REGNO (rld[i].reg_rtx))
4204 /* Make sure it is the inc/dec pseudo, and not
4205 some other (e.g. output operand) pseudo. */
4206 && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4207 == REGNO (XEXP (in_reg, 0))))
4210 rtx reload_reg = rld[i].reg_rtx;
4211 enum machine_mode mode = GET_MODE (reload_reg);
4212 int n = 0;
4213 rtx p;
4215 for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p))
4217 /* We really want to ignore REG_INC notes here, so
4218 use PATTERN (p) as argument to reg_set_p. */
4219 if (reg_set_p (reload_reg, PATTERN (p)))
4220 break;
4221 n = count_occurrences (PATTERN (p), reload_reg, 0);
4222 if (! n)
4223 continue;
4224 if (n == 1)
4226 n = validate_replace_rtx (reload_reg,
4227 gen_rtx_fmt_e (code,
4228 mode,
4229 reload_reg),
4232 /* We must also verify that the constraints
4233 are met after the replacement. */
4234 extract_insn (p);
4235 if (n)
4236 n = constrain_operands (1);
4237 else
4238 break;
4240 /* If the constraints were not met, then
4241 undo the replacement. */
4242 if (!n)
4244 validate_replace_rtx (gen_rtx_fmt_e (code,
4245 mode,
4246 reload_reg),
4247 reload_reg, p);
4248 break;
4252 break;
4254 if (n == 1)
4256 REG_NOTES (p)
4257 = gen_rtx_EXPR_LIST (REG_INC, reload_reg,
4258 REG_NOTES (p));
4259 /* Mark this as having an output reload so that the
4260 REG_INC processing code below won't invalidate
4261 the reload for inheritance. */
4262 SET_HARD_REG_BIT (reg_is_output_reload,
4263 REGNO (reload_reg));
4264 SET_REGNO_REG_SET (&reg_has_output_reload,
4265 REGNO (XEXP (in_reg, 0)));
4267 else
4268 forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX,
4269 NULL);
4271 else if ((code == PRE_INC || code == PRE_DEC)
4272 && TEST_HARD_REG_BIT (reg_reloaded_valid,
4273 REGNO (rld[i].reg_rtx))
4274 /* Make sure it is the inc/dec pseudo, and not
4275 some other (e.g. output operand) pseudo. */
4276 && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4277 == REGNO (XEXP (in_reg, 0))))
4279 SET_HARD_REG_BIT (reg_is_output_reload,
4280 REGNO (rld[i].reg_rtx));
4281 SET_REGNO_REG_SET (&reg_has_output_reload,
4282 REGNO (XEXP (in_reg, 0)));
4286 /* If a pseudo that got a hard register is auto-incremented,
4287 we must purge records of copying it into pseudos without
4288 hard registers. */
4289 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
4290 if (REG_NOTE_KIND (x) == REG_INC)
4292 /* See if this pseudo reg was reloaded in this insn.
4293 If so, its last-reload info is still valid
4294 because it is based on this insn's reload. */
4295 for (i = 0; i < n_reloads; i++)
4296 if (rld[i].out == XEXP (x, 0))
4297 break;
4299 if (i == n_reloads)
4300 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX, NULL);
4302 #endif
4304 /* A reload reg's contents are unknown after a label. */
4305 if (LABEL_P (insn))
4306 CLEAR_HARD_REG_SET (reg_reloaded_valid);
4308 /* Don't assume a reload reg is still good after a call insn
4309 if it is a call-used reg, or if it contains a value that will
4310 be partially clobbered by the call. */
4311 else if (CALL_P (insn))
4313 AND_COMPL_HARD_REG_SET (reg_reloaded_valid, call_used_reg_set);
4314 AND_COMPL_HARD_REG_SET (reg_reloaded_valid, reg_reloaded_call_part_clobbered);
4318 /* Clean up. */
4319 free (reg_last_reload_reg);
4320 CLEAR_REG_SET (&reg_has_output_reload);
4323 /* Discard all record of any value reloaded from X,
4324 or reloaded in X from someplace else;
4325 unless X is an output reload reg of the current insn.
4327 X may be a hard reg (the reload reg)
4328 or it may be a pseudo reg that was reloaded from.
4330 When DATA is non-NULL just mark the registers in regset
4331 to be forgotten later. */
4333 static void
4334 forget_old_reloads_1 (rtx x, const_rtx ignored ATTRIBUTE_UNUSED,
4335 void *data)
4337 unsigned int regno;
4338 unsigned int nr;
4339 regset regs = (regset) data;
4341 /* note_stores does give us subregs of hard regs,
4342 but subreg_regno_offset requires a hard reg. */
4343 while (GET_CODE (x) == SUBREG)
4345 /* We ignore the subreg offset when calculating the regno,
4346 because we are using the entire underlying hard register
4347 below. */
4348 x = SUBREG_REG (x);
4351 if (!REG_P (x))
4352 return;
4354 regno = REGNO (x);
4356 if (regno >= FIRST_PSEUDO_REGISTER)
4357 nr = 1;
4358 else
4360 unsigned int i;
4362 nr = hard_regno_nregs[regno][GET_MODE (x)];
4363 /* Storing into a spilled-reg invalidates its contents.
4364 This can happen if a block-local pseudo is allocated to that reg
4365 and it wasn't spilled because this block's total need is 0.
4366 Then some insn might have an optional reload and use this reg. */
4367 if (!regs)
4368 for (i = 0; i < nr; i++)
4369 /* But don't do this if the reg actually serves as an output
4370 reload reg in the current instruction. */
4371 if (n_reloads == 0
4372 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
4374 CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
4375 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered, regno + i);
4376 spill_reg_store[regno + i] = 0;
4380 if (regs)
4381 while (nr-- > 0)
4382 SET_REGNO_REG_SET (regs, regno + nr);
4383 else
4385 /* Since value of X has changed,
4386 forget any value previously copied from it. */
4388 while (nr-- > 0)
4389 /* But don't forget a copy if this is the output reload
4390 that establishes the copy's validity. */
4391 if (n_reloads == 0
4392 || !REGNO_REG_SET_P (&reg_has_output_reload, regno + nr))
4393 reg_last_reload_reg[regno + nr] = 0;
4397 /* Forget the reloads marked in regset by previous function. */
4398 static void
4399 forget_marked_reloads (regset regs)
4401 unsigned int reg;
4402 reg_set_iterator rsi;
4403 EXECUTE_IF_SET_IN_REG_SET (regs, 0, reg, rsi)
4405 if (reg < FIRST_PSEUDO_REGISTER
4406 /* But don't do this if the reg actually serves as an output
4407 reload reg in the current instruction. */
4408 && (n_reloads == 0
4409 || ! TEST_HARD_REG_BIT (reg_is_output_reload, reg)))
4411 CLEAR_HARD_REG_BIT (reg_reloaded_valid, reg);
4412 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered, reg);
4413 spill_reg_store[reg] = 0;
4415 if (n_reloads == 0
4416 || !REGNO_REG_SET_P (&reg_has_output_reload, reg))
4417 reg_last_reload_reg[reg] = 0;
4421 /* The following HARD_REG_SETs indicate when each hard register is
4422 used for a reload of various parts of the current insn. */
4424 /* If reg is unavailable for all reloads. */
4425 static HARD_REG_SET reload_reg_unavailable;
4426 /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4427 static HARD_REG_SET reload_reg_used;
4428 /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4429 static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4430 /* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I. */
4431 static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
4432 /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4433 static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4434 /* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I. */
4435 static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
4436 /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4437 static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4438 /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4439 static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4440 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4441 static HARD_REG_SET reload_reg_used_in_op_addr;
4442 /* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4443 static HARD_REG_SET reload_reg_used_in_op_addr_reload;
4444 /* If reg is in use for a RELOAD_FOR_INSN reload. */
4445 static HARD_REG_SET reload_reg_used_in_insn;
4446 /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4447 static HARD_REG_SET reload_reg_used_in_other_addr;
4449 /* If reg is in use as a reload reg for any sort of reload. */
4450 static HARD_REG_SET reload_reg_used_at_all;
4452 /* If reg is in use as an inherited reload. We just mark the first register
4453 in the group. */
4454 static HARD_REG_SET reload_reg_used_for_inherit;
4456 /* Records which hard regs are used in any way, either as explicit use or
4457 by being allocated to a pseudo during any point of the current insn. */
4458 static HARD_REG_SET reg_used_in_insn;
4460 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4461 TYPE. MODE is used to indicate how many consecutive regs are
4462 actually used. */
4464 static void
4465 mark_reload_reg_in_use (unsigned int regno, int opnum, enum reload_type type,
4466 enum machine_mode mode)
4468 unsigned int nregs = hard_regno_nregs[regno][mode];
4469 unsigned int i;
4471 for (i = regno; i < nregs + regno; i++)
4473 switch (type)
4475 case RELOAD_OTHER:
4476 SET_HARD_REG_BIT (reload_reg_used, i);
4477 break;
4479 case RELOAD_FOR_INPUT_ADDRESS:
4480 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4481 break;
4483 case RELOAD_FOR_INPADDR_ADDRESS:
4484 SET_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4485 break;
4487 case RELOAD_FOR_OUTPUT_ADDRESS:
4488 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4489 break;
4491 case RELOAD_FOR_OUTADDR_ADDRESS:
4492 SET_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4493 break;
4495 case RELOAD_FOR_OPERAND_ADDRESS:
4496 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4497 break;
4499 case RELOAD_FOR_OPADDR_ADDR:
4500 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4501 break;
4503 case RELOAD_FOR_OTHER_ADDRESS:
4504 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4505 break;
4507 case RELOAD_FOR_INPUT:
4508 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4509 break;
4511 case RELOAD_FOR_OUTPUT:
4512 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4513 break;
4515 case RELOAD_FOR_INSN:
4516 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
4517 break;
4520 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4524 /* Similarly, but show REGNO is no longer in use for a reload. */
4526 static void
4527 clear_reload_reg_in_use (unsigned int regno, int opnum,
4528 enum reload_type type, enum machine_mode mode)
4530 unsigned int nregs = hard_regno_nregs[regno][mode];
4531 unsigned int start_regno, end_regno, r;
4532 int i;
4533 /* A complication is that for some reload types, inheritance might
4534 allow multiple reloads of the same type to share a reload register.
4535 We set check_opnum if we have to check only reloads with the same
4536 operand number, and check_any if we have to check all reloads. */
4537 int check_opnum = 0;
4538 int check_any = 0;
4539 HARD_REG_SET *used_in_set;
4541 switch (type)
4543 case RELOAD_OTHER:
4544 used_in_set = &reload_reg_used;
4545 break;
4547 case RELOAD_FOR_INPUT_ADDRESS:
4548 used_in_set = &reload_reg_used_in_input_addr[opnum];
4549 break;
4551 case RELOAD_FOR_INPADDR_ADDRESS:
4552 check_opnum = 1;
4553 used_in_set = &reload_reg_used_in_inpaddr_addr[opnum];
4554 break;
4556 case RELOAD_FOR_OUTPUT_ADDRESS:
4557 used_in_set = &reload_reg_used_in_output_addr[opnum];
4558 break;
4560 case RELOAD_FOR_OUTADDR_ADDRESS:
4561 check_opnum = 1;
4562 used_in_set = &reload_reg_used_in_outaddr_addr[opnum];
4563 break;
4565 case RELOAD_FOR_OPERAND_ADDRESS:
4566 used_in_set = &reload_reg_used_in_op_addr;
4567 break;
4569 case RELOAD_FOR_OPADDR_ADDR:
4570 check_any = 1;
4571 used_in_set = &reload_reg_used_in_op_addr_reload;
4572 break;
4574 case RELOAD_FOR_OTHER_ADDRESS:
4575 used_in_set = &reload_reg_used_in_other_addr;
4576 check_any = 1;
4577 break;
4579 case RELOAD_FOR_INPUT:
4580 used_in_set = &reload_reg_used_in_input[opnum];
4581 break;
4583 case RELOAD_FOR_OUTPUT:
4584 used_in_set = &reload_reg_used_in_output[opnum];
4585 break;
4587 case RELOAD_FOR_INSN:
4588 used_in_set = &reload_reg_used_in_insn;
4589 break;
4590 default:
4591 gcc_unreachable ();
4593 /* We resolve conflicts with remaining reloads of the same type by
4594 excluding the intervals of reload registers used by them from the
4595 interval of freed reload registers. Since we only keep track of
4596 one set of interval bounds, we might have to exclude somewhat
4597 more than what would be necessary if we used a HARD_REG_SET here.
4598 But this should only happen very infrequently, so there should
4599 be no reason to worry about it. */
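/* For example (made-up register numbers): if we are freeing regs 8..11 and
   a remaining reload of the same type still occupies regs 10..11, the loop
   below shrinks the freed interval to 8..9; had the conflict covered
   regs 7..8 instead, the freed interval would become 9..11.  */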
4601 start_regno = regno;
4602 end_regno = regno + nregs;
4603 if (check_opnum || check_any)
4605 for (i = n_reloads - 1; i >= 0; i--)
4607 if (rld[i].when_needed == type
4608 && (check_any || rld[i].opnum == opnum)
4609 && rld[i].reg_rtx)
4611 unsigned int conflict_start = true_regnum (rld[i].reg_rtx);
4612 unsigned int conflict_end
4613 = end_hard_regno (rld[i].mode, conflict_start);
4615 /* If there is an overlap with the first to-be-freed register,
4616 adjust the interval start. */
4617 if (conflict_start <= start_regno && conflict_end > start_regno)
4618 start_regno = conflict_end;
4619 /* Otherwise, if there is a conflict with one of the other
4620 to-be-freed registers, adjust the interval end. */
4621 if (conflict_start > start_regno && conflict_start < end_regno)
4622 end_regno = conflict_start;
4627 for (r = start_regno; r < end_regno; r++)
4628 CLEAR_HARD_REG_BIT (*used_in_set, r);
4631 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
4632 specified by OPNUM and TYPE. */
4634 static int
4635 reload_reg_free_p (unsigned int regno, int opnum, enum reload_type type)
4637 int i;
4639 /* In use for a RELOAD_OTHER means it's not available for anything. */
4640 if (TEST_HARD_REG_BIT (reload_reg_used, regno)
4641 || TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
4642 return 0;
4644 switch (type)
4646 case RELOAD_OTHER:
4647 /* In use for anything means we can't use it for RELOAD_OTHER. */
4648 if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4649 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4650 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
4651 || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4652 return 0;
4654 for (i = 0; i < reload_n_operands; i++)
4655 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4656 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4657 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4658 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4659 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4660 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4661 return 0;
4663 return 1;
4665 case RELOAD_FOR_INPUT:
4666 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4667 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4668 return 0;
4670 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4671 return 0;
4673 /* If it is used for some other input, can't use it. */
4674 for (i = 0; i < reload_n_operands; i++)
4675 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4676 return 0;
4678 /* If it is used in a later operand's address, can't use it. */
4679 for (i = opnum + 1; i < reload_n_operands; i++)
4680 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4681 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4682 return 0;
4684 return 1;
4686 case RELOAD_FOR_INPUT_ADDRESS:
4687 /* Can't use a register if it is used for an input address for this
4688 operand or used as an input in an earlier one. */
4689 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
4690 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4691 return 0;
4693 for (i = 0; i < opnum; i++)
4694 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4695 return 0;
4697 return 1;
4699 case RELOAD_FOR_INPADDR_ADDRESS:
4700 /* Can't use a register if it is used for an input address
4701 for this operand or used as an input in an earlier
4702 one. */
4703 if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4704 return 0;
4706 for (i = 0; i < opnum; i++)
4707 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4708 return 0;
4710 return 1;
4712 case RELOAD_FOR_OUTPUT_ADDRESS:
4713 /* Can't use a register if it is used for an output address for this
4714 operand or used as an output in this or a later operand. Note
4715 that multiple output operands are emitted in reverse order, so
4716 the conflicting ones are those with lower indices. */
4717 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4718 return 0;
4720 for (i = 0; i <= opnum; i++)
4721 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4722 return 0;
4724 return 1;
4726 case RELOAD_FOR_OUTADDR_ADDRESS:
4727 /* Can't use a register if it is used for an output address
4728 for this operand or used as an output in this or a
4729 later operand. Note that multiple output operands are
4730 emitted in reverse order, so the conflicting ones are
4731 those with lower indices. */
4732 if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4733 return 0;
4735 for (i = 0; i <= opnum; i++)
4736 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4737 return 0;
4739 return 1;
4741 case RELOAD_FOR_OPERAND_ADDRESS:
4742 for (i = 0; i < reload_n_operands; i++)
4743 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4744 return 0;
4746 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4747 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4749 case RELOAD_FOR_OPADDR_ADDR:
4750 for (i = 0; i < reload_n_operands; i++)
4751 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4752 return 0;
4754 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
4756 case RELOAD_FOR_OUTPUT:
4757 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4758 outputs, or an operand address for this or an earlier output.
4759 Note that multiple output operands are emitted in reverse order,
4760 so the conflicting ones are those with higher indices. */
4761 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4762 return 0;
4764 for (i = 0; i < reload_n_operands; i++)
4765 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4766 return 0;
4768 for (i = opnum; i < reload_n_operands; i++)
4769 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4770 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4771 return 0;
4773 return 1;
4775 case RELOAD_FOR_INSN:
4776 for (i = 0; i < reload_n_operands; i++)
4777 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4778 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4779 return 0;
4781 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4782 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4784 case RELOAD_FOR_OTHER_ADDRESS:
4785 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4787 default:
4788 gcc_unreachable ();
4792 /* Return 1 if the value in reload reg REGNO, as used by a reload
4793 needed for the part of the insn specified by OPNUM and TYPE,
4794 is still available in REGNO at the end of the insn.
4796 We can assume that the reload reg was already tested for availability
4797 at the time it is needed, and we should not check this again,
4798 in case the reg has already been marked in use. */
4800 static int
4801 reload_reg_reaches_end_p (unsigned int regno, int opnum, enum reload_type type)
4803 int i;
4805 switch (type)
4807 case RELOAD_OTHER:
4808 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4809 its value must reach the end. */
4810 return 1;
4812 /* If this use is for part of the insn,
4813 its value reaches the end if no subsequent part uses the same register.
4814 Just like the above function, don't try to do this with lots
4815 of fallthroughs. */
4817 case RELOAD_FOR_OTHER_ADDRESS:
4818 /* Here we check for everything else, since these don't conflict
4819 with anything else and everything comes later. */
4821 for (i = 0; i < reload_n_operands; i++)
4822 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4823 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4824 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4825 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4826 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4827 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4828 return 0;
4830 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4831 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
4832 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4833 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4835 case RELOAD_FOR_INPUT_ADDRESS:
4836 case RELOAD_FOR_INPADDR_ADDRESS:
4837 /* Similar, except that we check only for this and subsequent inputs
4838 and the addresses of only subsequent inputs, and we do not need
4839 to check for RELOAD_OTHER objects since they are known not to
4840 conflict. */
4842 for (i = opnum; i < reload_n_operands; i++)
4843 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4844 return 0;
4846 for (i = opnum + 1; i < reload_n_operands; i++)
4847 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4848 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4849 return 0;
4851 for (i = 0; i < reload_n_operands; i++)
4852 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4853 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4854 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4855 return 0;
4857 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4858 return 0;
4860 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4861 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4862 && !TEST_HARD_REG_BIT (reload_reg_used, regno));
4864 case RELOAD_FOR_INPUT:
4865 /* Similar to input address, except we start at the next operand for
4866 both input and input address and we do not check for
4867 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4868 would conflict. */
4870 for (i = opnum + 1; i < reload_n_operands; i++)
4871 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4872 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4873 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4874 return 0;
4876 /* ... fall through ... */
4878 case RELOAD_FOR_OPERAND_ADDRESS:
4879 /* Check outputs and their addresses. */
4881 for (i = 0; i < reload_n_operands; i++)
4882 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4883 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4884 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4885 return 0;
4887 return (!TEST_HARD_REG_BIT (reload_reg_used, regno));
4889 case RELOAD_FOR_OPADDR_ADDR:
4890 for (i = 0; i < reload_n_operands; i++)
4891 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4892 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4893 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4894 return 0;
4896 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4897 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4898 && !TEST_HARD_REG_BIT (reload_reg_used, regno));
4900 case RELOAD_FOR_INSN:
4901 /* These conflict with other outputs with RELOAD_OTHER. So
4902 we need only check for output addresses. */
4904 opnum = reload_n_operands;
4906 /* ... fall through ... */
4908 case RELOAD_FOR_OUTPUT:
4909 case RELOAD_FOR_OUTPUT_ADDRESS:
4910 case RELOAD_FOR_OUTADDR_ADDRESS:
4911 /* We already know these can't conflict with a later output. So the
4912 only thing to check are later output addresses.
4913 Note that multiple output operands are emitted in reverse order,
4914 so the conflicting ones are those with lower indices. */
4915 for (i = 0; i < opnum; i++)
4916 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4917 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4918 return 0;
4920 return 1;
4922 default:
4923 gcc_unreachable ();
4928 /* Returns whether R1 and R2 are uniquely chained: the value of one
4929 is used by the other, and that value is not used by any other
4930 reload for this insn. This is used to partially undo the decision
4931 made in find_reloads when, in the case of multiple
4932 RELOAD_FOR_OPERAND_ADDRESS reloads, it converts all
4933 RELOAD_FOR_OPADDR_ADDR reloads into RELOAD_FOR_OPERAND_ADDRESS
4934 reloads. This code tries to avoid the conflict created by that
4935 change. It might be cleaner to explicitly keep track of which
4936 RELOAD_FOR_OPADDR_ADDR reload is associated with which
4937 RELOAD_FOR_OPERAND_ADDRESS reload, rather than to try to detect
4938 this after the fact. */
4939 static bool
4940 reloads_unique_chain_p (int r1, int r2)
4942 int i;
4944 /* We only check input reloads. */
4945 if (! rld[r1].in || ! rld[r2].in)
4946 return false;
4948 /* Avoid anything with output reloads. */
4949 if (rld[r1].out || rld[r2].out)
4950 return false;
4952 /* "chained" means one reload is a component of the other reload,
4953 not the same as the other reload. */
4954 if (rld[r1].opnum != rld[r2].opnum
4955 || rtx_equal_p (rld[r1].in, rld[r2].in)
4956 || rld[r1].optional || rld[r2].optional
4957 || ! (reg_mentioned_p (rld[r1].in, rld[r2].in)
4958 || reg_mentioned_p (rld[r2].in, rld[r1].in)))
4959 return false;
4961 for (i = 0; i < n_reloads; i ++)
4962 /* Look for input reloads that aren't our two */
4963 if (i != r1 && i != r2 && rld[i].in)
4965 /* If our reload is mentioned at all, it isn't a simple chain. */
4966 if (reg_mentioned_p (rld[r1].in, rld[i].in))
4967 return false;
4969 return true;
4972 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
4973 Return 0 otherwise.
4975 This function uses the same algorithm as reload_reg_free_p above. */
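/* For instance, a RELOAD_FOR_INPUT reload for operand 0 conflicts with a
   RELOAD_FOR_INPUT_ADDRESS reload for operand 2, since the operand-2 address
   is computed after operand 0 has already been loaded, but it does not
   conflict with the address reloads of operand 0 itself.  (The operand
   numbers are only an example.)  */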
4977 static int
4978 reloads_conflict (int r1, int r2)
4980 enum reload_type r1_type = rld[r1].when_needed;
4981 enum reload_type r2_type = rld[r2].when_needed;
4982 int r1_opnum = rld[r1].opnum;
4983 int r2_opnum = rld[r2].opnum;
4985 /* RELOAD_OTHER conflicts with everything. */
4986 if (r2_type == RELOAD_OTHER)
4987 return 1;
4989 /* Otherwise, check conflicts differently for each type. */
4991 switch (r1_type)
4993 case RELOAD_FOR_INPUT:
4994 return (r2_type == RELOAD_FOR_INSN
4995 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
4996 || r2_type == RELOAD_FOR_OPADDR_ADDR
4997 || r2_type == RELOAD_FOR_INPUT
4998 || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
4999 || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
5000 && r2_opnum > r1_opnum));
5002 case RELOAD_FOR_INPUT_ADDRESS:
5003 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
5004 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5006 case RELOAD_FOR_INPADDR_ADDRESS:
5007 return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
5008 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5010 case RELOAD_FOR_OUTPUT_ADDRESS:
5011 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
5012 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
5014 case RELOAD_FOR_OUTADDR_ADDRESS:
5015 return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
5016 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
5018 case RELOAD_FOR_OPERAND_ADDRESS:
5019 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
5020 || (r2_type == RELOAD_FOR_OPERAND_ADDRESS
5021 && !reloads_unique_chain_p (r1, r2)));
5023 case RELOAD_FOR_OPADDR_ADDR:
5024 return (r2_type == RELOAD_FOR_INPUT
5025 || r2_type == RELOAD_FOR_OPADDR_ADDR);
5027 case RELOAD_FOR_OUTPUT:
5028 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
5029 || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
5030 || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
5031 && r2_opnum >= r1_opnum));
5033 case RELOAD_FOR_INSN:
5034 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
5035 || r2_type == RELOAD_FOR_INSN
5036 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
5038 case RELOAD_FOR_OTHER_ADDRESS:
5039 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
5041 case RELOAD_OTHER:
5042 return 1;
5044 default:
5045 gcc_unreachable ();
5049 /* Indexed by reload number, 1 if incoming value
5050 inherited from previous insns. */
5051 static char reload_inherited[MAX_RELOADS];
5053 /* For an inherited reload, this is the insn the reload was inherited from,
5054 if we know it. Otherwise, this is 0. */
5055 static rtx reload_inheritance_insn[MAX_RELOADS];
5057 /* If nonzero, this is a place to get the value of the reload,
5058 rather than using reload_in. */
5059 static rtx reload_override_in[MAX_RELOADS];
5061 /* For each reload, the hard register number of the register used,
5062 or -1 if we did not need a register for this reload. */
5063 static int reload_spill_index[MAX_RELOADS];
5065 /* Subroutine of free_for_value_p, used to check a single register.
5066 START_REGNO is the starting regno of the full reload register
5067 (possibly comprising multiple hard registers) that we are considering. */
5069 static int
5070 reload_reg_free_for_value_p (int start_regno, int regno, int opnum,
5071 enum reload_type type, rtx value, rtx out,
5072 int reloadnum, int ignore_address_reloads)
5074 int time1;
5075 /* Set if we see an input reload that must not share its reload register
5076 with any new earlyclobber, but might otherwise share the reload
5077 register with an output or input-output reload. */
5078 int check_earlyclobber = 0;
5079 int i;
5080 int copy = 0;
5082 if (TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
5083 return 0;
5085 if (out == const0_rtx)
5087 copy = 1;
5088 out = NULL_RTX;
5091 /* We use some pseudo 'time' value to check if the lifetime of the
5092 new register use would overlap with that of a previous reload
5093 that is not read-only or uses a different value.
5094 The 'time' used doesn't have to be linear in any shape or form, just
5095 monotonic.
5096 Some reload types use different 'buckets' for each operand.
5097 So there are MAX_RECOG_OPERANDS different time values for each
5098 such reload type.
5099 We compute TIME1 as the time when the register for the prospective
5100 new reload ceases to be live, and TIME2 for each existing
5101 reload as the time when the reload register of that reload
5102 becomes live.
5103 Where there is little to be gained by exact lifetime calculations,
5104 we just make conservative assumptions, i.e. a longer lifetime;
5105 this is done in the 'default:' cases. */
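/* As a concrete illustration of the buckets chosen below: for operand 1,
   RELOAD_FOR_INPADDR_ADDRESS gets time 6, RELOAD_FOR_INPUT_ADDRESS gets 7,
   and a RELOAD_FOR_INPUT that is merely a copy gets 8; operand 2's buckets
   start at 10, so different operands never collide.  An input reload that
   is not a copy must stay live until the insn executes and therefore gets
   the large value MAX_RECOG_OPERANDS * 4 + 3.  */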
5106 switch (type)
5108 case RELOAD_FOR_OTHER_ADDRESS:
5109 /* RELOAD_FOR_OTHER_ADDRESS conflicts with RELOAD_OTHER reloads. */
5110 time1 = copy ? 0 : 1;
5111 break;
5112 case RELOAD_OTHER:
5113 time1 = copy ? 1 : MAX_RECOG_OPERANDS * 5 + 5;
5114 break;
5115 /* For each input, we may have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
5116 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT. By adding 0 / 1 / 2,
5117 respectively, to the time values for these, we get distinct time
5118 values. To get distinct time values for each operand, we have to
5119 multiply opnum by at least three. We round that up to four because
5120 multiply by four is often cheaper. */
5121 case RELOAD_FOR_INPADDR_ADDRESS:
5122 time1 = opnum * 4 + 2;
5123 break;
5124 case RELOAD_FOR_INPUT_ADDRESS:
5125 time1 = opnum * 4 + 3;
5126 break;
5127 case RELOAD_FOR_INPUT:
5128 /* All RELOAD_FOR_INPUT reloads remain live till the instruction
5129 executes (inclusive). */
5130 time1 = copy ? opnum * 4 + 4 : MAX_RECOG_OPERANDS * 4 + 3;
5131 break;
5132 case RELOAD_FOR_OPADDR_ADDR:
5133 /* opnum * 4 + 4
5134 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
5135 time1 = MAX_RECOG_OPERANDS * 4 + 1;
5136 break;
5137 case RELOAD_FOR_OPERAND_ADDRESS:
5138 /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
5139 is executed. */
5140 time1 = copy ? MAX_RECOG_OPERANDS * 4 + 2 : MAX_RECOG_OPERANDS * 4 + 3;
5141 break;
5142 case RELOAD_FOR_OUTADDR_ADDRESS:
5143 time1 = MAX_RECOG_OPERANDS * 4 + 4 + opnum;
5144 break;
5145 case RELOAD_FOR_OUTPUT_ADDRESS:
5146 time1 = MAX_RECOG_OPERANDS * 4 + 5 + opnum;
5147 break;
5148 default:
5149 time1 = MAX_RECOG_OPERANDS * 5 + 5;
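/* Now look at every other reload whose assigned reload register block
   contains REGNO.  The test below uses an unsigned-subtraction trick:
   (unsigned) regno - true_regnum (reg) <= nregs - 1 holds exactly when
   REGNO lies inside reload I's block of hard registers, because a REGNO
   below the block makes the subtraction wrap around to a huge value.  */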
5152 for (i = 0; i < n_reloads; i++)
5154 rtx reg = rld[i].reg_rtx;
5155 if (reg && REG_P (reg)
5156 && ((unsigned) regno - true_regnum (reg)
5157 <= hard_regno_nregs[REGNO (reg)][GET_MODE (reg)] - (unsigned) 1)
5158 && i != reloadnum)
5160 rtx other_input = rld[i].in;
5162 /* If the other reload loads the same input value, that
5163 will not cause a conflict, but only if it is loading it into
5164 the same register. */
5165 if (true_regnum (reg) != start_regno)
5166 other_input = NULL_RTX;
5167 if (! other_input || ! rtx_equal_p (other_input, value)
5168 || rld[i].out || out)
5170 int time2;
5171 switch (rld[i].when_needed)
5173 case RELOAD_FOR_OTHER_ADDRESS:
5174 time2 = 0;
5175 break;
5176 case RELOAD_FOR_INPADDR_ADDRESS:
5177 /* find_reloads makes sure that a
5178 RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is only used
5179 by at most one - the first -
5180 RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS. If the
5181 address reload is inherited, the address address reload
5182 goes away, so we can ignore this conflict. */
5183 if (type == RELOAD_FOR_INPUT_ADDRESS && reloadnum == i + 1
5184 && ignore_address_reloads
5185 /* Unless the RELOAD_FOR_INPUT is an auto_inc expression.
5186 Then the address address is still needed to store
5187 back the new address. */
5188 && ! rld[reloadnum].out)
5189 continue;
5190 /* Likewise, if a RELOAD_FOR_INPUT can inherit a value, its
5191 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS
5192 reloads go away. */
5193 if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
5194 && ignore_address_reloads
5195 /* Unless we are reloading an auto_inc expression. */
5196 && ! rld[reloadnum].out)
5197 continue;
5198 time2 = rld[i].opnum * 4 + 2;
5199 break;
5200 case RELOAD_FOR_INPUT_ADDRESS:
5201 if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
5202 && ignore_address_reloads
5203 && ! rld[reloadnum].out)
5204 continue;
5205 time2 = rld[i].opnum * 4 + 3;
5206 break;
5207 case RELOAD_FOR_INPUT:
5208 time2 = rld[i].opnum * 4 + 4;
5209 check_earlyclobber = 1;
5210 break;
5211 /* rld[i].opnum * 4 + 4 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4
5212 == MAX_RECOG_OPERANDS * 4 */
5213 case RELOAD_FOR_OPADDR_ADDR:
5214 if (type == RELOAD_FOR_OPERAND_ADDRESS && reloadnum == i + 1
5215 && ignore_address_reloads
5216 && ! rld[reloadnum].out)
5217 continue;
5218 time2 = MAX_RECOG_OPERANDS * 4 + 1;
5219 break;
5220 case RELOAD_FOR_OPERAND_ADDRESS:
5221 time2 = MAX_RECOG_OPERANDS * 4 + 2;
5222 check_earlyclobber = 1;
5223 break;
5224 case RELOAD_FOR_INSN:
5225 time2 = MAX_RECOG_OPERANDS * 4 + 3;
5226 break;
5227 case RELOAD_FOR_OUTPUT:
5228 /* All RELOAD_FOR_OUTPUT reloads become live just after the
5229 instruction is executed. */
5230 time2 = MAX_RECOG_OPERANDS * 4 + 4;
5231 break;
5232 /* The first RELOAD_FOR_OUTADDR_ADDRESS reload conflicts with
5233 the RELOAD_FOR_OUTPUT reloads, so assign it the same time
5234 value. */
5235 case RELOAD_FOR_OUTADDR_ADDRESS:
5236 if (type == RELOAD_FOR_OUTPUT_ADDRESS && reloadnum == i + 1
5237 && ignore_address_reloads
5238 && ! rld[reloadnum].out)
5239 continue;
5240 time2 = MAX_RECOG_OPERANDS * 4 + 4 + rld[i].opnum;
5241 break;
5242 case RELOAD_FOR_OUTPUT_ADDRESS:
5243 time2 = MAX_RECOG_OPERANDS * 4 + 5 + rld[i].opnum;
5244 break;
5245 case RELOAD_OTHER:
5246 /* If there is no conflict in the input part, handle this
5247 like an output reload. */
5248 if (! rld[i].in || rtx_equal_p (other_input, value))
5250 time2 = MAX_RECOG_OPERANDS * 4 + 4;
5251 /* Earlyclobbered outputs must conflict with inputs. */
5252 if (earlyclobber_operand_p (rld[i].out))
5253 time2 = MAX_RECOG_OPERANDS * 4 + 3;
5255 break;
5257 time2 = 1;
5258 /* RELOAD_OTHER might be live beyond instruction execution,
5259 but this is not obvious when we set time2 = 1. So check
5260 here if there might be a problem with the new reload
5261 clobbering the register used by the RELOAD_OTHER. */
5262 if (out)
5263 return 0;
5264 break;
5265 default:
5266 return 0;
5268 if ((time1 >= time2
5269 && (! rld[i].in || rld[i].out
5270 || ! rtx_equal_p (other_input, value)))
5271 || (out && rld[reloadnum].out_reg
5272 && time2 >= MAX_RECOG_OPERANDS * 4 + 3))
5273 return 0;
5278 /* Earlyclobbered outputs must conflict with inputs. */
5279 if (check_earlyclobber && out && earlyclobber_operand_p (out))
5280 return 0;
5282 return 1;
5285 /* Return 1 if the value in reload reg REGNO, as used by a reload
5286 needed for the part of the insn specified by OPNUM and TYPE,
5287 may be used to load VALUE into it.
5289 MODE is the mode in which the register is used; this is needed to
5290 determine how many hard regs to test.
5292 Other read-only reloads with the same value do not conflict
5293 unless OUT is nonzero and these other reloads have to live while
5294 output reloads live.
5295 If OUT is CONST0_RTX, this is a special case: it means that the
5296 test should not be for using register REGNO as reload register, but
5297 for copying from register REGNO into the reload register.
5299 RELOADNUM is the number of the reload we want to load this value for;
5300 a reload does not conflict with itself.
5302 When IGNORE_ADDRESS_RELOADS is set, conflicts with reloads that load
5303 an address for the very reload we are considering are ignored.
5305 The caller has to make sure that there is no conflict with the return
5306 register. */
5308 static int
5309 free_for_value_p (int regno, enum machine_mode mode, int opnum,
5310 enum reload_type type, rtx value, rtx out, int reloadnum,
5311 int ignore_address_reloads)
5313 int nregs = hard_regno_nregs[regno][mode];
5314 while (nregs-- > 0)
5315 if (! reload_reg_free_for_value_p (regno, regno + nregs, opnum, type,
5316 value, out, reloadnum,
5317 ignore_address_reloads))
5318 return 0;
5319 return 1;
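/* Usage sketch (this mirrors the call made from allocate_reload_reg further
   below): a caller vets a candidate hard register with
     free_for_value_p (regnum, rld[r].mode, rld[r].opnum,
			rld[r].when_needed, rld[r].in, rld[r].out, r, 1)
   so that every hard register covered by rld[r].mode is checked before the
   candidate is committed to.  */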
5322 /* Return nonzero if the rtx X is invariant over the current function. */
5323 /* ??? Actually, the places where we use this expect exactly what is
5324 tested here, and not everything that is function invariant. In
5325 particular, the frame pointer and arg pointer are special cased;
5326 pic_offset_table_rtx is not, and we must not spill these things to
5327 memory. */
5330 function_invariant_p (const_rtx x)
5332 if (CONSTANT_P (x))
5333 return 1;
5334 if (x == frame_pointer_rtx || x == arg_pointer_rtx)
5335 return 1;
5336 if (GET_CODE (x) == PLUS
5337 && (XEXP (x, 0) == frame_pointer_rtx || XEXP (x, 0) == arg_pointer_rtx)
5338 && CONSTANT_P (XEXP (x, 1)))
5339 return 1;
5340 return 0;
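/* For instance, frame_pointer_rtx itself and
   (plus (reg frame_pointer) (const_int 8)) are accepted above, whereas a
   PLUS based on pic_offset_table_rtx is rejected, matching the caveat in
   the ??? comment before this function.  */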
5343 /* Determine whether the reload reg X overlaps any rtx'es used for
5344 overriding inheritance. Return nonzero if so. */
5346 static int
5347 conflicts_with_override (rtx x)
5349 int i;
5350 for (i = 0; i < n_reloads; i++)
5351 if (reload_override_in[i]
5352 && reg_overlap_mentioned_p (x, reload_override_in[i]))
5353 return 1;
5354 return 0;
5357 /* Give an error message saying we failed to find a reload for INSN,
5358 and clear out reload R. */
5359 static void
5360 failed_reload (rtx insn, int r)
5362 if (asm_noperands (PATTERN (insn)) < 0)
5363 /* It's the compiler's fault. */
5364 fatal_insn ("could not find a spill register", insn);
5366 /* It's the user's fault; the operand's mode and constraint
5367 don't match. Disable this reload so we don't crash in final. */
5368 error_for_asm (insn,
5369 "%<asm%> operand constraint incompatible with operand size");
5370 rld[r].in = 0;
5371 rld[r].out = 0;
5372 rld[r].reg_rtx = 0;
5373 rld[r].optional = 1;
5374 rld[r].secondary_p = 1;
5377 /* I is the index in SPILL_REG_RTX of the reload register we are to allocate
5378 for reload R. If it's valid, get an rtx for it. Return nonzero if
5379 successful. */
5380 static int
5381 set_reload_reg (int i, int r)
5383 int regno;
5384 rtx reg = spill_reg_rtx[i];
5386 if (reg == 0 || GET_MODE (reg) != rld[r].mode)
5387 spill_reg_rtx[i] = reg
5388 = gen_rtx_REG (rld[r].mode, spill_regs[i]);
5390 regno = true_regnum (reg);
5392 /* Detect when the reload reg can't hold the reload mode.
5393 This used to be one `if', but the Sequent compiler couldn't handle that. */
5394 if (HARD_REGNO_MODE_OK (regno, rld[r].mode))
5396 enum machine_mode test_mode = VOIDmode;
5397 if (rld[r].in)
5398 test_mode = GET_MODE (rld[r].in);
5399 /* If rld[r].in has VOIDmode, it means we will load it
5400 in whatever mode the reload reg has: to wit, rld[r].mode.
5401 We have already tested that for validity. */
5402 /* Aside from that, we need to test that the expressions
5403 to reload from or into have modes which are valid for this
5404 reload register. Otherwise the reload insns would be invalid. */
5405 if (! (rld[r].in != 0 && test_mode != VOIDmode
5406 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
5407 if (! (rld[r].out != 0
5408 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (rld[r].out))))
5410 /* The reg is OK. */
5411 last_spill_reg = i;
5413 /* Mark as in use for this insn the reload regs we use
5414 for this. */
5415 mark_reload_reg_in_use (spill_regs[i], rld[r].opnum,
5416 rld[r].when_needed, rld[r].mode);
5418 rld[r].reg_rtx = reg;
5419 reload_spill_index[r] = spill_regs[i];
5420 return 1;
5423 return 0;
5426 /* Find a spill register to use as a reload register for reload R.
5427 LAST_RELOAD is nonzero if this is the last reload for the insn being
5428 processed.
5430 Set rld[R].reg_rtx to the register allocated.
5432 We return 1 if successful, or 0 if we couldn't find a spill reg and
5433 we didn't change anything. */
5435 static int
5436 allocate_reload_reg (struct insn_chain *chain ATTRIBUTE_UNUSED, int r,
5437 int last_reload)
5439 int i, pass, count;
5441 /* If we put this reload ahead, thinking it is a group,
5442 then insist on finding a group. Otherwise we can grab a
5443 reg that some other reload needs.
5444 (That can happen when we have a 68000 DATA_OR_FP_REG
5445 which is a group of data regs or one fp reg.)
5446 We need not be so restrictive if there are no more reloads
5447 for this insn.
5449 ??? Really it would be nicer to have smarter handling
5450 for that kind of reg class, where a problem like this is normal.
5451 Perhaps those classes should be avoided for reloading
5452 by use of more alternatives. */
5454 int force_group = rld[r].nregs > 1 && ! last_reload;
5456 /* If we want a single register and haven't yet found one,
5457 take any reg in the right class and not in use.
5458 If we want a consecutive group, here is where we look for it.
5460 We use two passes so we can first look for reload regs to
5461 reuse, which are already in use for other reloads in this insn,
5462 and only then use additional registers.
5463 I think that maximizing reuse is needed to make sure we don't
5464 run out of reload regs. Suppose we have three reloads, and
5465 reloads A and B can share regs. These need two regs.
5466 Suppose A and B are given different regs.
5467 That leaves none for C. */
5468 for (pass = 0; pass < 2; pass++)
5470 /* I is the index in spill_regs.
5471 We advance it round-robin between insns to use all spill regs
5472 equally, so that inherited reloads have a chance
5473 of leapfrogging each other. */
5475 i = last_spill_reg;
5477 for (count = 0; count < n_spills; count++)
5479 int class = (int) rld[r].class;
5480 int regnum;
5482 i++;
5483 if (i >= n_spills)
5484 i -= n_spills;
5485 regnum = spill_regs[i];
5487 if ((reload_reg_free_p (regnum, rld[r].opnum,
5488 rld[r].when_needed)
5489 || (rld[r].in
5490 /* We check reload_reg_used to make sure we
5491 don't clobber the return register. */
5492 && ! TEST_HARD_REG_BIT (reload_reg_used, regnum)
5493 && free_for_value_p (regnum, rld[r].mode, rld[r].opnum,
5494 rld[r].when_needed, rld[r].in,
5495 rld[r].out, r, 1)))
5496 && TEST_HARD_REG_BIT (reg_class_contents[class], regnum)
5497 && HARD_REGNO_MODE_OK (regnum, rld[r].mode)
5498 /* Look first for regs to share, then for unshared. But
5499 don't share regs used for inherited reloads; they are
5500 the ones we want to preserve. */
5501 && (pass
5502 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
5503 regnum)
5504 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
5505 regnum))))
5507 int nr = hard_regno_nregs[regnum][rld[r].mode];
5508 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
5509 (on 68000) got us two FP regs. If NR is 1,
5510 we would reject both of them. */
5511 if (force_group)
5512 nr = rld[r].nregs;
5513 /* If we need only one reg, we have already won. */
5514 if (nr == 1)
5516 /* But reject a single reg if we demand a group. */
5517 if (force_group)
5518 continue;
5519 break;
5521 /* Otherwise check that as many consecutive regs as we need
5522 are available here. */
5523 while (nr > 1)
5525 int regno = regnum + nr - 1;
5526 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
5527 && spill_reg_order[regno] >= 0
5528 && reload_reg_free_p (regno, rld[r].opnum,
5529 rld[r].when_needed)))
5530 break;
5531 nr--;
5533 if (nr == 1)
5534 break;
5538 /* If we found something on pass 1, omit pass 2. */
5539 if (count < n_spills)
5540 break;
5543 /* We should have found a spill register by now. */
5544 if (count >= n_spills)
5545 return 0;
5547 /* I is the index in SPILL_REG_RTX of the reload register we are to
5548 allocate. Get an rtx for it and find its register number. */
5550 return set_reload_reg (i, r);
5553 /* Initialize all the tables needed to allocate reload registers.
5554 CHAIN is the insn currently being processed; SAVE_RELOAD_REG_RTX
5555 is the array we use to restore the reg_rtx field for every reload. */
5557 static void
5558 choose_reload_regs_init (struct insn_chain *chain, rtx *save_reload_reg_rtx)
5560 int i;
5562 for (i = 0; i < n_reloads; i++)
5563 rld[i].reg_rtx = save_reload_reg_rtx[i];
5565 memset (reload_inherited, 0, MAX_RELOADS);
5566 memset (reload_inheritance_insn, 0, MAX_RELOADS * sizeof (rtx));
5567 memset (reload_override_in, 0, MAX_RELOADS * sizeof (rtx));
5569 CLEAR_HARD_REG_SET (reload_reg_used);
5570 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
5571 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
5572 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
5573 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
5574 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
5576 CLEAR_HARD_REG_SET (reg_used_in_insn);
5578 HARD_REG_SET tmp;
5579 REG_SET_TO_HARD_REG_SET (tmp, &chain->live_throughout);
5580 IOR_HARD_REG_SET (reg_used_in_insn, tmp);
5581 REG_SET_TO_HARD_REG_SET (tmp, &chain->dead_or_set);
5582 IOR_HARD_REG_SET (reg_used_in_insn, tmp);
5583 compute_use_by_pseudos (&reg_used_in_insn, &chain->live_throughout);
5584 compute_use_by_pseudos (&reg_used_in_insn, &chain->dead_or_set);
5587 for (i = 0; i < reload_n_operands; i++)
5589 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
5590 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
5591 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
5592 CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
5593 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
5594 CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
5597 COMPL_HARD_REG_SET (reload_reg_unavailable, chain->used_spill_regs);
5599 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5601 for (i = 0; i < n_reloads; i++)
5602 /* If we have already decided to use a certain register,
5603 don't use it in another way. */
5604 if (rld[i].reg_rtx)
5605 mark_reload_reg_in_use (REGNO (rld[i].reg_rtx), rld[i].opnum,
5606 rld[i].when_needed, rld[i].mode);
5609 /* Assign hard reg targets for the pseudo-registers we must reload
5610 into hard regs for this insn.
5611 Also output the instructions to copy them in and out of the hard regs.
5613 For machines with register classes, we are responsible for
5614 finding a reload reg in the proper class. */
5616 static void
5617 choose_reload_regs (struct insn_chain *chain)
5619 rtx insn = chain->insn;
5620 int i, j;
5621 unsigned int max_group_size = 1;
5622 enum reg_class group_class = NO_REGS;
5623 int pass, win, inheritance;
5625 rtx save_reload_reg_rtx[MAX_RELOADS];
5627 /* In order to be certain of getting the registers we need,
5628 we must sort the reloads into order of increasing register class.
5629 Then our grabbing of reload registers will parallel the process
5630 that provided the reload registers.
5632 Also note whether any of the reloads wants a consecutive group of regs.
5633 If so, record the maximum size of the group desired and what
5634 register class contains all the groups needed by this insn. */
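  /* A hypothetical illustration: if one reload can only use a class
     containing a single hard register while another accepts any general
     register, processing the narrow-class reload first keeps its only
     acceptable register from being taken by the less constrained one.  */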
5636 for (j = 0; j < n_reloads; j++)
5638 reload_order[j] = j;
5639 if (rld[j].reg_rtx != NULL_RTX)
5641 gcc_assert (REG_P (rld[j].reg_rtx)
5642 && HARD_REGISTER_P (rld[j].reg_rtx));
5643 reload_spill_index[j] = REGNO (rld[j].reg_rtx);
5645 else
5646 reload_spill_index[j] = -1;
5648 if (rld[j].nregs > 1)
5650 max_group_size = MAX (rld[j].nregs, max_group_size);
5651 group_class
5652 = reg_class_superunion[(int) rld[j].class][(int) group_class];
5655 save_reload_reg_rtx[j] = rld[j].reg_rtx;
5658 if (n_reloads > 1)
5659 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
5661 /* If -O, try first with inheritance, then turning it off.
5662 If not -O, don't do inheritance.
5663 Using inheritance when not optimizing leads to paradoxes
5664 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5665 because one side of the comparison might be inherited. */
5666 win = 0;
5667 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
5669 choose_reload_regs_init (chain, save_reload_reg_rtx);
5671 /* Process the reloads in order of preference just found.
5672 Beyond this point, subregs can be found in reload_reg_rtx.
5674 This used to look for an existing reloaded home for all of the
5675 reloads, and only then perform any new reloads. But that could lose
5676 if the reloads were done out of reg-class order because a later
5677 reload with a looser constraint might have an old home in a register
5678 needed by an earlier reload with a tighter constraint.
5680 To solve this, we make two passes over the reloads, in the order
5681 described above. In the first pass we try to inherit a reload
5682 from a previous insn. If there is a later reload that needs a
5683 class that is a proper subset of the class being processed, we must
5684 also allocate a spill register during the first pass.
5686 Then make a second pass over the reloads to allocate any reloads
5687 that haven't been given registers yet. */
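  /* Orientation note: if this attempt fails for some reload, the outer
     loop above retries once with INHERITANCE cleared, after undoing the
     partial allocation via choose_reload_regs_init (see further below).  */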
5689 for (j = 0; j < n_reloads; j++)
5691 int r = reload_order[j];
5692 rtx search_equiv = NULL_RTX;
5694 /* Ignore reloads that got marked inoperative. */
5695 if (rld[r].out == 0 && rld[r].in == 0
5696 && ! rld[r].secondary_p)
5697 continue;
5699 /* If find_reloads chose to use reload_in or reload_out as a reload
5700 register, we don't need to choose one. Otherwise, try even if it
5701 found one since we might save an insn if we find the value lying
5702 around.
5703 Try also when reload_in is a pseudo without a hard reg. */
5704 if (rld[r].in != 0 && rld[r].reg_rtx != 0
5705 && (rtx_equal_p (rld[r].in, rld[r].reg_rtx)
5706 || (rtx_equal_p (rld[r].out, rld[r].reg_rtx)
5707 && !MEM_P (rld[r].in)
5708 && true_regnum (rld[r].in) < FIRST_PSEUDO_REGISTER)))
5709 continue;
5711 #if 0 /* No longer needed for correct operation.
5712 It might give better code, or might not; worth an experiment? */
5713 /* If this is an optional reload, we can't inherit from earlier insns
5714 until we are sure that any non-optional reloads have been allocated.
5715 The following code takes advantage of the fact that optional reloads
5716 are at the end of reload_order. */
5717 if (rld[r].optional != 0)
5718 for (i = 0; i < j; i++)
5719 if ((rld[reload_order[i]].out != 0
5720 || rld[reload_order[i]].in != 0
5721 || rld[reload_order[i]].secondary_p)
5722 && ! rld[reload_order[i]].optional
5723 && rld[reload_order[i]].reg_rtx == 0)
5724 allocate_reload_reg (chain, reload_order[i], 0);
5725 #endif
5727 /* First see if this pseudo is already available as reloaded
5728 for a previous insn. We cannot try to inherit for reloads
5729 that are smaller than the maximum number of registers needed
5730 for groups unless the register we would allocate cannot be used
5731 for the groups.
5733 We could check here to see if this is a secondary reload for
5734 an object that is already in a register of the desired class.
5735 This would avoid the need for the secondary reload register.
5736 But this is complex because we can't easily determine what
5737 objects might want to be loaded via this reload. So let a
5738 register be allocated here. In `emit_reload_insns' we suppress
5739 one of the loads in the case described above. */
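	 /* Orientation note for the block below: a candidate register is
	    accepted only while reg_reloaded_contents[] and reg_reloaded_valid
	    still say it holds REGNO's value; depending on further checks it
	    is then either used as the reload register itself or merely
	    recorded in reload_override_in[] as a place to reload from.  */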
5741 if (inheritance)
5743 int byte = 0;
5744 int regno = -1;
5745 enum machine_mode mode = VOIDmode;
5747 if (rld[r].in == 0)
5749 else if (REG_P (rld[r].in))
5751 regno = REGNO (rld[r].in);
5752 mode = GET_MODE (rld[r].in);
5754 else if (REG_P (rld[r].in_reg))
5756 regno = REGNO (rld[r].in_reg);
5757 mode = GET_MODE (rld[r].in_reg);
5759 else if (GET_CODE (rld[r].in_reg) == SUBREG
5760 && REG_P (SUBREG_REG (rld[r].in_reg)))
5762 regno = REGNO (SUBREG_REG (rld[r].in_reg));
5763 if (regno < FIRST_PSEUDO_REGISTER)
5764 regno = subreg_regno (rld[r].in_reg);
5765 else
5766 byte = SUBREG_BYTE (rld[r].in_reg);
5767 mode = GET_MODE (rld[r].in_reg);
5769 #ifdef AUTO_INC_DEC
5770 else if (GET_RTX_CLASS (GET_CODE (rld[r].in_reg)) == RTX_AUTOINC
5771 && REG_P (XEXP (rld[r].in_reg, 0)))
5773 regno = REGNO (XEXP (rld[r].in_reg, 0));
5774 mode = GET_MODE (XEXP (rld[r].in_reg, 0));
5775 rld[r].out = rld[r].in;
5777 #endif
5778 #if 0
5779 /* This won't work, since REGNO can be a pseudo reg number.
5780 Also, it takes much more hair to keep track of all the things
5781 that can invalidate an inherited reload of part of a pseudoreg. */
5782 else if (GET_CODE (rld[r].in) == SUBREG
5783 && REG_P (SUBREG_REG (rld[r].in)))
5784 regno = subreg_regno (rld[r].in);
5785 #endif
5787 if (regno >= 0
5788 && reg_last_reload_reg[regno] != 0
5789 #ifdef CANNOT_CHANGE_MODE_CLASS
5790 /* Verify that the register it's in can be used in
5791 mode MODE. */
5792 && !REG_CANNOT_CHANGE_MODE_P (REGNO (reg_last_reload_reg[regno]),
5793 GET_MODE (reg_last_reload_reg[regno]),
5794 mode)
5795 #endif
5798 enum reg_class class = rld[r].class, last_class;
5799 rtx last_reg = reg_last_reload_reg[regno];
5800 enum machine_mode need_mode;
5802 i = REGNO (last_reg);
5803 i += subreg_regno_offset (i, GET_MODE (last_reg), byte, mode);
5804 last_class = REGNO_REG_CLASS (i);
5806 if (byte == 0)
5807 need_mode = mode;
5808 else
5809 need_mode
5810 = smallest_mode_for_size (GET_MODE_BITSIZE (mode)
5811 + byte * BITS_PER_UNIT,
5812 GET_MODE_CLASS (mode));
5814 if ((GET_MODE_SIZE (GET_MODE (last_reg))
5815 >= GET_MODE_SIZE (need_mode))
5816 && reg_reloaded_contents[i] == regno
5817 && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
5818 && HARD_REGNO_MODE_OK (i, rld[r].mode)
5819 && (TEST_HARD_REG_BIT (reg_class_contents[(int) class], i)
5820 /* Even if we can't use this register as a reload
5821 register, we might use it for reload_override_in,
5822 if copying it to the desired class is cheap
5823 enough. */
5824 || ((REGISTER_MOVE_COST (mode, last_class, class)
5825 < MEMORY_MOVE_COST (mode, class, 1))
5826 && (secondary_reload_class (1, class, mode,
5827 last_reg)
5828 == NO_REGS)
5829 #ifdef SECONDARY_MEMORY_NEEDED
5830 && ! SECONDARY_MEMORY_NEEDED (last_class, class,
5831 mode)
5832 #endif
5835 && (rld[r].nregs == max_group_size
5836 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
5838 && free_for_value_p (i, rld[r].mode, rld[r].opnum,
5839 rld[r].when_needed, rld[r].in,
5840 const0_rtx, r, 1))
5842 /* If a group is needed, verify that all the subsequent
5843 registers still have their values intact. */
5844 int nr = hard_regno_nregs[i][rld[r].mode];
5845 int k;
5847 for (k = 1; k < nr; k++)
5848 if (reg_reloaded_contents[i + k] != regno
5849 || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))
5850 break;
5852 if (k == nr)
5854 int i1;
5855 int bad_for_class;
5857 last_reg = (GET_MODE (last_reg) == mode
5858 ? last_reg : gen_rtx_REG (mode, i));
5860 bad_for_class = 0;
5861 for (k = 0; k < nr; k++)
5862 bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].class],
5863 i+k);
5865 /* We found a register that contains the
5866 value we need. If this register is the
5867 same as an `earlyclobber' operand of the
5868 current insn, just mark it as a place to
5869 reload from since we can't use it as the
5870 reload register itself. */
5872 for (i1 = 0; i1 < n_earlyclobbers; i1++)
5873 if (reg_overlap_mentioned_for_reload_p
5874 (reg_last_reload_reg[regno],
5875 reload_earlyclobbers[i1]))
5876 break;
5878 if (i1 != n_earlyclobbers
5879 || ! (free_for_value_p (i, rld[r].mode,
5880 rld[r].opnum,
5881 rld[r].when_needed, rld[r].in,
5882 rld[r].out, r, 1))
5883 /* Don't use it if we'd clobber a pseudo reg. */
5884 || (TEST_HARD_REG_BIT (reg_used_in_insn, i)
5885 && rld[r].out
5886 && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
5887 /* Don't clobber the frame pointer. */
5888 || (i == HARD_FRAME_POINTER_REGNUM
5889 && frame_pointer_needed
5890 && rld[r].out)
5891 /* Don't really use the inherited spill reg
5892 if we need it wider than we've got it. */
5893 || (GET_MODE_SIZE (rld[r].mode)
5894 > GET_MODE_SIZE (mode))
5895 || bad_for_class
5897 /* If find_reloads chose reload_out as reload
5898 register, stay with it - that leaves the
5899 inherited register for subsequent reloads. */
5900 || (rld[r].out && rld[r].reg_rtx
5901 && rtx_equal_p (rld[r].out, rld[r].reg_rtx)))
5903 if (! rld[r].optional)
5905 reload_override_in[r] = last_reg;
5906 reload_inheritance_insn[r]
5907 = reg_reloaded_insn[i];
5910 else
5912 int k;
5913 /* We can use this as a reload reg. */
5914 /* Mark the register as in use for this part of
5915 the insn. */
5916 mark_reload_reg_in_use (i,
5917 rld[r].opnum,
5918 rld[r].when_needed,
5919 rld[r].mode);
5920 rld[r].reg_rtx = last_reg;
5921 reload_inherited[r] = 1;
5922 reload_inheritance_insn[r]
5923 = reg_reloaded_insn[i];
5924 reload_spill_index[r] = i;
5925 for (k = 0; k < nr; k++)
5926 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5927 i + k);
5934 /* Here's another way to see if the value is already lying around. */
5935 if (inheritance
5936 && rld[r].in != 0
5937 && ! reload_inherited[r]
5938 && rld[r].out == 0
5939 && (CONSTANT_P (rld[r].in)
5940 || GET_CODE (rld[r].in) == PLUS
5941 || REG_P (rld[r].in)
5942 || MEM_P (rld[r].in))
5943 && (rld[r].nregs == max_group_size
5944 || ! reg_classes_intersect_p (rld[r].class, group_class)))
5945 search_equiv = rld[r].in;
5946 /* If this is an output reload from a simple move insn, see
5947 whether an equivalence for the input is available. */
5948 else if (inheritance && rld[r].in == 0 && rld[r].out != 0)
5950 rtx set = single_set (insn);
5952 if (set
5953 && rtx_equal_p (rld[r].out, SET_DEST (set))
5954 && CONSTANT_P (SET_SRC (set)))
5955 search_equiv = SET_SRC (set);
5958 if (search_equiv)
5960 rtx equiv
5961 = find_equiv_reg (search_equiv, insn, rld[r].class,
5962 -1, NULL, 0, rld[r].mode);
5963 int regno = 0;
5965 if (equiv != 0)
5967 if (REG_P (equiv))
5968 regno = REGNO (equiv);
5969 else
5971 /* This must be a SUBREG of a hard register.
5972 Make a new REG since this might be used in an
5973 address and not all machines support SUBREGs
5974 there. */
5975 gcc_assert (GET_CODE (equiv) == SUBREG);
5976 regno = subreg_regno (equiv);
5977 equiv = gen_rtx_REG (rld[r].mode, regno);
5978 /* If we choose EQUIV as the reload register, but the
5979 loop below decides to cancel the inheritance, we'll
5980 end up reloading EQUIV in rld[r].mode, not the mode
5981 it had originally. That isn't safe when EQUIV isn't
5982 available as a spill register since its value might
5983 still be live at this point. */
5984 for (i = regno; i < regno + (int) rld[r].nregs; i++)
5985 if (TEST_HARD_REG_BIT (reload_reg_unavailable, i))
5986 equiv = 0;
5990 /* If we found a spill reg, reject it unless it is free
5991 and of the desired class. */
5992 if (equiv != 0)
5994 int regs_used = 0;
5995 int bad_for_class = 0;
5996 int max_regno = regno + rld[r].nregs;
5998 for (i = regno; i < max_regno; i++)
6000 regs_used |= TEST_HARD_REG_BIT (reload_reg_used_at_all,
6002 bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].class],
6006 if ((regs_used
6007 && ! free_for_value_p (regno, rld[r].mode,
6008 rld[r].opnum, rld[r].when_needed,
6009 rld[r].in, rld[r].out, r, 1))
6010 || bad_for_class)
6011 equiv = 0;
6014 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, rld[r].mode))
6015 equiv = 0;
6017 /* We found a register that contains the value we need.
6018 If this register is the same as an `earlyclobber' operand
6019 of the current insn, just mark it as a place to reload from
6020 since we can't use it as the reload register itself. */
6022 if (equiv != 0)
6023 for (i = 0; i < n_earlyclobbers; i++)
6024 if (reg_overlap_mentioned_for_reload_p (equiv,
6025 reload_earlyclobbers[i]))
6027 if (! rld[r].optional)
6028 reload_override_in[r] = equiv;
6029 equiv = 0;
6030 break;
6033 /* If the equiv register we have found is explicitly clobbered
6034 in the current insn, the reload type determines whether we can
6035 use it, use it only for reload_override_in, or not use it at all.
6036 In particular, we then can't use EQUIV for a
6037 RELOAD_FOR_OUTPUT_ADDRESS reload. */
6039 if (equiv != 0)
6041 if (regno_clobbered_p (regno, insn, rld[r].mode, 2))
6042 switch (rld[r].when_needed)
6044 case RELOAD_FOR_OTHER_ADDRESS:
6045 case RELOAD_FOR_INPADDR_ADDRESS:
6046 case RELOAD_FOR_INPUT_ADDRESS:
6047 case RELOAD_FOR_OPADDR_ADDR:
6048 break;
6049 case RELOAD_OTHER:
6050 case RELOAD_FOR_INPUT:
6051 case RELOAD_FOR_OPERAND_ADDRESS:
6052 if (! rld[r].optional)
6053 reload_override_in[r] = equiv;
6054 /* Fall through. */
6055 default:
6056 equiv = 0;
6057 break;
6059 else if (regno_clobbered_p (regno, insn, rld[r].mode, 1))
6060 switch (rld[r].when_needed)
6062 case RELOAD_FOR_OTHER_ADDRESS:
6063 case RELOAD_FOR_INPADDR_ADDRESS:
6064 case RELOAD_FOR_INPUT_ADDRESS:
6065 case RELOAD_FOR_OPADDR_ADDR:
6066 case RELOAD_FOR_OPERAND_ADDRESS:
6067 case RELOAD_FOR_INPUT:
6068 break;
6069 case RELOAD_OTHER:
6070 if (! rld[r].optional)
6071 reload_override_in[r] = equiv;
6072 /* Fall through. */
6073 default:
6074 equiv = 0;
6075 break;
6079 /* If we found an equivalent reg, say no code need be generated
6080 to load it, and use it as our reload reg. */
6081 if (equiv != 0
6082 && (regno != HARD_FRAME_POINTER_REGNUM
6083 || !frame_pointer_needed))
6085 int nr = hard_regno_nregs[regno][rld[r].mode];
6086 int k;
6087 rld[r].reg_rtx = equiv;
6088 reload_inherited[r] = 1;
6090 /* If reg_reloaded_valid is not set for this register,
6091 there might be a stale spill_reg_store lying around.
6092 We must clear it, since otherwise emit_reload_insns
6093 might delete the store. */
6094 if (! TEST_HARD_REG_BIT (reg_reloaded_valid, regno))
6095 spill_reg_store[regno] = NULL_RTX;
6096 /* If any of the hard registers in EQUIV are spill
6097 registers, mark them as in use for this insn. */
6098 for (k = 0; k < nr; k++)
6100 i = spill_reg_order[regno + k];
6101 if (i >= 0)
6103 mark_reload_reg_in_use (regno, rld[r].opnum,
6104 rld[r].when_needed,
6105 rld[r].mode);
6106 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6107 regno + k);
6113 /* If we found a register to use already, or if this is an optional
6114 reload, we are done. */
6115 if (rld[r].reg_rtx != 0 || rld[r].optional != 0)
6116 continue;
6118 #if 0
6119 /* No longer needed for correct operation. Might or might
6120 not give better code on the average. Want to experiment? */
6122 /* See if there is a later reload that has a class different from our
6123 class that intersects our class or that requires fewer registers
6124 than our reload. If so, we must allocate a register to this
6125 reload now, since that reload might inherit a previous reload
6126 and take the only available register in our class. Don't do this
6127 for optional reloads since they will force all previous reloads
6128 to be allocated. Also don't do this for reloads that have been
6129 turned off. */
6131 for (i = j + 1; i < n_reloads; i++)
6133 int s = reload_order[i];
6135 if ((rld[s].in == 0 && rld[s].out == 0
6136 && ! rld[s].secondary_p)
6137 || rld[s].optional)
6138 continue;
6140 if ((rld[s].class != rld[r].class
6141 && reg_classes_intersect_p (rld[r].class,
6142 rld[s].class))
6143 || rld[s].nregs < rld[r].nregs)
6144 break;
6147 if (i == n_reloads)
6148 continue;
6150 allocate_reload_reg (chain, r, j == n_reloads - 1);
6151 #endif
6154 /* Now allocate reload registers for anything non-optional that
6155 didn't get one yet. */
6156 for (j = 0; j < n_reloads; j++)
6158 int r = reload_order[j];
6160 /* Ignore reloads that got marked inoperative. */
6161 if (rld[r].out == 0 && rld[r].in == 0 && ! rld[r].secondary_p)
6162 continue;
6164 /* Skip reloads that already have a register allocated or are
6165 optional. */
6166 if (rld[r].reg_rtx != 0 || rld[r].optional)
6167 continue;
6169 if (! allocate_reload_reg (chain, r, j == n_reloads - 1))
6170 break;
6173 /* If that loop got all the way, we have won. */
6174 if (j == n_reloads)
6176 win = 1;
6177 break;
6180 /* Loop around and try without any inheritance. */
6183 if (! win)
6185 /* First undo everything done by the failed attempt
6186 to allocate with inheritance. */
6187 choose_reload_regs_init (chain, save_reload_reg_rtx);
6189 /* Some sanity tests to verify that the reloads found in the first
6190 pass are identical to the ones we have now. */
6191 gcc_assert (chain->n_reloads == n_reloads);
6193 for (i = 0; i < n_reloads; i++)
6195 if (chain->rld[i].regno < 0 || chain->rld[i].reg_rtx != 0)
6196 continue;
6197 gcc_assert (chain->rld[i].when_needed == rld[i].when_needed);
6198 for (j = 0; j < n_spills; j++)
6199 if (spill_regs[j] == chain->rld[i].regno)
6200 if (! set_reload_reg (j, i))
6201 failed_reload (chain->insn, i);
6205 /* If we thought we could inherit a reload, because it seemed that
6206 nothing else wanted the same reload register earlier in the insn,
6207 verify that assumption, now that all reloads have been assigned.
6208 Likewise for reloads where reload_override_in has been set. */
6210 /* If doing expensive optimizations, do one preliminary pass that doesn't
6211 cancel any inheritance, but removes reloads that have been needed only
6212 for reloads that we know can be inherited. */
6213 for (pass = flag_expensive_optimizations; pass >= 0; pass--)
6215 for (j = 0; j < n_reloads; j++)
6217 int r = reload_order[j];
6218 rtx check_reg;
6219 if (reload_inherited[r] && rld[r].reg_rtx)
6220 check_reg = rld[r].reg_rtx;
6221 else if (reload_override_in[r]
6222 && (REG_P (reload_override_in[r])
6223 || GET_CODE (reload_override_in[r]) == SUBREG))
6224 check_reg = reload_override_in[r];
6225 else
6226 continue;
6227 if (! free_for_value_p (true_regnum (check_reg), rld[r].mode,
6228 rld[r].opnum, rld[r].when_needed, rld[r].in,
6229 (reload_inherited[r]
6230 ? rld[r].out : const0_rtx),
6231 r, 1))
6233 if (pass)
6234 continue;
6235 reload_inherited[r] = 0;
6236 reload_override_in[r] = 0;
6238 /* If we can inherit a RELOAD_FOR_INPUT, or can use a
6239 reload_override_in, then we do not need its related
6240 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
6241 likewise for other reload types.
6242 We handle this by removing a reload when its only replacement
6243 is mentioned in reload_in of the reload we are going to inherit.
6244 A special case is auto_inc expressions; even if the input is
6245 inherited, we still need the address for the output. We can
6246 recognize them because they have RELOAD_OUT set to RELOAD_IN.
6247 If we succeeded removing some reload and we are doing a preliminary
6248 pass just to remove such reloads, make another pass, since the
6249 removal of one reload might allow us to inherit another one. */
6250 else if (rld[r].in
6251 && rld[r].out != rld[r].in
6252 && remove_address_replacements (rld[r].in) && pass)
6253 pass = 2;
6257 /* Now that reload_override_in is known valid,
6258 actually override reload_in. */
6259 for (j = 0; j < n_reloads; j++)
6260 if (reload_override_in[j])
6261 rld[j].in = reload_override_in[j];
6263 /* If this reload won't be done because it has been canceled or is
6264 optional and not inherited, clear reload_reg_rtx so other
6265 routines (such as subst_reloads) don't get confused. */
6266 for (j = 0; j < n_reloads; j++)
6267 if (rld[j].reg_rtx != 0
6268 && ((rld[j].optional && ! reload_inherited[j])
6269 || (rld[j].in == 0 && rld[j].out == 0
6270 && ! rld[j].secondary_p)))
6272 int regno = true_regnum (rld[j].reg_rtx);
6274 if (spill_reg_order[regno] >= 0)
6275 clear_reload_reg_in_use (regno, rld[j].opnum,
6276 rld[j].when_needed, rld[j].mode);
6277 rld[j].reg_rtx = 0;
6278 reload_spill_index[j] = -1;
6281 /* Record which pseudos and which spill regs have output reloads. */
6282 for (j = 0; j < n_reloads; j++)
6284 int r = reload_order[j];
6286 i = reload_spill_index[r];
6288 /* I is nonneg if this reload uses a register.
6289 If rld[r].reg_rtx is 0, this is an optional reload
6290 that we opted to ignore. */
6291 if (rld[r].out_reg != 0 && REG_P (rld[r].out_reg)
6292 && rld[r].reg_rtx != 0)
6294 int nregno = REGNO (rld[r].out_reg);
6295 int nr = 1;
6297 if (nregno < FIRST_PSEUDO_REGISTER)
6298 nr = hard_regno_nregs[nregno][rld[r].mode];
6300 while (--nr >= 0)
6301 SET_REGNO_REG_SET (&reg_has_output_reload,
6302 nregno + nr);
6304 if (i >= 0)
6306 nr = hard_regno_nregs[i][rld[r].mode];
6307 while (--nr >= 0)
6308 SET_HARD_REG_BIT (reg_is_output_reload, i + nr);
6311 gcc_assert (rld[r].when_needed == RELOAD_OTHER
6312 || rld[r].when_needed == RELOAD_FOR_OUTPUT
6313 || rld[r].when_needed == RELOAD_FOR_INSN);
6318 /* Deallocate the reload register for reload R. This is called from
6319 remove_address_replacements. */
6321 void
6322 deallocate_reload_reg (int r)
6324 int regno;
6326 if (! rld[r].reg_rtx)
6327 return;
6328 regno = true_regnum (rld[r].reg_rtx);
6329 rld[r].reg_rtx = 0;
6330 if (spill_reg_order[regno] >= 0)
6331 clear_reload_reg_in_use (regno, rld[r].opnum, rld[r].when_needed,
6332 rld[r].mode);
6333 reload_spill_index[r] = -1;
6336 /* If SMALL_REGISTER_CLASSES is nonzero, we may not have merged two
6337 reloads of the same item for fear that we might not have enough reload
6338 registers. However, normally they will get the same reload register
6339 and hence actually need not be loaded twice.
6341 Here we check for the most common case of this phenomenon: when we have
6342 a number of reloads for the same object, each of which were allocated
6343 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
6344 reload, and is not modified in the insn itself. If we find such,
6345 merge all the reloads and set the resulting reload to RELOAD_OTHER.
6346 This will not increase the number of spill registers needed and will
6347 prevent redundant code. */
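/* An illustrative scenario (an addition for clarity): two RELOAD_FOR_INPUT
   reloads of the same pseudo that find_reloads kept separate, but that were
   both assigned the same reload register, are collapsed below into a single
   RELOAD_OTHER reload, so only one load insn is emitted for them.  */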
6349 static void
6350 merge_assigned_reloads (rtx insn)
6352 int i, j;
6354 /* Scan all the reloads, looking for ones that only load values,
6355 are not already RELOAD_OTHER, and whose reload_reg_rtx is
6356 assigned and not modified by INSN. */
6358 for (i = 0; i < n_reloads; i++)
6360 int conflicting_input = 0;
6361 int max_input_address_opnum = -1;
6362 int min_conflicting_input_opnum = MAX_RECOG_OPERANDS;
6364 if (rld[i].in == 0 || rld[i].when_needed == RELOAD_OTHER
6365 || rld[i].out != 0 || rld[i].reg_rtx == 0
6366 || reg_set_p (rld[i].reg_rtx, insn))
6367 continue;
6369 /* Look at all other reloads. Ensure that the only use of this
6370 reload_reg_rtx is in a reload that just loads the same value
6371 as we do. Note that any secondary reloads must be of the identical
6372 class since the values, modes, and result registers are the
6373 same, so we need not do anything with any secondary reloads. */
6375 for (j = 0; j < n_reloads; j++)
6377 if (i == j || rld[j].reg_rtx == 0
6378 || ! reg_overlap_mentioned_p (rld[j].reg_rtx,
6379 rld[i].reg_rtx))
6380 continue;
6382 if (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6383 && rld[j].opnum > max_input_address_opnum)
6384 max_input_address_opnum = rld[j].opnum;
6386 /* If the reload regs aren't exactly the same (e.g., different modes)
6387 or if the values are different, we can't merge this reload.
6388 But if it is an input reload, we might still merge
6389 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_OTHER_ADDRESS reloads. */
6391 if (! rtx_equal_p (rld[i].reg_rtx, rld[j].reg_rtx)
6392 || rld[j].out != 0 || rld[j].in == 0
6393 || ! rtx_equal_p (rld[i].in, rld[j].in))
6395 if (rld[j].when_needed != RELOAD_FOR_INPUT
6396 || ((rld[i].when_needed != RELOAD_FOR_INPUT_ADDRESS
6397 || rld[i].opnum > rld[j].opnum)
6398 && rld[i].when_needed != RELOAD_FOR_OTHER_ADDRESS))
6399 break;
6400 conflicting_input = 1;
6401 if (min_conflicting_input_opnum > rld[j].opnum)
6402 min_conflicting_input_opnum = rld[j].opnum;
6406 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
6407 we, in fact, found any matching reloads. */
6409 if (j == n_reloads
6410 && max_input_address_opnum <= min_conflicting_input_opnum)
6412 gcc_assert (rld[i].when_needed != RELOAD_FOR_OUTPUT);
6414 for (j = 0; j < n_reloads; j++)
6415 if (i != j && rld[j].reg_rtx != 0
6416 && rtx_equal_p (rld[i].reg_rtx, rld[j].reg_rtx)
6417 && (! conflicting_input
6418 || rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6419 || rld[j].when_needed == RELOAD_FOR_OTHER_ADDRESS))
6421 rld[i].when_needed = RELOAD_OTHER;
6422 rld[j].in = 0;
6423 reload_spill_index[j] = -1;
6424 transfer_replacements (i, j);
6427 /* If this is now RELOAD_OTHER, look for any reloads that
6428 load parts of this operand and set them to
6429 RELOAD_FOR_OTHER_ADDRESS if they were for inputs,
6430 RELOAD_OTHER for outputs. Note that this test is
6431 equivalent to looking for reloads for this operand
6432 number.
6434 We must take special care with RELOAD_FOR_OUTPUT_ADDRESS;
6435 it may share registers with a RELOAD_FOR_INPUT, so we can
6436 not change it to RELOAD_FOR_OTHER_ADDRESS. We should
6437 never need to, since we do not modify RELOAD_FOR_OUTPUT.
6439 It is possible that the RELOAD_FOR_OPERAND_ADDRESS
6440 instruction is assigned the same register as the earlier
6441 RELOAD_FOR_OTHER_ADDRESS instruction. Merging these two
6442 instructions will cause the RELOAD_FOR_OTHER_ADDRESS
6443 instruction to be deleted later on. */
6445 if (rld[i].when_needed == RELOAD_OTHER)
6446 for (j = 0; j < n_reloads; j++)
6447 if (rld[j].in != 0
6448 && rld[j].when_needed != RELOAD_OTHER
6449 && rld[j].when_needed != RELOAD_FOR_OTHER_ADDRESS
6450 && rld[j].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
6451 && rld[j].when_needed != RELOAD_FOR_OPERAND_ADDRESS
6452 && (! conflicting_input
6453 || rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6454 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
6455 && reg_overlap_mentioned_for_reload_p (rld[j].in,
6456 rld[i].in))
6458 int k;
6460 rld[j].when_needed
6461 = ((rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6462 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
6463 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER);
6465 /* Check to see if we accidentally converted two
6466 reloads that use the same reload register with
6467 different inputs to the same type. If so, the
6468 resulting code won't work. */
6469 if (rld[j].reg_rtx)
6470 for (k = 0; k < j; k++)
6471 gcc_assert (rld[k].in == 0 || rld[k].reg_rtx == 0
6472 || rld[k].when_needed != rld[j].when_needed
6473 || !rtx_equal_p (rld[k].reg_rtx,
6474 rld[j].reg_rtx)
6475 || rtx_equal_p (rld[k].in,
6476 rld[j].in));
6482 /* These arrays are filled by emit_reload_insns and its subroutines. */
6483 static rtx input_reload_insns[MAX_RECOG_OPERANDS];
6484 static rtx other_input_address_reload_insns = 0;
6485 static rtx other_input_reload_insns = 0;
6486 static rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
6487 static rtx inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6488 static rtx output_reload_insns[MAX_RECOG_OPERANDS];
6489 static rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
6490 static rtx outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6491 static rtx operand_reload_insns = 0;
6492 static rtx other_operand_reload_insns = 0;
6493 static rtx other_output_reload_insns[MAX_RECOG_OPERANDS];
6495 /* Values to be put in spill_reg_store are put here first. */
6496 static rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
6497 static HARD_REG_SET reg_reloaded_died;
6499 /* Check if *RELOAD_REG is suitable as an intermediate or scratch register
6500 of class NEW_CLASS with mode NEW_MODE. Alternatively, if ALT_RELOAD_REG is
6501 nonzero, check whether that is suitable. On success, change *RELOAD_REG to the
6502 adjusted register, and return true. Otherwise, return false. */
6503 static bool
6504 reload_adjust_reg_for_temp (rtx *reload_reg, rtx alt_reload_reg,
6505 enum reg_class new_class,
6506 enum machine_mode new_mode)
6509 rtx reg;
6511 for (reg = *reload_reg; reg; reg = alt_reload_reg, alt_reload_reg = 0)
6513 unsigned regno = REGNO (reg);
6515 if (!TEST_HARD_REG_BIT (reg_class_contents[(int) new_class], regno))
6516 continue;
6517 if (GET_MODE (reg) != new_mode)
6519 if (!HARD_REGNO_MODE_OK (regno, new_mode))
6520 continue;
6521 if (hard_regno_nregs[regno][new_mode]
6522 > hard_regno_nregs[regno][GET_MODE (reg)])
6523 continue;
6524 reg = reload_adjust_reg_for_mode (reg, new_mode);
6526 *reload_reg = reg;
6527 return true;
6529 return false;
6532 /* Check if *RELOAD_REG is suitable as a scratch register for the reload
6533 pattern with insn_code ICODE, or alternatively, if ALT_RELOAD_REG is
6534 nonzero, check whether that is suitable. On success, change *RELOAD_REG to the
6535 adjusted register, and return true. Otherwise, return false. */
6536 static bool
6537 reload_adjust_reg_for_icode (rtx *reload_reg, rtx alt_reload_reg,
6538 enum insn_code icode)
6541 enum reg_class new_class = scratch_reload_class (icode);
6542 enum machine_mode new_mode = insn_data[(int) icode].operand[2].mode;
6544 return reload_adjust_reg_for_temp (reload_reg, alt_reload_reg,
6545 new_class, new_mode);
6548 /* Generate insns to perform reload RL, which is for the insn in CHAIN and
6549 has the number J. OLD contains the value to be used as input. */
6551 static void
6552 emit_input_reload_insns (struct insn_chain *chain, struct reload *rl,
6553 rtx old, int j)
6555 rtx insn = chain->insn;
6556 rtx reloadreg = rl->reg_rtx;
6557 rtx oldequiv_reg = 0;
6558 rtx oldequiv = 0;
6559 int special = 0;
6560 enum machine_mode mode;
6561 rtx *where;
6563 /* Determine the mode to reload in.
6564 This is very tricky because we have three to choose from.
6565 There is the mode the insn operand wants (rl->inmode).
6566 There is the mode of the reload register RELOADREG.
6567 There is the intrinsic mode of the operand, which we could find
6568 by stripping some SUBREGs.
6569 It turns out that RELOADREG's mode is irrelevant:
6570 we can change that arbitrarily.
6572 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
6573 then the reload reg may not support QImode moves, so use SImode.
6574 If foo is in memory due to spilling a pseudo reg, this is safe,
6575 because the QImode value is in the least significant part of a
6576 slot big enough for a SImode. If foo is some other sort of
6577 memory reference, then it is impossible to reload this case,
6578 so previous passes had better make sure this never happens.
6580 Then consider a one-word union which has SImode and one of its
6581 members is a float, being fetched as (SUBREG:SF union:SI).
6582 We must fetch that as SFmode because we could be loading into
6583 a float-only register. In this case OLD's mode is correct.
6585 Consider an immediate integer: it has VOIDmode. Here we need
6586 to get a mode from something else.
6588 In some cases, there is a fourth mode, the operand's
6589 containing mode. If the insn specifies a containing mode for
6590 this operand, it overrides all others.
6592 I am not sure whether the algorithm here is always right,
6593 but it does the right things in those cases. */
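   /* Concretely (illustration only): for (subreg:SF (reg:SI N) 0) the SFmode
      of OLD is used, per the one-word-union example above; for a VOIDmode
      constant such as (const_int 5) the code below falls back to
      rl->inmode.  */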
6595 mode = GET_MODE (old);
6596 if (mode == VOIDmode)
6597 mode = rl->inmode;
6599 /* delete_output_reload is only invoked properly if old contains
6600 the original pseudo register. Since this is replaced with a
6601 hard reg when RELOAD_OVERRIDE_IN is set, see if we can
6602 find the pseudo in RELOAD_IN_REG. */
6603 if (reload_override_in[j]
6604 && REG_P (rl->in_reg))
6606 oldequiv = old;
6607 old = rl->in_reg;
6609 if (oldequiv == 0)
6610 oldequiv = old;
6611 else if (REG_P (oldequiv))
6612 oldequiv_reg = oldequiv;
6613 else if (GET_CODE (oldequiv) == SUBREG)
6614 oldequiv_reg = SUBREG_REG (oldequiv);
6616 /* If we are reloading from a register that was recently stored in
6617 with an output-reload, see if we can prove there was
6618 actually no need to store the old value in it. */
6620 if (optimize && REG_P (oldequiv)
6621 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6622 && spill_reg_store[REGNO (oldequiv)]
6623 && REG_P (old)
6624 && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (oldequiv)])
6625 || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
6626 rl->out_reg)))
6627 delete_output_reload (insn, j, REGNO (oldequiv));
6629 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
6630 then load RELOADREG from OLDEQUIV. Note that we cannot use
6631 gen_lowpart_common since it can do the wrong thing when
6632 RELOADREG has a multi-word mode. Note that RELOADREG
6633 must always be a REG here. */
6635 if (GET_MODE (reloadreg) != mode)
6636 reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);
6637 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
6638 oldequiv = SUBREG_REG (oldequiv);
6639 if (GET_MODE (oldequiv) != VOIDmode
6640 && mode != GET_MODE (oldequiv))
6641 oldequiv = gen_lowpart_SUBREG (mode, oldequiv);
6643 /* Switch to the right place to emit the reload insns. */
6644 switch (rl->when_needed)
6646 case RELOAD_OTHER:
6647 where = &other_input_reload_insns;
6648 break;
6649 case RELOAD_FOR_INPUT:
6650 where = &input_reload_insns[rl->opnum];
6651 break;
6652 case RELOAD_FOR_INPUT_ADDRESS:
6653 where = &input_address_reload_insns[rl->opnum];
6654 break;
6655 case RELOAD_FOR_INPADDR_ADDRESS:
6656 where = &inpaddr_address_reload_insns[rl->opnum];
6657 break;
6658 case RELOAD_FOR_OUTPUT_ADDRESS:
6659 where = &output_address_reload_insns[rl->opnum];
6660 break;
6661 case RELOAD_FOR_OUTADDR_ADDRESS:
6662 where = &outaddr_address_reload_insns[rl->opnum];
6663 break;
6664 case RELOAD_FOR_OPERAND_ADDRESS:
6665 where = &operand_reload_insns;
6666 break;
6667 case RELOAD_FOR_OPADDR_ADDR:
6668 where = &other_operand_reload_insns;
6669 break;
6670 case RELOAD_FOR_OTHER_ADDRESS:
6671 where = &other_input_address_reload_insns;
6672 break;
6673 default:
6674 gcc_unreachable ();
6677 push_to_sequence (*where);
6679 /* Auto-increment addresses must be reloaded in a special way. */
6680 if (rl->out && ! rl->out_reg)
6682 /* We are not going to bother supporting the case where an
6683 incremented register can't be copied directly from
6684 OLDEQUIV since this seems highly unlikely. */
6685 gcc_assert (rl->secondary_in_reload < 0);
6687 if (reload_inherited[j])
6688 oldequiv = reloadreg;
6690 old = XEXP (rl->in_reg, 0);
6692 if (optimize && REG_P (oldequiv)
6693 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6694 && spill_reg_store[REGNO (oldequiv)]
6695 && REG_P (old)
6696 && (dead_or_set_p (insn,
6697 spill_reg_stored_to[REGNO (oldequiv)])
6698 || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
6699 old)))
6700 delete_output_reload (insn, j, REGNO (oldequiv));
6702 /* Prevent normal processing of this reload. */
6703 special = 1;
6704 /* Output a special code sequence for this case. */
6705 new_spill_reg_store[REGNO (reloadreg)]
6706 = inc_for_reload (reloadreg, oldequiv, rl->out,
6707 rl->inc);
6710 /* If we are reloading a pseudo-register that was set by the previous
6711 insn, see if we can get rid of that pseudo-register entirely
6712 by redirecting the previous insn into our reload register. */
6714 else if (optimize && REG_P (old)
6715 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6716 && dead_or_set_p (insn, old)
6717 /* This is unsafe if some other reload
6718 uses the same reg first. */
6719 && ! conflicts_with_override (reloadreg)
6720 && free_for_value_p (REGNO (reloadreg), rl->mode, rl->opnum,
6721 rl->when_needed, old, rl->out, j, 0))
6723 rtx temp = PREV_INSN (insn);
6724 while (temp && NOTE_P (temp))
6725 temp = PREV_INSN (temp);
6726 if (temp
6727 && NONJUMP_INSN_P (temp)
6728 && GET_CODE (PATTERN (temp)) == SET
6729 && SET_DEST (PATTERN (temp)) == old
6730 /* Make sure we can access insn_operand_constraint. */
6731 && asm_noperands (PATTERN (temp)) < 0
6732 /* This is unsafe if the operand occurs more than once in the
6733 current insn. Perhaps some occurrences aren't reloaded. */
6734 && count_occurrences (PATTERN (insn), old, 0) == 1)
6736 rtx old = SET_DEST (PATTERN (temp));
6737 /* Store into the reload register instead of the pseudo. */
6738 SET_DEST (PATTERN (temp)) = reloadreg;
6740 /* Verify that resulting insn is valid. */
6741 extract_insn (temp);
6742 if (constrain_operands (1))
6744 /* If the previous insn is an output reload, the source is
6745 a reload register, and its spill_reg_store entry will
6746 contain the previous destination. This is now
6747 invalid. */
6748 if (REG_P (SET_SRC (PATTERN (temp)))
6749 && REGNO (SET_SRC (PATTERN (temp))) < FIRST_PSEUDO_REGISTER)
6751 spill_reg_store[REGNO (SET_SRC (PATTERN (temp)))] = 0;
6752 spill_reg_stored_to[REGNO (SET_SRC (PATTERN (temp)))] = 0;
6755 /* If these are the only uses of the pseudo reg,
6756 pretend for GDB it lives in the reload reg we used. */
6757 if (REG_N_DEATHS (REGNO (old)) == 1
6758 && REG_N_SETS (REGNO (old)) == 1)
6760 reg_renumber[REGNO (old)] = REGNO (rl->reg_rtx);
6761 alter_reg (REGNO (old), -1);
6763 special = 1;
6765 else
6767 SET_DEST (PATTERN (temp)) = old;
6772 /* We can't do that, so output an insn to load RELOADREG. */
6774 /* If we have a secondary reload, pick up the secondary register
6775 and icode, if any. If OLDEQUIV and OLD are different or
6776 if this is an in-out reload, recompute whether or not we
6777 still need a secondary register and what the icode should
6778 be. If we still need a secondary register and the class or
6779 icode is different, go back to reloading from OLD if using
6780 OLDEQUIV means that we got the wrong type of register. We
6781 cannot have different class or icode due to an in-out reload
6782 because we don't make such reloads when both the input and
6783 output need secondary reload registers. */
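  /* Orientation note for the branches below: SECOND_RELOAD_REG may end up
     used either as a scratch passed to the reload pattern (ICODE) or as an
     intermediate register loaded with gen_reload; a third register, when
     present, plays the same two roles one level further down.  */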
6785 if (! special && rl->secondary_in_reload >= 0)
6787 rtx second_reload_reg = 0;
6788 rtx third_reload_reg = 0;
6789 int secondary_reload = rl->secondary_in_reload;
6790 rtx real_oldequiv = oldequiv;
6791 rtx real_old = old;
6792 rtx tmp;
6793 enum insn_code icode;
6794 enum insn_code tertiary_icode = CODE_FOR_nothing;
6796 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
6797 and similarly for OLD.
6798 See comments in get_secondary_reload in reload.c. */
6799 /* If it is a pseudo that cannot be replaced with its
6800 equivalent MEM, we must fall back to reload_in, which
6801 will have all the necessary substitutions registered.
6802 Likewise for a pseudo that can't be replaced with its
6803 equivalent constant.
6805 Take extra care for subregs of such pseudos. Note that
6806 we cannot use reg_equiv_mem in this case because it is
6807 not in the right mode. */
6809 tmp = oldequiv;
6810 if (GET_CODE (tmp) == SUBREG)
6811 tmp = SUBREG_REG (tmp);
6812 if (REG_P (tmp)
6813 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
6814 && (reg_equiv_memory_loc[REGNO (tmp)] != 0
6815 || reg_equiv_constant[REGNO (tmp)] != 0))
6817 if (! reg_equiv_mem[REGNO (tmp)]
6818 || num_not_at_initial_offset
6819 || GET_CODE (oldequiv) == SUBREG)
6820 real_oldequiv = rl->in;
6821 else
6822 real_oldequiv = reg_equiv_mem[REGNO (tmp)];
6825 tmp = old;
6826 if (GET_CODE (tmp) == SUBREG)
6827 tmp = SUBREG_REG (tmp);
6828 if (REG_P (tmp)
6829 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
6830 && (reg_equiv_memory_loc[REGNO (tmp)] != 0
6831 || reg_equiv_constant[REGNO (tmp)] != 0))
6833 if (! reg_equiv_mem[REGNO (tmp)]
6834 || num_not_at_initial_offset
6835 || GET_CODE (old) == SUBREG)
6836 real_old = rl->in;
6837 else
6838 real_old = reg_equiv_mem[REGNO (tmp)];
6841 second_reload_reg = rld[secondary_reload].reg_rtx;
6842 if (rld[secondary_reload].secondary_in_reload >= 0)
6844 int tertiary_reload = rld[secondary_reload].secondary_in_reload;
6846 third_reload_reg = rld[tertiary_reload].reg_rtx;
6847 tertiary_icode = rld[secondary_reload].secondary_in_icode;
6848 /* We'd have to add more code for quaternary reloads. */
6849 gcc_assert (rld[tertiary_reload].secondary_in_reload < 0);
6851 icode = rl->secondary_in_icode;
6853 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
6854 || (rl->in != 0 && rl->out != 0))
6856 secondary_reload_info sri, sri2;
6857 enum reg_class new_class, new_t_class;
6859 sri.icode = CODE_FOR_nothing;
6860 sri.prev_sri = NULL;
6861 new_class = targetm.secondary_reload (1, real_oldequiv, rl->class,
6862 mode, &sri);
6864 if (new_class == NO_REGS && sri.icode == CODE_FOR_nothing)
6865 second_reload_reg = 0;
6866 else if (new_class == NO_REGS)
6868 if (reload_adjust_reg_for_icode (&second_reload_reg,
6869 third_reload_reg, sri.icode))
6870 icode = sri.icode, third_reload_reg = 0;
6871 else
6872 oldequiv = old, real_oldequiv = real_old;
6874 else if (sri.icode != CODE_FOR_nothing)
6875 /* We currently lack a way to express this in reloads. */
6876 gcc_unreachable ();
6877 else
6879 sri2.icode = CODE_FOR_nothing;
6880 sri2.prev_sri = &sri;
6881 new_t_class = targetm.secondary_reload (1, real_oldequiv,
6882 new_class, mode, &sri);
6883 if (new_t_class == NO_REGS && sri2.icode == CODE_FOR_nothing)
6885 if (reload_adjust_reg_for_temp (&second_reload_reg,
6886 third_reload_reg,
6887 new_class, mode))
6888 third_reload_reg = 0, tertiary_icode = sri2.icode;
6889 else
6890 oldequiv = old, real_oldequiv = real_old;
6892 else if (new_t_class == NO_REGS && sri2.icode != CODE_FOR_nothing)
6894 rtx intermediate = second_reload_reg;
6896 if (reload_adjust_reg_for_temp (&intermediate, NULL,
6897 new_class, mode)
6898 && reload_adjust_reg_for_icode (&third_reload_reg, NULL,
6899 sri2.icode))
6901 second_reload_reg = intermediate;
6902 tertiary_icode = sri2.icode;
6904 else
6905 oldequiv = old, real_oldequiv = real_old;
6907 else if (new_t_class != NO_REGS && sri2.icode == CODE_FOR_nothing)
6909 rtx intermediate = second_reload_reg;
6911 if (reload_adjust_reg_for_temp (&intermediate, NULL,
6912 new_class, mode)
6913 && reload_adjust_reg_for_temp (&third_reload_reg, NULL,
6914 new_t_class, mode))
6916 second_reload_reg = intermediate;
6917 tertiary_icode = sri2.icode;
6919 else
6920 oldequiv = old, real_oldequiv = real_old;
6922 else
6923 /* This could be handled more intelligently too. */
6924 oldequiv = old, real_oldequiv = real_old;
6928 /* If we still need a secondary reload register, check
6929 to see if it is being used as a scratch or intermediate
6930 register and generate code appropriately. If we need
6931 a scratch register, use REAL_OLDEQUIV since the form of
6932 the insn may depend on the actual address if it is
6933 a MEM. */
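/* Illustrative sketch of the two shapes handled below: when the target
   supplies a reload pattern (ICODE != CODE_FOR_nothing), a single insn of
   the form GEN_FCN (icode) (RELOADREG, REAL_OLDEQUIV, SECOND_RELOAD_REG)
   performs the load and SECOND_RELOAD_REG serves purely as a scratch.
   Otherwise SECOND_RELOAD_REG is a true intermediate: we first load it
   (possibly through a tertiary reload register or pattern) and then point
   OLDEQUIV at it, so the final copy into RELOADREG happens later.  */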
6935 if (second_reload_reg)
6937 if (icode != CODE_FOR_nothing)
6939 /* We'd have to add extra code to handle this case. */
6940 gcc_assert (!third_reload_reg);
6942 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
6943 second_reload_reg));
6944 special = 1;
6946 else
6948 /* See if we need a scratch register to load the
6949 intermediate register (a tertiary reload). */
6950 if (tertiary_icode != CODE_FOR_nothing)
6952 emit_insn ((GEN_FCN (tertiary_icode)
6953 (second_reload_reg, real_oldequiv,
6954 third_reload_reg)));
6956 else if (third_reload_reg)
6958 gen_reload (third_reload_reg, real_oldequiv,
6959 rl->opnum,
6960 rl->when_needed);
6961 gen_reload (second_reload_reg, third_reload_reg,
6962 rl->opnum,
6963 rl->when_needed);
6965 else
6966 gen_reload (second_reload_reg, real_oldequiv,
6967 rl->opnum,
6968 rl->when_needed);
6970 oldequiv = second_reload_reg;
6975 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
6977 rtx real_oldequiv = oldequiv;
6979 if ((REG_P (oldequiv)
6980 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
6981 && (reg_equiv_memory_loc[REGNO (oldequiv)] != 0
6982 || reg_equiv_constant[REGNO (oldequiv)] != 0))
6983 || (GET_CODE (oldequiv) == SUBREG
6984 && REG_P (SUBREG_REG (oldequiv))
6985 && (REGNO (SUBREG_REG (oldequiv))
6986 >= FIRST_PSEUDO_REGISTER)
6987 && ((reg_equiv_memory_loc
6988 [REGNO (SUBREG_REG (oldequiv))] != 0)
6989 || (reg_equiv_constant
6990 [REGNO (SUBREG_REG (oldequiv))] != 0)))
6991 || (CONSTANT_P (oldequiv)
6992 && (PREFERRED_RELOAD_CLASS (oldequiv,
6993 REGNO_REG_CLASS (REGNO (reloadreg)))
6994 == NO_REGS)))
6995 real_oldequiv = rl->in;
6996 gen_reload (reloadreg, real_oldequiv, rl->opnum,
6997 rl->when_needed);
7000 if (flag_non_call_exceptions)
7001 copy_eh_notes (insn, get_insns ());
7003 /* End this sequence. */
7004 *where = get_insns ();
7005 end_sequence ();
7007 /* Update reload_override_in so that delete_address_reloads_1
7008 can see the actual register usage. */
7009 if (oldequiv_reg)
7010 reload_override_in[j] = oldequiv;
7013 /* Generate insns for the output reload RL, which is for the insn described
7014 by CHAIN and has the number J. */
7015 static void
7016 emit_output_reload_insns (struct insn_chain *chain, struct reload *rl,
7017 int j)
7019 rtx reloadreg = rl->reg_rtx;
7020 rtx insn = chain->insn;
7021 int special = 0;
7022 rtx old = rl->out;
7023 enum machine_mode mode = GET_MODE (old);
7024 rtx p;
7026 if (rl->when_needed == RELOAD_OTHER)
7027 start_sequence ();
7028 else
7029 push_to_sequence (output_reload_insns[rl->opnum]);
7031 /* Determine the mode to reload in.
7032 See comments above (for input reloading). */
7034 if (mode == VOIDmode)
7036 /* VOIDmode should never happen for an output. */
7037 if (asm_noperands (PATTERN (insn)) < 0)
7038 /* It's the compiler's fault. */
7039 fatal_insn ("VOIDmode on an output", insn);
7040 error_for_asm (insn, "output operand is constant in %<asm%>");
7041 /* Prevent crash--use something we know is valid. */
7042 mode = word_mode;
7043 old = gen_rtx_REG (mode, REGNO (reloadreg));
7046 if (GET_MODE (reloadreg) != mode)
7047 reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);
7049 /* If we need two reload regs, set RELOADREG to the intermediate
7050 one, since it will be stored into OLD. We might need a secondary
7051 register only for an input reload, so check again here. */
7053 if (rl->secondary_out_reload >= 0)
7055 rtx real_old = old;
7056 int secondary_reload = rl->secondary_out_reload;
7057 int tertiary_reload = rld[secondary_reload].secondary_out_reload;
7059 if (REG_P (old) && REGNO (old) >= FIRST_PSEUDO_REGISTER
7060 && reg_equiv_mem[REGNO (old)] != 0)
7061 real_old = reg_equiv_mem[REGNO (old)];
7063 if (secondary_reload_class (0, rl->class, mode, real_old) != NO_REGS)
7065 rtx second_reloadreg = reloadreg;
7066 reloadreg = rld[secondary_reload].reg_rtx;
7068 /* See if RELOADREG is to be used as a scratch register
7069 or as an intermediate register. */
7070 if (rl->secondary_out_icode != CODE_FOR_nothing)
7072 /* We'd have to add extra code to handle this case. */
7073 gcc_assert (tertiary_reload < 0);
7075 emit_insn ((GEN_FCN (rl->secondary_out_icode)
7076 (real_old, second_reloadreg, reloadreg)));
7077 special = 1;
7079 else
7081 /* See if we need both a scratch and intermediate reload
7082 register. */
7084 enum insn_code tertiary_icode
7085 = rld[secondary_reload].secondary_out_icode;
7087 /* We'd have to add more code for quaternary reloads. */
7088 gcc_assert (tertiary_reload < 0
7089 || rld[tertiary_reload].secondary_out_reload < 0);
7091 if (GET_MODE (reloadreg) != mode)
7092 reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);
7094 if (tertiary_icode != CODE_FOR_nothing)
7096 rtx third_reloadreg = rld[tertiary_reload].reg_rtx;
7097 rtx tem;
7099 /* Copy primary reload reg to secondary reload reg
7100 (note that these have been swapped above), then copy
7101 secondary reload reg to OLD using our insn. */
7103 /* If REAL_OLD is a paradoxical SUBREG, remove it
7104 and try to put the opposite SUBREG on
7105 RELOADREG. */
7106 if (GET_CODE (real_old) == SUBREG
7107 && (GET_MODE_SIZE (GET_MODE (real_old))
7108 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
7109 && 0 != (tem = gen_lowpart_common
7110 (GET_MODE (SUBREG_REG (real_old)),
7111 reloadreg)))
7112 real_old = SUBREG_REG (real_old), reloadreg = tem;
7114 gen_reload (reloadreg, second_reloadreg,
7115 rl->opnum, rl->when_needed);
7116 emit_insn ((GEN_FCN (tertiary_icode)
7117 (real_old, reloadreg, third_reloadreg)));
7118 special = 1;
7121 else
7123 /* Copy between the reload regs here and then to
7124 OUT later. */
7126 gen_reload (reloadreg, second_reloadreg,
7127 rl->opnum, rl->when_needed);
7128 if (tertiary_reload >= 0)
7130 rtx third_reloadreg = rld[tertiary_reload].reg_rtx;
7132 gen_reload (third_reloadreg, reloadreg,
7133 rl->opnum, rl->when_needed);
7134 reloadreg = third_reloadreg;
7141 /* Output the last reload insn. */
7142 if (! special)
7144 rtx set;
7146 /* Don't output the last reload if OLD is not the dest of
7147 INSN and is in the src and is clobbered by INSN. */
7148 if (! flag_expensive_optimizations
7149 || !REG_P (old)
7150 || !(set = single_set (insn))
7151 || rtx_equal_p (old, SET_DEST (set))
7152 || !reg_mentioned_p (old, SET_SRC (set))
7153 || !((REGNO (old) < FIRST_PSEUDO_REGISTER)
7154 && regno_clobbered_p (REGNO (old), insn, rl->mode, 0)))
7155 gen_reload (old, reloadreg, rl->opnum,
7156 rl->when_needed);
7159 /* Look at all insns we emitted, just to be safe. */
7160 for (p = get_insns (); p; p = NEXT_INSN (p))
7161 if (INSN_P (p))
7163 rtx pat = PATTERN (p);
7165 /* If this output reload doesn't come from a spill reg,
7166 clear any memory of reloaded copies of the pseudo reg.
7167 If this output reload comes from a spill reg,
7168 reg_has_output_reload will make this do nothing. */
7169 note_stores (pat, forget_old_reloads_1, NULL);
7171 if (reg_mentioned_p (rl->reg_rtx, pat))
7173 rtx set = single_set (insn);
7174 if (reload_spill_index[j] < 0
7175 && set
7176 && SET_SRC (set) == rl->reg_rtx)
7178 int src = REGNO (SET_SRC (set));
7180 reload_spill_index[j] = src;
7181 SET_HARD_REG_BIT (reg_is_output_reload, src);
7182 if (find_regno_note (insn, REG_DEAD, src))
7183 SET_HARD_REG_BIT (reg_reloaded_died, src);
7185 if (REGNO (rl->reg_rtx) < FIRST_PSEUDO_REGISTER)
7187 int s = rl->secondary_out_reload;
7188 set = single_set (p);
7189 /* If this reload copies only to the secondary reload
7190 register, the secondary reload does the actual
7191 store. */
7192 if (s >= 0 && set == NULL_RTX)
7193 /* We can't tell what function the secondary reload
7194 has and where the actual store to the pseudo is
7195 made; leave new_spill_reg_store alone. */
7197 else if (s >= 0
7198 && SET_SRC (set) == rl->reg_rtx
7199 && SET_DEST (set) == rld[s].reg_rtx)
7201 /* Usually the next instruction will be the
7202 secondary reload insn; if we can confirm
7203 that it is, setting new_spill_reg_store to
7204 that insn will allow an extra optimization. */
7205 rtx s_reg = rld[s].reg_rtx;
7206 rtx next = NEXT_INSN (p);
7207 rld[s].out = rl->out;
7208 rld[s].out_reg = rl->out_reg;
7209 set = single_set (next);
7210 if (set && SET_SRC (set) == s_reg
7211 && ! new_spill_reg_store[REGNO (s_reg)])
7213 SET_HARD_REG_BIT (reg_is_output_reload,
7214 REGNO (s_reg));
7215 new_spill_reg_store[REGNO (s_reg)] = next;
7218 else
7219 new_spill_reg_store[REGNO (rl->reg_rtx)] = p;
7224 if (rl->when_needed == RELOAD_OTHER)
7226 emit_insn (other_output_reload_insns[rl->opnum]);
7227 other_output_reload_insns[rl->opnum] = get_insns ();
7229 else
7230 output_reload_insns[rl->opnum] = get_insns ();
7232 if (flag_non_call_exceptions)
7233 copy_eh_notes (insn, get_insns ());
7235 end_sequence ();
7238 /* Do input reloading for reload RL, which is for the insn described by CHAIN
7239 and has the number J. */
7240 static void
7241 do_input_reload (struct insn_chain *chain, struct reload *rl, int j)
7243 rtx insn = chain->insn;
7244 rtx old = (rl->in && MEM_P (rl->in)
7245 ? rl->in_reg : rl->in);
7247 if (old != 0
7248 /* AUTO_INC reloads need to be handled even if inherited. We got an
7249 AUTO_INC reload if reload_out is set but reload_out_reg isn't. */
7250 && (! reload_inherited[j] || (rl->out && ! rl->out_reg))
7251 && ! rtx_equal_p (rl->reg_rtx, old)
7252 && rl->reg_rtx != 0)
7253 emit_input_reload_insns (chain, rld + j, old, j);
7255 /* When inheriting a wider reload, we have a MEM in rl->in,
7256 e.g. inheriting a SImode output reload for
7257 (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10))) */
7258 if (optimize && reload_inherited[j] && rl->in
7259 && MEM_P (rl->in)
7260 && MEM_P (rl->in_reg)
7261 && reload_spill_index[j] >= 0
7262 && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
7263 rl->in = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];
7265 /* If we are reloading a register that was recently stored in with an
7266 output-reload, see if we can prove there was
7267 actually no need to store the old value in it. */
7269 if (optimize
7270 && (reload_inherited[j] || reload_override_in[j])
7271 && rl->reg_rtx
7272 && REG_P (rl->reg_rtx)
7273 && spill_reg_store[REGNO (rl->reg_rtx)] != 0
7274 #if 0
7275 /* There doesn't seem to be any reason to restrict this to pseudos
7276 and doing so loses in the case where we are copying from a
7277 register of the wrong class. */
7278 && (REGNO (spill_reg_stored_to[REGNO (rl->reg_rtx)])
7279 >= FIRST_PSEUDO_REGISTER)
7280 #endif
7281 /* The insn might have already some references to stackslots
7282 replaced by MEMs, while reload_out_reg still names the
7283 original pseudo. */
7284 && (dead_or_set_p (insn,
7285 spill_reg_stored_to[REGNO (rl->reg_rtx)])
7286 || rtx_equal_p (spill_reg_stored_to[REGNO (rl->reg_rtx)],
7287 rl->out_reg)))
7288 delete_output_reload (insn, j, REGNO (rl->reg_rtx));
7291 /* Do output reloading for reload RL, which is for the insn described by
7292 CHAIN and has the number J.
7293 ??? At some point we need to support handling output reloads of
7294 JUMP_INSNs or insns that set cc0. */
7295 static void
7296 do_output_reload (struct insn_chain *chain, struct reload *rl, int j)
7298 rtx note, old;
7299 rtx insn = chain->insn;
7300 /* If this is an output reload that stores something that is
7301 not loaded in this same reload, see if we can eliminate a previous
7302 store. */
7303 rtx pseudo = rl->out_reg;
7305 if (pseudo
7306 && optimize
7307 && REG_P (pseudo)
7308 && ! rtx_equal_p (rl->in_reg, pseudo)
7309 && REGNO (pseudo) >= FIRST_PSEUDO_REGISTER
7310 && reg_last_reload_reg[REGNO (pseudo)])
7312 int pseudo_no = REGNO (pseudo);
7313 int last_regno = REGNO (reg_last_reload_reg[pseudo_no]);
7315 /* We don't need to test full validity of last_regno for
7316 inherit here; we only want to know if the store actually
7317 matches the pseudo. */
7318 if (TEST_HARD_REG_BIT (reg_reloaded_valid, last_regno)
7319 && reg_reloaded_contents[last_regno] == pseudo_no
7320 && spill_reg_store[last_regno]
7321 && rtx_equal_p (pseudo, spill_reg_stored_to[last_regno]))
7322 delete_output_reload (insn, j, last_regno);
7325 old = rl->out_reg;
7326 if (old == 0
7327 || rl->reg_rtx == old
7328 || rl->reg_rtx == 0)
7329 return;
7331 /* An output operand that dies right away does need a reload,
7332 but need not be copied from it. Show the new location in the
7333 REG_UNUSED note. */
7334 if ((REG_P (old) || GET_CODE (old) == SCRATCH)
7335 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
7337 XEXP (note, 0) = rl->reg_rtx;
7338 return;
7340 /* Likewise for a SUBREG of an operand that dies. */
7341 else if (GET_CODE (old) == SUBREG
7342 && REG_P (SUBREG_REG (old))
7343 && 0 != (note = find_reg_note (insn, REG_UNUSED,
7344 SUBREG_REG (old))))
7346 XEXP (note, 0) = gen_lowpart_common (GET_MODE (old),
7347 rl->reg_rtx);
7348 return;
7350 else if (GET_CODE (old) == SCRATCH)
7351 /* If we aren't optimizing, there won't be a REG_UNUSED note,
7352 but we don't want to make an output reload. */
7353 return;
7355 /* If this is a JUMP_INSN, we can't support output reloads yet. */
7356 gcc_assert (NONJUMP_INSN_P (insn));
7358 emit_output_reload_insns (chain, rld + j, j);
7361 /* Reload number R reloads from or to a group of hard registers starting at
7362 register REGNO. Return true if it can be treated for inheritance purposes
7363 like a group of reloads, each one reloading a single hard register.
7364 The caller has already checked that the spill register and REGNO use
7365 the same number of registers to store the reload value. */
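/* Hypothetical example (register numbers invented): a DImode reload whose
   reload register is the hard-register pair {8,9} and whose value lives in
   the pair {4,5}.  If neither pair trips CANNOT_CHANGE_MODE_CLASS for the
   modes involved, the reload can be inherited piecemeal: register 8 is
   recorded as holding the contents of register 4 and register 9 those of
   register 5, so a later insn needing only one half can inherit it.  */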
7367 static bool
7368 inherit_piecemeal_p (int r ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED)
7370 #ifdef CANNOT_CHANGE_MODE_CLASS
7371 return (!REG_CANNOT_CHANGE_MODE_P (reload_spill_index[r],
7372 GET_MODE (rld[r].reg_rtx),
7373 reg_raw_mode[reload_spill_index[r]])
7374 && !REG_CANNOT_CHANGE_MODE_P (regno,
7375 GET_MODE (rld[r].reg_rtx),
7376 reg_raw_mode[regno]));
7377 #else
7378 return true;
7379 #endif
7382 /* Output insns to reload values in and out of the chosen reload regs. */
7384 static void
7385 emit_reload_insns (struct insn_chain *chain)
7387 rtx insn = chain->insn;
7389 int j;
7391 CLEAR_HARD_REG_SET (reg_reloaded_died);
7393 for (j = 0; j < reload_n_operands; j++)
7394 input_reload_insns[j] = input_address_reload_insns[j]
7395 = inpaddr_address_reload_insns[j]
7396 = output_reload_insns[j] = output_address_reload_insns[j]
7397 = outaddr_address_reload_insns[j]
7398 = other_output_reload_insns[j] = 0;
7399 other_input_address_reload_insns = 0;
7400 other_input_reload_insns = 0;
7401 operand_reload_insns = 0;
7402 other_operand_reload_insns = 0;
7404 /* Dump reloads into the dump file. */
7405 if (dump_file)
7407 fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
7408 debug_reload_to_stream (dump_file);
7411 /* Now output the instructions to copy the data into and out of the
7412 reload registers. Do these in the order that the reloads were reported,
7413 since reloads of base and index registers precede reloads of operands
7414 and the operands may need the base and index registers reloaded. */
7416 for (j = 0; j < n_reloads; j++)
7418 if (rld[j].reg_rtx
7419 && REGNO (rld[j].reg_rtx) < FIRST_PSEUDO_REGISTER)
7420 new_spill_reg_store[REGNO (rld[j].reg_rtx)] = 0;
7422 do_input_reload (chain, rld + j, j);
7423 do_output_reload (chain, rld + j, j);
7426 /* Now write all the insns we made for reloads in the order expected by
7427 the allocation functions. Prior to the insn being reloaded, we write
7428 the following reloads:
7430 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
7432 RELOAD_OTHER reloads.
7434 For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
7435 by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
7436 RELOAD_FOR_INPUT reload for the operand.
7438 RELOAD_FOR_OPADDR_ADDRS reloads.
7440 RELOAD_FOR_OPERAND_ADDRESS reloads.
7442 After the insn being reloaded, we write the following:
7444 For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
7445 by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
7446 RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
7447 reloads for the operand. The RELOAD_OTHER output reloads are
7448 output in descending order by reload number. */
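/* For illustration, with a single operand needing both an input and an
   output reload, the emitted stream around INSN looks like:

       ... RELOAD_FOR_OTHER_ADDRESS and RELOAD_OTHER input insns ...
       ... RELOAD_FOR_INPADDR_ADDRESS / RELOAD_FOR_INPUT_ADDRESS insns ...
       ... RELOAD_FOR_INPUT insn for the operand ...
       ... RELOAD_FOR_OPADDR_ADDRS / RELOAD_FOR_OPERAND_ADDRESS insns ...
       INSN
       ... RELOAD_FOR_OUTADDR_ADDRESS / RELOAD_FOR_OUTPUT_ADDRESS insns ...
       ... RELOAD_FOR_OUTPUT insn, then RELOAD_OTHER output insns ...  */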
7450 emit_insn_before (other_input_address_reload_insns, insn);
7451 emit_insn_before (other_input_reload_insns, insn);
7453 for (j = 0; j < reload_n_operands; j++)
7455 emit_insn_before (inpaddr_address_reload_insns[j], insn);
7456 emit_insn_before (input_address_reload_insns[j], insn);
7457 emit_insn_before (input_reload_insns[j], insn);
7460 emit_insn_before (other_operand_reload_insns, insn);
7461 emit_insn_before (operand_reload_insns, insn);
7463 for (j = 0; j < reload_n_operands; j++)
7465 rtx x = emit_insn_after (outaddr_address_reload_insns[j], insn);
7466 x = emit_insn_after (output_address_reload_insns[j], x);
7467 x = emit_insn_after (output_reload_insns[j], x);
7468 emit_insn_after (other_output_reload_insns[j], x);
7471 /* For all the spill regs newly reloaded in this instruction,
7472 record what they were reloaded from, so subsequent instructions
7473 can inherit the reloads.
7475 Update spill_reg_store for the reloads of this insn.
7476 Copy the elements that were updated in the loop above. */
7478 for (j = 0; j < n_reloads; j++)
7480 int r = reload_order[j];
7481 int i = reload_spill_index[r];
7483 /* If this is a non-inherited input reload from a pseudo, we must
7484 clear any memory of a previous store to the same pseudo. Only do
7485 something if there will not be an output reload for the pseudo
7486 being reloaded. */
7487 if (rld[r].in_reg != 0
7488 && ! (reload_inherited[r] || reload_override_in[r]))
7490 rtx reg = rld[r].in_reg;
7492 if (GET_CODE (reg) == SUBREG)
7493 reg = SUBREG_REG (reg);
7495 if (REG_P (reg)
7496 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
7497 && !REGNO_REG_SET_P (&reg_has_output_reload, REGNO (reg)))
7499 int nregno = REGNO (reg);
7501 if (reg_last_reload_reg[nregno])
7503 int last_regno = REGNO (reg_last_reload_reg[nregno]);
7505 if (reg_reloaded_contents[last_regno] == nregno)
7506 spill_reg_store[last_regno] = 0;
7511 /* I is nonneg if this reload used a register.
7512 If rld[r].reg_rtx is 0, this is an optional reload
7513 that we opted to ignore. */
7515 if (i >= 0 && rld[r].reg_rtx != 0)
7517 int nr = hard_regno_nregs[i][GET_MODE (rld[r].reg_rtx)];
7518 int k;
7519 int part_reaches_end = 0;
7520 int all_reaches_end = 1;
7522 /* For a multi register reload, we need to check if all or part
7523 of the value lives to the end. */
7524 for (k = 0; k < nr; k++)
7526 if (reload_reg_reaches_end_p (i + k, rld[r].opnum,
7527 rld[r].when_needed))
7528 part_reaches_end = 1;
7529 else
7530 all_reaches_end = 0;
7533 /* Ignore reloads that don't reach the end of the insn in their
7534 entirety. */
7535 if (all_reaches_end)
7537 /* First, clear out memory of what used to be in this spill reg.
7538 If consecutive registers are used, clear them all. */
7540 for (k = 0; k < nr; k++)
7542 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
7543 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered, i + k);
7546 /* Maybe the spill reg contains a copy of reload_out. */
7547 if (rld[r].out != 0
7548 && (REG_P (rld[r].out)
7549 #ifdef AUTO_INC_DEC
7550 || ! rld[r].out_reg
7551 #endif
7552 || REG_P (rld[r].out_reg)))
7554 rtx out = (REG_P (rld[r].out)
7555 ? rld[r].out
7556 : rld[r].out_reg
7557 ? rld[r].out_reg
7558 /* AUTO_INC */ : XEXP (rld[r].in_reg, 0));
7559 int nregno = REGNO (out);
7560 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
7561 : hard_regno_nregs[nregno]
7562 [GET_MODE (rld[r].reg_rtx)]);
7563 bool piecemeal;
7565 spill_reg_store[i] = new_spill_reg_store[i];
7566 spill_reg_stored_to[i] = out;
7567 reg_last_reload_reg[nregno] = rld[r].reg_rtx;
7569 piecemeal = (nregno < FIRST_PSEUDO_REGISTER
7570 && nr == nnr
7571 && inherit_piecemeal_p (r, nregno));
7573 /* If NREGNO is a hard register, it may occupy more than
7574 one register. If it does, say what is in the
7575 rest of the registers assuming that both registers
7576 agree on how many words the object takes. If not,
7577 invalidate the subsequent registers. */
7579 if (nregno < FIRST_PSEUDO_REGISTER)
7580 for (k = 1; k < nnr; k++)
7581 reg_last_reload_reg[nregno + k]
7582 = (piecemeal
7583 ? regno_reg_rtx[REGNO (rld[r].reg_rtx) + k]
7584 : 0);
7586 /* Now do the inverse operation. */
7587 for (k = 0; k < nr; k++)
7589 CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k);
7590 reg_reloaded_contents[i + k]
7591 = (nregno >= FIRST_PSEUDO_REGISTER || !piecemeal
7592 ? nregno
7593 : nregno + k);
7594 reg_reloaded_insn[i + k] = insn;
7595 SET_HARD_REG_BIT (reg_reloaded_valid, i + k);
7596 if (HARD_REGNO_CALL_PART_CLOBBERED (i + k, GET_MODE (out)))
7597 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered, i + k);
7601 /* Maybe the spill reg contains a copy of reload_in. Only do
7602 something if there will not be an output reload for
7603 the register being reloaded. */
7604 else if (rld[r].out_reg == 0
7605 && rld[r].in != 0
7606 && ((REG_P (rld[r].in)
7607 && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER
7608 && !REGNO_REG_SET_P (&reg_has_output_reload,
7609 REGNO (rld[r].in)))
7610 || (REG_P (rld[r].in_reg)
7611 && !REGNO_REG_SET_P (&reg_has_output_reload,
7612 REGNO (rld[r].in_reg))))
7613 && ! reg_set_p (rld[r].reg_rtx, PATTERN (insn)))
7615 int nregno;
7616 int nnr;
7617 rtx in;
7618 bool piecemeal;
7620 if (REG_P (rld[r].in)
7621 && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER)
7622 in = rld[r].in;
7623 else if (REG_P (rld[r].in_reg))
7624 in = rld[r].in_reg;
7625 else
7626 in = XEXP (rld[r].in_reg, 0);
7627 nregno = REGNO (in);
7629 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
7630 : hard_regno_nregs[nregno]
7631 [GET_MODE (rld[r].reg_rtx)]);
7633 reg_last_reload_reg[nregno] = rld[r].reg_rtx;
7635 piecemeal = (nregno < FIRST_PSEUDO_REGISTER
7636 && nr == nnr
7637 && inherit_piecemeal_p (r, nregno));
7639 if (nregno < FIRST_PSEUDO_REGISTER)
7640 for (k = 1; k < nnr; k++)
7641 reg_last_reload_reg[nregno + k]
7642 = (piecemeal
7643 ? regno_reg_rtx[REGNO (rld[r].reg_rtx) + k]
7644 : 0);
7646 /* Unless we inherited this reload, show we haven't
7647 recently done a store.
7648 Previous stores of inherited auto_inc expressions
7649 also have to be discarded. */
7650 if (! reload_inherited[r]
7651 || (rld[r].out && ! rld[r].out_reg))
7652 spill_reg_store[i] = 0;
7654 for (k = 0; k < nr; k++)
7656 CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k);
7657 reg_reloaded_contents[i + k]
7658 = (nregno >= FIRST_PSEUDO_REGISTER || !piecemeal
7659 ? nregno
7660 : nregno + k);
7661 reg_reloaded_insn[i + k] = insn;
7662 SET_HARD_REG_BIT (reg_reloaded_valid, i + k);
7663 if (HARD_REGNO_CALL_PART_CLOBBERED (i + k, GET_MODE (in)))
7664 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered, i + k);
7669 /* However, if part of the reload reaches the end, then we must
7670 invalidate the old info for the part that survives to the end. */
7671 else if (part_reaches_end)
7673 for (k = 0; k < nr; k++)
7674 if (reload_reg_reaches_end_p (i + k,
7675 rld[r].opnum,
7676 rld[r].when_needed))
7677 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
7681 /* The following if-statement was #if 0'd in 1.34 (or before...).
7682 It's reenabled in 1.35 because supposedly nothing else
7683 deals with this problem. */
7685 /* If a register gets output-reloaded from a non-spill register,
7686 that invalidates any previous reloaded copy of it.
7687 But forget_old_reloads_1 won't get to see it, because
7688 it thinks only about the original insn. So invalidate it here.
7689 Also do the same thing for RELOAD_OTHER constraints where the
7690 output is discarded. */
7691 if (i < 0
7692 && ((rld[r].out != 0
7693 && (REG_P (rld[r].out)
7694 || (MEM_P (rld[r].out)
7695 && REG_P (rld[r].out_reg))))
7696 || (rld[r].out == 0 && rld[r].out_reg
7697 && REG_P (rld[r].out_reg))))
7699 rtx out = ((rld[r].out && REG_P (rld[r].out))
7700 ? rld[r].out : rld[r].out_reg);
7701 int nregno = REGNO (out);
7703 /* REG_RTX is now set or clobbered by the main instruction.
7704 As the comment above explains, forget_old_reloads_1 only
7705 sees the original instruction, and there is no guarantee
7706 that the original instruction also clobbered REG_RTX.
7707 For example, if find_reloads sees that the input side of
7708 a matched operand pair dies in this instruction, it may
7709 use the input register as the reload register.
7711 Calling forget_old_reloads_1 is a waste of effort if
7712 REG_RTX is also the output register.
7714 If we know that REG_RTX holds the value of a pseudo
7715 register, the code after the call will record that fact. */
7716 if (rld[r].reg_rtx && rld[r].reg_rtx != out)
7717 forget_old_reloads_1 (rld[r].reg_rtx, NULL_RTX, NULL);
7719 if (nregno >= FIRST_PSEUDO_REGISTER)
7721 rtx src_reg, store_insn = NULL_RTX;
7723 reg_last_reload_reg[nregno] = 0;
7725 /* If we can find a hard register that is stored, record
7726 the storing insn so that we may delete this insn with
7727 delete_output_reload. */
7728 src_reg = rld[r].reg_rtx;
7730 /* If this is an optional reload, try to find the source reg
7731 from an input reload. */
7732 if (! src_reg)
7734 rtx set = single_set (insn);
7735 if (set && SET_DEST (set) == rld[r].out)
7737 int k;
7739 src_reg = SET_SRC (set);
7740 store_insn = insn;
7741 for (k = 0; k < n_reloads; k++)
7743 if (rld[k].in == src_reg)
7745 src_reg = rld[k].reg_rtx;
7746 break;
7751 else
7752 store_insn = new_spill_reg_store[REGNO (src_reg)];
7753 if (src_reg && REG_P (src_reg)
7754 && REGNO (src_reg) < FIRST_PSEUDO_REGISTER)
7756 int src_regno = REGNO (src_reg);
7757 int nr = hard_regno_nregs[src_regno][rld[r].mode];
7758 /* The place where to find a death note varies with
7759 PRESERVE_DEATH_INFO_REGNO_P. The condition is not
7760 necessarily checked exactly in the code that moves
7761 notes, so just check both locations. */
7762 rtx note = find_regno_note (insn, REG_DEAD, src_regno);
7763 if (! note && store_insn)
7764 note = find_regno_note (store_insn, REG_DEAD, src_regno);
7765 while (nr-- > 0)
7767 spill_reg_store[src_regno + nr] = store_insn;
7768 spill_reg_stored_to[src_regno + nr] = out;
7769 reg_reloaded_contents[src_regno + nr] = nregno;
7770 reg_reloaded_insn[src_regno + nr] = store_insn;
7771 CLEAR_HARD_REG_BIT (reg_reloaded_dead, src_regno + nr);
7772 SET_HARD_REG_BIT (reg_reloaded_valid, src_regno + nr);
7773 if (HARD_REGNO_CALL_PART_CLOBBERED (src_regno + nr,
7774 GET_MODE (src_reg)))
7775 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
7776 src_regno + nr);
7777 SET_HARD_REG_BIT (reg_is_output_reload, src_regno + nr);
7778 if (note)
7779 SET_HARD_REG_BIT (reg_reloaded_died, src_regno);
7780 else
7781 CLEAR_HARD_REG_BIT (reg_reloaded_died, src_regno);
7783 reg_last_reload_reg[nregno] = src_reg;
7784 /* We have to set reg_has_output_reload here, or else
7785 forget_old_reloads_1 will clear reg_last_reload_reg
7786 right away. */
7787 SET_REGNO_REG_SET (&reg_has_output_reload,
7788 nregno);
7791 else
7793 int num_regs = hard_regno_nregs[nregno][GET_MODE (out)];
7795 while (num_regs-- > 0)
7796 reg_last_reload_reg[nregno + num_regs] = 0;
7800 IOR_HARD_REG_SET (reg_reloaded_dead, reg_reloaded_died);
7803 /* Go through the motions to emit INSN and test if it is strictly valid.
7804 Return the emitted insn if valid, else return NULL. */
7806 static rtx
7807 emit_insn_if_valid_for_reload (rtx insn)
7809 rtx last = get_last_insn ();
7810 int code;
7812 insn = emit_insn (insn);
7813 code = recog_memoized (insn);
7815 if (code >= 0)
7817 extract_insn (insn);
7818 /* We want constrain operands to treat this insn strictly in its
7819 validity determination, i.e., the way it would after reload has
7820 completed. */
7821 if (constrain_operands (1))
7822 return insn;
7825 delete_insns_since (last);
7826 return NULL;
7829 /* Emit code to perform a reload from IN (which may be a reload register) to
7830 OUT (which may also be a reload register). IN or OUT is from operand
7831 OPNUM with reload type TYPE.
7833 Returns first insn emitted. */
7835 static rtx
7836 gen_reload (rtx out, rtx in, int opnum, enum reload_type type)
7838 rtx last = get_last_insn ();
7839 rtx tem;
7841 /* If IN is a paradoxical SUBREG, remove it and try to put the
7842 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
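/* Illustrative example (hypothetical pseudo number): reloading
   (subreg:SI (reg:QI 123) 0) into an SImode register OUT.  The paradoxical
   SUBREG is stripped, IN becomes (reg:QI 123), and OUT is narrowed with
   gen_lowpart_common to a QImode view of the same hard register, so the
   move below is done in QImode.  */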
7843 if (GET_CODE (in) == SUBREG
7844 && (GET_MODE_SIZE (GET_MODE (in))
7845 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
7846 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
7847 in = SUBREG_REG (in), out = tem;
7848 else if (GET_CODE (out) == SUBREG
7849 && (GET_MODE_SIZE (GET_MODE (out))
7850 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
7851 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
7852 out = SUBREG_REG (out), in = tem;
7854 /* How to do this reload can get quite tricky. Normally, we are being
7855 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
7856 register that didn't get a hard register. In that case we can just
7857 call emit_move_insn.
7859 We can also be asked to reload a PLUS that adds a register or a MEM to
7860 another register, constant or MEM. This can occur during frame pointer
7861 elimination and while reloading addresses. This case is handled by
7862 trying to emit a single insn to perform the add. If it is not valid,
7863 we use a two insn sequence.
7865 Or we can be asked to reload a unary operand that was a fragment of
7866 an addressing mode, into a register. If it isn't recognized as-is,
7867 we try making the unop operand and the reload-register the same:
7868 (set reg:X (unop:X expr:Y))
7869 -> (set reg:Y expr:Y) (set reg:X (unop:X reg:Y)).
7871 Finally, we could be called to handle an 'o' constraint by putting
7872 an address into a register. In that case, we first try to do this
7873 with a named pattern of "reload_load_address". If no such pattern
7874 exists, we just emit a SET insn and hope for the best (it will normally
7875 be valid on machines that use 'o').
7877 This entire process is made complex both because reload will never
7878 process the insns we generate here (so we must ensure that they
7879 will fit their constraints) and because parts of IN might be
7880 being reloaded separately and replaced with spill registers.
7881 Because of this, we are, in some sense, just guessing the right approach
7882 here. The one listed above seems to work.
7884 ??? At some point, this whole thing needs to be rethought. */
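/* A concrete (hypothetical) instance of the PLUS case below: after frame
   pointer elimination an address reload may ask for
       (set (reg:SI reloadreg) (plus:SI (reg:SI sp) (const_int 40)))
   We first try exactly that single add insn; only if the target does not
   accept it do we fall back to moving one operand into the reload register
   and then emitting a two-operand add.  */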
7886 if (GET_CODE (in) == PLUS
7887 && (REG_P (XEXP (in, 0))
7888 || GET_CODE (XEXP (in, 0)) == SUBREG
7889 || MEM_P (XEXP (in, 0)))
7890 && (REG_P (XEXP (in, 1))
7891 || GET_CODE (XEXP (in, 1)) == SUBREG
7892 || CONSTANT_P (XEXP (in, 1))
7893 || MEM_P (XEXP (in, 1))))
7895 /* We need to compute the sum of a register or a MEM and another
7896 register, constant, or MEM, and put it into the reload
7897 register. The best possible way of doing this is if the machine
7898 has a three-operand ADD insn that accepts the required operands.
7900 The simplest approach is to try to generate such an insn and see if it
7901 is recognized and matches its constraints. If so, it can be used.
7903 It might be better not to actually emit the insn unless it is valid,
7904 but we need to pass the insn as an operand to `recog' and
7905 `extract_insn' and it is simpler to emit and then delete the insn if
7906 not valid than to dummy things up. */
7908 rtx op0, op1, tem, insn;
7909 int code;
7911 op0 = find_replacement (&XEXP (in, 0));
7912 op1 = find_replacement (&XEXP (in, 1));
7914 /* Since constraint checking is strict, commutativity won't be
7915 checked, so we need to do that here to avoid spurious failure
7916 if the add instruction is two-address and the second operand
7917 of the add is the same as the reload reg, which is frequently
7918 the case. If the insn would be A = B + A, rearrange it so
7919 it will be A = A + B as constrain_operands expects. */
7921 if (REG_P (XEXP (in, 1))
7922 && REGNO (out) == REGNO (XEXP (in, 1)))
7923 tem = op0, op0 = op1, op1 = tem;
7925 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
7926 in = gen_rtx_PLUS (GET_MODE (in), op0, op1);
7928 insn = emit_insn_if_valid_for_reload (gen_rtx_SET (VOIDmode, out, in));
7929 if (insn)
7930 return insn;
7932 /* If that failed, we must use a conservative two-insn sequence.
7934 Use a move to copy one operand into the reload register. Prefer
7935 to reload a constant, MEM or pseudo since the move patterns can
7936 handle an arbitrary operand. If OP1 is not a constant, MEM or
7937 pseudo and OP1 is not a valid operand for an add instruction, then
7938 reload OP1.
7940 After reloading one of the operands into the reload register, add
7941 the reload register to the output register.
7943 If there is another way to do this for a specific machine, a
7944 DEFINE_PEEPHOLE should be specified that recognizes the sequence
7945 we emit below. */
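/* Sketch of the fallback emitted below (after the possible operand swap):
       gen_reload (out, op0, ...);       copy one operand into OUT
       emit gen_add2_insn (out, op1);    OUT = OUT + OP1
   and, when that add is valid, a REG_EQUIV note for IN is attached so that
   find_equiv_reg can still find the original value.  */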
7947 code = (int) optab_handler (add_optab, GET_MODE (out))->insn_code;
7949 if (CONSTANT_P (op1) || MEM_P (op1) || GET_CODE (op1) == SUBREG
7950 || (REG_P (op1)
7951 && REGNO (op1) >= FIRST_PSEUDO_REGISTER)
7952 || (code != CODE_FOR_nothing
7953 && ! ((*insn_data[code].operand[2].predicate)
7954 (op1, insn_data[code].operand[2].mode))))
7955 tem = op0, op0 = op1, op1 = tem;
7957 gen_reload (out, op0, opnum, type);
7959 /* If OP0 and OP1 are the same, we can use OUT for OP1.
7960 This fixes a problem on the 32K where the stack pointer cannot
7961 be used as an operand of an add insn. */
7963 if (rtx_equal_p (op0, op1))
7964 op1 = out;
7966 insn = emit_insn_if_valid_for_reload (gen_add2_insn (out, op1));
7967 if (insn)
7969 /* Add a REG_EQUIV note so that find_equiv_reg can find it. */
7970 set_unique_reg_note (insn, REG_EQUIV, in);
7971 return insn;
7974 /* If that failed, copy the address register to the reload register.
7975 Then add the constant to the reload register. */
7977 gcc_assert (!reg_overlap_mentioned_p (out, op0));
7978 gen_reload (out, op1, opnum, type);
7979 insn = emit_insn (gen_add2_insn (out, op0));
7980 set_unique_reg_note (insn, REG_EQUIV, in);
7983 #ifdef SECONDARY_MEMORY_NEEDED
7984 /* If we need a memory location to do the move, do it that way. */
7985 else if ((REG_P (in) || GET_CODE (in) == SUBREG)
7986 && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER
7987 && (REG_P (out) || GET_CODE (out) == SUBREG)
7988 && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
7989 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (reg_or_subregno (in)),
7990 REGNO_REG_CLASS (reg_or_subregno (out)),
7991 GET_MODE (out)))
7993 /* Get the memory to use and rewrite both registers to its mode. */
7994 rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
7996 if (GET_MODE (loc) != GET_MODE (out))
7997 out = gen_rtx_REG (GET_MODE (loc), REGNO (out));
7999 if (GET_MODE (loc) != GET_MODE (in))
8000 in = gen_rtx_REG (GET_MODE (loc), REGNO (in));
8002 gen_reload (loc, in, opnum, type);
8003 gen_reload (out, loc, opnum, type);
8005 #endif
8006 else if (REG_P (out) && UNARY_P (in))
8008 rtx insn;
8009 rtx op1;
8010 rtx out_moded;
8011 rtx set;
8013 op1 = find_replacement (&XEXP (in, 0));
8014 if (op1 != XEXP (in, 0))
8015 in = gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in), op1);
8017 /* First, try a plain SET. */
8018 set = emit_insn_if_valid_for_reload (gen_rtx_SET (VOIDmode, out, in));
8019 if (set)
8020 return set;
8022 /* If that failed, move the inner operand to the reload
8023 register, and try the same unop with the inner expression
8024 replaced with the reload register. */
8026 if (GET_MODE (op1) != GET_MODE (out))
8027 out_moded = gen_rtx_REG (GET_MODE (op1), REGNO (out));
8028 else
8029 out_moded = out;
8031 gen_reload (out_moded, op1, opnum, type);
8033 insn
8034 = gen_rtx_SET (VOIDmode, out,
8035 gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in),
8036 out_moded));
8037 insn = emit_insn_if_valid_for_reload (insn);
8038 if (insn)
8040 set_unique_reg_note (insn, REG_EQUIV, in);
8041 return insn;
8044 fatal_insn ("Failure trying to reload:", set);
8046 /* If IN is a simple operand, use gen_move_insn. */
8047 else if (OBJECT_P (in) || GET_CODE (in) == SUBREG)
8049 tem = emit_insn (gen_move_insn (out, in));
8050 /* IN may contain a LABEL_REF, if so add a REG_LABEL note. */
8051 mark_jump_label (in, tem, 0);
8054 #ifdef HAVE_reload_load_address
8055 else if (HAVE_reload_load_address)
8056 emit_insn (gen_reload_load_address (out, in));
8057 #endif
8059 /* Otherwise, just write (set OUT IN) and hope for the best. */
8060 else
8061 emit_insn (gen_rtx_SET (VOIDmode, out, in));
8063 /* Return the first insn emitted.
8064 We can not just return get_last_insn, because there may have
8065 been multiple instructions emitted. Also note that gen_move_insn may
8066 emit more than one insn itself, so we can not assume that there is one
8067 insn emitted per emit_insn_before call. */
8069 return last ? NEXT_INSN (last) : get_insns ();
8072 /* Delete a previously made output-reload whose result we now believe
8073 is not needed. First we double-check.
8075 INSN is the insn now being processed.
8076 LAST_RELOAD_REG is the hard register number for which we want to delete
8077 the last output reload.
8078 J is the reload-number that originally used REG. The caller has made
8079 certain that reload J doesn't use REG any longer for input. */
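/* Illustrative scenario (numbers invented): pseudo 200 lives on the stack,
   an earlier output reload stored hard register 3 into its slot (that
   store is spill_reg_store[3]), and the caller has determined that the
   value is dead or about to be overwritten at INSN.  If the checks below
   show the pseudo is not referenced in any other way between that store
   and INSN, the earlier store insn can be deleted.  */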
8081 static void
8082 delete_output_reload (rtx insn, int j, int last_reload_reg)
8084 rtx output_reload_insn = spill_reg_store[last_reload_reg];
8085 rtx reg = spill_reg_stored_to[last_reload_reg];
8086 int k;
8087 int n_occurrences;
8088 int n_inherited = 0;
8089 rtx i1;
8090 rtx substed;
8092 /* It is possible that this reload has only been used to set another reload
8093 that we eliminated earlier, and thus this instruction has already been deleted too. */
8094 if (INSN_DELETED_P (output_reload_insn))
8095 return;
8097 /* Get the raw pseudo-register referred to. */
8099 while (GET_CODE (reg) == SUBREG)
8100 reg = SUBREG_REG (reg);
8101 substed = reg_equiv_memory_loc[REGNO (reg)];
8103 /* This is unsafe if the operand occurs more often in the current
8104 insn than it is inherited. */
8105 for (k = n_reloads - 1; k >= 0; k--)
8107 rtx reg2 = rld[k].in;
8108 if (! reg2)
8109 continue;
8110 if (MEM_P (reg2) || reload_override_in[k])
8111 reg2 = rld[k].in_reg;
8112 #ifdef AUTO_INC_DEC
8113 if (rld[k].out && ! rld[k].out_reg)
8114 reg2 = XEXP (rld[k].in_reg, 0);
8115 #endif
8116 while (GET_CODE (reg2) == SUBREG)
8117 reg2 = SUBREG_REG (reg2);
8118 if (rtx_equal_p (reg2, reg))
8120 if (reload_inherited[k] || reload_override_in[k] || k == j)
8121 n_inherited++;
8122 else
8123 return;
8126 n_occurrences = count_occurrences (PATTERN (insn), reg, 0);
8127 if (CALL_P (insn) && CALL_INSN_FUNCTION_USAGE (insn))
8128 n_occurrences += count_occurrences (CALL_INSN_FUNCTION_USAGE (insn),
8129 reg, 0);
8130 if (substed)
8131 n_occurrences += count_occurrences (PATTERN (insn),
8132 eliminate_regs (substed, 0,
8133 NULL_RTX), 0);
8134 for (i1 = reg_equiv_alt_mem_list [REGNO (reg)]; i1; i1 = XEXP (i1, 1))
8136 gcc_assert (!rtx_equal_p (XEXP (i1, 0), substed));
8137 n_occurrences += count_occurrences (PATTERN (insn), XEXP (i1, 0), 0);
8139 if (n_occurrences > n_inherited)
8140 return;
8142 /* If the pseudo-reg we are reloading is no longer referenced
8143 anywhere between the store into it and here,
8144 and we're within the same basic block, then the value can only
8145 pass through the reload reg and end up here.
8146 Otherwise, give up--return. */
8147 for (i1 = NEXT_INSN (output_reload_insn);
8148 i1 != insn; i1 = NEXT_INSN (i1))
8150 if (NOTE_INSN_BASIC_BLOCK_P (i1))
8151 return;
8152 if ((NONJUMP_INSN_P (i1) || CALL_P (i1))
8153 && reg_mentioned_p (reg, PATTERN (i1)))
8155 /* If this is USE in front of INSN, we only have to check that
8156 there are no more references than accounted for by inheritance. */
8157 while (NONJUMP_INSN_P (i1) && GET_CODE (PATTERN (i1)) == USE)
8159 n_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
8160 i1 = NEXT_INSN (i1);
8162 if (n_occurrences <= n_inherited && i1 == insn)
8163 break;
8164 return;
8168 /* We will be deleting the insn. Remove the spill reg information. */
8169 for (k = hard_regno_nregs[last_reload_reg][GET_MODE (reg)]; k-- > 0; )
8171 spill_reg_store[last_reload_reg + k] = 0;
8172 spill_reg_stored_to[last_reload_reg + k] = 0;
8175 /* The caller has already checked that REG dies or is set in INSN.
8176 It has also checked that we are optimizing, and thus some
8177 inaccuracies in the debugging information are acceptable.
8178 So we could just delete output_reload_insn. But in some cases
8179 we can improve the debugging information without sacrificing
8180 optimization - maybe even improving the code: See if the pseudo
8181 reg has been completely replaced with reload regs. If so, delete
8182 the store insn and forget we had a stack slot for the pseudo. */
8183 if (rld[j].out != rld[j].in
8184 && REG_N_DEATHS (REGNO (reg)) == 1
8185 && REG_N_SETS (REGNO (reg)) == 1
8186 && REG_BASIC_BLOCK (REGNO (reg)) >= NUM_FIXED_BLOCKS
8187 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
8189 rtx i2;
8191 /* We know that it was used only between here and the beginning of
8192 the current basic block. (We also know that the last use before
8193 INSN was the output reload we are thinking of deleting, but never
8194 mind that.) Search that range; see if any ref remains. */
8195 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
8197 rtx set = single_set (i2);
8199 /* Uses which just store in the pseudo don't count,
8200 since if they are the only uses, they are dead. */
8201 if (set != 0 && SET_DEST (set) == reg)
8202 continue;
8203 if (LABEL_P (i2)
8204 || JUMP_P (i2))
8205 break;
8206 if ((NONJUMP_INSN_P (i2) || CALL_P (i2))
8207 && reg_mentioned_p (reg, PATTERN (i2)))
8209 /* Some other ref remains; just delete the output reload we
8210 know to be dead. */
8211 delete_address_reloads (output_reload_insn, insn);
8212 delete_insn (output_reload_insn);
8213 return;
8217 /* Delete the now-dead stores into this pseudo. Note that this
8218 loop also takes care of deleting output_reload_insn. */
8219 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
8221 rtx set = single_set (i2);
8223 if (set != 0 && SET_DEST (set) == reg)
8225 delete_address_reloads (i2, insn);
8226 delete_insn (i2);
8228 if (LABEL_P (i2)
8229 || JUMP_P (i2))
8230 break;
8233 /* For the debugging info, say the pseudo lives in this reload reg. */
8234 reg_renumber[REGNO (reg)] = REGNO (rld[j].reg_rtx);
8235 alter_reg (REGNO (reg), -1);
8237 else
8239 delete_address_reloads (output_reload_insn, insn);
8240 delete_insn (output_reload_insn);
8244 /* We are going to delete DEAD_INSN. Recursively delete loads of
8245 reload registers used in DEAD_INSN that are not used till CURRENT_INSN.
8246 CURRENT_INSN is being reloaded, so we have to check its reloads too. */
8247 static void
8248 delete_address_reloads (rtx dead_insn, rtx current_insn)
8250 rtx set = single_set (dead_insn);
8251 rtx set2, dst, prev, next;
8252 if (set)
8254 rtx dst = SET_DEST (set);
8255 if (MEM_P (dst))
8256 delete_address_reloads_1 (dead_insn, XEXP (dst, 0), current_insn);
8258 /* If we deleted the store from a reloaded post_{in,de}c expression,
8259 we can delete the matching adds. */
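/* Illustrative case (hypothetical register and offset): for a reloaded
   (post_inc:SI (reg:SI r)) we may find
       PREV:      (set (reg r) (plus:SI (reg r) (const_int 4)))
       DEAD_INSN: the store we have just deleted
       NEXT:      (set (reg r) (plus:SI (reg r) (const_int -4)))
   The increment and the compensating decrement now cancel, so both PREV
   and NEXT can be deleted as well.  */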
8260 prev = PREV_INSN (dead_insn);
8261 next = NEXT_INSN (dead_insn);
8262 if (! prev || ! next)
8263 return;
8264 set = single_set (next);
8265 set2 = single_set (prev);
8266 if (! set || ! set2
8267 || GET_CODE (SET_SRC (set)) != PLUS || GET_CODE (SET_SRC (set2)) != PLUS
8268 || GET_CODE (XEXP (SET_SRC (set), 1)) != CONST_INT
8269 || GET_CODE (XEXP (SET_SRC (set2), 1)) != CONST_INT)
8270 return;
8271 dst = SET_DEST (set);
8272 if (! rtx_equal_p (dst, SET_DEST (set2))
8273 || ! rtx_equal_p (dst, XEXP (SET_SRC (set), 0))
8274 || ! rtx_equal_p (dst, XEXP (SET_SRC (set2), 0))
8275 || (INTVAL (XEXP (SET_SRC (set), 1))
8276 != -INTVAL (XEXP (SET_SRC (set2), 1))))
8277 return;
8278 delete_related_insns (prev);
8279 delete_related_insns (next);
8282 /* Subfunction of delete_address_reloads: process registers found in X. */
8283 static void
8284 delete_address_reloads_1 (rtx dead_insn, rtx x, rtx current_insn)
8286 rtx prev, set, dst, i2;
8287 int i, j;
8288 enum rtx_code code = GET_CODE (x);
8290 if (code != REG)
8292 const char *fmt = GET_RTX_FORMAT (code);
8293 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8295 if (fmt[i] == 'e')
8296 delete_address_reloads_1 (dead_insn, XEXP (x, i), current_insn);
8297 else if (fmt[i] == 'E')
8299 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8300 delete_address_reloads_1 (dead_insn, XVECEXP (x, i, j),
8301 current_insn);
8304 return;
8307 if (spill_reg_order[REGNO (x)] < 0)
8308 return;
8310 /* Scan backwards for the insn that sets x. This might be far back due
8311 to inheritance. */
8312 for (prev = PREV_INSN (dead_insn); prev; prev = PREV_INSN (prev))
8314 code = GET_CODE (prev);
8315 if (code == CODE_LABEL || code == JUMP_INSN)
8316 return;
8317 if (!INSN_P (prev))
8318 continue;
8319 if (reg_set_p (x, PATTERN (prev)))
8320 break;
8321 if (reg_referenced_p (x, PATTERN (prev)))
8322 return;
8324 if (! prev || INSN_UID (prev) < reload_first_uid)
8325 return;
8326 /* Check that PREV only sets the reload register. */
8327 set = single_set (prev);
8328 if (! set)
8329 return;
8330 dst = SET_DEST (set);
8331 if (!REG_P (dst)
8332 || ! rtx_equal_p (dst, x))
8333 return;
8334 if (! reg_set_p (dst, PATTERN (dead_insn)))
8336 /* Check if DST was used in a later insn -
8337 it might have been inherited. */
8338 for (i2 = NEXT_INSN (dead_insn); i2; i2 = NEXT_INSN (i2))
8340 if (LABEL_P (i2))
8341 break;
8342 if (! INSN_P (i2))
8343 continue;
8344 if (reg_referenced_p (dst, PATTERN (i2)))
8346 /* If there is a reference to the register in the current insn,
8347 it might be loaded in a non-inherited reload. If no other
8348 reload uses it, that means the register is set before
8349 referenced. */
8350 if (i2 == current_insn)
8352 for (j = n_reloads - 1; j >= 0; j--)
8353 if ((rld[j].reg_rtx == dst && reload_inherited[j])
8354 || reload_override_in[j] == dst)
8355 return;
8356 for (j = n_reloads - 1; j >= 0; j--)
8357 if (rld[j].in && rld[j].reg_rtx == dst)
8358 break;
8359 if (j >= 0)
8360 break;
8362 return;
8364 if (JUMP_P (i2))
8365 break;
8366 /* If DST is still live at CURRENT_INSN, check if it is used for
8367 any reload. Note that even if CURRENT_INSN sets DST, we still
8368 have to check the reloads. */
8369 if (i2 == current_insn)
8371 for (j = n_reloads - 1; j >= 0; j--)
8372 if ((rld[j].reg_rtx == dst && reload_inherited[j])
8373 || reload_override_in[j] == dst)
8374 return;
8375 /* ??? We can't finish the loop here, because dst might be
8376 allocated to a pseudo in this block if no reload in this
8377 block needs any of the classes containing DST - see
8378 spill_hard_reg. There is no easy way to tell this, so we
8379 have to scan till the end of the basic block. */
8381 if (reg_set_p (dst, PATTERN (i2)))
8382 break;
8385 delete_address_reloads_1 (prev, SET_SRC (set), current_insn);
8386 reg_reloaded_contents[REGNO (dst)] = -1;
8387 delete_insn (prev);
8390 /* Output reload-insns to reload VALUE into RELOADREG.
8391 VALUE is an autoincrement or autodecrement RTX whose operand
8392 is a register or memory location;
8393 so reloading involves incrementing that location.
8394 IN is either identical to VALUE, or some cheaper place to reload from.
8396 INC_AMOUNT is the number to increment or decrement by (always positive).
8397 This cannot be deduced from VALUE.
8399 Return the instruction that stores into RELOADREG. */
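/* Illustrative sketch (register and amount invented): for
   VALUE == (pre_inc:SI (reg:SI r)) with IN == VALUE and INC_AMOUNT == 4,
   the preferred output is a single
       (set (reg r) (plus:SI (reg r) (const_int 4)))
   followed by a copy of r into RELOADREG.  For a post-increment we copy
   the old value into RELOADREG first and increment afterwards; when the
   location cannot be incremented directly, the arithmetic is done in
   RELOADREG and the result is stored back into the location.  */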
8401 static rtx
8402 inc_for_reload (rtx reloadreg, rtx in, rtx value, int inc_amount)
8404 /* REG or MEM to be copied and incremented. */
8405 rtx incloc = find_replacement (&XEXP (value, 0));
8406 /* Nonzero if increment after copying. */
8407 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC
8408 || GET_CODE (value) == POST_MODIFY);
8409 rtx last;
8410 rtx inc;
8411 rtx add_insn;
8412 int code;
8413 rtx store;
8414 rtx real_in = in == value ? incloc : in;
8416 /* No hard register is equivalent to this register after
8417 inc/dec operation. If REG_LAST_RELOAD_REG were nonzero,
8418 we could inc/dec that register as well (maybe even using it for
8419 the source), but I'm not sure it's worth worrying about. */
8420 if (REG_P (incloc))
8421 reg_last_reload_reg[REGNO (incloc)] = 0;
8423 if (GET_CODE (value) == PRE_MODIFY || GET_CODE (value) == POST_MODIFY)
8425 gcc_assert (GET_CODE (XEXP (value, 1)) == PLUS);
8426 inc = find_replacement (&XEXP (XEXP (value, 1), 1));
8428 else
8430 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
8431 inc_amount = -inc_amount;
8433 inc = GEN_INT (inc_amount);
8436 /* If this is post-increment, first copy the location to the reload reg. */
8437 if (post && real_in != reloadreg)
8438 emit_insn (gen_move_insn (reloadreg, real_in));
8440 if (in == value)
8442 /* See if we can directly increment INCLOC. Use a method similar to
8443 that in gen_reload. */
8445 last = get_last_insn ();
8446 add_insn = emit_insn (gen_rtx_SET (VOIDmode, incloc,
8447 gen_rtx_PLUS (GET_MODE (incloc),
8448 incloc, inc)));
8450 code = recog_memoized (add_insn);
8451 if (code >= 0)
8453 extract_insn (add_insn);
8454 if (constrain_operands (1))
8456 /* If this is a pre-increment and we have incremented the value
8457 where it lives, copy the incremented value to RELOADREG to
8458 be used as an address. */
8460 if (! post)
8461 emit_insn (gen_move_insn (reloadreg, incloc));
8463 return add_insn;
8466 delete_insns_since (last);
8469 /* If we couldn't do the increment directly, we must increment in RELOADREG.
8470 The way we do this depends on whether this is pre- or post-increment.
8471 For pre-increment, copy INCLOC to the reload register, increment it
8472 there, then save back. */
8474 if (! post)
8476 if (in != reloadreg)
8477 emit_insn (gen_move_insn (reloadreg, real_in));
8478 emit_insn (gen_add2_insn (reloadreg, inc));
8479 store = emit_insn (gen_move_insn (incloc, reloadreg));
8481 else
8483 /* Postincrement.
8484 Because this might be a jump insn or a compare, and because RELOADREG
8485 may not be available after the insn in an input reload, we must do
8486 the incrementation before the insn being reloaded for.
8488 We have already copied IN to RELOADREG. Increment the copy in
8489 RELOADREG, save that back, then decrement RELOADREG so it has
8490 the original value. */
8492 emit_insn (gen_add2_insn (reloadreg, inc));
8493 store = emit_insn (gen_move_insn (incloc, reloadreg));
8494 if (GET_CODE (inc) == CONST_INT)
8495 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-INTVAL (inc))));
8496 else
8497 emit_insn (gen_sub2_insn (reloadreg, inc));
8500 return store;
8503 #ifdef AUTO_INC_DEC
8504 static void
8505 add_auto_inc_notes (rtx insn, rtx x)
8507 enum rtx_code code = GET_CODE (x);
8508 const char *fmt;
8509 int i, j;
8511 if (code == MEM && auto_inc_p (XEXP (x, 0)))
8513 REG_NOTES (insn)
8514 = gen_rtx_EXPR_LIST (REG_INC, XEXP (XEXP (x, 0), 0), REG_NOTES (insn));
8515 return;
8518 /* Scan all the operand sub-expressions. */
8519 fmt = GET_RTX_FORMAT (code);
8520 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8522 if (fmt[i] == 'e')
8523 add_auto_inc_notes (insn, XEXP (x, i));
8524 else if (fmt[i] == 'E')
8525 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8526 add_auto_inc_notes (insn, XVECEXP (x, i, j));
8529 #endif
8531 /* Copy EH notes from an insn to its reloads. */
8532 static void
8533 copy_eh_notes (rtx insn, rtx x)
8535 rtx eh_note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
8536 if (eh_note)
8538 for (; x != 0; x = NEXT_INSN (x))
8540 if (may_trap_p (PATTERN (x)))
8541 REG_NOTES (x)
8542 = gen_rtx_EXPR_LIST (REG_EH_REGION, XEXP (eh_note, 0),
8543 REG_NOTES (x));
8548 /* This is used by the reload pass, which emits some instructions after
8549 abnormal calls, moving the basic block end; it really wants to emit
8550 them on the edge instead. Look for abnormal call edges, search backward
8551 for the proper call and fix the damage.
8553 Handle instructions that throw exceptions internally similarly. */
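/* Illustrative situation: reload appended a spill store after a call that
   ends its basic block with an abnormal (EH) edge.  The code below walks
   back to the call, truncates the block there, and re-inserts the trailing
   insns on the fallthru edge instead, so the CFG stays consistent.  */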
8554 void
8555 fixup_abnormal_edges (void)
8557 bool inserted = false;
8558 basic_block bb;
8560 FOR_EACH_BB (bb)
8562 edge e;
8563 edge_iterator ei;
8565 /* Look for cases we are interested in - calls or instructions causing
8566 exceptions. */
8567 FOR_EACH_EDGE (e, ei, bb->succs)
8569 if (e->flags & EDGE_ABNORMAL_CALL)
8570 break;
8571 if ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
8572 == (EDGE_ABNORMAL | EDGE_EH))
8573 break;
8575 if (e && !CALL_P (BB_END (bb))
8576 && !can_throw_internal (BB_END (bb)))
8578 rtx insn;
8580 /* Get past the new insns generated. Allow notes, as the insns
8581 may be already deleted. */
8582 insn = BB_END (bb);
8583 while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
8584 && !can_throw_internal (insn)
8585 && insn != BB_HEAD (bb))
8586 insn = PREV_INSN (insn);
8588 if (CALL_P (insn) || can_throw_internal (insn))
8590 rtx stop, next;
8592 stop = NEXT_INSN (BB_END (bb));
8593 BB_END (bb) = insn;
8594 insn = NEXT_INSN (insn);
8596 FOR_EACH_EDGE (e, ei, bb->succs)
8597 if (e->flags & EDGE_FALLTHRU)
8598 break;
8600 while (insn && insn != stop)
8602 next = NEXT_INSN (insn);
8603 if (INSN_P (insn))
8605 delete_insn (insn);
8607 /* Sometimes there's still the return value USE.
8608 If it's placed after a trapping call (i.e. that
8609 call is the last insn anyway), we have no fallthru
8610 edge. Simply delete this use and don't try to insert
8611 on the non-existent edge. */
8612 if (GET_CODE (PATTERN (insn)) != USE)
8614 /* We're not deleting it, we're moving it. */
8615 INSN_DELETED_P (insn) = 0;
8616 PREV_INSN (insn) = NULL_RTX;
8617 NEXT_INSN (insn) = NULL_RTX;
8619 insert_insn_on_edge (insn, e);
8620 inserted = true;
8623 else if (!BARRIER_P (insn))
8624 set_block_for_insn (insn, NULL);
8625 insn = next;
8629 /* It may be that we don't find any such trapping insn. In this
8630 case we discovered quite late that the insn that had been
8631 marked as can_throw_internal in fact couldn't trap at all,
8632 so we should delete the EH edges out of the block. */
8633 else
8634 purge_dead_edges (bb);
8638 /* We've possibly turned single trapping insn into multiple ones. */
8639 if (flag_non_call_exceptions)
8641 sbitmap blocks;
8642 blocks = sbitmap_alloc (last_basic_block);
8643 sbitmap_ones (blocks);
8644 find_many_sub_basic_blocks (blocks);
8645 sbitmap_free (blocks);
8648 if (inserted)
8649 commit_edge_insertions ();
8651 #ifdef ENABLE_CHECKING
8652 /* Verify that we didn't turn one trapping insn into many, and that
8653 we found and corrected all of the problems wrt fixups on the
8654 fallthru edge. */
8655 verify_flow_info ();
8656 #endif