gcc/reload1.c
1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
27 #include "machmode.h"
28 #include "hard-reg-set.h"
29 #include "rtl.h"
30 #include "tm_p.h"
31 #include "obstack.h"
32 #include "insn-config.h"
33 #include "flags.h"
34 #include "function.h"
35 #include "expr.h"
36 #include "optabs.h"
37 #include "regs.h"
38 #include "addresses.h"
39 #include "basic-block.h"
40 #include "reload.h"
41 #include "recog.h"
42 #include "output.h"
43 #include "real.h"
44 #include "toplev.h"
45 #include "except.h"
46 #include "tree.h"
47 #include "ira.h"
48 #include "df.h"
49 #include "target.h"
50 #include "dse.h"
52 /* This file contains the reload pass of the compiler, which is
53 run after register allocation has been done. It checks that
54 each insn is valid (operands required to be in registers really
55 are in registers of the proper class) and fixes up invalid ones
56 by copying values temporarily into registers for the insns
57 that need them.
59 The results of register allocation are described by the vector
60 reg_renumber; the insns still contain pseudo regs, but reg_renumber
61 can be used to find which hard reg, if any, a pseudo reg is in.
63 The technique we always use is to free up a few hard regs that are
64 called ``reload regs'', and for each place where a pseudo reg
65 must be in a hard reg, copy it temporarily into one of the reload regs.
67 Reload regs are allocated locally for every instruction that needs
68 reloads. When there are pseudos which are allocated to a register that
69 has been chosen as a reload reg, such pseudos must be ``spilled''.
70 This means that they go to other hard regs, or to stack slots if no other
71 available hard regs can be found. Spilling can invalidate more
72 insns, requiring additional need for reloads, so we must keep checking
73 until the process stabilizes.
75 For machines with different classes of registers, we must keep track
76 of the register class needed for each reload, and make sure that
77 we allocate enough reload registers of each class.
79 The file reload.c contains the code that checks one insn for
80 validity and reports the reloads that it needs. This file
81 is in charge of scanning the entire rtl code, accumulating the
82 reload needs, spilling, assigning reload registers to use for
83 fixing up each insn, and generating the new insns to copy values
84 into the reload registers. */
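/* Illustrative sketch (the register numbers and the stack address below
   are invented, not taken from any particular target): if pseudo 100
   received no hard register and lives in a stack slot, an insn such as

	(set (reg:SI 100) (plus:SI (reg:SI 100) (const_int 1)))

   is fixed up, roughly, into

	(set (reg:SI 3) (mem:SI (plus:SI (reg:SI fp) (const_int -8))))
	(set (reg:SI 3) (plus:SI (reg:SI 3) (const_int 1)))
	(set (mem:SI (plus:SI (reg:SI fp) (const_int -8))) (reg:SI 3))

   where hard register 3 is the reload register for this insn: an input
   reload loads the value before the insn and an output reload stores it
   back afterwards.  */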
86 /* During reload_as_needed, element N contains a REG rtx for the hard reg
87 into which reg N has been reloaded (perhaps for a previous insn). */
88 static rtx *reg_last_reload_reg;
90 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
91 for an output reload that stores into reg N. */
92 static regset_head reg_has_output_reload;
94 /* Indicates which hard regs are reload-registers for an output reload
95 in the current insn. */
96 static HARD_REG_SET reg_is_output_reload;
98 /* Element N is the constant value to which pseudo reg N is equivalent,
99 or zero if pseudo reg N is not equivalent to a constant.
100 find_reloads looks at this in order to replace pseudo reg N
101 with the constant it stands for. */
102 rtx *reg_equiv_constant;
104 /* Element N is an invariant value to which pseudo reg N is equivalent.
105 eliminate_regs_in_insn uses this to replace pseudos in particular
106 contexts. */
107 rtx *reg_equiv_invariant;
109 /* Element N is a memory location to which pseudo reg N is equivalent,
110 prior to any register elimination (such as frame pointer to stack
111 pointer). Depending on whether or not it is a valid address, this value
112 is transferred to either reg_equiv_address or reg_equiv_mem. */
113 rtx *reg_equiv_memory_loc;
 115 /* We allocate reg_equiv_memory_loc inside a GC-managed vector so that the
 116 garbage collector can keep track of what is inside. */
117 VEC(rtx,gc) *reg_equiv_memory_loc_vec;
119 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
120 This is used when the address is not valid as a memory address
121 (because its displacement is too big for the machine.) */
122 rtx *reg_equiv_address;
124 /* Element N is the memory slot to which pseudo reg N is equivalent,
125 or zero if pseudo reg N is not equivalent to a memory slot. */
126 rtx *reg_equiv_mem;
128 /* Element N is an EXPR_LIST of REG_EQUIVs containing MEMs with
129 alternate representations of the location of pseudo reg N. */
130 rtx *reg_equiv_alt_mem_list;
132 /* Widest width in which each pseudo reg is referred to (via subreg). */
133 static unsigned int *reg_max_ref_width;
135 /* Element N is the list of insns that initialized reg N from its equivalent
136 constant or memory slot. */
137 rtx *reg_equiv_init;
138 int reg_equiv_init_size;
140 /* Vector to remember old contents of reg_renumber before spilling. */
141 static short *reg_old_renumber;
143 /* During reload_as_needed, element N contains the last pseudo regno reloaded
144 into hard register N. If that pseudo reg occupied more than one register,
145 reg_reloaded_contents points to that pseudo for each spill register in
146 use; all of these must remain set for an inheritance to occur. */
147 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
149 /* During reload_as_needed, element N contains the insn for which
150 hard register N was last used. Its contents are significant only
151 when reg_reloaded_valid is set for this register. */
152 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
154 /* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid. */
155 static HARD_REG_SET reg_reloaded_valid;
156 /* Indicate if the register was dead at the end of the reload.
157 This is only valid if reg_reloaded_contents is set and valid. */
158 static HARD_REG_SET reg_reloaded_dead;
160 /* Indicate whether the register's current value is one that is not
161 safe to retain across a call, even for registers that are normally
162 call-saved. */
163 static HARD_REG_SET reg_reloaded_call_part_clobbered;
165 /* Number of spill-regs so far; number of valid elements of spill_regs. */
166 static int n_spills;
168 /* In parallel with spill_regs, contains REG rtx's for those regs.
169 Holds the last rtx used for any given reg, or 0 if it has never
170 been used for spilling yet. This rtx is reused, provided it has
171 the proper mode. */
172 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
174 /* In parallel with spill_regs, contains nonzero for a spill reg
175 that was stored after the last time it was used.
176 The precise value is the insn generated to do the store. */
177 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
179 /* This is the register that was stored with spill_reg_store. This is a
180 copy of reload_out / reload_out_reg when the value was stored; if
181 reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg. */
182 static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER];
184 /* This table is the inverse mapping of spill_regs:
185 indexed by hard reg number,
186 it contains the position of that reg in spill_regs,
187 or -1 for something that is not in spill_regs.
189 ?!? This is no longer accurate. */
190 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
192 /* This reg set indicates registers that can't be used as spill registers for
193 the currently processed insn. These are the hard registers which are live
194 during the insn, but not allocated to pseudos, as well as fixed
195 registers. */
196 static HARD_REG_SET bad_spill_regs;
198 /* These are the hard registers that can't be used as spill register for any
199 insn. This includes registers used for user variables and registers that
200 we can't eliminate. A register that appears in this set also can't be used
201 to retry register allocation. */
202 static HARD_REG_SET bad_spill_regs_global;
204 /* Describes order of use of registers for reloading
205 of spilled pseudo-registers. `n_spills' is the number of
206 elements that are actually valid; new ones are added at the end.
208 Both spill_regs and spill_reg_order are used on two occasions:
209 once during find_reload_regs, where they keep track of the spill registers
210 for a single insn, but also during reload_as_needed where they show all
211 the registers ever used by reload. For the latter case, the information
212 is calculated during finish_spills. */
213 static short spill_regs[FIRST_PSEUDO_REGISTER];
215 /* This vector of reg sets indicates, for each pseudo, which hard registers
216 may not be used for retrying global allocation because the register was
217 formerly spilled from one of them. If we allowed reallocating a pseudo to
218 a register that it was already allocated to, reload might not
219 terminate. */
220 static HARD_REG_SET *pseudo_previous_regs;
222 /* This vector of reg sets indicates, for each pseudo, which hard
223 registers may not be used for retrying global allocation because they
224 are used as spill registers during one of the insns in which the
225 pseudo is live. */
226 static HARD_REG_SET *pseudo_forbidden_regs;
228 /* All hard regs that have been used as spill registers for any insn are
229 marked in this set. */
230 static HARD_REG_SET used_spill_regs;
232 /* Index of last register assigned as a spill register. We allocate in
233 a round-robin fashion. */
234 static int last_spill_reg;
236 /* Nonzero if indirect addressing is supported on the machine; this means
237 that spilling (REG n) does not require reloading it into a register in
238 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
239 value indicates the level of indirect addressing supported, e.g., two
240 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
241 a hard register. */
242 static char spill_indirect_levels;
244 /* Nonzero if indirect addressing is supported when the innermost MEM is
245 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
246 which these are valid is the same as spill_indirect_levels, above. */
247 char indirect_symref_ok;
249 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
250 char double_reg_address_ok;
252 /* Record the stack slot for each spilled hard register. */
253 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
255 /* Width allocated so far for that stack slot. */
256 static unsigned int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
258 /* Record which pseudos needed to be spilled. */
259 static regset_head spilled_pseudos;
261 /* Used for communication between order_regs_for_reload and count_pseudo.
262 Used to avoid counting one pseudo twice. */
263 static regset_head pseudos_counted;
265 /* First uid used by insns created by reload in this function.
266 Used in find_equiv_reg. */
267 int reload_first_uid;
269 /* Flag set by local-alloc or global-alloc if anything is live in
270 a call-clobbered reg across calls. */
271 int caller_save_needed;
273 /* Set to 1 while reload_as_needed is operating.
274 Required by some machines to handle any generated moves differently. */
275 int reload_in_progress = 0;
277 /* These arrays record the insn_code of insns that may be needed to
278 perform input and output reloads of special objects. They provide a
279 place to pass a scratch register. */
280 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
281 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
283 /* This obstack is used for allocation of rtl during register elimination.
284 The allocated storage can be freed once find_reloads has processed the
285 insn. */
286 static struct obstack reload_obstack;
288 /* Points to the beginning of the reload_obstack. All insn_chain structures
289 are allocated first. */
290 static char *reload_startobj;
292 /* The point after all insn_chain structures. Used to quickly deallocate
293 memory allocated in copy_reloads during calculate_needs_all_insns. */
294 static char *reload_firstobj;
296 /* This points before all local rtl generated by register elimination.
297 Used to quickly free all memory after processing one insn. */
298 static char *reload_insn_firstobj;
300 /* List of insn_chain instructions, one for every insn that reload needs to
301 examine. */
302 struct insn_chain *reload_insn_chain;
304 /* List of all insns needing reloads. */
305 static struct insn_chain *insns_need_reload;
307 /* This structure is used to record information about register eliminations.
308 Each array entry describes one possible way of eliminating a register
309 in favor of another. If there is more than one way of eliminating a
310 particular register, the most preferred should be specified first. */
312 struct elim_table
314 int from; /* Register number to be eliminated. */
315 int to; /* Register number used as replacement. */
316 HOST_WIDE_INT initial_offset; /* Initial difference between values. */
317 int can_eliminate; /* Nonzero if this elimination can be done. */
318 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
319 insns made by reload. */
320 HOST_WIDE_INT offset; /* Current offset between the two regs. */
321 HOST_WIDE_INT previous_offset;/* Offset at end of previous insn. */
322 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
323 rtx from_rtx; /* REG rtx for the register to be eliminated.
324 We cannot simply compare the number since
325 we might then spuriously replace a hard
326 register corresponding to a pseudo
327 assigned to the reg to be eliminated. */
328 rtx to_rtx; /* REG rtx for the replacement. */
331 static struct elim_table *reg_eliminate = 0;
333 /* This is an intermediate structure to initialize the table. It has
334 exactly the members provided by ELIMINABLE_REGS. */
335 static const struct elim_table_1
337 const int from;
338 const int to;
339 } reg_eliminate_1[] =
341 /* If a set of eliminable registers was specified, define the table from it.
342 Otherwise, default to the normal case of the frame pointer being
343 replaced by the stack pointer. */
345 #ifdef ELIMINABLE_REGS
346 ELIMINABLE_REGS;
347 #else
348 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
349 #endif
351 #define NUM_ELIMINABLE_REGS ARRAY_SIZE (reg_eliminate_1)
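/* Illustrative example (the offset value is hypothetical): with the
   default table above, elimination rewrites an address such as

	(plus (reg frame_pointer) (const_int 8))

   into

	(plus (reg stack_pointer) (const_int 8 + OFFSET))

   where OFFSET is the current difference between the values of the two
   registers, kept in the `offset' field of the elim_table entry.  */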
353 /* Record the number of pending eliminations that have an offset not equal
354 to their initial offset. If nonzero, we use a new copy of each
355 replacement result in any insns encountered. */
356 int num_not_at_initial_offset;
358 /* Count the number of registers that we may be able to eliminate. */
359 static int num_eliminable;
360 /* And the number of registers that are equivalent to a constant that
361 can be eliminated to frame_pointer / arg_pointer + constant. */
362 static int num_eliminable_invariants;
364 /* For each label, we record the offset of each elimination. If we reach
365 a label by more than one path and an offset differs, we cannot do the
366 elimination. This information is indexed by the difference of the
367 number of the label and the first label number. We can't offset the
368 pointer itself as this can cause problems on machines with segmented
369 memory. The first table is an array of flags that records whether we
370 have yet encountered a label and the second table is an array of arrays,
371 one entry in the latter array for each elimination. */
373 static int first_label_num;
374 static char *offsets_known_at;
375 static HOST_WIDE_INT (*offsets_at)[NUM_ELIMINABLE_REGS];
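/* For example (the offsets are hypothetical): if a label can be reached
   along one path where the frame pointer has been eliminated with offset
   16 and along another where the offset is 24, the recorded offsets
   disagree and the elimination cannot be performed.  */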
377 /* Number of labels in the current function. */
379 static int num_labels;
381 static void replace_pseudos_in (rtx *, enum machine_mode, rtx);
382 static void maybe_fix_stack_asms (void);
383 static void copy_reloads (struct insn_chain *);
384 static void calculate_needs_all_insns (int);
385 static int find_reg (struct insn_chain *, int);
386 static void find_reload_regs (struct insn_chain *);
387 static void select_reload_regs (void);
388 static void delete_caller_save_insns (void);
390 static void spill_failure (rtx, enum reg_class);
391 static void count_spilled_pseudo (int, int, int);
392 static void delete_dead_insn (rtx);
393 static void alter_reg (int, int, bool);
394 static void set_label_offsets (rtx, rtx, int);
395 static void check_eliminable_occurrences (rtx);
396 static void elimination_effects (rtx, enum machine_mode);
397 static int eliminate_regs_in_insn (rtx, int);
398 static void update_eliminable_offsets (void);
399 static void mark_not_eliminable (rtx, const_rtx, void *);
400 static void set_initial_elim_offsets (void);
401 static bool verify_initial_elim_offsets (void);
402 static void set_initial_label_offsets (void);
403 static void set_offsets_for_label (rtx);
404 static void init_elim_table (void);
405 static void update_eliminables (HARD_REG_SET *);
406 static void spill_hard_reg (unsigned int, int);
407 static int finish_spills (int);
408 static void scan_paradoxical_subregs (rtx);
409 static void count_pseudo (int);
410 static void order_regs_for_reload (struct insn_chain *);
411 static void reload_as_needed (int);
412 static void forget_old_reloads_1 (rtx, const_rtx, void *);
413 static void forget_marked_reloads (regset);
414 static int reload_reg_class_lower (const void *, const void *);
415 static void mark_reload_reg_in_use (unsigned int, int, enum reload_type,
416 enum machine_mode);
417 static void clear_reload_reg_in_use (unsigned int, int, enum reload_type,
418 enum machine_mode);
419 static int reload_reg_free_p (unsigned int, int, enum reload_type);
420 static int reload_reg_free_for_value_p (int, int, int, enum reload_type,
421 rtx, rtx, int, int);
422 static int free_for_value_p (int, enum machine_mode, int, enum reload_type,
423 rtx, rtx, int, int);
424 static int reload_reg_reaches_end_p (unsigned int, int, enum reload_type);
425 static int allocate_reload_reg (struct insn_chain *, int, int);
426 static int conflicts_with_override (rtx);
427 static void failed_reload (rtx, int);
428 static int set_reload_reg (int, int);
429 static void choose_reload_regs_init (struct insn_chain *, rtx *);
430 static void choose_reload_regs (struct insn_chain *);
431 static void merge_assigned_reloads (rtx);
432 static void emit_input_reload_insns (struct insn_chain *, struct reload *,
433 rtx, int);
434 static void emit_output_reload_insns (struct insn_chain *, struct reload *,
435 int);
436 static void do_input_reload (struct insn_chain *, struct reload *, int);
437 static void do_output_reload (struct insn_chain *, struct reload *, int);
438 static bool inherit_piecemeal_p (int, int);
439 static void emit_reload_insns (struct insn_chain *);
440 static void delete_output_reload (rtx, int, int);
441 static void delete_address_reloads (rtx, rtx);
442 static void delete_address_reloads_1 (rtx, rtx, rtx);
443 static rtx inc_for_reload (rtx, rtx, rtx, int);
444 #ifdef AUTO_INC_DEC
445 static void add_auto_inc_notes (rtx, rtx);
446 #endif
447 static void copy_eh_notes (rtx, rtx);
448 static int reloads_conflict (int, int);
449 static rtx gen_reload (rtx, rtx, int, enum reload_type);
450 static rtx emit_insn_if_valid_for_reload (rtx);
452 /* Initialize the reload pass. This is called at the beginning of compilation
453 and may be called again if the target is reinitialized. */
455 void
456 init_reload (void)
458 int i;
460 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
461 Set spill_indirect_levels to the number of levels such addressing is
462 permitted, zero if it is not permitted at all. */
464 rtx tem
465 = gen_rtx_MEM (Pmode,
466 gen_rtx_PLUS (Pmode,
467 gen_rtx_REG (Pmode,
468 LAST_VIRTUAL_REGISTER + 1),
469 GEN_INT (4)));
470 spill_indirect_levels = 0;
472 while (memory_address_p (QImode, tem))
474 spill_indirect_levels++;
475 tem = gen_rtx_MEM (Pmode, tem);
478 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
480 tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
481 indirect_symref_ok = memory_address_p (QImode, tem);
483 /* See if reg+reg is a valid (and offsettable) address. */
485 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
487 tem = gen_rtx_PLUS (Pmode,
488 gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
489 gen_rtx_REG (Pmode, i));
491 /* This way, we make sure that reg+reg is an offsettable address. */
492 tem = plus_constant (tem, 4);
494 if (memory_address_p (QImode, tem))
496 double_reg_address_ok = 1;
497 break;
501 /* Initialize obstack for our rtl allocation. */
502 gcc_obstack_init (&reload_obstack);
503 reload_startobj = obstack_alloc (&reload_obstack, 0);
505 INIT_REG_SET (&spilled_pseudos);
506 INIT_REG_SET (&pseudos_counted);
509 /* List of insn chains that are currently unused. */
510 static struct insn_chain *unused_insn_chains = 0;
512 /* Allocate an empty insn_chain structure. */
513 struct insn_chain *
514 new_insn_chain (void)
516 struct insn_chain *c;
518 if (unused_insn_chains == 0)
520 c = obstack_alloc (&reload_obstack, sizeof (struct insn_chain));
521 INIT_REG_SET (&c->live_throughout);
522 INIT_REG_SET (&c->dead_or_set);
524 else
526 c = unused_insn_chains;
527 unused_insn_chains = c->next;
529 c->is_caller_save_insn = 0;
530 c->need_operand_change = 0;
531 c->need_reload = 0;
532 c->need_elim = 0;
533 return c;
536 /* Small utility function to set all regs in hard reg set TO which are
537 allocated to pseudos in regset FROM. */
539 void
540 compute_use_by_pseudos (HARD_REG_SET *to, regset from)
542 unsigned int regno;
543 reg_set_iterator rsi;
545 EXECUTE_IF_SET_IN_REG_SET (from, FIRST_PSEUDO_REGISTER, regno, rsi)
547 int r = reg_renumber[regno];
549 if (r < 0)
551 /* reload_combine uses the information from
552 DF_LIVE_IN (BASIC_BLOCK), which might still
553 contain registers that have not actually been allocated
554 since they have an equivalence. */
555 gcc_assert (flag_ira || reload_completed);
557 else
558 add_to_hard_reg_set (to, PSEUDO_REGNO_MODE (regno), r);
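/* Illustrative example (numbers invented): if pseudo 100 is set in FROM,
   reg_renumber[100] is 6, and its mode occupies two hard registers, then
   hard regs 6 and 7 are both added to TO.  */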
562 /* Replace all pseudos found in LOC with their corresponding
563 equivalences. */
565 static void
566 replace_pseudos_in (rtx *loc, enum machine_mode mem_mode, rtx usage)
568 rtx x = *loc;
569 enum rtx_code code;
570 const char *fmt;
571 int i, j;
573 if (! x)
574 return;
576 code = GET_CODE (x);
577 if (code == REG)
579 unsigned int regno = REGNO (x);
581 if (regno < FIRST_PSEUDO_REGISTER)
582 return;
584 x = eliminate_regs (x, mem_mode, usage);
585 if (x != *loc)
587 *loc = x;
588 replace_pseudos_in (loc, mem_mode, usage);
589 return;
592 if (reg_equiv_constant[regno])
593 *loc = reg_equiv_constant[regno];
594 else if (reg_equiv_mem[regno])
595 *loc = reg_equiv_mem[regno];
596 else if (reg_equiv_address[regno])
597 *loc = gen_rtx_MEM (GET_MODE (x), reg_equiv_address[regno]);
598 else
600 gcc_assert (!REG_P (regno_reg_rtx[regno])
601 || REGNO (regno_reg_rtx[regno]) != regno);
602 *loc = regno_reg_rtx[regno];
605 return;
607 else if (code == MEM)
609 replace_pseudos_in (& XEXP (x, 0), GET_MODE (x), usage);
610 return;
613 /* Process each of our operands recursively. */
614 fmt = GET_RTX_FORMAT (code);
615 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
616 if (*fmt == 'e')
617 replace_pseudos_in (&XEXP (x, i), mem_mode, usage);
618 else if (*fmt == 'E')
619 for (j = 0; j < XVECLEN (x, i); j++)
620 replace_pseudos_in (& XVECEXP (x, i, j), mem_mode, usage);
623 /* Determine if the current function has an exception receiver block
624 that reaches the exit block via non-exceptional edges */
626 static bool
627 has_nonexceptional_receiver (void)
629 edge e;
630 edge_iterator ei;
631 basic_block *tos, *worklist, bb;
633 /* If we're not optimizing, then just err on the safe side. */
634 if (!optimize)
635 return true;
637 /* First determine which blocks can reach exit via normal paths. */
638 tos = worklist = xmalloc (sizeof (basic_block) * (n_basic_blocks + 1));
640 FOR_EACH_BB (bb)
641 bb->flags &= ~BB_REACHABLE;
643 /* Place the exit block on our worklist. */
644 EXIT_BLOCK_PTR->flags |= BB_REACHABLE;
645 *tos++ = EXIT_BLOCK_PTR;
647 /* Iterate: find everything reachable from what we've already seen. */
648 while (tos != worklist)
650 bb = *--tos;
652 FOR_EACH_EDGE (e, ei, bb->preds)
653 if (!(e->flags & EDGE_ABNORMAL))
655 basic_block src = e->src;
657 if (!(src->flags & BB_REACHABLE))
659 src->flags |= BB_REACHABLE;
660 *tos++ = src;
664 free (worklist);
666 /* Now see if there's a reachable block with an exceptional incoming
667 edge. */
668 FOR_EACH_BB (bb)
669 if (bb->flags & BB_REACHABLE)
670 FOR_EACH_EDGE (e, ei, bb->preds)
671 if (e->flags & EDGE_ABNORMAL)
672 return true;
674 /* No exceptional block reached exit unexceptionally. */
675 return false;
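/* reload () below uses this, together with
   current_function_has_nonlocal_label, to decide whether
   current_function_saves_all_registers must be set.  */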
679 /* Global variables used by reload and its subroutines. */
681 /* Set during calculate_needs if an insn needs register elimination. */
682 static int something_needs_elimination;
683 /* Set during calculate_needs if an insn needs an operand changed. */
684 static int something_needs_operands_changed;
686 /* Nonzero means we couldn't get enough spill regs. */
687 static int failure;
689 /* Temporary array of pseudo-register number. */
690 static int *temp_pseudo_reg_arr;
 692 /* This function is used to sort pseudos according to their usage
 693 frequencies, putting the most frequently used ones first. */
694 static int
695 pseudo_reg_compare (const void *v1p, const void *v2p)
697 int regno1 = *(int *) v1p;
698 int regno2 = *(int *) v2p;
699 int diff;
701 if ((diff = REG_FREQ (regno2) - REG_FREQ (regno1)) != 0)
702 return diff;
703 return regno1 - regno2;
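/* When flag_ira is set, reload () below qsorts temp_pseudo_reg_arr with
   this comparator before walking it with alter_reg, so stack slots are
   assigned in an order determined by usage frequency.  */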
706 /* Main entry point for the reload pass.
708 FIRST is the first insn of the function being compiled.
710 GLOBAL nonzero means we were called from global_alloc
711 and should attempt to reallocate any pseudoregs that we
712 displace from hard regs we will use for reloads.
713 If GLOBAL is zero, we do not have enough information to do that,
714 so any pseudo reg that is spilled must go to the stack.
716 Return value is nonzero if reload failed
717 and we must not do any more for this function. */
 719 int
 720 reload (rtx first, int global)
722 int i, n;
723 rtx insn;
724 struct elim_table *ep;
725 basic_block bb;
727 /* Make sure even insns with volatile mem refs are recognizable. */
728 init_recog ();
730 failure = 0;
732 reload_firstobj = obstack_alloc (&reload_obstack, 0);
734 /* Make sure that the last insn in the chain
735 is not something that needs reloading. */
736 emit_note (NOTE_INSN_DELETED);
738 /* Enable find_equiv_reg to distinguish insns made by reload. */
739 reload_first_uid = get_max_uid ();
741 #ifdef SECONDARY_MEMORY_NEEDED
742 /* Initialize the secondary memory table. */
743 clear_secondary_mem ();
744 #endif
746 /* We don't have a stack slot for any spill reg yet. */
747 memset (spill_stack_slot, 0, sizeof spill_stack_slot);
748 memset (spill_stack_slot_width, 0, sizeof spill_stack_slot_width);
750 /* Initialize the save area information for caller-save, in case some
751 are needed. */
752 init_save_areas ();
754 /* Compute which hard registers are now in use
755 as homes for pseudo registers.
756 This is done here rather than (eg) in global_alloc
757 because this point is reached even if not optimizing. */
758 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
759 mark_home_live (i);
761 /* A function that has a nonlocal label that can reach the exit
762 block via non-exceptional paths must save all call-saved
763 registers. */
764 if (current_function_has_nonlocal_label
765 && has_nonexceptional_receiver ())
766 current_function_saves_all_registers = 1;
768 if (current_function_saves_all_registers)
769 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
770 if (! call_used_regs[i] && ! fixed_regs[i] && ! LOCAL_REGNO (i))
771 df_set_regs_ever_live (i, true);
773 /* Find all the pseudo registers that didn't get hard regs
774 but do have known equivalent constants or memory slots.
775 These include parameters (known equivalent to parameter slots)
776 and cse'd or loop-moved constant memory addresses.
778 Record constant equivalents in reg_equiv_constant
779 so they will be substituted by find_reloads.
780 Record memory equivalents in reg_mem_equiv so they can
781 be substituted eventually by altering the REG-rtx's. */
783 reg_equiv_constant = XCNEWVEC (rtx, max_regno);
784 reg_equiv_invariant = XCNEWVEC (rtx, max_regno);
785 reg_equiv_mem = XCNEWVEC (rtx, max_regno);
786 reg_equiv_alt_mem_list = XCNEWVEC (rtx, max_regno);
787 reg_equiv_address = XCNEWVEC (rtx, max_regno);
788 reg_max_ref_width = XCNEWVEC (unsigned int, max_regno);
789 reg_old_renumber = XCNEWVEC (short, max_regno);
790 memcpy (reg_old_renumber, reg_renumber, max_regno * sizeof (short));
791 pseudo_forbidden_regs = XNEWVEC (HARD_REG_SET, max_regno);
792 pseudo_previous_regs = XCNEWVEC (HARD_REG_SET, max_regno);
794 CLEAR_HARD_REG_SET (bad_spill_regs_global);
796 /* Look for REG_EQUIV notes; record what each pseudo is equivalent
797 to. Also find all paradoxical subregs and find largest such for
798 each pseudo. */
800 num_eliminable_invariants = 0;
801 for (insn = first; insn; insn = NEXT_INSN (insn))
803 rtx set = single_set (insn);
805 /* We may introduce USEs that we want to remove at the end, so
806 we'll mark them with QImode. Make sure there are no
807 previously-marked insns left by say regmove. */
808 if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
809 && GET_MODE (insn) != VOIDmode)
810 PUT_MODE (insn, VOIDmode);
812 if (INSN_P (insn))
813 scan_paradoxical_subregs (PATTERN (insn));
815 if (set != 0 && REG_P (SET_DEST (set)))
817 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
818 rtx x;
820 if (! note)
821 continue;
823 i = REGNO (SET_DEST (set));
824 x = XEXP (note, 0);
826 if (i <= LAST_VIRTUAL_REGISTER)
827 continue;
829 if (! function_invariant_p (x)
830 || ! flag_pic
831 /* A function invariant is often CONSTANT_P but may
832 include a register. We promise to only pass
833 CONSTANT_P objects to LEGITIMATE_PIC_OPERAND_P. */
834 || (CONSTANT_P (x)
835 && LEGITIMATE_PIC_OPERAND_P (x)))
837 /* It can happen that a REG_EQUIV note contains a MEM
838 that is not a legitimate memory operand. As later
839 stages of reload assume that all addresses found
840 in the reg_equiv_* arrays were originally legitimate,
841 we ignore such REG_EQUIV notes. */
842 if (memory_operand (x, VOIDmode))
844 /* Always unshare the equivalence, so we can
845 substitute into this insn without touching the
846 equivalence. */
847 reg_equiv_memory_loc[i] = copy_rtx (x);
849 else if (function_invariant_p (x))
851 if (GET_CODE (x) == PLUS)
853 /* This is PLUS of frame pointer and a constant,
854 and might be shared. Unshare it. */
855 reg_equiv_invariant[i] = copy_rtx (x);
856 num_eliminable_invariants++;
858 else if (x == frame_pointer_rtx || x == arg_pointer_rtx)
860 reg_equiv_invariant[i] = x;
861 num_eliminable_invariants++;
863 else if (LEGITIMATE_CONSTANT_P (x))
864 reg_equiv_constant[i] = x;
865 else
867 reg_equiv_memory_loc[i]
868 = force_const_mem (GET_MODE (SET_DEST (set)), x);
869 if (! reg_equiv_memory_loc[i])
870 reg_equiv_init[i] = NULL_RTX;
873 else
875 reg_equiv_init[i] = NULL_RTX;
876 continue;
879 else
880 reg_equiv_init[i] = NULL_RTX;
884 if (dump_file)
885 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
886 if (reg_equiv_init[i])
888 fprintf (dump_file, "init_insns for %u: ", i);
889 print_inline_rtx (dump_file, reg_equiv_init[i], 20);
890 fprintf (dump_file, "\n");
893 init_elim_table ();
895 first_label_num = get_first_label_num ();
896 num_labels = max_label_num () - first_label_num;
898 /* Allocate the tables used to store offset information at labels. */
899 /* We used to use alloca here, but the size of what it would try to
900 allocate would occasionally cause it to exceed the stack limit and
901 cause a core dump. */
902 offsets_known_at = XNEWVEC (char, num_labels);
903 offsets_at = (HOST_WIDE_INT (*)[NUM_ELIMINABLE_REGS]) xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (HOST_WIDE_INT));
905 /* Alter each pseudo-reg rtx to contain its hard reg number. Assign
906 stack slots to the pseudos that lack hard regs or equivalents.
907 Do not touch virtual registers. */
909 temp_pseudo_reg_arr = XNEWVEC (int, max_regno - LAST_VIRTUAL_REGISTER - 1);
910 for (n = 0, i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
911 temp_pseudo_reg_arr [n++] = i;
913 if (flag_ira)
914 qsort (temp_pseudo_reg_arr, n, sizeof (int), pseudo_reg_compare);
915 if (frame_pointer_needed || ! flag_ira)
916 for (i = 0; i < n; i++)
917 alter_reg (temp_pseudo_reg_arr [i], -1, false);
918 else
919 for (i = n - 1; i >= 0; i--)
920 alter_reg (temp_pseudo_reg_arr [i], -1, false);
922 /* If we have some registers we think can be eliminated, scan all insns to
923 see if there is an insn that sets one of these registers to something
924 other than itself plus a constant. If so, the register cannot be
925 eliminated. Doing this scan here eliminates an extra pass through the
926 main reload loop in the most common case where register elimination
927 cannot be done. */
928 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
929 if (INSN_P (insn))
930 note_stores (PATTERN (insn), mark_not_eliminable, NULL);
932 maybe_fix_stack_asms ();
934 insns_need_reload = 0;
935 something_needs_elimination = 0;
937 /* Initialize to -1, which means take the first spill register. */
938 last_spill_reg = -1;
940 /* Spill any hard regs that we know we can't eliminate. */
941 CLEAR_HARD_REG_SET (used_spill_regs);
942 /* There can be multiple ways to eliminate a register;
943 they should be listed adjacently.
944 Elimination for any register fails only if all possible ways fail. */
945 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; )
947 int from = ep->from;
948 int can_eliminate = 0;
951 can_eliminate |= ep->can_eliminate;
952 ep++;
954 while (ep < &reg_eliminate[NUM_ELIMINABLE_REGS] && ep->from == from);
955 if (! can_eliminate)
956 spill_hard_reg (from, 1);
959 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
960 if (frame_pointer_needed)
961 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, 1);
962 #endif
963 finish_spills (global);
965 /* From now on, we may need to generate moves differently. We may also
966 allow modifications of insns which cause them to not be recognized.
967 Any such modifications will be cleaned up during reload itself. */
968 reload_in_progress = 1;
970 /* This loop scans the entire function each go-round
971 and repeats until one repetition spills no additional hard regs. */
972 for (;;)
974 int something_changed;
975 int did_spill;
976 HOST_WIDE_INT starting_frame_size;
978 starting_frame_size = get_frame_size ();
980 set_initial_elim_offsets ();
981 set_initial_label_offsets ();
983 /* For each pseudo register that has an equivalent location defined,
984 try to eliminate any eliminable registers (such as the frame pointer)
985 assuming initial offsets for the replacement register, which
986 is the normal case.
988 If the resulting location is directly addressable, substitute
989 the MEM we just got directly for the old REG.
991 If it is not addressable but is a constant or the sum of a hard reg
992 and constant, it is probably not addressable because the constant is
993 out of range, in that case record the address; we will generate
994 hairy code to compute the address in a register each time it is
995 needed. Similarly if it is a hard register, but one that is not
996 valid as an address register.
998 If the location is not addressable, but does not have one of the
999 above forms, assign a stack slot. We have to do this to avoid the
1000 potential of producing lots of reloads if, e.g., a location involves
1001 a pseudo that didn't get a hard register and has an equivalent memory
1002 location that also involves a pseudo that didn't get a hard register.
1004 Perhaps at some point we will improve reload_when_needed handling
1005 so this problem goes away. But that's very hairy. */
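/* For instance (the displacement is hypothetical): if the equivalent
   location of pseudo I eliminates to

	(mem:SI (plus:SI (reg:SI sp) (const_int 40000)))

   and that displacement is not a valid address offset on the target,
   only the address is recorded, in reg_equiv_address[I]; find_reloads
   will later compute it into a register each time it is needed.  */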
1007 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1008 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
1010 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
1012 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
1013 XEXP (x, 0)))
1014 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
1015 else if (CONSTANT_P (XEXP (x, 0))
1016 || (REG_P (XEXP (x, 0))
1017 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
1018 || (GET_CODE (XEXP (x, 0)) == PLUS
1019 && REG_P (XEXP (XEXP (x, 0), 0))
1020 && (REGNO (XEXP (XEXP (x, 0), 0))
1021 < FIRST_PSEUDO_REGISTER)
1022 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
1023 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
1024 else
1026 /* Make a new stack slot. Then indicate that something
1027 changed so we go back and recompute offsets for
1028 eliminable registers because the allocation of memory
1029 below might change some offset. reg_equiv_{mem,address}
1030 will be set up for this pseudo on the next pass around
1031 the loop. */
1032 reg_equiv_memory_loc[i] = 0;
1033 reg_equiv_init[i] = 0;
1034 alter_reg (i, -1, true);
1038 if (caller_save_needed)
1039 setup_save_areas ();
1041 /* If we allocated another stack slot, redo elimination bookkeeping. */
1042 if (starting_frame_size != get_frame_size ())
1043 continue;
1044 if (starting_frame_size && cfun->stack_alignment_needed)
1046 /* If we have a stack frame, we must align it now. The
1047 stack size may be a part of the offset computation for
1048 register elimination. So if this changes the stack size,
1049 then repeat the elimination bookkeeping. We don't
1050 realign when there is no stack, as that will cause a
1051 stack frame when none is needed should
1052 STARTING_FRAME_OFFSET not be already aligned to
1053 STACK_BOUNDARY. */
1054 assign_stack_local (BLKmode, 0, cfun->stack_alignment_needed);
1055 if (starting_frame_size != get_frame_size ())
1056 continue;
1059 if (caller_save_needed)
1061 save_call_clobbered_regs ();
1062 /* That might have allocated new insn_chain structures. */
1063 reload_firstobj = obstack_alloc (&reload_obstack, 0);
1066 calculate_needs_all_insns (global);
1068 if (! flag_ira)
1069 CLEAR_REG_SET (&spilled_pseudos);
1070 did_spill = 0;
1072 something_changed = 0;
1074 /* If we allocated any new memory locations, make another pass
1075 since it might have changed elimination offsets. */
1076 if (starting_frame_size != get_frame_size ())
1077 something_changed = 1;
1079 /* Even if the frame size remained the same, we might still have
1080 changed elimination offsets, e.g. if find_reloads called
1081 force_const_mem requiring the back end to allocate a constant
1082 pool base register that needs to be saved on the stack. */
1083 else if (!verify_initial_elim_offsets ())
1084 something_changed = 1;
1087 HARD_REG_SET to_spill;
1088 CLEAR_HARD_REG_SET (to_spill);
1089 update_eliminables (&to_spill);
1090 AND_COMPL_HARD_REG_SET (used_spill_regs, to_spill);
1092 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1093 if (TEST_HARD_REG_BIT (to_spill, i))
1095 spill_hard_reg (i, 1);
1096 did_spill = 1;
1098 /* Regardless of the state of spills, if we previously had
1099 a register that we thought we could eliminate, but now can
1100 not eliminate, we must run another pass.
1102 Consider pseudos which have an entry in reg_equiv_* which
1103 reference an eliminable register. We must make another pass
1104 to update reg_equiv_* so that we do not substitute in the
1105 old value from when we thought the elimination could be
1106 performed. */
1107 something_changed = 1;
1111 select_reload_regs ();
1112 if (failure)
1113 goto failed;
1115 if (insns_need_reload != 0 || did_spill)
1116 something_changed |= finish_spills (global);
1118 if (! something_changed)
1119 break;
1121 if (caller_save_needed)
1122 delete_caller_save_insns ();
1124 obstack_free (&reload_obstack, reload_firstobj);
1127 if (flag_ira)
1128 sort_insn_chain (FALSE);
1130 /* If global-alloc was run, notify it of any register eliminations we have
1131 done. */
1132 if (global)
1133 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1134 if (ep->can_eliminate)
1135 mark_elimination (ep->from, ep->to);
1137 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1138 If that insn didn't set the register (i.e., it copied the register to
1139 memory), just delete that insn instead of the equivalencing insn plus
1140 anything now dead. If we call delete_dead_insn on that insn, we may
1141 delete the insn that actually sets the register if the register dies
1142 there and that is incorrect. */
1144 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1146 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0)
1148 rtx list;
1149 for (list = reg_equiv_init[i]; list; list = XEXP (list, 1))
1151 rtx equiv_insn = XEXP (list, 0);
1153 /* If we already deleted the insn or if it may trap, we can't
1154 delete it. The latter case shouldn't happen, but can
1155 if an insn has a variable address, gets a REG_EH_REGION
1156 note added to it, and then gets converted into a load
1157 from a constant address. */
1158 if (NOTE_P (equiv_insn)
1159 || can_throw_internal (equiv_insn))
1161 else if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn)))
1162 delete_dead_insn (equiv_insn);
1163 else
1164 SET_INSN_DELETED (equiv_insn);
1169 /* Use the reload registers where necessary
1170 by generating move instructions to move the must-be-register
1171 values into or out of the reload registers. */
1173 if (insns_need_reload != 0 || something_needs_elimination
1174 || something_needs_operands_changed)
1176 HOST_WIDE_INT old_frame_size = get_frame_size ();
1178 reload_as_needed (global);
1180 gcc_assert (old_frame_size == get_frame_size ());
1182 gcc_assert (verify_initial_elim_offsets ());
1185 /* If we were able to eliminate the frame pointer, show that it is no
 1186 longer live at the start of any basic block. If it is live by
1187 virtue of being in a pseudo, that pseudo will be marked live
1188 and hence the frame pointer will be known to be live via that
1189 pseudo. */
1191 if (! frame_pointer_needed)
1192 FOR_EACH_BB (bb)
1193 bitmap_clear_bit (df_get_live_in (bb), HARD_FRAME_POINTER_REGNUM);
1195 /* Come here (with failure set nonzero) if we can't get enough spill
1196 regs. */
1197 failed:
1199 CLEAR_REG_SET (&spilled_pseudos);
1200 reload_in_progress = 0;
1202 /* Now eliminate all pseudo regs by modifying them into
1203 their equivalent memory references.
1204 The REG-rtx's for the pseudos are modified in place,
1205 so all insns that used to refer to them now refer to memory.
1207 For a reg that has a reg_equiv_address, all those insns
1208 were changed by reloading so that no insns refer to it any longer;
1209 but the DECL_RTL of a variable decl may refer to it,
1210 and if so this causes the debugging info to mention the variable. */
1212 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1214 rtx addr = 0;
1216 if (reg_equiv_mem[i])
1217 addr = XEXP (reg_equiv_mem[i], 0);
1219 if (reg_equiv_address[i])
1220 addr = reg_equiv_address[i];
1222 if (addr)
1224 if (reg_renumber[i] < 0)
1226 rtx reg = regno_reg_rtx[i];
1228 REG_USERVAR_P (reg) = 0;
1229 PUT_CODE (reg, MEM);
1230 XEXP (reg, 0) = addr;
1231 if (reg_equiv_memory_loc[i])
1232 MEM_COPY_ATTRIBUTES (reg, reg_equiv_memory_loc[i]);
1233 else
1235 MEM_IN_STRUCT_P (reg) = MEM_SCALAR_P (reg) = 0;
1236 MEM_ATTRS (reg) = 0;
1238 MEM_NOTRAP_P (reg) = 1;
1240 else if (reg_equiv_mem[i])
1241 XEXP (reg_equiv_mem[i], 0) = addr;
1245 /* We must set reload_completed now since the cleanup_subreg_operands call
1246 below will re-recognize each insn and reload may have generated insns
1247 which are only valid during and after reload. */
1248 reload_completed = 1;
1250 /* Make a pass over all the insns and delete all USEs which we inserted
1251 only to tag a REG_EQUAL note on them. Remove all REG_DEAD and REG_UNUSED
1252 notes. Delete all CLOBBER insns, except those that refer to the return
1253 value and the special mem:BLK CLOBBERs added to prevent the scheduler
1254 from misarranging variable-array code, and simplify (subreg (reg))
1255 operands. Also remove all REG_RETVAL and REG_LIBCALL notes since they
1256 are no longer useful or accurate. Strip and regenerate REG_INC notes
1257 that may have been moved around. */
1259 for (insn = first; insn; insn = NEXT_INSN (insn))
1260 if (INSN_P (insn))
1262 rtx *pnote;
1264 if (CALL_P (insn))
1266 HARD_REG_SET used_function_regs;
1268 get_call_invalidated_used_regs (insn, &used_function_regs, false);
1269 IOR_HARD_REG_SET (cfun->emit->call_used_regs, used_function_regs);
1270 replace_pseudos_in (& CALL_INSN_FUNCTION_USAGE (insn),
1271 VOIDmode, CALL_INSN_FUNCTION_USAGE (insn));
1274 if ((GET_CODE (PATTERN (insn)) == USE
1275 /* We mark with QImode USEs introduced by reload itself. */
1276 && (GET_MODE (insn) == QImode
1277 || find_reg_note (insn, REG_EQUAL, NULL_RTX)))
1278 || (GET_CODE (PATTERN (insn)) == CLOBBER
1279 && (!MEM_P (XEXP (PATTERN (insn), 0))
1280 || GET_MODE (XEXP (PATTERN (insn), 0)) != BLKmode
1281 || (GET_CODE (XEXP (XEXP (PATTERN (insn), 0), 0)) != SCRATCH
1282 && XEXP (XEXP (PATTERN (insn), 0), 0)
1283 != stack_pointer_rtx))
1284 && (!REG_P (XEXP (PATTERN (insn), 0))
1285 || ! REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))))
1287 delete_insn (insn);
1288 continue;
1291 /* Some CLOBBERs may survive until here and still reference unassigned
1292 pseudos with const equivalent, which may in turn cause ICE in later
1293 passes if the reference remains in place. */
1294 if (GET_CODE (PATTERN (insn)) == CLOBBER)
1295 replace_pseudos_in (& XEXP (PATTERN (insn), 0),
1296 VOIDmode, PATTERN (insn));
1298 /* Discard obvious no-ops, even without -O. This optimization
1299 is fast and doesn't interfere with debugging. */
1300 if (NONJUMP_INSN_P (insn)
1301 && GET_CODE (PATTERN (insn)) == SET
1302 && REG_P (SET_SRC (PATTERN (insn)))
1303 && REG_P (SET_DEST (PATTERN (insn)))
1304 && (REGNO (SET_SRC (PATTERN (insn)))
1305 == REGNO (SET_DEST (PATTERN (insn)))))
1307 delete_insn (insn);
1308 continue;
1311 pnote = &REG_NOTES (insn);
1312 while (*pnote != 0)
1314 if (REG_NOTE_KIND (*pnote) == REG_DEAD
1315 || REG_NOTE_KIND (*pnote) == REG_UNUSED
1316 || REG_NOTE_KIND (*pnote) == REG_INC
1317 || REG_NOTE_KIND (*pnote) == REG_RETVAL
1318 || REG_NOTE_KIND (*pnote) == REG_LIBCALL)
1319 *pnote = XEXP (*pnote, 1);
1320 else
1321 pnote = &XEXP (*pnote, 1);
1324 #ifdef AUTO_INC_DEC
1325 add_auto_inc_notes (insn, PATTERN (insn));
1326 #endif
1328 /* Simplify (subreg (reg)) if it appears as an operand. */
1329 cleanup_subreg_operands (insn);
1331 /* Clean up invalid ASMs so that they don't confuse later passes.
1332 See PR 21299. */
1333 if (asm_noperands (PATTERN (insn)) >= 0)
1335 extract_insn (insn);
1336 if (!constrain_operands (1))
1338 error_for_asm (insn,
1339 "%<asm%> operand has impossible constraints");
1340 delete_insn (insn);
1341 continue;
1346 /* If we are doing stack checking, give a warning if this function's
1347 frame size is larger than we expect. */
1348 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
1350 HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
1351 static int verbose_warned = 0;
1353 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1354 if (df_regs_ever_live_p (i) && ! fixed_regs[i] && call_used_regs[i])
1355 size += UNITS_PER_WORD;
1357 if (size > STACK_CHECK_MAX_FRAME_SIZE)
1359 warning (0, "frame size too large for reliable stack checking");
1360 if (! verbose_warned)
1362 warning (0, "try reducing the number of local variables");
1363 verbose_warned = 1;
1368 /* Indicate that we no longer have known memory locations or constants. */
1369 if (reg_equiv_constant)
1370 free (reg_equiv_constant);
1371 if (reg_equiv_invariant)
1372 free (reg_equiv_invariant);
1373 reg_equiv_constant = 0;
1374 reg_equiv_invariant = 0;
1375 VEC_free (rtx, gc, reg_equiv_memory_loc_vec);
1376 reg_equiv_memory_loc = 0;
1378 free (temp_pseudo_reg_arr);
1380 if (offsets_known_at)
1381 free (offsets_known_at);
1382 if (offsets_at)
1383 free (offsets_at);
1385 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1386 if (reg_equiv_alt_mem_list[i])
1387 free_EXPR_LIST_list (&reg_equiv_alt_mem_list[i]);
1388 free (reg_equiv_alt_mem_list);
1390 free (reg_equiv_mem);
1391 reg_equiv_init = 0;
1392 free (reg_equiv_address);
1393 free (reg_max_ref_width);
1394 free (reg_old_renumber);
1395 free (pseudo_previous_regs);
1396 free (pseudo_forbidden_regs);
1398 CLEAR_HARD_REG_SET (used_spill_regs);
1399 for (i = 0; i < n_spills; i++)
1400 SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
1402 /* Free all the insn_chain structures at once. */
1403 obstack_free (&reload_obstack, reload_startobj);
1404 unused_insn_chains = 0;
1405 fixup_abnormal_edges ();
1407 /* Replacing pseudos with their memory equivalents might have
1408 created shared rtx. Subsequent passes would get confused
1409 by this, so unshare everything here. */
1410 unshare_all_rtl_again (first);
1412 #ifdef STACK_BOUNDARY
1413 /* init_emit has set the alignment of the hard frame pointer
1414 to STACK_BOUNDARY. It is very likely no longer valid if
1415 the hard frame pointer was used for register allocation. */
1416 if (!frame_pointer_needed)
1417 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = BITS_PER_UNIT;
1418 #endif
1420 return failure;
1423 /* Yet another special case. Unfortunately, reg-stack forces people to
1424 write incorrect clobbers in asm statements. These clobbers must not
1425 cause the register to appear in bad_spill_regs, otherwise we'll call
1426 fatal_insn later. We clear the corresponding regnos in the live
1427 register sets to avoid this.
1428 The whole thing is rather sick, I'm afraid. */
1430 static void
1431 maybe_fix_stack_asms (void)
1433 #ifdef STACK_REGS
1434 const char *constraints[MAX_RECOG_OPERANDS];
1435 enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
1436 struct insn_chain *chain;
1438 for (chain = reload_insn_chain; chain != 0; chain = chain->next)
1440 int i, noperands;
1441 HARD_REG_SET clobbered, allowed;
1442 rtx pat;
1444 if (! INSN_P (chain->insn)
1445 || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
1446 continue;
1447 pat = PATTERN (chain->insn);
1448 if (GET_CODE (pat) != PARALLEL)
1449 continue;
1451 CLEAR_HARD_REG_SET (clobbered);
1452 CLEAR_HARD_REG_SET (allowed);
1454 /* First, make a mask of all stack regs that are clobbered. */
1455 for (i = 0; i < XVECLEN (pat, 0); i++)
1457 rtx t = XVECEXP (pat, 0, i);
1458 if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
1459 SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
1462 /* Get the operand values and constraints out of the insn. */
1463 decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc,
1464 constraints, operand_mode, NULL);
1466 /* For every operand, see what registers are allowed. */
1467 for (i = 0; i < noperands; i++)
1469 const char *p = constraints[i];
1470 /* For every alternative, we compute the class of registers allowed
1471 for reloading in CLS, and merge its contents into the reg set
1472 ALLOWED. */
1473 int cls = (int) NO_REGS;
1475 for (;;)
1477 char c = *p;
1479 if (c == '\0' || c == ',' || c == '#')
1481 /* End of one alternative - mark the regs in the current
1482 class, and reset the class. */
1483 IOR_HARD_REG_SET (allowed, reg_class_contents[cls]);
1484 cls = NO_REGS;
1485 p++;
1486 if (c == '#')
1487 do {
1488 c = *p++;
1489 } while (c != '\0' && c != ',');
1490 if (c == '\0')
1491 break;
1492 continue;
1495 switch (c)
1497 case '=': case '+': case '*': case '%': case '?': case '!':
1498 case '0': case '1': case '2': case '3': case '4': case 'm':
1499 case '<': case '>': case 'V': case 'o': case '&': case 'E':
1500 case 'F': case 's': case 'i': case 'n': case 'X': case 'I':
1501 case 'J': case 'K': case 'L': case 'M': case 'N': case 'O':
1502 case 'P':
1503 break;
1505 case 'p':
1506 cls = (int) reg_class_subunion[cls]
1507 [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
1508 break;
1510 case 'g':
1511 case 'r':
1512 cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
1513 break;
1515 default:
1516 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
1517 cls = (int) reg_class_subunion[cls]
1518 [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
1519 else
1520 cls = (int) reg_class_subunion[cls]
1521 [(int) REG_CLASS_FROM_CONSTRAINT (c, p)];
1523 p += CONSTRAINT_LEN (c, p);
1526 /* Those of the registers which are clobbered, but allowed by the
1527 constraints, must be usable as reload registers. So clear them
1528 out of the life information. */
1529 AND_HARD_REG_SET (allowed, clobbered);
1530 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1531 if (TEST_HARD_REG_BIT (allowed, i))
1533 CLEAR_REGNO_REG_SET (&chain->live_throughout, i);
1534 CLEAR_REGNO_REG_SET (&chain->dead_or_set, i);
1538 #endif
1541 /* Copy the global variables n_reloads and rld into the corresponding elts
1542 of CHAIN. */
1543 static void
1544 copy_reloads (struct insn_chain *chain)
1546 chain->n_reloads = n_reloads;
1547 chain->rld = obstack_alloc (&reload_obstack,
1548 n_reloads * sizeof (struct reload));
1549 memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1550 reload_insn_firstobj = obstack_alloc (&reload_obstack, 0);
1553 /* Walk the chain of insns, and determine for each whether it needs reloads
1554 and/or eliminations. Build the corresponding insns_need_reload list, and
1555 set something_needs_elimination as appropriate. */
1556 static void
1557 calculate_needs_all_insns (int global)
1559 struct insn_chain **pprev_reload = &insns_need_reload;
1560 struct insn_chain *chain, *next = 0;
1562 something_needs_elimination = 0;
1564 reload_insn_firstobj = obstack_alloc (&reload_obstack, 0);
1565 for (chain = reload_insn_chain; chain != 0; chain = next)
1567 rtx insn = chain->insn;
1569 next = chain->next;
1571 /* Clear out the shortcuts. */
1572 chain->n_reloads = 0;
1573 chain->need_elim = 0;
1574 chain->need_reload = 0;
1575 chain->need_operand_change = 0;
1577 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
1578 include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
1579 what effects this has on the known offsets at labels. */
1581 if (LABEL_P (insn) || JUMP_P (insn)
1582 || (INSN_P (insn) && REG_NOTES (insn) != 0))
1583 set_label_offsets (insn, insn, 0);
1585 if (INSN_P (insn))
1587 rtx old_body = PATTERN (insn);
1588 int old_code = INSN_CODE (insn);
1589 rtx old_notes = REG_NOTES (insn);
1590 int did_elimination = 0;
1591 int operands_changed = 0;
1592 rtx set = single_set (insn);
1594 /* Skip insns that only set an equivalence. */
1595 if (set && REG_P (SET_DEST (set))
1596 && reg_renumber[REGNO (SET_DEST (set))] < 0
1597 && (reg_equiv_constant[REGNO (SET_DEST (set))]
1598 || (reg_equiv_invariant[REGNO (SET_DEST (set))]))
1599 && reg_equiv_init[REGNO (SET_DEST (set))])
1600 continue;
1602 /* If needed, eliminate any eliminable registers. */
1603 if (num_eliminable || num_eliminable_invariants)
1604 did_elimination = eliminate_regs_in_insn (insn, 0);
1606 /* Analyze the instruction. */
1607 operands_changed = find_reloads (insn, 0, spill_indirect_levels,
1608 global, spill_reg_order);
1610 /* If a no-op set needs more than one reload, this is likely
1611 to be something that needs input address reloads. We
1612 can't get rid of this cleanly later, and it is of no use
1613 anyway, so discard it now.
1614 We only do this when expensive_optimizations is enabled,
1615 since this complements reload inheritance / output
1616 reload deletion, and it can make debugging harder. */
1617 if (flag_expensive_optimizations && n_reloads > 1)
1619 rtx set = single_set (insn);
1620 if (set
1622 ((SET_SRC (set) == SET_DEST (set)
1623 && REG_P (SET_SRC (set))
1624 && REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER)
1625 || (REG_P (SET_SRC (set)) && REG_P (SET_DEST (set))
1626 && reg_renumber [REGNO (SET_SRC (set))] < 0
1627 && reg_renumber [REGNO (SET_DEST (set))] < 0
1628 && reg_equiv_memory_loc[REGNO (SET_SRC (set))] != NULL
1629 && reg_equiv_memory_loc[REGNO (SET_DEST (set))] != NULL
1630 && rtx_equal_p (reg_equiv_memory_loc
1631 [REGNO (SET_SRC (set))],
1632 reg_equiv_memory_loc
1633 [REGNO (SET_DEST (set))]))))
1635 if (flag_ira)
1636 mark_memory_move_deletion (REGNO (SET_DEST (set)),
1637 REGNO (SET_SRC (set)));
1638 delete_insn (insn);
1639 /* Delete it from the reload chain. */
1640 if (chain->prev)
1641 chain->prev->next = next;
1642 else
1643 reload_insn_chain = next;
1644 if (next)
1645 next->prev = chain->prev;
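/* Return the detached chain element to the free list so it can be reused.  */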
1646 chain->next = unused_insn_chains;
1647 unused_insn_chains = chain;
1648 continue;
1651 if (num_eliminable)
1652 update_eliminable_offsets ();
1654 /* Remember for later shortcuts which insns had any reloads or
1655 register eliminations. */
1656 chain->need_elim = did_elimination;
1657 chain->need_reload = n_reloads > 0;
1658 chain->need_operand_change = operands_changed;
1660 /* Discard any register replacements done. */
1661 if (did_elimination)
1663 obstack_free (&reload_obstack, reload_insn_firstobj);
1664 PATTERN (insn) = old_body;
1665 INSN_CODE (insn) = old_code;
1666 REG_NOTES (insn) = old_notes;
1667 something_needs_elimination = 1;
1670 something_needs_operands_changed |= operands_changed;
1672 if (n_reloads != 0)
1674 copy_reloads (chain);
1675 *pprev_reload = chain;
1676 pprev_reload = &chain->next_need_reload;
1680 *pprev_reload = 0;
1683 /* Comparison function for qsort to decide which of two reloads
1684 should be handled first. *P1 and *P2 are the reload numbers. */
1686 static int
1687 reload_reg_class_lower (const void *r1p, const void *r2p)
1689 int r1 = *(const short *) r1p, r2 = *(const short *) r2p;
1690 int t;
1692 /* Consider required reloads before optional ones. */
1693 t = rld[r1].optional - rld[r2].optional;
1694 if (t != 0)
1695 return t;
1697 /* Count all solitary classes before non-solitary ones. */
1698 t = ((reg_class_size[(int) rld[r2].class] == 1)
1699 - (reg_class_size[(int) rld[r1].class] == 1));
1700 if (t != 0)
1701 return t;
1703 /* Aside from solitaires, consider all multi-reg groups first. */
1704 t = rld[r2].nregs - rld[r1].nregs;
1705 if (t != 0)
1706 return t;
1708 /* Consider reloads in order of increasing reg-class number. */
1709 t = (int) rld[r1].class - (int) rld[r2].class;
1710 if (t != 0)
1711 return t;
1713 /* If reloads are equally urgent, sort by reload number,
1714 so that the results of qsort leave nothing to chance. */
1715 return r1 - r2;
1718 /* The cost of spilling each hard reg. */
1719 static int spill_cost[FIRST_PSEUDO_REGISTER];
1721 /* When spilling multiple hard registers, we use SPILL_COST for the first
1722 spilled hard reg and SPILL_ADD_COST for subsequent regs. SPILL_ADD_COST
1723 is charged only to the first hard reg of a multi-reg pseudo. */
1724 static int spill_add_cost[FIRST_PSEUDO_REGISTER];
1726 /* Update the spill cost arrays, considering that pseudo REG is live. */
1728 static void
1729 count_pseudo (int reg)
1731 int freq = REG_FREQ (reg);
1732 int r = reg_renumber[reg];
1733 int nregs;
1735 if (REGNO_REG_SET_P (&pseudos_counted, reg)
1736 || REGNO_REG_SET_P (&spilled_pseudos, reg)
1737 || (flag_ira && r < 0))
1738 return;
1740 SET_REGNO_REG_SET (&pseudos_counted, reg);
1742 gcc_assert (r >= 0);
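/* The pseudo's frequency is added once to SPILL_ADD_COST for its first
   hard reg, and to SPILL_COST for every hard reg it occupies.  */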
1744 spill_add_cost[r] += freq;
1746 nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
1747 while (nregs-- > 0)
1748 spill_cost[r + nregs] += freq;
1751 /* Calculate the SPILL_COST and SPILL_ADD_COST arrays and determine the
1752 contents of BAD_SPILL_REGS for the insn described by CHAIN. */
1754 static void
1755 order_regs_for_reload (struct insn_chain *chain)
1757 unsigned i;
1758 HARD_REG_SET used_by_pseudos;
1759 HARD_REG_SET used_by_pseudos2;
1760 reg_set_iterator rsi;
1762 COPY_HARD_REG_SET (bad_spill_regs, fixed_reg_set);
1764 memset (spill_cost, 0, sizeof spill_cost);
1765 memset (spill_add_cost, 0, sizeof spill_add_cost);
1767 /* Count number of uses of each hard reg by pseudo regs allocated to it
1768 and then order them by decreasing use. First exclude hard registers
1769 that are live in or across this insn. */
1771 REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
1772 REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
1773 IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos);
1774 IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos2);
1776 /* Now find out which pseudos are live in or set by this insn, and
1777 update the spill costs of the hard regs they occupy. */
1778 CLEAR_REG_SET (&pseudos_counted);
1780 EXECUTE_IF_SET_IN_REG_SET
1781 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
1783 count_pseudo (i);
1785 EXECUTE_IF_SET_IN_REG_SET
1786 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
1788 count_pseudo (i);
1790 CLEAR_REG_SET (&pseudos_counted);
1793 /* Vector of reload-numbers showing the order in which the reloads should
1794 be processed. */
1795 static short reload_order[MAX_RELOADS];
1797 /* This is used to keep track of the spill regs used in one insn. */
1798 static HARD_REG_SET used_spill_regs_local;
1800 /* We decided to spill hard register SPILLED, which has a size of
1801 SPILLED_NREGS. Determine how pseudo REG, which is live during the insn,
1802 is affected. We will add it to SPILLED_PSEUDOS if necessary, and we will
1803 update SPILL_COST/SPILL_ADD_COST. */
1805 static void
1806 count_spilled_pseudo (int spilled, int spilled_nregs, int reg)
1808 int freq = REG_FREQ (reg);
1809 int r = reg_renumber[reg];
1810 int nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
1812 if ((flag_ira && r < 0)
1813 || REGNO_REG_SET_P (&spilled_pseudos, reg)
1814 || spilled + spilled_nregs <= r || r + nregs <= spilled)
1815 return;
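/* REG overlaps the registers being spilled, so it will be spilled as
   well; remove its frequency from the cost arrays.  */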
1817 SET_REGNO_REG_SET (&spilled_pseudos, reg);
1819 spill_add_cost[r] -= freq;
1820 while (nregs-- > 0)
1821 spill_cost[r + nregs] -= freq;
1824 /* Find reload register to use for reload number ORDER. */
1826 static int
1827 find_reg (struct insn_chain *chain, int order)
1829 int rnum = reload_order[order];
1830 struct reload *rl = rld + rnum;
1831 int best_cost = INT_MAX;
1832 int best_reg = -1;
1833 unsigned int i, j;
1834 int k;
1835 HARD_REG_SET not_usable;
1836 HARD_REG_SET used_by_other_reload;
1837 reg_set_iterator rsi;
1839 COPY_HARD_REG_SET (not_usable, bad_spill_regs);
1840 IOR_HARD_REG_SET (not_usable, bad_spill_regs_global);
1841 IOR_COMPL_HARD_REG_SET (not_usable, reg_class_contents[rl->class]);
1843 CLEAR_HARD_REG_SET (used_by_other_reload);
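/* Collect the hard regs already chosen for earlier reloads of this insn
   that conflict with reload RNUM; they must not be reused.  */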
1844 for (k = 0; k < order; k++)
1846 int other = reload_order[k];
1848 if (rld[other].regno >= 0 && reloads_conflict (other, rnum))
1849 for (j = 0; j < rld[other].nregs; j++)
1850 SET_HARD_REG_BIT (used_by_other_reload, rld[other].regno + j);
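/* Scan the hard registers for the cheapest usable candidate that can
   hold this reload in its mode.  */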
1853 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1855 unsigned int regno = i;
1857 if (! TEST_HARD_REG_BIT (not_usable, regno)
1858 && ! TEST_HARD_REG_BIT (used_by_other_reload, regno)
1859 && HARD_REGNO_MODE_OK (regno, rl->mode))
1861 int this_cost = spill_cost[regno];
1862 int ok = 1;
1863 unsigned int this_nregs = hard_regno_nregs[regno][rl->mode];
1865 for (j = 1; j < this_nregs; j++)
1867 this_cost += spill_add_cost[regno + j];
1868 if ((TEST_HARD_REG_BIT (not_usable, regno + j))
1869 || TEST_HARD_REG_BIT (used_by_other_reload, regno + j))
1870 ok = 0;
1872 if (! ok)
1873 continue;
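/* Slightly prefer a register that already holds the reload's input or
   output value, since reusing it may save a move.  */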
1874 if (rl->in && REG_P (rl->in) && REGNO (rl->in) == regno)
1875 this_cost--;
1876 if (rl->out && REG_P (rl->out) && REGNO (rl->out) == regno)
1877 this_cost--;
1878 if (this_cost < best_cost
1879 /* Among registers with equal cost, prefer caller-saved ones, or
1880 use REG_ALLOC_ORDER if it is defined. */
1881 || (this_cost == best_cost
1882 #ifdef REG_ALLOC_ORDER
1883 && (inv_reg_alloc_order[regno]
1884 < inv_reg_alloc_order[best_reg])
1885 #else
1886 && call_used_regs[regno]
1887 && ! call_used_regs[best_reg]
1888 #endif
1891 best_reg = regno;
1892 best_cost = this_cost;
1896 if (best_reg == -1)
1897 return 0;
1899 if (dump_file)
1900 fprintf (dump_file, "Using reg %d for reload %d\n", best_reg, rnum);
1902 rl->nregs = hard_regno_nregs[best_reg][rl->mode];
1903 rl->regno = best_reg;
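/* Any pseudo live in this insn whose hard registers overlap BEST_REG
   must now be spilled; update the cost arrays accordingly.  */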
1905 EXECUTE_IF_SET_IN_REG_SET
1906 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, j, rsi)
1908 count_spilled_pseudo (best_reg, rl->nregs, j);
1911 EXECUTE_IF_SET_IN_REG_SET
1912 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, j, rsi)
1914 count_spilled_pseudo (best_reg, rl->nregs, j);
1917 for (i = 0; i < rl->nregs; i++)
1919 gcc_assert (spill_cost[best_reg + i] == 0);
1920 gcc_assert (spill_add_cost[best_reg + i] == 0);
1921 SET_HARD_REG_BIT (used_spill_regs_local, best_reg + i);
1923 return 1;
1926 /* Find more reload regs to satisfy the remaining need of an insn, which
1927 is given by CHAIN.
1928 Do it by ascending class number, since otherwise a reg
1929 might be spilled for a big class and might fail to count
1930 for a smaller class even though it belongs to that class. */
1932 static void
1933 find_reload_regs (struct insn_chain *chain)
1935 int i;
1937 /* In order to be certain of getting the registers we need,
1938 we must sort the reloads into order of increasing register class.
1939 Then our grabbing of reload registers will parallel the process
1940 that provided the reload registers. */
1941 for (i = 0; i < chain->n_reloads; i++)
1943 /* Show whether this reload already has a hard reg. */
1944 if (chain->rld[i].reg_rtx)
1946 int regno = REGNO (chain->rld[i].reg_rtx);
1947 chain->rld[i].regno = regno;
1948 chain->rld[i].nregs
1949 = hard_regno_nregs[regno][GET_MODE (chain->rld[i].reg_rtx)];
1951 else
1952 chain->rld[i].regno = -1;
1953 reload_order[i] = i;
1956 n_reloads = chain->n_reloads;
1957 memcpy (rld, chain->rld, n_reloads * sizeof (struct reload));
1959 CLEAR_HARD_REG_SET (used_spill_regs_local);
1961 if (dump_file)
1962 fprintf (dump_file, "Spilling for insn %d.\n", INSN_UID (chain->insn));
1964 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
1966 /* Compute the order of preference for hard registers to spill. */
1968 order_regs_for_reload (chain);
1970 for (i = 0; i < n_reloads; i++)
1972 int r = reload_order[i];
1974 /* Ignore reloads that got marked inoperative. */
1975 if ((rld[r].out != 0 || rld[r].in != 0 || rld[r].secondary_p)
1976 && ! rld[r].optional
1977 && rld[r].regno == -1)
1978 if (! find_reg (chain, i))
1980 if (dump_file)
1981 fprintf (dump_file, "reload failure for reload %d\n", r);
1982 spill_failure (chain->insn, rld[r].class);
1983 failure = 1;
1984 return;
1988 COPY_HARD_REG_SET (chain->used_spill_regs, used_spill_regs_local);
1989 IOR_HARD_REG_SET (used_spill_regs, used_spill_regs_local);
1991 memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1994 static void
1995 select_reload_regs (void)
1997 struct insn_chain *chain;
1999 /* Try to satisfy the needs for each insn. */
2000 for (chain = insns_need_reload; chain != 0;
2001 chain = chain->next_need_reload)
2002 find_reload_regs (chain);
2005 /* Delete all insns that were inserted by emit_caller_save_insns during
2006 this iteration. */
2007 static void
2008 delete_caller_save_insns (void)
2010 struct insn_chain *c = reload_insn_chain;
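/* Walk the chain, splicing out and deleting every element that was
   created for a caller-save insn and putting it back on the free list.  */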
2012 while (c != 0)
2014 while (c != 0 && c->is_caller_save_insn)
2016 struct insn_chain *next = c->next;
2017 rtx insn = c->insn;
2019 if (c == reload_insn_chain)
2020 reload_insn_chain = next;
2021 delete_insn (insn);
2023 if (next)
2024 next->prev = c->prev;
2025 if (c->prev)
2026 c->prev->next = next;
2027 c->next = unused_insn_chains;
2028 unused_insn_chains = c;
2029 c = next;
2031 if (c != 0)
2032 c = c->next;
2036 /* Handle the failure to find a register to spill.
2037 INSN should be one of the insns which needed this particular spill reg. */
2039 static void
2040 spill_failure (rtx insn, enum reg_class class)
2042 if (asm_noperands (PATTERN (insn)) >= 0)
2043 error_for_asm (insn, "can't find a register in class %qs while "
2044 "reloading %<asm%>",
2045 reg_class_names[class]);
2046 else
2048 error ("unable to find a register to spill in class %qs",
2049 reg_class_names[class]);
2051 if (dump_file)
2053 fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
2054 debug_reload_to_stream (dump_file);
2056 fatal_insn ("this is the insn:", insn);
2060 /* Delete an unneeded INSN and any previous insns whose sole purpose is loading
2061 data that is dead in INSN. */
2063 static void
2064 delete_dead_insn (rtx insn)
2066 rtx prev = prev_real_insn (insn);
2067 rtx prev_dest;
2069 /* If the previous insn sets a register that dies in our insn, delete it
2070 too. */
2071 if (prev && GET_CODE (PATTERN (prev)) == SET
2072 && (prev_dest = SET_DEST (PATTERN (prev)), REG_P (prev_dest))
2073 && reg_mentioned_p (prev_dest, PATTERN (insn))
2074 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
2075 && ! side_effects_p (SET_SRC (PATTERN (prev))))
2076 delete_dead_insn (prev);
2078 SET_INSN_DELETED (insn);
2081 /* Modify the home of pseudo-reg I.
2082 The new home is present in reg_renumber[I].
2084 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2085 or it may be -1, meaning there is none or it is not relevant.
2086 This is used so that all pseudos spilled from a given hard reg
2087 can share one stack slot. */
2089 static void
2090 alter_reg (int i, int from_reg, bool dont_share_p)
2092 /* When outputting an inline function, this can happen
2093 for a reg that isn't actually used. */
2094 if (regno_reg_rtx[i] == 0)
2095 return;
2097 /* If the reg got changed to a MEM at rtl-generation time,
2098 ignore it. */
2099 if (!REG_P (regno_reg_rtx[i]))
2100 return;
2102 /* Modify the reg-rtx to contain the new hard reg
2103 number or else to contain its pseudo reg number. */
2104 SET_REGNO (regno_reg_rtx[i],
2105 reg_renumber[i] >= 0 ? reg_renumber[i] : i);
2107 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2108 allocate a stack slot for it. */
2110 if (reg_renumber[i] < 0
2111 && REG_N_REFS (i) > 0
2112 && reg_equiv_constant[i] == 0
2113 && (reg_equiv_invariant[i] == 0 || reg_equiv_init[i] == 0)
2114 && reg_equiv_memory_loc[i] == 0)
2116 rtx x;
2117 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2118 unsigned int inherent_size = PSEUDO_REGNO_BYTES (i);
2119 unsigned int inherent_align = GET_MODE_ALIGNMENT (mode);
2120 unsigned int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2121 unsigned int min_align = reg_max_ref_width[i] * BITS_PER_UNIT;
2122 int adjust = 0;
2123 bool shared_p = false;
2125 if (flag_ira)
2126 SET_REGNO_REG_SET (&spilled_pseudos, i);
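/* When IRA is in use and sharing is allowed, first ask IRA for an
   existing stack slot this pseudo can reuse.  */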
2127 x = (dont_share_p || ! flag_ira
2128 ? NULL_RTX : reuse_stack_slot (i, inherent_size, total_size));
2129 if (x)
2130 shared_p = true;
2131 /* Each pseudo reg has an inherent size which comes from its own mode,
2132 and a total size which provides room for paradoxical subregs
2133 which refer to the pseudo reg in wider modes.
2135 We can use a slot already allocated if it provides both
2136 enough inherent space and enough total space.
2137 Otherwise, we allocate a new slot, making sure that it has no less
2138 inherent space, and no less total space, than the previous slot. */
2139 else if (from_reg == -1 || (! dont_share_p && flag_ira))
2141 alias_set_type alias_set = new_alias_set ();
2143 /* No known place to spill from => no slot to reuse. */
2144 x = assign_stack_local (mode, total_size,
2145 min_align > inherent_align
2146 || total_size > inherent_size ? -1 : 0);
2147 if (BYTES_BIG_ENDIAN)
2148 /* Cancel the big-endian correction done in assign_stack_local.
2149 Get the address of the beginning of the slot.
2150 This is so we can do a big-endian correction unconditionally
2151 below. */
2152 adjust = inherent_size - total_size;
2154 /* Nothing can alias this slot except this pseudo. */
2155 set_mem_alias_set (x, alias_set);
2156 dse_record_singleton_alias_set (alias_set, mode);
2158 if (! dont_share_p && flag_ira)
2159 mark_new_stack_slot (x, i, total_size);
2162 /* Reuse a stack slot if possible. */
2163 else if (spill_stack_slot[from_reg] != 0
2164 && spill_stack_slot_width[from_reg] >= total_size
2165 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2166 >= inherent_size)
2167 && MEM_ALIGN (spill_stack_slot[from_reg]) >= min_align)
2168 x = spill_stack_slot[from_reg];
2169 /* Allocate a bigger slot. */
2170 else
2172 /* Compute maximum size needed, both for inherent size
2173 and for total size. */
2174 rtx stack_slot;
2176 if (spill_stack_slot[from_reg])
2178 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2179 > inherent_size)
2180 mode = GET_MODE (spill_stack_slot[from_reg]);
2181 if (spill_stack_slot_width[from_reg] > total_size)
2182 total_size = spill_stack_slot_width[from_reg];
2183 if (MEM_ALIGN (spill_stack_slot[from_reg]) > min_align)
2184 min_align = MEM_ALIGN (spill_stack_slot[from_reg]);
2187 /* Make a slot with that size. */
2188 x = assign_stack_local (mode, total_size,
2189 min_align > inherent_align
2190 || total_size > inherent_size ? -1 : 0);
2191 stack_slot = x;
2193 /* All pseudos mapped to this slot can alias each other. */
2194 if (spill_stack_slot[from_reg])
2196 alias_set_type alias_set
2197 = MEM_ALIAS_SET (spill_stack_slot[from_reg]);
2198 set_mem_alias_set (x, alias_set);
2199 dse_invalidate_singleton_alias_set (alias_set);
2201 else
2203 alias_set_type alias_set = new_alias_set ();
2204 set_mem_alias_set (x, alias_set);
2205 dse_record_singleton_alias_set (alias_set, mode);
2208 if (BYTES_BIG_ENDIAN)
2210 /* Cancel the big-endian correction done in assign_stack_local.
2211 Get the address of the beginning of the slot.
2212 This is so we can do a big-endian correction unconditionally
2213 below. */
2214 adjust = GET_MODE_SIZE (mode) - total_size;
2215 if (adjust)
2216 stack_slot
2217 = adjust_address_nv (x, mode_for_size (total_size
2218 * BITS_PER_UNIT,
2219 MODE_INT, 1),
2220 adjust);
2223 spill_stack_slot[from_reg] = stack_slot;
2224 spill_stack_slot_width[from_reg] = total_size;
2227 /* On a big endian machine, the "address" of the slot
2228 is the address of the low part that fits its inherent mode. */
2229 if (BYTES_BIG_ENDIAN && inherent_size < total_size)
2230 adjust += (total_size - inherent_size);
2232 /* If we have any adjustment to make, or if the stack slot is the
2233 wrong mode, make a new stack slot. */
2234 x = adjust_address_nv (x, GET_MODE (regno_reg_rtx[i]), adjust);
2236 /* If we have a decl for the original register, set it for the
2237 memory. If this is a shared MEM, make a copy. */
2238 if (shared_p)
2240 x = copy_rtx (x);
2241 set_mem_attrs_from_reg (x, regno_reg_rtx[i]);
2243 else if (REG_EXPR (regno_reg_rtx[i])
2244 && DECL_P (REG_EXPR (regno_reg_rtx[i])))
2246 rtx decl = DECL_RTL_IF_SET (REG_EXPR (regno_reg_rtx[i]));
2248 /* We can do this only for the DECL's home pseudo, not for
2249 any copies of it, since otherwise when the stack slot
2250 is reused, nonoverlapping_memrefs_p might think they
2251 cannot overlap. */
2252 if (decl && REG_P (decl) && REGNO (decl) == (unsigned) i)
2254 if (from_reg != -1 && spill_stack_slot[from_reg] == x)
2255 x = copy_rtx (x);
2257 set_mem_attrs_from_reg (x, regno_reg_rtx[i]);
2261 /* Save the stack slot for later. */
2262 reg_equiv_memory_loc[i] = x;
2266 /* Mark the slots in regs_ever_live for the hard regs used by
2267 pseudo-reg number REGNO, accessed in MODE. */
2269 static void
2270 mark_home_live_1 (int regno, enum machine_mode mode)
2272 int i, lim;
2274 i = reg_renumber[regno];
2275 if (i < 0)
2276 return;
2277 lim = end_hard_regno (mode, i);
2278 while (i < lim)
2279 df_set_regs_ever_live (i++, true);
2282 /* Mark the slots in regs_ever_live for the hard regs
2283 used by pseudo-reg number REGNO. */
2285 void
2286 mark_home_live (int regno)
2288 if (reg_renumber[regno] >= 0)
2289 mark_home_live_1 (regno, PSEUDO_REGNO_MODE (regno));
2292 /* This function handles the tracking of elimination offsets around branches.
2294 X is a piece of RTL being scanned.
2296 INSN is the insn that it came from, if any.
2298 INITIAL_P is nonzero if we are to set the offset to be the initial
2299 offset and zero if we are setting the offset of the label to be the
2300 current offset. */
2302 static void
2303 set_label_offsets (rtx x, rtx insn, int initial_p)
2305 enum rtx_code code = GET_CODE (x);
2306 rtx tem;
2307 unsigned int i;
2308 struct elim_table *p;
2310 switch (code)
2312 case LABEL_REF:
2313 if (LABEL_REF_NONLOCAL_P (x))
2314 return;
2316 x = XEXP (x, 0);
2318 /* ... fall through ... */
2320 case CODE_LABEL:
2321 /* If we know nothing about this label, set the desired offsets. Note
2322 that this sets the offset at a label to be the offset before a label
2323 if we don't know anything about the label. This is not correct for
2324 the label after a BARRIER, but is the best guess we can make. If
2325 we guessed wrong, we will suppress an elimination that might have
2326 been possible had we been able to guess correctly. */
2328 if (! offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num])
2330 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2331 offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
2332 = (initial_p ? reg_eliminate[i].initial_offset
2333 : reg_eliminate[i].offset);
2334 offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num] = 1;
2337 /* Otherwise, if this is the definition of a label and it is
2338 preceded by a BARRIER, set our offsets to the known offset of
2339 that label. */
2341 else if (x == insn
2342 && (tem = prev_nonnote_insn (insn)) != 0
2343 && BARRIER_P (tem))
2344 set_offsets_for_label (insn);
2345 else
2346 /* If neither of the above cases is true, compare each offset
2347 with those previously recorded and suppress any eliminations
2348 where the offsets disagree. */
2350 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2351 if (offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
2352 != (initial_p ? reg_eliminate[i].initial_offset
2353 : reg_eliminate[i].offset))
2354 reg_eliminate[i].can_eliminate = 0;
2356 return;
2358 case JUMP_INSN:
2359 set_label_offsets (PATTERN (insn), insn, initial_p);
2361 /* ... fall through ... */
2363 case INSN:
2364 case CALL_INSN:
2365 /* Any labels mentioned in REG_LABEL_OPERAND notes can be branched
2366 to indirectly and hence must have all eliminations at their
2367 initial offsets. */
2368 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2369 if (REG_NOTE_KIND (tem) == REG_LABEL_OPERAND)
2370 set_label_offsets (XEXP (tem, 0), insn, 1);
2371 return;
2373 case PARALLEL:
2374 case ADDR_VEC:
2375 case ADDR_DIFF_VEC:
2376 /* Each of the labels in the parallel or address vector must be
2377 at their initial offsets. We want the first field for PARALLEL
2378 and ADDR_VEC and the second field for ADDR_DIFF_VEC. */
2380 for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2381 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2382 insn, initial_p);
2383 return;
2385 case SET:
2386 /* We only care about setting PC. If the source is not RETURN,
2387 IF_THEN_ELSE, or a label, disable any eliminations not at
2388 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2389 isn't one of those possibilities. For branches to a label,
2390 call ourselves recursively.
2392 Note that this can disable elimination unnecessarily when we have
2393 a non-local goto since it will look like a non-constant jump to
2394 someplace in the current function. This isn't a significant
2395 problem since such jumps will normally be when all elimination
2396 pairs are back to their initial offsets. */
2398 if (SET_DEST (x) != pc_rtx)
2399 return;
2401 switch (GET_CODE (SET_SRC (x)))
2403 case PC:
2404 case RETURN:
2405 return;
2407 case LABEL_REF:
2408 set_label_offsets (SET_SRC (x), insn, initial_p);
2409 return;
2411 case IF_THEN_ELSE:
2412 tem = XEXP (SET_SRC (x), 1);
2413 if (GET_CODE (tem) == LABEL_REF)
2414 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2415 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2416 break;
2418 tem = XEXP (SET_SRC (x), 2);
2419 if (GET_CODE (tem) == LABEL_REF)
2420 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2421 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2422 break;
2423 return;
2425 default:
2426 break;
2429 /* If we reach here, all eliminations must be at their initial
2430 offset because we are doing a jump to a variable address. */
2431 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2432 if (p->offset != p->initial_offset)
2433 p->can_eliminate = 0;
2434 break;
2436 default:
2437 break;
2441 /* Scan X and replace any eliminable registers (such as fp) with a
2442 replacement (such as sp), plus an offset.
2444 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2445 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2446 MEM, we are allowed to replace a sum of a register and the constant zero
2447 with the register, which we cannot do outside a MEM. In addition, we need
2448 to record the fact that a register is referenced outside a MEM.
2450 If INSN is an insn, it is the insn containing X. If we replace a REG
2451 in a SET_DEST with an equivalent MEM and INSN is nonzero, write a
2452 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2453 the REG is being modified.
2455 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2456 That's used when we eliminate in expressions stored in notes.
2457 This means, do not set ref_outside_mem even if the reference
2458 is outside of MEMs.
2460 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
2461 replacements done assuming all offsets are at their initial values. If
2462 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2463 encounter, return the actual location so that find_reloads will do
2464 the proper thing. */
2466 static rtx
2467 eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
2468 bool may_use_invariant)
2470 enum rtx_code code = GET_CODE (x);
2471 struct elim_table *ep;
2472 int regno;
2473 rtx new;
2474 int i, j;
2475 const char *fmt;
2476 int copied = 0;
2478 if (! current_function_decl)
2479 return x;
2481 switch (code)
2483 case CONST_INT:
2484 case CONST_DOUBLE:
2485 case CONST_FIXED:
2486 case CONST_VECTOR:
2487 case CONST:
2488 case SYMBOL_REF:
2489 case CODE_LABEL:
2490 case PC:
2491 case CC0:
2492 case ASM_INPUT:
2493 case ADDR_VEC:
2494 case ADDR_DIFF_VEC:
2495 case RETURN:
2496 return x;
2498 case REG:
2499 regno = REGNO (x);
2501 /* First handle the case where we encounter a bare register that
2502 is eliminable. Replace it with a PLUS. */
2503 if (regno < FIRST_PSEUDO_REGISTER)
2505 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2506 ep++)
2507 if (ep->from_rtx == x && ep->can_eliminate)
2508 return plus_constant (ep->to_rtx, ep->previous_offset);
2511 else if (reg_renumber && reg_renumber[regno] < 0
2512 && reg_equiv_invariant && reg_equiv_invariant[regno])
2514 if (may_use_invariant)
2515 return eliminate_regs_1 (copy_rtx (reg_equiv_invariant[regno]),
2516 mem_mode, insn, true);
2517 /* There exists at least one use of REGNO that cannot be
2518 eliminated. Prevent the defining insn from being deleted. */
2519 reg_equiv_init[regno] = NULL_RTX;
2520 alter_reg (regno, -1, true);
2522 return x;
2524 /* You might think handling MINUS in a manner similar to PLUS is a
2525 good idea. It is not. It has been tried multiple times and every
2526 time the change has had to be reverted.
2528 Other parts of reload know a PLUS is special (gen_reload for example)
2529 and require special code to handle a reloaded PLUS operand.
2531 Also consider backends where the flags register is clobbered by a
2532 MINUS, but we can emit a PLUS that does not clobber flags (IA-32,
2533 lea instruction comes to mind). If we try to reload a MINUS, we
2534 may kill the flags register that was holding a useful value.
2536 So, please before trying to handle MINUS, consider reload as a
2537 whole instead of this little section as well as the backend issues. */
2538 case PLUS:
2539 /* If this is the sum of an eliminable register and a constant, rework
2540 the sum. */
2541 if (REG_P (XEXP (x, 0))
2542 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2543 && CONSTANT_P (XEXP (x, 1)))
2545 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2546 ep++)
2547 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2549 /* The only time we want to replace a PLUS with a REG (this
2550 occurs when the constant operand of the PLUS is the negative
2551 of the offset) is when we are inside a MEM. We won't want
2552 to do so at other times because that would change the
2553 structure of the insn in a way that reload can't handle.
2554 We special-case the commonest situation in
2555 eliminate_regs_in_insn, so just replace a PLUS with a
2556 PLUS here, unless inside a MEM. */
2557 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
2558 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2559 return ep->to_rtx;
2560 else
2561 return gen_rtx_PLUS (Pmode, ep->to_rtx,
2562 plus_constant (XEXP (x, 1),
2563 ep->previous_offset));
2566 /* If the register is not eliminable, we are done since the other
2567 operand is a constant. */
2568 return x;
2571 /* If this is part of an address, we want to bring any constant to the
2572 outermost PLUS. We will do this by doing register replacement in
2573 our operands and seeing if a constant shows up in one of them.
2575 Note that there is no risk of modifying the structure of the insn,
2576 since we only get called for its operands, thus we are either
2577 modifying the address inside a MEM, or something like an address
2578 operand of a load-address insn. */
2581 rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true);
2582 rtx new1 = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true);
2584 if (reg_renumber && (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)))
2586 /* If one side is a PLUS and the other side is a pseudo that
2587 didn't get a hard register but has a reg_equiv_constant,
2588 we must replace the constant here since it may no longer
2589 be in the position of any operand. */
2590 if (GET_CODE (new0) == PLUS && REG_P (new1)
2591 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2592 && reg_renumber[REGNO (new1)] < 0
2593 && reg_equiv_constant != 0
2594 && reg_equiv_constant[REGNO (new1)] != 0)
2595 new1 = reg_equiv_constant[REGNO (new1)];
2596 else if (GET_CODE (new1) == PLUS && REG_P (new0)
2597 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2598 && reg_renumber[REGNO (new0)] < 0
2599 && reg_equiv_constant[REGNO (new0)] != 0)
2600 new0 = reg_equiv_constant[REGNO (new0)];
2602 new = form_sum (new0, new1);
2604 /* As above, if we are not inside a MEM we do not want to
2605 turn a PLUS into something else. We might try to do so here
2606 for an addition of 0 if we aren't optimizing. */
2607 if (! mem_mode && GET_CODE (new) != PLUS)
2608 return gen_rtx_PLUS (GET_MODE (x), new, const0_rtx);
2609 else
2610 return new;
2613 return x;
2615 case MULT:
2616 /* If this is the product of an eliminable register and a
2617 constant, apply the distribute law and move the constant out
2618 so that we have (plus (mult ..) ..). This is needed in order
2619 to keep load-address insns valid. This case is pathological.
2620 We ignore the possibility of overflow here. */
2621 if (REG_P (XEXP (x, 0))
2622 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2623 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2624 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2625 ep++)
2626 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2628 if (! mem_mode
2629 /* Refs inside notes don't count for this purpose. */
2630 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2631 || GET_CODE (insn) == INSN_LIST)))
2632 ep->ref_outside_mem = 1;
2634 return
2635 plus_constant (gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
2636 ep->previous_offset * INTVAL (XEXP (x, 1)));
2639 /* ... fall through ... */
2641 case CALL:
2642 case COMPARE:
2643 /* See comments before PLUS about handling MINUS. */
2644 case MINUS:
2645 case DIV: case UDIV:
2646 case MOD: case UMOD:
2647 case AND: case IOR: case XOR:
2648 case ROTATERT: case ROTATE:
2649 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2650 case NE: case EQ:
2651 case GE: case GT: case GEU: case GTU:
2652 case LE: case LT: case LEU: case LTU:
2654 rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false);
2655 rtx new1 = XEXP (x, 1)
2656 ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, false) : 0;
2658 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2659 return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
2661 return x;
2663 case EXPR_LIST:
2664 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2665 if (XEXP (x, 0))
2667 new = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true);
2668 if (new != XEXP (x, 0))
2670 /* If this is a REG_DEAD note, it is not valid anymore.
2671 Using the eliminated version could result in creating a
2672 REG_DEAD note for the stack or frame pointer. */
2673 if (GET_MODE (x) == REG_DEAD)
2674 return (XEXP (x, 1)
2675 ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true)
2676 : NULL_RTX);
2678 x = gen_rtx_EXPR_LIST (REG_NOTE_KIND (x), new, XEXP (x, 1));
2682 /* ... fall through ... */
2684 case INSN_LIST:
2685 /* Now do eliminations in the rest of the chain. If this was
2686 an EXPR_LIST, this might result in allocating more memory than is
2687 strictly needed, but it simplifies the code. */
2688 if (XEXP (x, 1))
2690 new = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true);
2691 if (new != XEXP (x, 1))
2692 return
2693 gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
2695 return x;
2697 case PRE_INC:
2698 case POST_INC:
2699 case PRE_DEC:
2700 case POST_DEC:
2701 /* We do not support elimination of a register that is modified.
2702 elimination_effects has already made sure that this does not
2703 happen. */
2704 return x;
2706 case PRE_MODIFY:
2707 case POST_MODIFY:
2708 /* We do not support elimination of a register that is modified.
2709 elimination_effects has already made sure that this does not
2710 happen. The only remaining case we need to consider here is
2711 that the increment value may be an eliminable register. */
2712 if (GET_CODE (XEXP (x, 1)) == PLUS
2713 && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
2715 rtx new = eliminate_regs_1 (XEXP (XEXP (x, 1), 1), mem_mode,
2716 insn, true);
2718 if (new != XEXP (XEXP (x, 1), 1))
2719 return gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (x, 0),
2720 gen_rtx_PLUS (GET_MODE (x),
2721 XEXP (x, 0), new));
2723 return x;
2725 case STRICT_LOW_PART:
2726 case NEG: case NOT:
2727 case SIGN_EXTEND: case ZERO_EXTEND:
2728 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2729 case FLOAT: case FIX:
2730 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2731 case ABS:
2732 case SQRT:
2733 case FFS:
2734 case CLZ:
2735 case CTZ:
2736 case POPCOUNT:
2737 case PARITY:
2738 case BSWAP:
2739 new = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false);
2740 if (new != XEXP (x, 0))
2741 return gen_rtx_fmt_e (code, GET_MODE (x), new);
2742 return x;
2744 case SUBREG:
2745 /* Similar to above processing, but preserve SUBREG_BYTE.
2746 Convert (subreg (mem)) to (mem) if not paradoxical.
2747 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2748 pseudo didn't get a hard reg, we must replace this with the
2749 eliminated version of the memory location because push_reload
2750 may do the replacement in certain circumstances. */
2751 if (REG_P (SUBREG_REG (x))
2752 && (GET_MODE_SIZE (GET_MODE (x))
2753 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2754 && reg_equiv_memory_loc != 0
2755 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2757 new = SUBREG_REG (x);
2759 else
2760 new = eliminate_regs_1 (SUBREG_REG (x), mem_mode, insn, false);
2762 if (new != SUBREG_REG (x))
2764 int x_size = GET_MODE_SIZE (GET_MODE (x));
2765 int new_size = GET_MODE_SIZE (GET_MODE (new));
2767 if (MEM_P (new)
2768 && ((x_size < new_size
2769 #ifdef WORD_REGISTER_OPERATIONS
2770 /* On these machines, combine can create rtl of the form
2771 (set (subreg:m1 (reg:m2 R) 0) ...)
2772 where m1 < m2, and expects something interesting to
2773 happen to the entire word. Moreover, it will use the
2774 (reg:m2 R) later, expecting all bits to be preserved.
2775 So if the number of words is the same, preserve the
2776 subreg so that push_reload can see it. */
2777 && ! ((x_size - 1) / UNITS_PER_WORD
2778 == (new_size - 1) / UNITS_PER_WORD)
2779 #endif
2781 || x_size == new_size)
2783 return adjust_address_nv (new, GET_MODE (x), SUBREG_BYTE (x));
2784 else
2785 return gen_rtx_SUBREG (GET_MODE (x), new, SUBREG_BYTE (x));
2788 return x;
2790 case MEM:
2791 /* Our only special processing is to pass the mode of the MEM to our
2792 recursive call and copy the flags. While we are here, handle this
2793 case more efficiently. */
2794 return
2795 replace_equiv_address_nv (x,
2796 eliminate_regs_1 (XEXP (x, 0), GET_MODE (x),
2797 insn, true));
2799 case USE:
2800 /* Handle insn_list USE that a call to a pure function may generate. */
2801 new = eliminate_regs_1 (XEXP (x, 0), 0, insn, false);
2802 if (new != XEXP (x, 0))
2803 return gen_rtx_USE (GET_MODE (x), new);
2804 return x;
2806 case CLOBBER:
2807 case ASM_OPERANDS:
2808 case SET:
2809 gcc_unreachable ();
2811 default:
2812 break;
2815 /* Process each of our operands recursively. If any have changed, make a
2816 copy of the rtx. */
2817 fmt = GET_RTX_FORMAT (code);
2818 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2820 if (*fmt == 'e')
2822 new = eliminate_regs_1 (XEXP (x, i), mem_mode, insn, false);
2823 if (new != XEXP (x, i) && ! copied)
2825 x = shallow_copy_rtx (x);
2826 copied = 1;
2828 XEXP (x, i) = new;
2830 else if (*fmt == 'E')
2832 int copied_vec = 0;
2833 for (j = 0; j < XVECLEN (x, i); j++)
2835 new = eliminate_regs_1 (XVECEXP (x, i, j), mem_mode, insn, false);
2836 if (new != XVECEXP (x, i, j) && ! copied_vec)
2838 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
2839 XVEC (x, i)->elem);
2840 if (! copied)
2842 x = shallow_copy_rtx (x);
2843 copied = 1;
2845 XVEC (x, i) = new_v;
2846 copied_vec = 1;
2848 XVECEXP (x, i, j) = new;
2853 return x;
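/* Convenience wrapper: eliminate registers in X without allowing
   invariant equivalences to be substituted.  */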
2857 eliminate_regs (rtx x, enum machine_mode mem_mode, rtx insn)
2859 return eliminate_regs_1 (x, mem_mode, insn, false);
2862 /* Scan rtx X for modifications of elimination target registers. Update
2863 the table of eliminables to reflect the changed state. MEM_MODE is
2864 the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM. */
2866 static void
2867 elimination_effects (rtx x, enum machine_mode mem_mode)
2869 enum rtx_code code = GET_CODE (x);
2870 struct elim_table *ep;
2871 int regno;
2872 int i, j;
2873 const char *fmt;
2875 switch (code)
2877 case CONST_INT:
2878 case CONST_DOUBLE:
2879 case CONST_FIXED:
2880 case CONST_VECTOR:
2881 case CONST:
2882 case SYMBOL_REF:
2883 case CODE_LABEL:
2884 case PC:
2885 case CC0:
2886 case ASM_INPUT:
2887 case ADDR_VEC:
2888 case ADDR_DIFF_VEC:
2889 case RETURN:
2890 return;
2892 case REG:
2893 regno = REGNO (x);
2895 /* First handle the case where we encounter a bare register that
2896 is eliminable; just note whether it is referenced outside a MEM. */
2897 if (regno < FIRST_PSEUDO_REGISTER)
2899 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2900 ep++)
2901 if (ep->from_rtx == x && ep->can_eliminate)
2903 if (! mem_mode)
2904 ep->ref_outside_mem = 1;
2905 return;
2909 else if (reg_renumber[regno] < 0 && reg_equiv_constant
2910 && reg_equiv_constant[regno]
2911 && ! function_invariant_p (reg_equiv_constant[regno]))
2912 elimination_effects (reg_equiv_constant[regno], mem_mode);
2913 return;
2915 case PRE_INC:
2916 case POST_INC:
2917 case PRE_DEC:
2918 case POST_DEC:
2919 case POST_MODIFY:
2920 case PRE_MODIFY:
2921 /* If we modify the source of an elimination rule, disable it. */
2922 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2923 if (ep->from_rtx == XEXP (x, 0))
2924 ep->can_eliminate = 0;
2926 /* If we modify the target of an elimination rule by adding a constant,
2927 update its offset. If we modify the target in any other way, we'll
2928 have to disable the rule as well. */
2929 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2930 if (ep->to_rtx == XEXP (x, 0))
2932 int size = GET_MODE_SIZE (mem_mode);
2934 /* If more bytes than MEM_MODE are pushed, account for them. */
2935 #ifdef PUSH_ROUNDING
2936 if (ep->to_rtx == stack_pointer_rtx)
2937 size = PUSH_ROUNDING (size);
2938 #endif
2939 if (code == PRE_DEC || code == POST_DEC)
2940 ep->offset += size;
2941 else if (code == PRE_INC || code == POST_INC)
2942 ep->offset -= size;
2943 else if (code == PRE_MODIFY || code == POST_MODIFY)
2945 if (GET_CODE (XEXP (x, 1)) == PLUS
2946 && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
2947 && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
2948 ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1));
2949 else
2950 ep->can_eliminate = 0;
2954 /* These two aren't unary operators. */
2955 if (code == POST_MODIFY || code == PRE_MODIFY)
2956 break;
2958 /* Fall through to generic unary operation case. */
2959 case STRICT_LOW_PART:
2960 case NEG: case NOT:
2961 case SIGN_EXTEND: case ZERO_EXTEND:
2962 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2963 case FLOAT: case FIX:
2964 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2965 case ABS:
2966 case SQRT:
2967 case FFS:
2968 case CLZ:
2969 case CTZ:
2970 case POPCOUNT:
2971 case PARITY:
2972 case BSWAP:
2973 elimination_effects (XEXP (x, 0), mem_mode);
2974 return;
2976 case SUBREG:
2977 if (REG_P (SUBREG_REG (x))
2978 && (GET_MODE_SIZE (GET_MODE (x))
2979 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2980 && reg_equiv_memory_loc != 0
2981 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2982 return;
2984 elimination_effects (SUBREG_REG (x), mem_mode);
2985 return;
2987 case USE:
2988 /* If using a register that is the source of an elimination we still
2989 think can be performed, note that it cannot be performed, since we don't
2990 know how this register is used. */
2991 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2992 if (ep->from_rtx == XEXP (x, 0))
2993 ep->can_eliminate = 0;
2995 elimination_effects (XEXP (x, 0), mem_mode);
2996 return;
2998 case CLOBBER:
2999 /* If clobbering a register that is the replacement register for an
3000 elimination we still think can be performed, note that it cannot
3001 be performed. Otherwise, we need not be concerned about it. */
3002 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3003 if (ep->to_rtx == XEXP (x, 0))
3004 ep->can_eliminate = 0;
3006 elimination_effects (XEXP (x, 0), mem_mode);
3007 return;
3009 case SET:
3010 /* Check for setting a register that we know about. */
3011 if (REG_P (SET_DEST (x)))
3013 /* See if this is setting the replacement register for an
3014 elimination.
3016 If DEST is the hard frame pointer, we do nothing because we
3017 assume that all assignments to the frame pointer are for
3018 non-local gotos and are being done at a time when they are valid
3019 and do not disturb anything else. Some machines want to
3020 eliminate a fake argument pointer (or even a fake frame pointer)
3021 with either the real frame or the stack pointer. Assignments to
3022 the hard frame pointer must not prevent this elimination. */
3024 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3025 ep++)
3026 if (ep->to_rtx == SET_DEST (x)
3027 && SET_DEST (x) != hard_frame_pointer_rtx)
3029 /* If it is being incremented, adjust the offset. Otherwise,
3030 this elimination can't be done. */
3031 rtx src = SET_SRC (x);
3033 if (GET_CODE (src) == PLUS
3034 && XEXP (src, 0) == SET_DEST (x)
3035 && GET_CODE (XEXP (src, 1)) == CONST_INT)
3036 ep->offset -= INTVAL (XEXP (src, 1));
3037 else
3038 ep->can_eliminate = 0;
3042 elimination_effects (SET_DEST (x), 0);
3043 elimination_effects (SET_SRC (x), 0);
3044 return;
3046 case MEM:
3047 /* Our only special processing is to pass the mode of the MEM to our
3048 recursive call. */
3049 elimination_effects (XEXP (x, 0), GET_MODE (x));
3050 return;
3052 default:
3053 break;
3056 fmt = GET_RTX_FORMAT (code);
3057 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3059 if (*fmt == 'e')
3060 elimination_effects (XEXP (x, i), mem_mode);
3061 else if (*fmt == 'E')
3062 for (j = 0; j < XVECLEN (x, i); j++)
3063 elimination_effects (XVECEXP (x, i, j), mem_mode);
3067 /* Descend through rtx X and verify that no references to eliminable registers
3068 remain. If any do remain, mark the involved register as not
3069 eliminable. */
3071 static void
3072 check_eliminable_occurrences (rtx x)
3074 const char *fmt;
3075 int i;
3076 enum rtx_code code;
3078 if (x == 0)
3079 return;
3081 code = GET_CODE (x);
3083 if (code == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3085 struct elim_table *ep;
3087 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3088 if (ep->from_rtx == x)
3089 ep->can_eliminate = 0;
3090 return;
3093 fmt = GET_RTX_FORMAT (code);
3094 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3096 if (*fmt == 'e')
3097 check_eliminable_occurrences (XEXP (x, i));
3098 else if (*fmt == 'E')
3100 int j;
3101 for (j = 0; j < XVECLEN (x, i); j++)
3102 check_eliminable_occurrences (XVECEXP (x, i, j));
3107 /* Scan INSN and eliminate all eliminable registers in it.
3109 If REPLACE is nonzero, do the replacement destructively. Also
3110 delete the insn as dead if it is setting an eliminable register.
3112 If REPLACE is zero, do all our allocations in reload_obstack.
3114 If no eliminations were done and this insn doesn't require any elimination
3115 processing (these are not identical conditions: it might be updating sp,
3116 but not referencing fp; this needs to be seen during reload_as_needed so
3117 that the offset between fp and sp can be taken into consideration), zero
3118 is returned. Otherwise, 1 is returned. */
3120 static int
3121 eliminate_regs_in_insn (rtx insn, int replace)
3123 int icode = recog_memoized (insn);
3124 rtx old_body = PATTERN (insn);
3125 int insn_is_asm = asm_noperands (old_body) >= 0;
3126 rtx old_set = single_set (insn);
3127 rtx new_body;
3128 int val = 0;
3129 int i;
3130 rtx substed_operand[MAX_RECOG_OPERANDS];
3131 rtx orig_operand[MAX_RECOG_OPERANDS];
3132 struct elim_table *ep;
3133 rtx plus_src, plus_cst_src;
3135 if (! insn_is_asm && icode < 0)
3137 gcc_assert (GET_CODE (PATTERN (insn)) == USE
3138 || GET_CODE (PATTERN (insn)) == CLOBBER
3139 || GET_CODE (PATTERN (insn)) == ADDR_VEC
3140 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
3141 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
3142 return 0;
3145 if (old_set != 0 && REG_P (SET_DEST (old_set))
3146 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3148 /* Check for setting an eliminable register. */
3149 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3150 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3152 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3153 /* If this is setting the frame pointer register to the
3154 hardware frame pointer register and this is an elimination
3155 that will be done (tested above), this insn is really
3156 adjusting the frame pointer downward to compensate for
3157 the adjustment done before a nonlocal goto. */
3158 if (ep->from == FRAME_POINTER_REGNUM
3159 && ep->to == HARD_FRAME_POINTER_REGNUM)
3161 rtx base = SET_SRC (old_set);
3162 rtx base_insn = insn;
3163 HOST_WIDE_INT offset = 0;
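/* Trace backwards through constant additions and register copies to
   find the total offset by which the source differs from ep->to_rtx.  */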
3165 while (base != ep->to_rtx)
3167 rtx prev_insn, prev_set;
3169 if (GET_CODE (base) == PLUS
3170 && GET_CODE (XEXP (base, 1)) == CONST_INT)
3172 offset += INTVAL (XEXP (base, 1));
3173 base = XEXP (base, 0);
3175 else if ((prev_insn = prev_nonnote_insn (base_insn)) != 0
3176 && (prev_set = single_set (prev_insn)) != 0
3177 && rtx_equal_p (SET_DEST (prev_set), base))
3179 base = SET_SRC (prev_set);
3180 base_insn = prev_insn;
3182 else
3183 break;
3186 if (base == ep->to_rtx)
3188 rtx src
3189 = plus_constant (ep->to_rtx, offset - ep->offset);
3191 new_body = old_body;
3192 if (! replace)
3194 new_body = copy_insn (old_body);
3195 if (REG_NOTES (insn))
3196 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3198 PATTERN (insn) = new_body;
3199 old_set = single_set (insn);
3201 /* First see if this insn remains valid when we
3202 make the change. If not, keep the INSN_CODE
3203 the same and let reload fix it up. */
3204 validate_change (insn, &SET_SRC (old_set), src, 1);
3205 validate_change (insn, &SET_DEST (old_set),
3206 ep->to_rtx, 1);
3207 if (! apply_change_group ())
3209 SET_SRC (old_set) = src;
3210 SET_DEST (old_set) = ep->to_rtx;
3213 val = 1;
3214 goto done;
3217 #endif
3219 /* In this case this insn isn't serving a useful purpose. We
3220 will delete it in reload_as_needed once we know that this
3221 elimination is, in fact, being done.
3223 If REPLACE isn't set, we can't delete this insn, but needn't
3224 process it since it won't be used unless something changes. */
3225 if (replace)
3227 delete_dead_insn (insn);
3228 return 1;
3230 val = 1;
3231 goto done;
3235 /* We allow one special case which happens to work on all machines we
3236 currently support: a single set with the source or a REG_EQUAL
3237 note being a PLUS of an eliminable register and a constant. */
3238 plus_src = plus_cst_src = 0;
3239 if (old_set && REG_P (SET_DEST (old_set)))
3241 if (GET_CODE (SET_SRC (old_set)) == PLUS)
3242 plus_src = SET_SRC (old_set);
3243 /* First see if the source is of the form (plus (...) CST). */
3244 if (plus_src
3245 && GET_CODE (XEXP (plus_src, 1)) == CONST_INT)
3246 plus_cst_src = plus_src;
3247 else if (REG_P (SET_SRC (old_set))
3248 || plus_src)
3250 /* Otherwise, see if we have a REG_EQUAL note of the form
3251 (plus (...) CST). */
3252 rtx links;
3253 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
3255 if ((REG_NOTE_KIND (links) == REG_EQUAL
3256 || REG_NOTE_KIND (links) == REG_EQUIV)
3257 && GET_CODE (XEXP (links, 0)) == PLUS
3258 && GET_CODE (XEXP (XEXP (links, 0), 1)) == CONST_INT)
3260 plus_cst_src = XEXP (links, 0);
3261 break;
3266 /* Check that the first operand of the PLUS is a hard reg or
3267 the lowpart subreg of one. */
3268 if (plus_cst_src)
3270 rtx reg = XEXP (plus_cst_src, 0);
3271 if (GET_CODE (reg) == SUBREG && subreg_lowpart_p (reg))
3272 reg = SUBREG_REG (reg);
3274 if (!REG_P (reg) || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
3275 plus_cst_src = 0;
3278 if (plus_cst_src)
3280 rtx reg = XEXP (plus_cst_src, 0);
3281 HOST_WIDE_INT offset = INTVAL (XEXP (plus_cst_src, 1));
3283 if (GET_CODE (reg) == SUBREG)
3284 reg = SUBREG_REG (reg);
3286 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3287 if (ep->from_rtx == reg && ep->can_eliminate)
3289 rtx to_rtx = ep->to_rtx;
3290 offset += ep->offset;
3291 offset = trunc_int_for_mode (offset, GET_MODE (reg));
3293 if (GET_CODE (XEXP (plus_cst_src, 0)) == SUBREG)
3294 to_rtx = gen_lowpart (GET_MODE (XEXP (plus_cst_src, 0)),
3295 to_rtx);
3296 /* If we have a nonzero offset, and the source is already
3297 a simple REG, the following transformation would
3298 increase the cost of the insn by replacing a simple REG
3299 with (plus (reg sp) CST). So try only when we already
3300 had a PLUS before. */
3301 if (offset == 0 || plus_src)
3303 rtx new_src = plus_constant (to_rtx, offset);
3305 new_body = old_body;
3306 if (! replace)
3308 new_body = copy_insn (old_body);
3309 if (REG_NOTES (insn))
3310 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3312 PATTERN (insn) = new_body;
3313 old_set = single_set (insn);
3315 /* First see if this insn remains valid when we make the
3316 change. If not, try to replace the whole pattern with
3317 a simple set (this may help if the original insn was a
3318 PARALLEL that was only recognized as single_set due to
3319 REG_UNUSED notes). If this isn't valid either, keep
3320 the INSN_CODE the same and let reload fix it up. */
3321 if (!validate_change (insn, &SET_SRC (old_set), new_src, 0))
3323 rtx new_pat = gen_rtx_SET (VOIDmode,
3324 SET_DEST (old_set), new_src);
3326 if (!validate_change (insn, &PATTERN (insn), new_pat, 0))
3327 SET_SRC (old_set) = new_src;
3330 else
3331 break;
3333 val = 1;
3334 /* This can't have an effect on elimination offsets, so skip right
3335 to the end. */
3336 goto done;
3340 /* Determine the effects of this insn on elimination offsets. */
3341 elimination_effects (old_body, 0);
3343 /* Eliminate all eliminable registers occurring in operands that
3344 can be handled by reload. */
3345 extract_insn (insn);
3346 for (i = 0; i < recog_data.n_operands; i++)
3348 orig_operand[i] = recog_data.operand[i];
3349 substed_operand[i] = recog_data.operand[i];
3351 /* For an asm statement, every operand is eliminable. */
3352 if (insn_is_asm || insn_data[icode].operand[i].eliminable)
3354 bool is_set_src, in_plus;
3356 /* Check for setting a register that we know about. */
3357 if (recog_data.operand_type[i] != OP_IN
3358 && REG_P (orig_operand[i]))
3360 /* If we are assigning to a register that can be eliminated, it
3361 must be as part of a PARALLEL, since the code above handles
3362 single SETs. We must indicate that we can no longer
3363 eliminate this reg. */
3364 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3365 ep++)
3366 if (ep->from_rtx == orig_operand[i])
3367 ep->can_eliminate = 0;
3370 /* Companion to the above plus substitution, we can allow
3371 invariants as the source of a plain move. */
3372 is_set_src = false;
3373 if (old_set && recog_data.operand_loc[i] == &SET_SRC (old_set))
3374 is_set_src = true;
3375 in_plus = false;
3376 if (plus_src
3377 && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
3378 || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
3379 in_plus = true;
3381 substed_operand[i]
3382 = eliminate_regs_1 (recog_data.operand[i], 0,
3383 replace ? insn : NULL_RTX,
3384 is_set_src || in_plus);
3385 if (substed_operand[i] != orig_operand[i])
3386 val = 1;
3387 /* Terminate the search in check_eliminable_occurrences at
3388 this point. */
3389 *recog_data.operand_loc[i] = 0;
3391 /* If an output operand changed from a REG to a MEM and INSN is an
3392 insn, write a CLOBBER insn. */
3393 if (recog_data.operand_type[i] != OP_IN
3394 && REG_P (orig_operand[i])
3395 && MEM_P (substed_operand[i])
3396 && replace)
3397 emit_insn_after (gen_rtx_CLOBBER (VOIDmode, orig_operand[i]),
3398 insn);
3402 for (i = 0; i < recog_data.n_dups; i++)
3403 *recog_data.dup_loc[i]
3404 = *recog_data.operand_loc[(int) recog_data.dup_num[i]];
3406 /* If any eliminable registers remain, they aren't eliminable anymore. */
3407 check_eliminable_occurrences (old_body);
3409 /* Substitute the operands; the new values are in the substed_operand
3410 array. */
3411 for (i = 0; i < recog_data.n_operands; i++)
3412 *recog_data.operand_loc[i] = substed_operand[i];
3413 for (i = 0; i < recog_data.n_dups; i++)
3414 *recog_data.dup_loc[i] = substed_operand[(int) recog_data.dup_num[i]];
3416 /* If we are replacing a body that was a (set X (plus Y Z)), try to
3417 re-recognize the insn. We do this in case we had a simple addition
3418 but now can do this as a load-address. This saves an insn in this
3419 common case.
3420 If re-recognition fails, the old insn code number will still be used,
3421 and some register operands may have changed into PLUS expressions.
3422 These will be handled by find_reloads by loading them into a register
3423 again. */
3425 if (val)
3427 /* If we aren't replacing things permanently and we changed something,
3428 make another copy to ensure that all the RTL is new. Otherwise
3429 things can go wrong if find_reloads swaps commutative operands
3430 and one is inside RTL that has been copied while the other is not. */
3431 new_body = old_body;
3432 if (! replace)
3434 new_body = copy_insn (old_body);
3435 if (REG_NOTES (insn))
3436 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3438 PATTERN (insn) = new_body;
3440 /* If we had a move insn but now we don't, rerecognize it. This will
3441 cause spurious re-recognition if the old move had a PARALLEL since
3442 the new one still will, but we can't call single_set without
3443 having put NEW_BODY into the insn and the re-recognition won't
3444 hurt in this rare case. */
3445 /* ??? Why this huge if statement - why don't we just rerecognize the
3446 thing always? */
3447 if (! insn_is_asm
3448 && old_set != 0
3449 && ((REG_P (SET_SRC (old_set))
3450 && (GET_CODE (new_body) != SET
3451 || !REG_P (SET_SRC (new_body))))
3452 /* If this was a load from or store to memory, compare
3453 the MEM in recog_data.operand to the one in the insn.
3454 If they are not equal, then rerecognize the insn. */
3455 || (old_set != 0
3456 && ((MEM_P (SET_SRC (old_set))
3457 && SET_SRC (old_set) != recog_data.operand[1])
3458 || (MEM_P (SET_DEST (old_set))
3459 && SET_DEST (old_set) != recog_data.operand[0])))
3460 /* If this was an add insn before, rerecognize. */
3461 || GET_CODE (SET_SRC (old_set)) == PLUS))
3463 int new_icode = recog (PATTERN (insn), insn, 0);
3464 if (new_icode >= 0)
3465 INSN_CODE (insn) = new_icode;
3469 /* Restore the old body. If there were any changes to it, we made a copy
3470 of it while the changes were still in place, so we'll correctly return
3471 a modified insn below. */
3472 if (! replace)
3474 /* Restore the old body. */
3475 for (i = 0; i < recog_data.n_operands; i++)
3476 *recog_data.operand_loc[i] = orig_operand[i];
3477 for (i = 0; i < recog_data.n_dups; i++)
3478 *recog_data.dup_loc[i] = orig_operand[(int) recog_data.dup_num[i]];
3481 /* Update all elimination pairs to reflect the status after the current
3482 insn. The changes we make were determined by the earlier call to
3483 elimination_effects.
3485 We also detect cases where register elimination cannot be done,
3486 namely, if a register would be both changed and referenced outside a MEM
3487 in the resulting insn since such an insn is often undefined and, even if
3488 not, we cannot know what meaning will be given to it. Note that it is
3489 valid to have a register used in an address in an insn that changes it
3490 (presumably with a pre- or post-increment or decrement).
3492 If anything changes, return nonzero. */
3494 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3496 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3497 ep->can_eliminate = 0;
3499 ep->ref_outside_mem = 0;
3501 if (ep->previous_offset != ep->offset)
3502 val = 1;
3505 done:
3506 /* If we changed something, perform elimination in REG_NOTES. This is
3507 needed even when REPLACE is zero because a REG_DEAD note might refer
3508 to a register that we eliminate and could cause a different number
3509 of spill registers to be needed in the final reload pass than in
3510 the pre-passes. */
3511 if (val && REG_NOTES (insn) != 0)
3512 REG_NOTES (insn)
3513 = eliminate_regs_1 (REG_NOTES (insn), 0, REG_NOTES (insn), true);
3515 return val;
3518 /* Loop through all elimination pairs.
3519 Recalculate the number not at initial offset.
3521 Compute the maximum offset (minimum offset if the stack does not
3522 grow downward) for each elimination pair. */
3524 static void
3525 update_eliminable_offsets (void)
3527 struct elim_table *ep;
3529 num_not_at_initial_offset = 0;
3530 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3532 ep->previous_offset = ep->offset;
3533 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3534 num_not_at_initial_offset++;
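/* Illustrative sketch (not part of GCC): a stripped-down model of the
   per-pair offset bookkeeping done by update_eliminable_offsets above.
   The struct and names below are hypothetical simplifications of the real
   elim_table; the block is wrapped in #if 0 so it cannot affect a build,
   but it compiles as a standalone program if extracted.  */
#if 0
#include <stdio.h>

struct toy_elim
{
  long initial_offset;   /* offset assumed at function entry */
  long offset;           /* offset after the insn just processed */
  long previous_offset;  /* offset before that insn */
  int can_eliminate;
};

/* Remember the current offset of every pair and count how many live
   pairs have drifted away from their initial offset.  */
static int
toy_update_offsets (struct toy_elim *tab, int n)
{
  int not_at_initial = 0, i;
  for (i = 0; i < n; i++)
    {
      tab[i].previous_offset = tab[i].offset;
      if (tab[i].can_eliminate && tab[i].offset != tab[i].initial_offset)
        not_at_initial++;
    }
  return not_at_initial;
}

int
main (void)
{
  struct toy_elim tab[2] = { { 0, 16, 0, 1 }, { 8, 8, 8, 1 } };
  /* Only the first pair has moved away from its initial offset.  */
  printf ("%d\n", toy_update_offsets (tab, 2));  /* prints 1 */
  return 0;
}
#endif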
3538 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3539 replacement we currently believe is valid, mark it as not eliminable if X
3540 modifies DEST in any way other than by adding a constant integer to it.
3542 If DEST is the frame pointer, we do nothing because we assume that
3543 all assignments to the hard frame pointer are nonlocal gotos and are being
3544 done at a time when they are valid and do not disturb anything else.
3545 Some machines want to eliminate a fake argument pointer with either the
3546 frame or stack pointer. Assignments to the hard frame pointer must not
3547 prevent this elimination.
3549 Called via note_stores from reload before starting its passes to scan
3550 the insns of the function. */
3552 static void
3553 mark_not_eliminable (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
3555 unsigned int i;
3557 /* A SUBREG of a hard register here is just changing its mode. We should
3558 not see a SUBREG of an eliminable hard register, but check just in
3559 case. */
3560 if (GET_CODE (dest) == SUBREG)
3561 dest = SUBREG_REG (dest);
3563 if (dest == hard_frame_pointer_rtx)
3564 return;
3566 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3567 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3568 && (GET_CODE (x) != SET
3569 || GET_CODE (SET_SRC (x)) != PLUS
3570 || XEXP (SET_SRC (x), 0) != dest
3571 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3573 reg_eliminate[i].can_eliminate_previous
3574 = reg_eliminate[i].can_eliminate = 0;
3575 num_eliminable--;
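/* Illustrative sketch (not part of GCC): the shape test mark_not_eliminable
   applies above, acted out on a toy description of a store.  Only
   "dest = dest + constant" leaves the elimination usable, because only then
   does the replacement offset stay computable.  Every name here is a
   hypothetical simplification; the block compiles standalone if extracted.  */
#if 0
#include <stdbool.h>
#include <stdio.h>

struct toy_store
{
  int dest_reg;        /* register being assigned */
  bool src_is_plus;    /* source has the form (plus X Y) */
  int plus_base_reg;   /* X, when src_is_plus; -1 otherwise */
  bool plus_const;     /* Y is a CONST_INT, when src_is_plus */
};

static bool
keeps_eliminable (const struct toy_store *s, int elim_to_reg)
{
  if (s->dest_reg != elim_to_reg)
    return true;                      /* does not touch the target at all */
  return s->src_is_plus
         && s->plus_base_reg == s->dest_reg
         && s->plus_const;            /* dest = dest + const: offset known */
}

int
main (void)
{
  struct toy_store push = { 7, true, 7, true };        /* sp = sp + -16 */
  struct toy_store clobber = { 7, false, -1, false };  /* sp = something else */
  printf ("%d %d\n", keeps_eliminable (&push, 7),
          keeps_eliminable (&clobber, 7));             /* prints 1 0 */
  return 0;
}
#endif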
3579 /* Verify that the initial elimination offsets did not change since the
3580 last call to set_initial_elim_offsets. This is used to catch cases
3581 where something illegal happened during reload_as_needed that could
3582 cause incorrect code to be generated if we did not check for it. */
3584 static bool
3585 verify_initial_elim_offsets (void)
3587 HOST_WIDE_INT t;
3589 if (!num_eliminable)
3590 return true;
3592 #ifdef ELIMINABLE_REGS
3594 struct elim_table *ep;
3596 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3598 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t);
3599 if (t != ep->initial_offset)
3600 return false;
3603 #else
3604 INITIAL_FRAME_POINTER_OFFSET (t);
3605 if (t != reg_eliminate[0].initial_offset)
3606 return false;
3607 #endif
3609 return true;
3612 /* Reset all offsets on eliminable registers to their initial values. */
3614 static void
3615 set_initial_elim_offsets (void)
3617 struct elim_table *ep = reg_eliminate;
3619 #ifdef ELIMINABLE_REGS
3620 for (; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3622 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
3623 ep->previous_offset = ep->offset = ep->initial_offset;
3625 #else
3626 INITIAL_FRAME_POINTER_OFFSET (ep->initial_offset);
3627 ep->previous_offset = ep->offset = ep->initial_offset;
3628 #endif
3630 num_not_at_initial_offset = 0;
3633 /* Subroutine of set_initial_label_offsets called via for_each_eh_label. */
3635 static void
3636 set_initial_eh_label_offset (rtx label)
3638 set_label_offsets (label, NULL_RTX, 1);
3641 /* Initialize the known label offsets.
3642 Set a known offset for each forced label to be at the initial offset
3643 of each elimination. We do this because we assume that all
3644 computed jumps occur from a location where each elimination is
3645 at its initial offset.
3646 For all other labels, show that we don't know the offsets. */
3648 static void
3649 set_initial_label_offsets (void)
3651 rtx x;
3652 memset (offsets_known_at, 0, num_labels);
3654 for (x = forced_labels; x; x = XEXP (x, 1))
3655 if (XEXP (x, 0))
3656 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
3658 for_each_eh_label (set_initial_eh_label_offset);
3661 /* Set all elimination offsets to the known values for the code label given
3662 by INSN. */
3664 static void
3665 set_offsets_for_label (rtx insn)
3667 unsigned int i;
3668 int label_nr = CODE_LABEL_NUMBER (insn);
3669 struct elim_table *ep;
3671 num_not_at_initial_offset = 0;
3672 for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++)
3674 ep->offset = ep->previous_offset
3675 = offsets_at[label_nr - first_label_num][i];
3676 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3677 num_not_at_initial_offset++;
3681 /* See if anything that happened changes which eliminations are valid.
3682 For example, on the SPARC, whether or not the frame pointer can
3683 be eliminated can depend on what registers have been used. We need
3684 not check some conditions again (such as flag_omit_frame_pointer)
3685 since they can't have changed. */
3687 static void
3688 update_eliminables (HARD_REG_SET *pset)
3690 int previous_frame_pointer_needed = frame_pointer_needed;
3691 struct elim_table *ep;
3693 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3694 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
3695 #ifdef ELIMINABLE_REGS
3696 || ! CAN_ELIMINATE (ep->from, ep->to)
3697 #endif
3699 ep->can_eliminate = 0;
3701 /* Look for the case where we have discovered that we can't replace
3702 register A with register B and that means that we will now be
3703 trying to replace register A with register C. This means we can
3704 no longer replace register C with register B and we need to disable
3705 such an elimination, if it exists. This occurs often with A == ap,
3706 B == sp, and C == fp. */
3708 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3710 struct elim_table *op;
3711 int new_to = -1;
3713 if (! ep->can_eliminate && ep->can_eliminate_previous)
3715 /* Find the current elimination for ep->from, if there is a
3716 new one. */
3717 for (op = reg_eliminate;
3718 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3719 if (op->from == ep->from && op->can_eliminate)
3721 new_to = op->to;
3722 break;
3725 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
3726 disable it. */
3727 for (op = reg_eliminate;
3728 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3729 if (op->from == new_to && op->to == ep->to)
3730 op->can_eliminate = 0;
3734 /* See if any registers that we thought we could eliminate the previous
3735 time are no longer eliminable. If so, something has changed and we
3736 must spill the register. Also, recompute the number of eliminable
3737 registers and see if the frame pointer is needed; it is if there is
3738 no elimination of the frame pointer that we can perform. */
3740 frame_pointer_needed = 1;
3741 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3743 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
3744 && ep->to != HARD_FRAME_POINTER_REGNUM)
3745 frame_pointer_needed = 0;
3747 if (! ep->can_eliminate && ep->can_eliminate_previous)
3749 ep->can_eliminate_previous = 0;
3750 SET_HARD_REG_BIT (*pset, ep->from);
3751 num_eliminable--;
3755 /* If we didn't need a frame pointer last time, but we do now, spill
3756 the hard frame pointer. */
3757 if (frame_pointer_needed && ! previous_frame_pointer_needed)
3758 SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM);
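/* Illustrative sketch (not part of GCC): the "A -> B failed, so A now maps
   to C, therefore C -> B must be disabled" rule from the loop above, acted
   out on a toy elimination table.  Register numbers and names are
   hypothetical; the block compiles standalone if extracted.  */
#if 0
#include <stdio.h>

struct toy_pair { int from, to, can, could_before; };

static void
toy_disable_chains (struct toy_pair *tab, int n)
{
  int i, j, k;
  for (i = 0; i < n; i++)
    if (!tab[i].can && tab[i].could_before)
      for (j = 0; j < n; j++)
        if (tab[j].from == tab[i].from && tab[j].can)      /* new target C */
          for (k = 0; k < n; k++)
            if (tab[k].from == tab[j].to && tab[k].to == tab[i].to)
              tab[k].can = 0;                              /* kill C -> B */
}

int
main (void)
{
  /* ap -> sp just became impossible; ap -> fp is still possible,
     so fp -> sp has to be disabled as well.  */
  struct toy_pair tab[] = { { 16, 7, 0, 1 },    /* ap -> sp, now dead */
                            { 16, 6, 1, 1 },    /* ap -> fp, still live */
                            {  6, 7, 1, 1 } };  /* fp -> sp */
  toy_disable_chains (tab, 3);
  printf ("%d\n", tab[2].can);   /* prints 0 */
  return 0;
}
#endif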
3761 /* Return true if X is used as the target register of an elimination. */
3763 bool
3764 elimination_target_reg_p (rtx x)
3766 struct elim_table *ep;
3768 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3769 if (ep->to_rtx == x && ep->can_eliminate)
3770 return true;
3772 return false;
3775 /* Initialize the table of registers to eliminate. */
3777 static void
3778 init_elim_table (void)
3780 struct elim_table *ep;
3781 #ifdef ELIMINABLE_REGS
3782 const struct elim_table_1 *ep1;
3783 #endif
3785 if (!reg_eliminate)
3786 reg_eliminate = xcalloc (sizeof (struct elim_table), NUM_ELIMINABLE_REGS);
3788 /* Does this function require a frame pointer? */
3790 frame_pointer_needed = (! flag_omit_frame_pointer
3791 /* ?? If EXIT_IGNORE_STACK is set, we will not save
3792 and restore sp for alloca. So we can't eliminate
3793 the frame pointer in that case. At some point,
3794 we should improve this by emitting the
3795 sp-adjusting insns for this case. */
3796 || (current_function_calls_alloca
3797 && EXIT_IGNORE_STACK)
3798 || current_function_accesses_prior_frames
3799 || FRAME_POINTER_REQUIRED);
3801 num_eliminable = 0;
3803 #ifdef ELIMINABLE_REGS
3804 for (ep = reg_eliminate, ep1 = reg_eliminate_1;
3805 ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++, ep1++)
3807 ep->from = ep1->from;
3808 ep->to = ep1->to;
3809 ep->can_eliminate = ep->can_eliminate_previous
3810 = (CAN_ELIMINATE (ep->from, ep->to)
3811 && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
3813 #else
3814 reg_eliminate[0].from = reg_eliminate_1[0].from;
3815 reg_eliminate[0].to = reg_eliminate_1[0].to;
3816 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
3817 = ! frame_pointer_needed;
3818 #endif
3820 /* Count the number of eliminable registers and build the FROM and TO
3821 REG rtx's. Note that code in gen_rtx_REG will cause, e.g.,
3822 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
3823 We depend on this. */
3824 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3826 num_eliminable += ep->can_eliminate;
3827 ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
3828 ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
3832 /* Kick all pseudos out of hard register REGNO.
3834 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3835 because we found we can't eliminate some register. In that case, no pseudos
3836 are allowed to be in the register, even if they are only in a block that
3837 doesn't require spill registers, unlike the case when we are spilling this
3838 hard reg to produce another spill register.
3840 Pseudos that had to be kicked out are recorded in the spilled_pseudos set. */
3842 static void
3843 spill_hard_reg (unsigned int regno, int cant_eliminate)
3845 int i;
3847 if (cant_eliminate)
3849 SET_HARD_REG_BIT (bad_spill_regs_global, regno);
3850 df_set_regs_ever_live (regno, true);
3853 /* Spill every pseudo reg that was allocated to this reg
3854 or to something that overlaps this reg. */
3856 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3857 if (reg_renumber[i] >= 0
3858 && (unsigned int) reg_renumber[i] <= regno
3859 && end_hard_regno (PSEUDO_REGNO_MODE (i), reg_renumber[i]) > regno)
3860 SET_REGNO_REG_SET (&spilled_pseudos, i);
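/* Illustrative sketch (not part of GCC): the overlap test used just above.
   A pseudo sitting in hard registers [first, first + nregs) overlaps REGNO
   exactly when first <= regno < first + nregs, which is what the pair of
   comparisons on reg_renumber and end_hard_regno expresses.  Names below
   are hypothetical; the block compiles standalone if extracted.  */
#if 0
#include <stdbool.h>
#include <stdio.h>

static bool
overlaps_hard_reg (int first, int nregs, int regno)
{
  return first >= 0 && first <= regno && regno < first + nregs;
}

int
main (void)
{
  /* A two-register pseudo in regs 2-3 overlaps reg 3 but not reg 4.  */
  printf ("%d %d\n", overlaps_hard_reg (2, 2, 3),
          overlaps_hard_reg (2, 2, 4));   /* prints 1 0 */
  return 0;
}
#endif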
3863 /* After find_reload_regs has been run for all insns that need reloads,
3864 and/or spill_hard_regs was called, this function is used to actually
3865 spill pseudo registers and try to reallocate them. It also sets up the
3866 spill_regs array for use by choose_reload_regs. */
3868 static int
3869 finish_spills (int global)
3871 struct insn_chain *chain;
3872 int something_changed = 0;
3873 unsigned i;
3874 reg_set_iterator rsi;
3876 /* Build the spill_regs array for the function. */
3877 /* If there are some registers still to eliminate and one of the spill regs
3878 wasn't ever used before, additional stack space may have to be
3879 allocated to store this register. Thus, we may have changed the offset
3880 between the stack and frame pointers, so mark that something has changed.
3882 One might think that we need only set SOMETHING_CHANGED if this is a call-used
3883 register. However, the set of registers that must be saved by the
3884 prologue is not identical to the call-used set. For example, the
3885 register used by the call insn for the return PC is a call-used register,
3886 but must be saved by the prologue. */
3888 n_spills = 0;
3889 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3890 if (TEST_HARD_REG_BIT (used_spill_regs, i))
3892 spill_reg_order[i] = n_spills;
3893 spill_regs[n_spills++] = i;
3894 if (num_eliminable && ! df_regs_ever_live_p (i))
3895 something_changed = 1;
3896 df_set_regs_ever_live (i, true);
3898 else
3899 spill_reg_order[i] = -1;
3901 EXECUTE_IF_SET_IN_REG_SET (&spilled_pseudos, FIRST_PSEUDO_REGISTER, i, rsi)
3902 if (! flag_ira || reg_renumber[i] >= 0)
3904 /* Record the current hard register the pseudo is allocated to
3905 in pseudo_previous_regs so we avoid reallocating it to the
3906 same hard reg in a later pass. */
3907 gcc_assert (reg_renumber[i] >= 0);
3909 SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]);
3910 /* Mark it as no longer having a hard register home. */
3911 reg_renumber[i] = -1;
3912 if (flag_ira)
3913 mark_allocation_change (i);
3914 /* We will need to scan everything again. */
3915 something_changed = 1;
3918 /* Retry global register allocation if possible. */
3919 if (global)
3921 memset (pseudo_forbidden_regs, 0, max_regno * sizeof (HARD_REG_SET));
3922 /* For every insn that needs reloads, set the registers used as spill
3923 regs in pseudo_forbidden_regs for every pseudo live across the
3924 insn. */
3925 for (chain = insns_need_reload; chain; chain = chain->next_need_reload)
3927 EXECUTE_IF_SET_IN_REG_SET
3928 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
3930 IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
3931 chain->used_spill_regs);
3933 EXECUTE_IF_SET_IN_REG_SET
3934 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
3936 IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
3937 chain->used_spill_regs);
3941 /* Retry allocating the spilled pseudos. For each reg, merge the
3942 various reg sets that indicate which hard regs can't be used,
3943 and call retry_global_alloc.
3944 We change spill_pseudos here to only contain pseudos that did not
3945 get a new hard register. */
3946 if (! flag_ira)
3948 for (i = FIRST_PSEUDO_REGISTER; i < (unsigned)max_regno; i++)
3949 if (reg_old_renumber[i] != reg_renumber[i])
3951 HARD_REG_SET forbidden;
3953 COPY_HARD_REG_SET (forbidden, bad_spill_regs_global);
3954 IOR_HARD_REG_SET (forbidden, pseudo_forbidden_regs[i]);
3955 IOR_HARD_REG_SET (forbidden, pseudo_previous_regs[i]);
3956 retry_global_alloc (i, forbidden);
3957 if (reg_renumber[i] >= 0)
3958 CLEAR_REGNO_REG_SET (&spilled_pseudos, i);
3961 else
3963 unsigned int n;
3965 for (n = 0, i = FIRST_PSEUDO_REGISTER; i < (unsigned) max_regno; i++)
3966 if (reg_old_renumber[i] != reg_renumber[i])
3968 if (reg_renumber [i] < 0)
3969 temp_pseudo_reg_arr [n++] = i;
3970 else
3971 CLEAR_REGNO_REG_SET (&spilled_pseudos, i);
3973 if (reassign_pseudos (temp_pseudo_reg_arr, n, bad_spill_regs_global,
3974 pseudo_forbidden_regs, pseudo_previous_regs,
3975 &spilled_pseudos))
3976 something_changed = 1;
3980 /* Fix up the register information in the insn chain.
3981 This involves deleting those of the spilled pseudos which did not get
3982 a new hard register home from the live_{before,after} sets. */
3983 for (chain = reload_insn_chain; chain; chain = chain->next)
3985 HARD_REG_SET used_by_pseudos;
3986 HARD_REG_SET used_by_pseudos2;
3988 if (! flag_ira)
3990 AND_COMPL_REG_SET (&chain->live_throughout, &spilled_pseudos);
3991 AND_COMPL_REG_SET (&chain->dead_or_set, &spilled_pseudos);
3993 /* Mark any unallocated hard regs as available for spills. That
3994 makes inheritance work somewhat better. */
3995 if (chain->need_reload)
3997 REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
3998 REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
3999 IOR_HARD_REG_SET (used_by_pseudos, used_by_pseudos2);
4001 /* Save the old value for the sanity test below. */
4002 COPY_HARD_REG_SET (used_by_pseudos2, chain->used_spill_regs);
4004 compute_use_by_pseudos (&used_by_pseudos, &chain->live_throughout);
4005 compute_use_by_pseudos (&used_by_pseudos, &chain->dead_or_set);
4006 COMPL_HARD_REG_SET (chain->used_spill_regs, used_by_pseudos);
4007 AND_HARD_REG_SET (chain->used_spill_regs, used_spill_regs);
4009 /* Make sure we only enlarge the set. */
4010 gcc_assert (hard_reg_set_subset_p (used_by_pseudos2,
4011 chain->used_spill_regs));
4015 /* Let alter_reg modify the reg rtx's for the modified pseudos. */
4016 for (i = FIRST_PSEUDO_REGISTER; i < (unsigned)max_regno; i++)
4018 int regno = reg_renumber[i];
4019 if (reg_old_renumber[i] == regno)
4020 continue;
4022 alter_reg (i, reg_old_renumber[i], false);
4023 reg_old_renumber[i] = regno;
4024 if (dump_file)
4026 if (regno == -1)
4027 fprintf (dump_file, " Register %d now on stack.\n\n", i);
4028 else
4029 fprintf (dump_file, " Register %d now in %d.\n\n",
4030 i, reg_renumber[i]);
4034 return something_changed;
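/* Illustrative sketch (not part of GCC): how the spill_regs array and the
   spill_reg_order inverse map are built from a set of chosen hard registers,
   as in the first loop of finish_spills.  The bitmask stands in for the real
   used_spill_regs HARD_REG_SET and all names are hypothetical; the block
   compiles standalone if extracted.  */
#if 0
#include <stdio.h>

#define TOY_NREGS 8

int
main (void)
{
  unsigned used_mask = 0x2c;          /* regs 2, 3 and 5 were picked */
  int spill_regs[TOY_NREGS], spill_reg_order[TOY_NREGS];
  int i, n_spills = 0;

  for (i = 0; i < TOY_NREGS; i++)
    if (used_mask & (1u << i))
      {
        spill_reg_order[i] = n_spills;  /* position of reg i in spill_regs */
        spill_regs[n_spills++] = i;
      }
    else
      spill_reg_order[i] = -1;          /* reg i is not a spill reg */

  for (i = 0; i < n_spills; i++)
    printf ("spill_regs[%d] = %d\n", i, spill_regs[i]);
  return 0;
}
#endif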
4037 /* Find all paradoxical subregs within X and update reg_max_ref_width. */
4039 static void
4040 scan_paradoxical_subregs (rtx x)
4042 int i;
4043 const char *fmt;
4044 enum rtx_code code = GET_CODE (x);
4046 switch (code)
4048 case REG:
4049 case CONST_INT:
4050 case CONST:
4051 case SYMBOL_REF:
4052 case LABEL_REF:
4053 case CONST_DOUBLE:
4054 case CONST_FIXED:
4055 case CONST_VECTOR: /* shouldn't happen, but just in case. */
4056 case CC0:
4057 case PC:
4058 case USE:
4059 case CLOBBER:
4060 return;
4062 case SUBREG:
4063 if (REG_P (SUBREG_REG (x))
4064 && (GET_MODE_SIZE (GET_MODE (x))
4065 > reg_max_ref_width[REGNO (SUBREG_REG (x))]))
4067 reg_max_ref_width[REGNO (SUBREG_REG (x))]
4068 = GET_MODE_SIZE (GET_MODE (x));
4069 mark_home_live_1 (REGNO (SUBREG_REG (x)), GET_MODE (x));
4071 return;
4073 default:
4074 break;
4077 fmt = GET_RTX_FORMAT (code);
4078 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4080 if (fmt[i] == 'e')
4081 scan_paradoxical_subregs (XEXP (x, i));
4082 else if (fmt[i] == 'E')
4084 int j;
4085 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4086 scan_paradoxical_subregs (XVECEXP (x, i, j));
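/* Illustrative sketch (not part of GCC): the bookkeeping performed by
   scan_paradoxical_subregs.  A paradoxical SUBREG reads a register in a
   mode wider than the register's own mode, so the widest width ever used
   to reference each register is recorded and later consulted when stack
   slots are sized.  The names below are hypothetical; the block compiles
   standalone if extracted.  */
#if 0
#include <stdio.h>

static unsigned max_ref_width[16];

static void
note_reference (int regno, unsigned ref_bytes)
{
  if (ref_bytes > max_ref_width[regno])
    max_ref_width[regno] = ref_bytes;   /* widen the recorded width */
}

int
main (void)
{
  note_reference (3, 4);   /* 4-byte use of pseudo 3 */
  note_reference (3, 8);   /* paradoxical 8-byte subreg of the same pseudo */
  printf ("%u\n", max_ref_width[3]);   /* prints 8 */
  return 0;
}
#endif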
4091 /* A subroutine of reload_as_needed. If INSN has a REG_EH_REGION note,
4092 examine all of the reload insns between PREV and NEXT exclusive, and
4093 annotate all that may trap. */
4095 static void
4096 fixup_eh_region_note (rtx insn, rtx prev, rtx next)
4098 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
4099 unsigned int trap_count;
4100 rtx i;
4102 if (note == NULL)
4103 return;
4105 if (may_trap_p (PATTERN (insn)))
4106 trap_count = 1;
4107 else
4109 remove_note (insn, note);
4110 trap_count = 0;
4113 for (i = NEXT_INSN (prev); i != next; i = NEXT_INSN (i))
4114 if (INSN_P (i) && i != insn && may_trap_p (PATTERN (i)))
4116 trap_count++;
4117 REG_NOTES (i)
4118 = gen_rtx_EXPR_LIST (REG_EH_REGION, XEXP (note, 0), REG_NOTES (i));
4122 /* Reload pseudo-registers into hard regs around each insn as needed.
4123 Additional register load insns are output before the insn that needs it
4124 and perhaps store insns after insns that modify the reloaded pseudo reg.
4126 reg_last_reload_reg and reg_reloaded_contents keep track of
4127 which registers are already available in reload registers.
4128 We update these for the reloads that we perform,
4129 as the insns are scanned. */
4131 static void
4132 reload_as_needed (int live_known)
4134 struct insn_chain *chain;
4135 #if defined (AUTO_INC_DEC)
4136 int i;
4137 #endif
4138 rtx x;
4140 memset (spill_reg_rtx, 0, sizeof spill_reg_rtx);
4141 memset (spill_reg_store, 0, sizeof spill_reg_store);
4142 reg_last_reload_reg = XCNEWVEC (rtx, max_regno);
4143 INIT_REG_SET (&reg_has_output_reload);
4144 CLEAR_HARD_REG_SET (reg_reloaded_valid);
4145 CLEAR_HARD_REG_SET (reg_reloaded_call_part_clobbered);
4147 set_initial_elim_offsets ();
4149 for (chain = reload_insn_chain; chain; chain = chain->next)
4151 rtx prev = 0;
4152 rtx insn = chain->insn;
4153 rtx old_next = NEXT_INSN (insn);
4155 /* If we pass a label, copy the offsets from the label information
4156 into the current offsets of each elimination. */
4157 if (LABEL_P (insn))
4158 set_offsets_for_label (insn);
4160 else if (INSN_P (insn))
4162 regset_head regs_to_forget;
4163 INIT_REG_SET (&regs_to_forget);
4164 note_stores (PATTERN (insn), forget_old_reloads_1, &regs_to_forget);
4166 /* If this is a USE and CLOBBER of a MEM, ensure that any
4167 references to eliminable registers have been removed. */
4169 if ((GET_CODE (PATTERN (insn)) == USE
4170 || GET_CODE (PATTERN (insn)) == CLOBBER)
4171 && MEM_P (XEXP (PATTERN (insn), 0)))
4172 XEXP (XEXP (PATTERN (insn), 0), 0)
4173 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
4174 GET_MODE (XEXP (PATTERN (insn), 0)),
4175 NULL_RTX);
4177 /* If we need to do register elimination processing, do so.
4178 This might delete the insn, in which case we are done. */
4179 if ((num_eliminable || num_eliminable_invariants) && chain->need_elim)
4181 eliminate_regs_in_insn (insn, 1);
4182 if (NOTE_P (insn))
4184 update_eliminable_offsets ();
4185 CLEAR_REG_SET (&regs_to_forget);
4186 continue;
4190 /* If need_elim is nonzero but need_reload is zero, one might think
4191 that we could simply set n_reloads to 0. However, find_reloads
4192 could have done some manipulation of the insn (such as swapping
4193 commutative operands), and these manipulations are lost during
4194 the first pass for every insn that needs register elimination.
4195 So the actions of find_reloads must be redone here. */
4197 if (! chain->need_elim && ! chain->need_reload
4198 && ! chain->need_operand_change)
4199 n_reloads = 0;
4200 /* First find the pseudo regs that must be reloaded for this insn.
4201 This info is returned in the tables reload_... (see reload.h).
4202 Also modify the body of INSN by substituting RELOAD
4203 rtx's for those pseudo regs. */
4204 else
4206 CLEAR_REG_SET (&reg_has_output_reload);
4207 CLEAR_HARD_REG_SET (reg_is_output_reload);
4209 find_reloads (insn, 1, spill_indirect_levels, live_known,
4210 spill_reg_order);
4213 if (n_reloads > 0)
4215 rtx next = NEXT_INSN (insn);
4216 rtx p;
4218 prev = PREV_INSN (insn);
4220 /* Now compute which reload regs to reload them into. Perhaps
4221 reusing reload regs from previous insns, or else output
4222 load insns to reload them. Maybe output store insns too.
4223 Record the choices of reload reg in reload_reg_rtx. */
4224 choose_reload_regs (chain);
4226 /* Merge any reloads that we didn't combine for fear of
4227 increasing the number of spill registers needed but now
4228 discover can be safely merged. */
4229 if (SMALL_REGISTER_CLASSES)
4230 merge_assigned_reloads (insn);
4232 /* Generate the insns to reload operands into or out of
4233 their reload regs. */
4234 emit_reload_insns (chain);
4236 /* Substitute the chosen reload regs from reload_reg_rtx
4237 into the insn's body (or perhaps into the bodies of other
4238 load and store insns that we just made for reloading
4239 and that we moved the structure into). */
4240 subst_reloads (insn);
4242 /* Adjust the exception region notes for loads and stores. */
4243 if (flag_non_call_exceptions && !CALL_P (insn))
4244 fixup_eh_region_note (insn, prev, next);
4246 /* If this was an ASM, make sure that all the reload insns
4247 we have generated are valid. If not, give an error
4248 and delete them. */
4249 if (asm_noperands (PATTERN (insn)) >= 0)
4250 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
4251 if (p != insn && INSN_P (p)
4252 && GET_CODE (PATTERN (p)) != USE
4253 && (recog_memoized (p) < 0
4254 || (extract_insn (p), ! constrain_operands (1))))
4256 error_for_asm (insn,
4257 "%<asm%> operand requires "
4258 "impossible reload");
4259 delete_insn (p);
4263 if (num_eliminable && chain->need_elim)
4264 update_eliminable_offsets ();
4266 /* Any previously reloaded spilled pseudo reg, stored in this insn,
4267 is no longer validly lying around to save a future reload.
4268 Note that this does not detect pseudos that were reloaded
4269 for this insn in order to be stored in it
4270 (obeying register constraints). That is correct; such reload
4271 registers ARE still valid. */
4272 forget_marked_reloads (&regs_to_forget);
4273 CLEAR_REG_SET (&regs_to_forget);
4275 /* There may have been CLOBBER insns placed after INSN. So scan
4276 between INSN and NEXT and use them to forget old reloads. */
4277 for (x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
4278 if (NONJUMP_INSN_P (x) && GET_CODE (PATTERN (x)) == CLOBBER)
4279 note_stores (PATTERN (x), forget_old_reloads_1, NULL);
4281 #ifdef AUTO_INC_DEC
4282 /* Likewise for regs altered by auto-increment in this insn.
4283 REG_INC notes have been changed by reloading:
4284 find_reloads_address_1 records substitutions for them,
4285 which have been performed by subst_reloads above. */
4286 for (i = n_reloads - 1; i >= 0; i--)
4288 rtx in_reg = rld[i].in_reg;
4289 if (in_reg)
4291 enum rtx_code code = GET_CODE (in_reg);
4292 /* PRE_INC / PRE_DEC will have the reload register ending up
4293 with the same value as the stack slot, but that doesn't
4294 hold true for POST_INC / POST_DEC. Either we have to
4295 convert the memory access to a true POST_INC / POST_DEC,
4296 or we can't use the reload register for inheritance. */
4297 if ((code == POST_INC || code == POST_DEC)
4298 && TEST_HARD_REG_BIT (reg_reloaded_valid,
4299 REGNO (rld[i].reg_rtx))
4300 /* Make sure it is the inc/dec pseudo, and not
4301 some other (e.g. output operand) pseudo. */
4302 && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4303 == REGNO (XEXP (in_reg, 0))))
4306 rtx reload_reg = rld[i].reg_rtx;
4307 enum machine_mode mode = GET_MODE (reload_reg);
4308 int n = 0;
4309 rtx p;
4311 for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p))
4313 /* We really want to ignore REG_INC notes here, so
4314 use PATTERN (p) as argument to reg_set_p . */
4315 if (reg_set_p (reload_reg, PATTERN (p)))
4316 break;
4317 n = count_occurrences (PATTERN (p), reload_reg, 0);
4318 if (! n)
4319 continue;
4320 if (n == 1)
4322 n = validate_replace_rtx (reload_reg,
4323 gen_rtx_fmt_e (code,
4324 mode,
4325 reload_reg),
4328 /* We must also verify that the constraints
4329 are met after the replacement. */
4330 extract_insn (p);
4331 if (n)
4332 n = constrain_operands (1);
4333 else
4334 break;
4336 /* If the constraints were not met, then
4337 undo the replacement. */
4338 if (!n)
4340 validate_replace_rtx (gen_rtx_fmt_e (code,
4341 mode,
4342 reload_reg),
4343 reload_reg, p);
4344 break;
4348 break;
4350 if (n == 1)
4352 REG_NOTES (p)
4353 = gen_rtx_EXPR_LIST (REG_INC, reload_reg,
4354 REG_NOTES (p));
4355 /* Mark this as having an output reload so that the
4356 REG_INC processing code below won't invalidate
4357 the reload for inheritance. */
4358 SET_HARD_REG_BIT (reg_is_output_reload,
4359 REGNO (reload_reg));
4360 SET_REGNO_REG_SET (&reg_has_output_reload,
4361 REGNO (XEXP (in_reg, 0)));
4363 else
4364 forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX,
4365 NULL);
4367 else if ((code == PRE_INC || code == PRE_DEC)
4368 && TEST_HARD_REG_BIT (reg_reloaded_valid,
4369 REGNO (rld[i].reg_rtx))
4370 /* Make sure it is the inc/dec pseudo, and not
4371 some other (e.g. output operand) pseudo. */
4372 && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4373 == REGNO (XEXP (in_reg, 0))))
4375 SET_HARD_REG_BIT (reg_is_output_reload,
4376 REGNO (rld[i].reg_rtx));
4377 SET_REGNO_REG_SET (&reg_has_output_reload,
4378 REGNO (XEXP (in_reg, 0)));
4382 /* If a pseudo that got a hard register is auto-incremented,
4383 we must purge records of copying it into pseudos without
4384 hard registers. */
4385 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
4386 if (REG_NOTE_KIND (x) == REG_INC)
4388 /* See if this pseudo reg was reloaded in this insn.
4389 If so, its last-reload info is still valid
4390 because it is based on this insn's reload. */
4391 for (i = 0; i < n_reloads; i++)
4392 if (rld[i].out == XEXP (x, 0))
4393 break;
4395 if (i == n_reloads)
4396 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX, NULL);
4398 #endif
4400 /* A reload reg's contents are unknown after a label. */
4401 if (LABEL_P (insn))
4402 CLEAR_HARD_REG_SET (reg_reloaded_valid);
4404 /* Don't assume a reload reg is still good after a call insn
4405 if it is a call-used reg, or if it contains a value that will
4406 be partially clobbered by the call. */
4407 else if (CALL_P (insn))
4409 AND_COMPL_HARD_REG_SET (reg_reloaded_valid, call_used_reg_set);
4410 AND_COMPL_HARD_REG_SET (reg_reloaded_valid, reg_reloaded_call_part_clobbered);
4414 /* Clean up. */
4415 free (reg_last_reload_reg);
4416 CLEAR_REG_SET (&reg_has_output_reload);
4419 /* Discard all record of any value reloaded from X,
4420 or reloaded in X from someplace else;
4421 unless X is an output reload reg of the current insn.
4423 X may be a hard reg (the reload reg)
4424 or it may be a pseudo reg that was reloaded from.
4426 When DATA is non-NULL just mark the registers in regset
4427 to be forgotten later. */
4429 static void
4430 forget_old_reloads_1 (rtx x, const_rtx ignored ATTRIBUTE_UNUSED,
4431 void *data)
4433 unsigned int regno;
4434 unsigned int nr;
4435 regset regs = (regset) data;
4437 /* note_stores does give us subregs of hard regs,
4438 subreg_regno_offset requires a hard reg. */
4439 while (GET_CODE (x) == SUBREG)
4441 /* We ignore the subreg offset when calculating the regno,
4442 because we are using the entire underlying hard register
4443 below. */
4444 x = SUBREG_REG (x);
4447 if (!REG_P (x))
4448 return;
4450 regno = REGNO (x);
4452 if (regno >= FIRST_PSEUDO_REGISTER)
4453 nr = 1;
4454 else
4456 unsigned int i;
4458 nr = hard_regno_nregs[regno][GET_MODE (x)];
4459 /* Storing into a spilled-reg invalidates its contents.
4460 This can happen if a block-local pseudo is allocated to that reg
4461 and it wasn't spilled because this block's total need is 0.
4462 Then some insn might have an optional reload and use this reg. */
4463 if (!regs)
4464 for (i = 0; i < nr; i++)
4465 /* But don't do this if the reg actually serves as an output
4466 reload reg in the current instruction. */
4467 if (n_reloads == 0
4468 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
4470 CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
4471 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered, regno + i);
4472 spill_reg_store[regno + i] = 0;
4476 if (regs)
4477 while (nr-- > 0)
4478 SET_REGNO_REG_SET (regs, regno + nr);
4479 else
4481 /* Since value of X has changed,
4482 forget any value previously copied from it. */
4484 while (nr-- > 0)
4485 /* But don't forget a copy if this is the output reload
4486 that establishes the copy's validity. */
4487 if (n_reloads == 0
4488 || !REGNO_REG_SET_P (&reg_has_output_reload, regno + nr))
4489 reg_last_reload_reg[regno + nr] = 0;
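/* Illustrative sketch (not part of GCC): the invalidation rule applied
   above.  When a register is stored into, any cached fact of the form
   "register R still holds a copy of X" must be dropped, unless the store is
   itself the output reload that creates the copy.  The cache below is a
   hypothetical stand-in for reg_last_reload_reg; the block compiles
   standalone if extracted.  */
#if 0
#include <stdio.h>

#define TOY_NREGS 8
static int copy_of[TOY_NREGS];   /* copy_of[r] = pseudo cached in r, or -1 */

static void
toy_forget (int regno, int is_output_reload)
{
  if (!is_output_reload)
    copy_of[regno] = -1;         /* the old contents are no longer valid */
}

int
main (void)
{
  int r;
  for (r = 0; r < TOY_NREGS; r++)
    copy_of[r] = -1;
  copy_of[5] = 42;               /* reg 5 currently holds pseudo 42 */
  toy_forget (5, 0);             /* an unrelated store clobbers reg 5 */
  printf ("%d\n", copy_of[5]);   /* prints -1 */
  return 0;
}
#endif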
4493 /* Forget the reloads marked in REGS by the previous function. */
4494 static void
4495 forget_marked_reloads (regset regs)
4497 unsigned int reg;
4498 reg_set_iterator rsi;
4499 EXECUTE_IF_SET_IN_REG_SET (regs, 0, reg, rsi)
4501 if (reg < FIRST_PSEUDO_REGISTER
4502 /* But don't do this if the reg actually serves as an output
4503 reload reg in the current instruction. */
4504 && (n_reloads == 0
4505 || ! TEST_HARD_REG_BIT (reg_is_output_reload, reg)))
4507 CLEAR_HARD_REG_BIT (reg_reloaded_valid, reg);
4508 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered, reg);
4509 spill_reg_store[reg] = 0;
4511 if (n_reloads == 0
4512 || !REGNO_REG_SET_P (&reg_has_output_reload, reg))
4513 reg_last_reload_reg[reg] = 0;
4517 /* The following HARD_REG_SETs indicate when each hard register is
4518 used for a reload of various parts of the current insn. */
4520 /* If reg is unavailable for all reloads. */
4521 static HARD_REG_SET reload_reg_unavailable;
4522 /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4523 static HARD_REG_SET reload_reg_used;
4524 /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4525 static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4526 /* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I. */
4527 static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
4528 /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4529 static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4530 /* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I. */
4531 static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
4532 /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4533 static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4534 /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4535 static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4536 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4537 static HARD_REG_SET reload_reg_used_in_op_addr;
4538 /* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4539 static HARD_REG_SET reload_reg_used_in_op_addr_reload;
4540 /* If reg is in use for a RELOAD_FOR_INSN reload. */
4541 static HARD_REG_SET reload_reg_used_in_insn;
4542 /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4543 static HARD_REG_SET reload_reg_used_in_other_addr;
4545 /* If reg is in use as a reload reg for any sort of reload. */
4546 static HARD_REG_SET reload_reg_used_at_all;
4548 /* If reg is used as an inherited reload. We just mark the first register
4549 in the group. */
4550 static HARD_REG_SET reload_reg_used_for_inherit;
4552 /* Records which hard regs are used in any way, either as explicit use or
4553 by being allocated to a pseudo during any point of the current insn. */
4554 static HARD_REG_SET reg_used_in_insn;
4556 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4557 TYPE. MODE is used to indicate how many consecutive regs are
4558 actually used. */
4560 static void
4561 mark_reload_reg_in_use (unsigned int regno, int opnum, enum reload_type type,
4562 enum machine_mode mode)
4564 unsigned int nregs = hard_regno_nregs[regno][mode];
4565 unsigned int i;
4567 for (i = regno; i < nregs + regno; i++)
4569 switch (type)
4571 case RELOAD_OTHER:
4572 SET_HARD_REG_BIT (reload_reg_used, i);
4573 break;
4575 case RELOAD_FOR_INPUT_ADDRESS:
4576 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4577 break;
4579 case RELOAD_FOR_INPADDR_ADDRESS:
4580 SET_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4581 break;
4583 case RELOAD_FOR_OUTPUT_ADDRESS:
4584 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4585 break;
4587 case RELOAD_FOR_OUTADDR_ADDRESS:
4588 SET_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4589 break;
4591 case RELOAD_FOR_OPERAND_ADDRESS:
4592 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4593 break;
4595 case RELOAD_FOR_OPADDR_ADDR:
4596 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4597 break;
4599 case RELOAD_FOR_OTHER_ADDRESS:
4600 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4601 break;
4603 case RELOAD_FOR_INPUT:
4604 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4605 break;
4607 case RELOAD_FOR_OUTPUT:
4608 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4609 break;
4611 case RELOAD_FOR_INSN:
4612 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
4613 break;
4616 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
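/* Illustrative sketch (not part of GCC): what mark_reload_reg_in_use does
   once the reload type has selected a set.  A value occupying several
   consecutive hard registers marks every one of them, plus the global
   "used at all" set.  The bitmasks stand in for HARD_REG_SETs and all names
   are hypothetical; the block compiles standalone if extracted.  */
#if 0
#include <stdio.h>

static void
toy_mark (unsigned *type_set, unsigned *used_at_all, int regno, int nregs)
{
  int i;
  for (i = regno; i < regno + nregs; i++)
    {
      *type_set |= 1u << i;      /* busy for this kind of reload */
      *used_at_all |= 1u << i;   /* busy for the insn as a whole */
    }
}

int
main (void)
{
  unsigned input_set = 0, used_at_all = 0;
  toy_mark (&input_set, &used_at_all, 4, 2);    /* two-reg reload in regs 4-5 */
  printf ("%#x %#x\n", input_set, used_at_all); /* prints 0x30 0x30 */
  return 0;
}
#endif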
4620 /* Similarly, but show REGNO is no longer in use for a reload. */
4622 static void
4623 clear_reload_reg_in_use (unsigned int regno, int opnum,
4624 enum reload_type type, enum machine_mode mode)
4626 unsigned int nregs = hard_regno_nregs[regno][mode];
4627 unsigned int start_regno, end_regno, r;
4628 int i;
4629 /* A complication is that for some reload types, inheritance might
4630 allow multiple reloads of the same types to share a reload register.
4631 We set check_opnum if we have to check only reloads with the same
4632 operand number, and check_any if we have to check all reloads. */
4633 int check_opnum = 0;
4634 int check_any = 0;
4635 HARD_REG_SET *used_in_set;
4637 switch (type)
4639 case RELOAD_OTHER:
4640 used_in_set = &reload_reg_used;
4641 break;
4643 case RELOAD_FOR_INPUT_ADDRESS:
4644 used_in_set = &reload_reg_used_in_input_addr[opnum];
4645 break;
4647 case RELOAD_FOR_INPADDR_ADDRESS:
4648 check_opnum = 1;
4649 used_in_set = &reload_reg_used_in_inpaddr_addr[opnum];
4650 break;
4652 case RELOAD_FOR_OUTPUT_ADDRESS:
4653 used_in_set = &reload_reg_used_in_output_addr[opnum];
4654 break;
4656 case RELOAD_FOR_OUTADDR_ADDRESS:
4657 check_opnum = 1;
4658 used_in_set = &reload_reg_used_in_outaddr_addr[opnum];
4659 break;
4661 case RELOAD_FOR_OPERAND_ADDRESS:
4662 used_in_set = &reload_reg_used_in_op_addr;
4663 break;
4665 case RELOAD_FOR_OPADDR_ADDR:
4666 check_any = 1;
4667 used_in_set = &reload_reg_used_in_op_addr_reload;
4668 break;
4670 case RELOAD_FOR_OTHER_ADDRESS:
4671 used_in_set = &reload_reg_used_in_other_addr;
4672 check_any = 1;
4673 break;
4675 case RELOAD_FOR_INPUT:
4676 used_in_set = &reload_reg_used_in_input[opnum];
4677 break;
4679 case RELOAD_FOR_OUTPUT:
4680 used_in_set = &reload_reg_used_in_output[opnum];
4681 break;
4683 case RELOAD_FOR_INSN:
4684 used_in_set = &reload_reg_used_in_insn;
4685 break;
4686 default:
4687 gcc_unreachable ();
4689 /* We resolve conflicts with remaining reloads of the same type by
4690 excluding the intervals of reload registers by them from the
4691 interval of freed reload registers. Since we only keep track of
4692 one set of interval bounds, we might have to exclude somewhat
4693 more than what would be necessary if we used a HARD_REG_SET here.
4694 But this should only happen very infrequently, so there should
4695 be no reason to worry about it. */
4697 start_regno = regno;
4698 end_regno = regno + nregs;
4699 if (check_opnum || check_any)
4701 for (i = n_reloads - 1; i >= 0; i--)
4703 if (rld[i].when_needed == type
4704 && (check_any || rld[i].opnum == opnum)
4705 && rld[i].reg_rtx)
4707 unsigned int conflict_start = true_regnum (rld[i].reg_rtx);
4708 unsigned int conflict_end
4709 = end_hard_regno (rld[i].mode, conflict_start);
4711 /* If there is an overlap with the first to-be-freed register,
4712 adjust the interval start. */
4713 if (conflict_start <= start_regno && conflict_end > start_regno)
4714 start_regno = conflict_end;
4715 /* Otherwise, if there is a conflict with one of the other
4716 to-be-freed registers, adjust the interval end. */
4717 if (conflict_start > start_regno && conflict_start < end_regno)
4718 end_regno = conflict_start;
4723 for (r = start_regno; r < end_regno; r++)
4724 CLEAR_HARD_REG_BIT (*used_in_set, r);
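/* Illustrative sketch (not part of GCC): the interval trimming used above.
   Instead of clearing exactly the registers that are safe to free, the code
   keeps one [start, end) interval and shrinks it around every conflicting
   reload register, which may leave a few registers marked busy longer than
   strictly necessary.  The names below are hypothetical; the block compiles
   standalone if extracted.  */
#if 0
#include <stdio.h>

static void
toy_trim (unsigned *start, unsigned *end,
          unsigned conflict_start, unsigned conflict_end)
{
  if (conflict_start <= *start && conflict_end > *start)
    *start = conflict_end;                    /* conflict covers the front */
  if (conflict_start > *start && conflict_start < *end)
    *end = conflict_start;                    /* conflict cuts off the back */
}

int
main (void)
{
  unsigned start = 2, end = 6;       /* want to free hard regs 2..5 */
  toy_trim (&start, &end, 4, 6);     /* regs 4-5 still belong to a reload */
  printf ("[%u, %u)\n", start, end); /* prints [2, 4) */
  return 0;
}
#endif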
4727 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
4728 specified by OPNUM and TYPE. */
4730 static int
4731 reload_reg_free_p (unsigned int regno, int opnum, enum reload_type type)
4733 int i;
4735 /* In use for a RELOAD_OTHER means it's not available for anything. */
4736 if (TEST_HARD_REG_BIT (reload_reg_used, regno)
4737 || TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
4738 return 0;
4740 switch (type)
4742 case RELOAD_OTHER:
4743 /* In use for anything means we can't use it for RELOAD_OTHER. */
4744 if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4745 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4746 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
4747 || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4748 return 0;
4750 for (i = 0; i < reload_n_operands; i++)
4751 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4752 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4753 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4754 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4755 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4756 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4757 return 0;
4759 return 1;
4761 case RELOAD_FOR_INPUT:
4762 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4763 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4764 return 0;
4766 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4767 return 0;
4769 /* If it is used for some other input, can't use it. */
4770 for (i = 0; i < reload_n_operands; i++)
4771 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4772 return 0;
4774 /* If it is used in a later operand's address, can't use it. */
4775 for (i = opnum + 1; i < reload_n_operands; i++)
4776 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4777 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4778 return 0;
4780 return 1;
4782 case RELOAD_FOR_INPUT_ADDRESS:
4783 /* Can't use a register if it is used for an input address for this
4784 operand or used as an input in an earlier one. */
4785 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
4786 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4787 return 0;
4789 for (i = 0; i < opnum; i++)
4790 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4791 return 0;
4793 return 1;
4795 case RELOAD_FOR_INPADDR_ADDRESS:
4796 /* Can't use a register if it is used for an input address
4797 for this operand or used as an input in an earlier
4798 one. */
4799 if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4800 return 0;
4802 for (i = 0; i < opnum; i++)
4803 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4804 return 0;
4806 return 1;
4808 case RELOAD_FOR_OUTPUT_ADDRESS:
4809 /* Can't use a register if it is used for an output address for this
4810 operand or used as an output in this or a later operand. Note
4811 that multiple output operands are emitted in reverse order, so
4812 the conflicting ones are those with lower indices. */
4813 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4814 return 0;
4816 for (i = 0; i <= opnum; i++)
4817 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4818 return 0;
4820 return 1;
4822 case RELOAD_FOR_OUTADDR_ADDRESS:
4823 /* Can't use a register if it is used for an output address
4824 for this operand or used as an output in this or a
4825 later operand. Note that multiple output operands are
4826 emitted in reverse order, so the conflicting ones are
4827 those with lower indices. */
4828 if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4829 return 0;
4831 for (i = 0; i <= opnum; i++)
4832 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4833 return 0;
4835 return 1;
4837 case RELOAD_FOR_OPERAND_ADDRESS:
4838 for (i = 0; i < reload_n_operands; i++)
4839 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4840 return 0;
4842 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4843 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4845 case RELOAD_FOR_OPADDR_ADDR:
4846 for (i = 0; i < reload_n_operands; i++)
4847 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4848 return 0;
4850 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
4852 case RELOAD_FOR_OUTPUT:
4853 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4854 outputs, or an operand address for this or an earlier output.
4855 Note that multiple output operands are emitted in reverse order,
4856 so the conflicting ones are those with higher indices. */
4857 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4858 return 0;
4860 for (i = 0; i < reload_n_operands; i++)
4861 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4862 return 0;
4864 for (i = opnum; i < reload_n_operands; i++)
4865 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4866 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4867 return 0;
4869 return 1;
4871 case RELOAD_FOR_INSN:
4872 for (i = 0; i < reload_n_operands; i++)
4873 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4874 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4875 return 0;
4877 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4878 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4880 case RELOAD_FOR_OTHER_ADDRESS:
4881 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4883 default:
4884 gcc_unreachable ();
4888 /* Return 1 if the value in reload reg REGNO, as used by a reload
4889 needed for the part of the insn specified by OPNUM and TYPE,
4890 is still available in REGNO at the end of the insn.
4892 We can assume that the reload reg was already tested for availability
4893 at the time it is needed, and we should not check this again,
4894 in case the reg has already been marked in use. */
4896 static int
4897 reload_reg_reaches_end_p (unsigned int regno, int opnum, enum reload_type type)
4899 int i;
4901 switch (type)
4903 case RELOAD_OTHER:
4904 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4905 its value must reach the end. */
4906 return 1;
4908 /* If this use is for part of the insn,
4909 its value reaches the end if no subsequent part uses the same register.
4910 Just like the above function, don't try to do this with lots
4911 of fallthroughs. */
4913 case RELOAD_FOR_OTHER_ADDRESS:
4914 /* Here we check for everything else, since these don't conflict
4915 with anything else and everything comes later. */
4917 for (i = 0; i < reload_n_operands; i++)
4918 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4919 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4920 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4921 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4922 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4923 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4924 return 0;
4926 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4927 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
4928 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4929 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4931 case RELOAD_FOR_INPUT_ADDRESS:
4932 case RELOAD_FOR_INPADDR_ADDRESS:
4933 /* Similar, except that we check only for this and subsequent inputs
4934 and the address of only subsequent inputs and we do not need
4935 to check for RELOAD_OTHER objects since they are known not to
4936 conflict. */
4938 for (i = opnum; i < reload_n_operands; i++)
4939 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4940 return 0;
4942 for (i = opnum + 1; i < reload_n_operands; i++)
4943 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4944 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4945 return 0;
4947 for (i = 0; i < reload_n_operands; i++)
4948 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4949 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4950 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4951 return 0;
4953 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4954 return 0;
4956 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4957 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4958 && !TEST_HARD_REG_BIT (reload_reg_used, regno));
4960 case RELOAD_FOR_INPUT:
4961 /* Similar to input address, except we start at the next operand for
4962 both input and input address and we do not check for
4963 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4964 would conflict. */
4966 for (i = opnum + 1; i < reload_n_operands; i++)
4967 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4968 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4969 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4970 return 0;
4972 /* ... fall through ... */
4974 case RELOAD_FOR_OPERAND_ADDRESS:
4975 /* Check outputs and their addresses. */
4977 for (i = 0; i < reload_n_operands; i++)
4978 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4979 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4980 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4981 return 0;
4983 return (!TEST_HARD_REG_BIT (reload_reg_used, regno));
4985 case RELOAD_FOR_OPADDR_ADDR:
4986 for (i = 0; i < reload_n_operands; i++)
4987 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4988 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4989 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4990 return 0;
4992 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4993 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4994 && !TEST_HARD_REG_BIT (reload_reg_used, regno));
4996 case RELOAD_FOR_INSN:
4997 /* These conflict with other outputs with RELOAD_OTHER. So
4998 we need only check for output addresses. */
5000 opnum = reload_n_operands;
5002 /* ... fall through ... */
5004 case RELOAD_FOR_OUTPUT:
5005 case RELOAD_FOR_OUTPUT_ADDRESS:
5006 case RELOAD_FOR_OUTADDR_ADDRESS:
5007 /* We already know these can't conflict with a later output. So the
5008 only thing to check are later output addresses.
5009 Note that multiple output operands are emitted in reverse order,
5010 so the conflicting ones are those with lower indices. */
5011 for (i = 0; i < opnum; i++)
5012 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5013 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
5014 return 0;
5016 return 1;
5018 default:
5019 gcc_unreachable ();
5024 /* Returns whether R1 and R2 are uniquely chained: the value of one
5025 is used by the other, and that value is not used by any other
5026 reload for this insn. This is used to partially undo the decision
5027 made in find_reloads when in the case of multiple
5028 RELOAD_FOR_OPERAND_ADDRESS reloads it converts all
5029 RELOAD_FOR_OPADDR_ADDR reloads into RELOAD_FOR_OPERAND_ADDRESS
5030 reloads. This code tries to avoid the conflict created by that
5031 change. It might be cleaner to explicitly keep track of which
5032 RELOAD_FOR_OPADDR_ADDR reload is associated with which
5033 RELOAD_FOR_OPERAND_ADDRESS reload, rather than to try to detect
5034 this after the fact. */
5035 static bool
5036 reloads_unique_chain_p (int r1, int r2)
5038 int i;
5040 /* We only check input reloads. */
5041 if (! rld[r1].in || ! rld[r2].in)
5042 return false;
5044 /* Avoid anything with output reloads. */
5045 if (rld[r1].out || rld[r2].out)
5046 return false;
5048 /* "chained" means one reload is a component of the other reload,
5049 not the same as the other reload. */
5050 if (rld[r1].opnum != rld[r2].opnum
5051 || rtx_equal_p (rld[r1].in, rld[r2].in)
5052 || rld[r1].optional || rld[r2].optional
5053 || ! (reg_mentioned_p (rld[r1].in, rld[r2].in)
5054 || reg_mentioned_p (rld[r2].in, rld[r1].in)))
5055 return false;
5057 for (i = 0; i < n_reloads; i ++)
5058 /* Look for input reloads that aren't our two */
5059 if (i != r1 && i != r2 && rld[i].in)
5061 /* If our reload is mentioned at all, it isn't a simple chain. */
5062 if (reg_mentioned_p (rld[r1].in, rld[i].in))
5063 return false;
5065 return true;
5068 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
5069 Return 0 otherwise.
5071 This function uses the same algorithm as reload_reg_free_p above. */
5073 static int
5074 reloads_conflict (int r1, int r2)
5076 enum reload_type r1_type = rld[r1].when_needed;
5077 enum reload_type r2_type = rld[r2].when_needed;
5078 int r1_opnum = rld[r1].opnum;
5079 int r2_opnum = rld[r2].opnum;
5081 /* RELOAD_OTHER conflicts with everything. */
5082 if (r2_type == RELOAD_OTHER)
5083 return 1;
5085 /* Otherwise, check conflicts differently for each type. */
5087 switch (r1_type)
5089 case RELOAD_FOR_INPUT:
5090 return (r2_type == RELOAD_FOR_INSN
5091 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
5092 || r2_type == RELOAD_FOR_OPADDR_ADDR
5093 || r2_type == RELOAD_FOR_INPUT
5094 || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
5095 || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
5096 && r2_opnum > r1_opnum));
5098 case RELOAD_FOR_INPUT_ADDRESS:
5099 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
5100 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5102 case RELOAD_FOR_INPADDR_ADDRESS:
5103 return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
5104 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5106 case RELOAD_FOR_OUTPUT_ADDRESS:
5107 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
5108 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
5110 case RELOAD_FOR_OUTADDR_ADDRESS:
5111 return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
5112 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
5114 case RELOAD_FOR_OPERAND_ADDRESS:
5115 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
5116 || (r2_type == RELOAD_FOR_OPERAND_ADDRESS
5117 && !reloads_unique_chain_p (r1, r2)));
5119 case RELOAD_FOR_OPADDR_ADDR:
5120 return (r2_type == RELOAD_FOR_INPUT
5121 || r2_type == RELOAD_FOR_OPADDR_ADDR);
5123 case RELOAD_FOR_OUTPUT:
5124 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
5125 || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
5126 || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
5127 && r2_opnum >= r1_opnum));
5129 case RELOAD_FOR_INSN:
5130 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
5131 || r2_type == RELOAD_FOR_INSN
5132 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
5134 case RELOAD_FOR_OTHER_ADDRESS:
5135 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
5137 case RELOAD_OTHER:
5138 return 1;
5140 default:
5141 gcc_unreachable ();
5145 /* Indexed by reload number, 1 if incoming value
5146 inherited from previous insns. */
5147 static char reload_inherited[MAX_RELOADS];
5149 /* For an inherited reload, this is the insn the reload was inherited from,
5150 if we know it. Otherwise, this is 0. */
5151 static rtx reload_inheritance_insn[MAX_RELOADS];
5153 /* If nonzero, this is a place to get the value of the reload,
5154 rather than using reload_in. */
5155 static rtx reload_override_in[MAX_RELOADS];
5157 /* For each reload, the hard register number of the register used,
5158 or -1 if we did not need a register for this reload. */
5159 static int reload_spill_index[MAX_RELOADS];
5161 /* Subroutine of free_for_value_p, used to check a single register.
5162 START_REGNO is the starting regno of the full reload register
5163 (possibly comprising multiple hard registers) that we are considering. */
5165 static int
5166 reload_reg_free_for_value_p (int start_regno, int regno, int opnum,
5167 enum reload_type type, rtx value, rtx out,
5168 int reloadnum, int ignore_address_reloads)
5170 int time1;
5171 /* Set if we see an input reload that must not share its reload register
5172 with any new earlyclobber, but might otherwise share the reload
5173 register with an output or input-output reload. */
5174 int check_earlyclobber = 0;
5175 int i;
5176 int copy = 0;
5178 if (TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
5179 return 0;
5181 if (out == const0_rtx)
5183 copy = 1;
5184 out = NULL_RTX;
5187 /* We use some pseudo 'time' value to check if the lifetimes of the
5188 new register use would overlap with the one of a previous reload
5189 that is not read-only or uses a different value.
5190 The 'time' used doesn't have to be linear in any shape or form, just
5191 monotonic.
5192 Some reload types use different 'buckets' for each operand.
5193 So there are MAX_RECOG_OPERANDS different time values for each
5194 such reload type.
5195 We compute TIME1 as the time when the register for the prospective
5196 new reload ceases to be live, and TIME2 for each existing
5197 reload as the time when the reload register of that reload
5198 becomes live.
5199 Where there is little to be gained by exact lifetime calculations,
5200 we just make conservative assumptions, i.e. a longer lifetime;
5201 this is done in the 'default:' cases. */
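/* For reference, the TIME1 values assigned by the switch below are:
     RELOAD_FOR_OTHER_ADDRESS   0 if copying, else 1
     RELOAD_OTHER               1 if copying, else MAX_RECOG_OPERANDS * 5 + 5
     RELOAD_FOR_INPADDR_ADDRESS opnum * 4 + 2
     RELOAD_FOR_INPUT_ADDRESS   opnum * 4 + 3
     RELOAD_FOR_INPUT           opnum * 4 + 4 if copying, else MAX_RECOG_OPERANDS * 4 + 3
     RELOAD_FOR_OPADDR_ADDR     MAX_RECOG_OPERANDS * 4 + 1
     RELOAD_FOR_OPERAND_ADDRESS MAX_RECOG_OPERANDS * 4 + 2 or + 3
     RELOAD_FOR_OUTADDR_ADDRESS MAX_RECOG_OPERANDS * 4 + 4 + opnum
     RELOAD_FOR_OUTPUT_ADDRESS  MAX_RECOG_OPERANDS * 4 + 5 + opnum
     anything else              MAX_RECOG_OPERANDS * 5 + 5  */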
5202 switch (type)
5204 case RELOAD_FOR_OTHER_ADDRESS:
5205 /* RELOAD_FOR_OTHER_ADDRESS conflicts with RELOAD_OTHER reloads. */
5206 time1 = copy ? 0 : 1;
5207 break;
5208 case RELOAD_OTHER:
5209 time1 = copy ? 1 : MAX_RECOG_OPERANDS * 5 + 5;
5210 break;
5211 /* For each input, we may have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
5212 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT. By adding 0 / 1 / 2,
5213 respectively, to the time values for these, we get distinct time
5214 values. To get distinct time values for each operand, we have to
5215 multiply opnum by at least three. We round that up to four because
5216 multiply by four is often cheaper. */
5217 case RELOAD_FOR_INPADDR_ADDRESS:
5218 time1 = opnum * 4 + 2;
5219 break;
5220 case RELOAD_FOR_INPUT_ADDRESS:
5221 time1 = opnum * 4 + 3;
5222 break;
5223 case RELOAD_FOR_INPUT:
5224 /* All RELOAD_FOR_INPUT reloads remain live till the instruction
5225 executes (inclusive). */
5226 time1 = copy ? opnum * 4 + 4 : MAX_RECOG_OPERANDS * 4 + 3;
5227 break;
5228 case RELOAD_FOR_OPADDR_ADDR:
5229 /* opnum * 4 + 4
5230 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
5231 time1 = MAX_RECOG_OPERANDS * 4 + 1;
5232 break;
5233 case RELOAD_FOR_OPERAND_ADDRESS:
5234 /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
5235 is executed. */
5236 time1 = copy ? MAX_RECOG_OPERANDS * 4 + 2 : MAX_RECOG_OPERANDS * 4 + 3;
5237 break;
5238 case RELOAD_FOR_OUTADDR_ADDRESS:
5239 time1 = MAX_RECOG_OPERANDS * 4 + 4 + opnum;
5240 break;
5241 case RELOAD_FOR_OUTPUT_ADDRESS:
5242 time1 = MAX_RECOG_OPERANDS * 4 + 5 + opnum;
5243 break;
5244 default:
5245 time1 = MAX_RECOG_OPERANDS * 5 + 5;
5248 for (i = 0; i < n_reloads; i++)
5250 rtx reg = rld[i].reg_rtx;
5251 if (reg && REG_P (reg)
5252 && ((unsigned) regno - true_regnum (reg)
5253 <= hard_regno_nregs[REGNO (reg)][GET_MODE (reg)] - (unsigned) 1)
5254 && i != reloadnum)
5256 rtx other_input = rld[i].in;
5258 /* If the other reload loads the same input value, that
5259 will not cause a conflict only if it's loading it into
5260 the same register. */
5261 if (true_regnum (reg) != start_regno)
5262 other_input = NULL_RTX;
5263 if (! other_input || ! rtx_equal_p (other_input, value)
5264 || rld[i].out || out)
5266 int time2;
5267 switch (rld[i].when_needed)
5269 case RELOAD_FOR_OTHER_ADDRESS:
5270 time2 = 0;
5271 break;
5272 case RELOAD_FOR_INPADDR_ADDRESS:
5273 /* find_reloads makes sure that a
5274 RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is only used
5275 by at most one - the first -
5276 RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS. If the
5277 address reload is inherited, the address address reload
5278 goes away, so we can ignore this conflict. */
5279 if (type == RELOAD_FOR_INPUT_ADDRESS && reloadnum == i + 1
5280 && ignore_address_reloads
5281 /* Unless the RELOAD_FOR_INPUT is an auto_inc expression.
5282 Then the address address is still needed to store
5283 back the new address. */
5284 && ! rld[reloadnum].out)
5285 continue;
5286 /* Likewise, if a RELOAD_FOR_INPUT can inherit a value, its
5287 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS
5288 reloads go away. */
5289 if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
5290 && ignore_address_reloads
5291 /* Unless we are reloading an auto_inc expression. */
5292 && ! rld[reloadnum].out)
5293 continue;
5294 time2 = rld[i].opnum * 4 + 2;
5295 break;
5296 case RELOAD_FOR_INPUT_ADDRESS:
5297 if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
5298 && ignore_address_reloads
5299 && ! rld[reloadnum].out)
5300 continue;
5301 time2 = rld[i].opnum * 4 + 3;
5302 break;
5303 case RELOAD_FOR_INPUT:
5304 time2 = rld[i].opnum * 4 + 4;
5305 check_earlyclobber = 1;
5306 break;
5307 /* rld[i].opnum * 4 + 4 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4
5308 == MAX_RECOG_OPERANDS * 4 */
5309 case RELOAD_FOR_OPADDR_ADDR:
5310 if (type == RELOAD_FOR_OPERAND_ADDRESS && reloadnum == i + 1
5311 && ignore_address_reloads
5312 && ! rld[reloadnum].out)
5313 continue;
5314 time2 = MAX_RECOG_OPERANDS * 4 + 1;
5315 break;
5316 case RELOAD_FOR_OPERAND_ADDRESS:
5317 time2 = MAX_RECOG_OPERANDS * 4 + 2;
5318 check_earlyclobber = 1;
5319 break;
5320 case RELOAD_FOR_INSN:
5321 time2 = MAX_RECOG_OPERANDS * 4 + 3;
5322 break;
5323 case RELOAD_FOR_OUTPUT:
5324 /* All RELOAD_FOR_OUTPUT reloads become live just after the
5325 instruction is executed. */
5326 time2 = MAX_RECOG_OPERANDS * 4 + 4;
5327 break;
5328 /* The first RELOAD_FOR_OUTADDR_ADDRESS reload conflicts with
5329 the RELOAD_FOR_OUTPUT reloads, so assign it the same time
5330 value. */
5331 case RELOAD_FOR_OUTADDR_ADDRESS:
5332 if (type == RELOAD_FOR_OUTPUT_ADDRESS && reloadnum == i + 1
5333 && ignore_address_reloads
5334 && ! rld[reloadnum].out)
5335 continue;
5336 time2 = MAX_RECOG_OPERANDS * 4 + 4 + rld[i].opnum;
5337 break;
5338 case RELOAD_FOR_OUTPUT_ADDRESS:
5339 time2 = MAX_RECOG_OPERANDS * 4 + 5 + rld[i].opnum;
5340 break;
5341 case RELOAD_OTHER:
5342 /* If there is no conflict in the input part, handle this
5343 like an output reload. */
5344 if (! rld[i].in || rtx_equal_p (other_input, value))
5346 time2 = MAX_RECOG_OPERANDS * 4 + 4;
5347 /* Earlyclobbered outputs must conflict with inputs. */
5348 if (earlyclobber_operand_p (rld[i].out))
5349 time2 = MAX_RECOG_OPERANDS * 4 + 3;
5351 break;
5353 time2 = 1;
5354 /* RELOAD_OTHER might be live beyond instruction execution,
5355 but this is not obvious when we set time2 = 1. So check
5356 here if there might be a problem with the new reload
5357 clobbering the register used by the RELOAD_OTHER. */
5358 if (out)
5359 return 0;
5360 break;
5361 default:
5362 return 0;
5364 if ((time1 >= time2
5365 && (! rld[i].in || rld[i].out
5366 || ! rtx_equal_p (other_input, value)))
5367 || (out && rld[reloadnum].out_reg
5368 && time2 >= MAX_RECOG_OPERANDS * 4 + 3))
5369 return 0;
5374 /* Earlyclobbered outputs must conflict with inputs. */
5375 if (check_earlyclobber && out && earlyclobber_operand_p (out))
5376 return 0;
5378 return 1;
5381 /* Return 1 if the value in reload reg REGNO, as used by a reload
5382 needed for the part of the insn specified by OPNUM and TYPE,
5383 may be used to load VALUE into it.
5385 MODE is the mode in which the register is used, this is needed to
5386 determine how many hard regs to test.
5388 Other read-only reloads with the same value do not conflict
5389 unless OUT is nonzero and these other reloads have to live while
5390 output reloads live.
5391 If OUT is CONST0_RTX, this is a special case: it means that the
5392 test should not be for using register REGNO as reload register, but
5393 for copying from register REGNO into the reload register.
5395 RELOADNUM is the number of the reload we want to load this value for;
5396 a reload does not conflict with itself.
5398 When IGNORE_ADDRESS_RELOADS is set, we cannot have conflicts with
5399 reloads that load an address for the very reload we are considering.
5401 The caller has to make sure that there is no conflict with the return
5402 register. */
5404 static int
5405 free_for_value_p (int regno, enum machine_mode mode, int opnum,
5406 enum reload_type type, rtx value, rtx out, int reloadnum,
5407 int ignore_address_reloads)
5409 int nregs = hard_regno_nregs[regno][mode];
5410 while (nregs-- > 0)
5411 if (! reload_reg_free_for_value_p (regno, regno + nregs, opnum, type,
5412 value, out, reloadnum,
5413 ignore_address_reloads))
5414 return 0;
5415 return 1;
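/* A representative call, as made from allocate_reload_reg below when
   testing candidate spill register REGNUM for reload R:

     free_for_value_p (regnum, rld[r].mode, rld[r].opnum,
                       rld[r].when_needed, rld[r].in, rld[r].out, r, 1);

   The trailing 1 sets IGNORE_ADDRESS_RELOADS, so reloads that merely load
   an address for reload R itself are not counted as conflicts.  */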
5418 /* Return nonzero if the rtx X is invariant over the current function. */
5419 /* ??? Actually, the places where we use this expect exactly what is
5420 tested here, and not everything that is function invariant. In
5421 particular, the frame pointer and arg pointer are special cased;
5422 pic_offset_table_rtx is not, and we must not spill these things to
5423 memory. */
5425 int
5426 function_invariant_p (const_rtx x)
5428 if (CONSTANT_P (x))
5429 return 1;
5430 if (x == frame_pointer_rtx || x == arg_pointer_rtx)
5431 return 1;
5432 if (GET_CODE (x) == PLUS
5433 && (XEXP (x, 0) == frame_pointer_rtx || XEXP (x, 0) == arg_pointer_rtx)
5434 && CONSTANT_P (XEXP (x, 1)))
5435 return 1;
5436 return 0;
5439 /* Determine whether the reload reg X overlaps any rtx'es used for
5440 overriding inheritance. Return nonzero if so. */
5442 static int
5443 conflicts_with_override (rtx x)
5445 int i;
5446 for (i = 0; i < n_reloads; i++)
5447 if (reload_override_in[i]
5448 && reg_overlap_mentioned_p (x, reload_override_in[i]))
5449 return 1;
5450 return 0;
5453 /* Give an error message saying we failed to find a reload for INSN,
5454 and clear out reload R. */
5455 static void
5456 failed_reload (rtx insn, int r)
5458 if (asm_noperands (PATTERN (insn)) < 0)
5459 /* It's the compiler's fault. */
5460 fatal_insn ("could not find a spill register", insn);
5462 /* It's the user's fault; the operand's mode and constraint
5463 don't match. Disable this reload so we don't crash in final. */
5464 error_for_asm (insn,
5465 "%<asm%> operand constraint incompatible with operand size");
5466 rld[r].in = 0;
5467 rld[r].out = 0;
5468 rld[r].reg_rtx = 0;
5469 rld[r].optional = 1;
5470 rld[r].secondary_p = 1;
5473 /* I is the index in SPILL_REG_RTX of the reload register we are to allocate
5474 for reload R. If it's valid, get an rtx for it. Return nonzero if
5475 successful. */
5476 static int
5477 set_reload_reg (int i, int r)
5479 int regno;
5480 rtx reg = spill_reg_rtx[i];
5482 if (reg == 0 || GET_MODE (reg) != rld[r].mode)
5483 spill_reg_rtx[i] = reg
5484 = gen_rtx_REG (rld[r].mode, spill_regs[i]);
5486 regno = true_regnum (reg);
5488 /* Detect when the reload reg can't hold the reload mode.
5489 This used to be one `if', but the Sequent compiler can't handle that. */
5490 if (HARD_REGNO_MODE_OK (regno, rld[r].mode))
5492 enum machine_mode test_mode = VOIDmode;
5493 if (rld[r].in)
5494 test_mode = GET_MODE (rld[r].in);
5495 /* If rld[r].in has VOIDmode, it means we will load it
5496 in whatever mode the reload reg has: to wit, rld[r].mode.
5497 We have already tested that for validity. */
5498 /* Aside from that, we need to test that the expressions
5499 to reload from or into have modes which are valid for this
5500 reload register. Otherwise the reload insns would be invalid. */
5501 if (! (rld[r].in != 0 && test_mode != VOIDmode
5502 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
5503 if (! (rld[r].out != 0
5504 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (rld[r].out))))
5506 /* The reg is OK. */
5507 last_spill_reg = i;
5509 /* Mark as in use for this insn the reload regs we use
5510 for this. */
5511 mark_reload_reg_in_use (spill_regs[i], rld[r].opnum,
5512 rld[r].when_needed, rld[r].mode);
5514 rld[r].reg_rtx = reg;
5515 reload_spill_index[r] = spill_regs[i];
5516 return 1;
5519 return 0;
5522 /* Find a spill register to use as a reload register for reload R.
5523 LAST_RELOAD is nonzero if this is the last reload for the insn being
5524 processed.
5526 Set rld[R].reg_rtx to the register allocated.
5528 We return 1 if successful, or 0 if we couldn't find a spill reg and
5529 we didn't change anything. */
5531 static int
5532 allocate_reload_reg (struct insn_chain *chain ATTRIBUTE_UNUSED, int r,
5533 int last_reload)
5535 int i, pass, count;
5537 /* If we put this reload ahead, thinking it is a group,
5538 then insist on finding a group. Otherwise we can grab a
5539 reg that some other reload needs.
5540 (That can happen when we have a 68000 DATA_OR_FP_REG
5541 which is a group of data regs or one fp reg.)
5542 We need not be so restrictive if there are no more reloads
5543 for this insn.
5545 ??? Really it would be nicer to have smarter handling
5546 for that kind of reg class, where a problem like this is normal.
5547 Perhaps those classes should be avoided for reloading
5548 by use of more alternatives. */
5550 int force_group = rld[r].nregs > 1 && ! last_reload;
5552 /* If we want a single register and haven't yet found one,
5553 take any reg in the right class and not in use.
5554 If we want a consecutive group, here is where we look for it.
5556 We use two passes so we can first look for reload regs to
5557 reuse, which are already in use for other reloads in this insn,
5558 and only then use additional registers.
5559 I think that maximizing reuse is needed to make sure we don't
5560 run out of reload regs. Suppose we have three reloads, and
5561 reloads A and B can share regs. These need two regs.
5562 Suppose A and B are given different regs.
5563 That leaves none for C. */
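/* Roughly, the loop below accepts spill register REGNUM for reload R if
   it is free for this operand and reload type (or can be shared, as
   checked by free_for_value_p), belongs to the reload's class, can hold
   the reload's mode, and - on the first pass - is already in use for
   this insn but not by an inherited reload.  */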
5564 for (pass = 0; pass < 2; pass++)
5566 /* I is the index in spill_regs.
5567 We advance it round-robin between insns to use all spill regs
5568 equally, so that inherited reloads have a chance
5569 of leapfrogging each other. */
5571 i = last_spill_reg;
5573 for (count = 0; count < n_spills; count++)
5575 int class = (int) rld[r].class;
5576 int regnum;
5578 i++;
5579 if (i >= n_spills)
5580 i -= n_spills;
5581 regnum = spill_regs[i];
5583 if ((reload_reg_free_p (regnum, rld[r].opnum,
5584 rld[r].when_needed)
5585 || (rld[r].in
5586 /* We check reload_reg_used to make sure we
5587 don't clobber the return register. */
5588 && ! TEST_HARD_REG_BIT (reload_reg_used, regnum)
5589 && free_for_value_p (regnum, rld[r].mode, rld[r].opnum,
5590 rld[r].when_needed, rld[r].in,
5591 rld[r].out, r, 1)))
5592 && TEST_HARD_REG_BIT (reg_class_contents[class], regnum)
5593 && HARD_REGNO_MODE_OK (regnum, rld[r].mode)
5594 /* Look first for regs to share, then for unshared. But
5595 don't share regs used for inherited reloads; they are
5596 the ones we want to preserve. */
5597 && (pass
5598 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
5599 regnum)
5600 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
5601 regnum))))
5603 int nr = hard_regno_nregs[regnum][rld[r].mode];
5604 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
5605 (on 68000) got us two FP regs. If NR is 1,
5606 we would reject both of them. */
5607 if (force_group)
5608 nr = rld[r].nregs;
5609 /* If we need only one reg, we have already won. */
5610 if (nr == 1)
5612 /* But reject a single reg if we demand a group. */
5613 if (force_group)
5614 continue;
5615 break;
5617 /* Otherwise check that as many consecutive regs as we need
5618 are available here. */
5619 while (nr > 1)
5621 int regno = regnum + nr - 1;
5622 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
5623 && spill_reg_order[regno] >= 0
5624 && reload_reg_free_p (regno, rld[r].opnum,
5625 rld[r].when_needed)))
5626 break;
5627 nr--;
5629 if (nr == 1)
5630 break;
5634 /* If we found something on pass 1, omit pass 2. */
5635 if (count < n_spills)
5636 break;
5639 /* We should have found a spill register by now. */
5640 if (count >= n_spills)
5641 return 0;
5643 /* I is the index in SPILL_REG_RTX of the reload register we are to
5644 allocate. Get an rtx for it and find its register number. */
5646 return set_reload_reg (i, r);
5649 /* Initialize all the tables needed to allocate reload registers.
5650 CHAIN is the insn currently being processed; SAVE_RELOAD_REG_RTX
5651 is the array we use to restore the reg_rtx field for every reload. */
5653 static void
5654 choose_reload_regs_init (struct insn_chain *chain, rtx *save_reload_reg_rtx)
5656 int i;
5658 for (i = 0; i < n_reloads; i++)
5659 rld[i].reg_rtx = save_reload_reg_rtx[i];
5661 memset (reload_inherited, 0, MAX_RELOADS);
5662 memset (reload_inheritance_insn, 0, MAX_RELOADS * sizeof (rtx));
5663 memset (reload_override_in, 0, MAX_RELOADS * sizeof (rtx));
5665 CLEAR_HARD_REG_SET (reload_reg_used);
5666 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
5667 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
5668 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
5669 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
5670 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
5672 CLEAR_HARD_REG_SET (reg_used_in_insn);
5674 HARD_REG_SET tmp;
5675 REG_SET_TO_HARD_REG_SET (tmp, &chain->live_throughout);
5676 IOR_HARD_REG_SET (reg_used_in_insn, tmp);
5677 REG_SET_TO_HARD_REG_SET (tmp, &chain->dead_or_set);
5678 IOR_HARD_REG_SET (reg_used_in_insn, tmp);
5679 compute_use_by_pseudos (&reg_used_in_insn, &chain->live_throughout);
5680 compute_use_by_pseudos (&reg_used_in_insn, &chain->dead_or_set);
5683 for (i = 0; i < reload_n_operands; i++)
5685 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
5686 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
5687 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
5688 CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
5689 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
5690 CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
5693 COMPL_HARD_REG_SET (reload_reg_unavailable, chain->used_spill_regs);
5695 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5697 for (i = 0; i < n_reloads; i++)
5698 /* If we have already decided to use a certain register,
5699 don't use it in another way. */
5700 if (rld[i].reg_rtx)
5701 mark_reload_reg_in_use (REGNO (rld[i].reg_rtx), rld[i].opnum,
5702 rld[i].when_needed, rld[i].mode);
5705 /* Assign hard reg targets for the pseudo-registers we must reload
5706 into hard regs for this insn.
5707 Also output the instructions to copy them in and out of the hard regs.
5709 For machines with register classes, we are responsible for
5710 finding a reload reg in the proper class. */
5712 static void
5713 choose_reload_regs (struct insn_chain *chain)
5715 rtx insn = chain->insn;
5716 int i, j;
5717 unsigned int max_group_size = 1;
5718 enum reg_class group_class = NO_REGS;
5719 int pass, win, inheritance;
5721 rtx save_reload_reg_rtx[MAX_RELOADS];
5723 /* In order to be certain of getting the registers we need,
5724 we must sort the reloads into order of increasing register class.
5725 Then our grabbing of reload registers will parallel the process
5726 that provided the reload registers.
5728 Also note whether any of the reloads wants a consecutive group of regs.
5729 If so, record the maximum size of the group desired and what
5730 register class contains all the groups needed by this insn. */
5732 for (j = 0; j < n_reloads; j++)
5734 reload_order[j] = j;
5735 if (rld[j].reg_rtx != NULL_RTX)
5737 gcc_assert (REG_P (rld[j].reg_rtx)
5738 && HARD_REGISTER_P (rld[j].reg_rtx));
5739 reload_spill_index[j] = REGNO (rld[j].reg_rtx);
5741 else
5742 reload_spill_index[j] = -1;
5744 if (rld[j].nregs > 1)
5746 max_group_size = MAX (rld[j].nregs, max_group_size);
5747 group_class
5748 = reg_class_superunion[(int) rld[j].class][(int) group_class];
5751 save_reload_reg_rtx[j] = rld[j].reg_rtx;
5754 if (n_reloads > 1)
5755 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
5757 /* If -O, try first with inheritance, then turning it off.
5758 If not -O, don't do inheritance.
5759 Using inheritance when not optimizing leads to paradoxes
5760 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5761 because one side of the comparison might be inherited. */
5762 win = 0;
5763 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
5765 choose_reload_regs_init (chain, save_reload_reg_rtx);
5767 /* Process the reloads in order of preference just found.
5768 Beyond this point, subregs can be found in reload_reg_rtx.
5770 This used to look for an existing reloaded home for all of the
5771 reloads, and only then perform any new reloads. But that could lose
5772 if the reloads were done out of reg-class order because a later
5773 reload with a looser constraint might have an old home in a register
5774 needed by an earlier reload with a tighter constraint.
5776 To solve this, we make two passes over the reloads, in the order
5777 described above. In the first pass we try to inherit a reload
5778 from a previous insn. If there is a later reload that needs a
5779 class that is a proper subset of the class being processed, we must
5780 also allocate a spill register during the first pass.
5782 Then make a second pass over the reloads to allocate any reloads
5783 that haven't been given registers yet. */
5785 for (j = 0; j < n_reloads; j++)
5787 int r = reload_order[j];
5788 rtx search_equiv = NULL_RTX;
5790 /* Ignore reloads that got marked inoperative. */
5791 if (rld[r].out == 0 && rld[r].in == 0
5792 && ! rld[r].secondary_p)
5793 continue;
5795 /* If find_reloads chose to use reload_in or reload_out as a reload
5796 register, we don't need to choose one. Otherwise, try even if it
5797 found one since we might save an insn if we find the value lying
5798 around.
5799 Try also when reload_in is a pseudo without a hard reg. */
5800 if (rld[r].in != 0 && rld[r].reg_rtx != 0
5801 && (rtx_equal_p (rld[r].in, rld[r].reg_rtx)
5802 || (rtx_equal_p (rld[r].out, rld[r].reg_rtx)
5803 && !MEM_P (rld[r].in)
5804 && true_regnum (rld[r].in) < FIRST_PSEUDO_REGISTER)))
5805 continue;
5807 #if 0 /* No longer needed for correct operation.
5808 It might give better code, or might not; worth an experiment? */
5809 /* If this is an optional reload, we can't inherit from earlier insns
5810 until we are sure that any non-optional reloads have been allocated.
5811 The following code takes advantage of the fact that optional reloads
5812 are at the end of reload_order. */
5813 if (rld[r].optional != 0)
5814 for (i = 0; i < j; i++)
5815 if ((rld[reload_order[i]].out != 0
5816 || rld[reload_order[i]].in != 0
5817 || rld[reload_order[i]].secondary_p)
5818 && ! rld[reload_order[i]].optional
5819 && rld[reload_order[i]].reg_rtx == 0)
5820 allocate_reload_reg (chain, reload_order[i], 0);
5821 #endif
5823 /* First see if this pseudo is already available as reloaded
5824 for a previous insn. We cannot try to inherit for reloads
5825 that are smaller than the maximum number of registers needed
5826 for groups unless the register we would allocate cannot be used
5827 for the groups.
5829 We could check here to see if this is a secondary reload for
5830 an object that is already in a register of the desired class.
5831 This would avoid the need for the secondary reload register.
5832 But this is complex because we can't easily determine what
5833 objects might want to be loaded via this reload. So let a
5834 register be allocated here. In `emit_reload_insns' we suppress
5835 one of the loads in the case described above. */
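/* In outline: find the register REGNO that supplies this reload and see
   whether reg_last_reload_reg records that an earlier reload left its
   value in some hard register I.  If I still holds REGNO's value
   (reg_reloaded_contents / reg_reloaded_valid), is valid in rld[r].mode
   and acceptable for the class - or at least cheap to copy from - we
   either inherit it as the reload register or record it in
   reload_override_in as a place to load from.  */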
5837 if (inheritance)
5839 int byte = 0;
5840 int regno = -1;
5841 enum machine_mode mode = VOIDmode;
5843 if (rld[r].in == 0)
5845 else if (REG_P (rld[r].in))
5847 regno = REGNO (rld[r].in);
5848 mode = GET_MODE (rld[r].in);
5850 else if (REG_P (rld[r].in_reg))
5852 regno = REGNO (rld[r].in_reg);
5853 mode = GET_MODE (rld[r].in_reg);
5855 else if (GET_CODE (rld[r].in_reg) == SUBREG
5856 && REG_P (SUBREG_REG (rld[r].in_reg)))
5858 regno = REGNO (SUBREG_REG (rld[r].in_reg));
5859 if (regno < FIRST_PSEUDO_REGISTER)
5860 regno = subreg_regno (rld[r].in_reg);
5861 else
5862 byte = SUBREG_BYTE (rld[r].in_reg);
5863 mode = GET_MODE (rld[r].in_reg);
5865 #ifdef AUTO_INC_DEC
5866 else if (GET_RTX_CLASS (GET_CODE (rld[r].in_reg)) == RTX_AUTOINC
5867 && REG_P (XEXP (rld[r].in_reg, 0)))
5869 regno = REGNO (XEXP (rld[r].in_reg, 0));
5870 mode = GET_MODE (XEXP (rld[r].in_reg, 0));
5871 rld[r].out = rld[r].in;
5873 #endif
5874 #if 0
5875 /* This won't work, since REGNO can be a pseudo reg number.
5876 Also, it takes much more hair to keep track of all the things
5877 that can invalidate an inherited reload of part of a pseudoreg. */
5878 else if (GET_CODE (rld[r].in) == SUBREG
5879 && REG_P (SUBREG_REG (rld[r].in)))
5880 regno = subreg_regno (rld[r].in);
5881 #endif
5883 if (regno >= 0
5884 && reg_last_reload_reg[regno] != 0
5885 #ifdef CANNOT_CHANGE_MODE_CLASS
5886 /* Verify that the register it's in can be used in
5887 mode MODE. */
5888 && !REG_CANNOT_CHANGE_MODE_P (REGNO (reg_last_reload_reg[regno]),
5889 GET_MODE (reg_last_reload_reg[regno]),
5890 mode)
5891 #endif
5894 enum reg_class class = rld[r].class, last_class;
5895 rtx last_reg = reg_last_reload_reg[regno];
5896 enum machine_mode need_mode;
5898 i = REGNO (last_reg);
5899 i += subreg_regno_offset (i, GET_MODE (last_reg), byte, mode);
5900 last_class = REGNO_REG_CLASS (i);
5902 if (byte == 0)
5903 need_mode = mode;
5904 else
5905 need_mode
5906 = smallest_mode_for_size (GET_MODE_BITSIZE (mode)
5907 + byte * BITS_PER_UNIT,
5908 GET_MODE_CLASS (mode));
5910 if ((GET_MODE_SIZE (GET_MODE (last_reg))
5911 >= GET_MODE_SIZE (need_mode))
5912 && reg_reloaded_contents[i] == regno
5913 && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
5914 && HARD_REGNO_MODE_OK (i, rld[r].mode)
5915 && (TEST_HARD_REG_BIT (reg_class_contents[(int) class], i)
5916 /* Even if we can't use this register as a reload
5917 register, we might use it for reload_override_in,
5918 if copying it to the desired class is cheap
5919 enough. */
5920 || ((REGISTER_MOVE_COST (mode, last_class, class)
5921 < MEMORY_MOVE_COST (mode, class, 1))
5922 && (secondary_reload_class (1, class, mode,
5923 last_reg)
5924 == NO_REGS)
5925 #ifdef SECONDARY_MEMORY_NEEDED
5926 && ! SECONDARY_MEMORY_NEEDED (last_class, class,
5927 mode)
5928 #endif
5929 ))
5931 && (rld[r].nregs == max_group_size
5932 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
5933 i))
5934 && free_for_value_p (i, rld[r].mode, rld[r].opnum,
5935 rld[r].when_needed, rld[r].in,
5936 const0_rtx, r, 1))
5938 /* If a group is needed, verify that all the subsequent
5939 registers still have their values intact. */
5940 int nr = hard_regno_nregs[i][rld[r].mode];
5941 int k;
5943 for (k = 1; k < nr; k++)
5944 if (reg_reloaded_contents[i + k] != regno
5945 || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))
5946 break;
5948 if (k == nr)
5950 int i1;
5951 int bad_for_class;
5953 last_reg = (GET_MODE (last_reg) == mode
5954 ? last_reg : gen_rtx_REG (mode, i));
5956 bad_for_class = 0;
5957 for (k = 0; k < nr; k++)
5958 bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].class],
5959 i+k);
5961 /* We found a register that contains the
5962 value we need. If this register is the
5963 same as an `earlyclobber' operand of the
5964 current insn, just mark it as a place to
5965 reload from since we can't use it as the
5966 reload register itself. */
5968 for (i1 = 0; i1 < n_earlyclobbers; i1++)
5969 if (reg_overlap_mentioned_for_reload_p
5970 (reg_last_reload_reg[regno],
5971 reload_earlyclobbers[i1]))
5972 break;
5974 if (i1 != n_earlyclobbers
5975 || ! (free_for_value_p (i, rld[r].mode,
5976 rld[r].opnum,
5977 rld[r].when_needed, rld[r].in,
5978 rld[r].out, r, 1))
5979 /* Don't use it if we'd clobber a pseudo reg. */
5980 || (TEST_HARD_REG_BIT (reg_used_in_insn, i)
5981 && rld[r].out
5982 && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
5983 /* Don't clobber the frame pointer. */
5984 || (i == HARD_FRAME_POINTER_REGNUM
5985 && frame_pointer_needed
5986 && rld[r].out)
5987 /* Don't really use the inherited spill reg
5988 if we need it wider than we've got it. */
5989 || (GET_MODE_SIZE (rld[r].mode)
5990 > GET_MODE_SIZE (mode))
5991 || bad_for_class
5993 /* If find_reloads chose reload_out as reload
5994 register, stay with it - that leaves the
5995 inherited register for subsequent reloads. */
5996 || (rld[r].out && rld[r].reg_rtx
5997 && rtx_equal_p (rld[r].out, rld[r].reg_rtx)))
5999 if (! rld[r].optional)
6001 reload_override_in[r] = last_reg;
6002 reload_inheritance_insn[r]
6003 = reg_reloaded_insn[i];
6006 else
6008 int k;
6009 /* We can use this as a reload reg. */
6010 /* Mark the register as in use for this part of
6011 the insn. */
6012 mark_reload_reg_in_use (i,
6013 rld[r].opnum,
6014 rld[r].when_needed,
6015 rld[r].mode);
6016 rld[r].reg_rtx = last_reg;
6017 reload_inherited[r] = 1;
6018 reload_inheritance_insn[r]
6019 = reg_reloaded_insn[i];
6020 reload_spill_index[r] = i;
6021 for (k = 0; k < nr; k++)
6022 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6023 i + k);
6030 /* Here's another way to see if the value is already lying around. */
6031 if (inheritance
6032 && rld[r].in != 0
6033 && ! reload_inherited[r]
6034 && rld[r].out == 0
6035 && (CONSTANT_P (rld[r].in)
6036 || GET_CODE (rld[r].in) == PLUS
6037 || REG_P (rld[r].in)
6038 || MEM_P (rld[r].in))
6039 && (rld[r].nregs == max_group_size
6040 || ! reg_classes_intersect_p (rld[r].class, group_class)))
6041 search_equiv = rld[r].in;
6042 /* If this is an output reload from a simple move insn, see
6043 if an equivalence for the input is available. */
6044 else if (inheritance && rld[r].in == 0 && rld[r].out != 0)
6046 rtx set = single_set (insn);
6048 if (set
6049 && rtx_equal_p (rld[r].out, SET_DEST (set))
6050 && CONSTANT_P (SET_SRC (set)))
6051 search_equiv = SET_SRC (set);
6054 if (search_equiv)
6056 rtx equiv
6057 = find_equiv_reg (search_equiv, insn, rld[r].class,
6058 -1, NULL, 0, rld[r].mode);
6059 int regno = 0;
6061 if (equiv != 0)
6063 if (REG_P (equiv))
6064 regno = REGNO (equiv);
6065 else
6067 /* This must be a SUBREG of a hard register.
6068 Make a new REG since this might be used in an
6069 address and not all machines support SUBREGs
6070 there. */
6071 gcc_assert (GET_CODE (equiv) == SUBREG);
6072 regno = subreg_regno (equiv);
6073 equiv = gen_rtx_REG (rld[r].mode, regno);
6074 /* If we choose EQUIV as the reload register, but the
6075 loop below decides to cancel the inheritance, we'll
6076 end up reloading EQUIV in rld[r].mode, not the mode
6077 it had originally. That isn't safe when EQUIV isn't
6078 available as a spill register since its value might
6079 still be live at this point. */
6080 for (i = regno; i < regno + (int) rld[r].nregs; i++)
6081 if (TEST_HARD_REG_BIT (reload_reg_unavailable, i))
6082 equiv = 0;
6086 /* If we found a spill reg, reject it unless it is free
6087 and of the desired class. */
6088 if (equiv != 0)
6090 int regs_used = 0;
6091 int bad_for_class = 0;
6092 int max_regno = regno + rld[r].nregs;
6094 for (i = regno; i < max_regno; i++)
6096 regs_used |= TEST_HARD_REG_BIT (reload_reg_used_at_all,
6097 i);
6098 bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].class],
6099 i);
6102 if ((regs_used
6103 && ! free_for_value_p (regno, rld[r].mode,
6104 rld[r].opnum, rld[r].when_needed,
6105 rld[r].in, rld[r].out, r, 1))
6106 || bad_for_class)
6107 equiv = 0;
6110 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, rld[r].mode))
6111 equiv = 0;
6113 /* We found a register that contains the value we need.
6114 If this register is the same as an `earlyclobber' operand
6115 of the current insn, just mark it as a place to reload from
6116 since we can't use it as the reload register itself. */
6118 if (equiv != 0)
6119 for (i = 0; i < n_earlyclobbers; i++)
6120 if (reg_overlap_mentioned_for_reload_p (equiv,
6121 reload_earlyclobbers[i]))
6123 if (! rld[r].optional)
6124 reload_override_in[r] = equiv;
6125 equiv = 0;
6126 break;
6129 /* If the equiv register we have found is explicitly clobbered
6130 in the current insn, it depends on the reload type whether we
6131 can use it, use it for reload_override_in, or not use it at all.
6132 In particular, we then can't use EQUIV for a
6133 RELOAD_FOR_OUTPUT_ADDRESS reload. */
6135 if (equiv != 0)
6137 if (regno_clobbered_p (regno, insn, rld[r].mode, 2))
6138 switch (rld[r].when_needed)
6140 case RELOAD_FOR_OTHER_ADDRESS:
6141 case RELOAD_FOR_INPADDR_ADDRESS:
6142 case RELOAD_FOR_INPUT_ADDRESS:
6143 case RELOAD_FOR_OPADDR_ADDR:
6144 break;
6145 case RELOAD_OTHER:
6146 case RELOAD_FOR_INPUT:
6147 case RELOAD_FOR_OPERAND_ADDRESS:
6148 if (! rld[r].optional)
6149 reload_override_in[r] = equiv;
6150 /* Fall through. */
6151 default:
6152 equiv = 0;
6153 break;
6155 else if (regno_clobbered_p (regno, insn, rld[r].mode, 1))
6156 switch (rld[r].when_needed)
6158 case RELOAD_FOR_OTHER_ADDRESS:
6159 case RELOAD_FOR_INPADDR_ADDRESS:
6160 case RELOAD_FOR_INPUT_ADDRESS:
6161 case RELOAD_FOR_OPADDR_ADDR:
6162 case RELOAD_FOR_OPERAND_ADDRESS:
6163 case RELOAD_FOR_INPUT:
6164 break;
6165 case RELOAD_OTHER:
6166 if (! rld[r].optional)
6167 reload_override_in[r] = equiv;
6168 /* Fall through. */
6169 default:
6170 equiv = 0;
6171 break;
6175 /* If we found an equivalent reg, say no code need be generated
6176 to load it, and use it as our reload reg. */
6177 if (equiv != 0
6178 && (regno != HARD_FRAME_POINTER_REGNUM
6179 || !frame_pointer_needed))
6181 int nr = hard_regno_nregs[regno][rld[r].mode];
6182 int k;
6183 rld[r].reg_rtx = equiv;
6184 reload_inherited[r] = 1;
6186 /* If reg_reloaded_valid is not set for this register,
6187 there might be a stale spill_reg_store lying around.
6188 We must clear it, since otherwise emit_reload_insns
6189 might delete the store. */
6190 if (! TEST_HARD_REG_BIT (reg_reloaded_valid, regno))
6191 spill_reg_store[regno] = NULL_RTX;
6192 /* If any of the hard registers in EQUIV are spill
6193 registers, mark them as in use for this insn. */
6194 for (k = 0; k < nr; k++)
6196 i = spill_reg_order[regno + k];
6197 if (i >= 0)
6199 mark_reload_reg_in_use (regno, rld[r].opnum,
6200 rld[r].when_needed,
6201 rld[r].mode);
6202 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6203 regno + k);
6209 /* If we found a register to use already, or if this is an optional
6210 reload, we are done. */
6211 if (rld[r].reg_rtx != 0 || rld[r].optional != 0)
6212 continue;
6214 #if 0
6215 /* No longer needed for correct operation. Might or might
6216 not give better code on the average. Want to experiment? */
6218 /* See if there is a later reload that has a class different from our
6219 class that intersects our class or that requires less register
6220 than our reload. If so, we must allocate a register to this
6221 reload now, since that reload might inherit a previous reload
6222 and take the only available register in our class. Don't do this
6223 for optional reloads since they will force all previous reloads
6224 to be allocated. Also don't do this for reloads that have been
6225 turned off. */
6227 for (i = j + 1; i < n_reloads; i++)
6229 int s = reload_order[i];
6231 if ((rld[s].in == 0 && rld[s].out == 0
6232 && ! rld[s].secondary_p)
6233 || rld[s].optional)
6234 continue;
6236 if ((rld[s].class != rld[r].class
6237 && reg_classes_intersect_p (rld[r].class,
6238 rld[s].class))
6239 || rld[s].nregs < rld[r].nregs)
6240 break;
6243 if (i == n_reloads)
6244 continue;
6246 allocate_reload_reg (chain, r, j == n_reloads - 1);
6247 #endif
6250 /* Now allocate reload registers for anything non-optional that
6251 didn't get one yet. */
6252 for (j = 0; j < n_reloads; j++)
6254 int r = reload_order[j];
6256 /* Ignore reloads that got marked inoperative. */
6257 if (rld[r].out == 0 && rld[r].in == 0 && ! rld[r].secondary_p)
6258 continue;
6260 /* Skip reloads that already have a register allocated or are
6261 optional. */
6262 if (rld[r].reg_rtx != 0 || rld[r].optional)
6263 continue;
6265 if (! allocate_reload_reg (chain, r, j == n_reloads - 1))
6266 break;
6269 /* If that loop got all the way, we have won. */
6270 if (j == n_reloads)
6272 win = 1;
6273 break;
6276 /* Loop around and try without any inheritance. */
6279 if (! win)
6281 /* First undo everything done by the failed attempt
6282 to allocate with inheritance. */
6283 choose_reload_regs_init (chain, save_reload_reg_rtx);
6285 /* Some sanity tests to verify that the reloads found in the first
6286 pass are identical to the ones we have now. */
6287 gcc_assert (chain->n_reloads == n_reloads);
6289 for (i = 0; i < n_reloads; i++)
6291 if (chain->rld[i].regno < 0 || chain->rld[i].reg_rtx != 0)
6292 continue;
6293 gcc_assert (chain->rld[i].when_needed == rld[i].when_needed);
6294 for (j = 0; j < n_spills; j++)
6295 if (spill_regs[j] == chain->rld[i].regno)
6296 if (! set_reload_reg (j, i))
6297 failed_reload (chain->insn, i);
6301 /* If we thought we could inherit a reload, because it seemed that
6302 nothing else wanted the same reload register earlier in the insn,
6303 verify that assumption, now that all reloads have been assigned.
6304 Likewise for reloads where reload_override_in has been set. */
6306 /* If doing expensive optimizations, do one preliminary pass that doesn't
6307 cancel any inheritance, but removes reloads that have been needed only
6308 for reloads that we know can be inherited. */
6309 for (pass = flag_expensive_optimizations; pass >= 0; pass--)
6311 for (j = 0; j < n_reloads; j++)
6313 int r = reload_order[j];
6314 rtx check_reg;
6315 if (reload_inherited[r] && rld[r].reg_rtx)
6316 check_reg = rld[r].reg_rtx;
6317 else if (reload_override_in[r]
6318 && (REG_P (reload_override_in[r])
6319 || GET_CODE (reload_override_in[r]) == SUBREG))
6320 check_reg = reload_override_in[r];
6321 else
6322 continue;
6323 if (! free_for_value_p (true_regnum (check_reg), rld[r].mode,
6324 rld[r].opnum, rld[r].when_needed, rld[r].in,
6325 (reload_inherited[r]
6326 ? rld[r].out : const0_rtx),
6327 r, 1))
6329 if (pass)
6330 continue;
6331 reload_inherited[r] = 0;
6332 reload_override_in[r] = 0;
6334 /* If we can inherit a RELOAD_FOR_INPUT, or can use a
6335 reload_override_in, then we do not need its related
6336 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
6337 likewise for other reload types.
6338 We handle this by removing a reload when its only replacement
6339 is mentioned in reload_in of the reload we are going to inherit.
6340 A special case are auto_inc expressions; even if the input is
6341 inherited, we still need the address for the output. We can
6342 recognize them because they have RELOAD_OUT set to RELOAD_IN.
6343 If we succeeded removing some reload and we are doing a preliminary
6344 pass just to remove such reloads, make another pass, since the
6345 removal of one reload might allow us to inherit another one. */
6346 else if (rld[r].in
6347 && rld[r].out != rld[r].in
6348 && remove_address_replacements (rld[r].in) && pass)
6349 pass = 2;
6353 /* Now that reload_override_in is known valid,
6354 actually override reload_in. */
6355 for (j = 0; j < n_reloads; j++)
6356 if (reload_override_in[j])
6357 rld[j].in = reload_override_in[j];
6359 /* If this reload won't be done because it has been canceled or is
6360 optional and not inherited, clear reload_reg_rtx so other
6361 routines (such as subst_reloads) don't get confused. */
6362 for (j = 0; j < n_reloads; j++)
6363 if (rld[j].reg_rtx != 0
6364 && ((rld[j].optional && ! reload_inherited[j])
6365 || (rld[j].in == 0 && rld[j].out == 0
6366 && ! rld[j].secondary_p)))
6368 int regno = true_regnum (rld[j].reg_rtx);
6370 if (spill_reg_order[regno] >= 0)
6371 clear_reload_reg_in_use (regno, rld[j].opnum,
6372 rld[j].when_needed, rld[j].mode);
6373 rld[j].reg_rtx = 0;
6374 reload_spill_index[j] = -1;
6377 /* Record which pseudos and which spill regs have output reloads. */
6378 for (j = 0; j < n_reloads; j++)
6380 int r = reload_order[j];
6382 i = reload_spill_index[r];
6384 /* I is nonneg if this reload uses a register.
6385 If rld[r].reg_rtx is 0, this is an optional reload
6386 that we opted to ignore. */
6387 if (rld[r].out_reg != 0 && REG_P (rld[r].out_reg)
6388 && rld[r].reg_rtx != 0)
6390 int nregno = REGNO (rld[r].out_reg);
6391 int nr = 1;
6393 if (nregno < FIRST_PSEUDO_REGISTER)
6394 nr = hard_regno_nregs[nregno][rld[r].mode];
6396 while (--nr >= 0)
6397 SET_REGNO_REG_SET (&reg_has_output_reload,
6398 nregno + nr);
6400 if (i >= 0)
6402 nr = hard_regno_nregs[i][rld[r].mode];
6403 while (--nr >= 0)
6404 SET_HARD_REG_BIT (reg_is_output_reload, i + nr);
6407 gcc_assert (rld[r].when_needed == RELOAD_OTHER
6408 || rld[r].when_needed == RELOAD_FOR_OUTPUT
6409 || rld[r].when_needed == RELOAD_FOR_INSN);
6414 /* Deallocate the reload register for reload R. This is called from
6415 remove_address_replacements. */
6417 void
6418 deallocate_reload_reg (int r)
6420 int regno;
6422 if (! rld[r].reg_rtx)
6423 return;
6424 regno = true_regnum (rld[r].reg_rtx);
6425 rld[r].reg_rtx = 0;
6426 if (spill_reg_order[regno] >= 0)
6427 clear_reload_reg_in_use (regno, rld[r].opnum, rld[r].when_needed,
6428 rld[r].mode);
6429 reload_spill_index[r] = -1;
6432 /* If SMALL_REGISTER_CLASSES is nonzero, we may not have merged two
6433 reloads of the same item for fear that we might not have enough reload
6434 registers. However, normally they will get the same reload register
6435 and hence actually need not be loaded twice.
6437 Here we check for the most common case of this phenomenon: when we have
6438 a number of reloads for the same object, each of which were allocated
6439 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
6440 reload, and is not modified in the insn itself. If we find such,
6441 merge all the reloads and set the resulting reload to RELOAD_OTHER.
6442 This will not increase the number of spill registers needed and will
6443 prevent redundant code. */
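/* A hypothetical but typical case on a SMALL_REGISTER_CLASSES target:
   find_reloads keeps two input reloads of the same pseudo for different
   operands separate, yet both end up assigned the same spill register
   above; they are merged here into a single RELOAD_OTHER reload so the
   value is loaded only once.  */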
6445 static void
6446 merge_assigned_reloads (rtx insn)
6448 int i, j;
6450 /* Scan all the reloads looking for ones that only load values and
6451 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
6452 assigned and not modified by INSN. */
6454 for (i = 0; i < n_reloads; i++)
6456 int conflicting_input = 0;
6457 int max_input_address_opnum = -1;
6458 int min_conflicting_input_opnum = MAX_RECOG_OPERANDS;
6460 if (rld[i].in == 0 || rld[i].when_needed == RELOAD_OTHER
6461 || rld[i].out != 0 || rld[i].reg_rtx == 0
6462 || reg_set_p (rld[i].reg_rtx, insn))
6463 continue;
6465 /* Look at all other reloads. Ensure that the only use of this
6466 reload_reg_rtx is in a reload that just loads the same value
6467 as we do. Note that any secondary reloads must be of the identical
6468 class since the values, modes, and result registers are the
6469 same, so we need not do anything with any secondary reloads. */
6471 for (j = 0; j < n_reloads; j++)
6473 if (i == j || rld[j].reg_rtx == 0
6474 || ! reg_overlap_mentioned_p (rld[j].reg_rtx,
6475 rld[i].reg_rtx))
6476 continue;
6478 if (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6479 && rld[j].opnum > max_input_address_opnum)
6480 max_input_address_opnum = rld[j].opnum;
6482 /* If the reload regs aren't exactly the same (e.g., different modes)
6483 or if the values are different, we can't merge this reload.
6484 But if it is an input reload, we might still merge
6485 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_OTHER_ADDRESS reloads. */
6487 if (! rtx_equal_p (rld[i].reg_rtx, rld[j].reg_rtx)
6488 || rld[j].out != 0 || rld[j].in == 0
6489 || ! rtx_equal_p (rld[i].in, rld[j].in))
6491 if (rld[j].when_needed != RELOAD_FOR_INPUT
6492 || ((rld[i].when_needed != RELOAD_FOR_INPUT_ADDRESS
6493 || rld[i].opnum > rld[j].opnum)
6494 && rld[i].when_needed != RELOAD_FOR_OTHER_ADDRESS))
6495 break;
6496 conflicting_input = 1;
6497 if (min_conflicting_input_opnum > rld[j].opnum)
6498 min_conflicting_input_opnum = rld[j].opnum;
6502 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
6503 we, in fact, found any matching reloads. */
6505 if (j == n_reloads
6506 && max_input_address_opnum <= min_conflicting_input_opnum)
6508 gcc_assert (rld[i].when_needed != RELOAD_FOR_OUTPUT);
6510 for (j = 0; j < n_reloads; j++)
6511 if (i != j && rld[j].reg_rtx != 0
6512 && rtx_equal_p (rld[i].reg_rtx, rld[j].reg_rtx)
6513 && (! conflicting_input
6514 || rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6515 || rld[j].when_needed == RELOAD_FOR_OTHER_ADDRESS))
6517 rld[i].when_needed = RELOAD_OTHER;
6518 rld[j].in = 0;
6519 reload_spill_index[j] = -1;
6520 transfer_replacements (i, j);
6523 /* If this is now RELOAD_OTHER, look for any reloads that
6524 load parts of this operand and set them to
6525 RELOAD_FOR_OTHER_ADDRESS if they were for inputs,
6526 RELOAD_OTHER for outputs. Note that this test is
6527 equivalent to looking for reloads for this operand
6528 number.
6530 We must take special care with RELOAD_FOR_OUTPUT_ADDRESS;
6531 it may share registers with a RELOAD_FOR_INPUT, so we can
6532 not change it to RELOAD_FOR_OTHER_ADDRESS. We should
6533 never need to, since we do not modify RELOAD_FOR_OUTPUT.
6535 It is possible that the RELOAD_FOR_OPERAND_ADDRESS
6536 instruction is assigned the same register as the earlier
6537 RELOAD_FOR_OTHER_ADDRESS instruction. Merging these two
6538 instructions will cause the RELOAD_FOR_OTHER_ADDRESS
6539 instruction to be deleted later on. */
6541 if (rld[i].when_needed == RELOAD_OTHER)
6542 for (j = 0; j < n_reloads; j++)
6543 if (rld[j].in != 0
6544 && rld[j].when_needed != RELOAD_OTHER
6545 && rld[j].when_needed != RELOAD_FOR_OTHER_ADDRESS
6546 && rld[j].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
6547 && rld[j].when_needed != RELOAD_FOR_OPERAND_ADDRESS
6548 && (! conflicting_input
6549 || rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6550 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
6551 && reg_overlap_mentioned_for_reload_p (rld[j].in,
6552 rld[i].in))
6554 int k;
6556 rld[j].when_needed
6557 = ((rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6558 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
6559 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER);
6561 /* Check to see if we accidentally converted two
6562 reloads that use the same reload register with
6563 different inputs to the same type. If so, the
6564 resulting code won't work. */
6565 if (rld[j].reg_rtx)
6566 for (k = 0; k < j; k++)
6567 gcc_assert (rld[k].in == 0 || rld[k].reg_rtx == 0
6568 || rld[k].when_needed != rld[j].when_needed
6569 || !rtx_equal_p (rld[k].reg_rtx,
6570 rld[j].reg_rtx)
6571 || rtx_equal_p (rld[k].in,
6572 rld[j].in));
6578 /* These arrays are filled by emit_reload_insns and its subroutines. */
6579 static rtx input_reload_insns[MAX_RECOG_OPERANDS];
6580 static rtx other_input_address_reload_insns = 0;
6581 static rtx other_input_reload_insns = 0;
6582 static rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
6583 static rtx inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6584 static rtx output_reload_insns[MAX_RECOG_OPERANDS];
6585 static rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
6586 static rtx outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6587 static rtx operand_reload_insns = 0;
6588 static rtx other_operand_reload_insns = 0;
6589 static rtx other_output_reload_insns[MAX_RECOG_OPERANDS];
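/* Each sequence above is built with push_to_sequence and later spliced
   around the insn by emit_reload_insns, keeping address reloads ahead of
   the input/output reloads that use them.  */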
6591 /* Values to be put in spill_reg_store are put here first. */
6592 static rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
6593 static HARD_REG_SET reg_reloaded_died;
6595 /* Check if *RELOAD_REG is suitable as an intermediate or scratch register
6596 of class NEW_CLASS with mode NEW_MODE, or, if ALT_RELOAD_REG is nonzero,
6597 whether that register is suitable instead. On success, change *RELOAD_REG to the
6598 adjusted register, and return true. Otherwise, return false. */
6599 static bool
6600 reload_adjust_reg_for_temp (rtx *reload_reg, rtx alt_reload_reg,
6601 enum reg_class new_class,
6602 enum machine_mode new_mode)
6605 rtx reg;
6607 for (reg = *reload_reg; reg; reg = alt_reload_reg, alt_reload_reg = 0)
6609 unsigned regno = REGNO (reg);
6611 if (!TEST_HARD_REG_BIT (reg_class_contents[(int) new_class], regno))
6612 continue;
6613 if (GET_MODE (reg) != new_mode)
6615 if (!HARD_REGNO_MODE_OK (regno, new_mode))
6616 continue;
6617 if (hard_regno_nregs[regno][new_mode]
6618 > hard_regno_nregs[regno][GET_MODE (reg)])
6619 continue;
6620 reg = reload_adjust_reg_for_mode (reg, new_mode);
6622 *reload_reg = reg;
6623 return true;
6625 return false;
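/* reload_adjust_reg_for_icode below is a thin wrapper around this routine
   for the case where the required class and mode are taken from a reload
   pattern's scratch operand (operand 2), via scratch_reload_class and
   insn_data.  */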
6628 /* Check if *RELOAD_REG is suitable as a scratch register for the reload
6629 pattern with insn_code ICODE, or, if ALT_RELOAD_REG is nonzero,
6630 whether that register is suitable instead. On success, change *RELOAD_REG to the
6631 adjusted register, and return true. Otherwise, return false. */
6632 static bool
6633 reload_adjust_reg_for_icode (rtx *reload_reg, rtx alt_reload_reg,
6634 enum insn_code icode)
6637 enum reg_class new_class = scratch_reload_class (icode);
6638 enum machine_mode new_mode = insn_data[(int) icode].operand[2].mode;
6640 return reload_adjust_reg_for_temp (reload_reg, alt_reload_reg,
6641 new_class, new_mode);
6644 /* Generate insns to perform reload RL, which is for the insn in CHAIN and
6645 has the number J. OLD contains the value to be used as input. */
6647 static void
6648 emit_input_reload_insns (struct insn_chain *chain, struct reload *rl,
6649 rtx old, int j)
6651 rtx insn = chain->insn;
6652 rtx reloadreg = rl->reg_rtx;
6653 rtx oldequiv_reg = 0;
6654 rtx oldequiv = 0;
6655 int special = 0;
6656 enum machine_mode mode;
6657 rtx *where;
6659 /* Determine the mode to reload in.
6660 This is very tricky because we have three to choose from.
6661 There is the mode the insn operand wants (rl->inmode).
6662 There is the mode of the reload register RELOADREG.
6663 There is the intrinsic mode of the operand, which we could find
6664 by stripping some SUBREGs.
6665 It turns out that RELOADREG's mode is irrelevant:
6666 we can change that arbitrarily.
6668 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
6669 then the reload reg may not support QImode moves, so use SImode.
6670 If foo is in memory due to spilling a pseudo reg, this is safe,
6671 because the QImode value is in the least significant part of a
6672 slot big enough for a SImode. If foo is some other sort of
6673 memory reference, then it is impossible to reload this case,
6674 so previous passes had better make sure this never happens.
6676 Then consider a one-word union which has SImode and one of its
6677 members is a float, being fetched as (SUBREG:SF union:SI).
6678 We must fetch that as SFmode because we could be loading into
6679 a float-only register. In this case OLD's mode is correct.
6681 Consider an immediate integer: it has VOIDmode. Here we need
6682 to get a mode from something else.
6684 In some cases, there is a fourth mode, the operand's
6685 containing mode. If the insn specifies a containing mode for
6686 this operand, it overrides all others.
6688 I am not sure whether the algorithm here is always right,
6689 but it does the right things in those cases. */
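/* In short: use OLD's own mode, and fall back to rl->inmode only when OLD
   has VOIDmode (e.g. a constant integer).  */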
6691 mode = GET_MODE (old);
6692 if (mode == VOIDmode)
6693 mode = rl->inmode;
6695 /* delete_output_reload is only invoked properly if old contains
6696 the original pseudo register. Since this is replaced with a
6697 hard reg when RELOAD_OVERRIDE_IN is set, see if we can
6698 find the pseudo in RELOAD_IN_REG. */
6699 if (reload_override_in[j]
6700 && REG_P (rl->in_reg))
6702 oldequiv = old;
6703 old = rl->in_reg;
6705 if (oldequiv == 0)
6706 oldequiv = old;
6707 else if (REG_P (oldequiv))
6708 oldequiv_reg = oldequiv;
6709 else if (GET_CODE (oldequiv) == SUBREG)
6710 oldequiv_reg = SUBREG_REG (oldequiv);
6712 /* If we are reloading from a register that was recently stored in
6713 with an output-reload, see if we can prove there was
6714 actually no need to store the old value in it. */
6716 if (optimize && REG_P (oldequiv)
6717 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6718 && spill_reg_store[REGNO (oldequiv)]
6719 && REG_P (old)
6720 && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (oldequiv)])
6721 || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
6722 rl->out_reg)))
6723 delete_output_reload (insn, j, REGNO (oldequiv));
6725 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
6726 then load RELOADREG from OLDEQUIV. Note that we cannot use
6727 gen_lowpart_common since it can do the wrong thing when
6728 RELOADREG has a multi-word mode. Note that RELOADREG
6729 must always be a REG here. */
6731 if (GET_MODE (reloadreg) != mode)
6732 reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);
6733 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
6734 oldequiv = SUBREG_REG (oldequiv);
6735 if (GET_MODE (oldequiv) != VOIDmode
6736 && mode != GET_MODE (oldequiv))
6737 oldequiv = gen_lowpart_SUBREG (mode, oldequiv);
6739 /* Switch to the right place to emit the reload insns. */
6740 switch (rl->when_needed)
6742 case RELOAD_OTHER:
6743 where = &other_input_reload_insns;
6744 break;
6745 case RELOAD_FOR_INPUT:
6746 where = &input_reload_insns[rl->opnum];
6747 break;
6748 case RELOAD_FOR_INPUT_ADDRESS:
6749 where = &input_address_reload_insns[rl->opnum];
6750 break;
6751 case RELOAD_FOR_INPADDR_ADDRESS:
6752 where = &inpaddr_address_reload_insns[rl->opnum];
6753 break;
6754 case RELOAD_FOR_OUTPUT_ADDRESS:
6755 where = &output_address_reload_insns[rl->opnum];
6756 break;
6757 case RELOAD_FOR_OUTADDR_ADDRESS:
6758 where = &outaddr_address_reload_insns[rl->opnum];
6759 break;
6760 case RELOAD_FOR_OPERAND_ADDRESS:
6761 where = &operand_reload_insns;
6762 break;
6763 case RELOAD_FOR_OPADDR_ADDR:
6764 where = &other_operand_reload_insns;
6765 break;
6766 case RELOAD_FOR_OTHER_ADDRESS:
6767 where = &other_input_address_reload_insns;
6768 break;
6769 default:
6770 gcc_unreachable ();
6773 push_to_sequence (*where);
6775 /* Auto-increment addresses must be reloaded in a special way. */
6776 if (rl->out && ! rl->out_reg)
6778 /* We are not going to bother supporting the case where an
6779 incremented register can't be copied directly from
6780 OLDEQUIV since this seems highly unlikely. */
6781 gcc_assert (rl->secondary_in_reload < 0);
6783 if (reload_inherited[j])
6784 oldequiv = reloadreg;
6786 old = XEXP (rl->in_reg, 0);
6788 if (optimize && REG_P (oldequiv)
6789 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6790 && spill_reg_store[REGNO (oldequiv)]
6791 && REG_P (old)
6792 && (dead_or_set_p (insn,
6793 spill_reg_stored_to[REGNO (oldequiv)])
6794 || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
6795 old)))
6796 delete_output_reload (insn, j, REGNO (oldequiv));
6798 /* Prevent normal processing of this reload. */
6799 special = 1;
6800 /* Output a special code sequence for this case. */
6801 new_spill_reg_store[REGNO (reloadreg)]
6802 = inc_for_reload (reloadreg, oldequiv, rl->out,
6803 rl->inc);
6806 /* If we are reloading a pseudo-register that was set by the previous
6807 insn, see if we can get rid of that pseudo-register entirely
6808 by redirecting the previous insn into our reload register. */
6810 else if (optimize && REG_P (old)
6811 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6812 && dead_or_set_p (insn, old)
6813 /* This is unsafe if some other reload
6814 uses the same reg first. */
6815 && ! conflicts_with_override (reloadreg)
6816 && free_for_value_p (REGNO (reloadreg), rl->mode, rl->opnum,
6817 rl->when_needed, old, rl->out, j, 0))
6819 rtx temp = PREV_INSN (insn);
6820 while (temp && NOTE_P (temp))
6821 temp = PREV_INSN (temp);
6822 if (temp
6823 && NONJUMP_INSN_P (temp)
6824 && GET_CODE (PATTERN (temp)) == SET
6825 && SET_DEST (PATTERN (temp)) == old
6826 /* Make sure we can access insn_operand_constraint. */
6827 && asm_noperands (PATTERN (temp)) < 0
6828 /* This is unsafe if the operand occurs more than once in the current
6829 insn. Perhaps some occurrences aren't reloaded. */
6830 && count_occurrences (PATTERN (insn), old, 0) == 1)
6832 rtx old = SET_DEST (PATTERN (temp));
6833 /* Store into the reload register instead of the pseudo. */
6834 SET_DEST (PATTERN (temp)) = reloadreg;
6836 /* Verify that resulting insn is valid. */
6837 extract_insn (temp);
6838 if (constrain_operands (1))
6840 /* If the previous insn is an output reload, the source is
6841 a reload register, and its spill_reg_store entry will
6842 contain the previous destination. This is now
6843 invalid. */
6844 if (REG_P (SET_SRC (PATTERN (temp)))
6845 && REGNO (SET_SRC (PATTERN (temp))) < FIRST_PSEUDO_REGISTER)
6847 spill_reg_store[REGNO (SET_SRC (PATTERN (temp)))] = 0;
6848 spill_reg_stored_to[REGNO (SET_SRC (PATTERN (temp)))] = 0;
6851 /* If these are the only uses of the pseudo reg,
6852 pretend for GDB it lives in the reload reg we used. */
6853 if (REG_N_DEATHS (REGNO (old)) == 1
6854 && REG_N_SETS (REGNO (old)) == 1)
6856 reg_renumber[REGNO (old)] = REGNO (rl->reg_rtx);
6857 if (flag_ira)
6858 mark_allocation_change (REGNO (old));
6859 alter_reg (REGNO (old), -1, false);
6861 special = 1;
6863 else
6865 SET_DEST (PATTERN (temp)) = old;
6870 /* We can't do that, so output an insn to load RELOADREG. */
6872 /* If we have a secondary reload, pick up the secondary register
6873 and icode, if any. If OLDEQUIV and OLD are different or
6874 if this is an in-out reload, recompute whether or not we
6875 still need a secondary register and what the icode should
6876 be. If we still need a secondary register and the class or
6877 icode is different, go back to reloading from OLD if using
6878 OLDEQUIV means that we got the wrong type of register. We
6879 cannot have different class or icode due to an in-out reload
6880 because we don't make such reloads when both the input and
6881 output need secondary reload registers. */
6883 if (! special && rl->secondary_in_reload >= 0)
6885 rtx second_reload_reg = 0;
6886 rtx third_reload_reg = 0;
6887 int secondary_reload = rl->secondary_in_reload;
6888 rtx real_oldequiv = oldequiv;
6889 rtx real_old = old;
6890 rtx tmp;
6891 enum insn_code icode;
6892 enum insn_code tertiary_icode = CODE_FOR_nothing;
6894 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
6895 and similarly for OLD.
6896 See comments in get_secondary_reload in reload.c. */
6897 /* If it is a pseudo that cannot be replaced with its
6898 equivalent MEM, we must fall back to reload_in, which
6899 will have all the necessary substitutions registered.
6900 Likewise for a pseudo that can't be replaced with its
6901 equivalent constant.
6903 Take extra care for subregs of such pseudos. Note that
6904 we cannot use reg_equiv_mem in this case because it is
6905 not in the right mode. */
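/* For example (modes assumed for illustration), if OLDEQUIV is
(subreg:HI (reg:SI N) 0) and pseudo N has an equivalent MEM, that MEM
is in SImode rather than HImode, so the code below falls back to
rl->in instead of using reg_equiv_mem. */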
6907 tmp = oldequiv;
6908 if (GET_CODE (tmp) == SUBREG)
6909 tmp = SUBREG_REG (tmp);
6910 if (REG_P (tmp)
6911 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
6912 && (reg_equiv_memory_loc[REGNO (tmp)] != 0
6913 || reg_equiv_constant[REGNO (tmp)] != 0))
6915 if (! reg_equiv_mem[REGNO (tmp)]
6916 || num_not_at_initial_offset
6917 || GET_CODE (oldequiv) == SUBREG)
6918 real_oldequiv = rl->in;
6919 else
6920 real_oldequiv = reg_equiv_mem[REGNO (tmp)];
6923 tmp = old;
6924 if (GET_CODE (tmp) == SUBREG)
6925 tmp = SUBREG_REG (tmp);
6926 if (REG_P (tmp)
6927 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
6928 && (reg_equiv_memory_loc[REGNO (tmp)] != 0
6929 || reg_equiv_constant[REGNO (tmp)] != 0))
6931 if (! reg_equiv_mem[REGNO (tmp)]
6932 || num_not_at_initial_offset
6933 || GET_CODE (old) == SUBREG)
6934 real_old = rl->in;
6935 else
6936 real_old = reg_equiv_mem[REGNO (tmp)];
6939 second_reload_reg = rld[secondary_reload].reg_rtx;
6940 if (rld[secondary_reload].secondary_in_reload >= 0)
6942 int tertiary_reload = rld[secondary_reload].secondary_in_reload;
6944 third_reload_reg = rld[tertiary_reload].reg_rtx;
6945 tertiary_icode = rld[secondary_reload].secondary_in_icode;
/* We'd have to add more code for quaternary reloads. */
6947 gcc_assert (rld[tertiary_reload].secondary_in_reload < 0);
6949 icode = rl->secondary_in_icode;
6951 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
6952 || (rl->in != 0 && rl->out != 0))
6954 secondary_reload_info sri, sri2;
6955 enum reg_class new_class, new_t_class;
6957 sri.icode = CODE_FOR_nothing;
6958 sri.prev_sri = NULL;
6959 new_class = targetm.secondary_reload (1, real_oldequiv, rl->class,
6960 mode, &sri);
6962 if (new_class == NO_REGS && sri.icode == CODE_FOR_nothing)
6963 second_reload_reg = 0;
6964 else if (new_class == NO_REGS)
6966 if (reload_adjust_reg_for_icode (&second_reload_reg,
6967 third_reload_reg, sri.icode))
6968 icode = sri.icode, third_reload_reg = 0;
6969 else
6970 oldequiv = old, real_oldequiv = real_old;
6972 else if (sri.icode != CODE_FOR_nothing)
6973 /* We currently lack a way to express this in reloads. */
6974 gcc_unreachable ();
6975 else
6977 sri2.icode = CODE_FOR_nothing;
6978 sri2.prev_sri = &sri;
6979 new_t_class = targetm.secondary_reload (1, real_oldequiv,
6980 new_class, mode, &sri);
6981 if (new_t_class == NO_REGS && sri2.icode == CODE_FOR_nothing)
6983 if (reload_adjust_reg_for_temp (&second_reload_reg,
6984 third_reload_reg,
6985 new_class, mode))
6986 third_reload_reg = 0, tertiary_icode = sri2.icode;
6987 else
6988 oldequiv = old, real_oldequiv = real_old;
6990 else if (new_t_class == NO_REGS && sri2.icode != CODE_FOR_nothing)
6992 rtx intermediate = second_reload_reg;
6994 if (reload_adjust_reg_for_temp (&intermediate, NULL,
6995 new_class, mode)
6996 && reload_adjust_reg_for_icode (&third_reload_reg, NULL,
6997 sri2.icode))
6999 second_reload_reg = intermediate;
7000 tertiary_icode = sri2.icode;
7002 else
7003 oldequiv = old, real_oldequiv = real_old;
7005 else if (new_t_class != NO_REGS && sri2.icode == CODE_FOR_nothing)
7007 rtx intermediate = second_reload_reg;
7009 if (reload_adjust_reg_for_temp (&intermediate, NULL,
7010 new_class, mode)
7011 && reload_adjust_reg_for_temp (&third_reload_reg, NULL,
7012 new_t_class, mode))
7014 second_reload_reg = intermediate;
7015 tertiary_icode = sri2.icode;
7017 else
7018 oldequiv = old, real_oldequiv = real_old;
7020 else
7021 /* This could be handled more intelligently too. */
7022 oldequiv = old, real_oldequiv = real_old;
7026 /* If we still need a secondary reload register, check
7027 to see if it is being used as a scratch or intermediate
7028 register and generate code appropriately. If we need
7029 a scratch register, use REAL_OLDEQUIV since the form of
7030 the insn may depend on the actual address if it is
7031 a MEM. */
7033 if (second_reload_reg)
7035 if (icode != CODE_FOR_nothing)
7037 /* We'd have to add extra code to handle this case. */
7038 gcc_assert (!third_reload_reg);
7040 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
7041 second_reload_reg));
7042 special = 1;
7044 else
7046 /* See if we need a scratch register to load the
7047 intermediate register (a tertiary reload). */
7048 if (tertiary_icode != CODE_FOR_nothing)
7050 emit_insn ((GEN_FCN (tertiary_icode)
7051 (second_reload_reg, real_oldequiv,
7052 third_reload_reg)));
7054 else if (third_reload_reg)
7056 gen_reload (third_reload_reg, real_oldequiv,
7057 rl->opnum,
7058 rl->when_needed);
7059 gen_reload (second_reload_reg, third_reload_reg,
7060 rl->opnum,
7061 rl->when_needed);
7063 else
7064 gen_reload (second_reload_reg, real_oldequiv,
7065 rl->opnum,
7066 rl->when_needed);
7068 oldequiv = second_reload_reg;
7073 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
7075 rtx real_oldequiv = oldequiv;
7077 if ((REG_P (oldequiv)
7078 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
7079 && (reg_equiv_memory_loc[REGNO (oldequiv)] != 0
7080 || reg_equiv_constant[REGNO (oldequiv)] != 0))
7081 || (GET_CODE (oldequiv) == SUBREG
7082 && REG_P (SUBREG_REG (oldequiv))
7083 && (REGNO (SUBREG_REG (oldequiv))
7084 >= FIRST_PSEUDO_REGISTER)
7085 && ((reg_equiv_memory_loc
7086 [REGNO (SUBREG_REG (oldequiv))] != 0)
7087 || (reg_equiv_constant
7088 [REGNO (SUBREG_REG (oldequiv))] != 0)))
7089 || (CONSTANT_P (oldequiv)
7090 && (PREFERRED_RELOAD_CLASS (oldequiv,
7091 REGNO_REG_CLASS (REGNO (reloadreg)))
7092 == NO_REGS)))
7093 real_oldequiv = rl->in;
7094 gen_reload (reloadreg, real_oldequiv, rl->opnum,
7095 rl->when_needed);
7098 if (flag_non_call_exceptions)
7099 copy_eh_notes (insn, get_insns ());
7101 /* End this sequence. */
7102 *where = get_insns ();
7103 end_sequence ();
7105 /* Update reload_override_in so that delete_address_reloads_1
7106 can see the actual register usage. */
7107 if (oldequiv_reg)
7108 reload_override_in[j] = oldequiv;
/* Generate insns for the output reload RL, which is for the insn described
7112 by CHAIN and has the number J. */
7113 static void
7114 emit_output_reload_insns (struct insn_chain *chain, struct reload *rl,
7115 int j)
7117 rtx reloadreg = rl->reg_rtx;
7118 rtx insn = chain->insn;
7119 int special = 0;
7120 rtx old = rl->out;
7121 enum machine_mode mode = GET_MODE (old);
7122 rtx p;
7124 if (rl->when_needed == RELOAD_OTHER)
7125 start_sequence ();
7126 else
7127 push_to_sequence (output_reload_insns[rl->opnum]);
7129 /* Determine the mode to reload in.
7130 See comments above (for input reloading). */
7132 if (mode == VOIDmode)
7134 /* VOIDmode should never happen for an output. */
7135 if (asm_noperands (PATTERN (insn)) < 0)
7136 /* It's the compiler's fault. */
7137 fatal_insn ("VOIDmode on an output", insn);
7138 error_for_asm (insn, "output operand is constant in %<asm%>");
7139 /* Prevent crash--use something we know is valid. */
7140 mode = word_mode;
7141 old = gen_rtx_REG (mode, REGNO (reloadreg));
7144 if (GET_MODE (reloadreg) != mode)
7145 reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);
7147 /* If we need two reload regs, set RELOADREG to the intermediate
7148 one, since it will be stored into OLD. We might need a secondary
7149 register only for an input reload, so check again here. */
7151 if (rl->secondary_out_reload >= 0)
7153 rtx real_old = old;
7154 int secondary_reload = rl->secondary_out_reload;
7155 int tertiary_reload = rld[secondary_reload].secondary_out_reload;
7157 if (REG_P (old) && REGNO (old) >= FIRST_PSEUDO_REGISTER
7158 && reg_equiv_mem[REGNO (old)] != 0)
7159 real_old = reg_equiv_mem[REGNO (old)];
7161 if (secondary_reload_class (0, rl->class, mode, real_old) != NO_REGS)
7163 rtx second_reloadreg = reloadreg;
7164 reloadreg = rld[secondary_reload].reg_rtx;
7166 /* See if RELOADREG is to be used as a scratch register
7167 or as an intermediate register. */
7168 if (rl->secondary_out_icode != CODE_FOR_nothing)
7170 /* We'd have to add extra code to handle this case. */
7171 gcc_assert (tertiary_reload < 0);
7173 emit_insn ((GEN_FCN (rl->secondary_out_icode)
7174 (real_old, second_reloadreg, reloadreg)));
7175 special = 1;
7177 else
7179 /* See if we need both a scratch and intermediate reload
7180 register. */
7182 enum insn_code tertiary_icode
7183 = rld[secondary_reload].secondary_out_icode;
/* We'd have to add more code for quaternary reloads. */
7186 gcc_assert (tertiary_reload < 0
7187 || rld[tertiary_reload].secondary_out_reload < 0);
7189 if (GET_MODE (reloadreg) != mode)
7190 reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);
7192 if (tertiary_icode != CODE_FOR_nothing)
7194 rtx third_reloadreg = rld[tertiary_reload].reg_rtx;
7195 rtx tem;
/* Copy the primary reload reg to the secondary reload reg (note that
these have been swapped above), then copy the secondary reload reg
to OLD using our insn. */
7201 /* If REAL_OLD is a paradoxical SUBREG, remove it
7202 and try to put the opposite SUBREG on
7203 RELOADREG. */
7204 if (GET_CODE (real_old) == SUBREG
7205 && (GET_MODE_SIZE (GET_MODE (real_old))
7206 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
7207 && 0 != (tem = gen_lowpart_common
7208 (GET_MODE (SUBREG_REG (real_old)),
7209 reloadreg)))
7210 real_old = SUBREG_REG (real_old), reloadreg = tem;
7212 gen_reload (reloadreg, second_reloadreg,
7213 rl->opnum, rl->when_needed);
7214 emit_insn ((GEN_FCN (tertiary_icode)
7215 (real_old, reloadreg, third_reloadreg)));
7216 special = 1;
7219 else
7221 /* Copy between the reload regs here and then to
7222 OUT later. */
7224 gen_reload (reloadreg, second_reloadreg,
7225 rl->opnum, rl->when_needed);
7226 if (tertiary_reload >= 0)
7228 rtx third_reloadreg = rld[tertiary_reload].reg_rtx;
7230 gen_reload (third_reloadreg, reloadreg,
7231 rl->opnum, rl->when_needed);
7232 reloadreg = third_reloadreg;
7239 /* Output the last reload insn. */
7240 if (! special)
7242 rtx set;
/* Don't output the last reload if OLD is not the dest of
INSN, is used in the src, and is clobbered by INSN. */
7246 if (! flag_expensive_optimizations
7247 || !REG_P (old)
7248 || !(set = single_set (insn))
7249 || rtx_equal_p (old, SET_DEST (set))
7250 || !reg_mentioned_p (old, SET_SRC (set))
7251 || !((REGNO (old) < FIRST_PSEUDO_REGISTER)
7252 && regno_clobbered_p (REGNO (old), insn, rl->mode, 0)))
7253 gen_reload (old, reloadreg, rl->opnum,
7254 rl->when_needed);
7257 /* Look at all insns we emitted, just to be safe. */
7258 for (p = get_insns (); p; p = NEXT_INSN (p))
7259 if (INSN_P (p))
7261 rtx pat = PATTERN (p);
7263 /* If this output reload doesn't come from a spill reg,
7264 clear any memory of reloaded copies of the pseudo reg.
7265 If this output reload comes from a spill reg,
7266 reg_has_output_reload will make this do nothing. */
7267 note_stores (pat, forget_old_reloads_1, NULL);
7269 if (reg_mentioned_p (rl->reg_rtx, pat))
7271 rtx set = single_set (insn);
7272 if (reload_spill_index[j] < 0
7273 && set
7274 && SET_SRC (set) == rl->reg_rtx)
7276 int src = REGNO (SET_SRC (set));
7278 reload_spill_index[j] = src;
7279 SET_HARD_REG_BIT (reg_is_output_reload, src);
7280 if (find_regno_note (insn, REG_DEAD, src))
7281 SET_HARD_REG_BIT (reg_reloaded_died, src);
7283 if (REGNO (rl->reg_rtx) < FIRST_PSEUDO_REGISTER)
7285 int s = rl->secondary_out_reload;
7286 set = single_set (p);
7287 /* If this reload copies only to the secondary reload
7288 register, the secondary reload does the actual
7289 store. */
7290 if (s >= 0 && set == NULL_RTX)
7291 /* We can't tell what function the secondary reload
7292 has and where the actual store to the pseudo is
7293 made; leave new_spill_reg_store alone. */
7295 else if (s >= 0
7296 && SET_SRC (set) == rl->reg_rtx
7297 && SET_DEST (set) == rld[s].reg_rtx)
7299 /* Usually the next instruction will be the
7300 secondary reload insn; if we can confirm
7301 that it is, setting new_spill_reg_store to
7302 that insn will allow an extra optimization. */
7303 rtx s_reg = rld[s].reg_rtx;
7304 rtx next = NEXT_INSN (p);
7305 rld[s].out = rl->out;
7306 rld[s].out_reg = rl->out_reg;
7307 set = single_set (next);
7308 if (set && SET_SRC (set) == s_reg
7309 && ! new_spill_reg_store[REGNO (s_reg)])
7311 SET_HARD_REG_BIT (reg_is_output_reload,
7312 REGNO (s_reg));
7313 new_spill_reg_store[REGNO (s_reg)] = next;
7316 else
7317 new_spill_reg_store[REGNO (rl->reg_rtx)] = p;
7322 if (rl->when_needed == RELOAD_OTHER)
7324 emit_insn (other_output_reload_insns[rl->opnum]);
7325 other_output_reload_insns[rl->opnum] = get_insns ();
7327 else
7328 output_reload_insns[rl->opnum] = get_insns ();
7330 if (flag_non_call_exceptions)
7331 copy_eh_notes (insn, get_insns ());
7333 end_sequence ();
7336 /* Do input reloading for reload RL, which is for the insn described by CHAIN
7337 and has the number J. */
7338 static void
7339 do_input_reload (struct insn_chain *chain, struct reload *rl, int j)
7341 rtx insn = chain->insn;
7342 rtx old = (rl->in && MEM_P (rl->in)
7343 ? rl->in_reg : rl->in);
7345 if (old != 0
7346 /* AUTO_INC reloads need to be handled even if inherited. We got an
7347 AUTO_INC reload if reload_out is set but reload_out_reg isn't. */
7348 && (! reload_inherited[j] || (rl->out && ! rl->out_reg))
7349 && ! rtx_equal_p (rl->reg_rtx, old)
7350 && rl->reg_rtx != 0)
7351 emit_input_reload_insns (chain, rld + j, old, j);
7353 /* When inheriting a wider reload, we have a MEM in rl->in,
7354 e.g. inheriting a SImode output reload for
7355 (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10))) */
7356 if (optimize && reload_inherited[j] && rl->in
7357 && MEM_P (rl->in)
7358 && MEM_P (rl->in_reg)
7359 && reload_spill_index[j] >= 0
7360 && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
7361 rl->in = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];
/* If we are reloading a register that was recently stored into by an
output reload, see if we can prove there was
7365 actually no need to store the old value in it. */
7367 if (optimize
7368 && (reload_inherited[j] || reload_override_in[j])
7369 && rl->reg_rtx
7370 && REG_P (rl->reg_rtx)
7371 && spill_reg_store[REGNO (rl->reg_rtx)] != 0
7372 #if 0
7373 /* There doesn't seem to be any reason to restrict this to pseudos
7374 and doing so loses in the case where we are copying from a
7375 register of the wrong class. */
7376 && (REGNO (spill_reg_stored_to[REGNO (rl->reg_rtx)])
7377 >= FIRST_PSEUDO_REGISTER)
7378 #endif
/* The insn might already have some references to stack slots
replaced by MEMs, while reload_out_reg still names the
7381 original pseudo. */
7382 && (dead_or_set_p (insn,
7383 spill_reg_stored_to[REGNO (rl->reg_rtx)])
7384 || rtx_equal_p (spill_reg_stored_to[REGNO (rl->reg_rtx)],
7385 rl->out_reg)))
7386 delete_output_reload (insn, j, REGNO (rl->reg_rtx));
7389 /* Do output reloading for reload RL, which is for the insn described by
7390 CHAIN and has the number J.
7391 ??? At some point we need to support handling output reloads of
7392 JUMP_INSNs or insns that set cc0. */
7393 static void
7394 do_output_reload (struct insn_chain *chain, struct reload *rl, int j)
7396 rtx note, old;
7397 rtx insn = chain->insn;
7398 /* If this is an output reload that stores something that is
7399 not loaded in this same reload, see if we can eliminate a previous
7400 store. */
7401 rtx pseudo = rl->out_reg;
7403 if (pseudo
7404 && optimize
7405 && REG_P (pseudo)
7406 && ! rtx_equal_p (rl->in_reg, pseudo)
7407 && REGNO (pseudo) >= FIRST_PSEUDO_REGISTER
7408 && reg_last_reload_reg[REGNO (pseudo)])
7410 int pseudo_no = REGNO (pseudo);
7411 int last_regno = REGNO (reg_last_reload_reg[pseudo_no]);
7413 /* We don't need to test full validity of last_regno for
7414 inherit here; we only want to know if the store actually
7415 matches the pseudo. */
7416 if (TEST_HARD_REG_BIT (reg_reloaded_valid, last_regno)
7417 && reg_reloaded_contents[last_regno] == pseudo_no
7418 && spill_reg_store[last_regno]
7419 && rtx_equal_p (pseudo, spill_reg_stored_to[last_regno]))
7420 delete_output_reload (insn, j, last_regno);
7423 old = rl->out_reg;
7424 if (old == 0
7425 || rl->reg_rtx == old
7426 || rl->reg_rtx == 0)
7427 return;
7429 /* An output operand that dies right away does need a reload,
7430 but need not be copied from it. Show the new location in the
7431 REG_UNUSED note. */
7432 if ((REG_P (old) || GET_CODE (old) == SCRATCH)
7433 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
7435 XEXP (note, 0) = rl->reg_rtx;
7436 return;
7438 /* Likewise for a SUBREG of an operand that dies. */
7439 else if (GET_CODE (old) == SUBREG
7440 && REG_P (SUBREG_REG (old))
7441 && 0 != (note = find_reg_note (insn, REG_UNUSED,
7442 SUBREG_REG (old))))
7444 XEXP (note, 0) = gen_lowpart_common (GET_MODE (old),
7445 rl->reg_rtx);
7446 return;
7448 else if (GET_CODE (old) == SCRATCH)
7449 /* If we aren't optimizing, there won't be a REG_UNUSED note,
7450 but we don't want to make an output reload. */
7451 return;
/* If this is a JUMP_INSN, we can't support output reloads yet. */
7454 gcc_assert (NONJUMP_INSN_P (insn));
7456 emit_output_reload_insns (chain, rld + j, j);
7459 /* Reload number R reloads from or to a group of hard registers starting at
7460 register REGNO. Return true if it can be treated for inheritance purposes
7461 like a group of reloads, each one reloading a single hard register.
7462 The caller has already checked that the spill register and REGNO use
7463 the same number of registers to store the reload value. */
7465 static bool
7466 inherit_piecemeal_p (int r ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED)
7468 #ifdef CANNOT_CHANGE_MODE_CLASS
7469 return (!REG_CANNOT_CHANGE_MODE_P (reload_spill_index[r],
7470 GET_MODE (rld[r].reg_rtx),
7471 reg_raw_mode[reload_spill_index[r]])
7472 && !REG_CANNOT_CHANGE_MODE_P (regno,
7473 GET_MODE (rld[r].reg_rtx),
7474 reg_raw_mode[regno]));
7475 #else
7476 return true;
7477 #endif
7480 /* Output insns to reload values in and out of the chosen reload regs. */
7482 static void
7483 emit_reload_insns (struct insn_chain *chain)
7485 rtx insn = chain->insn;
7487 int j;
7489 CLEAR_HARD_REG_SET (reg_reloaded_died);
7491 for (j = 0; j < reload_n_operands; j++)
7492 input_reload_insns[j] = input_address_reload_insns[j]
7493 = inpaddr_address_reload_insns[j]
7494 = output_reload_insns[j] = output_address_reload_insns[j]
7495 = outaddr_address_reload_insns[j]
7496 = other_output_reload_insns[j] = 0;
7497 other_input_address_reload_insns = 0;
7498 other_input_reload_insns = 0;
7499 operand_reload_insns = 0;
7500 other_operand_reload_insns = 0;
7502 /* Dump reloads into the dump file. */
7503 if (dump_file)
7505 fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
7506 debug_reload_to_stream (dump_file);
7509 /* Now output the instructions to copy the data into and out of the
7510 reload registers. Do these in the order that the reloads were reported,
7511 since reloads of base and index registers precede reloads of operands
7512 and the operands may need the base and index registers reloaded. */
7514 for (j = 0; j < n_reloads; j++)
7516 if (rld[j].reg_rtx
7517 && REGNO (rld[j].reg_rtx) < FIRST_PSEUDO_REGISTER)
7518 new_spill_reg_store[REGNO (rld[j].reg_rtx)] = 0;
7520 do_input_reload (chain, rld + j, j);
7521 do_output_reload (chain, rld + j, j);
7524 /* Now write all the insns we made for reloads in the order expected by
7525 the allocation functions. Prior to the insn being reloaded, we write
7526 the following reloads:
7528 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
7530 RELOAD_OTHER reloads.
7532 For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
7533 by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
7534 RELOAD_FOR_INPUT reload for the operand.
7536 RELOAD_FOR_OPADDR_ADDRS reloads.
7538 RELOAD_FOR_OPERAND_ADDRESS reloads.
7540 After the insn being reloaded, we write the following:
7542 For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
7543 by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
7544 RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
7545 reloads for the operand. The RELOAD_OTHER output reloads are
7546 output in descending order by reload number. */
7548 emit_insn_before (other_input_address_reload_insns, insn);
7549 emit_insn_before (other_input_reload_insns, insn);
7551 for (j = 0; j < reload_n_operands; j++)
7553 emit_insn_before (inpaddr_address_reload_insns[j], insn);
7554 emit_insn_before (input_address_reload_insns[j], insn);
7555 emit_insn_before (input_reload_insns[j], insn);
7558 emit_insn_before (other_operand_reload_insns, insn);
7559 emit_insn_before (operand_reload_insns, insn);
7561 for (j = 0; j < reload_n_operands; j++)
7563 rtx x = emit_insn_after (outaddr_address_reload_insns[j], insn);
7564 x = emit_insn_after (output_address_reload_insns[j], x);
7565 x = emit_insn_after (output_reload_insns[j], x);
7566 emit_insn_after (other_output_reload_insns[j], x);
7569 /* For all the spill regs newly reloaded in this instruction,
7570 record what they were reloaded from, so subsequent instructions
7571 can inherit the reloads.
7573 Update spill_reg_store for the reloads of this insn.
7574 Copy the elements that were updated in the loop above. */
7576 for (j = 0; j < n_reloads; j++)
7578 int r = reload_order[j];
7579 int i = reload_spill_index[r];
7581 /* If this is a non-inherited input reload from a pseudo, we must
7582 clear any memory of a previous store to the same pseudo. Only do
7583 something if there will not be an output reload for the pseudo
7584 being reloaded. */
7585 if (rld[r].in_reg != 0
7586 && ! (reload_inherited[r] || reload_override_in[r]))
7588 rtx reg = rld[r].in_reg;
7590 if (GET_CODE (reg) == SUBREG)
7591 reg = SUBREG_REG (reg);
7593 if (REG_P (reg)
7594 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
7595 && !REGNO_REG_SET_P (&reg_has_output_reload, REGNO (reg)))
7597 int nregno = REGNO (reg);
7599 if (reg_last_reload_reg[nregno])
7601 int last_regno = REGNO (reg_last_reload_reg[nregno]);
7603 if (reg_reloaded_contents[last_regno] == nregno)
7604 spill_reg_store[last_regno] = 0;
7609 /* I is nonneg if this reload used a register.
7610 If rld[r].reg_rtx is 0, this is an optional reload
7611 that we opted to ignore. */
7613 if (i >= 0 && rld[r].reg_rtx != 0)
7615 int nr = hard_regno_nregs[i][GET_MODE (rld[r].reg_rtx)];
7616 int k;
7617 int part_reaches_end = 0;
7618 int all_reaches_end = 1;
7620 /* For a multi register reload, we need to check if all or part
7621 of the value lives to the end. */
7622 for (k = 0; k < nr; k++)
7624 if (reload_reg_reaches_end_p (i + k, rld[r].opnum,
7625 rld[r].when_needed))
7626 part_reaches_end = 1;
7627 else
7628 all_reaches_end = 0;
/* Ignore reloads that don't reach the end of the insn in their
entirety. */
7633 if (all_reaches_end)
7635 /* First, clear out memory of what used to be in this spill reg.
7636 If consecutive registers are used, clear them all. */
7638 for (k = 0; k < nr; k++)
7640 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
7641 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered, i + k);
7644 /* Maybe the spill reg contains a copy of reload_out. */
7645 if (rld[r].out != 0
7646 && (REG_P (rld[r].out)
7647 #ifdef AUTO_INC_DEC
7648 || ! rld[r].out_reg
7649 #endif
7650 || REG_P (rld[r].out_reg)))
7652 rtx out = (REG_P (rld[r].out)
7653 ? rld[r].out
7654 : rld[r].out_reg
7655 ? rld[r].out_reg
7656 /* AUTO_INC */ : XEXP (rld[r].in_reg, 0));
7657 int nregno = REGNO (out);
7658 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
7659 : hard_regno_nregs[nregno]
7660 [GET_MODE (rld[r].reg_rtx)]);
7661 bool piecemeal;
7663 spill_reg_store[i] = new_spill_reg_store[i];
7664 spill_reg_stored_to[i] = out;
7665 reg_last_reload_reg[nregno] = rld[r].reg_rtx;
7667 piecemeal = (nregno < FIRST_PSEUDO_REGISTER
7668 && nr == nnr
7669 && inherit_piecemeal_p (r, nregno));
7671 /* If NREGNO is a hard register, it may occupy more than
7672 one register. If it does, say what is in the
7673 rest of the registers assuming that both registers
7674 agree on how many words the object takes. If not,
7675 invalidate the subsequent registers. */
7677 if (nregno < FIRST_PSEUDO_REGISTER)
7678 for (k = 1; k < nnr; k++)
7679 reg_last_reload_reg[nregno + k]
7680 = (piecemeal
7681 ? regno_reg_rtx[REGNO (rld[r].reg_rtx) + k]
7682 : 0);
7684 /* Now do the inverse operation. */
7685 for (k = 0; k < nr; k++)
7687 CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k);
7688 reg_reloaded_contents[i + k]
7689 = (nregno >= FIRST_PSEUDO_REGISTER || !piecemeal
7690 ? nregno
7691 : nregno + k);
7692 reg_reloaded_insn[i + k] = insn;
7693 SET_HARD_REG_BIT (reg_reloaded_valid, i + k);
7694 if (HARD_REGNO_CALL_PART_CLOBBERED (i + k, GET_MODE (out)))
7695 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered, i + k);
7699 /* Maybe the spill reg contains a copy of reload_in. Only do
7700 something if there will not be an output reload for
7701 the register being reloaded. */
7702 else if (rld[r].out_reg == 0
7703 && rld[r].in != 0
7704 && ((REG_P (rld[r].in)
7705 && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER
7706 && !REGNO_REG_SET_P (&reg_has_output_reload,
7707 REGNO (rld[r].in)))
7708 || (REG_P (rld[r].in_reg)
7709 && !REGNO_REG_SET_P (&reg_has_output_reload,
7710 REGNO (rld[r].in_reg))))
7711 && ! reg_set_p (rld[r].reg_rtx, PATTERN (insn)))
7713 int nregno;
7714 int nnr;
7715 rtx in;
7716 bool piecemeal;
7718 if (REG_P (rld[r].in)
7719 && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER)
7720 in = rld[r].in;
7721 else if (REG_P (rld[r].in_reg))
7722 in = rld[r].in_reg;
7723 else
7724 in = XEXP (rld[r].in_reg, 0);
7725 nregno = REGNO (in);
7727 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
7728 : hard_regno_nregs[nregno]
7729 [GET_MODE (rld[r].reg_rtx)]);
7731 reg_last_reload_reg[nregno] = rld[r].reg_rtx;
7733 piecemeal = (nregno < FIRST_PSEUDO_REGISTER
7734 && nr == nnr
7735 && inherit_piecemeal_p (r, nregno));
7737 if (nregno < FIRST_PSEUDO_REGISTER)
7738 for (k = 1; k < nnr; k++)
7739 reg_last_reload_reg[nregno + k]
7740 = (piecemeal
7741 ? regno_reg_rtx[REGNO (rld[r].reg_rtx) + k]
7742 : 0);
7744 /* Unless we inherited this reload, show we haven't
7745 recently done a store.
7746 Previous stores of inherited auto_inc expressions
7747 also have to be discarded. */
7748 if (! reload_inherited[r]
7749 || (rld[r].out && ! rld[r].out_reg))
7750 spill_reg_store[i] = 0;
7752 for (k = 0; k < nr; k++)
7754 CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k);
7755 reg_reloaded_contents[i + k]
7756 = (nregno >= FIRST_PSEUDO_REGISTER || !piecemeal
7757 ? nregno
7758 : nregno + k);
7759 reg_reloaded_insn[i + k] = insn;
7760 SET_HARD_REG_BIT (reg_reloaded_valid, i + k);
7761 if (HARD_REGNO_CALL_PART_CLOBBERED (i + k, GET_MODE (in)))
7762 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered, i + k);
7767 /* However, if part of the reload reaches the end, then we must
7768 invalidate the old info for the part that survives to the end. */
7769 else if (part_reaches_end)
7771 for (k = 0; k < nr; k++)
7772 if (reload_reg_reaches_end_p (i + k,
7773 rld[r].opnum,
7774 rld[r].when_needed))
7775 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
7779 /* The following if-statement was #if 0'd in 1.34 (or before...).
7780 It's reenabled in 1.35 because supposedly nothing else
7781 deals with this problem. */
7783 /* If a register gets output-reloaded from a non-spill register,
7784 that invalidates any previous reloaded copy of it.
7785 But forget_old_reloads_1 won't get to see it, because
7786 it thinks only about the original insn. So invalidate it here.
7787 Also do the same thing for RELOAD_OTHER constraints where the
7788 output is discarded. */
7789 if (i < 0
7790 && ((rld[r].out != 0
7791 && (REG_P (rld[r].out)
7792 || (MEM_P (rld[r].out)
7793 && REG_P (rld[r].out_reg))))
7794 || (rld[r].out == 0 && rld[r].out_reg
7795 && REG_P (rld[r].out_reg))))
7797 rtx out = ((rld[r].out && REG_P (rld[r].out))
7798 ? rld[r].out : rld[r].out_reg);
7799 int nregno = REGNO (out);
7801 /* REG_RTX is now set or clobbered by the main instruction.
7802 As the comment above explains, forget_old_reloads_1 only
7803 sees the original instruction, and there is no guarantee
7804 that the original instruction also clobbered REG_RTX.
7805 For example, if find_reloads sees that the input side of
7806 a matched operand pair dies in this instruction, it may
7807 use the input register as the reload register.
7809 Calling forget_old_reloads_1 is a waste of effort if
7810 REG_RTX is also the output register.
7812 If we know that REG_RTX holds the value of a pseudo
7813 register, the code after the call will record that fact. */
7814 if (rld[r].reg_rtx && rld[r].reg_rtx != out)
7815 forget_old_reloads_1 (rld[r].reg_rtx, NULL_RTX, NULL);
7817 if (nregno >= FIRST_PSEUDO_REGISTER)
7819 rtx src_reg, store_insn = NULL_RTX;
7821 reg_last_reload_reg[nregno] = 0;
7823 /* If we can find a hard register that is stored, record
7824 the storing insn so that we may delete this insn with
7825 delete_output_reload. */
7826 src_reg = rld[r].reg_rtx;
7828 /* If this is an optional reload, try to find the source reg
7829 from an input reload. */
7830 if (! src_reg)
7832 rtx set = single_set (insn);
7833 if (set && SET_DEST (set) == rld[r].out)
7835 int k;
7837 src_reg = SET_SRC (set);
7838 store_insn = insn;
7839 for (k = 0; k < n_reloads; k++)
7841 if (rld[k].in == src_reg)
7843 src_reg = rld[k].reg_rtx;
7844 break;
7849 else
7850 store_insn = new_spill_reg_store[REGNO (src_reg)];
7851 if (src_reg && REG_P (src_reg)
7852 && REGNO (src_reg) < FIRST_PSEUDO_REGISTER)
7854 int src_regno = REGNO (src_reg);
7855 int nr = hard_regno_nregs[src_regno][rld[r].mode];
/* Where to find a death note varies with
PRESERVE_DEATH_INFO_REGNO_P. The condition is not
7858 necessarily checked exactly in the code that moves
7859 notes, so just check both locations. */
7860 rtx note = find_regno_note (insn, REG_DEAD, src_regno);
7861 if (! note && store_insn)
7862 note = find_regno_note (store_insn, REG_DEAD, src_regno);
7863 while (nr-- > 0)
7865 spill_reg_store[src_regno + nr] = store_insn;
7866 spill_reg_stored_to[src_regno + nr] = out;
7867 reg_reloaded_contents[src_regno + nr] = nregno;
7868 reg_reloaded_insn[src_regno + nr] = store_insn;
7869 CLEAR_HARD_REG_BIT (reg_reloaded_dead, src_regno + nr);
7870 SET_HARD_REG_BIT (reg_reloaded_valid, src_regno + nr);
7871 if (HARD_REGNO_CALL_PART_CLOBBERED (src_regno + nr,
7872 GET_MODE (src_reg)))
7873 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
7874 src_regno + nr);
7875 SET_HARD_REG_BIT (reg_is_output_reload, src_regno + nr);
7876 if (note)
7877 SET_HARD_REG_BIT (reg_reloaded_died, src_regno);
7878 else
7879 CLEAR_HARD_REG_BIT (reg_reloaded_died, src_regno);
7881 reg_last_reload_reg[nregno] = src_reg;
7882 /* We have to set reg_has_output_reload here, or else
7883 forget_old_reloads_1 will clear reg_last_reload_reg
7884 right away. */
7885 SET_REGNO_REG_SET (&reg_has_output_reload,
7886 nregno);
7889 else
7891 int num_regs = hard_regno_nregs[nregno][GET_MODE (out)];
7893 while (num_regs-- > 0)
7894 reg_last_reload_reg[nregno + num_regs] = 0;
7898 IOR_HARD_REG_SET (reg_reloaded_dead, reg_reloaded_died);
7901 /* Go through the motions to emit INSN and test if it is strictly valid.
7902 Return the emitted insn if valid, else return NULL. */
7904 static rtx
7905 emit_insn_if_valid_for_reload (rtx insn)
7907 rtx last = get_last_insn ();
7908 int code;
7910 insn = emit_insn (insn);
7911 code = recog_memoized (insn);
7913 if (code >= 0)
7915 extract_insn (insn);
7916 /* We want constrain operands to treat this insn strictly in its
7917 validity determination, i.e., the way it would after reload has
7918 completed. */
7919 if (constrain_operands (1))
7920 return insn;
7923 delete_insns_since (last);
7924 return NULL;
7927 /* Emit code to perform a reload from IN (which may be a reload register) to
7928 OUT (which may also be a reload register). IN or OUT is from operand
7929 OPNUM with reload type TYPE.
7931 Returns first insn emitted. */
7933 static rtx
7934 gen_reload (rtx out, rtx in, int opnum, enum reload_type type)
7936 rtx last = get_last_insn ();
7937 rtx tem;
7939 /* If IN is a paradoxical SUBREG, remove it and try to put the
7940 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
7941 if (GET_CODE (in) == SUBREG
7942 && (GET_MODE_SIZE (GET_MODE (in))
7943 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
7944 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
7945 in = SUBREG_REG (in), out = tem;
7946 else if (GET_CODE (out) == SUBREG
7947 && (GET_MODE_SIZE (GET_MODE (out))
7948 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
7949 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
7950 out = SUBREG_REG (out), in = tem;
7952 /* How to do this reload can get quite tricky. Normally, we are being
7953 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
7954 register that didn't get a hard register. In that case we can just
7955 call emit_move_insn.
7957 We can also be asked to reload a PLUS that adds a register or a MEM to
7958 another register, constant or MEM. This can occur during frame pointer
7959 elimination and while reloading addresses. This case is handled by
7960 trying to emit a single insn to perform the add. If it is not valid,
7961 we use a two insn sequence.
Or we can be asked to reload a unary operand that was a fragment of
7964 an addressing mode, into a register. If it isn't recognized as-is,
7965 we try making the unop operand and the reload-register the same:
7966 (set reg:X (unop:X expr:Y))
7967 -> (set reg:Y expr:Y) (set reg:X (unop:X reg:Y)).
7969 Finally, we could be called to handle an 'o' constraint by putting
7970 an address into a register. In that case, we first try to do this
7971 with a named pattern of "reload_load_address". If no such pattern
7972 exists, we just emit a SET insn and hope for the best (it will normally
7973 be valid on machines that use 'o').
This entire process is made complex by two facts: reload will never
process the insns we generate here, so we must ensure that they
fit their constraints; and parts of IN might be being reloaded
separately and replaced with spill registers.
7979 Because of this, we are, in some sense, just guessing the right approach
7980 here. The one listed above seems to work.
7982 ??? At some point, this whole thing needs to be rethought. */
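/* As a hypothetical illustration of the PLUS case below: after frame
pointer elimination we may be asked to reload something like
(plus:SI (reg:SI <base>) (const_int 8)) into the reload register,
which the code below first tries to do with a single add insn. */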
7984 if (GET_CODE (in) == PLUS
7985 && (REG_P (XEXP (in, 0))
7986 || GET_CODE (XEXP (in, 0)) == SUBREG
7987 || MEM_P (XEXP (in, 0)))
7988 && (REG_P (XEXP (in, 1))
7989 || GET_CODE (XEXP (in, 1)) == SUBREG
7990 || CONSTANT_P (XEXP (in, 1))
7991 || MEM_P (XEXP (in, 1))))
7993 /* We need to compute the sum of a register or a MEM and another
7994 register, constant, or MEM, and put it into the reload
7995 register. The best possible way of doing this is if the machine
7996 has a three-operand ADD insn that accepts the required operands.
7998 The simplest approach is to try to generate such an insn and see if it
7999 is recognized and matches its constraints. If so, it can be used.
8001 It might be better not to actually emit the insn unless it is valid,
8002 but we need to pass the insn as an operand to `recog' and
8003 `extract_insn' and it is simpler to emit and then delete the insn if
8004 not valid than to dummy things up. */
8006 rtx op0, op1, tem, insn;
8007 int code;
8009 op0 = find_replacement (&XEXP (in, 0));
8010 op1 = find_replacement (&XEXP (in, 1));
8012 /* Since constraint checking is strict, commutativity won't be
8013 checked, so we need to do that here to avoid spurious failure
8014 if the add instruction is two-address and the second operand
8015 of the add is the same as the reload reg, which is frequently
8016 the case. If the insn would be A = B + A, rearrange it so
8017 it will be A = A + B as constrain_operands expects. */
8019 if (REG_P (XEXP (in, 1))
8020 && REGNO (out) == REGNO (XEXP (in, 1)))
8021 tem = op0, op0 = op1, op1 = tem;
8023 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
8024 in = gen_rtx_PLUS (GET_MODE (in), op0, op1);
8026 insn = emit_insn_if_valid_for_reload (gen_rtx_SET (VOIDmode, out, in));
8027 if (insn)
8028 return insn;
8030 /* If that failed, we must use a conservative two-insn sequence.
8032 Use a move to copy one operand into the reload register. Prefer
8033 to reload a constant, MEM or pseudo since the move patterns can
8034 handle an arbitrary operand. If OP1 is not a constant, MEM or
8035 pseudo and OP1 is not a valid operand for an add instruction, then
8036 reload OP1.
8038 After reloading one of the operands into the reload register, add
8039 the reload register to the output register.
8041 If there is another way to do this for a specific machine, a
8042 DEFINE_PEEPHOLE should be specified that recognizes the sequence
8043 we emit below. */
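/* A sketch of the two-insn fallback (operands assumed for illustration):
for IN = (plus:SI (reg:SI 1) (const_int 4)) we would emit
(set (reg:SI OUT) (const_int 4))
(set (reg:SI OUT) (plus:SI (reg:SI OUT) (reg:SI 1)))
i.e. move the constant, MEM or pseudo operand into OUT first, then
add the remaining operand to OUT. */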
8045 code = (int) optab_handler (add_optab, GET_MODE (out))->insn_code;
8047 if (CONSTANT_P (op1) || MEM_P (op1) || GET_CODE (op1) == SUBREG
8048 || (REG_P (op1)
8049 && REGNO (op1) >= FIRST_PSEUDO_REGISTER)
8050 || (code != CODE_FOR_nothing
8051 && ! ((*insn_data[code].operand[2].predicate)
8052 (op1, insn_data[code].operand[2].mode))))
8053 tem = op0, op0 = op1, op1 = tem;
8055 gen_reload (out, op0, opnum, type);
8057 /* If OP0 and OP1 are the same, we can use OUT for OP1.
8058 This fixes a problem on the 32K where the stack pointer cannot
8059 be used as an operand of an add insn. */
8061 if (rtx_equal_p (op0, op1))
8062 op1 = out;
8064 insn = emit_insn_if_valid_for_reload (gen_add2_insn (out, op1));
8065 if (insn)
8067 /* Add a REG_EQUIV note so that find_equiv_reg can find it. */
8068 set_unique_reg_note (insn, REG_EQUIV, in);
8069 return insn;
8072 /* If that failed, copy the address register to the reload register.
8073 Then add the constant to the reload register. */
8075 gcc_assert (!reg_overlap_mentioned_p (out, op0));
8076 gen_reload (out, op1, opnum, type);
8077 insn = emit_insn (gen_add2_insn (out, op0));
8078 set_unique_reg_note (insn, REG_EQUIV, in);
8081 #ifdef SECONDARY_MEMORY_NEEDED
8082 /* If we need a memory location to do the move, do it that way. */
8083 else if ((REG_P (in) || GET_CODE (in) == SUBREG)
8084 && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER
8085 && (REG_P (out) || GET_CODE (out) == SUBREG)
8086 && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
8087 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (reg_or_subregno (in)),
8088 REGNO_REG_CLASS (reg_or_subregno (out)),
8089 GET_MODE (out)))
8091 /* Get the memory to use and rewrite both registers to its mode. */
8092 rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
8094 if (GET_MODE (loc) != GET_MODE (out))
8095 out = gen_rtx_REG (GET_MODE (loc), REGNO (out));
8097 if (GET_MODE (loc) != GET_MODE (in))
8098 in = gen_rtx_REG (GET_MODE (loc), REGNO (in));
8100 gen_reload (loc, in, opnum, type);
8101 gen_reload (out, loc, opnum, type);
8103 #endif
8104 else if (REG_P (out) && UNARY_P (in))
8106 rtx insn;
8107 rtx op1;
8108 rtx out_moded;
8109 rtx set;
8111 op1 = find_replacement (&XEXP (in, 0));
8112 if (op1 != XEXP (in, 0))
8113 in = gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in), op1);
8115 /* First, try a plain SET. */
8116 set = emit_insn_if_valid_for_reload (gen_rtx_SET (VOIDmode, out, in));
8117 if (set)
8118 return set;
8120 /* If that failed, move the inner operand to the reload
8121 register, and try the same unop with the inner expression
8122 replaced with the reload register. */
8124 if (GET_MODE (op1) != GET_MODE (out))
8125 out_moded = gen_rtx_REG (GET_MODE (op1), REGNO (out));
8126 else
8127 out_moded = out;
8129 gen_reload (out_moded, op1, opnum, type);
8131 insn
8132 = gen_rtx_SET (VOIDmode, out,
8133 gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in),
8134 out_moded));
8135 insn = emit_insn_if_valid_for_reload (insn);
8136 if (insn)
8138 set_unique_reg_note (insn, REG_EQUIV, in);
8139 return insn;
8142 fatal_insn ("Failure trying to reload:", set);
8144 /* If IN is a simple operand, use gen_move_insn. */
8145 else if (OBJECT_P (in) || GET_CODE (in) == SUBREG)
8147 tem = emit_insn (gen_move_insn (out, in));
8148 /* IN may contain a LABEL_REF, if so add a REG_LABEL_OPERAND note. */
8149 mark_jump_label (in, tem, 0);
8152 #ifdef HAVE_reload_load_address
8153 else if (HAVE_reload_load_address)
8154 emit_insn (gen_reload_load_address (out, in));
8155 #endif
8157 /* Otherwise, just write (set OUT IN) and hope for the best. */
8158 else
8159 emit_insn (gen_rtx_SET (VOIDmode, out, in));
8161 /* Return the first insn emitted.
We cannot just return get_last_insn, because there may have
been multiple instructions emitted. Also note that gen_move_insn may
emit more than one insn itself, so we cannot assume that there is one
8165 insn emitted per emit_insn_before call. */
8167 return last ? NEXT_INSN (last) : get_insns ();
8170 /* Delete a previously made output-reload whose result we now believe
8171 is not needed. First we double-check.
8173 INSN is the insn now being processed.
8174 LAST_RELOAD_REG is the hard register number for which we want to delete
8175 the last output reload.
8176 J is the reload-number that originally used REG. The caller has made
8177 certain that reload J doesn't use REG any longer for input. */
8179 static void
8180 delete_output_reload (rtx insn, int j, int last_reload_reg)
8182 rtx output_reload_insn = spill_reg_store[last_reload_reg];
8183 rtx reg = spill_reg_stored_to[last_reload_reg];
8184 int k;
8185 int n_occurrences;
8186 int n_inherited = 0;
8187 rtx i1;
8188 rtx substed;
/* It is possible that this reload was only used to set another reload
that we eliminated earlier; deleting that one deleted this instruction too. */
8192 if (INSN_DELETED_P (output_reload_insn))
8193 return;
8195 /* Get the raw pseudo-register referred to. */
8197 while (GET_CODE (reg) == SUBREG)
8198 reg = SUBREG_REG (reg);
8199 substed = reg_equiv_memory_loc[REGNO (reg)];
8201 /* This is unsafe if the operand occurs more often in the current
8202 insn than it is inherited. */
8203 for (k = n_reloads - 1; k >= 0; k--)
8205 rtx reg2 = rld[k].in;
8206 if (! reg2)
8207 continue;
8208 if (MEM_P (reg2) || reload_override_in[k])
8209 reg2 = rld[k].in_reg;
8210 #ifdef AUTO_INC_DEC
8211 if (rld[k].out && ! rld[k].out_reg)
8212 reg2 = XEXP (rld[k].in_reg, 0);
8213 #endif
8214 while (GET_CODE (reg2) == SUBREG)
8215 reg2 = SUBREG_REG (reg2);
8216 if (rtx_equal_p (reg2, reg))
8218 if (reload_inherited[k] || reload_override_in[k] || k == j)
8219 n_inherited++;
8220 else
8221 return;
8224 n_occurrences = count_occurrences (PATTERN (insn), reg, 0);
8225 if (CALL_P (insn) && CALL_INSN_FUNCTION_USAGE (insn))
8226 n_occurrences += count_occurrences (CALL_INSN_FUNCTION_USAGE (insn),
8227 reg, 0);
8228 if (substed)
8229 n_occurrences += count_occurrences (PATTERN (insn),
8230 eliminate_regs (substed, 0,
8231 NULL_RTX), 0);
8232 for (i1 = reg_equiv_alt_mem_list [REGNO (reg)]; i1; i1 = XEXP (i1, 1))
8234 gcc_assert (!rtx_equal_p (XEXP (i1, 0), substed));
8235 n_occurrences += count_occurrences (PATTERN (insn), XEXP (i1, 0), 0);
8237 if (n_occurrences > n_inherited)
8238 return;
8240 /* If the pseudo-reg we are reloading is no longer referenced
8241 anywhere between the store into it and here,
8242 and we're within the same basic block, then the value can only
8243 pass through the reload reg and end up here.
8244 Otherwise, give up--return. */
8245 for (i1 = NEXT_INSN (output_reload_insn);
8246 i1 != insn; i1 = NEXT_INSN (i1))
8248 if (NOTE_INSN_BASIC_BLOCK_P (i1))
8249 return;
8250 if ((NONJUMP_INSN_P (i1) || CALL_P (i1))
8251 && reg_mentioned_p (reg, PATTERN (i1)))
/* If this is a USE in front of INSN, we only have to check that
8254 there are no more references than accounted for by inheritance. */
8255 while (NONJUMP_INSN_P (i1) && GET_CODE (PATTERN (i1)) == USE)
8257 n_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
8258 i1 = NEXT_INSN (i1);
8260 if (n_occurrences <= n_inherited && i1 == insn)
8261 break;
8262 return;
8266 /* We will be deleting the insn. Remove the spill reg information. */
8267 for (k = hard_regno_nregs[last_reload_reg][GET_MODE (reg)]; k-- > 0; )
8269 spill_reg_store[last_reload_reg + k] = 0;
8270 spill_reg_stored_to[last_reload_reg + k] = 0;
8273 /* The caller has already checked that REG dies or is set in INSN.
8274 It has also checked that we are optimizing, and thus some
8275 inaccuracies in the debugging information are acceptable.
8276 So we could just delete output_reload_insn. But in some cases
8277 we can improve the debugging information without sacrificing
8278 optimization - maybe even improving the code: See if the pseudo
8279 reg has been completely replaced with reload regs. If so, delete
8280 the store insn and forget we had a stack slot for the pseudo. */
8281 if (rld[j].out != rld[j].in
8282 && REG_N_DEATHS (REGNO (reg)) == 1
8283 && REG_N_SETS (REGNO (reg)) == 1
8284 && REG_BASIC_BLOCK (REGNO (reg)) >= NUM_FIXED_BLOCKS
8285 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
8287 rtx i2;
8289 /* We know that it was used only between here and the beginning of
8290 the current basic block. (We also know that the last use before
8291 INSN was the output reload we are thinking of deleting, but never
8292 mind that.) Search that range; see if any ref remains. */
8293 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
8295 rtx set = single_set (i2);
8297 /* Uses which just store in the pseudo don't count,
8298 since if they are the only uses, they are dead. */
8299 if (set != 0 && SET_DEST (set) == reg)
8300 continue;
8301 if (LABEL_P (i2)
8302 || JUMP_P (i2))
8303 break;
8304 if ((NONJUMP_INSN_P (i2) || CALL_P (i2))
8305 && reg_mentioned_p (reg, PATTERN (i2)))
8307 /* Some other ref remains; just delete the output reload we
8308 know to be dead. */
8309 delete_address_reloads (output_reload_insn, insn);
8310 delete_insn (output_reload_insn);
8311 return;
8315 /* Delete the now-dead stores into this pseudo. Note that this
8316 loop also takes care of deleting output_reload_insn. */
8317 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
8319 rtx set = single_set (i2);
8321 if (set != 0 && SET_DEST (set) == reg)
8323 delete_address_reloads (i2, insn);
8324 delete_insn (i2);
8326 if (LABEL_P (i2)
8327 || JUMP_P (i2))
8328 break;
8331 /* For the debugging info, say the pseudo lives in this reload reg. */
8332 reg_renumber[REGNO (reg)] = REGNO (rld[j].reg_rtx);
8333 if (flag_ira)
8334 mark_allocation_change (REGNO (reg));
8335 alter_reg (REGNO (reg), -1, false);
8337 else
8339 delete_address_reloads (output_reload_insn, insn);
8340 delete_insn (output_reload_insn);
8344 /* We are going to delete DEAD_INSN. Recursively delete loads of
8345 reload registers used in DEAD_INSN that are not used till CURRENT_INSN.
8346 CURRENT_INSN is being reloaded, so we have to check its reloads too. */
8347 static void
8348 delete_address_reloads (rtx dead_insn, rtx current_insn)
8350 rtx set = single_set (dead_insn);
8351 rtx set2, dst, prev, next;
8352 if (set)
8354 rtx dst = SET_DEST (set);
8355 if (MEM_P (dst))
8356 delete_address_reloads_1 (dead_insn, XEXP (dst, 0), current_insn);
8358 /* If we deleted the store from a reloaded post_{in,de}c expression,
8359 we can delete the matching adds. */
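/* For instance (register and amount assumed for illustration), PREV and
NEXT might look like
(set (reg R) (plus (reg R) (const_int 4)))
(set (reg R) (plus (reg R) (const_int -4)))
as inc_for_reload emits around a post-increment store; the checks below
verify exactly this shape before deleting both. */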
8360 prev = PREV_INSN (dead_insn);
8361 next = NEXT_INSN (dead_insn);
8362 if (! prev || ! next)
8363 return;
8364 set = single_set (next);
8365 set2 = single_set (prev);
8366 if (! set || ! set2
8367 || GET_CODE (SET_SRC (set)) != PLUS || GET_CODE (SET_SRC (set2)) != PLUS
8368 || GET_CODE (XEXP (SET_SRC (set), 1)) != CONST_INT
8369 || GET_CODE (XEXP (SET_SRC (set2), 1)) != CONST_INT)
8370 return;
8371 dst = SET_DEST (set);
8372 if (! rtx_equal_p (dst, SET_DEST (set2))
8373 || ! rtx_equal_p (dst, XEXP (SET_SRC (set), 0))
8374 || ! rtx_equal_p (dst, XEXP (SET_SRC (set2), 0))
8375 || (INTVAL (XEXP (SET_SRC (set), 1))
8376 != -INTVAL (XEXP (SET_SRC (set2), 1))))
8377 return;
8378 delete_related_insns (prev);
8379 delete_related_insns (next);
8382 /* Subfunction of delete_address_reloads: process registers found in X. */
8383 static void
8384 delete_address_reloads_1 (rtx dead_insn, rtx x, rtx current_insn)
8386 rtx prev, set, dst, i2;
8387 int i, j;
8388 enum rtx_code code = GET_CODE (x);
8390 if (code != REG)
8392 const char *fmt = GET_RTX_FORMAT (code);
8393 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8395 if (fmt[i] == 'e')
8396 delete_address_reloads_1 (dead_insn, XEXP (x, i), current_insn);
8397 else if (fmt[i] == 'E')
8399 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8400 delete_address_reloads_1 (dead_insn, XVECEXP (x, i, j),
8401 current_insn);
8404 return;
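/* Nothing to do unless X is one of the spill registers currently in use
for reloads; only those could have been loaded by a reload insn that we
are allowed to delete. */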
8407 if (spill_reg_order[REGNO (x)] < 0)
8408 return;
/* Scan backwards for the insn that sets X. This might be quite far
back, due to inheritance. */
8412 for (prev = PREV_INSN (dead_insn); prev; prev = PREV_INSN (prev))
8414 code = GET_CODE (prev);
8415 if (code == CODE_LABEL || code == JUMP_INSN)
8416 return;
8417 if (!INSN_P (prev))
8418 continue;
8419 if (reg_set_p (x, PATTERN (prev)))
8420 break;
8421 if (reg_referenced_p (x, PATTERN (prev)))
8422 return;
8424 if (! prev || INSN_UID (prev) < reload_first_uid)
8425 return;
8426 /* Check that PREV only sets the reload register. */
8427 set = single_set (prev);
8428 if (! set)
8429 return;
8430 dst = SET_DEST (set);
8431 if (!REG_P (dst)
8432 || ! rtx_equal_p (dst, x))
8433 return;
8434 if (! reg_set_p (dst, PATTERN (dead_insn)))
8436 /* Check if DST was used in a later insn -
8437 it might have been inherited. */
8438 for (i2 = NEXT_INSN (dead_insn); i2; i2 = NEXT_INSN (i2))
8440 if (LABEL_P (i2))
8441 break;
8442 if (! INSN_P (i2))
8443 continue;
8444 if (reg_referenced_p (dst, PATTERN (i2)))
8446 /* If there is a reference to the register in the current insn,
8447 it might be loaded in a non-inherited reload. If no other
8448 reload uses it, that means the register is set before
8449 referenced. */
8450 if (i2 == current_insn)
8452 for (j = n_reloads - 1; j >= 0; j--)
8453 if ((rld[j].reg_rtx == dst && reload_inherited[j])
8454 || reload_override_in[j] == dst)
8455 return;
8456 for (j = n_reloads - 1; j >= 0; j--)
8457 if (rld[j].in && rld[j].reg_rtx == dst)
8458 break;
8459 if (j >= 0)
8460 break;
8462 return;
8464 if (JUMP_P (i2))
8465 break;
8466 /* If DST is still live at CURRENT_INSN, check if it is used for
8467 any reload. Note that even if CURRENT_INSN sets DST, we still
8468 have to check the reloads. */
8469 if (i2 == current_insn)
8471 for (j = n_reloads - 1; j >= 0; j--)
8472 if ((rld[j].reg_rtx == dst && reload_inherited[j])
8473 || reload_override_in[j] == dst)
8474 return;
8475 /* ??? We can't finish the loop here, because dst might be
8476 allocated to a pseudo in this block if no reload in this
8477 block needs any of the classes containing DST - see
8478 spill_hard_reg. There is no easy way to tell this, so we
8479 have to scan till the end of the basic block. */
8481 if (reg_set_p (dst, PATTERN (i2)))
8482 break;
8485 delete_address_reloads_1 (prev, SET_SRC (set), current_insn);
8486 reg_reloaded_contents[REGNO (dst)] = -1;
8487 delete_insn (prev);
8490 /* Output reload-insns to reload VALUE into RELOADREG.
8491 VALUE is an autoincrement or autodecrement RTX whose operand
8492 is a register or memory location;
8493 so reloading involves incrementing that location.
8494 IN is either identical to VALUE, or some cheaper place to reload from.
8496 INC_AMOUNT is the number to increment or decrement by (always positive).
8497 This cannot be deduced from VALUE.
8499 Return the instruction that stores into RELOADREG. */
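/* As an illustration (operands assumed), when reloading
(post_inc:SI (reg:SI R)) for a 4-byte access, VALUE is that POST_INC
expression, IN is either VALUE itself or a cheaper place to reload
from, and INC_AMOUNT is 4. */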
8501 static rtx
8502 inc_for_reload (rtx reloadreg, rtx in, rtx value, int inc_amount)
8504 /* REG or MEM to be copied and incremented. */
8505 rtx incloc = find_replacement (&XEXP (value, 0));
8506 /* Nonzero if increment after copying. */
8507 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC
8508 || GET_CODE (value) == POST_MODIFY);
8509 rtx last;
8510 rtx inc;
8511 rtx add_insn;
8512 int code;
8513 rtx store;
8514 rtx real_in = in == value ? incloc : in;
8516 /* No hard register is equivalent to this register after
8517 inc/dec operation. If REG_LAST_RELOAD_REG were nonzero,
8518 we could inc/dec that register as well (maybe even using it for
8519 the source), but I'm not sure it's worth worrying about. */
8520 if (REG_P (incloc))
8521 reg_last_reload_reg[REGNO (incloc)] = 0;
8523 if (GET_CODE (value) == PRE_MODIFY || GET_CODE (value) == POST_MODIFY)
8525 gcc_assert (GET_CODE (XEXP (value, 1)) == PLUS);
8526 inc = find_replacement (&XEXP (XEXP (value, 1), 1));
8528 else
8530 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
8531 inc_amount = -inc_amount;
8533 inc = GEN_INT (inc_amount);
8536 /* If this is post-increment, first copy the location to the reload reg. */
8537 if (post && real_in != reloadreg)
8538 emit_insn (gen_move_insn (reloadreg, real_in));
8540 if (in == value)
8542 /* See if we can directly increment INCLOC. Use a method similar to
8543 that in gen_reload. */
8545 last = get_last_insn ();
8546 add_insn = emit_insn (gen_rtx_SET (VOIDmode, incloc,
8547 gen_rtx_PLUS (GET_MODE (incloc),
8548 incloc, inc)));
8550 code = recog_memoized (add_insn);
8551 if (code >= 0)
8553 extract_insn (add_insn);
8554 if (constrain_operands (1))
8556 /* If this is a pre-increment and we have incremented the value
8557 where it lives, copy the incremented value to RELOADREG to
8558 be used as an address. */
8560 if (! post)
8561 emit_insn (gen_move_insn (reloadreg, incloc));
8563 return add_insn;
8566 delete_insns_since (last);
/* If we couldn't do the increment directly, we must increment in RELOADREG.
8570 The way we do this depends on whether this is pre- or post-increment.
8571 For pre-increment, copy INCLOC to the reload register, increment it
8572 there, then save back. */
8574 if (! post)
8576 if (in != reloadreg)
8577 emit_insn (gen_move_insn (reloadreg, real_in));
8578 emit_insn (gen_add2_insn (reloadreg, inc));
8579 store = emit_insn (gen_move_insn (incloc, reloadreg));
8581 else
8583 /* Postincrement.
8584 Because this might be a jump insn or a compare, and because RELOADREG
8585 may not be available after the insn in an input reload, we must do
8586 the incrementation before the insn being reloaded for.
8588 We have already copied IN to RELOADREG. Increment the copy in
8589 RELOADREG, save that back, then decrement RELOADREG so it has
8590 the original value. */
8592 emit_insn (gen_add2_insn (reloadreg, inc));
8593 store = emit_insn (gen_move_insn (incloc, reloadreg));
8594 if (GET_CODE (inc) == CONST_INT)
8595 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-INTVAL (inc))));
8596 else
8597 emit_insn (gen_sub2_insn (reloadreg, inc));
8600 return store;
8603 #ifdef AUTO_INC_DEC
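/* Scan X, which is part of INSN, and add to INSN a REG_INC note for the
register behind every MEM whose address is an auto-increment or
auto-decrement expression. */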
8604 static void
8605 add_auto_inc_notes (rtx insn, rtx x)
8607 enum rtx_code code = GET_CODE (x);
8608 const char *fmt;
8609 int i, j;
8611 if (code == MEM && auto_inc_p (XEXP (x, 0)))
8613 REG_NOTES (insn)
8614 = gen_rtx_EXPR_LIST (REG_INC, XEXP (XEXP (x, 0), 0), REG_NOTES (insn));
8615 return;
8618 /* Scan all the operand sub-expressions. */
8619 fmt = GET_RTX_FORMAT (code);
8620 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8622 if (fmt[i] == 'e')
8623 add_auto_inc_notes (insn, XEXP (x, i));
8624 else if (fmt[i] == 'E')
8625 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8626 add_auto_inc_notes (insn, XVECEXP (x, i, j));
8629 #endif
8631 /* Copy EH notes from an insn to its reloads. */
8632 static void
8633 copy_eh_notes (rtx insn, rtx x)
8635 rtx eh_note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
8636 if (eh_note)
8638 for (; x != 0; x = NEXT_INSN (x))
8640 if (may_trap_p (PATTERN (x)))
8641 REG_NOTES (x)
8642 = gen_rtx_EXPR_LIST (REG_EH_REGION, XEXP (eh_note, 0),
8643 REG_NOTES (x));
/* This is used by the reload pass, which emits some instructions after
abnormal calls that move the basic block end, although it really wants
to emit them on the edge. Look for abnormal call edges, search backward
for the proper call, and fix the damage.
Instructions that throw exceptions internally are handled similarly. */
8654 void
8655 fixup_abnormal_edges (void)
8657 bool inserted = false;
8658 basic_block bb;
8660 FOR_EACH_BB (bb)
8662 edge e;
8663 edge_iterator ei;
8665 /* Look for cases we are interested in - calls or instructions causing
8666 exceptions. */
8667 FOR_EACH_EDGE (e, ei, bb->succs)
8669 if (e->flags & EDGE_ABNORMAL_CALL)
8670 break;
8671 if ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
8672 == (EDGE_ABNORMAL | EDGE_EH))
8673 break;
8675 if (e && !CALL_P (BB_END (bb))
8676 && !can_throw_internal (BB_END (bb)))
8678 rtx insn;
8680 /* Get past the new insns generated. Allow notes, as the insns
may already have been deleted. */
8682 insn = BB_END (bb);
8683 while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
8684 && !can_throw_internal (insn)
8685 && insn != BB_HEAD (bb))
8686 insn = PREV_INSN (insn);
8688 if (CALL_P (insn) || can_throw_internal (insn))
8690 rtx stop, next;
8692 stop = NEXT_INSN (BB_END (bb));
8693 BB_END (bb) = insn;
8694 insn = NEXT_INSN (insn);
8696 FOR_EACH_EDGE (e, ei, bb->succs)
8697 if (e->flags & EDGE_FALLTHRU)
8698 break;
8700 while (insn && insn != stop)
8702 next = NEXT_INSN (insn);
8703 if (INSN_P (insn))
8705 delete_insn (insn);
8707 /* Sometimes there's still the return value USE.
8708 If it's placed after a trapping call (i.e. that
8709 call is the last insn anyway), we have no fallthru
8710 edge. Simply delete this use and don't try to insert
8711 on the non-existent edge. */
8712 if (GET_CODE (PATTERN (insn)) != USE)
8714 /* We're not deleting it, we're moving it. */
8715 INSN_DELETED_P (insn) = 0;
8716 PREV_INSN (insn) = NULL_RTX;
8717 NEXT_INSN (insn) = NULL_RTX;
8719 insert_insn_on_edge (insn, e);
8720 inserted = true;
8723 else if (!BARRIER_P (insn))
8724 set_block_for_insn (insn, NULL);
8725 insn = next;
8729 /* It may be that we don't find any such trapping insn. In this
case we discovered quite late that the insn that had been
marked as can_throw_internal in fact couldn't trap at all,
so we should delete the EH edges out of the block. */
8733 else
8734 purge_dead_edges (bb);
/* We've possibly turned a single trapping insn into multiple ones. */
8739 if (flag_non_call_exceptions)
8741 sbitmap blocks;
8742 blocks = sbitmap_alloc (last_basic_block);
8743 sbitmap_ones (blocks);
8744 find_many_sub_basic_blocks (blocks);
8745 sbitmap_free (blocks);
8748 if (inserted)
8749 commit_edge_insertions ();
8751 #ifdef ENABLE_CHECKING
8752 /* Verify that we didn't turn one trapping insn into many, and that
8753 we found and corrected all of the problems wrt fixups on the
8754 fallthru edge. */
8755 verify_flow_info ();
8756 #endif