1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
27 #include "machmode.h"
28 #include "hard-reg-set.h"
29 #include "rtl.h"
30 #include "tm_p.h"
31 #include "obstack.h"
32 #include "insn-config.h"
33 #include "flags.h"
34 #include "function.h"
35 #include "expr.h"
36 #include "optabs.h"
37 #include "regs.h"
38 #include "addresses.h"
39 #include "basic-block.h"
40 #include "reload.h"
41 #include "recog.h"
42 #include "output.h"
43 #include "real.h"
44 #include "toplev.h"
45 #include "except.h"
46 #include "tree.h"
47 #include "ira.h"
48 #include "df.h"
49 #include "target.h"
50 #include "dse.h"
52 /* This file contains the reload pass of the compiler, which is
53 run after register allocation has been done. It checks that
54 each insn is valid (operands required to be in registers really
55 are in registers of the proper class) and fixes up invalid ones
56 by copying values temporarily into registers for the insns
57 that need them.
59 The results of register allocation are described by the vector
60 reg_renumber; the insns still contain pseudo regs, but reg_renumber
61 can be used to find which hard reg, if any, a pseudo reg is in.
63 The technique we always use is to free up a few hard regs that are
64 called ``reload regs'', and for each place where a pseudo reg
65 must be in a hard reg, copy it temporarily into one of the reload regs.
67 Reload regs are allocated locally for every instruction that needs
68 reloads. When there are pseudos which are allocated to a register that
69 has been chosen as a reload reg, such pseudos must be ``spilled''.
70 This means that they go to other hard regs, or to stack slots if no other
71 available hard regs can be found. Spilling can invalidate more
72 insns, requiring additional need for reloads, so we must keep checking
73 until the process stabilizes.
75 For machines with different classes of registers, we must keep track
76 of the register class needed for each reload, and make sure that
77 we allocate enough reload registers of each class.
79 The file reload.c contains the code that checks one insn for
80 validity and reports the reloads that it needs. This file
81 is in charge of scanning the entire rtl code, accumulating the
82 reload needs, spilling, assigning reload registers to use for
83 fixing up each insn, and generating the new insns to copy values
84 into the reload registers. */
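/* For orientation, the process described above is a fixed-point
   iteration.  The sketch below is illustrative only; the helper names
   are placeholders rather than the functions defined later in this
   file (reload () below is the real driver).  */
#if 0
static void
reload_driver_sketch (void)
{
  int something_changed;

  do
    {
      something_changed = 0;
      /* Scan every insn and record the reloads (and register classes)
         it needs.  */
      collect_reload_needs ();
      /* Pick hard regs to serve as reload regs; spilling the pseudos
         that lived in them may create new reload needs.  */
      something_changed |= spill_displaced_pseudos ();
    }
  while (something_changed);

  /* Finally rewrite the insns, emitting the copies into and out of the
     chosen reload registers.  */
  emit_reload_copies ();
}
#endif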
86 /* During reload_as_needed, element N contains a REG rtx for the hard reg
87 into which reg N has been reloaded (perhaps for a previous insn). */
88 static rtx *reg_last_reload_reg;
90 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
91 for an output reload that stores into reg N. */
92 static regset_head reg_has_output_reload;
94 /* Indicates which hard regs are reload-registers for an output reload
95 in the current insn. */
96 static HARD_REG_SET reg_is_output_reload;
98 /* Element N is the constant value to which pseudo reg N is equivalent,
99 or zero if pseudo reg N is not equivalent to a constant.
100 find_reloads looks at this in order to replace pseudo reg N
101 with the constant it stands for. */
102 rtx *reg_equiv_constant;
104 /* Element N is an invariant value to which pseudo reg N is equivalent.
105 eliminate_regs_in_insn uses this to replace pseudos in particular
106 contexts. */
107 rtx *reg_equiv_invariant;
109 /* Element N is a memory location to which pseudo reg N is equivalent,
110 prior to any register elimination (such as frame pointer to stack
111 pointer). Depending on whether or not it is a valid address, this value
112 is transferred to either reg_equiv_address or reg_equiv_mem. */
113 rtx *reg_equiv_memory_loc;
115 /* We allocate reg_equiv_memory_loc inside a varray so that the garbage
116 collector can keep track of what is inside. */
117 VEC(rtx,gc) *reg_equiv_memory_loc_vec;
119 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
120 This is used when the address is not valid as a memory address
121 (because its displacement is too big for the machine.) */
122 rtx *reg_equiv_address;
124 /* Element N is the memory slot to which pseudo reg N is equivalent,
125 or zero if pseudo reg N is not equivalent to a memory slot. */
126 rtx *reg_equiv_mem;
128 /* Element N is an EXPR_LIST of REG_EQUIVs containing MEMs with
129 alternate representations of the location of pseudo reg N. */
130 rtx *reg_equiv_alt_mem_list;
132 /* Widest width in which each pseudo reg is referred to (via subreg). */
133 static unsigned int *reg_max_ref_width;
135 /* Element N is the list of insns that initialized reg N from its equivalent
136 constant or memory slot. */
137 rtx *reg_equiv_init;
138 int reg_equiv_init_size;
140 /* Vector to remember old contents of reg_renumber before spilling. */
141 static short *reg_old_renumber;
143 /* During reload_as_needed, element N contains the last pseudo regno reloaded
144 into hard register N. If that pseudo reg occupied more than one register,
145 reg_reloaded_contents points to that pseudo for each spill register in
146 use; all of these must remain set for an inheritance to occur. */
147 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
149 /* During reload_as_needed, element N contains the insn for which
150 hard register N was last used. Its contents are significant only
151 when reg_reloaded_valid is set for this register. */
152 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
154 /* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid. */
155 static HARD_REG_SET reg_reloaded_valid;
156 /* Indicate if the register was dead at the end of the reload.
157 This is only valid if reg_reloaded_contents is set and valid. */
158 static HARD_REG_SET reg_reloaded_dead;
160 /* Indicate whether the register's current value is one that is not
161 safe to retain across a call, even for registers that are normally
162 call-saved. */
163 static HARD_REG_SET reg_reloaded_call_part_clobbered;
165 /* Number of spill-regs so far; number of valid elements of spill_regs. */
166 static int n_spills;
168 /* In parallel with spill_regs, contains REG rtx's for those regs.
169 Holds the last rtx used for any given reg, or 0 if it has never
170 been used for spilling yet. This rtx is reused, provided it has
171 the proper mode. */
172 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
174 /* In parallel with spill_regs, contains nonzero for a spill reg
175 that was stored after the last time it was used.
176 The precise value is the insn generated to do the store. */
177 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
179 /* This is the register that was stored with spill_reg_store. This is a
180 copy of reload_out / reload_out_reg when the value was stored; if
181 reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg. */
182 static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER];
184 /* This table is the inverse mapping of spill_regs:
185 indexed by hard reg number,
186 it contains the position of that reg in spill_regs,
187 or -1 for something that is not in spill_regs.
189 ?!? This is no longer accurate. */
190 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
192 /* This reg set indicates registers that can't be used as spill registers for
193 the currently processed insn. These are the hard registers which are live
194 during the insn, but not allocated to pseudos, as well as fixed
195 registers. */
196 static HARD_REG_SET bad_spill_regs;
198 /* These are the hard registers that can't be used as spill register for any
199 insn. This includes registers used for user variables and registers that
200 we can't eliminate. A register that appears in this set also can't be used
201 to retry register allocation. */
202 static HARD_REG_SET bad_spill_regs_global;
204 /* Describes order of use of registers for reloading
205 of spilled pseudo-registers. `n_spills' is the number of
206 elements that are actually valid; new ones are added at the end.
208 Both spill_regs and spill_reg_order are used on two occasions:
209 once during find_reload_regs, where they keep track of the spill registers
210 for a single insn, but also during reload_as_needed where they show all
211 the registers ever used by reload. For the latter case, the information
212 is calculated during finish_spills. */
213 static short spill_regs[FIRST_PSEUDO_REGISTER];
215 /* This vector of reg sets indicates, for each pseudo, which hard registers
216 may not be used for retrying global allocation because the register was
217 formerly spilled from one of them. If we allowed reallocating a pseudo to
218 a register that it was already allocated to, reload might not
219 terminate. */
220 static HARD_REG_SET *pseudo_previous_regs;
222 /* This vector of reg sets indicates, for each pseudo, which hard
223 registers may not be used for retrying global allocation because they
224 are used as spill registers during one of the insns in which the
225 pseudo is live. */
226 static HARD_REG_SET *pseudo_forbidden_regs;
228 /* All hard regs that have been used as spill registers for any insn are
229 marked in this set. */
230 static HARD_REG_SET used_spill_regs;
232 /* Index of last register assigned as a spill register. We allocate in
233 a round-robin fashion. */
234 static int last_spill_reg;
236 /* Nonzero if indirect addressing is supported on the machine; this means
237 that spilling (REG n) does not require reloading it into a register in
238 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
239 value indicates the level of indirect addressing supported, e.g., two
240 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
241 a hard register. */
242 static char spill_indirect_levels;
244 /* Nonzero if indirect addressing is supported when the innermost MEM is
245 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
246 which these are valid is the same as spill_indirect_levels, above. */
247 char indirect_symref_ok;
249 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
250 char double_reg_address_ok;
252 /* Record the stack slot for each spilled hard register. */
253 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
255 /* Width allocated so far for that stack slot. */
256 static unsigned int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
258 /* Record which pseudos needed to be spilled. */
259 static regset_head spilled_pseudos;
261 /* Used for communication between order_regs_for_reload and count_pseudo.
262 Used to avoid counting one pseudo twice. */
263 static regset_head pseudos_counted;
265 /* First uid used by insns created by reload in this function.
266 Used in find_equiv_reg. */
267 int reload_first_uid;
269 /* Flag set by local-alloc or global-alloc if anything is live in
270 a call-clobbered reg across calls. */
271 int caller_save_needed;
273 /* Set to 1 while reload_as_needed is operating.
274 Required by some machines to handle any generated moves differently. */
275 int reload_in_progress = 0;
277 /* These arrays record the insn_code of insns that may be needed to
278 perform input and output reloads of special objects. They provide a
279 place to pass a scratch register. */
280 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
281 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
283 /* This obstack is used for allocation of rtl during register elimination.
284 The allocated storage can be freed once find_reloads has processed the
285 insn. */
286 static struct obstack reload_obstack;
288 /* Points to the beginning of the reload_obstack. All insn_chain structures
289 are allocated first. */
290 static char *reload_startobj;
292 /* The point after all insn_chain structures. Used to quickly deallocate
293 memory allocated in copy_reloads during calculate_needs_all_insns. */
294 static char *reload_firstobj;
296 /* This points before all local rtl generated by register elimination.
297 Used to quickly free all memory after processing one insn. */
298 static char *reload_insn_firstobj;
300 /* List of insn_chain instructions, one for every insn that reload needs to
301 examine. */
302 struct insn_chain *reload_insn_chain;
304 /* List of all insns needing reloads. */
305 static struct insn_chain *insns_need_reload;
307 /* This structure is used to record information about register eliminations.
308 Each array entry describes one possible way of eliminating a register
309 in favor of another. If there is more than one way of eliminating a
310 particular register, the most preferred should be specified first. */
312 struct elim_table
314 int from; /* Register number to be eliminated. */
315 int to; /* Register number used as replacement. */
316 HOST_WIDE_INT initial_offset; /* Initial difference between values. */
317 int can_eliminate; /* Nonzero if this elimination can be done. */
318 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
319 insns made by reload. */
320 HOST_WIDE_INT offset; /* Current offset between the two regs. */
321 HOST_WIDE_INT previous_offset;/* Offset at end of previous insn. */
322 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
323 rtx from_rtx; /* REG rtx for the register to be eliminated.
324 We cannot simply compare the number since
325 we might then spuriously replace a hard
326 register corresponding to a pseudo
327 assigned to the reg to be eliminated. */
328 rtx to_rtx; /* REG rtx for the replacement. */
331 static struct elim_table *reg_eliminate = 0;
333 /* This is an intermediate structure to initialize the table. It has
334 exactly the members provided by ELIMINABLE_REGS. */
335 static const struct elim_table_1
337 const int from;
338 const int to;
339 } reg_eliminate_1[] =
341 /* If a set of eliminable registers was specified, define the table from it.
342 Otherwise, default to the normal case of the frame pointer being
343 replaced by the stack pointer. */
345 #ifdef ELIMINABLE_REGS
346 ELIMINABLE_REGS;
347 #else
348 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
349 #endif
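/* For illustration only: when a target does define ELIMINABLE_REGS, its
   target header typically lists several candidate pairs, most preferred
   first, along the lines of the following (the register macros shown
   are just an example, not taken from any particular port):  */
#if 0
#define ELIMINABLE_REGS					\
{{ ARG_POINTER_REGNUM,   STACK_POINTER_REGNUM },	\
 { ARG_POINTER_REGNUM,   HARD_FRAME_POINTER_REGNUM },	\
 { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM },	\
 { FRAME_POINTER_REGNUM, HARD_FRAME_POINTER_REGNUM }}
#endif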
351 #define NUM_ELIMINABLE_REGS ARRAY_SIZE (reg_eliminate_1)
353 /* Record the number of pending eliminations that have an offset not equal
354 to their initial offset. If nonzero, we use a new copy of each
355 replacement result in any insns encountered. */
356 int num_not_at_initial_offset;
358 /* Count the number of registers that we may be able to eliminate. */
359 static int num_eliminable;
360 /* And the number of registers that are equivalent to a constant that
361 can be eliminated to frame_pointer / arg_pointer + constant. */
362 static int num_eliminable_invariants;
364 /* For each label, we record the offset of each elimination. If we reach
365 a label by more than one path and an offset differs, we cannot do the
366 elimination. This information is indexed by the difference of the
367 number of the label and the first label number. We can't offset the
368 pointer itself as this can cause problems on machines with segmented
369 memory. The first table is an array of flags that records whether we
370 have yet encountered a label and the second table is an array of arrays,
371 one entry in the latter array for each elimination. */
373 static int first_label_num;
374 static char *offsets_known_at;
375 static HOST_WIDE_INT (*offsets_at)[NUM_ELIMINABLE_REGS];
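/* Illustrative sketch of the indexing scheme described above; LABEL and
   ELIM_INDEX are placeholders here (see set_offsets_for_label for the
   real lookup):  */
#if 0
{
  int label_index = CODE_LABEL_NUMBER (label) - first_label_num;

  if (offsets_known_at[label_index])
    offset = offsets_at[label_index][elim_index];
}
#endif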
377 /* Number of labels in the current function. */
379 static int num_labels;
381 static void replace_pseudos_in (rtx *, enum machine_mode, rtx);
382 static void maybe_fix_stack_asms (void);
383 static void copy_reloads (struct insn_chain *);
384 static void calculate_needs_all_insns (int);
385 static int find_reg (struct insn_chain *, int);
386 static void find_reload_regs (struct insn_chain *);
387 static void select_reload_regs (void);
388 static void delete_caller_save_insns (void);
390 static void spill_failure (rtx, enum reg_class);
391 static void count_spilled_pseudo (int, int, int);
392 static void delete_dead_insn (rtx);
393 static void alter_reg (int, int, bool);
394 static void set_label_offsets (rtx, rtx, int);
395 static void check_eliminable_occurrences (rtx);
396 static void elimination_effects (rtx, enum machine_mode);
397 static int eliminate_regs_in_insn (rtx, int);
398 static void update_eliminable_offsets (void);
399 static void mark_not_eliminable (rtx, const_rtx, void *);
400 static void set_initial_elim_offsets (void);
401 static bool verify_initial_elim_offsets (void);
402 static void set_initial_label_offsets (void);
403 static void set_offsets_for_label (rtx);
404 static void init_elim_table (void);
405 static void update_eliminables (HARD_REG_SET *);
406 static void spill_hard_reg (unsigned int, int);
407 static int finish_spills (int);
408 static void scan_paradoxical_subregs (rtx);
409 static void count_pseudo (int);
410 static void order_regs_for_reload (struct insn_chain *);
411 static void reload_as_needed (int);
412 static void forget_old_reloads_1 (rtx, const_rtx, void *);
413 static void forget_marked_reloads (regset);
414 static int reload_reg_class_lower (const void *, const void *);
415 static void mark_reload_reg_in_use (unsigned int, int, enum reload_type,
416 enum machine_mode);
417 static void clear_reload_reg_in_use (unsigned int, int, enum reload_type,
418 enum machine_mode);
419 static int reload_reg_free_p (unsigned int, int, enum reload_type);
420 static int reload_reg_free_for_value_p (int, int, int, enum reload_type,
421 rtx, rtx, int, int);
422 static int free_for_value_p (int, enum machine_mode, int, enum reload_type,
423 rtx, rtx, int, int);
424 static int reload_reg_reaches_end_p (unsigned int, int, enum reload_type);
425 static int allocate_reload_reg (struct insn_chain *, int, int);
426 static int conflicts_with_override (rtx);
427 static void failed_reload (rtx, int);
428 static int set_reload_reg (int, int);
429 static void choose_reload_regs_init (struct insn_chain *, rtx *);
430 static void choose_reload_regs (struct insn_chain *);
431 static void merge_assigned_reloads (rtx);
432 static void emit_input_reload_insns (struct insn_chain *, struct reload *,
433 rtx, int);
434 static void emit_output_reload_insns (struct insn_chain *, struct reload *,
435 int);
436 static void do_input_reload (struct insn_chain *, struct reload *, int);
437 static void do_output_reload (struct insn_chain *, struct reload *, int);
438 static bool inherit_piecemeal_p (int, int);
439 static void emit_reload_insns (struct insn_chain *);
440 static void delete_output_reload (rtx, int, int);
441 static void delete_address_reloads (rtx, rtx);
442 static void delete_address_reloads_1 (rtx, rtx, rtx);
443 static rtx inc_for_reload (rtx, rtx, rtx, int);
444 #ifdef AUTO_INC_DEC
445 static void add_auto_inc_notes (rtx, rtx);
446 #endif
447 static void copy_eh_notes (rtx, rtx);
448 static int reloads_conflict (int, int);
449 static rtx gen_reload (rtx, rtx, int, enum reload_type);
450 static rtx emit_insn_if_valid_for_reload (rtx);
452 /* Initialize the reload pass. This is called at the beginning of compilation
453 and may be called again if the target is reinitialized. */
455 void
456 init_reload (void)
458 int i;
460 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
461 Set spill_indirect_levels to the number of levels such addressing is
462 permitted, zero if it is not permitted at all. */
464 rtx tem
465 = gen_rtx_MEM (Pmode,
466 gen_rtx_PLUS (Pmode,
467 gen_rtx_REG (Pmode,
468 LAST_VIRTUAL_REGISTER + 1),
469 GEN_INT (4)));
470 spill_indirect_levels = 0;
472 while (memory_address_p (QImode, tem))
474 spill_indirect_levels++;
475 tem = gen_rtx_MEM (Pmode, tem);
478 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
480 tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
481 indirect_symref_ok = memory_address_p (QImode, tem);
483 /* See if reg+reg is a valid (and offsettable) address. */
485 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
487 tem = gen_rtx_PLUS (Pmode,
488 gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
489 gen_rtx_REG (Pmode, i));
491 /* This way, we make sure that reg+reg is an offsettable address. */
492 tem = plus_constant (tem, 4);
494 if (memory_address_p (QImode, tem))
496 double_reg_address_ok = 1;
497 break;
501 /* Initialize obstack for our rtl allocation. */
502 gcc_obstack_init (&reload_obstack);
503 reload_startobj = obstack_alloc (&reload_obstack, 0);
505 INIT_REG_SET (&spilled_pseudos);
506 INIT_REG_SET (&pseudos_counted);
509 /* List of insn chains that are currently unused. */
510 static struct insn_chain *unused_insn_chains = 0;
512 /* Allocate an empty insn_chain structure. */
513 struct insn_chain *
514 new_insn_chain (void)
516 struct insn_chain *c;
518 if (unused_insn_chains == 0)
520 c = obstack_alloc (&reload_obstack, sizeof (struct insn_chain));
521 INIT_REG_SET (&c->live_throughout);
522 INIT_REG_SET (&c->dead_or_set);
524 else
526 c = unused_insn_chains;
527 unused_insn_chains = c->next;
529 c->is_caller_save_insn = 0;
530 c->need_operand_change = 0;
531 c->need_reload = 0;
532 c->need_elim = 0;
533 return c;
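/* Illustrative usage sketch: the caller that builds reload_insn_chain
   allocates one element per insn that reload must look at and links the
   elements together, roughly as follows (PREV and INSN here are
   placeholders; see build_insn_chain for the real thing):  */
#if 0
{
  struct insn_chain *c = new_insn_chain ();

  c->prev = prev;
  prev->next = c;
  c->insn = insn;
}
#endif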
536 /* Small utility function to set all regs in hard reg set TO which are
537 allocated to pseudos in regset FROM. */
539 void
540 compute_use_by_pseudos (HARD_REG_SET *to, regset from)
542 unsigned int regno;
543 reg_set_iterator rsi;
545 EXECUTE_IF_SET_IN_REG_SET (from, FIRST_PSEUDO_REGISTER, regno, rsi)
547 int r = reg_renumber[regno];
549 if (r < 0)
551 /* reload_combine uses the information from
552 DF_RA_LIVE_IN (BASIC_BLOCK), which might still
553 contain registers that have not actually been allocated
554 since they have an equivalence. */
555 gcc_assert (reload_completed);
557 else
558 add_to_hard_reg_set (to, PSEUDO_REGNO_MODE (regno), r);
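/* Illustrative usage sketch: collect the hard regs occupied by the
   pseudos in some live set, e.g. the registers live through one
   insn_chain element (CHAIN is a placeholder here):  */
#if 0
{
  HARD_REG_SET used_by_pseudos;

  CLEAR_HARD_REG_SET (used_by_pseudos);
  compute_use_by_pseudos (&used_by_pseudos, &chain->live_throughout);
}
#endif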
562 /* Replace all pseudos found in LOC with their corresponding
563 equivalences. */
565 static void
566 replace_pseudos_in (rtx *loc, enum machine_mode mem_mode, rtx usage)
568 rtx x = *loc;
569 enum rtx_code code;
570 const char *fmt;
571 int i, j;
573 if (! x)
574 return;
576 code = GET_CODE (x);
577 if (code == REG)
579 unsigned int regno = REGNO (x);
581 if (regno < FIRST_PSEUDO_REGISTER)
582 return;
584 x = eliminate_regs (x, mem_mode, usage);
585 if (x != *loc)
587 *loc = x;
588 replace_pseudos_in (loc, mem_mode, usage);
589 return;
592 if (reg_equiv_constant[regno])
593 *loc = reg_equiv_constant[regno];
594 else if (reg_equiv_mem[regno])
595 *loc = reg_equiv_mem[regno];
596 else if (reg_equiv_address[regno])
597 *loc = gen_rtx_MEM (GET_MODE (x), reg_equiv_address[regno]);
598 else
600 gcc_assert (!REG_P (regno_reg_rtx[regno])
601 || REGNO (regno_reg_rtx[regno]) != regno);
602 *loc = regno_reg_rtx[regno];
605 return;
607 else if (code == MEM)
609 replace_pseudos_in (& XEXP (x, 0), GET_MODE (x), usage);
610 return;
613 /* Process each of our operands recursively. */
614 fmt = GET_RTX_FORMAT (code);
615 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
616 if (*fmt == 'e')
617 replace_pseudos_in (&XEXP (x, i), mem_mode, usage);
618 else if (*fmt == 'E')
619 for (j = 0; j < XVECLEN (x, i); j++)
620 replace_pseudos_in (& XVECEXP (x, i, j), mem_mode, usage);
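/* Illustrative usage sketch (reload () below applies this to
   CALL_INSN_FUNCTION_USAGE and to surviving CLOBBERs once pseudos have
   been eliminated):  */
#if 0
replace_pseudos_in (& XEXP (PATTERN (insn), 0), VOIDmode, PATTERN (insn));
#endif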
623 /* Determine if the current function has an exception receiver block
624 that reaches the exit block via non-exceptional edges. */
626 static bool
627 has_nonexceptional_receiver (void)
629 edge e;
630 edge_iterator ei;
631 basic_block *tos, *worklist, bb;
633 /* If we're not optimizing, then just err on the safe side. */
634 if (!optimize)
635 return true;
637 /* First determine which blocks can reach exit via normal paths. */
638 tos = worklist = xmalloc (sizeof (basic_block) * (n_basic_blocks + 1));
640 FOR_EACH_BB (bb)
641 bb->flags &= ~BB_REACHABLE;
643 /* Place the exit block on our worklist. */
644 EXIT_BLOCK_PTR->flags |= BB_REACHABLE;
645 *tos++ = EXIT_BLOCK_PTR;
647 /* Iterate: find everything reachable from what we've already seen. */
648 while (tos != worklist)
650 bb = *--tos;
652 FOR_EACH_EDGE (e, ei, bb->preds)
653 if (!(e->flags & EDGE_ABNORMAL))
655 basic_block src = e->src;
657 if (!(src->flags & BB_REACHABLE))
659 src->flags |= BB_REACHABLE;
660 *tos++ = src;
664 free (worklist);
666 /* Now see if there's a reachable block with an exceptional incoming
667 edge. */
668 FOR_EACH_BB (bb)
669 if (bb->flags & BB_REACHABLE)
670 FOR_EACH_EDGE (e, ei, bb->preds)
671 if (e->flags & EDGE_ABNORMAL)
672 return true;
674 /* No exceptional block reached exit unexceptionally. */
675 return false;
679 /* Global variables used by reload and its subroutines. */
681 /* Set during calculate_needs if an insn needs register elimination. */
682 static int something_needs_elimination;
683 /* Set during calculate_needs if an insn needs an operand changed. */
684 static int something_needs_operands_changed;
686 /* Nonzero means we couldn't get enough spill regs. */
687 static int failure;
689 /* The function is used to sort pseudos according to their usage
690 frequencies (putting the most frequently used ones first). */
691 static int
692 pseudo_reg_compare (const void *v1p, const void *v2p)
694 int regno1 = *(int *) v1p;
695 int regno2 = *(int *) v2p;
696 int diff;
698 if ((diff = REG_FREQ (regno2) - REG_FREQ (regno1)) != 0)
699 return diff;
700 return regno1 - regno2;
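/* Illustrative usage sketch (the IRA path in reload () below sorts the
   array of pseudo register numbers exactly like this):  */
#if 0
qsort (pseudo_regs, n, sizeof (int), pseudo_reg_compare);
#endif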
703 /* Main entry point for the reload pass.
705 FIRST is the first insn of the function being compiled.
707 GLOBAL nonzero means we were called from global_alloc
708 and should attempt to reallocate any pseudoregs that we
709 displace from hard regs we will use for reloads.
710 If GLOBAL is zero, we do not have enough information to do that,
711 so any pseudo reg that is spilled must go to the stack.
713 Return value is nonzero if reload failed
714 and we must not do any more for this function. */
716 int
717 reload (rtx first, int global)
719 int i;
720 rtx insn;
721 struct elim_table *ep;
722 basic_block bb;
724 /* Make sure even insns with volatile mem refs are recognizable. */
725 init_recog ();
727 failure = 0;
729 reload_firstobj = obstack_alloc (&reload_obstack, 0);
731 /* Make sure that the last insn in the chain
732 is not something that needs reloading. */
733 emit_note (NOTE_INSN_DELETED);
735 /* Enable find_equiv_reg to distinguish insns made by reload. */
736 reload_first_uid = get_max_uid ();
738 #ifdef SECONDARY_MEMORY_NEEDED
739 /* Initialize the secondary memory table. */
740 clear_secondary_mem ();
741 #endif
743 /* We don't have a stack slot for any spill reg yet. */
744 memset (spill_stack_slot, 0, sizeof spill_stack_slot);
745 memset (spill_stack_slot_width, 0, sizeof spill_stack_slot_width);
747 /* Initialize the save area information for caller-save, in case some
748 are needed. */
749 init_save_areas ();
751 /* Compute which hard registers are now in use
752 as homes for pseudo registers.
753 This is done here rather than (eg) in global_alloc
754 because this point is reached even if not optimizing. */
755 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
756 mark_home_live (i);
758 /* A function that has a nonlocal label that can reach the exit
759 block via non-exceptional paths must save all call-saved
760 registers. */
761 if (current_function_calls_unwind_init
762 || (current_function_has_nonlocal_label
763 && has_nonexceptional_receiver ()))
764 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
765 if (! call_used_regs[i] && ! fixed_regs[i] && ! LOCAL_REGNO (i))
766 df_set_regs_ever_live (i, true);
768 /* Find all the pseudo registers that didn't get hard regs
769 but do have known equivalent constants or memory slots.
770 These include parameters (known equivalent to parameter slots)
771 and cse'd or loop-moved constant memory addresses.
773 Record constant equivalents in reg_equiv_constant
774 so they will be substituted by find_reloads.
775 Record memory equivalents in reg_equiv_memory_loc so they can
776 be substituted eventually by altering the REG-rtx's. */
778 reg_equiv_constant = XCNEWVEC (rtx, max_regno);
779 reg_equiv_invariant = XCNEWVEC (rtx, max_regno);
780 reg_equiv_mem = XCNEWVEC (rtx, max_regno);
781 reg_equiv_alt_mem_list = XCNEWVEC (rtx, max_regno);
782 reg_equiv_address = XCNEWVEC (rtx, max_regno);
783 reg_max_ref_width = XCNEWVEC (unsigned int, max_regno);
784 reg_old_renumber = XCNEWVEC (short, max_regno);
785 memcpy (reg_old_renumber, reg_renumber, max_regno * sizeof (short));
786 pseudo_forbidden_regs = XNEWVEC (HARD_REG_SET, max_regno);
787 pseudo_previous_regs = XCNEWVEC (HARD_REG_SET, max_regno);
789 CLEAR_HARD_REG_SET (bad_spill_regs_global);
791 /* Look for REG_EQUIV notes; record what each pseudo is equivalent
792 to. Also find all paradoxical subregs and find largest such for
793 each pseudo. */
795 num_eliminable_invariants = 0;
796 for (insn = first; insn; insn = NEXT_INSN (insn))
798 rtx set = single_set (insn);
800 /* We may introduce USEs that we want to remove at the end, so
801 we'll mark them with QImode. Make sure there are no
802 previously-marked insns left by say regmove. */
803 if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
804 && GET_MODE (insn) != VOIDmode)
805 PUT_MODE (insn, VOIDmode);
807 if (INSN_P (insn))
808 scan_paradoxical_subregs (PATTERN (insn));
810 if (set != 0 && REG_P (SET_DEST (set)))
812 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
813 rtx x;
815 if (! note)
816 continue;
818 i = REGNO (SET_DEST (set));
819 x = XEXP (note, 0);
821 if (i <= LAST_VIRTUAL_REGISTER)
822 continue;
824 if (! function_invariant_p (x)
825 || ! flag_pic
826 /* A function invariant is often CONSTANT_P but may
827 include a register. We promise to only pass
828 CONSTANT_P objects to LEGITIMATE_PIC_OPERAND_P. */
829 || (CONSTANT_P (x)
830 && LEGITIMATE_PIC_OPERAND_P (x)))
832 /* It can happen that a REG_EQUIV note contains a MEM
833 that is not a legitimate memory operand. As later
834 stages of reload assume that all addresses found
835 in the reg_equiv_* arrays were originally legitimate,
836 we ignore such REG_EQUIV notes. */
837 if (memory_operand (x, VOIDmode))
839 /* Always unshare the equivalence, so we can
840 substitute into this insn without touching the
841 equivalence. */
842 reg_equiv_memory_loc[i] = copy_rtx (x);
844 else if (function_invariant_p (x))
846 if (GET_CODE (x) == PLUS)
848 /* This is PLUS of frame pointer and a constant,
849 and might be shared. Unshare it. */
850 reg_equiv_invariant[i] = copy_rtx (x);
851 num_eliminable_invariants++;
853 else if (x == frame_pointer_rtx || x == arg_pointer_rtx)
855 reg_equiv_invariant[i] = x;
856 num_eliminable_invariants++;
858 else if (LEGITIMATE_CONSTANT_P (x))
859 reg_equiv_constant[i] = x;
860 else
862 reg_equiv_memory_loc[i]
863 = force_const_mem (GET_MODE (SET_DEST (set)), x);
864 if (! reg_equiv_memory_loc[i])
865 reg_equiv_init[i] = NULL_RTX;
868 else
870 reg_equiv_init[i] = NULL_RTX;
871 continue;
874 else
875 reg_equiv_init[i] = NULL_RTX;
879 if (dump_file)
880 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
881 if (reg_equiv_init[i])
883 fprintf (dump_file, "init_insns for %u: ", i);
884 print_inline_rtx (dump_file, reg_equiv_init[i], 20);
885 fprintf (dump_file, "\n");
888 init_elim_table ();
890 first_label_num = get_first_label_num ();
891 num_labels = max_label_num () - first_label_num;
893 /* Allocate the tables used to store offset information at labels. */
894 /* We used to use alloca here, but the size of what it would try to
895 allocate would occasionally cause it to exceed the stack limit and
896 cause a core dump. */
897 offsets_known_at = XNEWVEC (char, num_labels);
898 offsets_at = (HOST_WIDE_INT (*)[NUM_ELIMINABLE_REGS]) xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (HOST_WIDE_INT));
901 int n, *pseudo_regs;
903 /* Alter each pseudo-reg rtx to contain its hard reg number.
904 Assign stack slots to the pseudos that lack hard regs or
905 equivalents. Do not touch virtual registers. */
907 pseudo_regs = XNEWVEC (int, max_regno - LAST_VIRTUAL_REGISTER - 1);
908 for (n = 0, i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
909 pseudo_regs [n++] = i;
911 if (flag_ira)
912 qsort (pseudo_regs, n, sizeof (int), pseudo_reg_compare);
913 if (frame_pointer_needed || ! flag_ira)
914 for (i = 0; i < n; i++)
915 alter_reg (pseudo_regs [i], -1, false);
916 else
917 for (i = n - 1; i >= 0; i--)
918 alter_reg (pseudo_regs [i], -1, false);
919 free (pseudo_regs);
922 /* If we have some registers we think can be eliminated, scan all insns to
923 see if there is an insn that sets one of these registers to something
924 other than itself plus a constant. If so, the register cannot be
925 eliminated. Doing this scan here eliminates an extra pass through the
926 main reload loop in the most common case where register elimination
927 cannot be done. */
928 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
929 if (INSN_P (insn))
930 note_stores (PATTERN (insn), mark_not_eliminable, NULL);
932 maybe_fix_stack_asms ();
934 insns_need_reload = 0;
935 something_needs_elimination = 0;
937 /* Initialize to -1, which means take the first spill register. */
938 last_spill_reg = -1;
940 /* Spill any hard regs that we know we can't eliminate. */
941 CLEAR_HARD_REG_SET (used_spill_regs);
942 /* There can be multiple ways to eliminate a register;
943 they should be listed adjacently.
944 Elimination for any register fails only if all possible ways fail. */
945 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; )
947 int from = ep->from;
948 int can_eliminate = 0;
951 can_eliminate |= ep->can_eliminate;
952 ep++;
954 while (ep < &reg_eliminate[NUM_ELIMINABLE_REGS] && ep->from == from);
955 if (! can_eliminate)
956 spill_hard_reg (from, 1);
959 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
960 if (frame_pointer_needed)
961 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, 1);
962 #endif
963 finish_spills (global);
965 /* From now on, we may need to generate moves differently. We may also
966 allow modifications of insns which cause them to not be recognized.
967 Any such modifications will be cleaned up during reload itself. */
968 reload_in_progress = 1;
970 /* This loop scans the entire function each go-round
971 and repeats until one repetition spills no additional hard regs. */
972 for (;;)
974 int something_changed;
975 int did_spill;
976 HOST_WIDE_INT starting_frame_size;
978 starting_frame_size = get_frame_size ();
980 set_initial_elim_offsets ();
981 set_initial_label_offsets ();
983 /* For each pseudo register that has an equivalent location defined,
984 try to eliminate any eliminable registers (such as the frame pointer)
985 assuming initial offsets for the replacement register, which
986 is the normal case.
988 If the resulting location is directly addressable, substitute
989 the MEM we just got directly for the old REG.
991 If it is not addressable but is a constant or the sum of a hard reg
992 and constant, it is probably not addressable because the constant is
993 out of range, in that case record the address; we will generate
994 hairy code to compute the address in a register each time it is
995 needed. Similarly if it is a hard register, but one that is not
996 valid as an address register.
998 If the location is not addressable, but does not have one of the
999 above forms, assign a stack slot. We have to do this to avoid the
1000 potential of producing lots of reloads if, e.g., a location involves
1001 a pseudo that didn't get a hard register and has an equivalent memory
1002 location that also involves a pseudo that didn't get a hard register.
1004 Perhaps at some point we will improve reload_when_needed handling
1005 so this problem goes away. But that's very hairy. */
1007 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1008 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
1010 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
1012 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
1013 XEXP (x, 0)))
1014 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
1015 else if (CONSTANT_P (XEXP (x, 0))
1016 || (REG_P (XEXP (x, 0))
1017 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
1018 || (GET_CODE (XEXP (x, 0)) == PLUS
1019 && REG_P (XEXP (XEXP (x, 0), 0))
1020 && (REGNO (XEXP (XEXP (x, 0), 0))
1021 < FIRST_PSEUDO_REGISTER)
1022 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
1023 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
1024 else
1026 /* Make a new stack slot. Then indicate that something
1027 changed so we go back and recompute offsets for
1028 eliminable registers because the allocation of memory
1029 below might change some offset. reg_equiv_{mem,address}
1030 will be set up for this pseudo on the next pass around
1031 the loop. */
1032 reg_equiv_memory_loc[i] = 0;
1033 reg_equiv_init[i] = 0;
1034 alter_reg (i, -1, true);
1038 if (caller_save_needed)
1039 setup_save_areas ();
1041 /* If we allocated another stack slot, redo elimination bookkeeping. */
1042 if (starting_frame_size != get_frame_size ())
1043 continue;
1044 if (starting_frame_size && cfun->stack_alignment_needed)
1046 /* If we have a stack frame, we must align it now. The
1047 stack size may be a part of the offset computation for
1048 register elimination. So if this changes the stack size,
1049 then repeat the elimination bookkeeping. We don't
1050 realign when there is no stack, as that will cause a
1051 stack frame when none is needed should
1052 STARTING_FRAME_OFFSET not be already aligned to
1053 STACK_BOUNDARY. */
1054 assign_stack_local (BLKmode, 0, cfun->stack_alignment_needed);
1055 if (starting_frame_size != get_frame_size ())
1056 continue;
1059 if (caller_save_needed)
1061 save_call_clobbered_regs ();
1062 /* That might have allocated new insn_chain structures. */
1063 reload_firstobj = obstack_alloc (&reload_obstack, 0);
1066 calculate_needs_all_insns (global);
1068 CLEAR_REG_SET (&spilled_pseudos);
1069 did_spill = 0;
1071 something_changed = 0;
1073 /* If we allocated any new memory locations, make another pass
1074 since it might have changed elimination offsets. */
1075 if (starting_frame_size != get_frame_size ())
1076 something_changed = 1;
1078 /* Even if the frame size remained the same, we might still have
1079 changed elimination offsets, e.g. if find_reloads called
1080 force_const_mem requiring the back end to allocate a constant
1081 pool base register that needs to be saved on the stack. */
1082 else if (!verify_initial_elim_offsets ())
1083 something_changed = 1;
1086 HARD_REG_SET to_spill;
1087 CLEAR_HARD_REG_SET (to_spill);
1088 update_eliminables (&to_spill);
1089 AND_COMPL_HARD_REG_SET (used_spill_regs, to_spill);
1091 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1092 if (TEST_HARD_REG_BIT (to_spill, i))
1094 spill_hard_reg (i, 1);
1095 did_spill = 1;
1097 /* Regardless of the state of spills, if we previously had
1098 a register that we thought we could eliminate, but now can
1099 not eliminate, we must run another pass.
1101 Consider pseudos which have an entry in reg_equiv_* which
1102 reference an eliminable register. We must make another pass
1103 to update reg_equiv_* so that we do not substitute in the
1104 old value from when we thought the elimination could be
1105 performed. */
1106 something_changed = 1;
1110 select_reload_regs ();
1111 if (failure)
1112 goto failed;
1114 if (insns_need_reload != 0 || did_spill)
1115 something_changed |= finish_spills (global);
1117 if (! something_changed)
1118 break;
1120 if (caller_save_needed)
1121 delete_caller_save_insns ();
1123 obstack_free (&reload_obstack, reload_firstobj);
1126 /* If global-alloc was run, notify it of any register eliminations we have
1127 done. */
1128 if (global)
1129 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1130 if (ep->can_eliminate)
1131 mark_elimination (ep->from, ep->to);
1133 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1134 If that insn didn't set the register (i.e., it copied the register to
1135 memory), just delete that insn instead of the equivalencing insn plus
1136 anything now dead. If we call delete_dead_insn on that insn, we may
1137 delete the insn that actually sets the register if the register dies
1138 there and that is incorrect. */
1140 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1142 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0)
1144 rtx list;
1145 for (list = reg_equiv_init[i]; list; list = XEXP (list, 1))
1147 rtx equiv_insn = XEXP (list, 0);
1149 /* If we already deleted the insn or if it may trap, we can't
1150 delete it. The latter case shouldn't happen, but can
1151 if an insn has a variable address, gets a REG_EH_REGION
1152 note added to it, and then gets converted into a load
1153 from a constant address. */
1154 if (NOTE_P (equiv_insn)
1155 || can_throw_internal (equiv_insn))
1157 else if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn)))
1158 delete_dead_insn (equiv_insn);
1159 else
1160 SET_INSN_DELETED (equiv_insn);
1165 /* Use the reload registers where necessary
1166 by generating move instructions to move the must-be-register
1167 values into or out of the reload registers. */
1169 if (insns_need_reload != 0 || something_needs_elimination
1170 || something_needs_operands_changed)
1172 HOST_WIDE_INT old_frame_size = get_frame_size ();
1174 reload_as_needed (global);
1176 gcc_assert (old_frame_size == get_frame_size ());
1178 gcc_assert (verify_initial_elim_offsets ());
1181 /* If we were able to eliminate the frame pointer, show that it is no
1182 longer live at the start of any basic block. If it is live by
1183 virtue of being in a pseudo, that pseudo will be marked live
1184 and hence the frame pointer will be known to be live via that
1185 pseudo. */
1187 if (! frame_pointer_needed)
1188 FOR_EACH_BB (bb)
1190 bitmap_clear_bit (df_get_live_in (bb), HARD_FRAME_POINTER_REGNUM);
1191 bitmap_clear_bit (df_get_live_top (bb), HARD_FRAME_POINTER_REGNUM);
1194 /* Come here (with failure set nonzero) if we can't get enough spill
1195 regs. */
1196 failed:
1198 CLEAR_REG_SET (&spilled_pseudos);
1199 reload_in_progress = 0;
1201 /* Now eliminate all pseudo regs by modifying them into
1202 their equivalent memory references.
1203 The REG-rtx's for the pseudos are modified in place,
1204 so all insns that used to refer to them now refer to memory.
1206 For a reg that has a reg_equiv_address, all those insns
1207 were changed by reloading so that no insns refer to it any longer;
1208 but the DECL_RTL of a variable decl may refer to it,
1209 and if so this causes the debugging info to mention the variable. */
1211 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1213 rtx addr = 0;
1215 if (reg_equiv_mem[i])
1216 addr = XEXP (reg_equiv_mem[i], 0);
1218 if (reg_equiv_address[i])
1219 addr = reg_equiv_address[i];
1221 if (addr)
1223 if (reg_renumber[i] < 0)
1225 rtx reg = regno_reg_rtx[i];
1227 REG_USERVAR_P (reg) = 0;
1228 PUT_CODE (reg, MEM);
1229 XEXP (reg, 0) = addr;
1230 if (reg_equiv_memory_loc[i])
1231 MEM_COPY_ATTRIBUTES (reg, reg_equiv_memory_loc[i]);
1232 else
1234 MEM_IN_STRUCT_P (reg) = MEM_SCALAR_P (reg) = 0;
1235 MEM_ATTRS (reg) = 0;
1237 MEM_NOTRAP_P (reg) = 1;
1239 else if (reg_equiv_mem[i])
1240 XEXP (reg_equiv_mem[i], 0) = addr;
1244 /* We must set reload_completed now since the cleanup_subreg_operands call
1245 below will re-recognize each insn and reload may have generated insns
1246 which are only valid during and after reload. */
1247 reload_completed = 1;
1249 /* Make a pass over all the insns and delete all USEs which we inserted
1250 only to tag a REG_EQUAL note on them. Remove all REG_DEAD and REG_UNUSED
1251 notes. Delete all CLOBBER insns, except those that refer to the return
1252 value and the special mem:BLK CLOBBERs added to prevent the scheduler
1253 from misarranging variable-array code, and simplify (subreg (reg))
1254 operands. Also remove all REG_RETVAL and REG_LIBCALL notes since they
1255 are no longer useful or accurate. Strip and regenerate REG_INC notes
1256 that may have been moved around. */
1258 for (insn = first; insn; insn = NEXT_INSN (insn))
1259 if (INSN_P (insn))
1261 rtx *pnote;
1263 if (CALL_P (insn))
1265 HARD_REG_SET used_function_regs;
1267 get_call_invalidated_used_regs (insn, &used_function_regs, false);
1268 IOR_HARD_REG_SET (cfun->emit->call_used_regs, used_function_regs);
1269 replace_pseudos_in (& CALL_INSN_FUNCTION_USAGE (insn),
1270 VOIDmode, CALL_INSN_FUNCTION_USAGE (insn));
1273 if ((GET_CODE (PATTERN (insn)) == USE
1274 /* We mark with QImode USEs introduced by reload itself. */
1275 && (GET_MODE (insn) == QImode
1276 || find_reg_note (insn, REG_EQUAL, NULL_RTX)))
1277 || (GET_CODE (PATTERN (insn)) == CLOBBER
1278 && (!MEM_P (XEXP (PATTERN (insn), 0))
1279 || GET_MODE (XEXP (PATTERN (insn), 0)) != BLKmode
1280 || (GET_CODE (XEXP (XEXP (PATTERN (insn), 0), 0)) != SCRATCH
1281 && XEXP (XEXP (PATTERN (insn), 0), 0)
1282 != stack_pointer_rtx))
1283 && (!REG_P (XEXP (PATTERN (insn), 0))
1284 || ! REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))))
1286 delete_insn (insn);
1287 continue;
1290 /* Some CLOBBERs may survive until here and still reference unassigned
1291 pseudos with const equivalent, which may in turn cause ICE in later
1292 passes if the reference remains in place. */
1293 if (GET_CODE (PATTERN (insn)) == CLOBBER)
1294 replace_pseudos_in (& XEXP (PATTERN (insn), 0),
1295 VOIDmode, PATTERN (insn));
1297 /* Discard obvious no-ops, even without -O. This optimization
1298 is fast and doesn't interfere with debugging. */
1299 if (NONJUMP_INSN_P (insn)
1300 && GET_CODE (PATTERN (insn)) == SET
1301 && REG_P (SET_SRC (PATTERN (insn)))
1302 && REG_P (SET_DEST (PATTERN (insn)))
1303 && (REGNO (SET_SRC (PATTERN (insn)))
1304 == REGNO (SET_DEST (PATTERN (insn)))))
1306 delete_insn (insn);
1307 continue;
1310 pnote = &REG_NOTES (insn);
1311 while (*pnote != 0)
1313 if (REG_NOTE_KIND (*pnote) == REG_DEAD
1314 || REG_NOTE_KIND (*pnote) == REG_UNUSED
1315 || REG_NOTE_KIND (*pnote) == REG_INC
1316 || REG_NOTE_KIND (*pnote) == REG_RETVAL
1317 || REG_NOTE_KIND (*pnote) == REG_LIBCALL_ID
1318 || REG_NOTE_KIND (*pnote) == REG_LIBCALL)
1319 *pnote = XEXP (*pnote, 1);
1320 else
1321 pnote = &XEXP (*pnote, 1);
1324 #ifdef AUTO_INC_DEC
1325 add_auto_inc_notes (insn, PATTERN (insn));
1326 #endif
1328 /* Simplify (subreg (reg)) if it appears as an operand. */
1329 cleanup_subreg_operands (insn);
1331 /* Clean up invalid ASMs so that they don't confuse later passes.
1332 See PR 21299. */
1333 if (asm_noperands (PATTERN (insn)) >= 0)
1335 extract_insn (insn);
1336 if (!constrain_operands (1))
1338 error_for_asm (insn,
1339 "%<asm%> operand has impossible constraints");
1340 delete_insn (insn);
1341 continue;
1346 /* If we are doing stack checking, give a warning if this function's
1347 frame size is larger than we expect. */
1348 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
1350 HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
1351 static int verbose_warned = 0;
1353 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1354 if (df_regs_ever_live_p (i) && ! fixed_regs[i] && call_used_regs[i])
1355 size += UNITS_PER_WORD;
1357 if (size > STACK_CHECK_MAX_FRAME_SIZE)
1359 warning (0, "frame size too large for reliable stack checking");
1360 if (! verbose_warned)
1362 warning (0, "try reducing the number of local variables");
1363 verbose_warned = 1;
1368 /* Indicate that we no longer have known memory locations or constants. */
1369 if (reg_equiv_constant)
1370 free (reg_equiv_constant);
1371 if (reg_equiv_invariant)
1372 free (reg_equiv_invariant);
1373 reg_equiv_constant = 0;
1374 reg_equiv_invariant = 0;
1375 VEC_free (rtx, gc, reg_equiv_memory_loc_vec);
1376 reg_equiv_memory_loc = 0;
1378 if (offsets_known_at)
1379 free (offsets_known_at);
1380 if (offsets_at)
1381 free (offsets_at);
1383 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1384 if (reg_equiv_alt_mem_list[i])
1385 free_EXPR_LIST_list (&reg_equiv_alt_mem_list[i]);
1386 free (reg_equiv_alt_mem_list);
1388 free (reg_equiv_mem);
1389 reg_equiv_init = 0;
1390 free (reg_equiv_address);
1391 free (reg_max_ref_width);
1392 free (reg_old_renumber);
1393 free (pseudo_previous_regs);
1394 free (pseudo_forbidden_regs);
1396 CLEAR_HARD_REG_SET (used_spill_regs);
1397 for (i = 0; i < n_spills; i++)
1398 SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
1400 /* Free all the insn_chain structures at once. */
1401 obstack_free (&reload_obstack, reload_startobj);
1402 unused_insn_chains = 0;
1403 fixup_abnormal_edges ();
1405 /* Replacing pseudos with their memory equivalents might have
1406 created shared rtx. Subsequent passes would get confused
1407 by this, so unshare everything here. */
1408 unshare_all_rtl_again (first);
1410 #ifdef STACK_BOUNDARY
1411 /* init_emit has set the alignment of the hard frame pointer
1412 to STACK_BOUNDARY. It is very likely no longer valid if
1413 the hard frame pointer was used for register allocation. */
1414 if (!frame_pointer_needed)
1415 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = BITS_PER_UNIT;
1416 #endif
1418 return failure;
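/* Illustrative call sketch: the register allocation pass invokes the
   entry point above roughly like this (GLOBAL_P is a placeholder for
   whether global allocation was performed):  */
#if 0
{
  int failed = reload (get_insns (), global_p);

  if (failed)
    return;		/* Do nothing further for this function.  */
}
#endif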
1421 /* Yet another special case. Unfortunately, reg-stack forces people to
1422 write incorrect clobbers in asm statements. These clobbers must not
1423 cause the register to appear in bad_spill_regs, otherwise we'll call
1424 fatal_insn later. We clear the corresponding regnos in the live
1425 register sets to avoid this.
1426 The whole thing is rather sick, I'm afraid. */
1428 static void
1429 maybe_fix_stack_asms (void)
1431 #ifdef STACK_REGS
1432 const char *constraints[MAX_RECOG_OPERANDS];
1433 enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
1434 struct insn_chain *chain;
1436 for (chain = reload_insn_chain; chain != 0; chain = chain->next)
1438 int i, noperands;
1439 HARD_REG_SET clobbered, allowed;
1440 rtx pat;
1442 if (! INSN_P (chain->insn)
1443 || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
1444 continue;
1445 pat = PATTERN (chain->insn);
1446 if (GET_CODE (pat) != PARALLEL)
1447 continue;
1449 CLEAR_HARD_REG_SET (clobbered);
1450 CLEAR_HARD_REG_SET (allowed);
1452 /* First, make a mask of all stack regs that are clobbered. */
1453 for (i = 0; i < XVECLEN (pat, 0); i++)
1455 rtx t = XVECEXP (pat, 0, i);
1456 if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
1457 SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
1460 /* Get the operand values and constraints out of the insn. */
1461 decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc,
1462 constraints, operand_mode, NULL);
1464 /* For every operand, see what registers are allowed. */
1465 for (i = 0; i < noperands; i++)
1467 const char *p = constraints[i];
1468 /* For every alternative, we compute the class of registers allowed
1469 for reloading in CLS, and merge its contents into the reg set
1470 ALLOWED. */
1471 int cls = (int) NO_REGS;
1473 for (;;)
1475 char c = *p;
1477 if (c == '\0' || c == ',' || c == '#')
1479 /* End of one alternative - mark the regs in the current
1480 class, and reset the class. */
1481 IOR_HARD_REG_SET (allowed, reg_class_contents[cls]);
1482 cls = NO_REGS;
1483 p++;
1484 if (c == '#')
1485 do {
1486 c = *p++;
1487 } while (c != '\0' && c != ',');
1488 if (c == '\0')
1489 break;
1490 continue;
1493 switch (c)
1495 case '=': case '+': case '*': case '%': case '?': case '!':
1496 case '0': case '1': case '2': case '3': case '4': case 'm':
1497 case '<': case '>': case 'V': case 'o': case '&': case 'E':
1498 case 'F': case 's': case 'i': case 'n': case 'X': case 'I':
1499 case 'J': case 'K': case 'L': case 'M': case 'N': case 'O':
1500 case 'P':
1501 break;
1503 case 'p':
1504 cls = (int) reg_class_subunion[cls]
1505 [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
1506 break;
1508 case 'g':
1509 case 'r':
1510 cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
1511 break;
1513 default:
1514 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
1515 cls = (int) reg_class_subunion[cls]
1516 [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
1517 else
1518 cls = (int) reg_class_subunion[cls]
1519 [(int) REG_CLASS_FROM_CONSTRAINT (c, p)];
1521 p += CONSTRAINT_LEN (c, p);
1524 /* Those of the registers which are clobbered, but allowed by the
1525 constraints, must be usable as reload registers. So clear them
1526 out of the life information. */
1527 AND_HARD_REG_SET (allowed, clobbered);
1528 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1529 if (TEST_HARD_REG_BIT (allowed, i))
1531 CLEAR_REGNO_REG_SET (&chain->live_throughout, i);
1532 CLEAR_REGNO_REG_SET (&chain->dead_or_set, i);
1536 #endif
1539 /* Copy the global variables n_reloads and rld into the corresponding elts
1540 of CHAIN. */
1541 static void
1542 copy_reloads (struct insn_chain *chain)
1544 chain->n_reloads = n_reloads;
1545 chain->rld = obstack_alloc (&reload_obstack,
1546 n_reloads * sizeof (struct reload));
1547 memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1548 reload_insn_firstobj = obstack_alloc (&reload_obstack, 0);
1551 /* Walk the chain of insns, and determine for each whether it needs reloads
1552 and/or eliminations. Build the corresponding insns_need_reload list, and
1553 set something_needs_elimination as appropriate. */
1554 static void
1555 calculate_needs_all_insns (int global)
1557 struct insn_chain **pprev_reload = &insns_need_reload;
1558 struct insn_chain *chain, *next = 0;
1560 something_needs_elimination = 0;
1562 reload_insn_firstobj = obstack_alloc (&reload_obstack, 0);
1563 for (chain = reload_insn_chain; chain != 0; chain = next)
1565 rtx insn = chain->insn;
1567 next = chain->next;
1569 /* Clear out the shortcuts. */
1570 chain->n_reloads = 0;
1571 chain->need_elim = 0;
1572 chain->need_reload = 0;
1573 chain->need_operand_change = 0;
1575 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
1576 include REG_LABEL), we need to see what effects this has on the
1577 known offsets at labels. */
1579 if (LABEL_P (insn) || JUMP_P (insn)
1580 || (INSN_P (insn) && REG_NOTES (insn) != 0))
1581 set_label_offsets (insn, insn, 0);
1583 if (INSN_P (insn))
1585 rtx old_body = PATTERN (insn);
1586 int old_code = INSN_CODE (insn);
1587 rtx old_notes = REG_NOTES (insn);
1588 int did_elimination = 0;
1589 int operands_changed = 0;
1590 rtx set = single_set (insn);
1592 /* Skip insns that only set an equivalence. */
1593 if (set && REG_P (SET_DEST (set))
1594 && reg_renumber[REGNO (SET_DEST (set))] < 0
1595 && (reg_equiv_constant[REGNO (SET_DEST (set))]
1596 || (reg_equiv_invariant[REGNO (SET_DEST (set))]))
1597 && reg_equiv_init[REGNO (SET_DEST (set))])
1598 continue;
1600 /* If needed, eliminate any eliminable registers. */
1601 if (num_eliminable || num_eliminable_invariants)
1602 did_elimination = eliminate_regs_in_insn (insn, 0);
1604 /* Analyze the instruction. */
1605 operands_changed = find_reloads (insn, 0, spill_indirect_levels,
1606 global, spill_reg_order);
1608 /* If a no-op set needs more than one reload, this is likely
1609 to be something that needs input address reloads. We
1610 can't get rid of this cleanly later, and it is of no use
1611 anyway, so discard it now.
1612 We only do this when expensive_optimizations is enabled,
1613 since this complements reload inheritance / output
1614 reload deletion, and it can make debugging harder. */
1615 if (flag_expensive_optimizations && n_reloads > 1)
1617 rtx set = single_set (insn);
1618 if (set
1620 ((SET_SRC (set) == SET_DEST (set)
1621 && REG_P (SET_SRC (set))
1622 && REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER)
1623 || (REG_P (SET_SRC (set)) && REG_P (SET_DEST (set))
1624 && reg_renumber [REGNO (SET_SRC (set))] < 0
1625 && reg_renumber [REGNO (SET_DEST (set))] < 0
1626 && reg_equiv_memory_loc[REGNO (SET_SRC (set))] != NULL
1627 && reg_equiv_memory_loc[REGNO (SET_DEST (set))] != NULL
1628 && rtx_equal_p (reg_equiv_memory_loc
1629 [REGNO (SET_SRC (set))],
1630 reg_equiv_memory_loc
1631 [REGNO (SET_DEST (set))]))))
1633 delete_insn (insn);
1634 /* Delete it from the reload chain. */
1635 if (chain->prev)
1636 chain->prev->next = next;
1637 else
1638 reload_insn_chain = next;
1639 if (next)
1640 next->prev = chain->prev;
1641 chain->next = unused_insn_chains;
1642 unused_insn_chains = chain;
1643 continue;
1646 if (num_eliminable)
1647 update_eliminable_offsets ();
1649 /* Remember for later shortcuts which insns had any reloads or
1650 register eliminations. */
1651 chain->need_elim = did_elimination;
1652 chain->need_reload = n_reloads > 0;
1653 chain->need_operand_change = operands_changed;
1655 /* Discard any register replacements done. */
1656 if (did_elimination)
1658 obstack_free (&reload_obstack, reload_insn_firstobj);
1659 PATTERN (insn) = old_body;
1660 INSN_CODE (insn) = old_code;
1661 REG_NOTES (insn) = old_notes;
1662 something_needs_elimination = 1;
1665 something_needs_operands_changed |= operands_changed;
1667 if (n_reloads != 0)
1669 copy_reloads (chain);
1670 *pprev_reload = chain;
1671 pprev_reload = &chain->next_need_reload;
1675 *pprev_reload = 0;
1678 /* Comparison function for qsort to decide which of two reloads
1679 should be handled first. *P1 and *P2 are the reload numbers. */
1681 static int
1682 reload_reg_class_lower (const void *r1p, const void *r2p)
1684 int r1 = *(const short *) r1p, r2 = *(const short *) r2p;
1685 int t;
1687 /* Consider required reloads before optional ones. */
1688 t = rld[r1].optional - rld[r2].optional;
1689 if (t != 0)
1690 return t;
1692 /* Count all solitary classes before non-solitary ones. */
1693 t = ((reg_class_size[(int) rld[r2].class] == 1)
1694 - (reg_class_size[(int) rld[r1].class] == 1));
1695 if (t != 0)
1696 return t;
1698 /* Aside from solitaires, consider all multi-reg groups first. */
1699 t = rld[r2].nregs - rld[r1].nregs;
1700 if (t != 0)
1701 return t;
1703 /* Consider reloads in order of increasing reg-class number. */
1704 t = (int) rld[r1].class - (int) rld[r2].class;
1705 if (t != 0)
1706 return t;
1708 /* If reloads are equally urgent, sort by reload number,
1709 so that the results of qsort leave nothing to chance. */
1710 return r1 - r2;
1713 /* The cost of spilling each hard reg. */
1714 static int spill_cost[FIRST_PSEUDO_REGISTER];
1716 /* When spilling multiple hard registers, we use SPILL_COST for the first
1717 spilled hard reg and SPILL_ADD_COST for subsequent regs. SPILL_ADD_COST
1718    is charged only against the first hard reg of a multi-reg pseudo.  */
1719 static int spill_add_cost[FIRST_PSEUDO_REGISTER];
1721 /* Update the spill cost arrays, considering that pseudo REG is live. */
1723 static void
1724 count_pseudo (int reg)
1726 int freq = REG_FREQ (reg);
1727 int r = reg_renumber[reg];
1728 int nregs;
1730 if (REGNO_REG_SET_P (&pseudos_counted, reg)
1731 || REGNO_REG_SET_P (&spilled_pseudos, reg))
1732 return;
1734 SET_REGNO_REG_SET (&pseudos_counted, reg);
1736 gcc_assert (r >= 0);
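/* Charge SPILL_ADD_COST only to the pseudo's first hard reg, but charge
   SPILL_COST to every hard reg the pseudo occupies.  */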
1738 spill_add_cost[r] += freq;
1740 nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
1741 while (nregs-- > 0)
1742 spill_cost[r + nregs] += freq;
1745 /* Calculate the SPILL_COST and SPILL_ADD_COST arrays and determine the
1746 contents of BAD_SPILL_REGS for the insn described by CHAIN. */
1748 static void
1749 order_regs_for_reload (struct insn_chain *chain)
1751 unsigned i;
1752 HARD_REG_SET used_by_pseudos;
1753 HARD_REG_SET used_by_pseudos2;
1754 reg_set_iterator rsi;
1756 COPY_HARD_REG_SET (bad_spill_regs, fixed_reg_set);
1758 memset (spill_cost, 0, sizeof spill_cost);
1759 memset (spill_add_cost, 0, sizeof spill_add_cost);
1761 /* Count number of uses of each hard reg by pseudo regs allocated to it
1762 and then order them by decreasing use. First exclude hard registers
1763 that are live in or across this insn. */
1765 REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
1766 REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
1767 IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos);
1768 IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos2);
1770 /* Now find out which pseudos are allocated to these hard regs, and
1771    update the spill cost arrays accordingly.  */
1772 CLEAR_REG_SET (&pseudos_counted);
1774 EXECUTE_IF_SET_IN_REG_SET
1775 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
1777 count_pseudo (i);
1779 EXECUTE_IF_SET_IN_REG_SET
1780 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
1782 count_pseudo (i);
1784 CLEAR_REG_SET (&pseudos_counted);
1787 /* Vector of reload-numbers showing the order in which the reloads should
1788 be processed. */
1789 static short reload_order[MAX_RELOADS];
1791 /* This is used to keep track of the spill regs used in one insn. */
1792 static HARD_REG_SET used_spill_regs_local;
1794 /* We decided to spill hard register SPILLED, which has a size of
1795 SPILLED_NREGS. Determine how pseudo REG, which is live during the insn,
1796 is affected. We will add it to SPILLED_PSEUDOS if necessary, and we will
1797 update SPILL_COST/SPILL_ADD_COST. */
1799 static void
1800 count_spilled_pseudo (int spilled, int spilled_nregs, int reg)
1802 int freq = REG_FREQ (reg);
1803 int r = reg_renumber[reg];
1804 int nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
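/* Ignore the pseudo if it has already been spilled, or if its hard regs
   do not overlap the range being spilled.  */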
1806 if (REGNO_REG_SET_P (&spilled_pseudos, reg)
1807 || spilled + spilled_nregs <= r || r + nregs <= spilled)
1808 return;
1810 SET_REGNO_REG_SET (&spilled_pseudos, reg);
1812 spill_add_cost[r] -= freq;
1813 while (nregs-- > 0)
1814 spill_cost[r + nregs] -= freq;
1817 /* Find reload register to use for reload number ORDER. */
1819 static int
1820 find_reg (struct insn_chain *chain, int order)
1822 int rnum = reload_order[order];
1823 struct reload *rl = rld + rnum;
1824 int best_cost = INT_MAX;
1825 int best_reg = -1;
1826 unsigned int i, j;
1827 int k;
1828 HARD_REG_SET not_usable;
1829 HARD_REG_SET used_by_other_reload;
1830 reg_set_iterator rsi;
1832 COPY_HARD_REG_SET (not_usable, bad_spill_regs);
1833 IOR_HARD_REG_SET (not_usable, bad_spill_regs_global);
1834 IOR_COMPL_HARD_REG_SET (not_usable, reg_class_contents[rl->class]);
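/* Collect the hard regs already assigned to earlier reloads that conflict
   with this one; they cannot be chosen again here.  */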
1836 CLEAR_HARD_REG_SET (used_by_other_reload);
1837 for (k = 0; k < order; k++)
1839 int other = reload_order[k];
1841 if (rld[other].regno >= 0 && reloads_conflict (other, rnum))
1842 for (j = 0; j < rld[other].nregs; j++)
1843 SET_HARD_REG_BIT (used_by_other_reload, rld[other].regno + j);
1846 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1848 unsigned int regno = i;
1850 if (! TEST_HARD_REG_BIT (not_usable, regno)
1851 && ! TEST_HARD_REG_BIT (used_by_other_reload, regno)
1852 && HARD_REGNO_MODE_OK (regno, rl->mode))
1854 int this_cost = spill_cost[regno];
1855 int ok = 1;
1856 unsigned int this_nregs = hard_regno_nregs[regno][rl->mode];
1858 for (j = 1; j < this_nregs; j++)
1860 this_cost += spill_add_cost[regno + j];
1861 if ((TEST_HARD_REG_BIT (not_usable, regno + j))
1862 || TEST_HARD_REG_BIT (used_by_other_reload, regno + j))
1863 ok = 0;
1865 if (! ok)
1866 continue;
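/* Slightly prefer a register that already holds the reload's input value
   or will receive its output; reusing it may avoid a separate move.  */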
1867 if (rl->in && REG_P (rl->in) && REGNO (rl->in) == regno)
1868 this_cost--;
1869 if (rl->out && REG_P (rl->out) && REGNO (rl->out) == regno)
1870 this_cost--;
1871 if (this_cost < best_cost
1872 /* Among registers with equal cost, prefer caller-saved ones, or
1873 use REG_ALLOC_ORDER if it is defined. */
1874 || (this_cost == best_cost
1875 #ifdef REG_ALLOC_ORDER
1876 && (inv_reg_alloc_order[regno]
1877 < inv_reg_alloc_order[best_reg])
1878 #else
1879 && call_used_regs[regno]
1880 && ! call_used_regs[best_reg]
1881 #endif
1884 best_reg = regno;
1885 best_cost = this_cost;
1889 if (best_reg == -1)
1890 return 0;
1892 if (dump_file)
1893 fprintf (dump_file, "Using reg %d for reload %d\n", best_reg, rnum);
1895 rl->nregs = hard_regno_nregs[best_reg][rl->mode];
1896 rl->regno = best_reg;
1898 EXECUTE_IF_SET_IN_REG_SET
1899 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, j, rsi)
1901 count_spilled_pseudo (best_reg, rl->nregs, j);
1904 EXECUTE_IF_SET_IN_REG_SET
1905 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, j, rsi)
1907 count_spilled_pseudo (best_reg, rl->nregs, j);
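/* Every pseudo overlapping the chosen regs has now been counted as spilled,
   so the remaining costs for those regs must be zero; record the regs as
   spill regs used by this insn.  */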
1910 for (i = 0; i < rl->nregs; i++)
1912 gcc_assert (spill_cost[best_reg + i] == 0);
1913 gcc_assert (spill_add_cost[best_reg + i] == 0);
1914 SET_HARD_REG_BIT (used_spill_regs_local, best_reg + i);
1916 return 1;
1919 /* Find more reload regs to satisfy the remaining need of an insn, which
1920 is given by CHAIN.
1921 Do it by ascending class number, since otherwise a reg
1922 might be spilled for a big class and might fail to count
1923 for a smaller class even though it belongs to that class. */
1925 static void
1926 find_reload_regs (struct insn_chain *chain)
1928 int i;
1930 /* In order to be certain of getting the registers we need,
1931 we must sort the reloads into order of increasing register class.
1932 Then our grabbing of reload registers will parallel the process
1933 that provided the reload registers. */
1934 for (i = 0; i < chain->n_reloads; i++)
1936 /* Show whether this reload already has a hard reg. */
1937 if (chain->rld[i].reg_rtx)
1939 int regno = REGNO (chain->rld[i].reg_rtx);
1940 chain->rld[i].regno = regno;
1941 chain->rld[i].nregs
1942 = hard_regno_nregs[regno][GET_MODE (chain->rld[i].reg_rtx)];
1944 else
1945 chain->rld[i].regno = -1;
1946 reload_order[i] = i;
1949 n_reloads = chain->n_reloads;
1950 memcpy (rld, chain->rld, n_reloads * sizeof (struct reload));
1952 CLEAR_HARD_REG_SET (used_spill_regs_local);
1954 if (dump_file)
1955 fprintf (dump_file, "Spilling for insn %d.\n", INSN_UID (chain->insn));
1957 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
1959 /* Compute the order of preference for hard registers to spill. */
1961 order_regs_for_reload (chain);
1963 for (i = 0; i < n_reloads; i++)
1965 int r = reload_order[i];
1967 /* Ignore reloads that got marked inoperative. */
1968 if ((rld[r].out != 0 || rld[r].in != 0 || rld[r].secondary_p)
1969 && ! rld[r].optional
1970 && rld[r].regno == -1)
1971 if (! find_reg (chain, i))
1973 if (dump_file)
1974 fprintf (dump_file, "reload failure for reload %d\n", r);
1975 spill_failure (chain->insn, rld[r].class);
1976 failure = 1;
1977 return;
1981 COPY_HARD_REG_SET (chain->used_spill_regs, used_spill_regs_local);
1982 IOR_HARD_REG_SET (used_spill_regs, used_spill_regs_local);
1984 memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1987 static void
1988 select_reload_regs (void)
1990 struct insn_chain *chain;
1992 /* Try to satisfy the needs for each insn. */
1993 for (chain = insns_need_reload; chain != 0;
1994 chain = chain->next_need_reload)
1995 find_reload_regs (chain);
1998 /* Delete all insns that were inserted by emit_caller_save_insns during
1999 this iteration. */
2000 static void
2001 delete_caller_save_insns (void)
2003 struct insn_chain *c = reload_insn_chain;
2005 while (c != 0)
2007 while (c != 0 && c->is_caller_save_insn)
2009 struct insn_chain *next = c->next;
2010 rtx insn = c->insn;
2012 if (c == reload_insn_chain)
2013 reload_insn_chain = next;
2014 delete_insn (insn);
2016 if (next)
2017 next->prev = c->prev;
2018 if (c->prev)
2019 c->prev->next = next;
2020 c->next = unused_insn_chains;
2021 unused_insn_chains = c;
2022 c = next;
2024 if (c != 0)
2025 c = c->next;
2029 /* Handle the failure to find a register to spill.
2030 INSN should be one of the insns which needed this particular spill reg. */
2032 static void
2033 spill_failure (rtx insn, enum reg_class class)
2035 if (asm_noperands (PATTERN (insn)) >= 0)
2036 error_for_asm (insn, "can't find a register in class %qs while "
2037 "reloading %<asm%>",
2038 reg_class_names[class]);
2039 else
2041 error ("unable to find a register to spill in class %qs",
2042 reg_class_names[class]);
2044 if (dump_file)
2046 fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
2047 debug_reload_to_stream (dump_file);
2049 fatal_insn ("this is the insn:", insn);
2053 /* Delete an unneeded INSN and any previous insns whose sole purpose is loading
2054 data that is dead in INSN. */
2056 static void
2057 delete_dead_insn (rtx insn)
2059 rtx prev = prev_real_insn (insn);
2060 rtx prev_dest;
2062 /* If the previous insn sets a register that dies in our insn, delete it
2063 too. */
2064 if (prev && GET_CODE (PATTERN (prev)) == SET
2065 && (prev_dest = SET_DEST (PATTERN (prev)), REG_P (prev_dest))
2066 && reg_mentioned_p (prev_dest, PATTERN (insn))
2067 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
2068 && ! side_effects_p (SET_SRC (PATTERN (prev))))
2069 delete_dead_insn (prev);
2071 SET_INSN_DELETED (insn);
2074 /* Modify the home of pseudo-reg I.
2075 The new home is present in reg_renumber[I].
2077 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2078 or it may be -1, meaning there is none or it is not relevant.
2079 This is used so that all pseudos spilled from a given hard reg
2080 can share one stack slot. */
2082 static void
2083 alter_reg (int i, int from_reg, bool dont_share_p)
2085 /* When outputting an inline function, this can happen
2086 for a reg that isn't actually used. */
2087 if (regno_reg_rtx[i] == 0)
2088 return;
2090 /* If the reg got changed to a MEM at rtl-generation time,
2091 ignore it. */
2092 if (!REG_P (regno_reg_rtx[i]))
2093 return;
2095 /* Modify the reg-rtx to contain the new hard reg
2096 number or else to contain its pseudo reg number. */
2097 SET_REGNO (regno_reg_rtx[i],
2098 reg_renumber[i] >= 0 ? reg_renumber[i] : i);
2100 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2101 allocate a stack slot for it. */
2103 if (reg_renumber[i] < 0
2104 && REG_N_REFS (i) > 0
2105 && reg_equiv_constant[i] == 0
2106 && (reg_equiv_invariant[i] == 0 || reg_equiv_init[i] == 0)
2107 && reg_equiv_memory_loc[i] == 0)
2109 rtx x;
2110 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2111 unsigned int inherent_size = PSEUDO_REGNO_BYTES (i);
2112 unsigned int inherent_align = GET_MODE_ALIGNMENT (mode);
2113 unsigned int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2114 unsigned int min_align = reg_max_ref_width[i] * BITS_PER_UNIT;
2115 int adjust = 0;
2116 bool shared_p = false;
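/* When IRA is in use and sharing is permitted, first let IRA try to supply
   an existing stack slot for this pseudo.  */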
2119 x = (dont_share_p || ! flag_ira
2120 ? NULL_RTX : reuse_stack_slot (i, inherent_size, total_size));
2121 if (x)
2122 shared_p = true;
2123 /* Each pseudo reg has an inherent size which comes from its own mode,
2124 and a total size which provides room for paradoxical subregs
2125 which refer to the pseudo reg in wider modes.
2127 We can use a slot already allocated if it provides both
2128 enough inherent space and enough total space.
2129 Otherwise, we allocate a new slot, making sure that it has no less
2130    inherent space, and no less total space, than the previous slot.  */
2131 else if (from_reg == -1 || (! dont_share_p && flag_ira))
2133 alias_set_type alias_set = new_alias_set ();
2135 /* No known place to spill from => no slot to reuse. */
2136 x = assign_stack_local (mode, total_size,
2137 min_align > inherent_align
2138 || total_size > inherent_size ? -1 : 0);
2139 if (BYTES_BIG_ENDIAN)
2140 /* Cancel the big-endian correction done in assign_stack_local.
2141 Get the address of the beginning of the slot.
2142 This is so we can do a big-endian correction unconditionally
2143 below. */
2144 adjust = inherent_size - total_size;
2146 /* Nothing can alias this slot except this pseudo. */
2147 set_mem_alias_set (x, alias_set);
2148 dse_record_singleton_alias_set (alias_set, mode);
2150 if (! dont_share_p && flag_ira)
2151 mark_new_stack_slot (x, i, total_size);
2154 /* Reuse a stack slot if possible. */
2155 else if (spill_stack_slot[from_reg] != 0
2156 && spill_stack_slot_width[from_reg] >= total_size
2157 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2158 >= inherent_size)
2159 && MEM_ALIGN (spill_stack_slot[from_reg]) >= min_align)
2160 x = spill_stack_slot[from_reg];
2161 /* Allocate a bigger slot. */
2162 else
2164 /* Compute maximum size needed, both for inherent size
2165 and for total size. */
2166 rtx stack_slot;
2168 if (spill_stack_slot[from_reg])
2170 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2171 > inherent_size)
2172 mode = GET_MODE (spill_stack_slot[from_reg]);
2173 if (spill_stack_slot_width[from_reg] > total_size)
2174 total_size = spill_stack_slot_width[from_reg];
2175 if (MEM_ALIGN (spill_stack_slot[from_reg]) > min_align)
2176 min_align = MEM_ALIGN (spill_stack_slot[from_reg]);
2179 /* Make a slot with that size. */
2180 x = assign_stack_local (mode, total_size,
2181 min_align > inherent_align
2182 || total_size > inherent_size ? -1 : 0);
2183 stack_slot = x;
2185 /* All pseudos mapped to this slot can alias each other. */
2186 if (spill_stack_slot[from_reg])
2188 alias_set_type alias_set
2189 = MEM_ALIAS_SET (spill_stack_slot[from_reg]);
2190 set_mem_alias_set (x, alias_set);
2191 dse_invalidate_singleton_alias_set (alias_set);
2193 else
2195 alias_set_type alias_set = new_alias_set ();
2196 set_mem_alias_set (x, alias_set);
2197 dse_record_singleton_alias_set (alias_set, mode);
2200 if (BYTES_BIG_ENDIAN)
2202 /* Cancel the big-endian correction done in assign_stack_local.
2203 Get the address of the beginning of the slot.
2204 This is so we can do a big-endian correction unconditionally
2205 below. */
2206 adjust = GET_MODE_SIZE (mode) - total_size;
2207 if (adjust)
2208 stack_slot
2209 = adjust_address_nv (x, mode_for_size (total_size
2210 * BITS_PER_UNIT,
2211 MODE_INT, 1),
2212 adjust);
2215 spill_stack_slot[from_reg] = stack_slot;
2216 spill_stack_slot_width[from_reg] = total_size;
2219 /* On a big endian machine, the "address" of the slot
2220 is the address of the low part that fits its inherent mode. */
2221 if (BYTES_BIG_ENDIAN && inherent_size < total_size)
2222 adjust += (total_size - inherent_size);
2224 /* If we have any adjustment to make, or if the stack slot is the
2225 wrong mode, make a new stack slot. */
2226 x = adjust_address_nv (x, GET_MODE (regno_reg_rtx[i]), adjust);
2228 /* If we have a decl for the original register, set it for the
2229 memory. If this is a shared MEM, make a copy. */
2230 if (shared_p)
2232 x = copy_rtx (x);
2233 set_mem_attrs_from_reg (x, regno_reg_rtx[i]);
2235 else if (REG_EXPR (regno_reg_rtx[i])
2236 && DECL_P (REG_EXPR (regno_reg_rtx[i])))
2238 rtx decl = DECL_RTL_IF_SET (REG_EXPR (regno_reg_rtx[i]));
2240 /* We can do this only for the DECL's home pseudo, not for
2241 any copies of it, since otherwise when the stack slot
2242 is reused, nonoverlapping_memrefs_p might think they
2243 cannot overlap. */
2244 if (decl && REG_P (decl) && REGNO (decl) == (unsigned) i)
2246 if (from_reg != -1 && spill_stack_slot[from_reg] == x)
2247 x = copy_rtx (x);
2249 set_mem_attrs_from_reg (x, regno_reg_rtx[i]);
2253 /* Save the stack slot for later. */
2254 reg_equiv_memory_loc[i] = x;
2258 /* Mark the slots in regs_ever_live for the hard regs used by
2259 pseudo-reg number REGNO, accessed in MODE. */
2261 static void
2262 mark_home_live_1 (int regno, enum machine_mode mode)
2264 int i, lim;
2266 i = reg_renumber[regno];
2267 if (i < 0)
2268 return;
2269 lim = end_hard_regno (mode, i);
2270 while (i < lim)
2271 df_set_regs_ever_live (i++, true);
2274 /* Mark the slots in regs_ever_live for the hard regs
2275 used by pseudo-reg number REGNO. */
2277 void
2278 mark_home_live (int regno)
2280 if (reg_renumber[regno] >= 0)
2281 mark_home_live_1 (regno, PSEUDO_REGNO_MODE (regno));
2284 /* This function handles the tracking of elimination offsets around branches.
2286 X is a piece of RTL being scanned.
2288 INSN is the insn that it came from, if any.
2290 INITIAL_P is nonzero if we are to set the offset to be the initial
2291 offset and zero if we are setting the offset of the label to be the
2292 current offset. */
2294 static void
2295 set_label_offsets (rtx x, rtx insn, int initial_p)
2297 enum rtx_code code = GET_CODE (x);
2298 rtx tem;
2299 unsigned int i;
2300 struct elim_table *p;
2302 switch (code)
2304 case LABEL_REF:
2305 if (LABEL_REF_NONLOCAL_P (x))
2306 return;
2308 x = XEXP (x, 0);
2310 /* ... fall through ... */
2312 case CODE_LABEL:
2313 /* If we know nothing about this label, set the desired offsets. Note
2314 that this sets the offset at a label to be the offset before a label
2315 if we don't know anything about the label. This is not correct for
2316 the label after a BARRIER, but is the best guess we can make. If
2317 we guessed wrong, we will suppress an elimination that might have
2318 been possible had we been able to guess correctly. */
2320 if (! offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num])
2322 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2323 offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
2324 = (initial_p ? reg_eliminate[i].initial_offset
2325 : reg_eliminate[i].offset);
2326 offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num] = 1;
2329 /* Otherwise, if this is the definition of a label and it is
2330 preceded by a BARRIER, set our offsets to the known offset of
2331 that label. */
2333 else if (x == insn
2334 && (tem = prev_nonnote_insn (insn)) != 0
2335 && BARRIER_P (tem))
2336 set_offsets_for_label (insn);
2337 else
2338 /* If neither of the above cases is true, compare each offset
2339 with those previously recorded and suppress any eliminations
2340 where the offsets disagree. */
2342 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2343 if (offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
2344 != (initial_p ? reg_eliminate[i].initial_offset
2345 : reg_eliminate[i].offset))
2346 reg_eliminate[i].can_eliminate = 0;
2348 return;
2350 case JUMP_INSN:
2351 set_label_offsets (PATTERN (insn), insn, initial_p);
2353 /* ... fall through ... */
2355 case INSN:
2356 case CALL_INSN:
2357 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2358 and hence must have all eliminations at their initial offsets. */
2359 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2360 if (REG_NOTE_KIND (tem) == REG_LABEL)
2361 set_label_offsets (XEXP (tem, 0), insn, 1);
2362 return;
2364 case PARALLEL:
2365 case ADDR_VEC:
2366 case ADDR_DIFF_VEC:
2367 /* Each of the labels in the parallel or address vector must be
2368 at their initial offsets. We want the first field for PARALLEL
2369 and ADDR_VEC and the second field for ADDR_DIFF_VEC. */
2371 for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2372 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2373 insn, initial_p);
2374 return;
2376 case SET:
2377 /* We only care about setting PC. If the source is not RETURN,
2378 IF_THEN_ELSE, or a label, disable any eliminations not at
2379 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2380 isn't one of those possibilities. For branches to a label,
2381 call ourselves recursively.
2383 Note that this can disable elimination unnecessarily when we have
2384 a non-local goto since it will look like a non-constant jump to
2385 someplace in the current function. This isn't a significant
2386 problem since such jumps will normally be when all elimination
2387 pairs are back to their initial offsets. */
2389 if (SET_DEST (x) != pc_rtx)
2390 return;
2392 switch (GET_CODE (SET_SRC (x)))
2394 case PC:
2395 case RETURN:
2396 return;
2398 case LABEL_REF:
2399 set_label_offsets (SET_SRC (x), insn, initial_p);
2400 return;
2402 case IF_THEN_ELSE:
2403 tem = XEXP (SET_SRC (x), 1);
2404 if (GET_CODE (tem) == LABEL_REF)
2405 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2406 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2407 break;
2409 tem = XEXP (SET_SRC (x), 2);
2410 if (GET_CODE (tem) == LABEL_REF)
2411 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2412 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2413 break;
2414 return;
2416 default:
2417 break;
2420 /* If we reach here, all eliminations must be at their initial
2421 offset because we are doing a jump to a variable address. */
2422 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2423 if (p->offset != p->initial_offset)
2424 p->can_eliminate = 0;
2425 break;
2427 default:
2428 break;
2432 /* Scan X and replace any eliminable registers (such as fp) with a
2433 replacement (such as sp), plus an offset.
2435 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2436 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2437 MEM, we are allowed to replace a sum of a register and the constant zero
2438 with the register, which we cannot do outside a MEM. In addition, we need
2439 to record the fact that a register is referenced outside a MEM.
2441 If INSN is an insn, it is the insn containing X. If we replace a REG
2442 in a SET_DEST with an equivalent MEM and INSN is nonzero, write a
2443 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2444 the REG is being modified.
2446 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2447 That's used when we eliminate in expressions stored in notes.
2448 This means, do not set ref_outside_mem even if the reference
2449 is outside of MEMs.
2451    REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
2452 replacements done assuming all offsets are at their initial values. If
2453 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2454 encounter, return the actual location so that find_reloads will do
2455 the proper thing. */
2457 static rtx
2458 eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
2459 bool may_use_invariant)
2461 enum rtx_code code = GET_CODE (x);
2462 struct elim_table *ep;
2463 int regno;
2464 rtx new;
2465 int i, j;
2466 const char *fmt;
2467 int copied = 0;
2469 if (! current_function_decl)
2470 return x;
2472 switch (code)
2474 case CONST_INT:
2475 case CONST_DOUBLE:
2476 case CONST_FIXED:
2477 case CONST_VECTOR:
2478 case CONST:
2479 case SYMBOL_REF:
2480 case CODE_LABEL:
2481 case PC:
2482 case CC0:
2483 case ASM_INPUT:
2484 case ADDR_VEC:
2485 case ADDR_DIFF_VEC:
2486 case RETURN:
2487 return x;
2489 case REG:
2490 regno = REGNO (x);
2492 /* First handle the case where we encounter a bare register that
2493 is eliminable. Replace it with a PLUS. */
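/* For example, a use of the eliminable register EP->FROM becomes
   (plus EP->TO (const_int EP->PREVIOUS_OFFSET)), or simply EP->TO when
   the current offset is zero.  */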
2494 if (regno < FIRST_PSEUDO_REGISTER)
2496 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2497 ep++)
2498 if (ep->from_rtx == x && ep->can_eliminate)
2499 return plus_constant (ep->to_rtx, ep->previous_offset);
2502 else if (reg_renumber && reg_renumber[regno] < 0
2503 && reg_equiv_invariant && reg_equiv_invariant[regno])
2505 if (may_use_invariant)
2506 return eliminate_regs_1 (copy_rtx (reg_equiv_invariant[regno]),
2507 mem_mode, insn, true);
2508 /* There exists at least one use of REGNO that cannot be
2509 eliminated. Prevent the defining insn from being deleted. */
2510 reg_equiv_init[regno] = NULL_RTX;
2511 alter_reg (regno, -1, true);
2513 return x;
2515 /* You might think handling MINUS in a manner similar to PLUS is a
2516 good idea. It is not. It has been tried multiple times and every
2517 time the change has had to have been reverted.
2519 Other parts of reload know a PLUS is special (gen_reload for example)
2520    and require special code to handle a reloaded PLUS operand.
2522 Also consider backends where the flags register is clobbered by a
2523 MINUS, but we can emit a PLUS that does not clobber flags (IA-32,
2524 lea instruction comes to mind). If we try to reload a MINUS, we
2525 may kill the flags register that was holding a useful value.
2527 So, please before trying to handle MINUS, consider reload as a
2528 whole instead of this little section as well as the backend issues. */
2529 case PLUS:
2530 /* If this is the sum of an eliminable register and a constant, rework
2531 the sum. */
2532 if (REG_P (XEXP (x, 0))
2533 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2534 && CONSTANT_P (XEXP (x, 1)))
2536 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2537 ep++)
2538 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2540 /* The only time we want to replace a PLUS with a REG (this
2541 occurs when the constant operand of the PLUS is the negative
2542 of the offset) is when we are inside a MEM. We won't want
2543 to do so at other times because that would change the
2544 structure of the insn in a way that reload can't handle.
2545 We special-case the commonest situation in
2546 eliminate_regs_in_insn, so just replace a PLUS with a
2547 PLUS here, unless inside a MEM. */
2548 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
2549 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2550 return ep->to_rtx;
2551 else
2552 return gen_rtx_PLUS (Pmode, ep->to_rtx,
2553 plus_constant (XEXP (x, 1),
2554 ep->previous_offset));
2557 /* If the register is not eliminable, we are done since the other
2558 operand is a constant. */
2559 return x;
2562 /* If this is part of an address, we want to bring any constant to the
2563 outermost PLUS. We will do this by doing register replacement in
2564 our operands and seeing if a constant shows up in one of them.
2566 Note that there is no risk of modifying the structure of the insn,
2567 since we only get called for its operands, thus we are either
2568 modifying the address inside a MEM, or something like an address
2569 operand of a load-address insn. */
2572 rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true);
2573 rtx new1 = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true);
2575 if (reg_renumber && (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)))
2577 /* If one side is a PLUS and the other side is a pseudo that
2578 didn't get a hard register but has a reg_equiv_constant,
2579 we must replace the constant here since it may no longer
2580 be in the position of any operand. */
2581 if (GET_CODE (new0) == PLUS && REG_P (new1)
2582 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2583 && reg_renumber[REGNO (new1)] < 0
2584 && reg_equiv_constant != 0
2585 && reg_equiv_constant[REGNO (new1)] != 0)
2586 new1 = reg_equiv_constant[REGNO (new1)];
2587 else if (GET_CODE (new1) == PLUS && REG_P (new0)
2588 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2589 && reg_renumber[REGNO (new0)] < 0
2590 && reg_equiv_constant[REGNO (new0)] != 0)
2591 new0 = reg_equiv_constant[REGNO (new0)];
2593 new = form_sum (new0, new1);
2595 /* As above, if we are not inside a MEM we do not want to
2596 turn a PLUS into something else. We might try to do so here
2597 for an addition of 0 if we aren't optimizing. */
2598 if (! mem_mode && GET_CODE (new) != PLUS)
2599 return gen_rtx_PLUS (GET_MODE (x), new, const0_rtx);
2600 else
2601 return new;
2604 return x;
2606 case MULT:
2607 /* If this is the product of an eliminable register and a
2608 constant, apply the distribute law and move the constant out
2609 so that we have (plus (mult ..) ..). This is needed in order
2610 to keep load-address insns valid. This case is pathological.
2611 We ignore the possibility of overflow here. */
2612 if (REG_P (XEXP (x, 0))
2613 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2614 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2615 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2616 ep++)
2617 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2619 if (! mem_mode
2620 /* Refs inside notes don't count for this purpose. */
2621 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2622 || GET_CODE (insn) == INSN_LIST)))
2623 ep->ref_outside_mem = 1;
2625 return
2626 plus_constant (gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
2627 ep->previous_offset * INTVAL (XEXP (x, 1)));
2630 /* ... fall through ... */
2632 case CALL:
2633 case COMPARE:
2634 /* See comments before PLUS about handling MINUS. */
2635 case MINUS:
2636 case DIV: case UDIV:
2637 case MOD: case UMOD:
2638 case AND: case IOR: case XOR:
2639 case ROTATERT: case ROTATE:
2640 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2641 case NE: case EQ:
2642 case GE: case GT: case GEU: case GTU:
2643 case LE: case LT: case LEU: case LTU:
2645 rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false);
2646 rtx new1 = XEXP (x, 1)
2647 ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, false) : 0;
2649 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2650 return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
2652 return x;
2654 case EXPR_LIST:
2655 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2656 if (XEXP (x, 0))
2658 new = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true);
2659 if (new != XEXP (x, 0))
2661 /* If this is a REG_DEAD note, it is not valid anymore.
2662 Using the eliminated version could result in creating a
2663 REG_DEAD note for the stack or frame pointer. */
2664 if (GET_MODE (x) == REG_DEAD)
2665 return (XEXP (x, 1)
2666 ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true)
2667 : NULL_RTX);
2669 x = gen_rtx_EXPR_LIST (REG_NOTE_KIND (x), new, XEXP (x, 1));
2673 /* ... fall through ... */
2675 case INSN_LIST:
2676 /* Now do eliminations in the rest of the chain. If this was
2677 an EXPR_LIST, this might result in allocating more memory than is
2678 strictly needed, but it simplifies the code. */
2679 if (XEXP (x, 1))
2681 new = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true);
2682 if (new != XEXP (x, 1))
2683 return
2684 gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
2686 return x;
2688 case PRE_INC:
2689 case POST_INC:
2690 case PRE_DEC:
2691 case POST_DEC:
2692 /* We do not support elimination of a register that is modified.
2693    elimination_effects has already made sure that this does not
2694 happen. */
2695 return x;
2697 case PRE_MODIFY:
2698 case POST_MODIFY:
2699 /* We do not support elimination of a register that is modified.
2700    elimination_effects has already made sure that this does not
2701 happen. The only remaining case we need to consider here is
2702 that the increment value may be an eliminable register. */
2703 if (GET_CODE (XEXP (x, 1)) == PLUS
2704 && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
2706 rtx new = eliminate_regs_1 (XEXP (XEXP (x, 1), 1), mem_mode,
2707 insn, true);
2709 if (new != XEXP (XEXP (x, 1), 1))
2710 return gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (x, 0),
2711 gen_rtx_PLUS (GET_MODE (x),
2712 XEXP (x, 0), new));
2714 return x;
2716 case STRICT_LOW_PART:
2717 case NEG: case NOT:
2718 case SIGN_EXTEND: case ZERO_EXTEND:
2719 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2720 case FLOAT: case FIX:
2721 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2722 case ABS:
2723 case SQRT:
2724 case FFS:
2725 case CLZ:
2726 case CTZ:
2727 case POPCOUNT:
2728 case PARITY:
2729 case BSWAP:
2730 new = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false);
2731 if (new != XEXP (x, 0))
2732 return gen_rtx_fmt_e (code, GET_MODE (x), new);
2733 return x;
2735 case SUBREG:
2736 /* Similar to above processing, but preserve SUBREG_BYTE.
2737 Convert (subreg (mem)) to (mem) if not paradoxical.
2738 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2739 pseudo didn't get a hard reg, we must replace this with the
2740 eliminated version of the memory location because push_reload
2741 may do the replacement in certain circumstances. */
2742 if (REG_P (SUBREG_REG (x))
2743 && (GET_MODE_SIZE (GET_MODE (x))
2744 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2745 && reg_equiv_memory_loc != 0
2746 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2748 new = SUBREG_REG (x);
2750 else
2751 new = eliminate_regs_1 (SUBREG_REG (x), mem_mode, insn, false);
2753 if (new != SUBREG_REG (x))
2755 int x_size = GET_MODE_SIZE (GET_MODE (x));
2756 int new_size = GET_MODE_SIZE (GET_MODE (new));
2758 if (MEM_P (new)
2759 && ((x_size < new_size
2760 #ifdef WORD_REGISTER_OPERATIONS
2761 /* On these machines, combine can create rtl of the form
2762 (set (subreg:m1 (reg:m2 R) 0) ...)
2763 where m1 < m2, and expects something interesting to
2764 happen to the entire word. Moreover, it will use the
2765 (reg:m2 R) later, expecting all bits to be preserved.
2766 So if the number of words is the same, preserve the
2767 subreg so that push_reload can see it. */
2768 && ! ((x_size - 1) / UNITS_PER_WORD
2769 == (new_size - 1) / UNITS_PER_WORD)
2770 #endif
2772 || x_size == new_size)
2774 return adjust_address_nv (new, GET_MODE (x), SUBREG_BYTE (x));
2775 else
2776 return gen_rtx_SUBREG (GET_MODE (x), new, SUBREG_BYTE (x));
2779 return x;
2781 case MEM:
2782 /* Our only special processing is to pass the mode of the MEM to our
2783 recursive call and copy the flags. While we are here, handle this
2784 case more efficiently. */
2785 return
2786 replace_equiv_address_nv (x,
2787 eliminate_regs_1 (XEXP (x, 0), GET_MODE (x),
2788 insn, true));
2790 case USE:
2791 /* Handle insn_list USE that a call to a pure function may generate. */
2792 new = eliminate_regs_1 (XEXP (x, 0), 0, insn, false);
2793 if (new != XEXP (x, 0))
2794 return gen_rtx_USE (GET_MODE (x), new);
2795 return x;
2797 case CLOBBER:
2798 case ASM_OPERANDS:
2799 case SET:
2800 gcc_unreachable ();
2802 default:
2803 break;
2806 /* Process each of our operands recursively. If any have changed, make a
2807 copy of the rtx. */
2808 fmt = GET_RTX_FORMAT (code);
2809 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2811 if (*fmt == 'e')
2813 new = eliminate_regs_1 (XEXP (x, i), mem_mode, insn, false);
2814 if (new != XEXP (x, i) && ! copied)
2816 x = shallow_copy_rtx (x);
2817 copied = 1;
2819 XEXP (x, i) = new;
2821 else if (*fmt == 'E')
2823 int copied_vec = 0;
2824 for (j = 0; j < XVECLEN (x, i); j++)
2826 new = eliminate_regs_1 (XVECEXP (x, i, j), mem_mode, insn, false);
2827 if (new != XVECEXP (x, i, j) && ! copied_vec)
2829 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
2830 XVEC (x, i)->elem);
2831 if (! copied)
2833 x = shallow_copy_rtx (x);
2834 copied = 1;
2836 XVEC (x, i) = new_v;
2837 copied_vec = 1;
2839 XVECEXP (x, i, j) = new;
2844 return x;
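/* Wrapper around eliminate_regs_1 that never substitutes invariant
   equivalences (MAY_USE_INVARIANT is false).  */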
2847 rtx
2848 eliminate_regs (rtx x, enum machine_mode mem_mode, rtx insn)
2850 return eliminate_regs_1 (x, mem_mode, insn, false);
2853 /* Scan rtx X for modifications of elimination target registers. Update
2854 the table of eliminables to reflect the changed state. MEM_MODE is
2855 the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM. */
2857 static void
2858 elimination_effects (rtx x, enum machine_mode mem_mode)
2860 enum rtx_code code = GET_CODE (x);
2861 struct elim_table *ep;
2862 int regno;
2863 int i, j;
2864 const char *fmt;
2866 switch (code)
2868 case CONST_INT:
2869 case CONST_DOUBLE:
2870 case CONST_FIXED:
2871 case CONST_VECTOR:
2872 case CONST:
2873 case SYMBOL_REF:
2874 case CODE_LABEL:
2875 case PC:
2876 case CC0:
2877 case ASM_INPUT:
2878 case ADDR_VEC:
2879 case ADDR_DIFF_VEC:
2880 case RETURN:
2881 return;
2883 case REG:
2884 regno = REGNO (x);
2886 /* First handle the case where we encounter a bare register that
2887    is eliminable.  Note whether it is referenced outside a MEM.  */
2888 if (regno < FIRST_PSEUDO_REGISTER)
2890 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2891 ep++)
2892 if (ep->from_rtx == x && ep->can_eliminate)
2894 if (! mem_mode)
2895 ep->ref_outside_mem = 1;
2896 return;
2900 else if (reg_renumber[regno] < 0 && reg_equiv_constant
2901 && reg_equiv_constant[regno]
2902 && ! function_invariant_p (reg_equiv_constant[regno]))
2903 elimination_effects (reg_equiv_constant[regno], mem_mode);
2904 return;
2906 case PRE_INC:
2907 case POST_INC:
2908 case PRE_DEC:
2909 case POST_DEC:
2910 case POST_MODIFY:
2911 case PRE_MODIFY:
2912 /* If we modify the source of an elimination rule, disable it. */
2913 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2914 if (ep->from_rtx == XEXP (x, 0))
2915 ep->can_eliminate = 0;
2917 /* If we modify the target of an elimination rule by adding a constant,
2918 update its offset. If we modify the target in any other way, we'll
2919 have to disable the rule as well. */
2920 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2921 if (ep->to_rtx == XEXP (x, 0))
2923 int size = GET_MODE_SIZE (mem_mode);
2925 /* If more bytes than MEM_MODE are pushed, account for them. */
2926 #ifdef PUSH_ROUNDING
2927 if (ep->to_rtx == stack_pointer_rtx)
2928 size = PUSH_ROUNDING (size);
2929 #endif
2930 if (code == PRE_DEC || code == POST_DEC)
2931 ep->offset += size;
2932 else if (code == PRE_INC || code == POST_INC)
2933 ep->offset -= size;
2934 else if (code == PRE_MODIFY || code == POST_MODIFY)
2936 if (GET_CODE (XEXP (x, 1)) == PLUS
2937 && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
2938 && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
2939 ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1));
2940 else
2941 ep->can_eliminate = 0;
2945 /* These two aren't unary operators. */
2946 if (code == POST_MODIFY || code == PRE_MODIFY)
2947 break;
2949 /* Fall through to generic unary operation case. */
2950 case STRICT_LOW_PART:
2951 case NEG: case NOT:
2952 case SIGN_EXTEND: case ZERO_EXTEND:
2953 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2954 case FLOAT: case FIX:
2955 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2956 case ABS:
2957 case SQRT:
2958 case FFS:
2959 case CLZ:
2960 case CTZ:
2961 case POPCOUNT:
2962 case PARITY:
2963 case BSWAP:
2964 elimination_effects (XEXP (x, 0), mem_mode);
2965 return;
2967 case SUBREG:
2968 if (REG_P (SUBREG_REG (x))
2969 && (GET_MODE_SIZE (GET_MODE (x))
2970 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2971 && reg_equiv_memory_loc != 0
2972 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2973 return;
2975 elimination_effects (SUBREG_REG (x), mem_mode);
2976 return;
2978 case USE:
2979 /* If using a register that is the source of an elimination we still
2980 think can be performed, note it cannot be performed since we don't
2981 know how this register is used. */
2982 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2983 if (ep->from_rtx == XEXP (x, 0))
2984 ep->can_eliminate = 0;
2986 elimination_effects (XEXP (x, 0), mem_mode);
2987 return;
2989 case CLOBBER:
2990 /* If clobbering a register that is the replacement register for an
2991 elimination we still think can be performed, note that it cannot
2992 be performed. Otherwise, we need not be concerned about it. */
2993 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2994 if (ep->to_rtx == XEXP (x, 0))
2995 ep->can_eliminate = 0;
2997 elimination_effects (XEXP (x, 0), mem_mode);
2998 return;
3000 case SET:
3001 /* Check for setting a register that we know about. */
3002 if (REG_P (SET_DEST (x)))
3004 /* See if this is setting the replacement register for an
3005 elimination.
3007 If DEST is the hard frame pointer, we do nothing because we
3008 assume that all assignments to the frame pointer are for
3009 non-local gotos and are being done at a time when they are valid
3010 and do not disturb anything else. Some machines want to
3011 eliminate a fake argument pointer (or even a fake frame pointer)
3012 with either the real frame or the stack pointer. Assignments to
3013 the hard frame pointer must not prevent this elimination. */
3015 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3016 ep++)
3017 if (ep->to_rtx == SET_DEST (x)
3018 && SET_DEST (x) != hard_frame_pointer_rtx)
3020 /* If it is being incremented, adjust the offset. Otherwise,
3021 this elimination can't be done. */
3022 rtx src = SET_SRC (x);
3024 if (GET_CODE (src) == PLUS
3025 && XEXP (src, 0) == SET_DEST (x)
3026 && GET_CODE (XEXP (src, 1)) == CONST_INT)
3027 ep->offset -= INTVAL (XEXP (src, 1));
3028 else
3029 ep->can_eliminate = 0;
3033 elimination_effects (SET_DEST (x), 0);
3034 elimination_effects (SET_SRC (x), 0);
3035 return;
3037 case MEM:
3038 /* Our only special processing is to pass the mode of the MEM to our
3039 recursive call. */
3040 elimination_effects (XEXP (x, 0), GET_MODE (x));
3041 return;
3043 default:
3044 break;
3047 fmt = GET_RTX_FORMAT (code);
3048 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3050 if (*fmt == 'e')
3051 elimination_effects (XEXP (x, i), mem_mode);
3052 else if (*fmt == 'E')
3053 for (j = 0; j < XVECLEN (x, i); j++)
3054 elimination_effects (XVECEXP (x, i, j), mem_mode);
3058 /* Descend through rtx X and verify that no references to eliminable registers
3059 remain. If any do remain, mark the involved register as not
3060 eliminable. */
3062 static void
3063 check_eliminable_occurrences (rtx x)
3065 const char *fmt;
3066 int i;
3067 enum rtx_code code;
3069 if (x == 0)
3070 return;
3072 code = GET_CODE (x);
3074 if (code == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3076 struct elim_table *ep;
3078 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3079 if (ep->from_rtx == x)
3080 ep->can_eliminate = 0;
3081 return;
3084 fmt = GET_RTX_FORMAT (code);
3085 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3087 if (*fmt == 'e')
3088 check_eliminable_occurrences (XEXP (x, i));
3089 else if (*fmt == 'E')
3091 int j;
3092 for (j = 0; j < XVECLEN (x, i); j++)
3093 check_eliminable_occurrences (XVECEXP (x, i, j));
3098 /* Scan INSN and eliminate all eliminable registers in it.
3100 If REPLACE is nonzero, do the replacement destructively. Also
3101    delete the insn as dead if it is setting an eliminable register.
3103 If REPLACE is zero, do all our allocations in reload_obstack.
3105 If no eliminations were done and this insn doesn't require any elimination
3106 processing (these are not identical conditions: it might be updating sp,
3107 but not referencing fp; this needs to be seen during reload_as_needed so
3108 that the offset between fp and sp can be taken into consideration), zero
3109 is returned. Otherwise, 1 is returned. */
3111 static int
3112 eliminate_regs_in_insn (rtx insn, int replace)
3114 int icode = recog_memoized (insn);
3115 rtx old_body = PATTERN (insn);
3116 int insn_is_asm = asm_noperands (old_body) >= 0;
3117 rtx old_set = single_set (insn);
3118 rtx new_body;
3119 int val = 0;
3120 int i;
3121 rtx substed_operand[MAX_RECOG_OPERANDS];
3122 rtx orig_operand[MAX_RECOG_OPERANDS];
3123 struct elim_table *ep;
3124 rtx plus_src, plus_cst_src;
3126 if (! insn_is_asm && icode < 0)
3128 gcc_assert (GET_CODE (PATTERN (insn)) == USE
3129 || GET_CODE (PATTERN (insn)) == CLOBBER
3130 || GET_CODE (PATTERN (insn)) == ADDR_VEC
3131 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
3132 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
3133 return 0;
3136 if (old_set != 0 && REG_P (SET_DEST (old_set))
3137 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3139 /* Check for setting an eliminable register. */
3140 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3141 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3143 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3144 /* If this is setting the frame pointer register to the
3145 hardware frame pointer register and this is an elimination
3146 that will be done (tested above), this insn is really
3147 adjusting the frame pointer downward to compensate for
3148 the adjustment done before a nonlocal goto. */
3149 if (ep->from == FRAME_POINTER_REGNUM
3150 && ep->to == HARD_FRAME_POINTER_REGNUM)
3152 rtx base = SET_SRC (old_set);
3153 rtx base_insn = insn;
3154 HOST_WIDE_INT offset = 0;
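/* Peel constant additions off BASE, and follow it back through earlier
   insns that computed it, until BASE is the replacement register or no
   further progress can be made.  */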
3156 while (base != ep->to_rtx)
3158 rtx prev_insn, prev_set;
3160 if (GET_CODE (base) == PLUS
3161 && GET_CODE (XEXP (base, 1)) == CONST_INT)
3163 offset += INTVAL (XEXP (base, 1));
3164 base = XEXP (base, 0);
3166 else if ((prev_insn = prev_nonnote_insn (base_insn)) != 0
3167 && (prev_set = single_set (prev_insn)) != 0
3168 && rtx_equal_p (SET_DEST (prev_set), base))
3170 base = SET_SRC (prev_set);
3171 base_insn = prev_insn;
3173 else
3174 break;
3177 if (base == ep->to_rtx)
3179 rtx src
3180 = plus_constant (ep->to_rtx, offset - ep->offset);
3182 new_body = old_body;
3183 if (! replace)
3185 new_body = copy_insn (old_body);
3186 if (REG_NOTES (insn))
3187 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3189 PATTERN (insn) = new_body;
3190 old_set = single_set (insn);
3192 /* First see if this insn remains valid when we
3193 make the change. If not, keep the INSN_CODE
3194    the same and let reload fix it up.  */
3195 validate_change (insn, &SET_SRC (old_set), src, 1);
3196 validate_change (insn, &SET_DEST (old_set),
3197 ep->to_rtx, 1);
3198 if (! apply_change_group ())
3200 SET_SRC (old_set) = src;
3201 SET_DEST (old_set) = ep->to_rtx;
3204 val = 1;
3205 goto done;
3208 #endif
3210 /* In this case this insn isn't serving a useful purpose. We
3211 will delete it in reload_as_needed once we know that this
3212 elimination is, in fact, being done.
3214 If REPLACE isn't set, we can't delete this insn, but needn't
3215 process it since it won't be used unless something changes. */
3216 if (replace)
3218 delete_dead_insn (insn);
3219 return 1;
3221 val = 1;
3222 goto done;
3226 /* We allow one special case which happens to work on all machines we
3227 currently support: a single set with the source or a REG_EQUAL
3228 note being a PLUS of an eliminable register and a constant. */
3229 plus_src = plus_cst_src = 0;
3230 if (old_set && REG_P (SET_DEST (old_set)))
3232 if (GET_CODE (SET_SRC (old_set)) == PLUS)
3233 plus_src = SET_SRC (old_set);
3234 /* First see if the source is of the form (plus (...) CST). */
3235 if (plus_src
3236 && GET_CODE (XEXP (plus_src, 1)) == CONST_INT)
3237 plus_cst_src = plus_src;
3238 else if (REG_P (SET_SRC (old_set))
3239 || plus_src)
3241 /* Otherwise, see if we have a REG_EQUAL note of the form
3242 (plus (...) CST). */
3243 rtx links;
3244 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
3246 if ((REG_NOTE_KIND (links) == REG_EQUAL
3247 || REG_NOTE_KIND (links) == REG_EQUIV)
3248 && GET_CODE (XEXP (links, 0)) == PLUS
3249 && GET_CODE (XEXP (XEXP (links, 0), 1)) == CONST_INT)
3251 plus_cst_src = XEXP (links, 0);
3252 break;
3257 /* Check that the first operand of the PLUS is a hard reg or
3258 the lowpart subreg of one. */
3259 if (plus_cst_src)
3261 rtx reg = XEXP (plus_cst_src, 0);
3262 if (GET_CODE (reg) == SUBREG && subreg_lowpart_p (reg))
3263 reg = SUBREG_REG (reg);
3265 if (!REG_P (reg) || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
3266 plus_cst_src = 0;
3269 if (plus_cst_src)
3271 rtx reg = XEXP (plus_cst_src, 0);
3272 HOST_WIDE_INT offset = INTVAL (XEXP (plus_cst_src, 1));
3274 if (GET_CODE (reg) == SUBREG)
3275 reg = SUBREG_REG (reg);
3277 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3278 if (ep->from_rtx == reg && ep->can_eliminate)
3280 rtx to_rtx = ep->to_rtx;
3281 offset += ep->offset;
3282 offset = trunc_int_for_mode (offset, GET_MODE (reg));
3284 if (GET_CODE (XEXP (plus_cst_src, 0)) == SUBREG)
3285 to_rtx = gen_lowpart (GET_MODE (XEXP (plus_cst_src, 0)),
3286 to_rtx);
3287 /* If we have a nonzero offset, and the source is already
3288 a simple REG, the following transformation would
3289 increase the cost of the insn by replacing a simple REG
3290 with (plus (reg sp) CST). So try only when we already
3291 had a PLUS before. */
3292 if (offset == 0 || plus_src)
3294 rtx new_src = plus_constant (to_rtx, offset);
3296 new_body = old_body;
3297 if (! replace)
3299 new_body = copy_insn (old_body);
3300 if (REG_NOTES (insn))
3301 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3303 PATTERN (insn) = new_body;
3304 old_set = single_set (insn);
3306 /* First see if this insn remains valid when we make the
3307 change. If not, try to replace the whole pattern with
3308 a simple set (this may help if the original insn was a
3309 PARALLEL that was only recognized as single_set due to
3310 REG_UNUSED notes). If this isn't valid either, keep
3311 the INSN_CODE the same and let reload fix it up. */
3312 if (!validate_change (insn, &SET_SRC (old_set), new_src, 0))
3314 rtx new_pat = gen_rtx_SET (VOIDmode,
3315 SET_DEST (old_set), new_src);
3317 if (!validate_change (insn, &PATTERN (insn), new_pat, 0))
3318 SET_SRC (old_set) = new_src;
3321 else
3322 break;
3324 val = 1;
3325 /* This can't have an effect on elimination offsets, so skip right
3326 to the end. */
3327 goto done;
3331 /* Determine the effects of this insn on elimination offsets. */
3332 elimination_effects (old_body, 0);
3334 /* Eliminate all eliminable registers occurring in operands that
3335 can be handled by reload. */
3336 extract_insn (insn);
3337 for (i = 0; i < recog_data.n_operands; i++)
3339 orig_operand[i] = recog_data.operand[i];
3340 substed_operand[i] = recog_data.operand[i];
3342 /* For an asm statement, every operand is eliminable. */
3343 if (insn_is_asm || insn_data[icode].operand[i].eliminable)
3345 bool is_set_src, in_plus;
3347 /* Check for setting a register that we know about. */
3348 if (recog_data.operand_type[i] != OP_IN
3349 && REG_P (orig_operand[i]))
3351 /* If we are assigning to a register that can be eliminated, it
3352 must be as part of a PARALLEL, since the code above handles
3353 single SETs. We must indicate that we can no longer
3354 eliminate this reg. */
3355 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3356 ep++)
3357 if (ep->from_rtx == orig_operand[i])
3358 ep->can_eliminate = 0;
3361 /* Companion to the above plus substitution, we can allow
3362 invariants as the source of a plain move. */
3363 is_set_src = false;
3364 if (old_set && recog_data.operand_loc[i] == &SET_SRC (old_set))
3365 is_set_src = true;
3366 in_plus = false;
3367 if (plus_src
3368 && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
3369 || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
3370 in_plus = true;
3372 substed_operand[i]
3373 = eliminate_regs_1 (recog_data.operand[i], 0,
3374 replace ? insn : NULL_RTX,
3375 is_set_src || in_plus);
3376 if (substed_operand[i] != orig_operand[i])
3377 val = 1;
3378 /* Terminate the search in check_eliminable_occurrences at
3379 this point. */
3380 *recog_data.operand_loc[i] = 0;
3382 /* If an output operand changed from a REG to a MEM and INSN is an
3383 insn, write a CLOBBER insn. */
3384 if (recog_data.operand_type[i] != OP_IN
3385 && REG_P (orig_operand[i])
3386 && MEM_P (substed_operand[i])
3387 && replace)
3388 emit_insn_after (gen_rtx_CLOBBER (VOIDmode, orig_operand[i]),
3389 insn);
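/* Copy the (possibly zeroed) operand locations into the duplicate
   locations, so that check_eliminable_occurrences below skips them as
   well.  */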
3393 for (i = 0; i < recog_data.n_dups; i++)
3394 *recog_data.dup_loc[i]
3395 = *recog_data.operand_loc[(int) recog_data.dup_num[i]];
3397 /* If any eliminable registers remain, they aren't eliminable anymore.  */
3398 check_eliminable_occurrences (old_body);
3400 /* Substitute the operands; the new values are in the substed_operand
3401 array. */
3402 for (i = 0; i < recog_data.n_operands; i++)
3403 *recog_data.operand_loc[i] = substed_operand[i];
3404 for (i = 0; i < recog_data.n_dups; i++)
3405 *recog_data.dup_loc[i] = substed_operand[(int) recog_data.dup_num[i]];
3407 /* If we are replacing a body that was a (set X (plus Y Z)), try to
3408 re-recognize the insn. We do this in case we had a simple addition
3409 but now can do this as a load-address. This saves an insn in this
3410 common case.
3411 If re-recognition fails, the old insn code number will still be used,
3412 and some register operands may have changed into PLUS expressions.
3413 These will be handled by find_reloads by loading them into a register
3414 again. */
3416 if (val)
3418 /* If we aren't replacing things permanently and we changed something,
3419 make another copy to ensure that all the RTL is new. Otherwise
3420    things can go wrong if find_reloads swaps commutative operands
3421 and one is inside RTL that has been copied while the other is not. */
3422 new_body = old_body;
3423 if (! replace)
3425 new_body = copy_insn (old_body);
3426 if (REG_NOTES (insn))
3427 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3429 PATTERN (insn) = new_body;
3431 /* If we had a move insn but now we don't, rerecognize it. This will
3432 cause spurious re-recognition if the old move had a PARALLEL since
3433 the new one still will, but we can't call single_set without
3434 having put NEW_BODY into the insn and the re-recognition won't
3435 hurt in this rare case. */
3436 /* ??? Why this huge if statement - why don't we just rerecognize the
3437 thing always? */
3438 if (! insn_is_asm
3439 && old_set != 0
3440 && ((REG_P (SET_SRC (old_set))
3441 && (GET_CODE (new_body) != SET
3442 || !REG_P (SET_SRC (new_body))))
3443 /* If this was a load from or store to memory, compare
3444 the MEM in recog_data.operand to the one in the insn.
3445 If they are not equal, then rerecognize the insn. */
3446 || (old_set != 0
3447 && ((MEM_P (SET_SRC (old_set))
3448 && SET_SRC (old_set) != recog_data.operand[1])
3449 || (MEM_P (SET_DEST (old_set))
3450 && SET_DEST (old_set) != recog_data.operand[0])))
3451 /* If this was an add insn before, rerecognize. */
3452 || GET_CODE (SET_SRC (old_set)) == PLUS))
3454 int new_icode = recog (PATTERN (insn), insn, 0);
3455 if (new_icode >= 0)
3456 INSN_CODE (insn) = new_icode;
3460 /* Restore the old body. If there were any changes to it, we made a copy
3461 of it while the changes were still in place, so we'll correctly return
3462 a modified insn below. */
3463 if (! replace)
3465 /* Restore the old body. */
3466 for (i = 0; i < recog_data.n_operands; i++)
3467 *recog_data.operand_loc[i] = orig_operand[i];
3468 for (i = 0; i < recog_data.n_dups; i++)
3469 *recog_data.dup_loc[i] = orig_operand[(int) recog_data.dup_num[i]];
3472 /* Update all elimination pairs to reflect the status after the current
3473 insn. The changes we make were determined by the earlier call to
3474 elimination_effects.
3476 We also detect cases where register elimination cannot be done,
3477 namely, if a register would be both changed and referenced outside a MEM
3478 in the resulting insn since such an insn is often undefined and, even if
3479 not, we cannot know what meaning will be given to it. Note that it is
3480 valid to have a register used in an address in an insn that changes it
3481 (presumably with a pre- or post-increment or decrement).
3483 If anything changes, return nonzero. */
3485 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3487 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3488 ep->can_eliminate = 0;
3490 ep->ref_outside_mem = 0;
3492 if (ep->previous_offset != ep->offset)
3493 val = 1;
3496 done:
3497 /* If we changed something, perform elimination in REG_NOTES. This is
3498 needed even when REPLACE is zero because a REG_DEAD note might refer
3499 to a register that we eliminate and could cause a different number
3500 of spill registers to be needed in the final reload pass than in
3501 the pre-passes. */
3502 if (val && REG_NOTES (insn) != 0)
3503 REG_NOTES (insn)
3504 = eliminate_regs_1 (REG_NOTES (insn), 0, REG_NOTES (insn), true);
3506 return val;
3509 /* Loop through all elimination pairs.
3510 Recalculate the number not at initial offset.
3512 Compute the maximum offset (minimum offset if the stack does not
3513 grow downward) for each elimination pair. */
3515 static void
3516 update_eliminable_offsets (void)
3518 struct elim_table *ep;
3520 num_not_at_initial_offset = 0;
3521 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3523 ep->previous_offset = ep->offset;
3524 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3525 num_not_at_initial_offset++;
3529 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3530 replacement we currently believe is valid, mark it as not eliminable if X
3531 modifies DEST in any way other than by adding a constant integer to it.
3533 If DEST is the frame pointer, we do nothing because we assume that
3534 all assignments to the hard frame pointer are nonlocal gotos and are being
3535 done at a time when they are valid and do not disturb anything else.
3536 Some machines want to eliminate a fake argument pointer with either the
3537 frame or stack pointer. Assignments to the hard frame pointer must not
3538 prevent this elimination.
3540 Called via note_stores from reload before starting its passes to scan
3541 the insns of the function. */
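/* Illustrative sketch (not from the original source): with the usual
   elimination to the stack pointer, a store of the form
       (set (reg sp) (plus (reg sp) (const_int -16)))
   only adds a constant to the target register, so the elimination stays
   valid (the offset bookkeeping handles it), whereas something like
       (set (reg sp) (reg 3))
   modifies the target unpredictably and disables every elimination whose
   target is sp.  The concrete registers and constants are invented for
   illustration only.  */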
3543 static void
3544 mark_not_eliminable (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
3546 unsigned int i;
3548 /* A SUBREG of a hard register here is just changing its mode. We should
3549 not see a SUBREG of an eliminable hard register, but check just in
3550 case. */
3551 if (GET_CODE (dest) == SUBREG)
3552 dest = SUBREG_REG (dest);
3554 if (dest == hard_frame_pointer_rtx)
3555 return;
3557 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3558 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3559 && (GET_CODE (x) != SET
3560 || GET_CODE (SET_SRC (x)) != PLUS
3561 || XEXP (SET_SRC (x), 0) != dest
3562 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3564 reg_eliminate[i].can_eliminate_previous
3565 = reg_eliminate[i].can_eliminate = 0;
3566 num_eliminable--;
3570 /* Verify that the initial elimination offsets did not change since the
3571 last call to set_initial_elim_offsets. This is used to catch cases
3572 where something illegal happened during reload_as_needed that could
3573 cause incorrect code to be generated if we did not check for it. */
3575 static bool
3576 verify_initial_elim_offsets (void)
3578 HOST_WIDE_INT t;
3580 if (!num_eliminable)
3581 return true;
3583 #ifdef ELIMINABLE_REGS
3585 struct elim_table *ep;
3587 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3589 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t);
3590 if (t != ep->initial_offset)
3591 return false;
3594 #else
3595 INITIAL_FRAME_POINTER_OFFSET (t);
3596 if (t != reg_eliminate[0].initial_offset)
3597 return false;
3598 #endif
3600 return true;
3603 /* Reset all offsets on eliminable registers to their initial values. */
3605 static void
3606 set_initial_elim_offsets (void)
3608 struct elim_table *ep = reg_eliminate;
3610 #ifdef ELIMINABLE_REGS
3611 for (; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3613 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
3614 ep->previous_offset = ep->offset = ep->initial_offset;
3616 #else
3617 INITIAL_FRAME_POINTER_OFFSET (ep->initial_offset);
3618 ep->previous_offset = ep->offset = ep->initial_offset;
3619 #endif
3621 num_not_at_initial_offset = 0;
3624 /* Subroutine of set_initial_label_offsets called via for_each_eh_label. */
3626 static void
3627 set_initial_eh_label_offset (rtx label)
3629 set_label_offsets (label, NULL_RTX, 1);
3632 /* Initialize the known label offsets.
3633 Set a known offset for each forced label to be at the initial offset
3634 of each elimination. We do this because we assume that all
3635 computed jumps occur from a location where each elimination is
3636 at its initial offset.
3637 For all other labels, show that we don't know the offsets. */
3639 static void
3640 set_initial_label_offsets (void)
3642 rtx x;
3643 memset (offsets_known_at, 0, num_labels);
3645 for (x = forced_labels; x; x = XEXP (x, 1))
3646 if (XEXP (x, 0))
3647 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
3649 for_each_eh_label (set_initial_eh_label_offset);
3652 /* Set all elimination offsets to the known values for the code label given
3653 by INSN. */
3655 static void
3656 set_offsets_for_label (rtx insn)
3658 unsigned int i;
3659 int label_nr = CODE_LABEL_NUMBER (insn);
3660 struct elim_table *ep;
3662 num_not_at_initial_offset = 0;
3663 for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++)
3665 ep->offset = ep->previous_offset
3666 = offsets_at[label_nr - first_label_num][i];
3667 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3668 num_not_at_initial_offset++;
3672 /* See if anything that happened changes which eliminations are valid.
3673 For example, on the SPARC, whether or not the frame pointer can
3674 be eliminated can depend on what registers have been used. We need
3675 not check some conditions again (such as flag_omit_frame_pointer)
3676 since they can't have changed. */
3678 static void
3679 update_eliminables (HARD_REG_SET *pset)
3681 int previous_frame_pointer_needed = frame_pointer_needed;
3682 struct elim_table *ep;
3684 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3685 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
3686 #ifdef ELIMINABLE_REGS
3687 || ! CAN_ELIMINATE (ep->from, ep->to)
3688 #endif
3690 ep->can_eliminate = 0;
3692 /* Look for the case where we have discovered that we can't replace
3693 register A with register B and that means that we will now be
3694 trying to replace register A with register C. This means we can
3695 no longer replace register C with register B and we need to disable
3696 such an elimination, if it exists. This occurs often with A == ap,
3697 B == sp, and C == fp. */
3699 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3701 struct elim_table *op;
3702 int new_to = -1;
3704 if (! ep->can_eliminate && ep->can_eliminate_previous)
3706 /* Find the current elimination for ep->from, if there is a
3707 new one. */
3708 for (op = reg_eliminate;
3709 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3710 if (op->from == ep->from && op->can_eliminate)
3712 new_to = op->to;
3713 break;
3716 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
3717 disable it. */
3718 for (op = reg_eliminate;
3719 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3720 if (op->from == new_to && op->to == ep->to)
3721 op->can_eliminate = 0;
3725 /* See if any registers that we thought we could eliminate the previous
3726 time are no longer eliminable. If so, something has changed and we
3727 must spill the register. Also, recompute the number of eliminable
3728 registers and see if the frame pointer is needed; it is if there is
3729 no elimination of the frame pointer that we can perform. */
3731 frame_pointer_needed = 1;
3732 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3734 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
3735 && ep->to != HARD_FRAME_POINTER_REGNUM)
3736 frame_pointer_needed = 0;
3738 if (! ep->can_eliminate && ep->can_eliminate_previous)
3740 ep->can_eliminate_previous = 0;
3741 SET_HARD_REG_BIT (*pset, ep->from);
3742 num_eliminable--;
3746 /* If we didn't need a frame pointer last time, but we do now, spill
3747 the hard frame pointer. */
3748 if (frame_pointer_needed && ! previous_frame_pointer_needed)
3749 SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM);
3752 /* Return true if X is used as the target register of an elimination. */
3754 bool
3755 elimination_target_reg_p (rtx x)
3757 struct elim_table *ep;
3759 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3760 if (ep->to_rtx == x && ep->can_eliminate)
3761 return true;
3763 return false;
3766 /* Initialize the table of registers to eliminate. */
3768 static void
3769 init_elim_table (void)
3771 struct elim_table *ep;
3772 #ifdef ELIMINABLE_REGS
3773 const struct elim_table_1 *ep1;
3774 #endif
3776 if (!reg_eliminate)
3777 reg_eliminate = xcalloc (sizeof (struct elim_table), NUM_ELIMINABLE_REGS);
3779 /* Does this function require a frame pointer? */
3781 frame_pointer_needed = (! flag_omit_frame_pointer
3782 /* ?? If EXIT_IGNORE_STACK is set, we will not save
3783 and restore sp for alloca. So we can't eliminate
3784 the frame pointer in that case. At some point,
3785 we should improve this by emitting the
3786 sp-adjusting insns for this case. */
3787 || (current_function_calls_alloca
3788 && EXIT_IGNORE_STACK)
3789 || current_function_accesses_prior_frames
3790 || FRAME_POINTER_REQUIRED);
3792 num_eliminable = 0;
3794 #ifdef ELIMINABLE_REGS
3795 for (ep = reg_eliminate, ep1 = reg_eliminate_1;
3796 ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++, ep1++)
3798 ep->from = ep1->from;
3799 ep->to = ep1->to;
3800 ep->can_eliminate = ep->can_eliminate_previous
3801 = (CAN_ELIMINATE (ep->from, ep->to)
3802 && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
3804 #else
3805 reg_eliminate[0].from = reg_eliminate_1[0].from;
3806 reg_eliminate[0].to = reg_eliminate_1[0].to;
3807 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
3808 = ! frame_pointer_needed;
3809 #endif
3811 /* Count the number of eliminable registers and build the FROM and TO
3812 REG rtx's. Note that code in gen_rtx_REG will cause, e.g.,
3813 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
3814 We depend on this. */
3815 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3817 num_eliminable += ep->can_eliminate;
3818 ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
3819 ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
3823 /* Kick all pseudos out of hard register REGNO.
3825 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3826 because we found we can't eliminate some register. In this case, no pseudos
3827 are allowed to be in the register, even if they are only in a block that
3828 doesn't require spill registers, unlike the case when we are spilling this
3829 hard reg to produce another spill register.
3831 Any pseudos that have to be kicked out are recorded in spilled_pseudos. */
3833 static void
3834 spill_hard_reg (unsigned int regno, int cant_eliminate)
3836 int i;
3838 if (cant_eliminate)
3840 SET_HARD_REG_BIT (bad_spill_regs_global, regno);
3841 df_set_regs_ever_live (regno, true);
3844 /* Spill every pseudo reg that was allocated to this reg
3845 or to something that overlaps this reg. */
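  /* Illustrative sketch (not from the original source): on a 32-bit
     target, a DImode pseudo allocated to hard register 3 also occupies
     hard register 4, so spilling hard register 4 must kick that pseudo
     out as well.  The loop below catches this by checking that REGNO
     lies inside [reg_renumber[i], end_hard_regno (...)).  */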
3847 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3848 if (reg_renumber[i] >= 0
3849 && (unsigned int) reg_renumber[i] <= regno
3850 && end_hard_regno (PSEUDO_REGNO_MODE (i), reg_renumber[i]) > regno)
3851 SET_REGNO_REG_SET (&spilled_pseudos, i);
3854 /* After find_reload_regs has been run for all insns that need reloads,
3855 and/or spill_hard_reg was called, this function is used to actually
3856 spill pseudo registers and try to reallocate them. It also sets up the
3857 spill_regs array for use by choose_reload_regs. */
3859 static int
3860 finish_spills (int global)
3862 struct insn_chain *chain;
3863 int something_changed = 0;
3864 unsigned i;
3865 reg_set_iterator rsi;
3867 /* Build the spill_regs array for the function. */
3868 /* If there are some registers still to eliminate and one of the spill regs
3869 wasn't ever used before, additional stack space may have to be
3870 allocated to store this register. Thus, we may have changed the offset
3871 between the stack and frame pointers, so mark that something has changed.
3873 One might think that we need only set SOMETHING_CHANGED if this is a
3874 call-used register. However, the set of registers that must be saved by the
3875 prologue is not identical to the call-used set. For example, the
3876 register used by the call insn for the return PC is a call-used register,
3877 but must be saved by the prologue. */
3879 n_spills = 0;
3880 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3881 if (TEST_HARD_REG_BIT (used_spill_regs, i))
3883 spill_reg_order[i] = n_spills;
3884 spill_regs[n_spills++] = i;
3885 if (num_eliminable && ! df_regs_ever_live_p (i))
3886 something_changed = 1;
3887 df_set_regs_ever_live (i, true);
3889 else
3890 spill_reg_order[i] = -1;
3892 EXECUTE_IF_SET_IN_REG_SET (&spilled_pseudos, FIRST_PSEUDO_REGISTER, i, rsi)
3894 /* Record the current hard register the pseudo is allocated to in
3895 pseudo_previous_regs so we avoid reallocating it to the same
3896 hard reg in a later pass. */
3897 gcc_assert (reg_renumber[i] >= 0);
3899 SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]);
3900 /* Mark it as no longer having a hard register home. */
3901 reg_renumber[i] = -1;
3902 if (flag_ira)
3903 mark_allocation_change (i);
3904 /* We will need to scan everything again. */
3905 something_changed = 1;
3908 /* Retry global register allocation if possible. */
3909 if (global)
3911 memset (pseudo_forbidden_regs, 0, max_regno * sizeof (HARD_REG_SET));
3912 /* For every insn that needs reloads, set the registers used as spill
3913 regs in pseudo_forbidden_regs for every pseudo live across the
3914 insn. */
3915 for (chain = insns_need_reload; chain; chain = chain->next_need_reload)
3917 EXECUTE_IF_SET_IN_REG_SET
3918 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
3920 IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
3921 chain->used_spill_regs);
3923 EXECUTE_IF_SET_IN_REG_SET
3924 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
3926 IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
3927 chain->used_spill_regs);
3931 /* Retry allocating the spilled pseudos. For each reg, merge the
3932 various reg sets that indicate which hard regs can't be used,
3933 and call retry_global_alloc (or retry_ira_color when IRA is in use).
3934 We change spilled_pseudos here to only contain pseudos that did not
3935 get a new hard register.
3936 for (i = FIRST_PSEUDO_REGISTER; i < (unsigned)max_regno; i++)
3937 if (reg_old_renumber[i] != reg_renumber[i])
3939 HARD_REG_SET forbidden;
3941 COPY_HARD_REG_SET (forbidden, bad_spill_regs_global);
3942 IOR_HARD_REG_SET (forbidden, pseudo_forbidden_regs[i]);
3943 IOR_HARD_REG_SET (forbidden, pseudo_previous_regs[i]);
3944 if (flag_ira)
3946 /* The pseudo might have been migrated to another hard
3947 register on a previous iteration, so check for that. */
3948 if (reg_renumber [i] < 0)
3950 retry_ira_color (i, forbidden);
3951 if (reg_renumber[i] >= 0)
3952 something_changed = 1;
3955 else
3956 retry_global_alloc (i, forbidden);
3957 if (reg_renumber[i] >= 0)
3958 CLEAR_REGNO_REG_SET (&spilled_pseudos, i);
3962 /* Fix up the register information in the insn chain.
3963 This involves deleting those of the spilled pseudos which did not get
3964 a new hard register home from the live_throughout and dead_or_set sets. */
3965 for (chain = reload_insn_chain; chain; chain = chain->next)
3967 HARD_REG_SET used_by_pseudos;
3968 HARD_REG_SET used_by_pseudos2;
3970 AND_COMPL_REG_SET (&chain->live_throughout, &spilled_pseudos);
3971 AND_COMPL_REG_SET (&chain->dead_or_set, &spilled_pseudos);
3973 /* Mark any unallocated hard regs as available for spills. That
3974 makes inheritance work somewhat better. */
3975 if (chain->need_reload)
3977 REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
3978 REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
3979 IOR_HARD_REG_SET (used_by_pseudos, used_by_pseudos2);
3981 /* Save the old value for the sanity test below. */
3982 COPY_HARD_REG_SET (used_by_pseudos2, chain->used_spill_regs);
3984 compute_use_by_pseudos (&used_by_pseudos, &chain->live_throughout);
3985 compute_use_by_pseudos (&used_by_pseudos, &chain->dead_or_set);
3986 COMPL_HARD_REG_SET (chain->used_spill_regs, used_by_pseudos);
3987 AND_HARD_REG_SET (chain->used_spill_regs, used_spill_regs);
3989 /* Make sure we only enlarge the set. */
3990 gcc_assert (hard_reg_set_subset_p (used_by_pseudos2,
3991 chain->used_spill_regs));
3995 /* Let alter_reg modify the reg rtx's for the modified pseudos. */
3996 for (i = FIRST_PSEUDO_REGISTER; i < (unsigned)max_regno; i++)
3998 int regno = reg_renumber[i];
3999 if (reg_old_renumber[i] == regno)
4000 continue;
4002 alter_reg (i, reg_old_renumber[i], false);
4003 reg_old_renumber[i] = regno;
4004 if (dump_file)
4006 if (regno == -1)
4007 fprintf (dump_file, " Register %d now on stack.\n\n", i);
4008 else
4009 fprintf (dump_file, " Register %d now in %d.\n\n",
4010 i, reg_renumber[i]);
4014 return something_changed;
4017 /* Find all paradoxical subregs within X and update reg_max_ref_width. */
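/* Illustrative sketch (not from the original source): given
   (subreg:DI (reg:SI 100) 0) on a target where DImode is 8 bytes wide,
   the SUBREG case below records reg_max_ref_width[100] = 8, so that if
   pseudo 100 later ends up on the stack its slot is made wide enough
   for the paradoxical reference, not just the 4 bytes of SImode.  */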
4019 static void
4020 scan_paradoxical_subregs (rtx x)
4022 int i;
4023 const char *fmt;
4024 enum rtx_code code = GET_CODE (x);
4026 switch (code)
4028 case REG:
4029 case CONST_INT:
4030 case CONST:
4031 case SYMBOL_REF:
4032 case LABEL_REF:
4033 case CONST_DOUBLE:
4034 case CONST_FIXED:
4035 case CONST_VECTOR: /* shouldn't happen, but just in case. */
4036 case CC0:
4037 case PC:
4038 case USE:
4039 case CLOBBER:
4040 return;
4042 case SUBREG:
4043 if (REG_P (SUBREG_REG (x))
4044 && (GET_MODE_SIZE (GET_MODE (x))
4045 > reg_max_ref_width[REGNO (SUBREG_REG (x))]))
4047 reg_max_ref_width[REGNO (SUBREG_REG (x))]
4048 = GET_MODE_SIZE (GET_MODE (x));
4049 mark_home_live_1 (REGNO (SUBREG_REG (x)), GET_MODE (x));
4051 return;
4053 default:
4054 break;
4057 fmt = GET_RTX_FORMAT (code);
4058 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4060 if (fmt[i] == 'e')
4061 scan_paradoxical_subregs (XEXP (x, i));
4062 else if (fmt[i] == 'E')
4064 int j;
4065 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4066 scan_paradoxical_subregs (XVECEXP (x, i, j));
4071 /* A subroutine of reload_as_needed. If INSN has a REG_EH_REGION note,
4072 examine all of the reload insns between PREV and NEXT exclusive, and
4073 annotate all that may trap. */
4075 static void
4076 fixup_eh_region_note (rtx insn, rtx prev, rtx next)
4078 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
4079 unsigned int trap_count;
4080 rtx i;
4082 if (note == NULL)
4083 return;
4085 if (may_trap_p (PATTERN (insn)))
4086 trap_count = 1;
4087 else
4089 remove_note (insn, note);
4090 trap_count = 0;
4093 for (i = NEXT_INSN (prev); i != next; i = NEXT_INSN (i))
4094 if (INSN_P (i) && i != insn && may_trap_p (PATTERN (i)))
4096 trap_count++;
4097 REG_NOTES (i)
4098 = gen_rtx_EXPR_LIST (REG_EH_REGION, XEXP (note, 0), REG_NOTES (i));
4102 /* Reload pseudo-registers into hard regs around each insn as needed.
4103 Additional register load insns are output before the insn that needs it
4104 and perhaps store insns after insns that modify the reloaded pseudo reg.
4106 reg_last_reload_reg and reg_reloaded_contents keep track of
4107 which registers are already available in reload registers.
4108 We update these for the reloads that we perform,
4109 as the insns are scanned. */
4111 static void
4112 reload_as_needed (int live_known)
4114 struct insn_chain *chain;
4115 #if defined (AUTO_INC_DEC)
4116 int i;
4117 #endif
4118 rtx x;
4120 memset (spill_reg_rtx, 0, sizeof spill_reg_rtx);
4121 memset (spill_reg_store, 0, sizeof spill_reg_store);
4122 reg_last_reload_reg = XCNEWVEC (rtx, max_regno);
4123 INIT_REG_SET (&reg_has_output_reload);
4124 CLEAR_HARD_REG_SET (reg_reloaded_valid);
4125 CLEAR_HARD_REG_SET (reg_reloaded_call_part_clobbered);
4127 set_initial_elim_offsets ();
4129 for (chain = reload_insn_chain; chain; chain = chain->next)
4131 rtx prev = 0;
4132 rtx insn = chain->insn;
4133 rtx old_next = NEXT_INSN (insn);
4135 /* If we pass a label, copy the offsets from the label information
4136 into the current offsets of each elimination. */
4137 if (LABEL_P (insn))
4138 set_offsets_for_label (insn);
4140 else if (INSN_P (insn))
4142 regset_head regs_to_forget;
4143 INIT_REG_SET (&regs_to_forget);
4144 note_stores (PATTERN (insn), forget_old_reloads_1, &regs_to_forget);
4146 /* If this is a USE or CLOBBER of a MEM, ensure that any
4147 references to eliminable registers have been removed. */
4149 if ((GET_CODE (PATTERN (insn)) == USE
4150 || GET_CODE (PATTERN (insn)) == CLOBBER)
4151 && MEM_P (XEXP (PATTERN (insn), 0)))
4152 XEXP (XEXP (PATTERN (insn), 0), 0)
4153 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
4154 GET_MODE (XEXP (PATTERN (insn), 0)),
4155 NULL_RTX);
4157 /* If we need to do register elimination processing, do so.
4158 This might delete the insn, in which case we are done. */
4159 if ((num_eliminable || num_eliminable_invariants) && chain->need_elim)
4161 eliminate_regs_in_insn (insn, 1);
4162 if (NOTE_P (insn))
4164 update_eliminable_offsets ();
4165 CLEAR_REG_SET (&regs_to_forget);
4166 continue;
4170 /* If need_elim is nonzero but need_reload is zero, one might think
4171 that we could simply set n_reloads to 0. However, find_reloads
4172 could have done some manipulation of the insn (such as swapping
4173 commutative operands), and these manipulations are lost during
4174 the first pass for every insn that needs register elimination.
4175 So the actions of find_reloads must be redone here. */
4177 if (! chain->need_elim && ! chain->need_reload
4178 && ! chain->need_operand_change)
4179 n_reloads = 0;
4180 /* First find the pseudo regs that must be reloaded for this insn.
4181 This info is returned in the tables reload_... (see reload.h).
4182 Also modify the body of INSN by substituting RELOAD
4183 rtx's for those pseudo regs. */
4184 else
4186 CLEAR_REG_SET (&reg_has_output_reload);
4187 CLEAR_HARD_REG_SET (reg_is_output_reload);
4189 find_reloads (insn, 1, spill_indirect_levels, live_known,
4190 spill_reg_order);
4193 if (n_reloads > 0)
4195 rtx next = NEXT_INSN (insn);
4196 rtx p;
4198 prev = PREV_INSN (insn);
4200 /* Now compute which reload regs to reload them into. Perhaps
4201 reusing reload regs from previous insns, or else output
4202 load insns to reload them. Maybe output store insns too.
4203 Record the choices of reload reg in reload_reg_rtx. */
4204 choose_reload_regs (chain);
4206 /* Merge any reloads that we didn't combine for fear of
4207 increasing the number of spill registers needed but now
4208 discover can be safely merged. */
4209 if (SMALL_REGISTER_CLASSES)
4210 merge_assigned_reloads (insn);
4212 /* Generate the insns to reload operands into or out of
4213 their reload regs. */
4214 emit_reload_insns (chain);
4216 /* Substitute the chosen reload regs from reload_reg_rtx
4217 into the insn's body (or perhaps into the bodies of other
4218 load and store insns that we just made for reloading
4219 and that we moved the structure into). */
4220 subst_reloads (insn);
4222 /* Adjust the exception region notes for loads and stores. */
4223 if (flag_non_call_exceptions && !CALL_P (insn))
4224 fixup_eh_region_note (insn, prev, next);
4226 /* If this was an ASM, make sure that all the reload insns
4227 we have generated are valid. If not, give an error
4228 and delete them. */
4229 if (asm_noperands (PATTERN (insn)) >= 0)
4230 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
4231 if (p != insn && INSN_P (p)
4232 && GET_CODE (PATTERN (p)) != USE
4233 && (recog_memoized (p) < 0
4234 || (extract_insn (p), ! constrain_operands (1))))
4236 error_for_asm (insn,
4237 "%<asm%> operand requires "
4238 "impossible reload");
4239 delete_insn (p);
4243 if (num_eliminable && chain->need_elim)
4244 update_eliminable_offsets ();
4246 /* Any previously reloaded spilled pseudo reg, stored in this insn,
4247 is no longer validly lying around to save a future reload.
4248 Note that this does not detect pseudos that were reloaded
4249 for this insn in order to be stored into
4250 (obeying register constraints). That is correct; such reload
4251 registers ARE still valid. */
4252 forget_marked_reloads (&regs_to_forget);
4253 CLEAR_REG_SET (&regs_to_forget);
4255 /* There may have been CLOBBER insns placed after INSN. So scan
4256 between INSN and NEXT and use them to forget old reloads. */
4257 for (x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
4258 if (NONJUMP_INSN_P (x) && GET_CODE (PATTERN (x)) == CLOBBER)
4259 note_stores (PATTERN (x), forget_old_reloads_1, NULL);
4261 #ifdef AUTO_INC_DEC
4262 /* Likewise for regs altered by auto-increment in this insn.
4263 REG_INC notes have been changed by reloading:
4264 find_reloads_address_1 records substitutions for them,
4265 which have been performed by subst_reloads above. */
4266 for (i = n_reloads - 1; i >= 0; i--)
4268 rtx in_reg = rld[i].in_reg;
4269 if (in_reg)
4271 enum rtx_code code = GET_CODE (in_reg);
4272 /* PRE_INC / PRE_DEC will have the reload register ending up
4273 with the same value as the stack slot, but that doesn't
4274 hold true for POST_INC / POST_DEC. Either we have to
4275 convert the memory access to a true POST_INC / POST_DEC,
4276 or we can't use the reload register for inheritance. */
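		/* Rough sketch (not from the original source): if the insn
		   contained (mem (post_inc (reg 100))) and (reg 100) was
		   reloaded into hard register r3, r3 still holds the
		   pre-increment value afterwards.  The code below looks for
		   the single emitted insn that uses r3 and tries to rewrite
		   that use as (post_inc r3); only if the rewritten insn
		   still satisfies its constraints is r3 recorded as holding
		   the updated value so it can be inherited later.  The
		   register names are invented for illustration only.  */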
4277 if ((code == POST_INC || code == POST_DEC)
4278 && TEST_HARD_REG_BIT (reg_reloaded_valid,
4279 REGNO (rld[i].reg_rtx))
4280 /* Make sure it is the inc/dec pseudo, and not
4281 some other (e.g. output operand) pseudo. */
4282 && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4283 == REGNO (XEXP (in_reg, 0))))
4286 rtx reload_reg = rld[i].reg_rtx;
4287 enum machine_mode mode = GET_MODE (reload_reg);
4288 int n = 0;
4289 rtx p;
4291 for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p))
4293 /* We really want to ignore REG_INC notes here, so
4294 use PATTERN (p) as argument to reg_set_p. */
4295 if (reg_set_p (reload_reg, PATTERN (p)))
4296 break;
4297 n = count_occurrences (PATTERN (p), reload_reg, 0);
4298 if (! n)
4299 continue;
4300 if (n == 1)
4302 n = validate_replace_rtx (reload_reg,
4303 gen_rtx_fmt_e (code,
4304 mode,
4305 reload_reg),
4308 /* We must also verify that the constraints
4309 are met after the replacement. */
4310 extract_insn (p);
4311 if (n)
4312 n = constrain_operands (1);
4313 else
4314 break;
4316 /* If the constraints were not met, then
4317 undo the replacement. */
4318 if (!n)
4320 validate_replace_rtx (gen_rtx_fmt_e (code,
4321 mode,
4322 reload_reg),
4323 reload_reg, p);
4324 break;
4328 break;
4330 if (n == 1)
4332 REG_NOTES (p)
4333 = gen_rtx_EXPR_LIST (REG_INC, reload_reg,
4334 REG_NOTES (p));
4335 /* Mark this as having an output reload so that the
4336 REG_INC processing code below won't invalidate
4337 the reload for inheritance. */
4338 SET_HARD_REG_BIT (reg_is_output_reload,
4339 REGNO (reload_reg));
4340 SET_REGNO_REG_SET (&reg_has_output_reload,
4341 REGNO (XEXP (in_reg, 0)));
4343 else
4344 forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX,
4345 NULL);
4347 else if ((code == PRE_INC || code == PRE_DEC)
4348 && TEST_HARD_REG_BIT (reg_reloaded_valid,
4349 REGNO (rld[i].reg_rtx))
4350 /* Make sure it is the inc/dec pseudo, and not
4351 some other (e.g. output operand) pseudo. */
4352 && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4353 == REGNO (XEXP (in_reg, 0))))
4355 SET_HARD_REG_BIT (reg_is_output_reload,
4356 REGNO (rld[i].reg_rtx));
4357 SET_REGNO_REG_SET (&reg_has_output_reload,
4358 REGNO (XEXP (in_reg, 0)));
4362 /* If a pseudo that got a hard register is auto-incremented,
4363 we must purge records of copying it into pseudos without
4364 hard registers. */
4365 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
4366 if (REG_NOTE_KIND (x) == REG_INC)
4368 /* See if this pseudo reg was reloaded in this insn.
4369 If so, its last-reload info is still valid
4370 because it is based on this insn's reload. */
4371 for (i = 0; i < n_reloads; i++)
4372 if (rld[i].out == XEXP (x, 0))
4373 break;
4375 if (i == n_reloads)
4376 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX, NULL);
4378 #endif
4380 /* A reload reg's contents are unknown after a label. */
4381 if (LABEL_P (insn))
4382 CLEAR_HARD_REG_SET (reg_reloaded_valid);
4384 /* Don't assume a reload reg is still good after a call insn
4385 if it is a call-used reg, or if it contains a value that will
4386 be partially clobbered by the call. */
4387 else if (CALL_P (insn))
4389 AND_COMPL_HARD_REG_SET (reg_reloaded_valid, call_used_reg_set);
4390 AND_COMPL_HARD_REG_SET (reg_reloaded_valid, reg_reloaded_call_part_clobbered);
4394 /* Clean up. */
4395 free (reg_last_reload_reg);
4396 CLEAR_REG_SET (&reg_has_output_reload);
4399 /* Discard all record of any value reloaded from X,
4400 or reloaded in X from someplace else;
4401 unless X is an output reload reg of the current insn.
4403 X may be a hard reg (the reload reg)
4404 or it may be a pseudo reg that was reloaded from.
4406 When DATA is non-NULL just mark the registers in regset
4407 to be forgotten later. */
4409 static void
4410 forget_old_reloads_1 (rtx x, const_rtx ignored ATTRIBUTE_UNUSED,
4411 void *data)
4413 unsigned int regno;
4414 unsigned int nr;
4415 regset regs = (regset) data;
4417 /* note_stores does give us subregs of hard regs,
4418 subreg_regno_offset requires a hard reg. */
4419 while (GET_CODE (x) == SUBREG)
4421 /* We ignore the subreg offset when calculating the regno,
4422 because we are using the entire underlying hard register
4423 below. */
4424 x = SUBREG_REG (x);
4427 if (!REG_P (x))
4428 return;
4430 regno = REGNO (x);
4432 if (regno >= FIRST_PSEUDO_REGISTER)
4433 nr = 1;
4434 else
4436 unsigned int i;
4438 nr = hard_regno_nregs[regno][GET_MODE (x)];
4439 /* Storing into a spilled-reg invalidates its contents.
4440 This can happen if a block-local pseudo is allocated to that reg
4441 and it wasn't spilled because this block's total need is 0.
4442 Then some insn might have an optional reload and use this reg. */
4443 if (!regs)
4444 for (i = 0; i < nr; i++)
4445 /* But don't do this if the reg actually serves as an output
4446 reload reg in the current instruction. */
4447 if (n_reloads == 0
4448 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
4450 CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
4451 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered, regno + i);
4452 spill_reg_store[regno + i] = 0;
4456 if (regs)
4457 while (nr-- > 0)
4458 SET_REGNO_REG_SET (regs, regno + nr);
4459 else
4461 /* Since value of X has changed,
4462 forget any value previously copied from it. */
4464 while (nr-- > 0)
4465 /* But don't forget a copy if this is the output reload
4466 that establishes the copy's validity. */
4467 if (n_reloads == 0
4468 || !REGNO_REG_SET_P (&reg_has_output_reload, regno + nr))
4469 reg_last_reload_reg[regno + nr] = 0;
4473 /* Forget the reloads marked in regset by previous function. */
4474 static void
4475 forget_marked_reloads (regset regs)
4477 unsigned int reg;
4478 reg_set_iterator rsi;
4479 EXECUTE_IF_SET_IN_REG_SET (regs, 0, reg, rsi)
4481 if (reg < FIRST_PSEUDO_REGISTER
4482 /* But don't do this if the reg actually serves as an output
4483 reload reg in the current instruction. */
4484 && (n_reloads == 0
4485 || ! TEST_HARD_REG_BIT (reg_is_output_reload, reg)))
4487 CLEAR_HARD_REG_BIT (reg_reloaded_valid, reg);
4488 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered, reg);
4489 spill_reg_store[reg] = 0;
4491 if (n_reloads == 0
4492 || !REGNO_REG_SET_P (&reg_has_output_reload, reg))
4493 reg_last_reload_reg[reg] = 0;
4497 /* The following HARD_REG_SETs indicate when each hard register is
4498 used for a reload of various parts of the current insn. */
4500 /* If reg is unavailable for all reloads. */
4501 static HARD_REG_SET reload_reg_unavailable;
4502 /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4503 static HARD_REG_SET reload_reg_used;
4504 /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4505 static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4506 /* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I. */
4507 static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
4508 /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4509 static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4510 /* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I. */
4511 static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
4512 /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4513 static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4514 /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4515 static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4516 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4517 static HARD_REG_SET reload_reg_used_in_op_addr;
4518 /* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4519 static HARD_REG_SET reload_reg_used_in_op_addr_reload;
4520 /* If reg is in use for a RELOAD_FOR_INSN reload. */
4521 static HARD_REG_SET reload_reg_used_in_insn;
4522 /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4523 static HARD_REG_SET reload_reg_used_in_other_addr;
4525 /* If reg is in use as a reload reg for any sort of reload. */
4526 static HARD_REG_SET reload_reg_used_at_all;
4528 /* If reg is used as an inherited reload. We just mark the first register
4529 in the group. */
4530 static HARD_REG_SET reload_reg_used_for_inherit;
4532 /* Records which hard regs are used in any way, either as explicit use or
4533 by being allocated to a pseudo during any point of the current insn. */
4534 static HARD_REG_SET reg_used_in_insn;
4536 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4537 TYPE. MODE is used to indicate how many consecutive regs are
4538 actually used. */
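/* Illustrative sketch (not from the original source): a call such as
       mark_reload_reg_in_use (4, 0, RELOAD_FOR_INPUT, DImode);
   on a 32-bit target where hard_regno_nregs[4][DImode] == 2 marks hard
   registers 4 and 5 in reload_reg_used_in_input[0] (and in
   reload_reg_used_at_all), since the loop below walks all NREGS
   consecutive registers starting at REGNO.  */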
4540 static void
4541 mark_reload_reg_in_use (unsigned int regno, int opnum, enum reload_type type,
4542 enum machine_mode mode)
4544 unsigned int nregs = hard_regno_nregs[regno][mode];
4545 unsigned int i;
4547 for (i = regno; i < nregs + regno; i++)
4549 switch (type)
4551 case RELOAD_OTHER:
4552 SET_HARD_REG_BIT (reload_reg_used, i);
4553 break;
4555 case RELOAD_FOR_INPUT_ADDRESS:
4556 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4557 break;
4559 case RELOAD_FOR_INPADDR_ADDRESS:
4560 SET_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4561 break;
4563 case RELOAD_FOR_OUTPUT_ADDRESS:
4564 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4565 break;
4567 case RELOAD_FOR_OUTADDR_ADDRESS:
4568 SET_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4569 break;
4571 case RELOAD_FOR_OPERAND_ADDRESS:
4572 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4573 break;
4575 case RELOAD_FOR_OPADDR_ADDR:
4576 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4577 break;
4579 case RELOAD_FOR_OTHER_ADDRESS:
4580 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4581 break;
4583 case RELOAD_FOR_INPUT:
4584 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4585 break;
4587 case RELOAD_FOR_OUTPUT:
4588 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4589 break;
4591 case RELOAD_FOR_INSN:
4592 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
4593 break;
4596 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4600 /* Similarly, but show REGNO is no longer in use for a reload. */
4602 static void
4603 clear_reload_reg_in_use (unsigned int regno, int opnum,
4604 enum reload_type type, enum machine_mode mode)
4606 unsigned int nregs = hard_regno_nregs[regno][mode];
4607 unsigned int start_regno, end_regno, r;
4608 int i;
4609 /* A complication is that for some reload types, inheritance might
4610 allow multiple reloads of the same type to share a reload register.
4611 We set check_opnum if we have to check only reloads with the same
4612 operand number, and check_any if we have to check all reloads. */
4613 int check_opnum = 0;
4614 int check_any = 0;
4615 HARD_REG_SET *used_in_set;
4617 switch (type)
4619 case RELOAD_OTHER:
4620 used_in_set = &reload_reg_used;
4621 break;
4623 case RELOAD_FOR_INPUT_ADDRESS:
4624 used_in_set = &reload_reg_used_in_input_addr[opnum];
4625 break;
4627 case RELOAD_FOR_INPADDR_ADDRESS:
4628 check_opnum = 1;
4629 used_in_set = &reload_reg_used_in_inpaddr_addr[opnum];
4630 break;
4632 case RELOAD_FOR_OUTPUT_ADDRESS:
4633 used_in_set = &reload_reg_used_in_output_addr[opnum];
4634 break;
4636 case RELOAD_FOR_OUTADDR_ADDRESS:
4637 check_opnum = 1;
4638 used_in_set = &reload_reg_used_in_outaddr_addr[opnum];
4639 break;
4641 case RELOAD_FOR_OPERAND_ADDRESS:
4642 used_in_set = &reload_reg_used_in_op_addr;
4643 break;
4645 case RELOAD_FOR_OPADDR_ADDR:
4646 check_any = 1;
4647 used_in_set = &reload_reg_used_in_op_addr_reload;
4648 break;
4650 case RELOAD_FOR_OTHER_ADDRESS:
4651 used_in_set = &reload_reg_used_in_other_addr;
4652 check_any = 1;
4653 break;
4655 case RELOAD_FOR_INPUT:
4656 used_in_set = &reload_reg_used_in_input[opnum];
4657 break;
4659 case RELOAD_FOR_OUTPUT:
4660 used_in_set = &reload_reg_used_in_output[opnum];
4661 break;
4663 case RELOAD_FOR_INSN:
4664 used_in_set = &reload_reg_used_in_insn;
4665 break;
4666 default:
4667 gcc_unreachable ();
4669 /* We resolve conflicts with remaining reloads of the same type by
4670 excluding the intervals of reload registers by them from the
4671 interval of freed reload registers. Since we only keep track of
4672 one set of interval bounds, we might have to exclude somewhat
4673 more than what would be necessary if we used a HARD_REG_SET here.
4674 But this should only happen very infrequently, so there should
4675 be no reason to worry about it. */
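  /* Worked example (not from the original source): suppose we are
     freeing hard registers 4..7 (START_REGNO 4, END_REGNO 8) and a
     remaining reload of the same type holds registers 5..6.  The first
     test below does not fire (5 > 4), the second one does (5 < 8), so
     END_REGNO becomes 5 and only register 4 is cleared -- register 7
     stays marked even though it is not actually in use.  That is the
     "somewhat more than necessary" case mentioned above, accepted
     because it is rare.  */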
4677 start_regno = regno;
4678 end_regno = regno + nregs;
4679 if (check_opnum || check_any)
4681 for (i = n_reloads - 1; i >= 0; i--)
4683 if (rld[i].when_needed == type
4684 && (check_any || rld[i].opnum == opnum)
4685 && rld[i].reg_rtx)
4687 unsigned int conflict_start = true_regnum (rld[i].reg_rtx);
4688 unsigned int conflict_end
4689 = end_hard_regno (rld[i].mode, conflict_start);
4691 /* If there is an overlap with the first to-be-freed register,
4692 adjust the interval start. */
4693 if (conflict_start <= start_regno && conflict_end > start_regno)
4694 start_regno = conflict_end;
4695 /* Otherwise, if there is a conflict with one of the other
4696 to-be-freed registers, adjust the interval end. */
4697 if (conflict_start > start_regno && conflict_start < end_regno)
4698 end_regno = conflict_start;
4703 for (r = start_regno; r < end_regno; r++)
4704 CLEAR_HARD_REG_BIT (*used_in_set, r);
4707 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
4708 specified by OPNUM and TYPE. */
4710 static int
4711 reload_reg_free_p (unsigned int regno, int opnum, enum reload_type type)
4713 int i;
4715 /* In use for a RELOAD_OTHER means it's not available for anything. */
4716 if (TEST_HARD_REG_BIT (reload_reg_used, regno)
4717 || TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
4718 return 0;
4720 switch (type)
4722 case RELOAD_OTHER:
4723 /* In use for anything means we can't use it for RELOAD_OTHER. */
4724 if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4725 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4726 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
4727 || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4728 return 0;
4730 for (i = 0; i < reload_n_operands; i++)
4731 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4732 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4733 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4734 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4735 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4736 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4737 return 0;
4739 return 1;
4741 case RELOAD_FOR_INPUT:
4742 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4743 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4744 return 0;
4746 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4747 return 0;
4749 /* If it is used for some other input, can't use it. */
4750 for (i = 0; i < reload_n_operands; i++)
4751 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4752 return 0;
4754 /* If it is used in a later operand's address, can't use it. */
4755 for (i = opnum + 1; i < reload_n_operands; i++)
4756 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4757 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4758 return 0;
4760 return 1;
4762 case RELOAD_FOR_INPUT_ADDRESS:
4763 /* Can't use a register if it is used for an input address for this
4764 operand or used as an input in an earlier one. */
4765 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
4766 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4767 return 0;
4769 for (i = 0; i < opnum; i++)
4770 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4771 return 0;
4773 return 1;
4775 case RELOAD_FOR_INPADDR_ADDRESS:
4776 /* Can't use a register if it is used for an input address
4777 for this operand or used as an input in an earlier
4778 one. */
4779 if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4780 return 0;
4782 for (i = 0; i < opnum; i++)
4783 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4784 return 0;
4786 return 1;
4788 case RELOAD_FOR_OUTPUT_ADDRESS:
4789 /* Can't use a register if it is used for an output address for this
4790 operand or used as an output in this or a later operand. Note
4791 that multiple output operands are emitted in reverse order, so
4792 the conflicting ones are those with lower indices. */
4793 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4794 return 0;
4796 for (i = 0; i <= opnum; i++)
4797 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4798 return 0;
4800 return 1;
4802 case RELOAD_FOR_OUTADDR_ADDRESS:
4803 /* Can't use a register if it is used for an output address
4804 for this operand or used as an output in this or a
4805 later operand. Note that multiple output operands are
4806 emitted in reverse order, so the conflicting ones are
4807 those with lower indices. */
4808 if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4809 return 0;
4811 for (i = 0; i <= opnum; i++)
4812 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4813 return 0;
4815 return 1;
4817 case RELOAD_FOR_OPERAND_ADDRESS:
4818 for (i = 0; i < reload_n_operands; i++)
4819 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4820 return 0;
4822 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4823 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4825 case RELOAD_FOR_OPADDR_ADDR:
4826 for (i = 0; i < reload_n_operands; i++)
4827 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4828 return 0;
4830 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
4832 case RELOAD_FOR_OUTPUT:
4833 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4834 outputs, or an operand address for this or an earlier output.
4835 Note that multiple output operands are emitted in reverse order,
4836 so the conflicting ones are those with higher indices. */
4837 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4838 return 0;
4840 for (i = 0; i < reload_n_operands; i++)
4841 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4842 return 0;
4844 for (i = opnum; i < reload_n_operands; i++)
4845 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4846 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4847 return 0;
4849 return 1;
4851 case RELOAD_FOR_INSN:
4852 for (i = 0; i < reload_n_operands; i++)
4853 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4854 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4855 return 0;
4857 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4858 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4860 case RELOAD_FOR_OTHER_ADDRESS:
4861 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4863 default:
4864 gcc_unreachable ();
4868 /* Return 1 if the value in reload reg REGNO, as used by a reload
4869 needed for the part of the insn specified by OPNUM and TYPE,
4870 is still available in REGNO at the end of the insn.
4872 We can assume that the reload reg was already tested for availability
4873 at the time it is needed, and we should not check this again,
4874 in case the reg has already been marked in use. */
4876 static int
4877 reload_reg_reaches_end_p (unsigned int regno, int opnum, enum reload_type type)
4879 int i;
4881 switch (type)
4883 case RELOAD_OTHER:
4884 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4885 its value must reach the end. */
4886 return 1;
4888 /* If this use is for part of the insn,
4889 its value reaches the end if no subsequent part uses the same register.
4890 Just like the above function, don't try to do this with lots
4891 of fallthroughs. */
4893 case RELOAD_FOR_OTHER_ADDRESS:
4894 /* Here we check for everything else, since these don't conflict
4895 with anything else and everything comes later. */
4897 for (i = 0; i < reload_n_operands; i++)
4898 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4899 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4900 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4901 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4902 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4903 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4904 return 0;
4906 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4907 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
4908 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4909 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4911 case RELOAD_FOR_INPUT_ADDRESS:
4912 case RELOAD_FOR_INPADDR_ADDRESS:
4913 /* Similar, except that we check only for this and subsequent inputs
4914 and the address of only subsequent inputs and we do not need
4915 to check for RELOAD_OTHER objects since they are known not to
4916 conflict. */
4918 for (i = opnum; i < reload_n_operands; i++)
4919 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4920 return 0;
4922 for (i = opnum + 1; i < reload_n_operands; i++)
4923 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4924 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4925 return 0;
4927 for (i = 0; i < reload_n_operands; i++)
4928 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4929 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4930 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4931 return 0;
4933 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4934 return 0;
4936 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4937 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4938 && !TEST_HARD_REG_BIT (reload_reg_used, regno));
4940 case RELOAD_FOR_INPUT:
4941 /* Similar to input address, except we start at the next operand for
4942 both input and input address and we do not check for
4943 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4944 would conflict. */
4946 for (i = opnum + 1; i < reload_n_operands; i++)
4947 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4948 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4949 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4950 return 0;
4952 /* ... fall through ... */
4954 case RELOAD_FOR_OPERAND_ADDRESS:
4955 /* Check outputs and their addresses. */
4957 for (i = 0; i < reload_n_operands; i++)
4958 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4959 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4960 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4961 return 0;
4963 return (!TEST_HARD_REG_BIT (reload_reg_used, regno));
4965 case RELOAD_FOR_OPADDR_ADDR:
4966 for (i = 0; i < reload_n_operands; i++)
4967 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4968 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4969 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4970 return 0;
4972 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4973 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4974 && !TEST_HARD_REG_BIT (reload_reg_used, regno));
4976 case RELOAD_FOR_INSN:
4977 /* These conflict with other outputs with RELOAD_OTHER. So
4978 we need only check for output addresses. */
4980 opnum = reload_n_operands;
4982 /* ... fall through ... */
4984 case RELOAD_FOR_OUTPUT:
4985 case RELOAD_FOR_OUTPUT_ADDRESS:
4986 case RELOAD_FOR_OUTADDR_ADDRESS:
4987 /* We already know these can't conflict with a later output. So the
4988 only thing to check are later output addresses.
4989 Note that multiple output operands are emitted in reverse order,
4990 so the conflicting ones are those with lower indices. */
4991 for (i = 0; i < opnum; i++)
4992 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4993 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4994 return 0;
4996 return 1;
4998 default:
4999 gcc_unreachable ();
5004 /* Returns whether R1 and R2 are uniquely chained: the value of one
5005 is used by the other, and that value is not used by any other
5006 reload for this insn. This is used to partially undo the decision
5007 made in find_reloads: when there are multiple
5008 RELOAD_FOR_OPERAND_ADDRESS reloads, it converts all
5009 RELOAD_FOR_OPADDR_ADDR reloads into RELOAD_FOR_OPERAND_ADDRESS
5010 reloads. This code tries to avoid the conflict created by that
5011 change. It might be cleaner to explicitly keep track of which
5012 RELOAD_FOR_OPADDR_ADDR reload is associated with which
5013 RELOAD_FOR_OPERAND_ADDRESS reload, rather than to try to detect
5014 this after the fact. */
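/* Illustrative sketch (not from the original source): R1 and R2 are
   "uniquely chained" when, say, rld[r1].in is (reg 100) and rld[r2].in
   is (plus (reg 100) (const_int 4)) for the same operand -- one value
   is a component of the other -- and no other input reload mentions
   (reg 100).  The idea is that such a pair can then safely be treated
   as non-conflicting and share a reload register.  */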
5015 static bool
5016 reloads_unique_chain_p (int r1, int r2)
5018 int i;
5020 /* We only check input reloads. */
5021 if (! rld[r1].in || ! rld[r2].in)
5022 return false;
5024 /* Avoid anything with output reloads. */
5025 if (rld[r1].out || rld[r2].out)
5026 return false;
5028 /* "chained" means one reload is a component of the other reload,
5029 not the same as the other reload. */
5030 if (rld[r1].opnum != rld[r2].opnum
5031 || rtx_equal_p (rld[r1].in, rld[r2].in)
5032 || rld[r1].optional || rld[r2].optional
5033 || ! (reg_mentioned_p (rld[r1].in, rld[r2].in)
5034 || reg_mentioned_p (rld[r2].in, rld[r1].in)))
5035 return false;
5037 for (i = 0; i < n_reloads; i ++)
5039 /* Look for input reloads that aren't our two. */
5039 if (i != r1 && i != r2 && rld[i].in)
5041 /* If our reload is mentioned at all, it isn't a simple chain. */
5042 if (reg_mentioned_p (rld[r1].in, rld[i].in))
5043 return false;
5045 return true;
5048 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
5049 Return 0 otherwise.
5051 This function uses the same algorithm as reload_reg_free_p above. */
5053 static int
5054 reloads_conflict (int r1, int r2)
5056 enum reload_type r1_type = rld[r1].when_needed;
5057 enum reload_type r2_type = rld[r2].when_needed;
5058 int r1_opnum = rld[r1].opnum;
5059 int r2_opnum = rld[r2].opnum;
5061 /* RELOAD_OTHER conflicts with everything. */
5062 if (r2_type == RELOAD_OTHER)
5063 return 1;
5065 /* Otherwise, check conflicts differently for each type. */
5067 switch (r1_type)
5069 case RELOAD_FOR_INPUT:
5070 return (r2_type == RELOAD_FOR_INSN
5071 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
5072 || r2_type == RELOAD_FOR_OPADDR_ADDR
5073 || r2_type == RELOAD_FOR_INPUT
5074 || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
5075 || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
5076 && r2_opnum > r1_opnum));
5078 case RELOAD_FOR_INPUT_ADDRESS:
5079 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
5080 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5082 case RELOAD_FOR_INPADDR_ADDRESS:
5083 return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
5084 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5086 case RELOAD_FOR_OUTPUT_ADDRESS:
5087 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
5088 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
5090 case RELOAD_FOR_OUTADDR_ADDRESS:
5091 return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
5092 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
5094 case RELOAD_FOR_OPERAND_ADDRESS:
5095 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
5096 || (r2_type == RELOAD_FOR_OPERAND_ADDRESS
5097 && !reloads_unique_chain_p (r1, r2)));
5099 case RELOAD_FOR_OPADDR_ADDR:
5100 return (r2_type == RELOAD_FOR_INPUT
5101 || r2_type == RELOAD_FOR_OPADDR_ADDR);
5103 case RELOAD_FOR_OUTPUT:
5104 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
5105 || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
5106 || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
5107 && r2_opnum >= r1_opnum));
5109 case RELOAD_FOR_INSN:
5110 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
5111 || r2_type == RELOAD_FOR_INSN
5112 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
5114 case RELOAD_FOR_OTHER_ADDRESS:
5115 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
5117 case RELOAD_OTHER:
5118 return 1;
5120 default:
5121 gcc_unreachable ();
5125 /* Indexed by reload number, 1 if incoming value
5126 inherited from previous insns. */
5127 static char reload_inherited[MAX_RELOADS];
5129 /* For an inherited reload, this is the insn the reload was inherited from,
5130 if we know it. Otherwise, this is 0. */
5131 static rtx reload_inheritance_insn[MAX_RELOADS];
5133 /* If nonzero, this is a place to get the value of the reload,
5134 rather than using reload_in. */
5135 static rtx reload_override_in[MAX_RELOADS];
5137 /* For each reload, the hard register number of the register used,
5138 or -1 if we did not need a register for this reload. */
5139 static int reload_spill_index[MAX_RELOADS];
5141 /* Subroutine of free_for_value_p, used to check a single register.
5142 START_REGNO is the starting regno of the full reload register
5143 (possibly comprising multiple hard registers) that we are considering. */
5145 static int
5146 reload_reg_free_for_value_p (int start_regno, int regno, int opnum,
5147 enum reload_type type, rtx value, rtx out,
5148 int reloadnum, int ignore_address_reloads)
5150 int time1;
5151 /* Set if we see an input reload that must not share its reload register
5152 with any new earlyclobber, but might otherwise share the reload
5153 register with an output or input-output reload. */
5154 int check_earlyclobber = 0;
5155 int i;
5156 int copy = 0;
5158 if (TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
5159 return 0;
5161 if (out == const0_rtx)
5163 copy = 1;
5164 out = NULL_RTX;
5167 /* We use some pseudo 'time' value to check if the lifetimes of the
5168 new register use would overlap with the one of a previous reload
5169 that is not read-only or uses a different value.
5170 The 'time' used doesn't have to be linear in any shape or form, just
5171 monotonic.
5172 Some reload types use different 'buckets' for each operand.
5173 So there are MAX_RECOG_OPERANDS different time values for each
5174 such reload type.
5175 We compute TIME1 as the time when the register for the prospective
5176 new reload ceases to be live, and TIME2 for each existing
5177 reload as the time when the reload register of that reload
5178 becomes live.
5179 Where there is little to be gained by exact lifetime calculations,
5180 we just make conservative assumptions, i.e. a longer lifetime;
5181 this is done in the 'default:' cases. */
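     /* Worked example (not from the original source), using the bucket
	constants chosen below: for operand 1, a RELOAD_FOR_INPADDR_ADDRESS
	reload gets time 1*4 + 2 = 6, the RELOAD_FOR_INPUT_ADDRESS reload
	gets 7, and a copy RELOAD_FOR_INPUT gets 1*4 + 4 = 8, so the three
	stages of loading that operand stay ordered and distinct from
	operand 0's times 2, 3 and 4.  A non-copy RELOAD_FOR_INPUT instead
	lives until MAX_RECOG_OPERANDS * 4 + 3, i.e. until the insn itself
	executes.  */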
5182 switch (type)
5184 case RELOAD_FOR_OTHER_ADDRESS:
5185 /* RELOAD_FOR_OTHER_ADDRESS conflicts with RELOAD_OTHER reloads. */
5186 time1 = copy ? 0 : 1;
5187 break;
5188 case RELOAD_OTHER:
5189 time1 = copy ? 1 : MAX_RECOG_OPERANDS * 5 + 5;
5190 break;
5191 /* For each input, we may have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
5192      RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT.  By adding 0 / 1 / 2,
5193 respectively, to the time values for these, we get distinct time
5194 values. To get distinct time values for each operand, we have to
5195 multiply opnum by at least three. We round that up to four because
5196 multiply by four is often cheaper. */
5197 case RELOAD_FOR_INPADDR_ADDRESS:
5198 time1 = opnum * 4 + 2;
5199 break;
5200 case RELOAD_FOR_INPUT_ADDRESS:
5201 time1 = opnum * 4 + 3;
5202 break;
5203 case RELOAD_FOR_INPUT:
5204 /* All RELOAD_FOR_INPUT reloads remain live till the instruction
5205 executes (inclusive). */
5206 time1 = copy ? opnum * 4 + 4 : MAX_RECOG_OPERANDS * 4 + 3;
5207 break;
5208 case RELOAD_FOR_OPADDR_ADDR:
5209 /* opnum * 4 + 4
5210 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
5211 time1 = MAX_RECOG_OPERANDS * 4 + 1;
5212 break;
5213 case RELOAD_FOR_OPERAND_ADDRESS:
5214 /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
5215 is executed. */
5216 time1 = copy ? MAX_RECOG_OPERANDS * 4 + 2 : MAX_RECOG_OPERANDS * 4 + 3;
5217 break;
5218 case RELOAD_FOR_OUTADDR_ADDRESS:
5219 time1 = MAX_RECOG_OPERANDS * 4 + 4 + opnum;
5220 break;
5221 case RELOAD_FOR_OUTPUT_ADDRESS:
5222 time1 = MAX_RECOG_OPERANDS * 4 + 5 + opnum;
5223 break;
5224 default:
5225 time1 = MAX_RECOG_OPERANDS * 5 + 5;
5228 for (i = 0; i < n_reloads; i++)
5230 rtx reg = rld[i].reg_rtx;
5231 if (reg && REG_P (reg)
5232 && ((unsigned) regno - true_regnum (reg)
5233 <= hard_regno_nregs[REGNO (reg)][GET_MODE (reg)] - (unsigned) 1)
5234 && i != reloadnum)
5236 rtx other_input = rld[i].in;
5238 /* If the other reload loads the same input value, that
5239 will not cause a conflict only if it's loading it into
5240 the same register. */
5241 if (true_regnum (reg) != start_regno)
5242 other_input = NULL_RTX;
5243 if (! other_input || ! rtx_equal_p (other_input, value)
5244 || rld[i].out || out)
5246 int time2;
5247 switch (rld[i].when_needed)
5249 case RELOAD_FOR_OTHER_ADDRESS:
5250 time2 = 0;
5251 break;
5252 case RELOAD_FOR_INPADDR_ADDRESS:
5253 /* find_reloads makes sure that a
5254 RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is only used
5255 by at most one - the first -
5256                 RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS.  If the
5257 address reload is inherited, the address address reload
5258 goes away, so we can ignore this conflict. */
5259 if (type == RELOAD_FOR_INPUT_ADDRESS && reloadnum == i + 1
5260 && ignore_address_reloads
5261 /* Unless the RELOAD_FOR_INPUT is an auto_inc expression.
5262                    Then the address address reload is still needed to store
5263 back the new address. */
5264 && ! rld[reloadnum].out)
5265 continue;
5266 /* Likewise, if a RELOAD_FOR_INPUT can inherit a value, its
5267 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS
5268 reloads go away. */
5269 if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
5270 && ignore_address_reloads
5271 /* Unless we are reloading an auto_inc expression. */
5272 && ! rld[reloadnum].out)
5273 continue;
5274 time2 = rld[i].opnum * 4 + 2;
5275 break;
5276 case RELOAD_FOR_INPUT_ADDRESS:
5277 if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
5278 && ignore_address_reloads
5279 && ! rld[reloadnum].out)
5280 continue;
5281 time2 = rld[i].opnum * 4 + 3;
5282 break;
5283 case RELOAD_FOR_INPUT:
5284 time2 = rld[i].opnum * 4 + 4;
5285 check_earlyclobber = 1;
5286 break;
5287            /* rld[i].opnum * 4 + 4 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4
5288               == MAX_RECOG_OPERANDS * 4  */
5289 case RELOAD_FOR_OPADDR_ADDR:
5290 if (type == RELOAD_FOR_OPERAND_ADDRESS && reloadnum == i + 1
5291 && ignore_address_reloads
5292 && ! rld[reloadnum].out)
5293 continue;
5294 time2 = MAX_RECOG_OPERANDS * 4 + 1;
5295 break;
5296 case RELOAD_FOR_OPERAND_ADDRESS:
5297 time2 = MAX_RECOG_OPERANDS * 4 + 2;
5298 check_earlyclobber = 1;
5299 break;
5300 case RELOAD_FOR_INSN:
5301 time2 = MAX_RECOG_OPERANDS * 4 + 3;
5302 break;
5303 case RELOAD_FOR_OUTPUT:
5304 /* All RELOAD_FOR_OUTPUT reloads become live just after the
5305 instruction is executed. */
5306 time2 = MAX_RECOG_OPERANDS * 4 + 4;
5307 break;
5308 /* The first RELOAD_FOR_OUTADDR_ADDRESS reload conflicts with
5309 the RELOAD_FOR_OUTPUT reloads, so assign it the same time
5310 value. */
5311 case RELOAD_FOR_OUTADDR_ADDRESS:
5312 if (type == RELOAD_FOR_OUTPUT_ADDRESS && reloadnum == i + 1
5313 && ignore_address_reloads
5314 && ! rld[reloadnum].out)
5315 continue;
5316 time2 = MAX_RECOG_OPERANDS * 4 + 4 + rld[i].opnum;
5317 break;
5318 case RELOAD_FOR_OUTPUT_ADDRESS:
5319 time2 = MAX_RECOG_OPERANDS * 4 + 5 + rld[i].opnum;
5320 break;
5321 case RELOAD_OTHER:
5322 /* If there is no conflict in the input part, handle this
5323 like an output reload. */
5324 if (! rld[i].in || rtx_equal_p (other_input, value))
5326 time2 = MAX_RECOG_OPERANDS * 4 + 4;
5327 /* Earlyclobbered outputs must conflict with inputs. */
5328 if (earlyclobber_operand_p (rld[i].out))
5329 time2 = MAX_RECOG_OPERANDS * 4 + 3;
5331 break;
5333 time2 = 1;
5334 /* RELOAD_OTHER might be live beyond instruction execution,
5335 but this is not obvious when we set time2 = 1. So check
5336 here if there might be a problem with the new reload
5337 clobbering the register used by the RELOAD_OTHER. */
5338 if (out)
5339 return 0;
5340 break;
5341 default:
5342 return 0;
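	      /* Restating the test below: the lifetimes overlap if reload
		 I's register becomes live (TIME2) no later than ours ceases
		 to be live (TIME1); that overlap is harmless only when
		 reload I is a pure input loading the same value
		 (OTHER_INPUT).  In addition, when OUT and
		 rld[RELOADNUM].out_reg are set, reject any reload I whose
		 register is live during or after the insn itself
		 (TIME2 >= MAX_RECOG_OPERANDS * 4 + 3).  */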
5344 if ((time1 >= time2
5345 && (! rld[i].in || rld[i].out
5346 || ! rtx_equal_p (other_input, value)))
5347 || (out && rld[reloadnum].out_reg
5348 && time2 >= MAX_RECOG_OPERANDS * 4 + 3))
5349 return 0;
5354 /* Earlyclobbered outputs must conflict with inputs. */
5355 if (check_earlyclobber && out && earlyclobber_operand_p (out))
5356 return 0;
5358 return 1;
5361 /* Return 1 if the value in reload reg REGNO, as used by a reload
5362 needed for the part of the insn specified by OPNUM and TYPE,
5363 may be used to load VALUE into it.
5365 MODE is the mode in which the register is used, this is needed to
5366 determine how many hard regs to test.
5368 Other read-only reloads with the same value do not conflict
5369 unless OUT is nonzero and these other reloads have to live while
5370 output reloads live.
5371 If OUT is CONST0_RTX, this is a special case: it means that the
5372 test should not be for using register REGNO as reload register, but
5373 for copying from register REGNO into the reload register.
5375 RELOADNUM is the number of the reload we want to load this value for;
5376 a reload does not conflict with itself.
5378    When IGNORE_ADDRESS_RELOADS is set, we do not consider conflicts with
5379 reloads that load an address for the very reload we are considering.
5381 The caller has to make sure that there is no conflict with the return
5382 register. */
5384 static int
5385 free_for_value_p (int regno, enum machine_mode mode, int opnum,
5386 enum reload_type type, rtx value, rtx out, int reloadnum,
5387 int ignore_address_reloads)
5389 int nregs = hard_regno_nregs[regno][mode];
5390 while (nregs-- > 0)
5391 if (! reload_reg_free_for_value_p (regno, regno + nregs, opnum, type,
5392 value, out, reloadnum,
5393 ignore_address_reloads))
5394 return 0;
5395 return 1;
5398 /* Return nonzero if the rtx X is invariant over the current function. */
5399 /* ??? Actually, the places where we use this expect exactly what is
5400 tested here, and not everything that is function invariant. In
5401 particular, the frame pointer and arg pointer are special cased;
5402 pic_offset_table_rtx is not, and we must not spill these things to
5403 memory. */
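/* For example: (const_int 4), a SYMBOL_REF, frame_pointer_rtx and
   (plus frame_pointer_rtx (const_int 8)) all pass this test, whereas
   pic_offset_table_rtx or a PLUS involving any other register do not,
   even if they happen to be invariant over the function in the wider
   sense mentioned above.  */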
5406 function_invariant_p (const_rtx x)
5408 if (CONSTANT_P (x))
5409 return 1;
5410 if (x == frame_pointer_rtx || x == arg_pointer_rtx)
5411 return 1;
5412 if (GET_CODE (x) == PLUS
5413 && (XEXP (x, 0) == frame_pointer_rtx || XEXP (x, 0) == arg_pointer_rtx)
5414 && CONSTANT_P (XEXP (x, 1)))
5415 return 1;
5416 return 0;
5419 /* Determine whether the reload reg X overlaps any rtx'es used for
5420 overriding inheritance. Return nonzero if so. */
5422 static int
5423 conflicts_with_override (rtx x)
5425 int i;
5426 for (i = 0; i < n_reloads; i++)
5427 if (reload_override_in[i]
5428 && reg_overlap_mentioned_p (x, reload_override_in[i]))
5429 return 1;
5430 return 0;
5433 /* Give an error message saying we failed to find a reload for INSN,
5434 and clear out reload R. */
5435 static void
5436 failed_reload (rtx insn, int r)
5438 if (asm_noperands (PATTERN (insn)) < 0)
5439 /* It's the compiler's fault. */
5440 fatal_insn ("could not find a spill register", insn);
5442 /* It's the user's fault; the operand's mode and constraint
5443 don't match. Disable this reload so we don't crash in final. */
5444 error_for_asm (insn,
5445 "%<asm%> operand constraint incompatible with operand size");
5446 rld[r].in = 0;
5447 rld[r].out = 0;
5448 rld[r].reg_rtx = 0;
5449 rld[r].optional = 1;
5450 rld[r].secondary_p = 1;
5453 /* I is the index in SPILL_REG_RTX of the reload register we are to allocate
5454 for reload R. If it's valid, get an rtx for it. Return nonzero if
5455 successful. */
5456 static int
5457 set_reload_reg (int i, int r)
5459 int regno;
5460 rtx reg = spill_reg_rtx[i];
5462 if (reg == 0 || GET_MODE (reg) != rld[r].mode)
5463 spill_reg_rtx[i] = reg
5464 = gen_rtx_REG (rld[r].mode, spill_regs[i]);
5466 regno = true_regnum (reg);
5468 /* Detect when the reload reg can't hold the reload mode.
5469      This used to be one `if', but the Sequent compiler can't handle that.  */
5470 if (HARD_REGNO_MODE_OK (regno, rld[r].mode))
5472 enum machine_mode test_mode = VOIDmode;
5473 if (rld[r].in)
5474 test_mode = GET_MODE (rld[r].in);
5475 /* If rld[r].in has VOIDmode, it means we will load it
5476 in whatever mode the reload reg has: to wit, rld[r].mode.
5477 We have already tested that for validity. */
5478 /* Aside from that, we need to test that the expressions
5479 to reload from or into have modes which are valid for this
5480 reload register. Otherwise the reload insns would be invalid. */
5481 if (! (rld[r].in != 0 && test_mode != VOIDmode
5482 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
5483 if (! (rld[r].out != 0
5484 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (rld[r].out))))
5486 /* The reg is OK. */
5487 last_spill_reg = i;
5489 /* Mark as in use for this insn the reload regs we use
5490 for this. */
5491 mark_reload_reg_in_use (spill_regs[i], rld[r].opnum,
5492 rld[r].when_needed, rld[r].mode);
5494 rld[r].reg_rtx = reg;
5495 reload_spill_index[r] = spill_regs[i];
5496 return 1;
5499 return 0;
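/* Note that the failure paths above leave all global state untouched;
   allocate_reload_reg merely propagates the failure, and choose_reload_regs
   then retries the whole allocation without inheritance or, as a last
   resort, reports the problem via failed_reload.  */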
5502 /* Find a spill register to use as a reload register for reload R.
5503 LAST_RELOAD is nonzero if this is the last reload for the insn being
5504 processed.
5506 Set rld[R].reg_rtx to the register allocated.
5508 We return 1 if successful, or 0 if we couldn't find a spill reg and
5509 we didn't change anything. */
5511 static int
5512 allocate_reload_reg (struct insn_chain *chain ATTRIBUTE_UNUSED, int r,
5513 int last_reload)
5515 int i, pass, count;
5517 /* If we put this reload ahead, thinking it is a group,
5518 then insist on finding a group. Otherwise we can grab a
5519 reg that some other reload needs.
5520 (That can happen when we have a 68000 DATA_OR_FP_REG
5521 which is a group of data regs or one fp reg.)
5522 We need not be so restrictive if there are no more reloads
5523 for this insn.
5525 ??? Really it would be nicer to have smarter handling
5526 for that kind of reg class, where a problem like this is normal.
5527 Perhaps those classes should be avoided for reloading
5528 by use of more alternatives. */
5530 int force_group = rld[r].nregs > 1 && ! last_reload;
5532 /* If we want a single register and haven't yet found one,
5533 take any reg in the right class and not in use.
5534 If we want a consecutive group, here is where we look for it.
5536 We use two passes so we can first look for reload regs to
5537 reuse, which are already in use for other reloads in this insn,
5538 and only then use additional registers.
5539 I think that maximizing reuse is needed to make sure we don't
5540 run out of reload regs. Suppose we have three reloads, and
5541 reloads A and B can share regs. These need two regs.
5542 Suppose A and B are given different regs.
5543 That leaves none for C. */
5544 for (pass = 0; pass < 2; pass++)
5546 /* I is the index in spill_regs.
5547 We advance it round-robin between insns to use all spill regs
5548 equally, so that inherited reloads have a chance
5549 of leapfrogging each other. */
5551 i = last_spill_reg;
5553 for (count = 0; count < n_spills; count++)
5555 int class = (int) rld[r].class;
5556 int regnum;
5558 i++;
5559 if (i >= n_spills)
5560 i -= n_spills;
5561 regnum = spill_regs[i];
5563 if ((reload_reg_free_p (regnum, rld[r].opnum,
5564 rld[r].when_needed)
5565 || (rld[r].in
5566 /* We check reload_reg_used to make sure we
5567 don't clobber the return register. */
5568 && ! TEST_HARD_REG_BIT (reload_reg_used, regnum)
5569 && free_for_value_p (regnum, rld[r].mode, rld[r].opnum,
5570 rld[r].when_needed, rld[r].in,
5571 rld[r].out, r, 1)))
5572 && TEST_HARD_REG_BIT (reg_class_contents[class], regnum)
5573 && HARD_REGNO_MODE_OK (regnum, rld[r].mode)
5574 /* Look first for regs to share, then for unshared. But
5575 don't share regs used for inherited reloads; they are
5576 the ones we want to preserve. */
5577 && (pass
5578 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
5579 regnum)
5580 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
5581 regnum))))
5583 int nr = hard_regno_nregs[regnum][rld[r].mode];
5584 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
5585 (on 68000) got us two FP regs. If NR is 1,
5586 we would reject both of them. */
5587 if (force_group)
5588 nr = rld[r].nregs;
5589 /* If we need only one reg, we have already won. */
5590 if (nr == 1)
5592 /* But reject a single reg if we demand a group. */
5593 if (force_group)
5594 continue;
5595 break;
5597 /* Otherwise check that as many consecutive regs as we need
5598 are available here. */
5599 while (nr > 1)
5601 int regno = regnum + nr - 1;
5602 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
5603 && spill_reg_order[regno] >= 0
5604 && reload_reg_free_p (regno, rld[r].opnum,
5605 rld[r].when_needed)))
5606 break;
5607 nr--;
5609 if (nr == 1)
5610 break;
5614 /* If we found something on pass 1, omit pass 2. */
5615 if (count < n_spills)
5616 break;
5619 /* We should have found a spill register by now. */
5620 if (count >= n_spills)
5621 return 0;
5623 /* I is the index in SPILL_REG_RTX of the reload register we are to
5624 allocate. Get an rtx for it and find its register number. */
5626 return set_reload_reg (i, r);
5629 /* Initialize all the tables needed to allocate reload registers.
5630 CHAIN is the insn currently being processed; SAVE_RELOAD_REG_RTX
5631 is the array we use to restore the reg_rtx field for every reload. */
5633 static void
5634 choose_reload_regs_init (struct insn_chain *chain, rtx *save_reload_reg_rtx)
5636 int i;
5638 for (i = 0; i < n_reloads; i++)
5639 rld[i].reg_rtx = save_reload_reg_rtx[i];
5641 memset (reload_inherited, 0, MAX_RELOADS);
5642 memset (reload_inheritance_insn, 0, MAX_RELOADS * sizeof (rtx));
5643 memset (reload_override_in, 0, MAX_RELOADS * sizeof (rtx));
5645 CLEAR_HARD_REG_SET (reload_reg_used);
5646 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
5647 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
5648 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
5649 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
5650 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
5652 CLEAR_HARD_REG_SET (reg_used_in_insn);
5654 HARD_REG_SET tmp;
5655 REG_SET_TO_HARD_REG_SET (tmp, &chain->live_throughout);
5656 IOR_HARD_REG_SET (reg_used_in_insn, tmp);
5657 REG_SET_TO_HARD_REG_SET (tmp, &chain->dead_or_set);
5658 IOR_HARD_REG_SET (reg_used_in_insn, tmp);
5659 compute_use_by_pseudos (&reg_used_in_insn, &chain->live_throughout);
5660 compute_use_by_pseudos (&reg_used_in_insn, &chain->dead_or_set);
5663 for (i = 0; i < reload_n_operands; i++)
5665 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
5666 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
5667 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
5668 CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
5669 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
5670 CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
5673 COMPL_HARD_REG_SET (reload_reg_unavailable, chain->used_spill_regs);
5675 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5677 for (i = 0; i < n_reloads; i++)
5678 /* If we have already decided to use a certain register,
5679 don't use it in another way. */
5680 if (rld[i].reg_rtx)
5681 mark_reload_reg_in_use (REGNO (rld[i].reg_rtx), rld[i].opnum,
5682 rld[i].when_needed, rld[i].mode);
5685 /* Assign hard reg targets for the pseudo-registers we must reload
5686 into hard regs for this insn.
5687 Also output the instructions to copy them in and out of the hard regs.
5689 For machines with register classes, we are responsible for
5690 finding a reload reg in the proper class. */
5692 static void
5693 choose_reload_regs (struct insn_chain *chain)
5695 rtx insn = chain->insn;
5696 int i, j;
5697 unsigned int max_group_size = 1;
5698 enum reg_class group_class = NO_REGS;
5699 int pass, win, inheritance;
5701 rtx save_reload_reg_rtx[MAX_RELOADS];
5703 /* In order to be certain of getting the registers we need,
5704 we must sort the reloads into order of increasing register class.
5705 Then our grabbing of reload registers will parallel the process
5706 that provided the reload registers.
5708 Also note whether any of the reloads wants a consecutive group of regs.
5709 If so, record the maximum size of the group desired and what
5710 register class contains all the groups needed by this insn. */
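  /* For example, if one reload wants a class containing a single hard
     register while another wants GENERAL_REGS, handing the GENERAL_REGS
     reload that register first could leave nothing for the tighter class;
     the qsort with reload_reg_class_lower below therefore processes the
     more constrained reloads first.  */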
5712 for (j = 0; j < n_reloads; j++)
5714 reload_order[j] = j;
5715 if (rld[j].reg_rtx != NULL_RTX)
5717 gcc_assert (REG_P (rld[j].reg_rtx)
5718 && HARD_REGISTER_P (rld[j].reg_rtx));
5719 reload_spill_index[j] = REGNO (rld[j].reg_rtx);
5721 else
5722 reload_spill_index[j] = -1;
5724 if (rld[j].nregs > 1)
5726 max_group_size = MAX (rld[j].nregs, max_group_size);
5727 group_class
5728 = reg_class_superunion[(int) rld[j].class][(int) group_class];
5731 save_reload_reg_rtx[j] = rld[j].reg_rtx;
5734 if (n_reloads > 1)
5735 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
5737 /* If -O, try first with inheritance, then turning it off.
5738 If not -O, don't do inheritance.
5739 Using inheritance when not optimizing leads to paradoxes
5740 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5741 because one side of the comparison might be inherited. */
5742 win = 0;
5743 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
5745 choose_reload_regs_init (chain, save_reload_reg_rtx);
5747 /* Process the reloads in order of preference just found.
5748 Beyond this point, subregs can be found in reload_reg_rtx.
5750 This used to look for an existing reloaded home for all of the
5751 reloads, and only then perform any new reloads. But that could lose
5752 if the reloads were done out of reg-class order because a later
5753 reload with a looser constraint might have an old home in a register
5754 needed by an earlier reload with a tighter constraint.
5756 To solve this, we make two passes over the reloads, in the order
5757 described above. In the first pass we try to inherit a reload
5758 from a previous insn. If there is a later reload that needs a
5759 class that is a proper subset of the class being processed, we must
5760 also allocate a spill register during the first pass.
5762 Then make a second pass over the reloads to allocate any reloads
5763 that haven't been given registers yet. */
5765 for (j = 0; j < n_reloads; j++)
5767 int r = reload_order[j];
5768 rtx search_equiv = NULL_RTX;
5770 /* Ignore reloads that got marked inoperative. */
5771 if (rld[r].out == 0 && rld[r].in == 0
5772 && ! rld[r].secondary_p)
5773 continue;
5775 /* If find_reloads chose to use reload_in or reload_out as a reload
5776         register, we don't need to choose one.  Otherwise, try even if it
5777 found one since we might save an insn if we find the value lying
5778 around.
5779 Try also when reload_in is a pseudo without a hard reg. */
5780 if (rld[r].in != 0 && rld[r].reg_rtx != 0
5781 && (rtx_equal_p (rld[r].in, rld[r].reg_rtx)
5782 || (rtx_equal_p (rld[r].out, rld[r].reg_rtx)
5783 && !MEM_P (rld[r].in)
5784 && true_regnum (rld[r].in) < FIRST_PSEUDO_REGISTER)))
5785 continue;
5787 #if 0 /* No longer needed for correct operation.
5788 It might give better code, or might not; worth an experiment? */
5789 /* If this is an optional reload, we can't inherit from earlier insns
5790 until we are sure that any non-optional reloads have been allocated.
5791 The following code takes advantage of the fact that optional reloads
5792 are at the end of reload_order. */
5793 if (rld[r].optional != 0)
5794 for (i = 0; i < j; i++)
5795 if ((rld[reload_order[i]].out != 0
5796 || rld[reload_order[i]].in != 0
5797 || rld[reload_order[i]].secondary_p)
5798 && ! rld[reload_order[i]].optional
5799 && rld[reload_order[i]].reg_rtx == 0)
5800 allocate_reload_reg (chain, reload_order[i], 0);
5801 #endif
5803 /* First see if this pseudo is already available as reloaded
5804 for a previous insn. We cannot try to inherit for reloads
5805 that are smaller than the maximum number of registers needed
5806 for groups unless the register we would allocate cannot be used
5807 for the groups.
5809 We could check here to see if this is a secondary reload for
5810 an object that is already in a register of the desired class.
5811 This would avoid the need for the secondary reload register.
5812 But this is complex because we can't easily determine what
5813 objects might want to be loaded via this reload. So let a
5814 register be allocated here. In `emit_reload_insns' we suppress
5815 one of the loads in the case described above. */
5817 if (inheritance)
5819 int byte = 0;
5820 int regno = -1;
5821 enum machine_mode mode = VOIDmode;
5823 if (rld[r].in == 0)
5825 else if (REG_P (rld[r].in))
5827 regno = REGNO (rld[r].in);
5828 mode = GET_MODE (rld[r].in);
5830 else if (REG_P (rld[r].in_reg))
5832 regno = REGNO (rld[r].in_reg);
5833 mode = GET_MODE (rld[r].in_reg);
5835 else if (GET_CODE (rld[r].in_reg) == SUBREG
5836 && REG_P (SUBREG_REG (rld[r].in_reg)))
5838 regno = REGNO (SUBREG_REG (rld[r].in_reg));
5839 if (regno < FIRST_PSEUDO_REGISTER)
5840 regno = subreg_regno (rld[r].in_reg);
5841 else
5842 byte = SUBREG_BYTE (rld[r].in_reg);
5843 mode = GET_MODE (rld[r].in_reg);
5845 #ifdef AUTO_INC_DEC
5846 else if (GET_RTX_CLASS (GET_CODE (rld[r].in_reg)) == RTX_AUTOINC
5847 && REG_P (XEXP (rld[r].in_reg, 0)))
5849 regno = REGNO (XEXP (rld[r].in_reg, 0));
5850 mode = GET_MODE (XEXP (rld[r].in_reg, 0));
5851 rld[r].out = rld[r].in;
5853 #endif
5854 #if 0
5855 /* This won't work, since REGNO can be a pseudo reg number.
5856 Also, it takes much more hair to keep track of all the things
5857 that can invalidate an inherited reload of part of a pseudoreg. */
5858 else if (GET_CODE (rld[r].in) == SUBREG
5859 && REG_P (SUBREG_REG (rld[r].in)))
5860 regno = subreg_regno (rld[r].in);
5861 #endif
5863 if (regno >= 0
5864 && reg_last_reload_reg[regno] != 0
5865 #ifdef CANNOT_CHANGE_MODE_CLASS
5866 /* Verify that the register it's in can be used in
5867 mode MODE. */
5868 && !REG_CANNOT_CHANGE_MODE_P (REGNO (reg_last_reload_reg[regno]),
5869 GET_MODE (reg_last_reload_reg[regno]),
5870 mode)
5871 #endif
5874 enum reg_class class = rld[r].class, last_class;
5875 rtx last_reg = reg_last_reload_reg[regno];
5876 enum machine_mode need_mode;
5878 i = REGNO (last_reg);
5879 i += subreg_regno_offset (i, GET_MODE (last_reg), byte, mode);
5880 last_class = REGNO_REG_CLASS (i);
5882 if (byte == 0)
5883 need_mode = mode;
5884 else
5885 need_mode
5886 = smallest_mode_for_size (GET_MODE_BITSIZE (mode)
5887 + byte * BITS_PER_UNIT,
5888 GET_MODE_CLASS (mode));
5890 if ((GET_MODE_SIZE (GET_MODE (last_reg))
5891 >= GET_MODE_SIZE (need_mode))
5892 && reg_reloaded_contents[i] == regno
5893 && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
5894 && HARD_REGNO_MODE_OK (i, rld[r].mode)
5895 && (TEST_HARD_REG_BIT (reg_class_contents[(int) class], i)
5896 /* Even if we can't use this register as a reload
5897 register, we might use it for reload_override_in,
5898 if copying it to the desired class is cheap
5899 enough. */
5900 || ((REGISTER_MOVE_COST (mode, last_class, class)
5901 < MEMORY_MOVE_COST (mode, class, 1))
5902 && (secondary_reload_class (1, class, mode,
5903 last_reg)
5904 == NO_REGS)
5905 #ifdef SECONDARY_MEMORY_NEEDED
5906 && ! SECONDARY_MEMORY_NEEDED (last_class, class,
5907 mode)
5908 #endif
5911 && (rld[r].nregs == max_group_size
5912 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
5914 && free_for_value_p (i, rld[r].mode, rld[r].opnum,
5915 rld[r].when_needed, rld[r].in,
5916 const0_rtx, r, 1))
5918 /* If a group is needed, verify that all the subsequent
5919 registers still have their values intact. */
5920 int nr = hard_regno_nregs[i][rld[r].mode];
5921 int k;
5923 for (k = 1; k < nr; k++)
5924 if (reg_reloaded_contents[i + k] != regno
5925 || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))
5926 break;
5928 if (k == nr)
5930 int i1;
5931 int bad_for_class;
5933 last_reg = (GET_MODE (last_reg) == mode
5934 ? last_reg : gen_rtx_REG (mode, i));
5936 bad_for_class = 0;
5937 for (k = 0; k < nr; k++)
5938 bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].class],
5939 i+k);
5941 /* We found a register that contains the
5942 value we need. If this register is the
5943 same as an `earlyclobber' operand of the
5944 current insn, just mark it as a place to
5945 reload from since we can't use it as the
5946 reload register itself. */
5948 for (i1 = 0; i1 < n_earlyclobbers; i1++)
5949 if (reg_overlap_mentioned_for_reload_p
5950 (reg_last_reload_reg[regno],
5951 reload_earlyclobbers[i1]))
5952 break;
5954 if (i1 != n_earlyclobbers
5955 || ! (free_for_value_p (i, rld[r].mode,
5956 rld[r].opnum,
5957 rld[r].when_needed, rld[r].in,
5958 rld[r].out, r, 1))
5959 /* Don't use it if we'd clobber a pseudo reg. */
5960 || (TEST_HARD_REG_BIT (reg_used_in_insn, i)
5961 && rld[r].out
5962 && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
5963 /* Don't clobber the frame pointer. */
5964 || (i == HARD_FRAME_POINTER_REGNUM
5965 && frame_pointer_needed
5966 && rld[r].out)
5967 /* Don't really use the inherited spill reg
5968 if we need it wider than we've got it. */
5969 || (GET_MODE_SIZE (rld[r].mode)
5970 > GET_MODE_SIZE (mode))
5971 || bad_for_class
5973 /* If find_reloads chose reload_out as reload
5974 register, stay with it - that leaves the
5975 inherited register for subsequent reloads. */
5976 || (rld[r].out && rld[r].reg_rtx
5977 && rtx_equal_p (rld[r].out, rld[r].reg_rtx)))
5979 if (! rld[r].optional)
5981 reload_override_in[r] = last_reg;
5982 reload_inheritance_insn[r]
5983 = reg_reloaded_insn[i];
5986 else
5988 int k;
5989 /* We can use this as a reload reg. */
5990 /* Mark the register as in use for this part of
5991 the insn. */
5992 mark_reload_reg_in_use (i,
5993 rld[r].opnum,
5994 rld[r].when_needed,
5995 rld[r].mode);
5996 rld[r].reg_rtx = last_reg;
5997 reload_inherited[r] = 1;
5998 reload_inheritance_insn[r]
5999 = reg_reloaded_insn[i];
6000 reload_spill_index[r] = i;
6001 for (k = 0; k < nr; k++)
6002 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6003 i + k);
6010 /* Here's another way to see if the value is already lying around. */
6011 if (inheritance
6012 && rld[r].in != 0
6013 && ! reload_inherited[r]
6014 && rld[r].out == 0
6015 && (CONSTANT_P (rld[r].in)
6016 || GET_CODE (rld[r].in) == PLUS
6017 || REG_P (rld[r].in)
6018 || MEM_P (rld[r].in))
6019 && (rld[r].nregs == max_group_size
6020 || ! reg_classes_intersect_p (rld[r].class, group_class)))
6021 search_equiv = rld[r].in;
6022      /* If this is an output reload from a simple move insn, see
6023         whether an equivalence for the input is available.  */
6024 else if (inheritance && rld[r].in == 0 && rld[r].out != 0)
6026 rtx set = single_set (insn);
6028 if (set
6029 && rtx_equal_p (rld[r].out, SET_DEST (set))
6030 && CONSTANT_P (SET_SRC (set)))
6031 search_equiv = SET_SRC (set);
6034 if (search_equiv)
6036 rtx equiv
6037 = find_equiv_reg (search_equiv, insn, rld[r].class,
6038 -1, NULL, 0, rld[r].mode);
6039 int regno = 0;
6041 if (equiv != 0)
6043 if (REG_P (equiv))
6044 regno = REGNO (equiv);
6045 else
6047 /* This must be a SUBREG of a hard register.
6048 Make a new REG since this might be used in an
6049 address and not all machines support SUBREGs
6050 there. */
6051 gcc_assert (GET_CODE (equiv) == SUBREG);
6052 regno = subreg_regno (equiv);
6053 equiv = gen_rtx_REG (rld[r].mode, regno);
6054 /* If we choose EQUIV as the reload register, but the
6055 loop below decides to cancel the inheritance, we'll
6056 end up reloading EQUIV in rld[r].mode, not the mode
6057 it had originally. That isn't safe when EQUIV isn't
6058 available as a spill register since its value might
6059 still be live at this point. */
6060 for (i = regno; i < regno + (int) rld[r].nregs; i++)
6061 if (TEST_HARD_REG_BIT (reload_reg_unavailable, i))
6062 equiv = 0;
6066 /* If we found a spill reg, reject it unless it is free
6067 and of the desired class. */
6068 if (equiv != 0)
6070 int regs_used = 0;
6071 int bad_for_class = 0;
6072 int max_regno = regno + rld[r].nregs;
6074 for (i = regno; i < max_regno; i++)
6076 regs_used |= TEST_HARD_REG_BIT (reload_reg_used_at_all,
6078 bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].class],
6082 if ((regs_used
6083 && ! free_for_value_p (regno, rld[r].mode,
6084 rld[r].opnum, rld[r].when_needed,
6085 rld[r].in, rld[r].out, r, 1))
6086 || bad_for_class)
6087 equiv = 0;
6090 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, rld[r].mode))
6091 equiv = 0;
6093 /* We found a register that contains the value we need.
6094 If this register is the same as an `earlyclobber' operand
6095 of the current insn, just mark it as a place to reload from
6096 since we can't use it as the reload register itself. */
6098 if (equiv != 0)
6099 for (i = 0; i < n_earlyclobbers; i++)
6100 if (reg_overlap_mentioned_for_reload_p (equiv,
6101 reload_earlyclobbers[i]))
6103 if (! rld[r].optional)
6104 reload_override_in[r] = equiv;
6105 equiv = 0;
6106 break;
6109 /* If the equiv register we have found is explicitly clobbered
6110         in the current insn, it depends on the reload type whether we
6111         can use it, use it only for reload_override_in, or not use it at all.
6112 In particular, we then can't use EQUIV for a
6113 RELOAD_FOR_OUTPUT_ADDRESS reload. */
6115 if (equiv != 0)
6117 if (regno_clobbered_p (regno, insn, rld[r].mode, 2))
6118 switch (rld[r].when_needed)
6120 case RELOAD_FOR_OTHER_ADDRESS:
6121 case RELOAD_FOR_INPADDR_ADDRESS:
6122 case RELOAD_FOR_INPUT_ADDRESS:
6123 case RELOAD_FOR_OPADDR_ADDR:
6124 break;
6125 case RELOAD_OTHER:
6126 case RELOAD_FOR_INPUT:
6127 case RELOAD_FOR_OPERAND_ADDRESS:
6128 if (! rld[r].optional)
6129 reload_override_in[r] = equiv;
6130 /* Fall through. */
6131 default:
6132 equiv = 0;
6133 break;
6135 else if (regno_clobbered_p (regno, insn, rld[r].mode, 1))
6136 switch (rld[r].when_needed)
6138 case RELOAD_FOR_OTHER_ADDRESS:
6139 case RELOAD_FOR_INPADDR_ADDRESS:
6140 case RELOAD_FOR_INPUT_ADDRESS:
6141 case RELOAD_FOR_OPADDR_ADDR:
6142 case RELOAD_FOR_OPERAND_ADDRESS:
6143 case RELOAD_FOR_INPUT:
6144 break;
6145 case RELOAD_OTHER:
6146 if (! rld[r].optional)
6147 reload_override_in[r] = equiv;
6148 /* Fall through. */
6149 default:
6150 equiv = 0;
6151 break;
6155 /* If we found an equivalent reg, say no code need be generated
6156 to load it, and use it as our reload reg. */
6157 if (equiv != 0
6158 && (regno != HARD_FRAME_POINTER_REGNUM
6159 || !frame_pointer_needed))
6161 int nr = hard_regno_nregs[regno][rld[r].mode];
6162 int k;
6163 rld[r].reg_rtx = equiv;
6164 reload_inherited[r] = 1;
6166 /* If reg_reloaded_valid is not set for this register,
6167 there might be a stale spill_reg_store lying around.
6168 We must clear it, since otherwise emit_reload_insns
6169 might delete the store. */
6170 if (! TEST_HARD_REG_BIT (reg_reloaded_valid, regno))
6171 spill_reg_store[regno] = NULL_RTX;
6172 /* If any of the hard registers in EQUIV are spill
6173 registers, mark them as in use for this insn. */
6174 for (k = 0; k < nr; k++)
6176 i = spill_reg_order[regno + k];
6177 if (i >= 0)
6179 mark_reload_reg_in_use (regno, rld[r].opnum,
6180 rld[r].when_needed,
6181 rld[r].mode);
6182 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6183 regno + k);
6189 /* If we found a register to use already, or if this is an optional
6190 reload, we are done. */
6191 if (rld[r].reg_rtx != 0 || rld[r].optional != 0)
6192 continue;
6194 #if 0
6195 /* No longer needed for correct operation. Might or might
6196 not give better code on the average. Want to experiment? */
6198 /* See if there is a later reload that has a class different from our
6199      class that intersects our class or that requires fewer registers
6200 than our reload. If so, we must allocate a register to this
6201 reload now, since that reload might inherit a previous reload
6202 and take the only available register in our class. Don't do this
6203 for optional reloads since they will force all previous reloads
6204 to be allocated. Also don't do this for reloads that have been
6205 turned off. */
6207 for (i = j + 1; i < n_reloads; i++)
6209 int s = reload_order[i];
6211 if ((rld[s].in == 0 && rld[s].out == 0
6212 && ! rld[s].secondary_p)
6213 || rld[s].optional)
6214 continue;
6216 if ((rld[s].class != rld[r].class
6217 && reg_classes_intersect_p (rld[r].class,
6218 rld[s].class))
6219 || rld[s].nregs < rld[r].nregs)
6220 break;
6223 if (i == n_reloads)
6224 continue;
6226 allocate_reload_reg (chain, r, j == n_reloads - 1);
6227 #endif
6230 /* Now allocate reload registers for anything non-optional that
6231 didn't get one yet. */
6232 for (j = 0; j < n_reloads; j++)
6234 int r = reload_order[j];
6236 /* Ignore reloads that got marked inoperative. */
6237 if (rld[r].out == 0 && rld[r].in == 0 && ! rld[r].secondary_p)
6238 continue;
6240 /* Skip reloads that already have a register allocated or are
6241 optional. */
6242 if (rld[r].reg_rtx != 0 || rld[r].optional)
6243 continue;
6245 if (! allocate_reload_reg (chain, r, j == n_reloads - 1))
6246 break;
6249 /* If that loop got all the way, we have won. */
6250 if (j == n_reloads)
6252 win = 1;
6253 break;
6256 /* Loop around and try without any inheritance. */
6259 if (! win)
6261 /* First undo everything done by the failed attempt
6262 to allocate with inheritance. */
6263 choose_reload_regs_init (chain, save_reload_reg_rtx);
6265 /* Some sanity tests to verify that the reloads found in the first
6266 pass are identical to the ones we have now. */
6267 gcc_assert (chain->n_reloads == n_reloads);
6269 for (i = 0; i < n_reloads; i++)
6271 if (chain->rld[i].regno < 0 || chain->rld[i].reg_rtx != 0)
6272 continue;
6273 gcc_assert (chain->rld[i].when_needed == rld[i].when_needed);
6274 for (j = 0; j < n_spills; j++)
6275 if (spill_regs[j] == chain->rld[i].regno)
6276 if (! set_reload_reg (j, i))
6277 failed_reload (chain->insn, i);
6281 /* If we thought we could inherit a reload, because it seemed that
6282 nothing else wanted the same reload register earlier in the insn,
6283 verify that assumption, now that all reloads have been assigned.
6284 Likewise for reloads where reload_override_in has been set. */
6286 /* If doing expensive optimizations, do one preliminary pass that doesn't
6287 cancel any inheritance, but removes reloads that have been needed only
6288 for reloads that we know can be inherited. */
6289 for (pass = flag_expensive_optimizations; pass >= 0; pass--)
6291 for (j = 0; j < n_reloads; j++)
6293 int r = reload_order[j];
6294 rtx check_reg;
6295 if (reload_inherited[r] && rld[r].reg_rtx)
6296 check_reg = rld[r].reg_rtx;
6297 else if (reload_override_in[r]
6298 && (REG_P (reload_override_in[r])
6299 || GET_CODE (reload_override_in[r]) == SUBREG))
6300 check_reg = reload_override_in[r];
6301 else
6302 continue;
6303 if (! free_for_value_p (true_regnum (check_reg), rld[r].mode,
6304 rld[r].opnum, rld[r].when_needed, rld[r].in,
6305 (reload_inherited[r]
6306 ? rld[r].out : const0_rtx),
6307 r, 1))
6309 if (pass)
6310 continue;
6311 reload_inherited[r] = 0;
6312 reload_override_in[r] = 0;
6314 /* If we can inherit a RELOAD_FOR_INPUT, or can use a
6315 reload_override_in, then we do not need its related
6316 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
6317 likewise for other reload types.
6318 We handle this by removing a reload when its only replacement
6319 is mentioned in reload_in of the reload we are going to inherit.
6320         A special case is auto_inc expressions; even if the input is
6321 inherited, we still need the address for the output. We can
6322 recognize them because they have RELOAD_OUT set to RELOAD_IN.
6323 If we succeeded removing some reload and we are doing a preliminary
6324 pass just to remove such reloads, make another pass, since the
6325 removal of one reload might allow us to inherit another one. */
6326 else if (rld[r].in
6327 && rld[r].out != rld[r].in
6328 && remove_address_replacements (rld[r].in) && pass)
6329 pass = 2;
6333 /* Now that reload_override_in is known valid,
6334 actually override reload_in. */
6335 for (j = 0; j < n_reloads; j++)
6336 if (reload_override_in[j])
6337 rld[j].in = reload_override_in[j];
6339 /* If this reload won't be done because it has been canceled or is
6340 optional and not inherited, clear reload_reg_rtx so other
6341 routines (such as subst_reloads) don't get confused. */
6342 for (j = 0; j < n_reloads; j++)
6343 if (rld[j].reg_rtx != 0
6344 && ((rld[j].optional && ! reload_inherited[j])
6345 || (rld[j].in == 0 && rld[j].out == 0
6346 && ! rld[j].secondary_p)))
6348 int regno = true_regnum (rld[j].reg_rtx);
6350 if (spill_reg_order[regno] >= 0)
6351 clear_reload_reg_in_use (regno, rld[j].opnum,
6352 rld[j].when_needed, rld[j].mode);
6353 rld[j].reg_rtx = 0;
6354 reload_spill_index[j] = -1;
6357 /* Record which pseudos and which spill regs have output reloads. */
6358 for (j = 0; j < n_reloads; j++)
6360 int r = reload_order[j];
6362 i = reload_spill_index[r];
6364 /* I is nonneg if this reload uses a register.
6365 If rld[r].reg_rtx is 0, this is an optional reload
6366 that we opted to ignore. */
6367 if (rld[r].out_reg != 0 && REG_P (rld[r].out_reg)
6368 && rld[r].reg_rtx != 0)
6370 int nregno = REGNO (rld[r].out_reg);
6371 int nr = 1;
6373 if (nregno < FIRST_PSEUDO_REGISTER)
6374 nr = hard_regno_nregs[nregno][rld[r].mode];
6376 while (--nr >= 0)
6377 SET_REGNO_REG_SET (&reg_has_output_reload,
6378 nregno + nr);
6380 if (i >= 0)
6382 nr = hard_regno_nregs[i][rld[r].mode];
6383 while (--nr >= 0)
6384 SET_HARD_REG_BIT (reg_is_output_reload, i + nr);
6387 gcc_assert (rld[r].when_needed == RELOAD_OTHER
6388 || rld[r].when_needed == RELOAD_FOR_OUTPUT
6389 || rld[r].when_needed == RELOAD_FOR_INSN);
6394 /* Deallocate the reload register for reload R. This is called from
6395 remove_address_replacements. */
6397 void
6398 deallocate_reload_reg (int r)
6400 int regno;
6402 if (! rld[r].reg_rtx)
6403 return;
6404 regno = true_regnum (rld[r].reg_rtx);
6405 rld[r].reg_rtx = 0;
6406 if (spill_reg_order[regno] >= 0)
6407 clear_reload_reg_in_use (regno, rld[r].opnum, rld[r].when_needed,
6408 rld[r].mode);
6409 reload_spill_index[r] = -1;
6412 /* If SMALL_REGISTER_CLASSES is nonzero, we may not have merged two
6413 reloads of the same item for fear that we might not have enough reload
6414 registers. However, normally they will get the same reload register
6415 and hence actually need not be loaded twice.
6417 Here we check for the most common case of this phenomenon: when we have
6418    a number of reloads for the same object, each of which was allocated
6419 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
6420 reload, and is not modified in the insn itself. If we find such,
6421 merge all the reloads and set the resulting reload to RELOAD_OTHER.
6422 This will not increase the number of spill registers needed and will
6423 prevent redundant code. */
6425 static void
6426 merge_assigned_reloads (rtx insn)
6428 int i, j;
6430 /* Scan all the reloads looking for ones that only load values and
6431 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
6432 assigned and not modified by INSN. */
6434 for (i = 0; i < n_reloads; i++)
6436 int conflicting_input = 0;
6437 int max_input_address_opnum = -1;
6438 int min_conflicting_input_opnum = MAX_RECOG_OPERANDS;
6440 if (rld[i].in == 0 || rld[i].when_needed == RELOAD_OTHER
6441 || rld[i].out != 0 || rld[i].reg_rtx == 0
6442 || reg_set_p (rld[i].reg_rtx, insn))
6443 continue;
6445 /* Look at all other reloads. Ensure that the only use of this
6446 reload_reg_rtx is in a reload that just loads the same value
6447 as we do. Note that any secondary reloads must be of the identical
6448 class since the values, modes, and result registers are the
6449 same, so we need not do anything with any secondary reloads. */
6451 for (j = 0; j < n_reloads; j++)
6453 if (i == j || rld[j].reg_rtx == 0
6454 || ! reg_overlap_mentioned_p (rld[j].reg_rtx,
6455 rld[i].reg_rtx))
6456 continue;
6458 if (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6459 && rld[j].opnum > max_input_address_opnum)
6460 max_input_address_opnum = rld[j].opnum;
6462      /* If the reload regs aren't exactly the same (e.g., different modes)
6463 or if the values are different, we can't merge this reload.
6464 But if it is an input reload, we might still merge
6465 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_OTHER_ADDRESS reloads. */
6467 if (! rtx_equal_p (rld[i].reg_rtx, rld[j].reg_rtx)
6468 || rld[j].out != 0 || rld[j].in == 0
6469 || ! rtx_equal_p (rld[i].in, rld[j].in))
6471 if (rld[j].when_needed != RELOAD_FOR_INPUT
6472 || ((rld[i].when_needed != RELOAD_FOR_INPUT_ADDRESS
6473 || rld[i].opnum > rld[j].opnum)
6474 && rld[i].when_needed != RELOAD_FOR_OTHER_ADDRESS))
6475 break;
6476 conflicting_input = 1;
6477 if (min_conflicting_input_opnum > rld[j].opnum)
6478 min_conflicting_input_opnum = rld[j].opnum;
6482 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
6483 we, in fact, found any matching reloads. */
6485 if (j == n_reloads
6486 && max_input_address_opnum <= min_conflicting_input_opnum)
6488 gcc_assert (rld[i].when_needed != RELOAD_FOR_OUTPUT);
6490 for (j = 0; j < n_reloads; j++)
6491 if (i != j && rld[j].reg_rtx != 0
6492 && rtx_equal_p (rld[i].reg_rtx, rld[j].reg_rtx)
6493 && (! conflicting_input
6494 || rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6495 || rld[j].when_needed == RELOAD_FOR_OTHER_ADDRESS))
6497 rld[i].when_needed = RELOAD_OTHER;
6498 rld[j].in = 0;
6499 reload_spill_index[j] = -1;
6500 transfer_replacements (i, j);
6503 /* If this is now RELOAD_OTHER, look for any reloads that
6504 load parts of this operand and set them to
6505 RELOAD_FOR_OTHER_ADDRESS if they were for inputs,
6506 RELOAD_OTHER for outputs. Note that this test is
6507 equivalent to looking for reloads for this operand
6508 number.
6510 We must take special care with RELOAD_FOR_OUTPUT_ADDRESS;
6511 it may share registers with a RELOAD_FOR_INPUT, so we can
6512 not change it to RELOAD_FOR_OTHER_ADDRESS. We should
6513 never need to, since we do not modify RELOAD_FOR_OUTPUT.
6515 It is possible that the RELOAD_FOR_OPERAND_ADDRESS
6516 instruction is assigned the same register as the earlier
6517 RELOAD_FOR_OTHER_ADDRESS instruction. Merging these two
6518 instructions will cause the RELOAD_FOR_OTHER_ADDRESS
6519 instruction to be deleted later on. */
6521 if (rld[i].when_needed == RELOAD_OTHER)
6522 for (j = 0; j < n_reloads; j++)
6523 if (rld[j].in != 0
6524 && rld[j].when_needed != RELOAD_OTHER
6525 && rld[j].when_needed != RELOAD_FOR_OTHER_ADDRESS
6526 && rld[j].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
6527 && rld[j].when_needed != RELOAD_FOR_OPERAND_ADDRESS
6528 && (! conflicting_input
6529 || rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6530 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
6531 && reg_overlap_mentioned_for_reload_p (rld[j].in,
6532 rld[i].in))
6534 int k;
6536 rld[j].when_needed
6537 = ((rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6538 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
6539 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER);
6541 /* Check to see if we accidentally converted two
6542 reloads that use the same reload register with
6543 different inputs to the same type. If so, the
6544 resulting code won't work. */
6545 if (rld[j].reg_rtx)
6546 for (k = 0; k < j; k++)
6547 gcc_assert (rld[k].in == 0 || rld[k].reg_rtx == 0
6548 || rld[k].when_needed != rld[j].when_needed
6549 || !rtx_equal_p (rld[k].reg_rtx,
6550 rld[j].reg_rtx)
6551 || rtx_equal_p (rld[k].in,
6552 rld[j].in));
6558 /* These arrays are filled by emit_reload_insns and its subroutines. */
6559 static rtx input_reload_insns[MAX_RECOG_OPERANDS];
6560 static rtx other_input_address_reload_insns = 0;
6561 static rtx other_input_reload_insns = 0;
6562 static rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
6563 static rtx inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6564 static rtx output_reload_insns[MAX_RECOG_OPERANDS];
6565 static rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
6566 static rtx outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6567 static rtx operand_reload_insns = 0;
6568 static rtx other_operand_reload_insns = 0;
6569 static rtx other_output_reload_insns[MAX_RECOG_OPERANDS];
6571 /* Values to be put in spill_reg_store are put here first. */
6572 static rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
6573 static HARD_REG_SET reg_reloaded_died;
6575 /* Check if *RELOAD_REG is suitable as an intermediate or scratch register
6576    of class NEW_CLASS with mode NEW_MODE.  Alternatively, if ALT_RELOAD_REG is
6577    nonzero, check whether that register is suitable.  On success, change *RELOAD_REG to the
6578 adjusted register, and return true. Otherwise, return false. */
6579 static bool
6580 reload_adjust_reg_for_temp (rtx *reload_reg, rtx alt_reload_reg,
6581 enum reg_class new_class,
6582 enum machine_mode new_mode)
6585 rtx reg;
6587 for (reg = *reload_reg; reg; reg = alt_reload_reg, alt_reload_reg = 0)
6589 unsigned regno = REGNO (reg);
6591 if (!TEST_HARD_REG_BIT (reg_class_contents[(int) new_class], regno))
6592 continue;
6593 if (GET_MODE (reg) != new_mode)
6595 if (!HARD_REGNO_MODE_OK (regno, new_mode))
6596 continue;
6597 if (hard_regno_nregs[regno][new_mode]
6598 > hard_regno_nregs[regno][GET_MODE (reg)])
6599 continue;
6600 reg = reload_adjust_reg_for_mode (reg, new_mode);
6602 *reload_reg = reg;
6603 return true;
6605 return false;
6608 /* Check if *RELOAD_REG is suitable as a scratch register for the reload
6609    pattern with insn_code ICODE; alternatively, if ALT_RELOAD_REG is
6610    nonzero, check whether that register is suitable.  On success, change *RELOAD_REG to the
6611 adjusted register, and return true. Otherwise, return false. */
6612 static bool
6613 reload_adjust_reg_for_icode (rtx *reload_reg, rtx alt_reload_reg,
6614 enum insn_code icode)
6617 enum reg_class new_class = scratch_reload_class (icode);
6618 enum machine_mode new_mode = insn_data[(int) icode].operand[2].mode;
6620 return reload_adjust_reg_for_temp (reload_reg, alt_reload_reg,
6621 new_class, new_mode);
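/* By convention, operand 2 of a secondary reload pattern is the scratch
   register, which is why the function above takes the required mode from
   insn_data[(int) icode].operand[2].mode and the class from
   scratch_reload_class (ICODE).  */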
6624 /* Generate insns to perform reload RL, which is for the insn in CHAIN and
6625 has the number J. OLD contains the value to be used as input. */
6627 static void
6628 emit_input_reload_insns (struct insn_chain *chain, struct reload *rl,
6629 rtx old, int j)
6631 rtx insn = chain->insn;
6632 rtx reloadreg = rl->reg_rtx;
6633 rtx oldequiv_reg = 0;
6634 rtx oldequiv = 0;
6635 int special = 0;
6636 enum machine_mode mode;
6637 rtx *where;
6639 /* Determine the mode to reload in.
6640 This is very tricky because we have three to choose from.
6641 There is the mode the insn operand wants (rl->inmode).
6642 There is the mode of the reload register RELOADREG.
6643 There is the intrinsic mode of the operand, which we could find
6644 by stripping some SUBREGs.
6645 It turns out that RELOADREG's mode is irrelevant:
6646 we can change that arbitrarily.
6648 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
6649 then the reload reg may not support QImode moves, so use SImode.
6650 If foo is in memory due to spilling a pseudo reg, this is safe,
6651 because the QImode value is in the least significant part of a
6652 slot big enough for a SImode. If foo is some other sort of
6653 memory reference, then it is impossible to reload this case,
6654 so previous passes had better make sure this never happens.
6656 Then consider a one-word union which has SImode and one of its
6657 members is a float, being fetched as (SUBREG:SF union:SI).
6658 We must fetch that as SFmode because we could be loading into
6659 a float-only register. In this case OLD's mode is correct.
6661 Consider an immediate integer: it has VOIDmode. Here we need
6662 to get a mode from something else.
6664 In some cases, there is a fourth mode, the operand's
6665 containing mode. If the insn specifies a containing mode for
6666 this operand, it overrides all others.
6668 I am not sure whether the algorithm here is always right,
6669 but it does the right things in those cases. */
6671 mode = GET_MODE (old);
6672 if (mode == VOIDmode)
6673 mode = rl->inmode;
6675 /* delete_output_reload is only invoked properly if old contains
6676 the original pseudo register. Since this is replaced with a
6677 hard reg when RELOAD_OVERRIDE_IN is set, see if we can
6678 find the pseudo in RELOAD_IN_REG. */
6679 if (reload_override_in[j]
6680 && REG_P (rl->in_reg))
6682 oldequiv = old;
6683 old = rl->in_reg;
6685 if (oldequiv == 0)
6686 oldequiv = old;
6687 else if (REG_P (oldequiv))
6688 oldequiv_reg = oldequiv;
6689 else if (GET_CODE (oldequiv) == SUBREG)
6690 oldequiv_reg = SUBREG_REG (oldequiv);
6692 /* If we are reloading from a register that was recently stored in
6693 with an output-reload, see if we can prove there was
6694 actually no need to store the old value in it. */
6696 if (optimize && REG_P (oldequiv)
6697 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6698 && spill_reg_store[REGNO (oldequiv)]
6699 && REG_P (old)
6700 && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (oldequiv)])
6701 || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
6702 rl->out_reg)))
6703 delete_output_reload (insn, j, REGNO (oldequiv));
6705 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
6706 then load RELOADREG from OLDEQUIV. Note that we cannot use
6707 gen_lowpart_common since it can do the wrong thing when
6708 RELOADREG has a multi-word mode. Note that RELOADREG
6709 must always be a REG here. */
6711 if (GET_MODE (reloadreg) != mode)
6712 reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);
6713 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
6714 oldequiv = SUBREG_REG (oldequiv);
6715 if (GET_MODE (oldequiv) != VOIDmode
6716 && mode != GET_MODE (oldequiv))
6717 oldequiv = gen_lowpart_SUBREG (mode, oldequiv);
6719 /* Switch to the right place to emit the reload insns. */
6720 switch (rl->when_needed)
6722 case RELOAD_OTHER:
6723 where = &other_input_reload_insns;
6724 break;
6725 case RELOAD_FOR_INPUT:
6726 where = &input_reload_insns[rl->opnum];
6727 break;
6728 case RELOAD_FOR_INPUT_ADDRESS:
6729 where = &input_address_reload_insns[rl->opnum];
6730 break;
6731 case RELOAD_FOR_INPADDR_ADDRESS:
6732 where = &inpaddr_address_reload_insns[rl->opnum];
6733 break;
6734 case RELOAD_FOR_OUTPUT_ADDRESS:
6735 where = &output_address_reload_insns[rl->opnum];
6736 break;
6737 case RELOAD_FOR_OUTADDR_ADDRESS:
6738 where = &outaddr_address_reload_insns[rl->opnum];
6739 break;
6740 case RELOAD_FOR_OPERAND_ADDRESS:
6741 where = &operand_reload_insns;
6742 break;
6743 case RELOAD_FOR_OPADDR_ADDR:
6744 where = &other_operand_reload_insns;
6745 break;
6746 case RELOAD_FOR_OTHER_ADDRESS:
6747 where = &other_input_address_reload_insns;
6748 break;
6749 default:
6750 gcc_unreachable ();
6753 push_to_sequence (*where);
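  /* From here on, every insn we emit is collected on the sequence selected
     above; emit_reload_insns later splices these per-type sequences back
     around the original INSN in the required order.  */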
6755 /* Auto-increment addresses must be reloaded in a special way. */
6756 if (rl->out && ! rl->out_reg)
6758      /* We are not going to bother supporting the case where an
6759 incremented register can't be copied directly from
6760 OLDEQUIV since this seems highly unlikely. */
6761 gcc_assert (rl->secondary_in_reload < 0);
6763 if (reload_inherited[j])
6764 oldequiv = reloadreg;
6766 old = XEXP (rl->in_reg, 0);
6768 if (optimize && REG_P (oldequiv)
6769 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6770 && spill_reg_store[REGNO (oldequiv)]
6771 && REG_P (old)
6772 && (dead_or_set_p (insn,
6773 spill_reg_stored_to[REGNO (oldequiv)])
6774 || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
6775 old)))
6776 delete_output_reload (insn, j, REGNO (oldequiv));
6778 /* Prevent normal processing of this reload. */
6779 special = 1;
6780 /* Output a special code sequence for this case. */
6781 new_spill_reg_store[REGNO (reloadreg)]
6782 = inc_for_reload (reloadreg, oldequiv, rl->out,
6783 rl->inc);
6786 /* If we are reloading a pseudo-register that was set by the previous
6787 insn, see if we can get rid of that pseudo-register entirely
6788 by redirecting the previous insn into our reload register. */
6790 else if (optimize && REG_P (old)
6791 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6792 && dead_or_set_p (insn, old)
6793 /* This is unsafe if some other reload
6794 uses the same reg first. */
6795 && ! conflicts_with_override (reloadreg)
6796 && free_for_value_p (REGNO (reloadreg), rl->mode, rl->opnum,
6797 rl->when_needed, old, rl->out, j, 0))
6799 rtx temp = PREV_INSN (insn);
6800 while (temp && NOTE_P (temp))
6801 temp = PREV_INSN (temp);
6802 if (temp
6803 && NONJUMP_INSN_P (temp)
6804 && GET_CODE (PATTERN (temp)) == SET
6805 && SET_DEST (PATTERN (temp)) == old
6806 /* Make sure we can access insn_operand_constraint. */
6807 && asm_noperands (PATTERN (temp)) < 0
6808          /* This is unsafe if the operand occurs more than once in the current
6809 insn. Perhaps some occurrences aren't reloaded. */
6810 && count_occurrences (PATTERN (insn), old, 0) == 1)
6812 rtx old = SET_DEST (PATTERN (temp));
6813 /* Store into the reload register instead of the pseudo. */
6814 SET_DEST (PATTERN (temp)) = reloadreg;
6816 /* Verify that resulting insn is valid. */
6817 extract_insn (temp);
6818 if (constrain_operands (1))
6820 /* If the previous insn is an output reload, the source is
6821 a reload register, and its spill_reg_store entry will
6822 contain the previous destination. This is now
6823 invalid. */
6824 if (REG_P (SET_SRC (PATTERN (temp)))
6825 && REGNO (SET_SRC (PATTERN (temp))) < FIRST_PSEUDO_REGISTER)
6827 spill_reg_store[REGNO (SET_SRC (PATTERN (temp)))] = 0;
6828 spill_reg_stored_to[REGNO (SET_SRC (PATTERN (temp)))] = 0;
6831 /* If these are the only uses of the pseudo reg,
6832 pretend for GDB it lives in the reload reg we used. */
6833 if (REG_N_DEATHS (REGNO (old)) == 1
6834 && REG_N_SETS (REGNO (old)) == 1)
6836 reg_renumber[REGNO (old)] = REGNO (rl->reg_rtx);
6837 if (flag_ira)
6838 mark_allocation_change (REGNO (old));
6839 alter_reg (REGNO (old), -1, false);
6841 special = 1;
6843 else
6845 SET_DEST (PATTERN (temp)) = old;
6850 /* We can't do that, so output an insn to load RELOADREG. */
6852 /* If we have a secondary reload, pick up the secondary register
6853 and icode, if any. If OLDEQUIV and OLD are different or
6854 if this is an in-out reload, recompute whether or not we
6855 still need a secondary register and what the icode should
6856 be. If we still need a secondary register and the class or
6857 icode is different, go back to reloading from OLD if using
6858 OLDEQUIV means that we got the wrong type of register. We
6859 cannot have different class or icode due to an in-out reload
6860 because we don't make such reloads when both the input and
6861 output need secondary reload registers. */
6863 if (! special && rl->secondary_in_reload >= 0)
6865 rtx second_reload_reg = 0;
6866 rtx third_reload_reg = 0;
6867 int secondary_reload = rl->secondary_in_reload;
6868 rtx real_oldequiv = oldequiv;
6869 rtx real_old = old;
6870 rtx tmp;
6871 enum insn_code icode;
6872 enum insn_code tertiary_icode = CODE_FOR_nothing;
6874 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
6875 and similarly for OLD.
6876 See comments in get_secondary_reload in reload.c. */
6877 /* If it is a pseudo that cannot be replaced with its
6878 equivalent MEM, we must fall back to reload_in, which
6879 will have all the necessary substitutions registered.
6880 Likewise for a pseudo that can't be replaced with its
6881 equivalent constant.
6883 Take extra care for subregs of such pseudos. Note that
6884 we cannot use reg_equiv_mem in this case because it is
6885 not in the right mode. */
6887 tmp = oldequiv;
6888 if (GET_CODE (tmp) == SUBREG)
6889 tmp = SUBREG_REG (tmp);
6890 if (REG_P (tmp)
6891 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
6892 && (reg_equiv_memory_loc[REGNO (tmp)] != 0
6893 || reg_equiv_constant[REGNO (tmp)] != 0))
6895 if (! reg_equiv_mem[REGNO (tmp)]
6896 || num_not_at_initial_offset
6897 || GET_CODE (oldequiv) == SUBREG)
6898 real_oldequiv = rl->in;
6899 else
6900 real_oldequiv = reg_equiv_mem[REGNO (tmp)];
6903 tmp = old;
6904 if (GET_CODE (tmp) == SUBREG)
6905 tmp = SUBREG_REG (tmp);
6906 if (REG_P (tmp)
6907 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
6908 && (reg_equiv_memory_loc[REGNO (tmp)] != 0
6909 || reg_equiv_constant[REGNO (tmp)] != 0))
6911 if (! reg_equiv_mem[REGNO (tmp)]
6912 || num_not_at_initial_offset
6913 || GET_CODE (old) == SUBREG)
6914 real_old = rl->in;
6915 else
6916 real_old = reg_equiv_mem[REGNO (tmp)];
6919 second_reload_reg = rld[secondary_reload].reg_rtx;
6920 if (rld[secondary_reload].secondary_in_reload >= 0)
6922 int tertiary_reload = rld[secondary_reload].secondary_in_reload;
6924 third_reload_reg = rld[tertiary_reload].reg_rtx;
6925 tertiary_icode = rld[secondary_reload].secondary_in_icode;
6926 /* We'd have to add more code for quaternary reloads. */
6927 gcc_assert (rld[tertiary_reload].secondary_in_reload < 0);
6929 icode = rl->secondary_in_icode;
6931 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
6932 || (rl->in != 0 && rl->out != 0))
6934 secondary_reload_info sri, sri2;
6935 enum reg_class new_class, new_t_class;
6937 sri.icode = CODE_FOR_nothing;
6938 sri.prev_sri = NULL;
6939 new_class = targetm.secondary_reload (1, real_oldequiv, rl->class,
6940 mode, &sri);
6942 if (new_class == NO_REGS && sri.icode == CODE_FOR_nothing)
6943 second_reload_reg = 0;
6944 else if (new_class == NO_REGS)
6946 if (reload_adjust_reg_for_icode (&second_reload_reg,
6947 third_reload_reg, sri.icode))
6948 icode = sri.icode, third_reload_reg = 0;
6949 else
6950 oldequiv = old, real_oldequiv = real_old;
6952 else if (sri.icode != CODE_FOR_nothing)
6953 /* We currently lack a way to express this in reloads. */
6954 gcc_unreachable ();
6955 else
6957 sri2.icode = CODE_FOR_nothing;
6958 sri2.prev_sri = &sri;
6959 new_t_class = targetm.secondary_reload (1, real_oldequiv,
6960 new_class, mode, &sri);
6961 if (new_t_class == NO_REGS && sri2.icode == CODE_FOR_nothing)
6963 if (reload_adjust_reg_for_temp (&second_reload_reg,
6964 third_reload_reg,
6965 new_class, mode))
6966 third_reload_reg = 0, tertiary_icode = sri2.icode;
6967 else
6968 oldequiv = old, real_oldequiv = real_old;
6970 else if (new_t_class == NO_REGS && sri2.icode != CODE_FOR_nothing)
6972 rtx intermediate = second_reload_reg;
6974 if (reload_adjust_reg_for_temp (&intermediate, NULL,
6975 new_class, mode)
6976 && reload_adjust_reg_for_icode (&third_reload_reg, NULL,
6977 sri2.icode))
6979 second_reload_reg = intermediate;
6980 tertiary_icode = sri2.icode;
6982 else
6983 oldequiv = old, real_oldequiv = real_old;
6985 else if (new_t_class != NO_REGS && sri2.icode == CODE_FOR_nothing)
6987 rtx intermediate = second_reload_reg;
6989 if (reload_adjust_reg_for_temp (&intermediate, NULL,
6990 new_class, mode)
6991 && reload_adjust_reg_for_temp (&third_reload_reg, NULL,
6992 new_t_class, mode))
6994 second_reload_reg = intermediate;
6995 tertiary_icode = sri2.icode;
6997 else
6998 oldequiv = old, real_oldequiv = real_old;
7000 else
7001 /* This could be handled more intelligently too. */
7002 oldequiv = old, real_oldequiv = real_old;
7006 /* If we still need a secondary reload register, check
7007 to see if it is being used as a scratch or intermediate
7008 register and generate code appropriately. If we need
7009 a scratch register, use REAL_OLDEQUIV since the form of
7010 the insn may depend on the actual address if it is
7011 a MEM. */
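/* The branches below cover: a reload pattern (ICODE) that loads
   RELOADREG directly and uses SECOND_RELOAD_REG as a scratch; a
   tertiary reload pattern that loads the intermediate register using
   THIRD_RELOAD_REG as its scratch; a chain of plain moves through the
   third and second reload registers; or a single move into the
   intermediate register.  In the non-scratch cases OLDEQUIV becomes
   the intermediate register, so the final copy below moves it into
   RELOADREG.  */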
7013 if (second_reload_reg)
7015 if (icode != CODE_FOR_nothing)
7017 /* We'd have to add extra code to handle this case. */
7018 gcc_assert (!third_reload_reg);
7020 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
7021 second_reload_reg));
7022 special = 1;
7024 else
7026 /* See if we need a scratch register to load the
7027 intermediate register (a tertiary reload). */
7028 if (tertiary_icode != CODE_FOR_nothing)
7030 emit_insn ((GEN_FCN (tertiary_icode)
7031 (second_reload_reg, real_oldequiv,
7032 third_reload_reg)));
7034 else if (third_reload_reg)
7036 gen_reload (third_reload_reg, real_oldequiv,
7037 rl->opnum,
7038 rl->when_needed);
7039 gen_reload (second_reload_reg, third_reload_reg,
7040 rl->opnum,
7041 rl->when_needed);
7043 else
7044 gen_reload (second_reload_reg, real_oldequiv,
7045 rl->opnum,
7046 rl->when_needed);
7048 oldequiv = second_reload_reg;
7053 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
7055 rtx real_oldequiv = oldequiv;
7057 if ((REG_P (oldequiv)
7058 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
7059 && (reg_equiv_memory_loc[REGNO (oldequiv)] != 0
7060 || reg_equiv_constant[REGNO (oldequiv)] != 0))
7061 || (GET_CODE (oldequiv) == SUBREG
7062 && REG_P (SUBREG_REG (oldequiv))
7063 && (REGNO (SUBREG_REG (oldequiv))
7064 >= FIRST_PSEUDO_REGISTER)
7065 && ((reg_equiv_memory_loc
7066 [REGNO (SUBREG_REG (oldequiv))] != 0)
7067 || (reg_equiv_constant
7068 [REGNO (SUBREG_REG (oldequiv))] != 0)))
7069 || (CONSTANT_P (oldequiv)
7070 && (PREFERRED_RELOAD_CLASS (oldequiv,
7071 REGNO_REG_CLASS (REGNO (reloadreg)))
7072 == NO_REGS)))
7073 real_oldequiv = rl->in;
7074 gen_reload (reloadreg, real_oldequiv, rl->opnum,
7075 rl->when_needed);
7078 if (flag_non_call_exceptions)
7079 copy_eh_notes (insn, get_insns ());
7081 /* End this sequence. */
7082 *where = get_insns ();
7083 end_sequence ();
7085 /* Update reload_override_in so that delete_address_reloads_1
7086 can see the actual register usage. */
7087 if (oldequiv_reg)
7088 reload_override_in[j] = oldequiv;
7091 /* Generate insns for the output reload RL, which is for the insn described
7092 by CHAIN and has the number J. */
7093 static void
7094 emit_output_reload_insns (struct insn_chain *chain, struct reload *rl,
7095 int j)
7097 rtx reloadreg = rl->reg_rtx;
7098 rtx insn = chain->insn;
7099 int special = 0;
7100 rtx old = rl->out;
7101 enum machine_mode mode = GET_MODE (old);
7102 rtx p;
7104 if (rl->when_needed == RELOAD_OTHER)
7105 start_sequence ();
7106 else
7107 push_to_sequence (output_reload_insns[rl->opnum]);
7109 /* Determine the mode to reload in.
7110 See comments above (for input reloading). */
7112 if (mode == VOIDmode)
7114 /* VOIDmode should never happen for an output. */
7115 if (asm_noperands (PATTERN (insn)) < 0)
7116 /* It's the compiler's fault. */
7117 fatal_insn ("VOIDmode on an output", insn);
7118 error_for_asm (insn, "output operand is constant in %<asm%>");
7119 /* Prevent crash--use something we know is valid. */
7120 mode = word_mode;
7121 old = gen_rtx_REG (mode, REGNO (reloadreg));
7124 if (GET_MODE (reloadreg) != mode)
7125 reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);
7127 /* If we need two reload regs, set RELOADREG to the intermediate
7128 one, since it will be stored into OLD. We might need a secondary
7129 register only for an input reload, so check again here. */
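/* In other words, with a secondary output reload the value typically
   travels primary reload reg -> secondary reload reg -> OLD, or the
   secondary register serves only as a scratch for a pattern that
   stores the primary register into OLD directly.  */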
7131 if (rl->secondary_out_reload >= 0)
7133 rtx real_old = old;
7134 int secondary_reload = rl->secondary_out_reload;
7135 int tertiary_reload = rld[secondary_reload].secondary_out_reload;
7137 if (REG_P (old) && REGNO (old) >= FIRST_PSEUDO_REGISTER
7138 && reg_equiv_mem[REGNO (old)] != 0)
7139 real_old = reg_equiv_mem[REGNO (old)];
7141 if (secondary_reload_class (0, rl->class, mode, real_old) != NO_REGS)
7143 rtx second_reloadreg = reloadreg;
7144 reloadreg = rld[secondary_reload].reg_rtx;
7146 /* See if RELOADREG is to be used as a scratch register
7147 or as an intermediate register. */
7148 if (rl->secondary_out_icode != CODE_FOR_nothing)
7150 /* We'd have to add extra code to handle this case. */
7151 gcc_assert (tertiary_reload < 0);
7153 emit_insn ((GEN_FCN (rl->secondary_out_icode)
7154 (real_old, second_reloadreg, reloadreg)));
7155 special = 1;
7157 else
7159 /* See if we need both a scratch and intermediate reload
7160 register. */
7162 enum insn_code tertiary_icode
7163 = rld[secondary_reload].secondary_out_icode;
7165 /* We'd have to add more code for quaternary reloads. */
7166 gcc_assert (tertiary_reload < 0
7167 || rld[tertiary_reload].secondary_out_reload < 0);
7169 if (GET_MODE (reloadreg) != mode)
7170 reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);
7172 if (tertiary_icode != CODE_FOR_nothing)
7174 rtx third_reloadreg = rld[tertiary_reload].reg_rtx;
7175 rtx tem;
7177 /* Copy primary reload reg to secondary reload reg.
7178 (Note that these have been swapped above, then
7179 secondary reload reg to OLD using our insn.) */
7181 /* If REAL_OLD is a paradoxical SUBREG, remove it
7182 and try to put the opposite SUBREG on
7183 RELOADREG. */
7184 if (GET_CODE (real_old) == SUBREG
7185 && (GET_MODE_SIZE (GET_MODE (real_old))
7186 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
7187 && 0 != (tem = gen_lowpart_common
7188 (GET_MODE (SUBREG_REG (real_old)),
7189 reloadreg)))
7190 real_old = SUBREG_REG (real_old), reloadreg = tem;
7192 gen_reload (reloadreg, second_reloadreg,
7193 rl->opnum, rl->when_needed);
7194 emit_insn ((GEN_FCN (tertiary_icode)
7195 (real_old, reloadreg, third_reloadreg)));
7196 special = 1;
7199 else
7201 /* Copy between the reload regs here and then to
7202 OUT later. */
7204 gen_reload (reloadreg, second_reloadreg,
7205 rl->opnum, rl->when_needed);
7206 if (tertiary_reload >= 0)
7208 rtx third_reloadreg = rld[tertiary_reload].reg_rtx;
7210 gen_reload (third_reloadreg, reloadreg,
7211 rl->opnum, rl->when_needed);
7212 reloadreg = third_reloadreg;
7219 /* Output the last reload insn. */
7220 if (! special)
7222 rtx set;
7224 /* Don't output the last reload if OLD is not the dest of
7225 INSN and is in the src and is clobbered by INSN. */
7226 if (! flag_expensive_optimizations
7227 || !REG_P (old)
7228 || !(set = single_set (insn))
7229 || rtx_equal_p (old, SET_DEST (set))
7230 || !reg_mentioned_p (old, SET_SRC (set))
7231 || !((REGNO (old) < FIRST_PSEUDO_REGISTER)
7232 && regno_clobbered_p (REGNO (old), insn, rl->mode, 0)))
7233 gen_reload (old, reloadreg, rl->opnum,
7234 rl->when_needed);
7237 /* Look at all insns we emitted, just to be safe. */
7238 for (p = get_insns (); p; p = NEXT_INSN (p))
7239 if (INSN_P (p))
7241 rtx pat = PATTERN (p);
7243 /* If this output reload doesn't come from a spill reg,
7244 clear any memory of reloaded copies of the pseudo reg.
7245 If this output reload comes from a spill reg,
7246 reg_has_output_reload will make this do nothing. */
7247 note_stores (pat, forget_old_reloads_1, NULL);
7249 if (reg_mentioned_p (rl->reg_rtx, pat))
7251 rtx set = single_set (insn);
7252 if (reload_spill_index[j] < 0
7253 && set
7254 && SET_SRC (set) == rl->reg_rtx)
7256 int src = REGNO (SET_SRC (set));
7258 reload_spill_index[j] = src;
7259 SET_HARD_REG_BIT (reg_is_output_reload, src);
7260 if (find_regno_note (insn, REG_DEAD, src))
7261 SET_HARD_REG_BIT (reg_reloaded_died, src);
7263 if (REGNO (rl->reg_rtx) < FIRST_PSEUDO_REGISTER)
7265 int s = rl->secondary_out_reload;
7266 set = single_set (p);
7267 /* If this reload copies only to the secondary reload
7268 register, the secondary reload does the actual
7269 store. */
7270 if (s >= 0 && set == NULL_RTX)
7271 /* We can't tell what function the secondary reload
7272 has and where the actual store to the pseudo is
7273 made; leave new_spill_reg_store alone. */
7275 else if (s >= 0
7276 && SET_SRC (set) == rl->reg_rtx
7277 && SET_DEST (set) == rld[s].reg_rtx)
7279 /* Usually the next instruction will be the
7280 secondary reload insn; if we can confirm
7281 that it is, setting new_spill_reg_store to
7282 that insn will allow an extra optimization. */
7283 rtx s_reg = rld[s].reg_rtx;
7284 rtx next = NEXT_INSN (p);
7285 rld[s].out = rl->out;
7286 rld[s].out_reg = rl->out_reg;
7287 set = single_set (next);
7288 if (set && SET_SRC (set) == s_reg
7289 && ! new_spill_reg_store[REGNO (s_reg)])
7291 SET_HARD_REG_BIT (reg_is_output_reload,
7292 REGNO (s_reg));
7293 new_spill_reg_store[REGNO (s_reg)] = next;
7296 else
7297 new_spill_reg_store[REGNO (rl->reg_rtx)] = p;
7302 if (rl->when_needed == RELOAD_OTHER)
7304 emit_insn (other_output_reload_insns[rl->opnum]);
7305 other_output_reload_insns[rl->opnum] = get_insns ();
7307 else
7308 output_reload_insns[rl->opnum] = get_insns ();
7310 if (flag_non_call_exceptions)
7311 copy_eh_notes (insn, get_insns ());
7313 end_sequence ();
7316 /* Do input reloading for reload RL, which is for the insn described by CHAIN
7317 and has the number J. */
7318 static void
7319 do_input_reload (struct insn_chain *chain, struct reload *rl, int j)
7321 rtx insn = chain->insn;
7322 rtx old = (rl->in && MEM_P (rl->in)
7323 ? rl->in_reg : rl->in);
7325 if (old != 0
7326 /* AUTO_INC reloads need to be handled even if inherited. We got an
7327 AUTO_INC reload if reload_out is set but reload_out_reg isn't. */
7328 && (! reload_inherited[j] || (rl->out && ! rl->out_reg))
7329 && ! rtx_equal_p (rl->reg_rtx, old)
7330 && rl->reg_rtx != 0)
7331 emit_input_reload_insns (chain, rld + j, old, j);
7333 /* When inheriting a wider reload, we have a MEM in rl->in,
7334 e.g. inheriting a SImode output reload for
7335 (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10))) */
7336 if (optimize && reload_inherited[j] && rl->in
7337 && MEM_P (rl->in)
7338 && MEM_P (rl->in_reg)
7339 && reload_spill_index[j] >= 0
7340 && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
7341 rl->in = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];
7343 /* If we are reloading a register that was recently stored in with an
7344 output-reload, see if we can prove there was
7345 actually no need to store the old value in it. */
7347 if (optimize
7348 && (reload_inherited[j] || reload_override_in[j])
7349 && rl->reg_rtx
7350 && REG_P (rl->reg_rtx)
7351 && spill_reg_store[REGNO (rl->reg_rtx)] != 0
7352 #if 0
7353 /* There doesn't seem to be any reason to restrict this to pseudos
7354 and doing so loses in the case where we are copying from a
7355 register of the wrong class. */
7356 && (REGNO (spill_reg_stored_to[REGNO (rl->reg_rtx)])
7357 >= FIRST_PSEUDO_REGISTER)
7358 #endif
7359 /* The insn might have already some references to stackslots
7360 replaced by MEMs, while reload_out_reg still names the
7361 original pseudo. */
7362 && (dead_or_set_p (insn,
7363 spill_reg_stored_to[REGNO (rl->reg_rtx)])
7364 || rtx_equal_p (spill_reg_stored_to[REGNO (rl->reg_rtx)],
7365 rl->out_reg)))
7366 delete_output_reload (insn, j, REGNO (rl->reg_rtx));
7369 /* Do output reloading for reload RL, which is for the insn described by
7370 CHAIN and has the number J.
7371 ??? At some point we need to support handling output reloads of
7372 JUMP_INSNs or insns that set cc0. */
7373 static void
7374 do_output_reload (struct insn_chain *chain, struct reload *rl, int j)
7376 rtx note, old;
7377 rtx insn = chain->insn;
7378 /* If this is an output reload that stores something that is
7379 not loaded in this same reload, see if we can eliminate a previous
7380 store. */
7381 rtx pseudo = rl->out_reg;
7383 if (pseudo
7384 && optimize
7385 && REG_P (pseudo)
7386 && ! rtx_equal_p (rl->in_reg, pseudo)
7387 && REGNO (pseudo) >= FIRST_PSEUDO_REGISTER
7388 && reg_last_reload_reg[REGNO (pseudo)])
7390 int pseudo_no = REGNO (pseudo);
7391 int last_regno = REGNO (reg_last_reload_reg[pseudo_no]);
7393 /* We don't need to test full validity of last_regno for
7394 inheritance here; we only want to know if the store actually
7395 matches the pseudo. */
7396 if (TEST_HARD_REG_BIT (reg_reloaded_valid, last_regno)
7397 && reg_reloaded_contents[last_regno] == pseudo_no
7398 && spill_reg_store[last_regno]
7399 && rtx_equal_p (pseudo, spill_reg_stored_to[last_regno]))
7400 delete_output_reload (insn, j, last_regno);
7403 old = rl->out_reg;
7404 if (old == 0
7405 || rl->reg_rtx == old
7406 || rl->reg_rtx == 0)
7407 return;
7409 /* An output operand that dies right away does need a reload,
7410 but need not be copied from it. Show the new location in the
7411 REG_UNUSED note. */
7412 if ((REG_P (old) || GET_CODE (old) == SCRATCH)
7413 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
7415 XEXP (note, 0) = rl->reg_rtx;
7416 return;
7418 /* Likewise for a SUBREG of an operand that dies. */
7419 else if (GET_CODE (old) == SUBREG
7420 && REG_P (SUBREG_REG (old))
7421 && 0 != (note = find_reg_note (insn, REG_UNUSED,
7422 SUBREG_REG (old))))
7424 XEXP (note, 0) = gen_lowpart_common (GET_MODE (old),
7425 rl->reg_rtx);
7426 return;
7428 else if (GET_CODE (old) == SCRATCH)
7429 /* If we aren't optimizing, there won't be a REG_UNUSED note,
7430 but we don't want to make an output reload. */
7431 return;
7433 /* If this is a JUMP_INSN, we can't support output reloads yet. */
7434 gcc_assert (NONJUMP_INSN_P (insn));
7436 emit_output_reload_insns (chain, rld + j, j);
7439 /* Reload number R reloads from or to a group of hard registers starting at
7440 register REGNO. Return true if it can be treated for inheritance purposes
7441 like a group of reloads, each one reloading a single hard register.
7442 The caller has already checked that the spill register and REGNO use
7443 the same number of registers to store the reload value. */
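/* For example, a reload of a value that occupies two word-sized hard
   registers can usually be inherited one register at a time, unless
   the target's CANNOT_CHANGE_MODE_CLASS rules forbid viewing either
   group of registers in the narrower, single-register mode.  */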
7445 static bool
7446 inherit_piecemeal_p (int r ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED)
7448 #ifdef CANNOT_CHANGE_MODE_CLASS
7449 return (!REG_CANNOT_CHANGE_MODE_P (reload_spill_index[r],
7450 GET_MODE (rld[r].reg_rtx),
7451 reg_raw_mode[reload_spill_index[r]])
7452 && !REG_CANNOT_CHANGE_MODE_P (regno,
7453 GET_MODE (rld[r].reg_rtx),
7454 reg_raw_mode[regno]));
7455 #else
7456 return true;
7457 #endif
7460 /* Output insns to reload values in and out of the chosen reload regs. */
7462 static void
7463 emit_reload_insns (struct insn_chain *chain)
7465 rtx insn = chain->insn;
7467 int j;
7469 CLEAR_HARD_REG_SET (reg_reloaded_died);
7471 for (j = 0; j < reload_n_operands; j++)
7472 input_reload_insns[j] = input_address_reload_insns[j]
7473 = inpaddr_address_reload_insns[j]
7474 = output_reload_insns[j] = output_address_reload_insns[j]
7475 = outaddr_address_reload_insns[j]
7476 = other_output_reload_insns[j] = 0;
7477 other_input_address_reload_insns = 0;
7478 other_input_reload_insns = 0;
7479 operand_reload_insns = 0;
7480 other_operand_reload_insns = 0;
7482 /* Dump reloads into the dump file. */
7483 if (dump_file)
7485 fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
7486 debug_reload_to_stream (dump_file);
7489 /* Now output the instructions to copy the data into and out of the
7490 reload registers. Do these in the order that the reloads were reported,
7491 since reloads of base and index registers precede reloads of operands
7492 and the operands may need the base and index registers reloaded. */
7494 for (j = 0; j < n_reloads; j++)
7496 if (rld[j].reg_rtx
7497 && REGNO (rld[j].reg_rtx) < FIRST_PSEUDO_REGISTER)
7498 new_spill_reg_store[REGNO (rld[j].reg_rtx)] = 0;
7500 do_input_reload (chain, rld + j, j);
7501 do_output_reload (chain, rld + j, j);
7504 /* Now write all the insns we made for reloads in the order expected by
7505 the allocation functions. Prior to the insn being reloaded, we write
7506 the following reloads:
7508 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
7510 RELOAD_OTHER reloads.
7512 For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
7513 by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
7514 RELOAD_FOR_INPUT reload for the operand.
7516 RELOAD_FOR_OPADDR_ADDRS reloads.
7518 RELOAD_FOR_OPERAND_ADDRESS reloads.
7520 After the insn being reloaded, we write the following:
7522 For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
7523 by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
7524 RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
7525 reloads for the operand. The RELOAD_OTHER output reloads are
7526 output in descending order by reload number. */
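/* So for an insn with a single reloaded operand the emitted stream is:
   the RELOAD_FOR_OTHER_ADDRESS and RELOAD_OTHER input reloads, then the
   operand's INPADDR_ADDRESS / INPUT_ADDRESS / INPUT reloads, then the
   OPADDR_ADDRS and OPERAND_ADDRESS reloads, then the insn itself, then
   the operand's OUTADDR_ADDRESS / OUTPUT_ADDRESS / OUTPUT and
   RELOAD_OTHER output reloads.  */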
7528 emit_insn_before (other_input_address_reload_insns, insn);
7529 emit_insn_before (other_input_reload_insns, insn);
7531 for (j = 0; j < reload_n_operands; j++)
7533 emit_insn_before (inpaddr_address_reload_insns[j], insn);
7534 emit_insn_before (input_address_reload_insns[j], insn);
7535 emit_insn_before (input_reload_insns[j], insn);
7538 emit_insn_before (other_operand_reload_insns, insn);
7539 emit_insn_before (operand_reload_insns, insn);
7541 for (j = 0; j < reload_n_operands; j++)
7543 rtx x = emit_insn_after (outaddr_address_reload_insns[j], insn);
7544 x = emit_insn_after (output_address_reload_insns[j], x);
7545 x = emit_insn_after (output_reload_insns[j], x);
7546 emit_insn_after (other_output_reload_insns[j], x);
7549 /* For all the spill regs newly reloaded in this instruction,
7550 record what they were reloaded from, so subsequent instructions
7551 can inherit the reloads.
7553 Update spill_reg_store for the reloads of this insn.
7554 Copy the elements that were updated in the loop above. */
7556 for (j = 0; j < n_reloads; j++)
7558 int r = reload_order[j];
7559 int i = reload_spill_index[r];
7561 /* If this is a non-inherited input reload from a pseudo, we must
7562 clear any memory of a previous store to the same pseudo. Only do
7563 something if there will not be an output reload for the pseudo
7564 being reloaded. */
7565 if (rld[r].in_reg != 0
7566 && ! (reload_inherited[r] || reload_override_in[r]))
7568 rtx reg = rld[r].in_reg;
7570 if (GET_CODE (reg) == SUBREG)
7571 reg = SUBREG_REG (reg);
7573 if (REG_P (reg)
7574 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
7575 && !REGNO_REG_SET_P (&reg_has_output_reload, REGNO (reg)))
7577 int nregno = REGNO (reg);
7579 if (reg_last_reload_reg[nregno])
7581 int last_regno = REGNO (reg_last_reload_reg[nregno]);
7583 if (reg_reloaded_contents[last_regno] == nregno)
7584 spill_reg_store[last_regno] = 0;
7589 /* I is nonneg if this reload used a register.
7590 If rld[r].reg_rtx is 0, this is an optional reload
7591 that we opted to ignore. */
7593 if (i >= 0 && rld[r].reg_rtx != 0)
7595 int nr = hard_regno_nregs[i][GET_MODE (rld[r].reg_rtx)];
7596 int k;
7597 int part_reaches_end = 0;
7598 int all_reaches_end = 1;
7600 /* For a multi register reload, we need to check if all or part
7601 of the value lives to the end. */
7602 for (k = 0; k < nr; k++)
7604 if (reload_reg_reaches_end_p (i + k, rld[r].opnum,
7605 rld[r].when_needed))
7606 part_reaches_end = 1;
7607 else
7608 all_reaches_end = 0;
7611 /* Ignore reloads that don't reach the end of the insn in
7612 their entirety. */
7613 if (all_reaches_end)
7615 /* First, clear out memory of what used to be in this spill reg.
7616 If consecutive registers are used, clear them all. */
7618 for (k = 0; k < nr; k++)
7620 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
7621 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered, i + k);
7624 /* Maybe the spill reg contains a copy of reload_out. */
7625 if (rld[r].out != 0
7626 && (REG_P (rld[r].out)
7627 #ifdef AUTO_INC_DEC
7628 || ! rld[r].out_reg
7629 #endif
7630 || REG_P (rld[r].out_reg)))
7632 rtx out = (REG_P (rld[r].out)
7633 ? rld[r].out
7634 : rld[r].out_reg
7635 ? rld[r].out_reg
7636 /* AUTO_INC */ : XEXP (rld[r].in_reg, 0));
7637 int nregno = REGNO (out);
7638 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
7639 : hard_regno_nregs[nregno]
7640 [GET_MODE (rld[r].reg_rtx)]);
7641 bool piecemeal;
7643 spill_reg_store[i] = new_spill_reg_store[i];
7644 spill_reg_stored_to[i] = out;
7645 reg_last_reload_reg[nregno] = rld[r].reg_rtx;
7647 piecemeal = (nregno < FIRST_PSEUDO_REGISTER
7648 && nr == nnr
7649 && inherit_piecemeal_p (r, nregno));
7651 /* If NREGNO is a hard register, it may occupy more than
7652 one register. If it does, say what is in the
7653 rest of the registers assuming that both registers
7654 agree on how many words the object takes. If not,
7655 invalidate the subsequent registers. */
7657 if (nregno < FIRST_PSEUDO_REGISTER)
7658 for (k = 1; k < nnr; k++)
7659 reg_last_reload_reg[nregno + k]
7660 = (piecemeal
7661 ? regno_reg_rtx[REGNO (rld[r].reg_rtx) + k]
7662 : 0);
7664 /* Now do the inverse operation. */
7665 for (k = 0; k < nr; k++)
7667 CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k);
7668 reg_reloaded_contents[i + k]
7669 = (nregno >= FIRST_PSEUDO_REGISTER || !piecemeal
7670 ? nregno
7671 : nregno + k);
7672 reg_reloaded_insn[i + k] = insn;
7673 SET_HARD_REG_BIT (reg_reloaded_valid, i + k);
7674 if (HARD_REGNO_CALL_PART_CLOBBERED (i + k, GET_MODE (out)))
7675 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered, i + k);
7679 /* Maybe the spill reg contains a copy of reload_in. Only do
7680 something if there will not be an output reload for
7681 the register being reloaded. */
7682 else if (rld[r].out_reg == 0
7683 && rld[r].in != 0
7684 && ((REG_P (rld[r].in)
7685 && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER
7686 && !REGNO_REG_SET_P (&reg_has_output_reload,
7687 REGNO (rld[r].in)))
7688 || (REG_P (rld[r].in_reg)
7689 && !REGNO_REG_SET_P (&reg_has_output_reload,
7690 REGNO (rld[r].in_reg))))
7691 && ! reg_set_p (rld[r].reg_rtx, PATTERN (insn)))
7693 int nregno;
7694 int nnr;
7695 rtx in;
7696 bool piecemeal;
7698 if (REG_P (rld[r].in)
7699 && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER)
7700 in = rld[r].in;
7701 else if (REG_P (rld[r].in_reg))
7702 in = rld[r].in_reg;
7703 else
7704 in = XEXP (rld[r].in_reg, 0);
7705 nregno = REGNO (in);
7707 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
7708 : hard_regno_nregs[nregno]
7709 [GET_MODE (rld[r].reg_rtx)]);
7711 reg_last_reload_reg[nregno] = rld[r].reg_rtx;
7713 piecemeal = (nregno < FIRST_PSEUDO_REGISTER
7714 && nr == nnr
7715 && inherit_piecemeal_p (r, nregno));
7717 if (nregno < FIRST_PSEUDO_REGISTER)
7718 for (k = 1; k < nnr; k++)
7719 reg_last_reload_reg[nregno + k]
7720 = (piecemeal
7721 ? regno_reg_rtx[REGNO (rld[r].reg_rtx) + k]
7722 : 0);
7724 /* Unless we inherited this reload, show we haven't
7725 recently done a store.
7726 Previous stores of inherited auto_inc expressions
7727 also have to be discarded. */
7728 if (! reload_inherited[r]
7729 || (rld[r].out && ! rld[r].out_reg))
7730 spill_reg_store[i] = 0;
7732 for (k = 0; k < nr; k++)
7734 CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k);
7735 reg_reloaded_contents[i + k]
7736 = (nregno >= FIRST_PSEUDO_REGISTER || !piecemeal
7737 ? nregno
7738 : nregno + k);
7739 reg_reloaded_insn[i + k] = insn;
7740 SET_HARD_REG_BIT (reg_reloaded_valid, i + k);
7741 if (HARD_REGNO_CALL_PART_CLOBBERED (i + k, GET_MODE (in)))
7742 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered, i + k);
7747 /* However, if part of the reload reaches the end, then we must
7748 invalidate the old info for the part that survives to the end. */
7749 else if (part_reaches_end)
7751 for (k = 0; k < nr; k++)
7752 if (reload_reg_reaches_end_p (i + k,
7753 rld[r].opnum,
7754 rld[r].when_needed))
7755 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
7759 /* The following if-statement was #if 0'd in 1.34 (or before...).
7760 It's reenabled in 1.35 because supposedly nothing else
7761 deals with this problem. */
7763 /* If a register gets output-reloaded from a non-spill register,
7764 that invalidates any previous reloaded copy of it.
7765 But forget_old_reloads_1 won't get to see it, because
7766 it thinks only about the original insn. So invalidate it here.
7767 Also do the same thing for RELOAD_OTHER constraints where the
7768 output is discarded. */
7769 if (i < 0
7770 && ((rld[r].out != 0
7771 && (REG_P (rld[r].out)
7772 || (MEM_P (rld[r].out)
7773 && REG_P (rld[r].out_reg))))
7774 || (rld[r].out == 0 && rld[r].out_reg
7775 && REG_P (rld[r].out_reg))))
7777 rtx out = ((rld[r].out && REG_P (rld[r].out))
7778 ? rld[r].out : rld[r].out_reg);
7779 int nregno = REGNO (out);
7781 /* REG_RTX is now set or clobbered by the main instruction.
7782 As the comment above explains, forget_old_reloads_1 only
7783 sees the original instruction, and there is no guarantee
7784 that the original instruction also clobbered REG_RTX.
7785 For example, if find_reloads sees that the input side of
7786 a matched operand pair dies in this instruction, it may
7787 use the input register as the reload register.
7789 Calling forget_old_reloads_1 is a waste of effort if
7790 REG_RTX is also the output register.
7792 If we know that REG_RTX holds the value of a pseudo
7793 register, the code after the call will record that fact. */
7794 if (rld[r].reg_rtx && rld[r].reg_rtx != out)
7795 forget_old_reloads_1 (rld[r].reg_rtx, NULL_RTX, NULL);
7797 if (nregno >= FIRST_PSEUDO_REGISTER)
7799 rtx src_reg, store_insn = NULL_RTX;
7801 reg_last_reload_reg[nregno] = 0;
7803 /* If we can find a hard register that is stored, record
7804 the storing insn so that we may delete this insn with
7805 delete_output_reload. */
7806 src_reg = rld[r].reg_rtx;
7808 /* If this is an optional reload, try to find the source reg
7809 from an input reload. */
7810 if (! src_reg)
7812 rtx set = single_set (insn);
7813 if (set && SET_DEST (set) == rld[r].out)
7815 int k;
7817 src_reg = SET_SRC (set);
7818 store_insn = insn;
7819 for (k = 0; k < n_reloads; k++)
7821 if (rld[k].in == src_reg)
7823 src_reg = rld[k].reg_rtx;
7824 break;
7829 else
7830 store_insn = new_spill_reg_store[REGNO (src_reg)];
7831 if (src_reg && REG_P (src_reg)
7832 && REGNO (src_reg) < FIRST_PSEUDO_REGISTER)
7834 int src_regno = REGNO (src_reg);
7835 int nr = hard_regno_nregs[src_regno][rld[r].mode];
7836 /* The place where to find a death note varies with
7837 PRESERVE_DEATH_INFO_REGNO_P. The condition is not
7838 necessarily checked exactly in the code that moves
7839 notes, so just check both locations. */
7840 rtx note = find_regno_note (insn, REG_DEAD, src_regno);
7841 if (! note && store_insn)
7842 note = find_regno_note (store_insn, REG_DEAD, src_regno);
7843 while (nr-- > 0)
7845 spill_reg_store[src_regno + nr] = store_insn;
7846 spill_reg_stored_to[src_regno + nr] = out;
7847 reg_reloaded_contents[src_regno + nr] = nregno;
7848 reg_reloaded_insn[src_regno + nr] = store_insn;
7849 CLEAR_HARD_REG_BIT (reg_reloaded_dead, src_regno + nr);
7850 SET_HARD_REG_BIT (reg_reloaded_valid, src_regno + nr);
7851 if (HARD_REGNO_CALL_PART_CLOBBERED (src_regno + nr,
7852 GET_MODE (src_reg)))
7853 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
7854 src_regno + nr);
7855 SET_HARD_REG_BIT (reg_is_output_reload, src_regno + nr);
7856 if (note)
7857 SET_HARD_REG_BIT (reg_reloaded_died, src_regno);
7858 else
7859 CLEAR_HARD_REG_BIT (reg_reloaded_died, src_regno);
7861 reg_last_reload_reg[nregno] = src_reg;
7862 /* We have to set reg_has_output_reload here, or else
7863 forget_old_reloads_1 will clear reg_last_reload_reg
7864 right away. */
7865 SET_REGNO_REG_SET (&reg_has_output_reload,
7866 nregno);
7869 else
7871 int num_regs = hard_regno_nregs[nregno][GET_MODE (out)];
7873 while (num_regs-- > 0)
7874 reg_last_reload_reg[nregno + num_regs] = 0;
7878 IOR_HARD_REG_SET (reg_reloaded_dead, reg_reloaded_died);
7881 /* Go through the motions to emit INSN and test if it is strictly valid.
7882 Return the emitted insn if valid, else return NULL. */
7884 static rtx
7885 emit_insn_if_valid_for_reload (rtx insn)
7887 rtx last = get_last_insn ();
7888 int code;
7890 insn = emit_insn (insn);
7891 code = recog_memoized (insn);
7893 if (code >= 0)
7895 extract_insn (insn);
7896 /* We want constrain operands to treat this insn strictly in its
7897 validity determination, i.e., the way it would after reload has
7898 completed. */
7899 if (constrain_operands (1))
7900 return insn;
7903 delete_insns_since (last);
7904 return NULL;
7907 /* Emit code to perform a reload from IN (which may be a reload register) to
7908 OUT (which may also be a reload register). IN or OUT is from operand
7909 OPNUM with reload type TYPE.
7911 Returns first insn emitted. */
7913 static rtx
7914 gen_reload (rtx out, rtx in, int opnum, enum reload_type type)
7916 rtx last = get_last_insn ();
7917 rtx tem;
7919 /* If IN is a paradoxical SUBREG, remove it and try to put the
7920 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
7921 if (GET_CODE (in) == SUBREG
7922 && (GET_MODE_SIZE (GET_MODE (in))
7923 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
7924 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
7925 in = SUBREG_REG (in), out = tem;
7926 else if (GET_CODE (out) == SUBREG
7927 && (GET_MODE_SIZE (GET_MODE (out))
7928 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
7929 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
7930 out = SUBREG_REG (out), in = tem;
7932 /* How to do this reload can get quite tricky. Normally, we are being
7933 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
7934 register that didn't get a hard register. In that case we can just
7935 call emit_move_insn.
7937 We can also be asked to reload a PLUS that adds a register or a MEM to
7938 another register, constant or MEM. This can occur during frame pointer
7939 elimination and while reloading addresses. This case is handled by
7940 trying to emit a single insn to perform the add. If it is not valid,
7941 we use a two insn sequence.
7943 Or we can be asked to reload a unary operand that was a fragment of
7944 an addressing mode, into a register. If it isn't recognized as-is,
7945 we try making the unop operand and the reload-register the same:
7946 (set reg:X (unop:X expr:Y))
7947 -> (set reg:Y expr:Y) (set reg:X (unop:X reg:Y)).
7949 Finally, we could be called to handle an 'o' constraint by putting
7950 an address into a register. In that case, we first try to do this
7951 with a named pattern of "reload_load_address". If no such pattern
7952 exists, we just emit a SET insn and hope for the best (it will normally
7953 be valid on machines that use 'o').
7955 This entire process is made complex because reload will never
7956 process the insns we generate here (so we must ensure that they
7957 will fit their constraints), and also by the fact that parts of
7958 IN might be being reloaded separately and replaced with spill registers.
7959 Because of this, we are, in some sense, just guessing the right approach
7960 here. The one listed above seems to work.
7962 ??? At some point, this whole thing needs to be rethought. */
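/* For example, to reload (plus:SI (reg:SI 6) (const_int 16)) into a
   reload register R we first try the single insn
   (set R (plus:SI (reg:SI 6) (const_int 16)))
   and only if no add pattern matches do we fall back to moving one
   operand into R and adding the other to it.  (The register number
   here is purely illustrative.)  */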
7964 if (GET_CODE (in) == PLUS
7965 && (REG_P (XEXP (in, 0))
7966 || GET_CODE (XEXP (in, 0)) == SUBREG
7967 || MEM_P (XEXP (in, 0)))
7968 && (REG_P (XEXP (in, 1))
7969 || GET_CODE (XEXP (in, 1)) == SUBREG
7970 || CONSTANT_P (XEXP (in, 1))
7971 || MEM_P (XEXP (in, 1))))
7973 /* We need to compute the sum of a register or a MEM and another
7974 register, constant, or MEM, and put it into the reload
7975 register. The best possible way of doing this is if the machine
7976 has a three-operand ADD insn that accepts the required operands.
7978 The simplest approach is to try to generate such an insn and see if it
7979 is recognized and matches its constraints. If so, it can be used.
7981 It might be better not to actually emit the insn unless it is valid,
7982 but we need to pass the insn as an operand to `recog' and
7983 `extract_insn' and it is simpler to emit and then delete the insn if
7984 not valid than to dummy things up. */
7986 rtx op0, op1, tem, insn;
7987 int code;
7989 op0 = find_replacement (&XEXP (in, 0));
7990 op1 = find_replacement (&XEXP (in, 1));
7992 /* Since constraint checking is strict, commutativity won't be
7993 checked, so we need to do that here to avoid spurious failure
7994 if the add instruction is two-address and the second operand
7995 of the add is the same as the reload reg, which is frequently
7996 the case. If the insn would be A = B + A, rearrange it so
7997 it will be A = A + B as constrain_operands expects. */
7999 if (REG_P (XEXP (in, 1))
8000 && REGNO (out) == REGNO (XEXP (in, 1)))
8001 tem = op0, op0 = op1, op1 = tem;
8003 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
8004 in = gen_rtx_PLUS (GET_MODE (in), op0, op1);
8006 insn = emit_insn_if_valid_for_reload (gen_rtx_SET (VOIDmode, out, in));
8007 if (insn)
8008 return insn;
8010 /* If that failed, we must use a conservative two-insn sequence.
8012 Use a move to copy one operand into the reload register. Prefer
8013 to reload a constant, MEM or pseudo since the move patterns can
8014 handle an arbitrary operand. If OP1 is not a constant, MEM or
8015 pseudo and OP1 is not a valid operand for an add instruction, then
8016 reload OP1.
8018 After reloading one of the operands into the reload register, add
8019 the reload register to the output register.
8021 If there is another way to do this for a specific machine, a
8022 DEFINE_PEEPHOLE should be specified that recognizes the sequence
8023 we emit below. */
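/* E.g. reloading (plus:SI (reg:SI 6) (mem:SI X)) might then become
   (set R (mem:SI X))
   (set R (plus:SI R (reg:SI 6)))
   since the MEM is the operand we prefer to move first.  */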
8025 code = (int) optab_handler (add_optab, GET_MODE (out))->insn_code;
8027 if (CONSTANT_P (op1) || MEM_P (op1) || GET_CODE (op1) == SUBREG
8028 || (REG_P (op1)
8029 && REGNO (op1) >= FIRST_PSEUDO_REGISTER)
8030 || (code != CODE_FOR_nothing
8031 && ! ((*insn_data[code].operand[2].predicate)
8032 (op1, insn_data[code].operand[2].mode))))
8033 tem = op0, op0 = op1, op1 = tem;
8035 gen_reload (out, op0, opnum, type);
8037 /* If OP0 and OP1 are the same, we can use OUT for OP1.
8038 This fixes a problem on the 32K where the stack pointer cannot
8039 be used as an operand of an add insn. */
8041 if (rtx_equal_p (op0, op1))
8042 op1 = out;
8044 insn = emit_insn_if_valid_for_reload (gen_add2_insn (out, op1));
8045 if (insn)
8047 /* Add a REG_EQUIV note so that find_equiv_reg can find it. */
8048 set_unique_reg_note (insn, REG_EQUIV, in);
8049 return insn;
8052 /* If that failed, copy the address register to the reload register.
8053 Then add the constant to the reload register. */
8055 gcc_assert (!reg_overlap_mentioned_p (out, op0));
8056 gen_reload (out, op1, opnum, type);
8057 insn = emit_insn (gen_add2_insn (out, op0));
8058 set_unique_reg_note (insn, REG_EQUIV, in);
8061 #ifdef SECONDARY_MEMORY_NEEDED
8062 /* If we need a memory location to do the move, do it that way. */
8063 else if ((REG_P (in) || GET_CODE (in) == SUBREG)
8064 && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER
8065 && (REG_P (out) || GET_CODE (out) == SUBREG)
8066 && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
8067 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (reg_or_subregno (in)),
8068 REGNO_REG_CLASS (reg_or_subregno (out)),
8069 GET_MODE (out)))
8071 /* Get the memory to use and rewrite both registers to its mode. */
8072 rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
8074 if (GET_MODE (loc) != GET_MODE (out))
8075 out = gen_rtx_REG (GET_MODE (loc), REGNO (out));
8077 if (GET_MODE (loc) != GET_MODE (in))
8078 in = gen_rtx_REG (GET_MODE (loc), REGNO (in));
8080 gen_reload (loc, in, opnum, type);
8081 gen_reload (out, loc, opnum, type);
8083 #endif
8084 else if (REG_P (out) && UNARY_P (in))
8086 rtx insn;
8087 rtx op1;
8088 rtx out_moded;
8089 rtx set;
8091 op1 = find_replacement (&XEXP (in, 0));
8092 if (op1 != XEXP (in, 0))
8093 in = gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in), op1);
8095 /* First, try a plain SET. */
8096 set = emit_insn_if_valid_for_reload (gen_rtx_SET (VOIDmode, out, in));
8097 if (set)
8098 return set;
8100 /* If that failed, move the inner operand to the reload
8101 register, and try the same unop with the inner expression
8102 replaced with the reload register. */
8104 if (GET_MODE (op1) != GET_MODE (out))
8105 out_moded = gen_rtx_REG (GET_MODE (op1), REGNO (out));
8106 else
8107 out_moded = out;
8109 gen_reload (out_moded, op1, opnum, type);
8111 insn
8112 = gen_rtx_SET (VOIDmode, out,
8113 gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in),
8114 out_moded));
8115 insn = emit_insn_if_valid_for_reload (insn);
8116 if (insn)
8118 set_unique_reg_note (insn, REG_EQUIV, in);
8119 return insn;
8122 fatal_insn ("Failure trying to reload:", set);
8124 /* If IN is a simple operand, use gen_move_insn. */
8125 else if (OBJECT_P (in) || GET_CODE (in) == SUBREG)
8127 tem = emit_insn (gen_move_insn (out, in));
8128 /* IN may contain a LABEL_REF, if so add a REG_LABEL note. */
8129 mark_jump_label (in, tem, 0);
8132 #ifdef HAVE_reload_load_address
8133 else if (HAVE_reload_load_address)
8134 emit_insn (gen_reload_load_address (out, in));
8135 #endif
8137 /* Otherwise, just write (set OUT IN) and hope for the best. */
8138 else
8139 emit_insn (gen_rtx_SET (VOIDmode, out, in));
8141 /* Return the first insn emitted.
8142 We cannot just return get_last_insn, because there may have
8143 been multiple instructions emitted. Also note that gen_move_insn may
8144 emit more than one insn itself, so we cannot assume that there is one
8145 insn emitted per emit_insn_before call. */
8147 return last ? NEXT_INSN (last) : get_insns ();
8150 /* Delete a previously made output-reload whose result we now believe
8151 is not needed. First we double-check.
8153 INSN is the insn now being processed.
8154 LAST_RELOAD_REG is the hard register number for which we want to delete
8155 the last output reload.
8156 J is the reload-number that originally used REG. The caller has made
8157 certain that reload J doesn't use REG any longer for input. */
8159 static void
8160 delete_output_reload (rtx insn, int j, int last_reload_reg)
8162 rtx output_reload_insn = spill_reg_store[last_reload_reg];
8163 rtx reg = spill_reg_stored_to[last_reload_reg];
8164 int k;
8165 int n_occurrences;
8166 int n_inherited = 0;
8167 rtx i1;
8168 rtx substed;
8170 /* It is possible that this reload has only been used to set another reload
8171 that we eliminated earlier, and thus this instruction was deleted too. */
8172 if (INSN_DELETED_P (output_reload_insn))
8173 return;
8175 /* Get the raw pseudo-register referred to. */
8177 while (GET_CODE (reg) == SUBREG)
8178 reg = SUBREG_REG (reg);
8179 substed = reg_equiv_memory_loc[REGNO (reg)];
8181 /* This is unsafe if the operand occurs more often in the current
8182 insn than it is inherited. */
8183 for (k = n_reloads - 1; k >= 0; k--)
8185 rtx reg2 = rld[k].in;
8186 if (! reg2)
8187 continue;
8188 if (MEM_P (reg2) || reload_override_in[k])
8189 reg2 = rld[k].in_reg;
8190 #ifdef AUTO_INC_DEC
8191 if (rld[k].out && ! rld[k].out_reg)
8192 reg2 = XEXP (rld[k].in_reg, 0);
8193 #endif
8194 while (GET_CODE (reg2) == SUBREG)
8195 reg2 = SUBREG_REG (reg2);
8196 if (rtx_equal_p (reg2, reg))
8198 if (reload_inherited[k] || reload_override_in[k] || k == j)
8199 n_inherited++;
8200 else
8201 return;
8204 n_occurrences = count_occurrences (PATTERN (insn), reg, 0);
8205 if (CALL_P (insn) && CALL_INSN_FUNCTION_USAGE (insn))
8206 n_occurrences += count_occurrences (CALL_INSN_FUNCTION_USAGE (insn),
8207 reg, 0);
8208 if (substed)
8209 n_occurrences += count_occurrences (PATTERN (insn),
8210 eliminate_regs (substed, 0,
8211 NULL_RTX), 0);
8212 for (i1 = reg_equiv_alt_mem_list [REGNO (reg)]; i1; i1 = XEXP (i1, 1))
8214 gcc_assert (!rtx_equal_p (XEXP (i1, 0), substed));
8215 n_occurrences += count_occurrences (PATTERN (insn), XEXP (i1, 0), 0);
8217 if (n_occurrences > n_inherited)
8218 return;
8220 /* If the pseudo-reg we are reloading is no longer referenced
8221 anywhere between the store into it and here,
8222 and we're within the same basic block, then the value can only
8223 pass through the reload reg and end up here.
8224 Otherwise, give up--return. */
8225 for (i1 = NEXT_INSN (output_reload_insn);
8226 i1 != insn; i1 = NEXT_INSN (i1))
8228 if (NOTE_INSN_BASIC_BLOCK_P (i1))
8229 return;
8230 if ((NONJUMP_INSN_P (i1) || CALL_P (i1))
8231 && reg_mentioned_p (reg, PATTERN (i1)))
8233 /* If this is USE in front of INSN, we only have to check that
8234 there are no more references than accounted for by inheritance. */
8235 while (NONJUMP_INSN_P (i1) && GET_CODE (PATTERN (i1)) == USE)
8237 n_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
8238 i1 = NEXT_INSN (i1);
8240 if (n_occurrences <= n_inherited && i1 == insn)
8241 break;
8242 return;
8246 /* We will be deleting the insn. Remove the spill reg information. */
8247 for (k = hard_regno_nregs[last_reload_reg][GET_MODE (reg)]; k-- > 0; )
8249 spill_reg_store[last_reload_reg + k] = 0;
8250 spill_reg_stored_to[last_reload_reg + k] = 0;
8253 /* The caller has already checked that REG dies or is set in INSN.
8254 It has also checked that we are optimizing, and thus some
8255 inaccuracies in the debugging information are acceptable.
8256 So we could just delete output_reload_insn. But in some cases
8257 we can improve the debugging information without sacrificing
8258 optimization - maybe even improving the code: See if the pseudo
8259 reg has been completely replaced with reload regs. If so, delete
8260 the store insn and forget we had a stack slot for the pseudo. */
8261 if (rld[j].out != rld[j].in
8262 && REG_N_DEATHS (REGNO (reg)) == 1
8263 && REG_N_SETS (REGNO (reg)) == 1
8264 && REG_BASIC_BLOCK (REGNO (reg)) >= NUM_FIXED_BLOCKS
8265 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
8267 rtx i2;
8269 /* We know that it was used only between here and the beginning of
8270 the current basic block. (We also know that the last use before
8271 INSN was the output reload we are thinking of deleting, but never
8272 mind that.) Search that range; see if any ref remains. */
8273 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
8275 rtx set = single_set (i2);
8277 /* Uses which just store in the pseudo don't count,
8278 since if they are the only uses, they are dead. */
8279 if (set != 0 && SET_DEST (set) == reg)
8280 continue;
8281 if (LABEL_P (i2)
8282 || JUMP_P (i2))
8283 break;
8284 if ((NONJUMP_INSN_P (i2) || CALL_P (i2))
8285 && reg_mentioned_p (reg, PATTERN (i2)))
8287 /* Some other ref remains; just delete the output reload we
8288 know to be dead. */
8289 delete_address_reloads (output_reload_insn, insn);
8290 delete_insn (output_reload_insn);
8291 return;
8295 /* Delete the now-dead stores into this pseudo. Note that this
8296 loop also takes care of deleting output_reload_insn. */
8297 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
8299 rtx set = single_set (i2);
8301 if (set != 0 && SET_DEST (set) == reg)
8303 delete_address_reloads (i2, insn);
8304 delete_insn (i2);
8306 if (LABEL_P (i2)
8307 || JUMP_P (i2))
8308 break;
8311 /* For the debugging info, say the pseudo lives in this reload reg. */
8312 reg_renumber[REGNO (reg)] = REGNO (rld[j].reg_rtx);
8313 if (flag_ira)
8314 mark_allocation_change (REGNO (reg));
8315 alter_reg (REGNO (reg), -1, false);
8317 else
8319 delete_address_reloads (output_reload_insn, insn);
8320 delete_insn (output_reload_insn);
8324 /* We are going to delete DEAD_INSN. Recursively delete loads of
8325 reload registers used in DEAD_INSN that are not used till CURRENT_INSN.
8326 CURRENT_INSN is being reloaded, so we have to check its reloads too. */
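/* E.g. if DEAD_INSN stored through (mem (reg R)) and R was loaded by
   an address reload solely for that store, the insn loading R can be
   deleted as well; delete_address_reloads_1 verifies that R is not
   used again before CURRENT_INSN or by CURRENT_INSN's own reloads.  */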
8327 static void
8328 delete_address_reloads (rtx dead_insn, rtx current_insn)
8330 rtx set = single_set (dead_insn);
8331 rtx set2, dst, prev, next;
8332 if (set)
8334 rtx dst = SET_DEST (set);
8335 if (MEM_P (dst))
8336 delete_address_reloads_1 (dead_insn, XEXP (dst, 0), current_insn);
8338 /* If we deleted the store from a reloaded post_{in,de}c expression,
8339 we can delete the matching adds. */
8340 prev = PREV_INSN (dead_insn);
8341 next = NEXT_INSN (dead_insn);
8342 if (! prev || ! next)
8343 return;
8344 set = single_set (next);
8345 set2 = single_set (prev);
8346 if (! set || ! set2
8347 || GET_CODE (SET_SRC (set)) != PLUS || GET_CODE (SET_SRC (set2)) != PLUS
8348 || GET_CODE (XEXP (SET_SRC (set), 1)) != CONST_INT
8349 || GET_CODE (XEXP (SET_SRC (set2), 1)) != CONST_INT)
8350 return;
8351 dst = SET_DEST (set);
8352 if (! rtx_equal_p (dst, SET_DEST (set2))
8353 || ! rtx_equal_p (dst, XEXP (SET_SRC (set), 0))
8354 || ! rtx_equal_p (dst, XEXP (SET_SRC (set2), 0))
8355 || (INTVAL (XEXP (SET_SRC (set), 1))
8356 != -INTVAL (XEXP (SET_SRC (set2), 1))))
8357 return;
8358 delete_related_insns (prev);
8359 delete_related_insns (next);
8362 /* Subfunction of delete_address_reloads: process registers found in X. */
8363 static void
8364 delete_address_reloads_1 (rtx dead_insn, rtx x, rtx current_insn)
8366 rtx prev, set, dst, i2;
8367 int i, j;
8368 enum rtx_code code = GET_CODE (x);
8370 if (code != REG)
8372 const char *fmt = GET_RTX_FORMAT (code);
8373 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8375 if (fmt[i] == 'e')
8376 delete_address_reloads_1 (dead_insn, XEXP (x, i), current_insn);
8377 else if (fmt[i] == 'E')
8379 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8380 delete_address_reloads_1 (dead_insn, XVECEXP (x, i, j),
8381 current_insn);
8384 return;
8387 if (spill_reg_order[REGNO (x)] < 0)
8388 return;
8390 /* Scan backwards for the insn that sets x. This might be a way back due
8391 to inheritance. */
8392 for (prev = PREV_INSN (dead_insn); prev; prev = PREV_INSN (prev))
8394 code = GET_CODE (prev);
8395 if (code == CODE_LABEL || code == JUMP_INSN)
8396 return;
8397 if (!INSN_P (prev))
8398 continue;
8399 if (reg_set_p (x, PATTERN (prev)))
8400 break;
8401 if (reg_referenced_p (x, PATTERN (prev)))
8402 return;
8404 if (! prev || INSN_UID (prev) < reload_first_uid)
8405 return;
8406 /* Check that PREV only sets the reload register. */
8407 set = single_set (prev);
8408 if (! set)
8409 return;
8410 dst = SET_DEST (set);
8411 if (!REG_P (dst)
8412 || ! rtx_equal_p (dst, x))
8413 return;
8414 if (! reg_set_p (dst, PATTERN (dead_insn)))
8416 /* Check if DST was used in a later insn -
8417 it might have been inherited. */
8418 for (i2 = NEXT_INSN (dead_insn); i2; i2 = NEXT_INSN (i2))
8420 if (LABEL_P (i2))
8421 break;
8422 if (! INSN_P (i2))
8423 continue;
8424 if (reg_referenced_p (dst, PATTERN (i2)))
8426 /* If there is a reference to the register in the current insn,
8427 it might be loaded in a non-inherited reload. If no other
8428 reload uses it, that means the register is set before
8429 referenced. */
8430 if (i2 == current_insn)
8432 for (j = n_reloads - 1; j >= 0; j--)
8433 if ((rld[j].reg_rtx == dst && reload_inherited[j])
8434 || reload_override_in[j] == dst)
8435 return;
8436 for (j = n_reloads - 1; j >= 0; j--)
8437 if (rld[j].in && rld[j].reg_rtx == dst)
8438 break;
8439 if (j >= 0)
8440 break;
8442 return;
8444 if (JUMP_P (i2))
8445 break;
8446 /* If DST is still live at CURRENT_INSN, check if it is used for
8447 any reload. Note that even if CURRENT_INSN sets DST, we still
8448 have to check the reloads. */
8449 if (i2 == current_insn)
8451 for (j = n_reloads - 1; j >= 0; j--)
8452 if ((rld[j].reg_rtx == dst && reload_inherited[j])
8453 || reload_override_in[j] == dst)
8454 return;
8455 /* ??? We can't finish the loop here, because dst might be
8456 allocated to a pseudo in this block if no reload in this
8457 block needs any of the classes containing DST - see
8458 spill_hard_reg. There is no easy way to tell this, so we
8459 have to scan till the end of the basic block. */
8461 if (reg_set_p (dst, PATTERN (i2)))
8462 break;
8465 delete_address_reloads_1 (prev, SET_SRC (set), current_insn);
8466 reg_reloaded_contents[REGNO (dst)] = -1;
8467 delete_insn (prev);
8470 /* Output reload-insns to reload VALUE into RELOADREG.
8471 VALUE is an autoincrement or autodecrement RTX whose operand
8472 is a register or memory location;
8473 so reloading involves incrementing that location.
8474 IN is either identical to VALUE, or some cheaper place to reload from.
8476 INC_AMOUNT is the number to increment or decrement by (always positive).
8477 This cannot be deduced from VALUE.
8479 Return the instruction that stores into RELOADREG. */
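/* For instance, for VALUE == (post_inc:SI (reg:SI R)) with INC_AMOUNT 4,
   the old contents of the register are copied into RELOADREG first and
   the register is then incremented by 4; for a pre-increment the
   location is incremented first and the incremented value is what ends
   up in RELOADREG.  */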
8481 static rtx
8482 inc_for_reload (rtx reloadreg, rtx in, rtx value, int inc_amount)
8484 /* REG or MEM to be copied and incremented. */
8485 rtx incloc = find_replacement (&XEXP (value, 0));
8486 /* Nonzero if increment after copying. */
8487 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC
8488 || GET_CODE (value) == POST_MODIFY);
8489 rtx last;
8490 rtx inc;
8491 rtx add_insn;
8492 int code;
8493 rtx store;
8494 rtx real_in = in == value ? incloc : in;
8496 /* No hard register is equivalent to this register after
8497 the inc/dec operation. If REG_LAST_RELOAD_REG were nonzero,
8498 we could inc/dec that register as well (maybe even using it for
8499 the source), but I'm not sure it's worth worrying about. */
8500 if (REG_P (incloc))
8501 reg_last_reload_reg[REGNO (incloc)] = 0;
8503 if (GET_CODE (value) == PRE_MODIFY || GET_CODE (value) == POST_MODIFY)
8505 gcc_assert (GET_CODE (XEXP (value, 1)) == PLUS);
8506 inc = find_replacement (&XEXP (XEXP (value, 1), 1));
8508 else
8510 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
8511 inc_amount = -inc_amount;
8513 inc = GEN_INT (inc_amount);
8516 /* If this is post-increment, first copy the location to the reload reg. */
8517 if (post && real_in != reloadreg)
8518 emit_insn (gen_move_insn (reloadreg, real_in));
8520 if (in == value)
8522 /* See if we can directly increment INCLOC. Use a method similar to
8523 that in gen_reload. */
8525 last = get_last_insn ();
8526 add_insn = emit_insn (gen_rtx_SET (VOIDmode, incloc,
8527 gen_rtx_PLUS (GET_MODE (incloc),
8528 incloc, inc)));
8530 code = recog_memoized (add_insn);
8531 if (code >= 0)
8533 extract_insn (add_insn);
8534 if (constrain_operands (1))
8536 /* If this is a pre-increment and we have incremented the value
8537 where it lives, copy the incremented value to RELOADREG to
8538 be used as an address. */
8540 if (! post)
8541 emit_insn (gen_move_insn (reloadreg, incloc));
8543 return add_insn;
8546 delete_insns_since (last);
8549 /* If we couldn't do the increment directly, we must increment in RELOADREG.
8550 The way we do this depends on whether this is pre- or post-increment.
8551 For pre-increment, copy INCLOC to the reload register, increment it
8552 there, then save back. */
8554 if (! post)
8556 if (in != reloadreg)
8557 emit_insn (gen_move_insn (reloadreg, real_in));
8558 emit_insn (gen_add2_insn (reloadreg, inc));
8559 store = emit_insn (gen_move_insn (incloc, reloadreg));
8561 else
8563 /* Postincrement.
8564 Because this might be a jump insn or a compare, and because RELOADREG
8565 may not be available after the insn in an input reload, we must do
8566 the incrementation before the insn being reloaded for.
8568 We have already copied IN to RELOADREG. Increment the copy in
8569 RELOADREG, save that back, then decrement RELOADREG so it has
8570 the original value. */
8572 emit_insn (gen_add2_insn (reloadreg, inc));
8573 store = emit_insn (gen_move_insn (incloc, reloadreg));
8574 if (GET_CODE (inc) == CONST_INT)
8575 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-INTVAL (inc))));
8576 else
8577 emit_insn (gen_sub2_insn (reloadreg, inc));
8580 return store;
8583 #ifdef AUTO_INC_DEC
8584 static void
8585 add_auto_inc_notes (rtx insn, rtx x)
8587 enum rtx_code code = GET_CODE (x);
8588 const char *fmt;
8589 int i, j;
8591 if (code == MEM && auto_inc_p (XEXP (x, 0)))
8593 REG_NOTES (insn)
8594 = gen_rtx_EXPR_LIST (REG_INC, XEXP (XEXP (x, 0), 0), REG_NOTES (insn));
8595 return;
8598 /* Scan all the operand sub-expressions. */
8599 fmt = GET_RTX_FORMAT (code);
8600 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8602 if (fmt[i] == 'e')
8603 add_auto_inc_notes (insn, XEXP (x, i));
8604 else if (fmt[i] == 'E')
8605 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8606 add_auto_inc_notes (insn, XVECEXP (x, i, j));
8609 #endif
8611 /* Copy EH notes from an insn to its reloads. */
8612 static void
8613 copy_eh_notes (rtx insn, rtx x)
8615 rtx eh_note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
8616 if (eh_note)
8618 for (; x != 0; x = NEXT_INSN (x))
8620 if (may_trap_p (PATTERN (x)))
8621 REG_NOTES (x)
8622 = gen_rtx_EXPR_LIST (REG_EH_REGION, XEXP (eh_note, 0),
8623 REG_NOTES (x));
8628 /* This is used by the reload pass, which emits some instructions after
8629 abnormal calls, moving the basic block end, when in fact it wants to
8630 emit them on the edge. Look for abnormal call edges, find the proper
8631 call backward, and fix the damage.
8633 Instructions that throw exceptions internally are handled similarly. */
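/* E.g. reload may emit an output-reload move after a call that ends a
   basic block with an abnormal or EH edge; the loop below finds such
   trailing insns and moves them onto the fallthru edge instead.  */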
8634 void
8635 fixup_abnormal_edges (void)
8637 bool inserted = false;
8638 basic_block bb;
8640 FOR_EACH_BB (bb)
8642 edge e;
8643 edge_iterator ei;
8645 /* Look for cases we are interested in - calls or instructions causing
8646 exceptions. */
8647 FOR_EACH_EDGE (e, ei, bb->succs)
8649 if (e->flags & EDGE_ABNORMAL_CALL)
8650 break;
8651 if ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
8652 == (EDGE_ABNORMAL | EDGE_EH))
8653 break;
8655 if (e && !CALL_P (BB_END (bb))
8656 && !can_throw_internal (BB_END (bb)))
8658 rtx insn;
8660 /* Get past the new insns generated. Allow notes, as the insns
8661 may be already deleted. */
8662 insn = BB_END (bb);
8663 while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
8664 && !can_throw_internal (insn)
8665 && insn != BB_HEAD (bb))
8666 insn = PREV_INSN (insn);
8668 if (CALL_P (insn) || can_throw_internal (insn))
8670 rtx stop, next;
8672 stop = NEXT_INSN (BB_END (bb));
8673 BB_END (bb) = insn;
8674 insn = NEXT_INSN (insn);
8676 FOR_EACH_EDGE (e, ei, bb->succs)
8677 if (e->flags & EDGE_FALLTHRU)
8678 break;
8680 while (insn && insn != stop)
8682 next = NEXT_INSN (insn);
8683 if (INSN_P (insn))
8685 delete_insn (insn);
8687 /* Sometimes there's still the return value USE.
8688 If it's placed after a trapping call (i.e. that
8689 call is the last insn anyway), we have no fallthru
8690 edge. Simply delete this use and don't try to insert
8691 on the non-existent edge. */
8692 if (GET_CODE (PATTERN (insn)) != USE)
8694 /* We're not deleting it, we're moving it. */
8695 INSN_DELETED_P (insn) = 0;
8696 PREV_INSN (insn) = NULL_RTX;
8697 NEXT_INSN (insn) = NULL_RTX;
8699 insert_insn_on_edge (insn, e);
8700 inserted = true;
8703 else if (!BARRIER_P (insn))
8704 set_block_for_insn (insn, NULL);
8705 insn = next;
8709 /* It may be that we don't find any such trapping insn. In this
8710 case we discovered quite late that the insn that had been
8711 marked as can_throw_internal in fact couldn't trap at all.
8712 So we should in fact delete the EH edges out of the block. */
8713 else
8714 purge_dead_edges (bb);
8718 /* We've possibly turned single trapping insn into multiple ones. */
8719 if (flag_non_call_exceptions)
8721 sbitmap blocks;
8722 blocks = sbitmap_alloc (last_basic_block);
8723 sbitmap_ones (blocks);
8724 find_many_sub_basic_blocks (blocks);
8725 sbitmap_free (blocks);
8728 if (inserted)
8729 commit_edge_insertions ();
8731 #ifdef ENABLE_CHECKING
8732 /* Verify that we didn't turn one trapping insn into many, and that
8733 we found and corrected all of the problems wrt fixups on the
8734 fallthru edge. */
8735 verify_flow_info ();
8736 #endif