2008-05-29 Vladimir Makarov <vmakarov@redhat.com>
[official-gcc.git] / gcc / reload1.c
blob99bbd6c9e2d261b8288cdec69351cd2619b146ea
1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
27 #include "machmode.h"
28 #include "hard-reg-set.h"
29 #include "rtl.h"
30 #include "tm_p.h"
31 #include "obstack.h"
32 #include "insn-config.h"
33 #include "flags.h"
34 #include "function.h"
35 #include "expr.h"
36 #include "optabs.h"
37 #include "regs.h"
38 #include "addresses.h"
39 #include "basic-block.h"
40 #include "reload.h"
41 #include "recog.h"
42 #include "output.h"
43 #include "real.h"
44 #include "toplev.h"
45 #include "except.h"
46 #include "tree.h"
47 #include "ira.h"
48 #include "df.h"
49 #include "target.h"
50 #include "dse.h"
52 /* This file contains the reload pass of the compiler, which is
53 run after register allocation has been done. It checks that
54 each insn is valid (operands required to be in registers really
55 are in registers of the proper class) and fixes up invalid ones
56 by copying values temporarily into registers for the insns
57 that need them.
59 The results of register allocation are described by the vector
60 reg_renumber; the insns still contain pseudo regs, but reg_renumber
61 can be used to find which hard reg, if any, a pseudo reg is in.
63 The technique we always use is to free up a few hard regs that are
64 called ``reload regs'', and for each place where a pseudo reg
65 must be in a hard reg, copy it temporarily into one of the reload regs.
67 Reload regs are allocated locally for every instruction that needs
68 reloads. When there are pseudos which are allocated to a register that
69 has been chosen as a reload reg, such pseudos must be ``spilled''.
70 This means that they go to other hard regs, or to stack slots if no other
71 available hard regs can be found. Spilling can invalidate more
72 insns, requiring additional need for reloads, so we must keep checking
73 until the process stabilizes.
75 For machines with different classes of registers, we must keep track
76 of the register class needed for each reload, and make sure that
77 we allocate enough reload registers of each class.
79 The file reload.c contains the code that checks one insn for
80 validity and reports the reloads that it needs. This file
81 is in charge of scanning the entire rtl code, accumulating the
82 reload needs, spilling, assigning reload registers to use for
83 fixing up each insn, and generating the new insns to copy values
84 into the reload registers. */
86 /* During reload_as_needed, element N contains a REG rtx for the hard reg
87 into which reg N has been reloaded (perhaps for a previous insn). */
88 static rtx *reg_last_reload_reg;
90 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
91 for an output reload that stores into reg N. */
92 static regset_head reg_has_output_reload;
94 /* Indicates which hard regs are reload-registers for an output reload
95 in the current insn. */
96 static HARD_REG_SET reg_is_output_reload;
98 /* Element N is the constant value to which pseudo reg N is equivalent,
99 or zero if pseudo reg N is not equivalent to a constant.
100 find_reloads looks at this in order to replace pseudo reg N
101 with the constant it stands for. */
102 rtx *reg_equiv_constant;
104 /* Element N is an invariant value to which pseudo reg N is equivalent.
105 eliminate_regs_in_insn uses this to replace pseudos in particular
106 contexts. */
107 rtx *reg_equiv_invariant;
109 /* Element N is a memory location to which pseudo reg N is equivalent,
110 prior to any register elimination (such as frame pointer to stack
111 pointer). Depending on whether or not it is a valid address, this value
112 is transferred to either reg_equiv_address or reg_equiv_mem. */
113 rtx *reg_equiv_memory_loc;
115 /* We allocate reg_equiv_memory_loc inside a varray so that the garbage
116 collector can keep track of what is inside. */
117 VEC(rtx,gc) *reg_equiv_memory_loc_vec;
119 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
120 This is used when the address is not valid as a memory address
121 (because its displacement is too big for the machine.) */
122 rtx *reg_equiv_address;
124 /* Element N is the memory slot to which pseudo reg N is equivalent,
125 or zero if pseudo reg N is not equivalent to a memory slot. */
126 rtx *reg_equiv_mem;
128 /* Element N is an EXPR_LIST of REG_EQUIVs containing MEMs with
129 alternate representations of the location of pseudo reg N. */
130 rtx *reg_equiv_alt_mem_list;
132 /* Widest width in which each pseudo reg is referred to (via subreg). */
133 static unsigned int *reg_max_ref_width;
135 /* Element N is the list of insns that initialized reg N from its equivalent
136 constant or memory slot. */
137 rtx *reg_equiv_init;
138 int reg_equiv_init_size;
140 /* Vector to remember old contents of reg_renumber before spilling. */
141 static short *reg_old_renumber;
143 /* During reload_as_needed, element N contains the last pseudo regno reloaded
144 into hard register N. If that pseudo reg occupied more than one register,
145 reg_reloaded_contents points to that pseudo for each spill register in
146 use; all of these must remain set for an inheritance to occur. */
147 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
149 /* During reload_as_needed, element N contains the insn for which
150 hard register N was last used. Its contents are significant only
151 when reg_reloaded_valid is set for this register. */
152 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
154 /* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid. */
155 static HARD_REG_SET reg_reloaded_valid;
156 /* Indicate if the register was dead at the end of the reload.
157 This is only valid if reg_reloaded_contents is set and valid. */
158 static HARD_REG_SET reg_reloaded_dead;
160 /* Indicate whether the register's current value is one that is not
161 safe to retain across a call, even for registers that are normally
162 call-saved. This is only meaningful for members of reg_reloaded_valid. */
163 static HARD_REG_SET reg_reloaded_call_part_clobbered;
165 /* Number of spill-regs so far; number of valid elements of spill_regs. */
166 static int n_spills;
168 /* In parallel with spill_regs, contains REG rtx's for those regs.
169 Holds the last rtx used for any given reg, or 0 if it has never
170 been used for spilling yet. This rtx is reused, provided it has
171 the proper mode. */
172 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
174 /* In parallel with spill_regs, contains nonzero for a spill reg
175 that was stored after the last time it was used.
176 The precise value is the insn generated to do the store. */
177 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
179 /* This is the register that was stored with spill_reg_store. This is a
180 copy of reload_out / reload_out_reg when the value was stored; if
181 reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg. */
182 static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER];
184 /* This table is the inverse mapping of spill_regs:
185 indexed by hard reg number,
186 it contains the position of that reg in spill_regs,
187 or -1 for something that is not in spill_regs.
189 ?!? This is no longer accurate. */
190 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
192 /* This reg set indicates registers that can't be used as spill registers for
193 the currently processed insn. These are the hard registers which are live
194 during the insn, but not allocated to pseudos, as well as fixed
195 registers. */
196 static HARD_REG_SET bad_spill_regs;
198 /* These are the hard registers that can't be used as spill register for any
199 insn. This includes registers used for user variables and registers that
200 we can't eliminate. A register that appears in this set also can't be used
201 to retry register allocation. */
202 static HARD_REG_SET bad_spill_regs_global;
204 /* Describes order of use of registers for reloading
205 of spilled pseudo-registers. `n_spills' is the number of
206 elements that are actually valid; new ones are added at the end.
208 Both spill_regs and spill_reg_order are used on two occasions:
209 once during find_reload_regs, where they keep track of the spill registers
210 for a single insn, but also during reload_as_needed where they show all
211 the registers ever used by reload. For the latter case, the information
212 is calculated during finish_spills. */
213 static short spill_regs[FIRST_PSEUDO_REGISTER];
215 /* This vector of reg sets indicates, for each pseudo, which hard registers
216 may not be used for retrying global allocation because the register was
217 formerly spilled from one of them. If we allowed reallocating a pseudo to
218 a register that it was already allocated to, reload might not
219 terminate. */
220 static HARD_REG_SET *pseudo_previous_regs;
222 /* This vector of reg sets indicates, for each pseudo, which hard
223 registers may not be used for retrying global allocation because they
224 are used as spill registers during one of the insns in which the
225 pseudo is live. */
226 static HARD_REG_SET *pseudo_forbidden_regs;
228 /* All hard regs that have been used as spill registers for any insn are
229 marked in this set. */
230 static HARD_REG_SET used_spill_regs;
232 /* Index of last register assigned as a spill register. We allocate in
233 a round-robin fashion. */
234 static int last_spill_reg;
236 /* Nonzero if indirect addressing is supported on the machine; this means
237 that spilling (REG n) does not require reloading it into a register in
238 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
239 value indicates the level of indirect addressing supported, e.g., two
240 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
241 a hard register. */
242 static char spill_indirect_levels;
244 /* Nonzero if indirect addressing is supported when the innermost MEM is
245 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
246 which these are valid is the same as spill_indirect_levels, above. */
247 char indirect_symref_ok;
249 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
250 char double_reg_address_ok;
252 /* Record the stack slot for each spilled hard register. */
253 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
255 /* Width allocated so far for that stack slot. */
256 static unsigned int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
258 /* Record which pseudos needed to be spilled. */
259 static regset_head spilled_pseudos;
261 /* Record which pseudos changed their allocation in finish_spills. */
262 static regset_head changed_allocation_pseudos;
264 /* Used for communication between order_regs_for_reload and count_pseudo.
265 Used to avoid counting one pseudo twice. */
266 static regset_head pseudos_counted;
268 /* First uid used by insns created by reload in this function.
269 Used in find_equiv_reg. */
270 int reload_first_uid;
272 /* Flag set by local-alloc or global-alloc if anything is live in
273 a call-clobbered reg across calls. */
274 int caller_save_needed;
276 /* Set to 1 while reload_as_needed is operating.
277 Required by some machines to handle any generated moves differently. */
278 int reload_in_progress = 0;
280 /* These arrays record the insn_code of insns that may be needed to
281 perform input and output reloads of special objects. They provide a
282 place to pass a scratch register. */
283 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
284 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
286 /* This obstack is used for allocation of rtl during register elimination.
287 The allocated storage can be freed once find_reloads has processed the
288 insn. */
289 static struct obstack reload_obstack;
291 /* Points to the beginning of the reload_obstack. All insn_chain structures
292 are allocated first. */
293 static char *reload_startobj;
295 /* The point after all insn_chain structures. Used to quickly deallocate
296 memory allocated in copy_reloads during calculate_needs_all_insns. */
297 static char *reload_firstobj;
299 /* This points before all local rtl generated by register elimination.
300 Used to quickly free all memory after processing one insn. */
301 static char *reload_insn_firstobj;
303 /* List of insn_chain instructions, one for every insn that reload needs to
304 examine. */
305 struct insn_chain *reload_insn_chain;
307 /* List of all insns needing reloads. */
308 static struct insn_chain *insns_need_reload;
310 /* This structure is used to record information about register eliminations.
311 Each array entry describes one possible way of eliminating a register
312 in favor of another. If there is more than one way of eliminating a
313 particular register, the most preferred should be specified first. */
315 struct elim_table
317 int from; /* Register number to be eliminated. */
318 int to; /* Register number used as replacement. */
319 HOST_WIDE_INT initial_offset; /* Initial difference between values. */
320 int can_eliminate; /* Nonzero if this elimination can be done. */
321 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
322 insns made by reload. */
323 HOST_WIDE_INT offset; /* Current offset between the two regs. */
324 HOST_WIDE_INT previous_offset;/* Offset at end of previous insn. */
325 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
326 rtx from_rtx; /* REG rtx for the register to be eliminated.
327 We cannot simply compare the number since
328 we might then spuriously replace a hard
329 register corresponding to a pseudo
330 assigned to the reg to be eliminated. */
331 rtx to_rtx; /* REG rtx for the replacement. */
334 static struct elim_table *reg_eliminate = 0;
336 /* This is an intermediate structure to initialize the table. It has
337 exactly the members provided by ELIMINABLE_REGS. */
338 static const struct elim_table_1
340 const int from;
341 const int to;
342 } reg_eliminate_1[] =
344 /* If a set of eliminable registers was specified, define the table from it.
345 Otherwise, default to the normal case of the frame pointer being
346 replaced by the stack pointer. */
348 #ifdef ELIMINABLE_REGS
349 ELIMINABLE_REGS;
350 #else
351 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
352 #endif
354 #define NUM_ELIMINABLE_REGS ARRAY_SIZE (reg_eliminate_1)
356 /* Record the number of pending eliminations that have an offset not equal
357 to their initial offset. If nonzero, we use a new copy of each
358 replacement result in any insns encountered. */
359 int num_not_at_initial_offset;
361 /* Count the number of registers that we may be able to eliminate. */
362 static int num_eliminable;
363 /* And the number of registers that are equivalent to a constant that
364 can be eliminated to frame_pointer / arg_pointer + constant. */
365 static int num_eliminable_invariants;
367 /* For each label, we record the offset of each elimination. If we reach
368 a label by more than one path and an offset differs, we cannot do the
369 elimination. This information is indexed by the difference of the
370 number of the label and the first label number. We can't offset the
371 pointer itself as this can cause problems on machines with segmented
372 memory. The first table is an array of flags that records whether we
373 have yet encountered a label and the second table is an array of arrays,
374 one entry in the latter array for each elimination. */
376 static int first_label_num;
377 static char *offsets_known_at;
378 static HOST_WIDE_INT (*offsets_at)[NUM_ELIMINABLE_REGS];
380 /* Number of labels in the current function. */
382 static int num_labels;
384 static void replace_pseudos_in (rtx *, enum machine_mode, rtx);
385 static void maybe_fix_stack_asms (void);
386 static void copy_reloads (struct insn_chain *);
387 static void calculate_needs_all_insns (int);
388 static int find_reg (struct insn_chain *, int);
389 static void find_reload_regs (struct insn_chain *);
390 static void select_reload_regs (void);
391 static void delete_caller_save_insns (void);
393 static void spill_failure (rtx, enum reg_class);
394 static void count_spilled_pseudo (int, int, int);
395 static void delete_dead_insn (rtx);
396 static void alter_reg (int, int, bool);
397 static void set_label_offsets (rtx, rtx, int);
398 static void check_eliminable_occurrences (rtx);
399 static void elimination_effects (rtx, enum machine_mode);
400 static int eliminate_regs_in_insn (rtx, int);
401 static void update_eliminable_offsets (void);
402 static void mark_not_eliminable (rtx, const_rtx, void *);
403 static void set_initial_elim_offsets (void);
404 static bool verify_initial_elim_offsets (void);
405 static void set_initial_label_offsets (void);
406 static void set_offsets_for_label (rtx);
407 static void init_elim_table (void);
408 static void update_eliminables (HARD_REG_SET *);
409 static void spill_hard_reg (unsigned int, int);
410 static int finish_spills (int);
411 static void scan_paradoxical_subregs (rtx);
412 static void count_pseudo (int);
413 static void order_regs_for_reload (struct insn_chain *);
414 static void reload_as_needed (int);
415 static void forget_old_reloads_1 (rtx, const_rtx, void *);
416 static void forget_marked_reloads (regset);
417 static int reload_reg_class_lower (const void *, const void *);
418 static void mark_reload_reg_in_use (unsigned int, int, enum reload_type,
419 enum machine_mode);
420 static void clear_reload_reg_in_use (unsigned int, int, enum reload_type,
421 enum machine_mode);
422 static int reload_reg_free_p (unsigned int, int, enum reload_type);
423 static int reload_reg_free_for_value_p (int, int, int, enum reload_type,
424 rtx, rtx, int, int);
425 static int free_for_value_p (int, enum machine_mode, int, enum reload_type,
426 rtx, rtx, int, int);
427 static int reload_reg_reaches_end_p (unsigned int, int, enum reload_type);
428 static int allocate_reload_reg (struct insn_chain *, int, int);
429 static int conflicts_with_override (rtx);
430 static void failed_reload (rtx, int);
431 static int set_reload_reg (int, int);
432 static void choose_reload_regs_init (struct insn_chain *, rtx *);
433 static void choose_reload_regs (struct insn_chain *);
434 static void merge_assigned_reloads (rtx);
435 static void emit_input_reload_insns (struct insn_chain *, struct reload *,
436 rtx, int);
437 static void emit_output_reload_insns (struct insn_chain *, struct reload *,
438 int);
439 static void do_input_reload (struct insn_chain *, struct reload *, int);
440 static void do_output_reload (struct insn_chain *, struct reload *, int);
441 static void emit_reload_insns (struct insn_chain *);
442 static void delete_output_reload (rtx, int, int, rtx);
443 static void delete_address_reloads (rtx, rtx);
444 static void delete_address_reloads_1 (rtx, rtx, rtx);
445 static rtx inc_for_reload (rtx, rtx, rtx, int);
446 #ifdef AUTO_INC_DEC
447 static void add_auto_inc_notes (rtx, rtx);
448 #endif
449 static void copy_eh_notes (rtx, rtx);
450 static void substitute (rtx *, const_rtx, rtx);
451 static bool gen_reload_chain_without_interm_reg_p (int, int);
452 static int reloads_conflict (int, int);
453 static rtx gen_reload (rtx, rtx, int, enum reload_type);
454 static rtx emit_insn_if_valid_for_reload (rtx);
456 /* Initialize the reload pass. This is called at the beginning of compilation
457 and may be called again if the target is reinitialized. */
459 void
460 init_reload (void)
462 int i;
464 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
465 Set spill_indirect_levels to the number of levels such addressing is
466 permitted, zero if it is not permitted at all. */
468 rtx tem
469 = gen_rtx_MEM (Pmode,
470 gen_rtx_PLUS (Pmode,
471 gen_rtx_REG (Pmode,
472 LAST_VIRTUAL_REGISTER + 1),
473 GEN_INT (4)));
474 spill_indirect_levels = 0;
476 while (memory_address_p (QImode, tem))
478 spill_indirect_levels++;
479 tem = gen_rtx_MEM (Pmode, tem);
482 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
484 tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
485 indirect_symref_ok = memory_address_p (QImode, tem);
487 /* See if reg+reg is a valid (and offsettable) address. */
489 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
491 tem = gen_rtx_PLUS (Pmode,
492 gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
493 gen_rtx_REG (Pmode, i));
495 /* This way, we make sure that reg+reg is an offsettable address. */
496 tem = plus_constant (tem, 4);
498 if (memory_address_p (QImode, tem))
500 double_reg_address_ok = 1;
501 break;
505 /* Initialize obstack for our rtl allocation. */
506 gcc_obstack_init (&reload_obstack);
507 reload_startobj = obstack_alloc (&reload_obstack, 0);
509 INIT_REG_SET (&spilled_pseudos);
510 INIT_REG_SET (&changed_allocation_pseudos);
511 INIT_REG_SET (&pseudos_counted);
514 /* List of insn chains that are currently unused. */
515 static struct insn_chain *unused_insn_chains = 0;
517 /* Allocate an empty insn_chain structure. */
518 struct insn_chain *
519 new_insn_chain (void)
521 struct insn_chain *c;
523 if (unused_insn_chains == 0)
525 c = obstack_alloc (&reload_obstack, sizeof (struct insn_chain));
526 INIT_REG_SET (&c->live_throughout);
527 INIT_REG_SET (&c->dead_or_set);
528 INIT_REG_SET (&c->saved);
530 else
532 c = unused_insn_chains;
533 unused_insn_chains = c->next;
535 c->is_caller_save_insn = 0;
536 c->need_operand_change = 0;
537 c->need_reload = 0;
538 c->need_elim = 0;
539 return c;
542 /* Small utility function to set all regs in hard reg set TO which are
543 allocated to pseudos in regset FROM. */
545 void
546 compute_use_by_pseudos (HARD_REG_SET *to, regset from)
548 unsigned int regno;
549 reg_set_iterator rsi;
551 EXECUTE_IF_SET_IN_REG_SET (from, FIRST_PSEUDO_REGISTER, regno, rsi)
553 int r = reg_renumber[regno];
555 if (r < 0)
557 /* reload_combine uses the information from DF_LIVE_IN,
558 which might still contain registers that have not
559 actually been allocated since they have an
560 equivalence. */
561 gcc_assert ((flag_ira && optimize) || reload_completed);
563 else
564 add_to_hard_reg_set (to, PSEUDO_REGNO_MODE (regno), r);
568 /* Replace all pseudos found in LOC with their corresponding
569 equivalences. */
571 static void
572 replace_pseudos_in (rtx *loc, enum machine_mode mem_mode, rtx usage)
574 rtx x = *loc;
575 enum rtx_code code;
576 const char *fmt;
577 int i, j;
579 if (! x)
580 return;
582 code = GET_CODE (x);
583 if (code == REG)
585 unsigned int regno = REGNO (x);
587 if (regno < FIRST_PSEUDO_REGISTER)
588 return;
590 x = eliminate_regs (x, mem_mode, usage);
591 if (x != *loc)
593 *loc = x;
594 replace_pseudos_in (loc, mem_mode, usage);
595 return;
598 if (reg_equiv_constant[regno])
599 *loc = reg_equiv_constant[regno];
600 else if (reg_equiv_mem[regno])
601 *loc = reg_equiv_mem[regno];
602 else if (reg_equiv_address[regno])
603 *loc = gen_rtx_MEM (GET_MODE (x), reg_equiv_address[regno]);
604 else
606 gcc_assert (!REG_P (regno_reg_rtx[regno])
607 || REGNO (regno_reg_rtx[regno]) != regno);
608 *loc = regno_reg_rtx[regno];
611 return;
613 else if (code == MEM)
615 replace_pseudos_in (& XEXP (x, 0), GET_MODE (x), usage);
616 return;
619 /* Process each of our operands recursively. */
620 fmt = GET_RTX_FORMAT (code);
621 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
622 if (*fmt == 'e')
623 replace_pseudos_in (&XEXP (x, i), mem_mode, usage);
624 else if (*fmt == 'E')
625 for (j = 0; j < XVECLEN (x, i); j++)
626 replace_pseudos_in (& XVECEXP (x, i, j), mem_mode, usage);
629 /* Determine if the current function has an exception receiver block
630 that reaches the exit block via non-exceptional edges */
632 static bool
633 has_nonexceptional_receiver (void)
635 edge e;
636 edge_iterator ei;
637 basic_block *tos, *worklist, bb;
639 /* If we're not optimizing, then just err on the safe side. */
640 if (!optimize)
641 return true;
643 /* First determine which blocks can reach exit via normal paths. */
644 tos = worklist = xmalloc (sizeof (basic_block) * (n_basic_blocks + 1));
646 FOR_EACH_BB (bb)
647 bb->flags &= ~BB_REACHABLE;
649 /* Place the exit block on our worklist. */
650 EXIT_BLOCK_PTR->flags |= BB_REACHABLE;
651 *tos++ = EXIT_BLOCK_PTR;
653 /* Iterate: find everything reachable from what we've already seen. */
654 while (tos != worklist)
656 bb = *--tos;
658 FOR_EACH_EDGE (e, ei, bb->preds)
659 if (!(e->flags & EDGE_ABNORMAL))
661 basic_block src = e->src;
663 if (!(src->flags & BB_REACHABLE))
665 src->flags |= BB_REACHABLE;
666 *tos++ = src;
670 free (worklist);
672 /* Now see if there's a reachable block with an exceptional incoming
673 edge. */
674 FOR_EACH_BB (bb)
675 if (bb->flags & BB_REACHABLE)
676 FOR_EACH_EDGE (e, ei, bb->preds)
677 if (e->flags & EDGE_ABNORMAL)
678 return true;
680 /* No exceptional block reached exit unexceptionally. */
681 return false;
685 /* Global variables used by reload and its subroutines. */
687 /* Set during calculate_needs if an insn needs register elimination. */
688 static int something_needs_elimination;
689 /* Set during calculate_needs if an insn needs an operand changed. */
690 static int something_needs_operands_changed;
692 /* Nonzero means we couldn't get enough spill regs. */
693 static int failure;
695 /* Temporary array of pseudo-register number. */
696 static int *temp_pseudo_reg_arr;
698 /* Main entry point for the reload pass.
700 FIRST is the first insn of the function being compiled.
702 GLOBAL nonzero means we were called from global_alloc
703 and should attempt to reallocate any pseudoregs that we
704 displace from hard regs we will use for reloads.
705 If GLOBAL is zero, we do not have enough information to do that,
706 so any pseudo reg that is spilled must go to the stack.
708 Return value is nonzero if reload failed
709 and we must not do any more for this function. */
712 reload (rtx first, int global)
714 int i, n;
715 rtx insn;
716 struct elim_table *ep;
717 basic_block bb;
719 /* Make sure even insns with volatile mem refs are recognizable. */
720 init_recog ();
722 failure = 0;
724 reload_firstobj = obstack_alloc (&reload_obstack, 0);
726 /* Make sure that the last insn in the chain
727 is not something that needs reloading. */
728 emit_note (NOTE_INSN_DELETED);
730 /* Enable find_equiv_reg to distinguish insns made by reload. */
731 reload_first_uid = get_max_uid ();
733 #ifdef SECONDARY_MEMORY_NEEDED
734 /* Initialize the secondary memory table. */
735 clear_secondary_mem ();
736 #endif
738 /* We don't have a stack slot for any spill reg yet. */
739 memset (spill_stack_slot, 0, sizeof spill_stack_slot);
740 memset (spill_stack_slot_width, 0, sizeof spill_stack_slot_width);
742 /* Initialize the save area information for caller-save, in case some
743 are needed. */
744 init_save_areas ();
746 /* Compute which hard registers are now in use
747 as homes for pseudo registers.
748 This is done here rather than (eg) in global_alloc
749 because this point is reached even if not optimizing. */
750 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
751 mark_home_live (i);
753 /* A function that has a nonlocal label that can reach the exit
754 block via non-exceptional paths must save all call-saved
755 registers. */
756 if (current_function_has_nonlocal_label
757 && has_nonexceptional_receiver ())
758 current_function_saves_all_registers = 1;
760 if (current_function_saves_all_registers)
761 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
762 if (! call_used_regs[i] && ! fixed_regs[i] && ! LOCAL_REGNO (i))
763 df_set_regs_ever_live (i, true);
765 /* Find all the pseudo registers that didn't get hard regs
766 but do have known equivalent constants or memory slots.
767 These include parameters (known equivalent to parameter slots)
768 and cse'd or loop-moved constant memory addresses.
770 Record constant equivalents in reg_equiv_constant
771 so they will be substituted by find_reloads.
772 Record memory equivalents in reg_mem_equiv so they can
773 be substituted eventually by altering the REG-rtx's. */
775 reg_equiv_constant = XCNEWVEC (rtx, max_regno);
776 reg_equiv_invariant = XCNEWVEC (rtx, max_regno);
777 reg_equiv_mem = XCNEWVEC (rtx, max_regno);
778 reg_equiv_alt_mem_list = XCNEWVEC (rtx, max_regno);
779 reg_equiv_address = XCNEWVEC (rtx, max_regno);
780 reg_max_ref_width = XCNEWVEC (unsigned int, max_regno);
781 reg_old_renumber = XCNEWVEC (short, max_regno);
782 memcpy (reg_old_renumber, reg_renumber, max_regno * sizeof (short));
783 pseudo_forbidden_regs = XNEWVEC (HARD_REG_SET, max_regno);
784 pseudo_previous_regs = XCNEWVEC (HARD_REG_SET, max_regno);
786 CLEAR_HARD_REG_SET (bad_spill_regs_global);
788 /* Look for REG_EQUIV notes; record what each pseudo is equivalent
789 to. Also find all paradoxical subregs and find largest such for
790 each pseudo. */
792 num_eliminable_invariants = 0;
793 for (insn = first; insn; insn = NEXT_INSN (insn))
795 rtx set = single_set (insn);
797 /* We may introduce USEs that we want to remove at the end, so
798 we'll mark them with QImode. Make sure there are no
799 previously-marked insns left by say regmove. */
800 if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
801 && GET_MODE (insn) != VOIDmode)
802 PUT_MODE (insn, VOIDmode);
804 if (INSN_P (insn))
805 scan_paradoxical_subregs (PATTERN (insn));
807 if (set != 0 && REG_P (SET_DEST (set)))
809 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
810 rtx x;
812 if (! note)
813 continue;
815 i = REGNO (SET_DEST (set));
816 x = XEXP (note, 0);
818 if (i <= LAST_VIRTUAL_REGISTER)
819 continue;
821 if (! function_invariant_p (x)
822 || ! flag_pic
823 /* A function invariant is often CONSTANT_P but may
824 include a register. We promise to only pass
825 CONSTANT_P objects to LEGITIMATE_PIC_OPERAND_P. */
826 || (CONSTANT_P (x)
827 && LEGITIMATE_PIC_OPERAND_P (x)))
829 /* It can happen that a REG_EQUIV note contains a MEM
830 that is not a legitimate memory operand. As later
831 stages of reload assume that all addresses found
832 in the reg_equiv_* arrays were originally legitimate,
833 we ignore such REG_EQUIV notes. */
834 if (memory_operand (x, VOIDmode))
836 /* Always unshare the equivalence, so we can
837 substitute into this insn without touching the
838 equivalence. */
839 reg_equiv_memory_loc[i] = copy_rtx (x);
841 else if (function_invariant_p (x))
843 if (GET_CODE (x) == PLUS)
845 /* This is PLUS of frame pointer and a constant,
846 and might be shared. Unshare it. */
847 reg_equiv_invariant[i] = copy_rtx (x);
848 num_eliminable_invariants++;
850 else if (x == frame_pointer_rtx || x == arg_pointer_rtx)
852 reg_equiv_invariant[i] = x;
853 num_eliminable_invariants++;
855 else if (LEGITIMATE_CONSTANT_P (x))
856 reg_equiv_constant[i] = x;
857 else
859 reg_equiv_memory_loc[i]
860 = force_const_mem (GET_MODE (SET_DEST (set)), x);
861 if (! reg_equiv_memory_loc[i])
862 reg_equiv_init[i] = NULL_RTX;
865 else
867 reg_equiv_init[i] = NULL_RTX;
868 continue;
871 else
872 reg_equiv_init[i] = NULL_RTX;
876 if (dump_file)
877 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
878 if (reg_equiv_init[i])
880 fprintf (dump_file, "init_insns for %u: ", i);
881 print_inline_rtx (dump_file, reg_equiv_init[i], 20);
882 fprintf (dump_file, "\n");
885 init_elim_table ();
887 first_label_num = get_first_label_num ();
888 num_labels = max_label_num () - first_label_num;
890 /* Allocate the tables used to store offset information at labels. */
891 /* We used to use alloca here, but the size of what it would try to
892 allocate would occasionally cause it to exceed the stack limit and
893 cause a core dump. */
894 offsets_known_at = XNEWVEC (char, num_labels);
895 offsets_at = (HOST_WIDE_INT (*)[NUM_ELIMINABLE_REGS]) xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (HOST_WIDE_INT));
897 /* Alter each pseudo-reg rtx to contain its hard reg number. Assign
898 stack slots to the pseudos that lack hard regs or equivalents.
899 Do not touch virtual registers. */
901 temp_pseudo_reg_arr = XNEWVEC (int, max_regno - LAST_VIRTUAL_REGISTER - 1);
902 for (n = 0, i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
903 temp_pseudo_reg_arr[n++] = i;
905 if (flag_ira && optimize)
906 /* Ask IRA to order pseudo-registers for better stack slot
907 sharing. */
908 sort_regnos_for_alter_reg (temp_pseudo_reg_arr, n, reg_max_ref_width);
910 for (i = 0; i < n; i++)
911 alter_reg (temp_pseudo_reg_arr[i], -1, false);
913 /* If we have some registers we think can be eliminated, scan all insns to
914 see if there is an insn that sets one of these registers to something
915 other than itself plus a constant. If so, the register cannot be
916 eliminated. Doing this scan here eliminates an extra pass through the
917 main reload loop in the most common case where register elimination
918 cannot be done. */
919 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
920 if (INSN_P (insn))
921 note_stores (PATTERN (insn), mark_not_eliminable, NULL);
923 maybe_fix_stack_asms ();
925 insns_need_reload = 0;
926 something_needs_elimination = 0;
928 /* Initialize to -1, which means take the first spill register. */
929 last_spill_reg = -1;
931 /* Spill any hard regs that we know we can't eliminate. */
932 CLEAR_HARD_REG_SET (used_spill_regs);
933 /* There can be multiple ways to eliminate a register;
934 they should be listed adjacently.
935 Elimination for any register fails only if all possible ways fail. */
936 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; )
938 int from = ep->from;
939 int can_eliminate = 0;
942 can_eliminate |= ep->can_eliminate;
943 ep++;
945 while (ep < &reg_eliminate[NUM_ELIMINABLE_REGS] && ep->from == from);
946 if (! can_eliminate)
947 spill_hard_reg (from, 1);
950 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
951 if (frame_pointer_needed)
952 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, 1);
953 #endif
954 finish_spills (global);
956 /* From now on, we may need to generate moves differently. We may also
957 allow modifications of insns which cause them to not be recognized.
958 Any such modifications will be cleaned up during reload itself. */
959 reload_in_progress = 1;
961 /* This loop scans the entire function each go-round
962 and repeats until one repetition spills no additional hard regs. */
963 for (;;)
965 int something_changed;
966 int did_spill;
967 HOST_WIDE_INT starting_frame_size;
969 starting_frame_size = get_frame_size ();
971 set_initial_elim_offsets ();
972 set_initial_label_offsets ();
974 /* For each pseudo register that has an equivalent location defined,
975 try to eliminate any eliminable registers (such as the frame pointer)
976 assuming initial offsets for the replacement register, which
977 is the normal case.
979 If the resulting location is directly addressable, substitute
980 the MEM we just got directly for the old REG.
982 If it is not addressable but is a constant or the sum of a hard reg
983 and constant, it is probably not addressable because the constant is
984 out of range, in that case record the address; we will generate
985 hairy code to compute the address in a register each time it is
986 needed. Similarly if it is a hard register, but one that is not
987 valid as an address register.
989 If the location is not addressable, but does not have one of the
990 above forms, assign a stack slot. We have to do this to avoid the
991 potential of producing lots of reloads if, e.g., a location involves
992 a pseudo that didn't get a hard register and has an equivalent memory
993 location that also involves a pseudo that didn't get a hard register.
995 Perhaps at some point we will improve reload_when_needed handling
996 so this problem goes away. But that's very hairy. */
998 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
999 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
1001 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
1003 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
1004 XEXP (x, 0)))
1005 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
1006 else if (CONSTANT_P (XEXP (x, 0))
1007 || (REG_P (XEXP (x, 0))
1008 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
1009 || (GET_CODE (XEXP (x, 0)) == PLUS
1010 && REG_P (XEXP (XEXP (x, 0), 0))
1011 && (REGNO (XEXP (XEXP (x, 0), 0))
1012 < FIRST_PSEUDO_REGISTER)
1013 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
1014 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
1015 else
1017 /* Make a new stack slot. Then indicate that something
1018 changed so we go back and recompute offsets for
1019 eliminable registers because the allocation of memory
1020 below might change some offset. reg_equiv_{mem,address}
1021 will be set up for this pseudo on the next pass around
1022 the loop. */
1023 reg_equiv_memory_loc[i] = 0;
1024 reg_equiv_init[i] = 0;
1025 alter_reg (i, -1, true);
1029 if (caller_save_needed)
1030 setup_save_areas ();
1032 /* If we allocated another stack slot, redo elimination bookkeeping. */
1033 if (starting_frame_size != get_frame_size ())
1034 continue;
1035 if (starting_frame_size && cfun->stack_alignment_needed)
1037 /* If we have a stack frame, we must align it now. The
1038 stack size may be a part of the offset computation for
1039 register elimination. So if this changes the stack size,
1040 then repeat the elimination bookkeeping. We don't
1041 realign when there is no stack, as that will cause a
1042 stack frame when none is needed should
1043 STARTING_FRAME_OFFSET not be already aligned to
1044 STACK_BOUNDARY. */
1045 assign_stack_local (BLKmode, 0, cfun->stack_alignment_needed);
1046 if (starting_frame_size != get_frame_size ())
1047 continue;
1050 if (caller_save_needed)
1052 save_call_clobbered_regs ();
1053 /* That might have allocated new insn_chain structures. */
1054 reload_firstobj = obstack_alloc (&reload_obstack, 0);
1057 calculate_needs_all_insns (global);
1059 if (! flag_ira || ! optimize)
1060 /* Don't do it for IRA. We need this info because we don't
1061 change live_throughout and dead_or_set for chains when IRA
1062 is used. */
1063 CLEAR_REG_SET (&spilled_pseudos);
1065 did_spill = 0;
1067 something_changed = 0;
1069 /* If we allocated any new memory locations, make another pass
1070 since it might have changed elimination offsets. */
1071 if (starting_frame_size != get_frame_size ())
1072 something_changed = 1;
1074 /* Even if the frame size remained the same, we might still have
1075 changed elimination offsets, e.g. if find_reloads called
1076 force_const_mem requiring the back end to allocate a constant
1077 pool base register that needs to be saved on the stack. */
1078 else if (!verify_initial_elim_offsets ())
1079 something_changed = 1;
1082 HARD_REG_SET to_spill;
1083 CLEAR_HARD_REG_SET (to_spill);
1084 update_eliminables (&to_spill);
1085 AND_COMPL_HARD_REG_SET (used_spill_regs, to_spill);
1087 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1088 if (TEST_HARD_REG_BIT (to_spill, i))
1090 spill_hard_reg (i, 1);
1091 did_spill = 1;
1093 /* Regardless of the state of spills, if we previously had
1094 a register that we thought we could eliminate, but now can
1095 not eliminate, we must run another pass.
1097 Consider pseudos which have an entry in reg_equiv_* which
1098 reference an eliminable register. We must make another pass
1099 to update reg_equiv_* so that we do not substitute in the
1100 old value from when we thought the elimination could be
1101 performed. */
1102 something_changed = 1;
1106 select_reload_regs ();
1107 if (failure)
1108 goto failed;
1110 if (insns_need_reload != 0 || did_spill)
1111 something_changed |= finish_spills (global);
1113 if (! something_changed)
1114 break;
1116 if (caller_save_needed)
1117 delete_caller_save_insns ();
1119 obstack_free (&reload_obstack, reload_firstobj);
1122 if (flag_ira && optimize)
1123 /* Restore the original insn chain order for correct reload
1124 work. */
1125 sort_insn_chain (FALSE);
1127 /* If global-alloc was run, notify it of any register eliminations we have
1128 done. */
1129 if (global)
1130 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1131 if (ep->can_eliminate)
1132 mark_elimination (ep->from, ep->to);
1134 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1135 If that insn didn't set the register (i.e., it copied the register to
1136 memory), just delete that insn instead of the equivalencing insn plus
1137 anything now dead. If we call delete_dead_insn on that insn, we may
1138 delete the insn that actually sets the register if the register dies
1139 there and that is incorrect. */
1141 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1143 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0)
1145 rtx list;
1146 for (list = reg_equiv_init[i]; list; list = XEXP (list, 1))
1148 rtx equiv_insn = XEXP (list, 0);
1150 /* If we already deleted the insn or if it may trap, we can't
1151 delete it. The latter case shouldn't happen, but can
1152 if an insn has a variable address, gets a REG_EH_REGION
1153 note added to it, and then gets converted into a load
1154 from a constant address. */
1155 if (NOTE_P (equiv_insn)
1156 || can_throw_internal (equiv_insn))
1158 else if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn)))
1159 delete_dead_insn (equiv_insn);
1160 else
1161 SET_INSN_DELETED (equiv_insn);
1166 /* Use the reload registers where necessary
1167 by generating move instructions to move the must-be-register
1168 values into or out of the reload registers. */
1170 if (insns_need_reload != 0 || something_needs_elimination
1171 || something_needs_operands_changed)
1173 HOST_WIDE_INT old_frame_size = get_frame_size ();
1175 reload_as_needed (global);
1177 gcc_assert (old_frame_size == get_frame_size ());
1179 gcc_assert (verify_initial_elim_offsets ());
1182 /* If we were able to eliminate the frame pointer, show that it is no
1183 longer live at the start of any basic block. If it ls live by
1184 virtue of being in a pseudo, that pseudo will be marked live
1185 and hence the frame pointer will be known to be live via that
1186 pseudo. */
1188 if (! frame_pointer_needed)
1189 FOR_EACH_BB (bb)
1190 bitmap_clear_bit (df_get_live_in (bb), HARD_FRAME_POINTER_REGNUM);
1192 /* Come here (with failure set nonzero) if we can't get enough spill
1193 regs. */
1194 failed:
1196 CLEAR_REG_SET (&spilled_pseudos);
1197 reload_in_progress = 0;
1199 /* Now eliminate all pseudo regs by modifying them into
1200 their equivalent memory references.
1201 The REG-rtx's for the pseudos are modified in place,
1202 so all insns that used to refer to them now refer to memory.
1204 For a reg that has a reg_equiv_address, all those insns
1205 were changed by reloading so that no insns refer to it any longer;
1206 but the DECL_RTL of a variable decl may refer to it,
1207 and if so this causes the debugging info to mention the variable. */
1209 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1211 rtx addr = 0;
1213 if (reg_equiv_mem[i])
1214 addr = XEXP (reg_equiv_mem[i], 0);
1216 if (reg_equiv_address[i])
1217 addr = reg_equiv_address[i];
1219 if (addr)
1221 if (reg_renumber[i] < 0)
1223 rtx reg = regno_reg_rtx[i];
1225 REG_USERVAR_P (reg) = 0;
1226 PUT_CODE (reg, MEM);
1227 XEXP (reg, 0) = addr;
1228 if (reg_equiv_memory_loc[i])
1229 MEM_COPY_ATTRIBUTES (reg, reg_equiv_memory_loc[i]);
1230 else
1232 MEM_IN_STRUCT_P (reg) = MEM_SCALAR_P (reg) = 0;
1233 MEM_ATTRS (reg) = 0;
1235 MEM_NOTRAP_P (reg) = 1;
1237 else if (reg_equiv_mem[i])
1238 XEXP (reg_equiv_mem[i], 0) = addr;
1242 /* We must set reload_completed now since the cleanup_subreg_operands call
1243 below will re-recognize each insn and reload may have generated insns
1244 which are only valid during and after reload. */
1245 reload_completed = 1;
1247 /* Make a pass over all the insns and delete all USEs which we inserted
1248 only to tag a REG_EQUAL note on them. Remove all REG_DEAD and REG_UNUSED
1249 notes. Delete all CLOBBER insns, except those that refer to the return
1250 value and the special mem:BLK CLOBBERs added to prevent the scheduler
1251 from misarranging variable-array code, and simplify (subreg (reg))
1252 operands. Also remove all REG_RETVAL and REG_LIBCALL notes since they
1253 are no longer useful or accurate. Strip and regenerate REG_INC notes
1254 that may have been moved around. */
1256 for (insn = first; insn; insn = NEXT_INSN (insn))
1257 if (INSN_P (insn))
1259 rtx *pnote;
1261 if (CALL_P (insn))
1263 HARD_REG_SET used_function_regs;
1265 get_call_invalidated_used_regs (insn, &used_function_regs, false);
1266 IOR_HARD_REG_SET (cfun->emit->call_used_regs, used_function_regs);
1267 replace_pseudos_in (& CALL_INSN_FUNCTION_USAGE (insn),
1268 VOIDmode, CALL_INSN_FUNCTION_USAGE (insn));
1271 if ((GET_CODE (PATTERN (insn)) == USE
1272 /* We mark with QImode USEs introduced by reload itself. */
1273 && (GET_MODE (insn) == QImode
1274 || find_reg_note (insn, REG_EQUAL, NULL_RTX)))
1275 || (GET_CODE (PATTERN (insn)) == CLOBBER
1276 && (!MEM_P (XEXP (PATTERN (insn), 0))
1277 || GET_MODE (XEXP (PATTERN (insn), 0)) != BLKmode
1278 || (GET_CODE (XEXP (XEXP (PATTERN (insn), 0), 0)) != SCRATCH
1279 && XEXP (XEXP (PATTERN (insn), 0), 0)
1280 != stack_pointer_rtx))
1281 && (!REG_P (XEXP (PATTERN (insn), 0))
1282 || ! REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))))
1284 delete_insn (insn);
1285 continue;
1288 /* Some CLOBBERs may survive until here and still reference unassigned
1289 pseudos with const equivalent, which may in turn cause ICE in later
1290 passes if the reference remains in place. */
1291 if (GET_CODE (PATTERN (insn)) == CLOBBER)
1292 replace_pseudos_in (& XEXP (PATTERN (insn), 0),
1293 VOIDmode, PATTERN (insn));
1295 /* Discard obvious no-ops, even without -O. This optimization
1296 is fast and doesn't interfere with debugging. */
1297 if (NONJUMP_INSN_P (insn)
1298 && GET_CODE (PATTERN (insn)) == SET
1299 && REG_P (SET_SRC (PATTERN (insn)))
1300 && REG_P (SET_DEST (PATTERN (insn)))
1301 && (REGNO (SET_SRC (PATTERN (insn)))
1302 == REGNO (SET_DEST (PATTERN (insn)))))
1304 delete_insn (insn);
1305 continue;
1308 pnote = &REG_NOTES (insn);
1309 while (*pnote != 0)
1311 if (REG_NOTE_KIND (*pnote) == REG_DEAD
1312 || REG_NOTE_KIND (*pnote) == REG_UNUSED
1313 || REG_NOTE_KIND (*pnote) == REG_INC
1314 || REG_NOTE_KIND (*pnote) == REG_RETVAL
1315 || REG_NOTE_KIND (*pnote) == REG_LIBCALL)
1316 *pnote = XEXP (*pnote, 1);
1317 else
1318 pnote = &XEXP (*pnote, 1);
1321 #ifdef AUTO_INC_DEC
1322 add_auto_inc_notes (insn, PATTERN (insn));
1323 #endif
1325 /* Simplify (subreg (reg)) if it appears as an operand. */
1326 cleanup_subreg_operands (insn);
1328 /* Clean up invalid ASMs so that they don't confuse later passes.
1329 See PR 21299. */
1330 if (asm_noperands (PATTERN (insn)) >= 0)
1332 extract_insn (insn);
1333 if (!constrain_operands (1))
1335 error_for_asm (insn,
1336 "%<asm%> operand has impossible constraints");
1337 delete_insn (insn);
1338 continue;
1343 /* If we are doing stack checking, give a warning if this function's
1344 frame size is larger than we expect. */
1345 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
1347 HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
1348 static int verbose_warned = 0;
1350 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1351 if (df_regs_ever_live_p (i) && ! fixed_regs[i] && call_used_regs[i])
1352 size += UNITS_PER_WORD;
1354 if (size > STACK_CHECK_MAX_FRAME_SIZE)
1356 warning (0, "frame size too large for reliable stack checking");
1357 if (! verbose_warned)
1359 warning (0, "try reducing the number of local variables");
1360 verbose_warned = 1;
1365 /* Indicate that we no longer have known memory locations or constants. */
1366 if (reg_equiv_constant)
1367 free (reg_equiv_constant);
1368 if (reg_equiv_invariant)
1369 free (reg_equiv_invariant);
1370 reg_equiv_constant = 0;
1371 reg_equiv_invariant = 0;
1372 VEC_free (rtx, gc, reg_equiv_memory_loc_vec);
1373 reg_equiv_memory_loc = 0;
1375 free (temp_pseudo_reg_arr);
1377 if (offsets_known_at)
1378 free (offsets_known_at);
1379 if (offsets_at)
1380 free (offsets_at);
1382 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1383 if (reg_equiv_alt_mem_list[i])
1384 free_EXPR_LIST_list (&reg_equiv_alt_mem_list[i]);
1385 free (reg_equiv_alt_mem_list);
1387 free (reg_equiv_mem);
1388 reg_equiv_init = 0;
1389 free (reg_equiv_address);
1390 free (reg_max_ref_width);
1391 free (reg_old_renumber);
1392 free (pseudo_previous_regs);
1393 free (pseudo_forbidden_regs);
1395 CLEAR_HARD_REG_SET (used_spill_regs);
1396 for (i = 0; i < n_spills; i++)
1397 SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
1399 /* Free all the insn_chain structures at once. */
1400 obstack_free (&reload_obstack, reload_startobj);
1401 unused_insn_chains = 0;
1402 fixup_abnormal_edges ();
1404 /* Replacing pseudos with their memory equivalents might have
1405 created shared rtx. Subsequent passes would get confused
1406 by this, so unshare everything here. */
1407 unshare_all_rtl_again (first);
1409 #ifdef STACK_BOUNDARY
1410 /* init_emit has set the alignment of the hard frame pointer
1411 to STACK_BOUNDARY. It is very likely no longer valid if
1412 the hard frame pointer was used for register allocation. */
1413 if (!frame_pointer_needed)
1414 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = BITS_PER_UNIT;
1415 #endif
1417 return failure;
1420 /* Yet another special case. Unfortunately, reg-stack forces people to
1421 write incorrect clobbers in asm statements. These clobbers must not
1422 cause the register to appear in bad_spill_regs, otherwise we'll call
1423 fatal_insn later. We clear the corresponding regnos in the live
1424 register sets to avoid this.
1425 The whole thing is rather sick, I'm afraid. */
1427 static void
1428 maybe_fix_stack_asms (void)
1430 #ifdef STACK_REGS
1431 const char *constraints[MAX_RECOG_OPERANDS];
1432 enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
1433 struct insn_chain *chain;
1435 for (chain = reload_insn_chain; chain != 0; chain = chain->next)
1437 int i, noperands;
1438 HARD_REG_SET clobbered, allowed;
1439 rtx pat;
1441 if (! INSN_P (chain->insn)
1442 || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
1443 continue;
1444 pat = PATTERN (chain->insn);
1445 if (GET_CODE (pat) != PARALLEL)
1446 continue;
1448 CLEAR_HARD_REG_SET (clobbered);
1449 CLEAR_HARD_REG_SET (allowed);
1451 /* First, make a mask of all stack regs that are clobbered. */
1452 for (i = 0; i < XVECLEN (pat, 0); i++)
1454 rtx t = XVECEXP (pat, 0, i);
1455 if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
1456 SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
1459 /* Get the operand values and constraints out of the insn. */
1460 decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc,
1461 constraints, operand_mode, NULL);
1463 /* For every operand, see what registers are allowed. */
1464 for (i = 0; i < noperands; i++)
1466 const char *p = constraints[i];
1467 /* For every alternative, we compute the class of registers allowed
1468 for reloading in CLS, and merge its contents into the reg set
1469 ALLOWED. */
1470 int cls = (int) NO_REGS;
1472 for (;;)
1474 char c = *p;
1476 if (c == '\0' || c == ',' || c == '#')
1478 /* End of one alternative - mark the regs in the current
1479 class, and reset the class. */
1480 IOR_HARD_REG_SET (allowed, reg_class_contents[cls]);
1481 cls = NO_REGS;
1482 p++;
1483 if (c == '#')
1484 do {
1485 c = *p++;
1486 } while (c != '\0' && c != ',');
1487 if (c == '\0')
1488 break;
1489 continue;
1492 switch (c)
1494 case '=': case '+': case '*': case '%': case '?': case '!':
1495 case '0': case '1': case '2': case '3': case '4': case 'm':
1496 case '<': case '>': case 'V': case 'o': case '&': case 'E':
1497 case 'F': case 's': case 'i': case 'n': case 'X': case 'I':
1498 case 'J': case 'K': case 'L': case 'M': case 'N': case 'O':
1499 case 'P':
1500 break;
1502 case 'p':
1503 cls = (int) reg_class_subunion[cls]
1504 [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
1505 break;
1507 case 'g':
1508 case 'r':
1509 cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
1510 break;
1512 default:
1513 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
1514 cls = (int) reg_class_subunion[cls]
1515 [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
1516 else
1517 cls = (int) reg_class_subunion[cls]
1518 [(int) REG_CLASS_FROM_CONSTRAINT (c, p)];
1520 p += CONSTRAINT_LEN (c, p);
1523 /* Those of the registers which are clobbered, but allowed by the
1524 constraints, must be usable as reload registers. So clear them
1525 out of the life information. */
1526 AND_HARD_REG_SET (allowed, clobbered);
1527 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1528 if (TEST_HARD_REG_BIT (allowed, i))
1530 CLEAR_REGNO_REG_SET (&chain->live_throughout, i);
1531 CLEAR_REGNO_REG_SET (&chain->dead_or_set, i);
1535 #endif
1538 /* Copy the global variables n_reloads and rld into the corresponding elts
1539 of CHAIN. */
1540 static void
1541 copy_reloads (struct insn_chain *chain)
1543 chain->n_reloads = n_reloads;
1544 chain->rld = obstack_alloc (&reload_obstack,
1545 n_reloads * sizeof (struct reload));
1546 memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1547 reload_insn_firstobj = obstack_alloc (&reload_obstack, 0);
1550 /* Walk the chain of insns, and determine for each whether it needs reloads
1551 and/or eliminations. Build the corresponding insns_need_reload list, and
1552 set something_needs_elimination as appropriate. */
1553 static void
1554 calculate_needs_all_insns (int global)
1556 struct insn_chain **pprev_reload = &insns_need_reload;
1557 struct insn_chain *chain, *next = 0;
1559 something_needs_elimination = 0;
1561 reload_insn_firstobj = obstack_alloc (&reload_obstack, 0);
1562 for (chain = reload_insn_chain; chain != 0; chain = next)
1564 rtx insn = chain->insn;
1566 next = chain->next;
1568 /* Clear out the shortcuts. */
1569 chain->n_reloads = 0;
1570 chain->need_elim = 0;
1571 chain->need_reload = 0;
1572 chain->need_operand_change = 0;
1574 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
1575 include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
1576 what effects this has on the known offsets at labels. */
1578 if (LABEL_P (insn) || JUMP_P (insn)
1579 || (INSN_P (insn) && REG_NOTES (insn) != 0))
1580 set_label_offsets (insn, insn, 0);
1582 if (INSN_P (insn))
1584 rtx old_body = PATTERN (insn);
1585 int old_code = INSN_CODE (insn);
1586 rtx old_notes = REG_NOTES (insn);
1587 int did_elimination = 0;
1588 int operands_changed = 0;
1589 rtx set = single_set (insn);
1591 /* Skip insns that only set an equivalence. */
1592 if (set && REG_P (SET_DEST (set))
1593 && reg_renumber[REGNO (SET_DEST (set))] < 0
1594 && (reg_equiv_constant[REGNO (SET_DEST (set))]
1595 || (reg_equiv_invariant[REGNO (SET_DEST (set))]))
1596 && reg_equiv_init[REGNO (SET_DEST (set))])
1597 continue;
1599 /* If needed, eliminate any eliminable registers. */
1600 if (num_eliminable || num_eliminable_invariants)
1601 did_elimination = eliminate_regs_in_insn (insn, 0);
1603 /* Analyze the instruction. */
1604 operands_changed = find_reloads (insn, 0, spill_indirect_levels,
1605 global, spill_reg_order);
1607 /* If a no-op set needs more than one reload, this is likely
1608 to be something that needs input address reloads. We
1609 can't get rid of this cleanly later, and it is of no use
1610 anyway, so discard it now.
1611 We only do this when expensive_optimizations is enabled,
1612 since this complements reload inheritance / output
1613 reload deletion, and it can make debugging harder. */
1614 if (flag_expensive_optimizations && n_reloads > 1)
1616 rtx set = single_set (insn);
1617 if (set
1619 ((SET_SRC (set) == SET_DEST (set)
1620 && REG_P (SET_SRC (set))
1621 && REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER)
1622 || (REG_P (SET_SRC (set)) && REG_P (SET_DEST (set))
1623 && reg_renumber[REGNO (SET_SRC (set))] < 0
1624 && reg_renumber[REGNO (SET_DEST (set))] < 0
1625 && reg_equiv_memory_loc[REGNO (SET_SRC (set))] != NULL
1626 && reg_equiv_memory_loc[REGNO (SET_DEST (set))] != NULL
1627 && rtx_equal_p (reg_equiv_memory_loc
1628 [REGNO (SET_SRC (set))],
1629 reg_equiv_memory_loc
1630 [REGNO (SET_DEST (set))]))))
1632 if (flag_ira && optimize)
1633 /* Inform IRA about the insn deletion. */
1634 mark_memory_move_deletion (REGNO (SET_DEST (set)),
1635 REGNO (SET_SRC (set)));
1636 delete_insn (insn);
1637 /* Delete it from the reload chain. */
1638 if (chain->prev)
1639 chain->prev->next = next;
1640 else
1641 reload_insn_chain = next;
1642 if (next)
1643 next->prev = chain->prev;
1644 chain->next = unused_insn_chains;
1645 unused_insn_chains = chain;
1646 continue;
1649 if (num_eliminable)
1650 update_eliminable_offsets ();
1652 /* Remember for later shortcuts which insns had any reloads or
1653 register eliminations. */
1654 chain->need_elim = did_elimination;
1655 chain->need_reload = n_reloads > 0;
1656 chain->need_operand_change = operands_changed;
1658 /* Discard any register replacements done. */
1659 if (did_elimination)
1661 obstack_free (&reload_obstack, reload_insn_firstobj);
1662 PATTERN (insn) = old_body;
1663 INSN_CODE (insn) = old_code;
1664 REG_NOTES (insn) = old_notes;
1665 something_needs_elimination = 1;
1668 something_needs_operands_changed |= operands_changed;
1670 if (n_reloads != 0)
1672 copy_reloads (chain);
1673 *pprev_reload = chain;
1674 pprev_reload = &chain->next_need_reload;
1678 *pprev_reload = 0;
1681 /* Comparison function for qsort to decide which of two reloads
1682 should be handled first. *P1 and *P2 are the reload numbers. */
1684 static int
1685 reload_reg_class_lower (const void *r1p, const void *r2p)
1687 int r1 = *(const short *) r1p, r2 = *(const short *) r2p;
1688 int t;
1690 /* Consider required reloads before optional ones. */
1691 t = rld[r1].optional - rld[r2].optional;
1692 if (t != 0)
1693 return t;
1695 /* Count all solitary classes before non-solitary ones. */
1696 t = ((reg_class_size[(int) rld[r2].class] == 1)
1697 - (reg_class_size[(int) rld[r1].class] == 1));
1698 if (t != 0)
1699 return t;
1701 /* Aside from solitaires, consider all multi-reg groups first. */
1702 t = rld[r2].nregs - rld[r1].nregs;
1703 if (t != 0)
1704 return t;
1706 /* Consider reloads in order of increasing reg-class number. */
1707 t = (int) rld[r1].class - (int) rld[r2].class;
1708 if (t != 0)
1709 return t;
1711 /* If reloads are equally urgent, sort by reload number,
1712 so that the results of qsort leave nothing to chance. */
1713 return r1 - r2;
1716 /* The cost of spilling each hard reg. */
1717 static int spill_cost[FIRST_PSEUDO_REGISTER];
1719 /* When spilling multiple hard registers, we use SPILL_COST for the first
1720 spilled hard reg and SPILL_ADD_COST for subsequent regs. SPILL_ADD_COST
1721 only the first hard reg for a multi-reg pseudo. */
1722 static int spill_add_cost[FIRST_PSEUDO_REGISTER];
1724 /* Map of hard regno to pseudo regno currently occupying the hard
1725 reg. */
1726 static int hard_regno_to_pseudo_regno[FIRST_PSEUDO_REGISTER];
1728 /* Update the spill cost arrays, considering that pseudo REG is live. */
1730 static void
1731 count_pseudo (int reg)
1733 int freq = REG_FREQ (reg);
1734 int r = reg_renumber[reg];
1735 int nregs;
1737 if (REGNO_REG_SET_P (&pseudos_counted, reg)
1738 || REGNO_REG_SET_P (&spilled_pseudos, reg)
1739 /* Ignore spilled pseudo-registers which can be here only if IRA
1740 is used. */
1741 || (flag_ira && optimize && r < 0))
1742 return;
1744 SET_REGNO_REG_SET (&pseudos_counted, reg);
1746 gcc_assert (r >= 0);
1748 spill_add_cost[r] += freq;
1749 nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
1750 while (nregs-- > 0)
1752 hard_regno_to_pseudo_regno[r + nregs] = reg;
1753 spill_cost[r + nregs] += freq;
1757 /* Calculate the SPILL_COST and SPILL_ADD_COST arrays and determine the
1758 contents of BAD_SPILL_REGS for the insn described by CHAIN. */
1760 static void
1761 order_regs_for_reload (struct insn_chain *chain)
1763 unsigned i;
1764 HARD_REG_SET used_by_pseudos;
1765 HARD_REG_SET used_by_pseudos2;
1766 reg_set_iterator rsi;
1768 COPY_HARD_REG_SET (bad_spill_regs, fixed_reg_set);
1770 memset (spill_cost, 0, sizeof spill_cost);
1771 memset (spill_add_cost, 0, sizeof spill_add_cost);
1772 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1773 hard_regno_to_pseudo_regno[i] = -1;
1775 /* Count number of uses of each hard reg by pseudo regs allocated to it
1776 and then order them by decreasing use. First exclude hard registers
1777 that are live in or across this insn. */
1779 REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
1780 REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
1781 IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos);
1782 IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos2);
1784 /* Now find out which pseudos are allocated to it, and update
1785 hard_reg_n_uses. */
1786 CLEAR_REG_SET (&pseudos_counted);
1788 EXECUTE_IF_SET_IN_REG_SET
1789 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
1791 if (!REGNO_REG_SET_P (&chain->saved, i))
1792 count_pseudo (i);
1794 EXECUTE_IF_SET_IN_REG_SET
1795 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
1797 gcc_assert (!REGNO_REG_SET_P (&chain->saved, i));
1798 count_pseudo (i);
1800 CLEAR_REG_SET (&pseudos_counted);
1803 /* Vector of reload-numbers showing the order in which the reloads should
1804 be processed. */
1805 static short reload_order[MAX_RELOADS];
1807 /* This is used to keep track of the spill regs used in one insn. */
1808 static HARD_REG_SET used_spill_regs_local;
1810 /* We decided to spill hard register SPILLED, which has a size of
1811 SPILLED_NREGS. Determine how pseudo REG, which is live during the insn,
1812 is affected. We will add it to SPILLED_PSEUDOS if necessary, and we will
1813 update SPILL_COST/SPILL_ADD_COST. */
1815 static void
1816 count_spilled_pseudo (int spilled, int spilled_nregs, int reg)
1818 int freq = REG_FREQ (reg);
1819 int r = reg_renumber[reg];
1820 int nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
1822 /* Ignore spilled pseudo-registers which can be here only if IRA is
1823 used. */
1824 if ((flag_ira && optimize && r < 0)
1825 || REGNO_REG_SET_P (&spilled_pseudos, reg)
1826 || spilled + spilled_nregs <= r || r + nregs <= spilled)
1827 return;
1829 SET_REGNO_REG_SET (&spilled_pseudos, reg);
1831 spill_add_cost[r] -= freq;
1832 while (nregs-- > 0)
1834 hard_regno_to_pseudo_regno[r + nregs] = -1;
1835 spill_cost[r + nregs] -= freq;
1839 /* Find reload register to use for reload number ORDER. */
1841 static int
1842 find_reg (struct insn_chain *chain, int order)
1844 int rnum = reload_order[order];
1845 struct reload *rl = rld + rnum;
1846 int best_cost = INT_MAX;
1847 int best_reg = -1;
1848 unsigned int i, j, n;
1849 int k;
1850 HARD_REG_SET not_usable;
1851 HARD_REG_SET used_by_other_reload;
1852 reg_set_iterator rsi;
1853 static int regno_pseudo_regs[FIRST_PSEUDO_REGISTER];
1854 static int best_regno_pseudo_regs[FIRST_PSEUDO_REGISTER];
1856 COPY_HARD_REG_SET (not_usable, bad_spill_regs);
1857 IOR_HARD_REG_SET (not_usable, bad_spill_regs_global);
1858 IOR_COMPL_HARD_REG_SET (not_usable, reg_class_contents[rl->class]);
1860 CLEAR_HARD_REG_SET (used_by_other_reload);
1861 for (k = 0; k < order; k++)
1863 int other = reload_order[k];
1865 if (rld[other].regno >= 0 && reloads_conflict (other, rnum))
1866 for (j = 0; j < rld[other].nregs; j++)
1867 SET_HARD_REG_BIT (used_by_other_reload, rld[other].regno + j);
1870 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1872 unsigned int regno = i;
1874 if (! TEST_HARD_REG_BIT (not_usable, regno)
1875 && ! TEST_HARD_REG_BIT (used_by_other_reload, regno)
1876 && HARD_REGNO_MODE_OK (regno, rl->mode))
1878 int this_cost = spill_cost[regno];
1879 int ok = 1;
1880 unsigned int this_nregs = hard_regno_nregs[regno][rl->mode];
1882 for (j = 1; j < this_nregs; j++)
1884 this_cost += spill_add_cost[regno + j];
1885 if ((TEST_HARD_REG_BIT (not_usable, regno + j))
1886 || TEST_HARD_REG_BIT (used_by_other_reload, regno + j))
1887 ok = 0;
1889 if (! ok)
1890 continue;
1892 if (flag_ira && optimize)
1894 /* Ask IRA to find a better pseudo-register for
1895 spilling. */
1896 for (n = j = 0; j < this_nregs; j++)
1898 int r = hard_regno_to_pseudo_regno[regno + j];
1900 if (r < 0)
1901 continue;
1902 if (n == 0 || regno_pseudo_regs[n - 1] != r)
1903 regno_pseudo_regs[n++] = r;
1905 regno_pseudo_regs[n++] = -1;
1906 if (best_reg < 0
1907 || better_spill_reload_regno_p (regno_pseudo_regs,
1908 best_regno_pseudo_regs,
1909 rl->in, rl->out,
1910 chain->insn))
1912 best_reg = regno;
1913 for (j = 0;; j++)
1915 best_regno_pseudo_regs[j] = regno_pseudo_regs[j];
1916 if (regno_pseudo_regs[j] < 0)
1917 break;
1920 continue;
1923 if (rl->in && REG_P (rl->in) && REGNO (rl->in) == regno)
1924 this_cost--;
1925 if (rl->out && REG_P (rl->out) && REGNO (rl->out) == regno)
1926 this_cost--;
1927 if (this_cost < best_cost
1928 /* Among registers with equal cost, prefer caller-saved ones, or
1929 use REG_ALLOC_ORDER if it is defined. */
1930 || (this_cost == best_cost
1931 #ifdef REG_ALLOC_ORDER
1932 && (inv_reg_alloc_order[regno]
1933 < inv_reg_alloc_order[best_reg])
1934 #else
1935 && call_used_regs[regno]
1936 && ! call_used_regs[best_reg]
1937 #endif
1940 best_reg = regno;
1941 best_cost = this_cost;
1945 if (best_reg == -1)
1946 return 0;
1948 if (dump_file)
1949 fprintf (dump_file, "Using reg %d for reload %d\n", best_reg, rnum);
1951 rl->nregs = hard_regno_nregs[best_reg][rl->mode];
1952 rl->regno = best_reg;
1954 EXECUTE_IF_SET_IN_REG_SET
1955 (&chain->saved, FIRST_PSEUDO_REGISTER, j, rsi)
1957 int nregs;
1958 int r = reg_renumber[j];
1960 if (r < 0)
1961 continue;
1962 nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (j)];
1963 if (dump_file != NULL
1964 && ((best_reg <= r && r < best_reg + (int) rl->nregs)
1965 || (r <= best_reg && best_reg < r + nregs)))
1967 fprintf (dump_file, "using saved reg %d (of %u) for insn %u\n",
1968 r, j, INSN_UID (chain->insn));
1969 break;
1973 EXECUTE_IF_SET_IN_REG_SET
1974 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, j, rsi)
1976 if (!REGNO_REG_SET_P (&chain->saved, j))
1977 count_spilled_pseudo (best_reg, rl->nregs, j);
1980 EXECUTE_IF_SET_IN_REG_SET
1981 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, j, rsi)
1983 gcc_assert (!REGNO_REG_SET_P (&chain->saved, j));
1984 count_spilled_pseudo (best_reg, rl->nregs, j);
1987 for (i = 0; i < rl->nregs; i++)
1989 gcc_assert (spill_cost[best_reg + i] == 0);
1990 gcc_assert (spill_add_cost[best_reg + i] == 0);
1991 gcc_assert (hard_regno_to_pseudo_regno[best_reg + i] == -1);
1992 SET_HARD_REG_BIT (used_spill_regs_local, best_reg + i);
1994 return 1;
1997 /* Find more reload regs to satisfy the remaining need of an insn, which
1998 is given by CHAIN.
1999 Do it by ascending class number, since otherwise a reg
2000 might be spilled for a big class and might fail to count
2001 for a smaller class even though it belongs to that class. */
2003 static void
2004 find_reload_regs (struct insn_chain *chain)
2006 int i;
2008 /* In order to be certain of getting the registers we need,
2009 we must sort the reloads into order of increasing register class.
2010 Then our grabbing of reload registers will parallel the process
2011 that provided the reload registers. */
2012 for (i = 0; i < chain->n_reloads; i++)
2014 /* Show whether this reload already has a hard reg. */
2015 if (chain->rld[i].reg_rtx)
2017 int regno = REGNO (chain->rld[i].reg_rtx);
2018 chain->rld[i].regno = regno;
2019 chain->rld[i].nregs
2020 = hard_regno_nregs[regno][GET_MODE (chain->rld[i].reg_rtx)];
2022 else
2023 chain->rld[i].regno = -1;
2024 reload_order[i] = i;
2027 n_reloads = chain->n_reloads;
2028 memcpy (rld, chain->rld, n_reloads * sizeof (struct reload));
2030 CLEAR_HARD_REG_SET (used_spill_regs_local);
2032 if (dump_file)
2033 fprintf (dump_file, "Spilling for insn %d.\n", INSN_UID (chain->insn));
2035 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
2037 /* Compute the order of preference for hard registers to spill. */
2039 order_regs_for_reload (chain);
2041 for (i = 0; i < n_reloads; i++)
2043 int r = reload_order[i];
2045 /* Ignore reloads that got marked inoperative. */
2046 if ((rld[r].out != 0 || rld[r].in != 0 || rld[r].secondary_p)
2047 && ! rld[r].optional
2048 && rld[r].regno == -1)
2049 if (! find_reg (chain, i))
2051 if (dump_file)
2052 fprintf (dump_file, "reload failure for reload %d\n", r);
2053 spill_failure (chain->insn, rld[r].class);
2054 failure = 1;
2055 return;
2059 COPY_HARD_REG_SET (chain->used_spill_regs, used_spill_regs_local);
2060 IOR_HARD_REG_SET (used_spill_regs, used_spill_regs_local);
2062 memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
2065 static void
2066 select_reload_regs (void)
2068 struct insn_chain *chain;
2070 /* Try to satisfy the needs for each insn. */
2071 for (chain = insns_need_reload; chain != 0;
2072 chain = chain->next_need_reload)
2073 find_reload_regs (chain);
2076 /* Delete all insns that were inserted by emit_caller_save_insns during
2077 this iteration. */
2078 static void
2079 delete_caller_save_insns (void)
2081 struct insn_chain *c = reload_insn_chain;
2083 CLEAR_HARD_REG_SET (used_spill_regs);
2084 for (;;)
2086 while (c != 0 && c->is_caller_save_insn)
2088 struct insn_chain *next = c->next;
2089 rtx insn = c->insn;
2091 if (dump_file)
2092 fprintf (dump_file, "removing caller save insn %u\n",
2093 INSN_UID (insn));
2095 if (c == reload_insn_chain)
2096 reload_insn_chain = next;
2097 delete_insn (insn);
2099 if (next)
2100 next->prev = c->prev;
2101 if (c->prev)
2102 c->prev->next = next;
2103 c->next = unused_insn_chains;
2104 unused_insn_chains = c;
2105 c = next;
2107 if (c == 0)
2108 break;
2109 /* Invalidating spill hard registers got from caller-saved hard
2110 registers. */
2111 if (c->need_reload)
2113 HARD_REG_SET saved;
2114 int i;
2115 unsigned int regno;
2116 reg_set_iterator rsi;
2118 CLEAR_HARD_REG_SET (saved);
2119 EXECUTE_IF_SET_IN_REG_SET (&c->saved, FIRST_PSEUDO_REGISTER,
2120 regno, rsi)
2122 int r = reg_renumber[regno];
2124 if (r < 0)
2125 continue;
2126 add_to_hard_reg_set (&saved, PSEUDO_REGNO_MODE (regno), r);
2128 if (!hard_reg_set_empty_p (saved))
2129 for (i = 0; i < c->n_reloads; i++)
2131 int nregs;
2132 struct reload *rl = &c->rld[i];
2133 int r = rl->regno;
2135 if (rl->regno < 0)
2136 continue;
2137 nregs = rl->nregs;
2138 for (nregs--; nregs >= 0; nregs--)
2139 if (TEST_HARD_REG_BIT (saved, r + nregs))
2141 if (dump_file != NULL)
2142 fprintf (dump_file,
2143 "invalidating %d reg for insn %u reload\n",
2144 rl->regno, INSN_UID (c->insn));
2145 rl->regno = -1;
2146 break;
2149 AND_COMPL_HARD_REG_SET (c->used_spill_regs, saved);
2150 IOR_HARD_REG_SET (used_spill_regs, c->used_spill_regs);
2152 c = c->next;
2156 /* Handle the failure to find a register to spill.
2157 INSN should be one of the insns which needed this particular spill reg. */
2159 static void
2160 spill_failure (rtx insn, enum reg_class class)
2162 if (asm_noperands (PATTERN (insn)) >= 0)
2163 error_for_asm (insn, "can't find a register in class %qs while "
2164 "reloading %<asm%>",
2165 reg_class_names[class]);
2166 else
2168 error ("unable to find a register to spill in class %qs",
2169 reg_class_names[class]);
2171 if (dump_file)
2173 fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
2174 debug_reload_to_stream (dump_file);
2176 fatal_insn ("this is the insn:", insn);
2180 /* Delete an unneeded INSN and any previous insns who sole purpose is loading
2181 data that is dead in INSN. */
2183 static void
2184 delete_dead_insn (rtx insn)
2186 rtx prev = prev_real_insn (insn);
2187 rtx prev_dest;
2189 /* If the previous insn sets a register that dies in our insn, delete it
2190 too. */
2191 if (prev && GET_CODE (PATTERN (prev)) == SET
2192 && (prev_dest = SET_DEST (PATTERN (prev)), REG_P (prev_dest))
2193 && reg_mentioned_p (prev_dest, PATTERN (insn))
2194 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
2195 && ! side_effects_p (SET_SRC (PATTERN (prev))))
2196 delete_dead_insn (prev);
2198 SET_INSN_DELETED (insn);
2201 /* Modify the home of pseudo-reg I.
2202 The new home is present in reg_renumber[I].
2204 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2205 or it may be -1, meaning there is none or it is not relevant.
2206 This is used so that all pseudos spilled from a given hard reg
2207 can share one stack slot. */
2209 static void
2210 alter_reg (int i, int from_reg, bool dont_share_p)
2212 /* When outputting an inline function, this can happen
2213 for a reg that isn't actually used. */
2214 if (regno_reg_rtx[i] == 0)
2215 return;
2217 /* If the reg got changed to a MEM at rtl-generation time,
2218 ignore it. */
2219 if (!REG_P (regno_reg_rtx[i]))
2220 return;
2222 /* Modify the reg-rtx to contain the new hard reg
2223 number or else to contain its pseudo reg number. */
2224 SET_REGNO (regno_reg_rtx[i],
2225 reg_renumber[i] >= 0 ? reg_renumber[i] : i);
2227 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2228 allocate a stack slot for it. */
2230 if (reg_renumber[i] < 0
2231 && REG_N_REFS (i) > 0
2232 && reg_equiv_constant[i] == 0
2233 && (reg_equiv_invariant[i] == 0 || reg_equiv_init[i] == 0)
2234 && reg_equiv_memory_loc[i] == 0)
2236 rtx x;
2237 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2238 unsigned int inherent_size = PSEUDO_REGNO_BYTES (i);
2239 unsigned int inherent_align = GET_MODE_ALIGNMENT (mode);
2240 unsigned int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2241 unsigned int min_align = reg_max_ref_width[i] * BITS_PER_UNIT;
2242 int adjust = 0;
2243 bool shared_p = false;
2245 if (flag_ira && optimize)
2246 /* Mark the spill for IRA. */
2247 SET_REGNO_REG_SET (&spilled_pseudos, i);
2248 x = (dont_share_p || ! flag_ira || ! optimize
2249 ? NULL_RTX : reuse_stack_slot (i, inherent_size, total_size));
2250 if (x)
2251 shared_p = true;
2252 /* Each pseudo reg has an inherent size which comes from its own mode,
2253 and a total size which provides room for paradoxical subregs
2254 which refer to the pseudo reg in wider modes.
2256 We can use a slot already allocated if it provides both
2257 enough inherent space and enough total space.
2258 Otherwise, we allocate a new slot, making sure that it has no less
2259 inherent space, and no less total space, then the previous slot. */
2260 else if (from_reg == -1 || (! dont_share_p && flag_ira && optimize))
2262 alias_set_type alias_set = new_alias_set ();
2264 /* No known place to spill from => no slot to reuse. */
2265 x = assign_stack_local (mode, total_size,
2266 min_align > inherent_align
2267 || total_size > inherent_size ? -1 : 0);
2268 if (BYTES_BIG_ENDIAN)
2269 /* Cancel the big-endian correction done in assign_stack_local.
2270 Get the address of the beginning of the slot.
2271 This is so we can do a big-endian correction unconditionally
2272 below. */
2273 adjust = inherent_size - total_size;
2275 /* Nothing can alias this slot except this pseudo. */
2276 set_mem_alias_set (x, alias_set);
2277 dse_record_singleton_alias_set (alias_set, mode);
2279 if (! dont_share_p && flag_ira && optimize)
2280 /* Inform IRA about allocation a new stack slot. */
2281 mark_new_stack_slot (x, i, total_size);
2284 /* Reuse a stack slot if possible. */
2285 else if (spill_stack_slot[from_reg] != 0
2286 && spill_stack_slot_width[from_reg] >= total_size
2287 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2288 >= inherent_size)
2289 && MEM_ALIGN (spill_stack_slot[from_reg]) >= min_align)
2290 x = spill_stack_slot[from_reg];
2291 /* Allocate a bigger slot. */
2292 else
2294 /* Compute maximum size needed, both for inherent size
2295 and for total size. */
2296 rtx stack_slot;
2298 if (spill_stack_slot[from_reg])
2300 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2301 > inherent_size)
2302 mode = GET_MODE (spill_stack_slot[from_reg]);
2303 if (spill_stack_slot_width[from_reg] > total_size)
2304 total_size = spill_stack_slot_width[from_reg];
2305 if (MEM_ALIGN (spill_stack_slot[from_reg]) > min_align)
2306 min_align = MEM_ALIGN (spill_stack_slot[from_reg]);
2309 /* Make a slot with that size. */
2310 x = assign_stack_local (mode, total_size,
2311 min_align > inherent_align
2312 || total_size > inherent_size ? -1 : 0);
2313 stack_slot = x;
2315 /* All pseudos mapped to this slot can alias each other. */
2316 if (spill_stack_slot[from_reg])
2318 alias_set_type alias_set
2319 = MEM_ALIAS_SET (spill_stack_slot[from_reg]);
2320 set_mem_alias_set (x, alias_set);
2321 dse_invalidate_singleton_alias_set (alias_set);
2323 else
2325 alias_set_type alias_set = new_alias_set ();
2326 set_mem_alias_set (x, alias_set);
2327 dse_record_singleton_alias_set (alias_set, mode);
2330 if (BYTES_BIG_ENDIAN)
2332 /* Cancel the big-endian correction done in assign_stack_local.
2333 Get the address of the beginning of the slot.
2334 This is so we can do a big-endian correction unconditionally
2335 below. */
2336 adjust = GET_MODE_SIZE (mode) - total_size;
2337 if (adjust)
2338 stack_slot
2339 = adjust_address_nv (x, mode_for_size (total_size
2340 * BITS_PER_UNIT,
2341 MODE_INT, 1),
2342 adjust);
2345 spill_stack_slot[from_reg] = stack_slot;
2346 spill_stack_slot_width[from_reg] = total_size;
2349 /* On a big endian machine, the "address" of the slot
2350 is the address of the low part that fits its inherent mode. */
2351 if (BYTES_BIG_ENDIAN && inherent_size < total_size)
2352 adjust += (total_size - inherent_size);
2354 /* If we have any adjustment to make, or if the stack slot is the
2355 wrong mode, make a new stack slot. */
2356 x = adjust_address_nv (x, GET_MODE (regno_reg_rtx[i]), adjust);
2358 /* If we have a decl for the original register, set it for the
2359 memory. If this is a shared MEM, make a copy. */
2360 if (shared_p)
2362 x = copy_rtx (x);
2363 set_mem_attrs_from_reg (x, regno_reg_rtx[i]);
2365 else if (REG_EXPR (regno_reg_rtx[i])
2366 && DECL_P (REG_EXPR (regno_reg_rtx[i])))
2368 rtx decl = DECL_RTL_IF_SET (REG_EXPR (regno_reg_rtx[i]));
2370 /* We can do this only for the DECLs home pseudo, not for
2371 any copies of it, since otherwise when the stack slot
2372 is reused, nonoverlapping_memrefs_p might think they
2373 cannot overlap. */
2374 if (decl && REG_P (decl) && REGNO (decl) == (unsigned) i)
2376 if (from_reg != -1 && spill_stack_slot[from_reg] == x)
2377 x = copy_rtx (x);
2379 set_mem_attrs_from_reg (x, regno_reg_rtx[i]);
2383 /* Save the stack slot for later. */
2384 reg_equiv_memory_loc[i] = x;
2388 /* Mark the slots in regs_ever_live for the hard regs used by
2389 pseudo-reg number REGNO, accessed in MODE. */
2391 static void
2392 mark_home_live_1 (int regno, enum machine_mode mode)
2394 int i, lim;
2396 i = reg_renumber[regno];
2397 if (i < 0)
2398 return;
2399 lim = end_hard_regno (mode, i);
2400 while (i < lim)
2401 df_set_regs_ever_live(i++, true);
2404 /* Mark the slots in regs_ever_live for the hard regs
2405 used by pseudo-reg number REGNO. */
2407 void
2408 mark_home_live (int regno)
2410 if (reg_renumber[regno] >= 0)
2411 mark_home_live_1 (regno, PSEUDO_REGNO_MODE (regno));
2414 /* This function handles the tracking of elimination offsets around branches.
2416 X is a piece of RTL being scanned.
2418 INSN is the insn that it came from, if any.
2420 INITIAL_P is nonzero if we are to set the offset to be the initial
2421 offset and zero if we are setting the offset of the label to be the
2422 current offset. */
2424 static void
2425 set_label_offsets (rtx x, rtx insn, int initial_p)
2427 enum rtx_code code = GET_CODE (x);
2428 rtx tem;
2429 unsigned int i;
2430 struct elim_table *p;
2432 switch (code)
2434 case LABEL_REF:
2435 if (LABEL_REF_NONLOCAL_P (x))
2436 return;
2438 x = XEXP (x, 0);
2440 /* ... fall through ... */
2442 case CODE_LABEL:
2443 /* If we know nothing about this label, set the desired offsets. Note
2444 that this sets the offset at a label to be the offset before a label
2445 if we don't know anything about the label. This is not correct for
2446 the label after a BARRIER, but is the best guess we can make. If
2447 we guessed wrong, we will suppress an elimination that might have
2448 been possible had we been able to guess correctly. */
2450 if (! offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num])
2452 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2453 offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
2454 = (initial_p ? reg_eliminate[i].initial_offset
2455 : reg_eliminate[i].offset);
2456 offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num] = 1;
2459 /* Otherwise, if this is the definition of a label and it is
2460 preceded by a BARRIER, set our offsets to the known offset of
2461 that label. */
2463 else if (x == insn
2464 && (tem = prev_nonnote_insn (insn)) != 0
2465 && BARRIER_P (tem))
2466 set_offsets_for_label (insn);
2467 else
2468 /* If neither of the above cases is true, compare each offset
2469 with those previously recorded and suppress any eliminations
2470 where the offsets disagree. */
2472 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2473 if (offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
2474 != (initial_p ? reg_eliminate[i].initial_offset
2475 : reg_eliminate[i].offset))
2476 reg_eliminate[i].can_eliminate = 0;
2478 return;
2480 case JUMP_INSN:
2481 set_label_offsets (PATTERN (insn), insn, initial_p);
2483 /* ... fall through ... */
2485 case INSN:
2486 case CALL_INSN:
2487 /* Any labels mentioned in REG_LABEL_OPERAND notes can be branched
2488 to indirectly and hence must have all eliminations at their
2489 initial offsets. */
2490 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2491 if (REG_NOTE_KIND (tem) == REG_LABEL_OPERAND)
2492 set_label_offsets (XEXP (tem, 0), insn, 1);
2493 return;
2495 case PARALLEL:
2496 case ADDR_VEC:
2497 case ADDR_DIFF_VEC:
2498 /* Each of the labels in the parallel or address vector must be
2499 at their initial offsets. We want the first field for PARALLEL
2500 and ADDR_VEC and the second field for ADDR_DIFF_VEC. */
2502 for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2503 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2504 insn, initial_p);
2505 return;
2507 case SET:
2508 /* We only care about setting PC. If the source is not RETURN,
2509 IF_THEN_ELSE, or a label, disable any eliminations not at
2510 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2511 isn't one of those possibilities. For branches to a label,
2512 call ourselves recursively.
2514 Note that this can disable elimination unnecessarily when we have
2515 a non-local goto since it will look like a non-constant jump to
2516 someplace in the current function. This isn't a significant
2517 problem since such jumps will normally be when all elimination
2518 pairs are back to their initial offsets. */
2520 if (SET_DEST (x) != pc_rtx)
2521 return;
2523 switch (GET_CODE (SET_SRC (x)))
2525 case PC:
2526 case RETURN:
2527 return;
2529 case LABEL_REF:
2530 set_label_offsets (SET_SRC (x), insn, initial_p);
2531 return;
2533 case IF_THEN_ELSE:
2534 tem = XEXP (SET_SRC (x), 1);
2535 if (GET_CODE (tem) == LABEL_REF)
2536 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2537 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2538 break;
2540 tem = XEXP (SET_SRC (x), 2);
2541 if (GET_CODE (tem) == LABEL_REF)
2542 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2543 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2544 break;
2545 return;
2547 default:
2548 break;
2551 /* If we reach here, all eliminations must be at their initial
2552 offset because we are doing a jump to a variable address. */
2553 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2554 if (p->offset != p->initial_offset)
2555 p->can_eliminate = 0;
2556 break;
2558 default:
2559 break;
2563 /* Scan X and replace any eliminable registers (such as fp) with a
2564 replacement (such as sp), plus an offset.
2566 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2567 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2568 MEM, we are allowed to replace a sum of a register and the constant zero
2569 with the register, which we cannot do outside a MEM. In addition, we need
2570 to record the fact that a register is referenced outside a MEM.
2572 If INSN is an insn, it is the insn containing X. If we replace a REG
2573 in a SET_DEST with an equivalent MEM and INSN is nonzero, write a
2574 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2575 the REG is being modified.
2577 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2578 That's used when we eliminate in expressions stored in notes.
2579 This means, do not set ref_outside_mem even if the reference
2580 is outside of MEMs.
2582 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2583 replacements done assuming all offsets are at their initial values. If
2584 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2585 encounter, return the actual location so that find_reloads will do
2586 the proper thing. */
2588 static rtx
2589 eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
2590 bool may_use_invariant)
2592 enum rtx_code code = GET_CODE (x);
2593 struct elim_table *ep;
2594 int regno;
2595 rtx new;
2596 int i, j;
2597 const char *fmt;
2598 int copied = 0;
2600 if (! current_function_decl)
2601 return x;
2603 switch (code)
2605 case CONST_INT:
2606 case CONST_DOUBLE:
2607 case CONST_FIXED:
2608 case CONST_VECTOR:
2609 case CONST:
2610 case SYMBOL_REF:
2611 case CODE_LABEL:
2612 case PC:
2613 case CC0:
2614 case ASM_INPUT:
2615 case ADDR_VEC:
2616 case ADDR_DIFF_VEC:
2617 case RETURN:
2618 return x;
2620 case REG:
2621 regno = REGNO (x);
2623 /* First handle the case where we encounter a bare register that
2624 is eliminable. Replace it with a PLUS. */
2625 if (regno < FIRST_PSEUDO_REGISTER)
2627 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2628 ep++)
2629 if (ep->from_rtx == x && ep->can_eliminate)
2630 return plus_constant (ep->to_rtx, ep->previous_offset);
2633 else if (reg_renumber && reg_renumber[regno] < 0
2634 && reg_equiv_invariant && reg_equiv_invariant[regno])
2636 if (may_use_invariant)
2637 return eliminate_regs_1 (copy_rtx (reg_equiv_invariant[regno]),
2638 mem_mode, insn, true);
2639 /* There exists at least one use of REGNO that cannot be
2640 eliminated. Prevent the defining insn from being deleted. */
2641 reg_equiv_init[regno] = NULL_RTX;
2642 alter_reg (regno, -1, true);
2644 return x;
2646 /* You might think handling MINUS in a manner similar to PLUS is a
2647 good idea. It is not. It has been tried multiple times and every
2648 time the change has had to have been reverted.
2650 Other parts of reload know a PLUS is special (gen_reload for example)
2651 and require special code to handle code a reloaded PLUS operand.
2653 Also consider backends where the flags register is clobbered by a
2654 MINUS, but we can emit a PLUS that does not clobber flags (IA-32,
2655 lea instruction comes to mind). If we try to reload a MINUS, we
2656 may kill the flags register that was holding a useful value.
2658 So, please before trying to handle MINUS, consider reload as a
2659 whole instead of this little section as well as the backend issues. */
2660 case PLUS:
2661 /* If this is the sum of an eliminable register and a constant, rework
2662 the sum. */
2663 if (REG_P (XEXP (x, 0))
2664 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2665 && CONSTANT_P (XEXP (x, 1)))
2667 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2668 ep++)
2669 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2671 /* The only time we want to replace a PLUS with a REG (this
2672 occurs when the constant operand of the PLUS is the negative
2673 of the offset) is when we are inside a MEM. We won't want
2674 to do so at other times because that would change the
2675 structure of the insn in a way that reload can't handle.
2676 We special-case the commonest situation in
2677 eliminate_regs_in_insn, so just replace a PLUS with a
2678 PLUS here, unless inside a MEM. */
2679 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
2680 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2681 return ep->to_rtx;
2682 else
2683 return gen_rtx_PLUS (Pmode, ep->to_rtx,
2684 plus_constant (XEXP (x, 1),
2685 ep->previous_offset));
2688 /* If the register is not eliminable, we are done since the other
2689 operand is a constant. */
2690 return x;
2693 /* If this is part of an address, we want to bring any constant to the
2694 outermost PLUS. We will do this by doing register replacement in
2695 our operands and seeing if a constant shows up in one of them.
2697 Note that there is no risk of modifying the structure of the insn,
2698 since we only get called for its operands, thus we are either
2699 modifying the address inside a MEM, or something like an address
2700 operand of a load-address insn. */
2703 rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true);
2704 rtx new1 = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true);
2706 if (reg_renumber && (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)))
2708 /* If one side is a PLUS and the other side is a pseudo that
2709 didn't get a hard register but has a reg_equiv_constant,
2710 we must replace the constant here since it may no longer
2711 be in the position of any operand. */
2712 if (GET_CODE (new0) == PLUS && REG_P (new1)
2713 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2714 && reg_renumber[REGNO (new1)] < 0
2715 && reg_equiv_constant != 0
2716 && reg_equiv_constant[REGNO (new1)] != 0)
2717 new1 = reg_equiv_constant[REGNO (new1)];
2718 else if (GET_CODE (new1) == PLUS && REG_P (new0)
2719 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2720 && reg_renumber[REGNO (new0)] < 0
2721 && reg_equiv_constant[REGNO (new0)] != 0)
2722 new0 = reg_equiv_constant[REGNO (new0)];
2724 new = form_sum (new0, new1);
2726 /* As above, if we are not inside a MEM we do not want to
2727 turn a PLUS into something else. We might try to do so here
2728 for an addition of 0 if we aren't optimizing. */
2729 if (! mem_mode && GET_CODE (new) != PLUS)
2730 return gen_rtx_PLUS (GET_MODE (x), new, const0_rtx);
2731 else
2732 return new;
2735 return x;
2737 case MULT:
2738 /* If this is the product of an eliminable register and a
2739 constant, apply the distribute law and move the constant out
2740 so that we have (plus (mult ..) ..). This is needed in order
2741 to keep load-address insns valid. This case is pathological.
2742 We ignore the possibility of overflow here. */
2743 if (REG_P (XEXP (x, 0))
2744 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2745 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2746 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2747 ep++)
2748 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2750 if (! mem_mode
2751 /* Refs inside notes don't count for this purpose. */
2752 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2753 || GET_CODE (insn) == INSN_LIST)))
2754 ep->ref_outside_mem = 1;
2756 return
2757 plus_constant (gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
2758 ep->previous_offset * INTVAL (XEXP (x, 1)));
2761 /* ... fall through ... */
2763 case CALL:
2764 case COMPARE:
2765 /* See comments before PLUS about handling MINUS. */
2766 case MINUS:
2767 case DIV: case UDIV:
2768 case MOD: case UMOD:
2769 case AND: case IOR: case XOR:
2770 case ROTATERT: case ROTATE:
2771 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2772 case NE: case EQ:
2773 case GE: case GT: case GEU: case GTU:
2774 case LE: case LT: case LEU: case LTU:
2776 rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false);
2777 rtx new1 = XEXP (x, 1)
2778 ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, false) : 0;
2780 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2781 return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
2783 return x;
2785 case EXPR_LIST:
2786 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2787 if (XEXP (x, 0))
2789 new = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true);
2790 if (new != XEXP (x, 0))
2792 /* If this is a REG_DEAD note, it is not valid anymore.
2793 Using the eliminated version could result in creating a
2794 REG_DEAD note for the stack or frame pointer. */
2795 if (GET_MODE (x) == REG_DEAD)
2796 return (XEXP (x, 1)
2797 ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true)
2798 : NULL_RTX);
2800 x = gen_rtx_EXPR_LIST (REG_NOTE_KIND (x), new, XEXP (x, 1));
2804 /* ... fall through ... */
2806 case INSN_LIST:
2807 /* Now do eliminations in the rest of the chain. If this was
2808 an EXPR_LIST, this might result in allocating more memory than is
2809 strictly needed, but it simplifies the code. */
2810 if (XEXP (x, 1))
2812 new = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true);
2813 if (new != XEXP (x, 1))
2814 return
2815 gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
2817 return x;
2819 case PRE_INC:
2820 case POST_INC:
2821 case PRE_DEC:
2822 case POST_DEC:
2823 /* We do not support elimination of a register that is modified.
2824 elimination_effects has already make sure that this does not
2825 happen. */
2826 return x;
2828 case PRE_MODIFY:
2829 case POST_MODIFY:
2830 /* We do not support elimination of a register that is modified.
2831 elimination_effects has already make sure that this does not
2832 happen. The only remaining case we need to consider here is
2833 that the increment value may be an eliminable register. */
2834 if (GET_CODE (XEXP (x, 1)) == PLUS
2835 && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
2837 rtx new = eliminate_regs_1 (XEXP (XEXP (x, 1), 1), mem_mode,
2838 insn, true);
2840 if (new != XEXP (XEXP (x, 1), 1))
2841 return gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (x, 0),
2842 gen_rtx_PLUS (GET_MODE (x),
2843 XEXP (x, 0), new));
2845 return x;
2847 case STRICT_LOW_PART:
2848 case NEG: case NOT:
2849 case SIGN_EXTEND: case ZERO_EXTEND:
2850 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2851 case FLOAT: case FIX:
2852 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2853 case ABS:
2854 case SQRT:
2855 case FFS:
2856 case CLZ:
2857 case CTZ:
2858 case POPCOUNT:
2859 case PARITY:
2860 case BSWAP:
2861 new = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false);
2862 if (new != XEXP (x, 0))
2863 return gen_rtx_fmt_e (code, GET_MODE (x), new);
2864 return x;
2866 case SUBREG:
2867 /* Similar to above processing, but preserve SUBREG_BYTE.
2868 Convert (subreg (mem)) to (mem) if not paradoxical.
2869 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2870 pseudo didn't get a hard reg, we must replace this with the
2871 eliminated version of the memory location because push_reload
2872 may do the replacement in certain circumstances. */
2873 if (REG_P (SUBREG_REG (x))
2874 && (GET_MODE_SIZE (GET_MODE (x))
2875 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2876 && reg_equiv_memory_loc != 0
2877 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2879 new = SUBREG_REG (x);
2881 else
2882 new = eliminate_regs_1 (SUBREG_REG (x), mem_mode, insn, false);
2884 if (new != SUBREG_REG (x))
2886 int x_size = GET_MODE_SIZE (GET_MODE (x));
2887 int new_size = GET_MODE_SIZE (GET_MODE (new));
2889 if (MEM_P (new)
2890 && ((x_size < new_size
2891 #ifdef WORD_REGISTER_OPERATIONS
2892 /* On these machines, combine can create rtl of the form
2893 (set (subreg:m1 (reg:m2 R) 0) ...)
2894 where m1 < m2, and expects something interesting to
2895 happen to the entire word. Moreover, it will use the
2896 (reg:m2 R) later, expecting all bits to be preserved.
2897 So if the number of words is the same, preserve the
2898 subreg so that push_reload can see it. */
2899 && ! ((x_size - 1) / UNITS_PER_WORD
2900 == (new_size -1 ) / UNITS_PER_WORD)
2901 #endif
2903 || x_size == new_size)
2905 return adjust_address_nv (new, GET_MODE (x), SUBREG_BYTE (x));
2906 else
2907 return gen_rtx_SUBREG (GET_MODE (x), new, SUBREG_BYTE (x));
2910 return x;
2912 case MEM:
2913 /* Our only special processing is to pass the mode of the MEM to our
2914 recursive call and copy the flags. While we are here, handle this
2915 case more efficiently. */
2916 return
2917 replace_equiv_address_nv (x,
2918 eliminate_regs_1 (XEXP (x, 0), GET_MODE (x),
2919 insn, true));
2921 case USE:
2922 /* Handle insn_list USE that a call to a pure function may generate. */
2923 new = eliminate_regs_1 (XEXP (x, 0), 0, insn, false);
2924 if (new != XEXP (x, 0))
2925 return gen_rtx_USE (GET_MODE (x), new);
2926 return x;
2928 case CLOBBER:
2929 case ASM_OPERANDS:
2930 case SET:
2931 gcc_unreachable ();
2933 default:
2934 break;
2937 /* Process each of our operands recursively. If any have changed, make a
2938 copy of the rtx. */
2939 fmt = GET_RTX_FORMAT (code);
2940 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2942 if (*fmt == 'e')
2944 new = eliminate_regs_1 (XEXP (x, i), mem_mode, insn, false);
2945 if (new != XEXP (x, i) && ! copied)
2947 x = shallow_copy_rtx (x);
2948 copied = 1;
2950 XEXP (x, i) = new;
2952 else if (*fmt == 'E')
2954 int copied_vec = 0;
2955 for (j = 0; j < XVECLEN (x, i); j++)
2957 new = eliminate_regs_1 (XVECEXP (x, i, j), mem_mode, insn, false);
2958 if (new != XVECEXP (x, i, j) && ! copied_vec)
2960 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
2961 XVEC (x, i)->elem);
2962 if (! copied)
2964 x = shallow_copy_rtx (x);
2965 copied = 1;
2967 XVEC (x, i) = new_v;
2968 copied_vec = 1;
2970 XVECEXP (x, i, j) = new;
2975 return x;
2979 eliminate_regs (rtx x, enum machine_mode mem_mode, rtx insn)
2981 return eliminate_regs_1 (x, mem_mode, insn, false);
2984 /* Scan rtx X for modifications of elimination target registers. Update
2985 the table of eliminables to reflect the changed state. MEM_MODE is
2986 the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM. */
2988 static void
2989 elimination_effects (rtx x, enum machine_mode mem_mode)
2991 enum rtx_code code = GET_CODE (x);
2992 struct elim_table *ep;
2993 int regno;
2994 int i, j;
2995 const char *fmt;
2997 switch (code)
2999 case CONST_INT:
3000 case CONST_DOUBLE:
3001 case CONST_FIXED:
3002 case CONST_VECTOR:
3003 case CONST:
3004 case SYMBOL_REF:
3005 case CODE_LABEL:
3006 case PC:
3007 case CC0:
3008 case ASM_INPUT:
3009 case ADDR_VEC:
3010 case ADDR_DIFF_VEC:
3011 case RETURN:
3012 return;
3014 case REG:
3015 regno = REGNO (x);
3017 /* First handle the case where we encounter a bare register that
3018 is eliminable. Replace it with a PLUS. */
3019 if (regno < FIRST_PSEUDO_REGISTER)
3021 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3022 ep++)
3023 if (ep->from_rtx == x && ep->can_eliminate)
3025 if (! mem_mode)
3026 ep->ref_outside_mem = 1;
3027 return;
3031 else if (reg_renumber[regno] < 0 && reg_equiv_constant
3032 && reg_equiv_constant[regno]
3033 && ! function_invariant_p (reg_equiv_constant[regno]))
3034 elimination_effects (reg_equiv_constant[regno], mem_mode);
3035 return;
3037 case PRE_INC:
3038 case POST_INC:
3039 case PRE_DEC:
3040 case POST_DEC:
3041 case POST_MODIFY:
3042 case PRE_MODIFY:
3043 /* If we modify the source of an elimination rule, disable it. */
3044 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3045 if (ep->from_rtx == XEXP (x, 0))
3046 ep->can_eliminate = 0;
3048 /* If we modify the target of an elimination rule by adding a constant,
3049 update its offset. If we modify the target in any other way, we'll
3050 have to disable the rule as well. */
3051 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3052 if (ep->to_rtx == XEXP (x, 0))
3054 int size = GET_MODE_SIZE (mem_mode);
3056 /* If more bytes than MEM_MODE are pushed, account for them. */
3057 #ifdef PUSH_ROUNDING
3058 if (ep->to_rtx == stack_pointer_rtx)
3059 size = PUSH_ROUNDING (size);
3060 #endif
3061 if (code == PRE_DEC || code == POST_DEC)
3062 ep->offset += size;
3063 else if (code == PRE_INC || code == POST_INC)
3064 ep->offset -= size;
3065 else if (code == PRE_MODIFY || code == POST_MODIFY)
3067 if (GET_CODE (XEXP (x, 1)) == PLUS
3068 && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
3069 && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
3070 ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1));
3071 else
3072 ep->can_eliminate = 0;
3076 /* These two aren't unary operators. */
3077 if (code == POST_MODIFY || code == PRE_MODIFY)
3078 break;
3080 /* Fall through to generic unary operation case. */
3081 case STRICT_LOW_PART:
3082 case NEG: case NOT:
3083 case SIGN_EXTEND: case ZERO_EXTEND:
3084 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3085 case FLOAT: case FIX:
3086 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3087 case ABS:
3088 case SQRT:
3089 case FFS:
3090 case CLZ:
3091 case CTZ:
3092 case POPCOUNT:
3093 case PARITY:
3094 case BSWAP:
3095 elimination_effects (XEXP (x, 0), mem_mode);
3096 return;
3098 case SUBREG:
3099 if (REG_P (SUBREG_REG (x))
3100 && (GET_MODE_SIZE (GET_MODE (x))
3101 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3102 && reg_equiv_memory_loc != 0
3103 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
3104 return;
3106 elimination_effects (SUBREG_REG (x), mem_mode);
3107 return;
3109 case USE:
3110 /* If using a register that is the source of an eliminate we still
3111 think can be performed, note it cannot be performed since we don't
3112 know how this register is used. */
3113 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3114 if (ep->from_rtx == XEXP (x, 0))
3115 ep->can_eliminate = 0;
3117 elimination_effects (XEXP (x, 0), mem_mode);
3118 return;
3120 case CLOBBER:
3121 /* If clobbering a register that is the replacement register for an
3122 elimination we still think can be performed, note that it cannot
3123 be performed. Otherwise, we need not be concerned about it. */
3124 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3125 if (ep->to_rtx == XEXP (x, 0))
3126 ep->can_eliminate = 0;
3128 elimination_effects (XEXP (x, 0), mem_mode);
3129 return;
3131 case SET:
3132 /* Check for setting a register that we know about. */
3133 if (REG_P (SET_DEST (x)))
3135 /* See if this is setting the replacement register for an
3136 elimination.
3138 If DEST is the hard frame pointer, we do nothing because we
3139 assume that all assignments to the frame pointer are for
3140 non-local gotos and are being done at a time when they are valid
3141 and do not disturb anything else. Some machines want to
3142 eliminate a fake argument pointer (or even a fake frame pointer)
3143 with either the real frame or the stack pointer. Assignments to
3144 the hard frame pointer must not prevent this elimination. */
3146 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3147 ep++)
3148 if (ep->to_rtx == SET_DEST (x)
3149 && SET_DEST (x) != hard_frame_pointer_rtx)
3151 /* If it is being incremented, adjust the offset. Otherwise,
3152 this elimination can't be done. */
3153 rtx src = SET_SRC (x);
3155 if (GET_CODE (src) == PLUS
3156 && XEXP (src, 0) == SET_DEST (x)
3157 && GET_CODE (XEXP (src, 1)) == CONST_INT)
3158 ep->offset -= INTVAL (XEXP (src, 1));
3159 else
3160 ep->can_eliminate = 0;
3164 elimination_effects (SET_DEST (x), 0);
3165 elimination_effects (SET_SRC (x), 0);
3166 return;
3168 case MEM:
3169 /* Our only special processing is to pass the mode of the MEM to our
3170 recursive call. */
3171 elimination_effects (XEXP (x, 0), GET_MODE (x));
3172 return;
3174 default:
3175 break;
3178 fmt = GET_RTX_FORMAT (code);
3179 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3181 if (*fmt == 'e')
3182 elimination_effects (XEXP (x, i), mem_mode);
3183 else if (*fmt == 'E')
3184 for (j = 0; j < XVECLEN (x, i); j++)
3185 elimination_effects (XVECEXP (x, i, j), mem_mode);
3189 /* Descend through rtx X and verify that no references to eliminable registers
3190 remain. If any do remain, mark the involved register as not
3191 eliminable. */
3193 static void
3194 check_eliminable_occurrences (rtx x)
3196 const char *fmt;
3197 int i;
3198 enum rtx_code code;
3200 if (x == 0)
3201 return;
3203 code = GET_CODE (x);
3205 if (code == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3207 struct elim_table *ep;
3209 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3210 if (ep->from_rtx == x)
3211 ep->can_eliminate = 0;
3212 return;
3215 fmt = GET_RTX_FORMAT (code);
3216 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3218 if (*fmt == 'e')
3219 check_eliminable_occurrences (XEXP (x, i));
3220 else if (*fmt == 'E')
3222 int j;
3223 for (j = 0; j < XVECLEN (x, i); j++)
3224 check_eliminable_occurrences (XVECEXP (x, i, j));
3229 /* Scan INSN and eliminate all eliminable registers in it.
3231 If REPLACE is nonzero, do the replacement destructively. Also
3232 delete the insn as dead it if it is setting an eliminable register.
3234 If REPLACE is zero, do all our allocations in reload_obstack.
3236 If no eliminations were done and this insn doesn't require any elimination
3237 processing (these are not identical conditions: it might be updating sp,
3238 but not referencing fp; this needs to be seen during reload_as_needed so
3239 that the offset between fp and sp can be taken into consideration), zero
3240 is returned. Otherwise, 1 is returned. */
3242 static int
3243 eliminate_regs_in_insn (rtx insn, int replace)
3245 int icode = recog_memoized (insn);
3246 rtx old_body = PATTERN (insn);
3247 int insn_is_asm = asm_noperands (old_body) >= 0;
3248 rtx old_set = single_set (insn);
3249 rtx new_body;
3250 int val = 0;
3251 int i;
3252 rtx substed_operand[MAX_RECOG_OPERANDS];
3253 rtx orig_operand[MAX_RECOG_OPERANDS];
3254 struct elim_table *ep;
3255 rtx plus_src, plus_cst_src;
3257 if (! insn_is_asm && icode < 0)
3259 gcc_assert (GET_CODE (PATTERN (insn)) == USE
3260 || GET_CODE (PATTERN (insn)) == CLOBBER
3261 || GET_CODE (PATTERN (insn)) == ADDR_VEC
3262 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
3263 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
3264 return 0;
3267 if (old_set != 0 && REG_P (SET_DEST (old_set))
3268 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3270 /* Check for setting an eliminable register. */
3271 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3272 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3274 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3275 /* If this is setting the frame pointer register to the
3276 hardware frame pointer register and this is an elimination
3277 that will be done (tested above), this insn is really
3278 adjusting the frame pointer downward to compensate for
3279 the adjustment done before a nonlocal goto. */
3280 if (ep->from == FRAME_POINTER_REGNUM
3281 && ep->to == HARD_FRAME_POINTER_REGNUM)
3283 rtx base = SET_SRC (old_set);
3284 rtx base_insn = insn;
3285 HOST_WIDE_INT offset = 0;
3287 while (base != ep->to_rtx)
3289 rtx prev_insn, prev_set;
3291 if (GET_CODE (base) == PLUS
3292 && GET_CODE (XEXP (base, 1)) == CONST_INT)
3294 offset += INTVAL (XEXP (base, 1));
3295 base = XEXP (base, 0);
3297 else if ((prev_insn = prev_nonnote_insn (base_insn)) != 0
3298 && (prev_set = single_set (prev_insn)) != 0
3299 && rtx_equal_p (SET_DEST (prev_set), base))
3301 base = SET_SRC (prev_set);
3302 base_insn = prev_insn;
3304 else
3305 break;
3308 if (base == ep->to_rtx)
3310 rtx src
3311 = plus_constant (ep->to_rtx, offset - ep->offset);
3313 new_body = old_body;
3314 if (! replace)
3316 new_body = copy_insn (old_body);
3317 if (REG_NOTES (insn))
3318 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3320 PATTERN (insn) = new_body;
3321 old_set = single_set (insn);
3323 /* First see if this insn remains valid when we
3324 make the change. If not, keep the INSN_CODE
3325 the same and let reload fit it up. */
3326 validate_change (insn, &SET_SRC (old_set), src, 1);
3327 validate_change (insn, &SET_DEST (old_set),
3328 ep->to_rtx, 1);
3329 if (! apply_change_group ())
3331 SET_SRC (old_set) = src;
3332 SET_DEST (old_set) = ep->to_rtx;
3335 val = 1;
3336 goto done;
3339 #endif
3341 /* In this case this insn isn't serving a useful purpose. We
3342 will delete it in reload_as_needed once we know that this
3343 elimination is, in fact, being done.
3345 If REPLACE isn't set, we can't delete this insn, but needn't
3346 process it since it won't be used unless something changes. */
3347 if (replace)
3349 delete_dead_insn (insn);
3350 return 1;
3352 val = 1;
3353 goto done;
3357 /* We allow one special case which happens to work on all machines we
3358 currently support: a single set with the source or a REG_EQUAL
3359 note being a PLUS of an eliminable register and a constant. */
3360 plus_src = plus_cst_src = 0;
3361 if (old_set && REG_P (SET_DEST (old_set)))
3363 if (GET_CODE (SET_SRC (old_set)) == PLUS)
3364 plus_src = SET_SRC (old_set);
3365 /* First see if the source is of the form (plus (...) CST). */
3366 if (plus_src
3367 && GET_CODE (XEXP (plus_src, 1)) == CONST_INT)
3368 plus_cst_src = plus_src;
3369 else if (REG_P (SET_SRC (old_set))
3370 || plus_src)
3372 /* Otherwise, see if we have a REG_EQUAL note of the form
3373 (plus (...) CST). */
3374 rtx links;
3375 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
3377 if ((REG_NOTE_KIND (links) == REG_EQUAL
3378 || REG_NOTE_KIND (links) == REG_EQUIV)
3379 && GET_CODE (XEXP (links, 0)) == PLUS
3380 && GET_CODE (XEXP (XEXP (links, 0), 1)) == CONST_INT)
3382 plus_cst_src = XEXP (links, 0);
3383 break;
3388 /* Check that the first operand of the PLUS is a hard reg or
3389 the lowpart subreg of one. */
3390 if (plus_cst_src)
3392 rtx reg = XEXP (plus_cst_src, 0);
3393 if (GET_CODE (reg) == SUBREG && subreg_lowpart_p (reg))
3394 reg = SUBREG_REG (reg);
3396 if (!REG_P (reg) || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
3397 plus_cst_src = 0;
3400 if (plus_cst_src)
3402 rtx reg = XEXP (plus_cst_src, 0);
3403 HOST_WIDE_INT offset = INTVAL (XEXP (plus_cst_src, 1));
3405 if (GET_CODE (reg) == SUBREG)
3406 reg = SUBREG_REG (reg);
3408 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3409 if (ep->from_rtx == reg && ep->can_eliminate)
3411 rtx to_rtx = ep->to_rtx;
3412 offset += ep->offset;
3413 offset = trunc_int_for_mode (offset, GET_MODE (reg));
3415 if (GET_CODE (XEXP (plus_cst_src, 0)) == SUBREG)
3416 to_rtx = gen_lowpart (GET_MODE (XEXP (plus_cst_src, 0)),
3417 to_rtx);
3418 /* If we have a nonzero offset, and the source is already
3419 a simple REG, the following transformation would
3420 increase the cost of the insn by replacing a simple REG
3421 with (plus (reg sp) CST). So try only when we already
3422 had a PLUS before. */
3423 if (offset == 0 || plus_src)
3425 rtx new_src = plus_constant (to_rtx, offset);
3427 new_body = old_body;
3428 if (! replace)
3430 new_body = copy_insn (old_body);
3431 if (REG_NOTES (insn))
3432 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3434 PATTERN (insn) = new_body;
3435 old_set = single_set (insn);
3437 /* First see if this insn remains valid when we make the
3438 change. If not, try to replace the whole pattern with
3439 a simple set (this may help if the original insn was a
3440 PARALLEL that was only recognized as single_set due to
3441 REG_UNUSED notes). If this isn't valid either, keep
3442 the INSN_CODE the same and let reload fix it up. */
3443 if (!validate_change (insn, &SET_SRC (old_set), new_src, 0))
3445 rtx new_pat = gen_rtx_SET (VOIDmode,
3446 SET_DEST (old_set), new_src);
3448 if (!validate_change (insn, &PATTERN (insn), new_pat, 0))
3449 SET_SRC (old_set) = new_src;
3452 else
3453 break;
3455 val = 1;
3456 /* This can't have an effect on elimination offsets, so skip right
3457 to the end. */
3458 goto done;
3462 /* Determine the effects of this insn on elimination offsets. */
3463 elimination_effects (old_body, 0);
3465 /* Eliminate all eliminable registers occurring in operands that
3466 can be handled by reload. */
3467 extract_insn (insn);
3468 for (i = 0; i < recog_data.n_operands; i++)
3470 orig_operand[i] = recog_data.operand[i];
3471 substed_operand[i] = recog_data.operand[i];
3473 /* For an asm statement, every operand is eliminable. */
3474 if (insn_is_asm || insn_data[icode].operand[i].eliminable)
3476 bool is_set_src, in_plus;
3478 /* Check for setting a register that we know about. */
3479 if (recog_data.operand_type[i] != OP_IN
3480 && REG_P (orig_operand[i]))
3482 /* If we are assigning to a register that can be eliminated, it
3483 must be as part of a PARALLEL, since the code above handles
3484 single SETs. We must indicate that we can no longer
3485 eliminate this reg. */
3486 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3487 ep++)
3488 if (ep->from_rtx == orig_operand[i])
3489 ep->can_eliminate = 0;
3492 /* Companion to the above plus substitution, we can allow
3493 invariants as the source of a plain move. */
3494 is_set_src = false;
3495 if (old_set && recog_data.operand_loc[i] == &SET_SRC (old_set))
3496 is_set_src = true;
3497 in_plus = false;
3498 if (plus_src
3499 && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
3500 || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
3501 in_plus = true;
3503 substed_operand[i]
3504 = eliminate_regs_1 (recog_data.operand[i], 0,
3505 replace ? insn : NULL_RTX,
3506 is_set_src || in_plus);
3507 if (substed_operand[i] != orig_operand[i])
3508 val = 1;
3509 /* Terminate the search in check_eliminable_occurrences at
3510 this point. */
3511 *recog_data.operand_loc[i] = 0;
3513 /* If an output operand changed from a REG to a MEM and INSN is an
3514 insn, write a CLOBBER insn. */
3515 if (recog_data.operand_type[i] != OP_IN
3516 && REG_P (orig_operand[i])
3517 && MEM_P (substed_operand[i])
3518 && replace)
3519 emit_insn_after (gen_rtx_CLOBBER (VOIDmode, orig_operand[i]),
3520 insn);
3524 for (i = 0; i < recog_data.n_dups; i++)
3525 *recog_data.dup_loc[i]
3526 = *recog_data.operand_loc[(int) recog_data.dup_num[i]];
3528 /* If any eliminable remain, they aren't eliminable anymore. */
3529 check_eliminable_occurrences (old_body);
3531 /* Substitute the operands; the new values are in the substed_operand
3532 array. */
3533 for (i = 0; i < recog_data.n_operands; i++)
3534 *recog_data.operand_loc[i] = substed_operand[i];
3535 for (i = 0; i < recog_data.n_dups; i++)
3536 *recog_data.dup_loc[i] = substed_operand[(int) recog_data.dup_num[i]];
3538 /* If we are replacing a body that was a (set X (plus Y Z)), try to
3539 re-recognize the insn. We do this in case we had a simple addition
3540 but now can do this as a load-address. This saves an insn in this
3541 common case.
3542 If re-recognition fails, the old insn code number will still be used,
3543 and some register operands may have changed into PLUS expressions.
3544 These will be handled by find_reloads by loading them into a register
3545 again. */
3547 if (val)
3549 /* If we aren't replacing things permanently and we changed something,
3550 make another copy to ensure that all the RTL is new. Otherwise
3551 things can go wrong if find_reload swaps commutative operands
3552 and one is inside RTL that has been copied while the other is not. */
3553 new_body = old_body;
3554 if (! replace)
3556 new_body = copy_insn (old_body);
3557 if (REG_NOTES (insn))
3558 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3560 PATTERN (insn) = new_body;
3562 /* If we had a move insn but now we don't, rerecognize it. This will
3563 cause spurious re-recognition if the old move had a PARALLEL since
3564 the new one still will, but we can't call single_set without
3565 having put NEW_BODY into the insn and the re-recognition won't
3566 hurt in this rare case. */
3567 /* ??? Why this huge if statement - why don't we just rerecognize the
3568 thing always? */
3569 if (! insn_is_asm
3570 && old_set != 0
3571 && ((REG_P (SET_SRC (old_set))
3572 && (GET_CODE (new_body) != SET
3573 || !REG_P (SET_SRC (new_body))))
3574 /* If this was a load from or store to memory, compare
3575 the MEM in recog_data.operand to the one in the insn.
3576 If they are not equal, then rerecognize the insn. */
3577 || (old_set != 0
3578 && ((MEM_P (SET_SRC (old_set))
3579 && SET_SRC (old_set) != recog_data.operand[1])
3580 || (MEM_P (SET_DEST (old_set))
3581 && SET_DEST (old_set) != recog_data.operand[0])))
3582 /* If this was an add insn before, rerecognize. */
3583 || GET_CODE (SET_SRC (old_set)) == PLUS))
3585 int new_icode = recog (PATTERN (insn), insn, 0);
3586 if (new_icode >= 0)
3587 INSN_CODE (insn) = new_icode;
3591 /* Restore the old body. If there were any changes to it, we made a copy
3592 of it while the changes were still in place, so we'll correctly return
3593 a modified insn below. */
3594 if (! replace)
3596 /* Restore the old body. */
3597 for (i = 0; i < recog_data.n_operands; i++)
3598 *recog_data.operand_loc[i] = orig_operand[i];
3599 for (i = 0; i < recog_data.n_dups; i++)
3600 *recog_data.dup_loc[i] = orig_operand[(int) recog_data.dup_num[i]];
3603 /* Update all elimination pairs to reflect the status after the current
3604 insn. The changes we make were determined by the earlier call to
3605 elimination_effects.
3607 We also detect cases where register elimination cannot be done,
3608 namely, if a register would be both changed and referenced outside a MEM
3609 in the resulting insn since such an insn is often undefined and, even if
3610 not, we cannot know what meaning will be given to it. Note that it is
3611 valid to have a register used in an address in an insn that changes it
3612 (presumably with a pre- or post-increment or decrement).
3614 If anything changes, return nonzero. */
3616 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3618 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3619 ep->can_eliminate = 0;
3621 ep->ref_outside_mem = 0;
3623 if (ep->previous_offset != ep->offset)
3624 val = 1;
3627 done:
3628 /* If we changed something, perform elimination in REG_NOTES. This is
3629 needed even when REPLACE is zero because a REG_DEAD note might refer
3630 to a register that we eliminate and could cause a different number
3631 of spill registers to be needed in the final reload pass than in
3632 the pre-passes. */
3633 if (val && REG_NOTES (insn) != 0)
3634 REG_NOTES (insn)
3635 = eliminate_regs_1 (REG_NOTES (insn), 0, REG_NOTES (insn), true);
3637 return val;
3640 /* Loop through all elimination pairs.
3641 Recalculate the number not at initial offset.
3643 Compute the maximum offset (minimum offset if the stack does not
3644 grow downward) for each elimination pair. */
3646 static void
3647 update_eliminable_offsets (void)
3649 struct elim_table *ep;
3651 num_not_at_initial_offset = 0;
3652 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3654 ep->previous_offset = ep->offset;
3655 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3656 num_not_at_initial_offset++;
3660 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3661 replacement we currently believe is valid, mark it as not eliminable if X
3662 modifies DEST in any way other than by adding a constant integer to it.
3664 If DEST is the frame pointer, we do nothing because we assume that
3665 all assignments to the hard frame pointer are nonlocal gotos and are being
3666 done at a time when they are valid and do not disturb anything else.
3667 Some machines want to eliminate a fake argument pointer with either the
3668 frame or stack pointer. Assignments to the hard frame pointer must not
3669 prevent this elimination.
3671 Called via note_stores from reload before starting its passes to scan
3672 the insns of the function. */
3674 static void
3675 mark_not_eliminable (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
3677 unsigned int i;
3679 /* A SUBREG of a hard register here is just changing its mode. We should
3680 not see a SUBREG of an eliminable hard register, but check just in
3681 case. */
3682 if (GET_CODE (dest) == SUBREG)
3683 dest = SUBREG_REG (dest);
3685 if (dest == hard_frame_pointer_rtx)
3686 return;
3688 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3689 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3690 && (GET_CODE (x) != SET
3691 || GET_CODE (SET_SRC (x)) != PLUS
3692 || XEXP (SET_SRC (x), 0) != dest
3693 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3695 reg_eliminate[i].can_eliminate_previous
3696 = reg_eliminate[i].can_eliminate = 0;
3697 num_eliminable--;
3701 /* Verify that the initial elimination offsets did not change since the
3702 last call to set_initial_elim_offsets. This is used to catch cases
3703 where something illegal happened during reload_as_needed that could
3704 cause incorrect code to be generated if we did not check for it. */
3706 static bool
3707 verify_initial_elim_offsets (void)
3709 HOST_WIDE_INT t;
3711 if (!num_eliminable)
3712 return true;
3714 #ifdef ELIMINABLE_REGS
3716 struct elim_table *ep;
3718 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3720 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t);
3721 if (t != ep->initial_offset)
3722 return false;
3725 #else
3726 INITIAL_FRAME_POINTER_OFFSET (t);
3727 if (t != reg_eliminate[0].initial_offset)
3728 return false;
3729 #endif
3731 return true;
3734 /* Reset all offsets on eliminable registers to their initial values. */
3736 static void
3737 set_initial_elim_offsets (void)
3739 struct elim_table *ep = reg_eliminate;
3741 #ifdef ELIMINABLE_REGS
3742 for (; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3744 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
3745 ep->previous_offset = ep->offset = ep->initial_offset;
3747 #else
3748 INITIAL_FRAME_POINTER_OFFSET (ep->initial_offset);
3749 ep->previous_offset = ep->offset = ep->initial_offset;
3750 #endif
3752 num_not_at_initial_offset = 0;
3755 /* Subroutine of set_initial_label_offsets called via for_each_eh_label. */
3757 static void
3758 set_initial_eh_label_offset (rtx label)
3760 set_label_offsets (label, NULL_RTX, 1);
3763 /* Initialize the known label offsets.
3764 Set a known offset for each forced label to be at the initial offset
3765 of each elimination. We do this because we assume that all
3766 computed jumps occur from a location where each elimination is
3767 at its initial offset.
3768 For all other labels, show that we don't know the offsets. */
3770 static void
3771 set_initial_label_offsets (void)
3773 rtx x;
3774 memset (offsets_known_at, 0, num_labels);
3776 for (x = forced_labels; x; x = XEXP (x, 1))
3777 if (XEXP (x, 0))
3778 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
3780 for_each_eh_label (set_initial_eh_label_offset);
3783 /* Set all elimination offsets to the known values for the code label given
3784 by INSN. */
3786 static void
3787 set_offsets_for_label (rtx insn)
3789 unsigned int i;
3790 int label_nr = CODE_LABEL_NUMBER (insn);
3791 struct elim_table *ep;
3793 num_not_at_initial_offset = 0;
3794 for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++)
3796 ep->offset = ep->previous_offset
3797 = offsets_at[label_nr - first_label_num][i];
3798 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3799 num_not_at_initial_offset++;
3803 /* See if anything that happened changes which eliminations are valid.
3804 For example, on the SPARC, whether or not the frame pointer can
3805 be eliminated can depend on what registers have been used. We need
3806 not check some conditions again (such as flag_omit_frame_pointer)
3807 since they can't have changed. */
3809 static void
3810 update_eliminables (HARD_REG_SET *pset)
3812 int previous_frame_pointer_needed = frame_pointer_needed;
3813 struct elim_table *ep;
3815 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3816 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
3817 #ifdef ELIMINABLE_REGS
3818 || ! CAN_ELIMINATE (ep->from, ep->to)
3819 #endif
3821 ep->can_eliminate = 0;
3823 /* Look for the case where we have discovered that we can't replace
3824 register A with register B and that means that we will now be
3825 trying to replace register A with register C. This means we can
3826 no longer replace register C with register B and we need to disable
3827 such an elimination, if it exists. This occurs often with A == ap,
3828 B == sp, and C == fp. */
3830 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3832 struct elim_table *op;
3833 int new_to = -1;
3835 if (! ep->can_eliminate && ep->can_eliminate_previous)
3837 /* Find the current elimination for ep->from, if there is a
3838 new one. */
3839 for (op = reg_eliminate;
3840 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3841 if (op->from == ep->from && op->can_eliminate)
3843 new_to = op->to;
3844 break;
3847 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
3848 disable it. */
3849 for (op = reg_eliminate;
3850 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3851 if (op->from == new_to && op->to == ep->to)
3852 op->can_eliminate = 0;
3856 /* See if any registers that we thought we could eliminate the previous
3857 time are no longer eliminable. If so, something has changed and we
3858 must spill the register. Also, recompute the number of eliminable
3859 registers and see if the frame pointer is needed; it is if there is
3860 no elimination of the frame pointer that we can perform. */
3862 frame_pointer_needed = 1;
3863 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3865 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
3866 && ep->to != HARD_FRAME_POINTER_REGNUM)
3867 frame_pointer_needed = 0;
3869 if (! ep->can_eliminate && ep->can_eliminate_previous)
3871 ep->can_eliminate_previous = 0;
3872 SET_HARD_REG_BIT (*pset, ep->from);
3873 num_eliminable--;
3877 /* If we didn't need a frame pointer last time, but we do now, spill
3878 the hard frame pointer. */
3879 if (frame_pointer_needed && ! previous_frame_pointer_needed)
3880 SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM);
3883 /* Return true if X is used as the target register of an elimination. */
3885 bool
3886 elimination_target_reg_p (rtx x)
3888 struct elim_table *ep;
3890 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3891 if (ep->to_rtx == x && ep->can_eliminate)
3892 return true;
3894 return false;
3897 /* Initialize the table of registers to eliminate. */
3899 static void
3900 init_elim_table (void)
3902 struct elim_table *ep;
3903 #ifdef ELIMINABLE_REGS
3904 const struct elim_table_1 *ep1;
3905 #endif
3907 if (!reg_eliminate)
3908 reg_eliminate = xcalloc (sizeof (struct elim_table), NUM_ELIMINABLE_REGS);
3910 /* Does this function require a frame pointer? */
3912 frame_pointer_needed = (! flag_omit_frame_pointer
3913 /* ?? If EXIT_IGNORE_STACK is set, we will not save
3914 and restore sp for alloca. So we can't eliminate
3915 the frame pointer in that case. At some point,
3916 we should improve this by emitting the
3917 sp-adjusting insns for this case. */
3918 || (current_function_calls_alloca
3919 && EXIT_IGNORE_STACK)
3920 || current_function_accesses_prior_frames
3921 || FRAME_POINTER_REQUIRED);
3923 num_eliminable = 0;
3925 #ifdef ELIMINABLE_REGS
3926 for (ep = reg_eliminate, ep1 = reg_eliminate_1;
3927 ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++, ep1++)
3929 ep->from = ep1->from;
3930 ep->to = ep1->to;
3931 ep->can_eliminate = ep->can_eliminate_previous
3932 = (CAN_ELIMINATE (ep->from, ep->to)
3933 && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
3935 #else
3936 reg_eliminate[0].from = reg_eliminate_1[0].from;
3937 reg_eliminate[0].to = reg_eliminate_1[0].to;
3938 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
3939 = ! frame_pointer_needed;
3940 #endif
3942 /* Count the number of eliminable registers and build the FROM and TO
3943 REG rtx's. Note that code in gen_rtx_REG will cause, e.g.,
3944 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
3945 We depend on this. */
3946 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3948 num_eliminable += ep->can_eliminate;
3949 ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
3950 ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
3954 /* Kick all pseudos out of hard register REGNO.
3956 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3957 because we found we can't eliminate some register. In the case, no pseudos
3958 are allowed to be in the register, even if they are only in a block that
3959 doesn't require spill registers, unlike the case when we are spilling this
3960 hard reg to produce another spill register.
3962 Return nonzero if any pseudos needed to be kicked out. */
3964 static void
3965 spill_hard_reg (unsigned int regno, int cant_eliminate)
3967 int i;
3969 if (cant_eliminate)
3971 SET_HARD_REG_BIT (bad_spill_regs_global, regno);
3972 df_set_regs_ever_live (regno, true);
3975 /* Spill every pseudo reg that was allocated to this reg
3976 or to something that overlaps this reg. */
3978 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3979 if (reg_renumber[i] >= 0
3980 && (unsigned int) reg_renumber[i] <= regno
3981 && end_hard_regno (PSEUDO_REGNO_MODE (i), reg_renumber[i]) > regno)
3982 SET_REGNO_REG_SET (&spilled_pseudos, i);
3985 /* After find_reload_regs has been run for all insn that need reloads,
3986 and/or spill_hard_regs was called, this function is used to actually
3987 spill pseudo registers and try to reallocate them. It also sets up the
3988 spill_regs array for use by choose_reload_regs. */
3990 static int
3991 finish_spills (int global)
3993 struct insn_chain *chain;
3994 int something_changed = 0;
3995 unsigned i;
3996 reg_set_iterator rsi;
3998 /* Build the spill_regs array for the function. */
3999 /* If there are some registers still to eliminate and one of the spill regs
4000 wasn't ever used before, additional stack space may have to be
4001 allocated to store this register. Thus, we may have changed the offset
4002 between the stack and frame pointers, so mark that something has changed.
4004 One might think that we need only set VAL to 1 if this is a call-used
4005 register. However, the set of registers that must be saved by the
4006 prologue is not identical to the call-used set. For example, the
4007 register used by the call insn for the return PC is a call-used register,
4008 but must be saved by the prologue. */
4010 n_spills = 0;
4011 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4012 if (TEST_HARD_REG_BIT (used_spill_regs, i))
4014 spill_reg_order[i] = n_spills;
4015 spill_regs[n_spills++] = i;
4016 if (num_eliminable && ! df_regs_ever_live_p (i))
4017 something_changed = 1;
4018 df_set_regs_ever_live (i, true);
4020 else
4021 spill_reg_order[i] = -1;
4023 EXECUTE_IF_SET_IN_REG_SET (&spilled_pseudos, FIRST_PSEUDO_REGISTER, i, rsi)
4024 if (! flag_ira || ! optimize || reg_renumber[i] >= 0)
4026 /* Record the current hard register the pseudo is allocated to
4027 in pseudo_previous_regs so we avoid reallocating it to the
4028 same hard reg in a later pass. */
4029 gcc_assert (reg_renumber[i] >= 0);
4031 SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]);
4032 /* Mark it as no longer having a hard register home. */
4033 reg_renumber[i] = -1;
4034 if (flag_ira && optimize)
4035 /* Inform IRA about the change. */
4036 mark_allocation_change (i);
4037 /* We will need to scan everything again. */
4038 something_changed = 1;
4041 /* Retry global register allocation if possible. */
4042 if (global)
4044 memset (pseudo_forbidden_regs, 0, max_regno * sizeof (HARD_REG_SET));
4045 /* For every insn that needs reloads, set the registers used as spill
4046 regs in pseudo_forbidden_regs for every pseudo live across the
4047 insn. */
4048 for (chain = insns_need_reload; chain; chain = chain->next_need_reload)
4050 EXECUTE_IF_SET_IN_REG_SET
4051 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
4053 IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
4054 chain->used_spill_regs);
4056 EXECUTE_IF_SET_IN_REG_SET
4057 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
4059 gcc_assert (!REGNO_REG_SET_P (&chain->saved, i));
4060 IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
4061 chain->used_spill_regs);
4065 if (! flag_ira || ! optimize)
4067 /* Retry allocating the spilled pseudos. For each reg,
4068 merge the various reg sets that indicate which hard regs
4069 can't be used, and call retry_global_alloc. We change
4070 spill_pseudos here to only contain pseudos that did not
4071 get a new hard register. */
4072 for (i = FIRST_PSEUDO_REGISTER; i < (unsigned)max_regno; i++)
4073 if (reg_old_renumber[i] != reg_renumber[i])
4075 HARD_REG_SET forbidden;
4077 COPY_HARD_REG_SET (forbidden, bad_spill_regs_global);
4078 IOR_HARD_REG_SET (forbidden, pseudo_forbidden_regs[i]);
4079 IOR_HARD_REG_SET (forbidden, pseudo_previous_regs[i]);
4080 retry_global_alloc (i, forbidden);
4081 if (reg_renumber[i] >= 0)
4082 CLEAR_REGNO_REG_SET (&spilled_pseudos, i);
4085 else
4087 /* Retry allocating the pseudos spilled in IRA and the
4088 reload. For each reg, merge the various reg sets that
4089 indicate which hard regs can't be used, and call
4090 reassign_pseudos. */
4091 unsigned int n;
4093 for (n = 0, i = FIRST_PSEUDO_REGISTER; i < (unsigned) max_regno; i++)
4094 if (reg_old_renumber[i] != reg_renumber[i])
4096 if (reg_renumber[i] < 0)
4097 temp_pseudo_reg_arr[n++] = i;
4098 else
4099 CLEAR_REGNO_REG_SET (&spilled_pseudos, i);
4101 if (reassign_pseudos (temp_pseudo_reg_arr, n, bad_spill_regs_global,
4102 pseudo_forbidden_regs, pseudo_previous_regs,
4103 &spilled_pseudos))
4104 something_changed = 1;
4108 /* Fix up the register information in the insn chain.
4109 This involves deleting those of the spilled pseudos which did not get
4110 a new hard register home from the live_{before,after} sets. */
4111 for (chain = reload_insn_chain; chain; chain = chain->next)
4113 HARD_REG_SET used_by_pseudos;
4114 HARD_REG_SET used_by_pseudos2;
4116 if (! flag_ira || ! optimize)
4118 /* Don't do it for IRA because IRA and the reload still can
4119 assign hard registers to the spilled pseudos on next
4120 reload iterations. */
4121 AND_COMPL_REG_SET (&chain->live_throughout, &spilled_pseudos);
4122 AND_COMPL_REG_SET (&chain->dead_or_set, &spilled_pseudos);
4124 /* Mark any unallocated hard regs as available for spills. That
4125 makes inheritance work somewhat better. */
4126 if (chain->need_reload)
4128 REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
4129 REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
4130 IOR_HARD_REG_SET (used_by_pseudos, used_by_pseudos2);
4132 compute_use_by_pseudos (&used_by_pseudos, &chain->live_throughout);
4133 compute_use_by_pseudos (&used_by_pseudos, &chain->dead_or_set);
4134 /* Value of chain->used_spill_regs from previous iteration
4135 may be not included in the value calculated here because
4136 of possible removing caller-saves insns (see function
4137 delete_caller_save_insns. */
4138 COMPL_HARD_REG_SET (chain->used_spill_regs, used_by_pseudos);
4139 AND_HARD_REG_SET (chain->used_spill_regs, used_spill_regs);
4143 CLEAR_REG_SET (&changed_allocation_pseudos);
4144 /* Let alter_reg modify the reg rtx's for the modified pseudos. */
4145 for (i = FIRST_PSEUDO_REGISTER; i < (unsigned)max_regno; i++)
4147 int regno = reg_renumber[i];
4148 if (reg_old_renumber[i] == regno)
4149 continue;
4151 SET_REGNO_REG_SET (&changed_allocation_pseudos, i);
4153 alter_reg (i, reg_old_renumber[i], false);
4154 reg_old_renumber[i] = regno;
4155 if (dump_file)
4157 if (regno == -1)
4158 fprintf (dump_file, " Register %d now on stack.\n\n", i);
4159 else
4160 fprintf (dump_file, " Register %d now in %d.\n\n",
4161 i, reg_renumber[i]);
4165 return something_changed;
4168 /* Find all paradoxical subregs within X and update reg_max_ref_width. */
4170 static void
4171 scan_paradoxical_subregs (rtx x)
4173 int i;
4174 const char *fmt;
4175 enum rtx_code code = GET_CODE (x);
4177 switch (code)
4179 case REG:
4180 case CONST_INT:
4181 case CONST:
4182 case SYMBOL_REF:
4183 case LABEL_REF:
4184 case CONST_DOUBLE:
4185 case CONST_FIXED:
4186 case CONST_VECTOR: /* shouldn't happen, but just in case. */
4187 case CC0:
4188 case PC:
4189 case USE:
4190 case CLOBBER:
4191 return;
4193 case SUBREG:
4194 if (REG_P (SUBREG_REG (x))
4195 && (GET_MODE_SIZE (GET_MODE (x))
4196 > reg_max_ref_width[REGNO (SUBREG_REG (x))]))
4198 reg_max_ref_width[REGNO (SUBREG_REG (x))]
4199 = GET_MODE_SIZE (GET_MODE (x));
4200 mark_home_live_1 (REGNO (SUBREG_REG (x)), GET_MODE (x));
4202 return;
4204 default:
4205 break;
4208 fmt = GET_RTX_FORMAT (code);
4209 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4211 if (fmt[i] == 'e')
4212 scan_paradoxical_subregs (XEXP (x, i));
4213 else if (fmt[i] == 'E')
4215 int j;
4216 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4217 scan_paradoxical_subregs (XVECEXP (x, i, j));
4222 /* A subroutine of reload_as_needed. If INSN has a REG_EH_REGION note,
4223 examine all of the reload insns between PREV and NEXT exclusive, and
4224 annotate all that may trap. */
4226 static void
4227 fixup_eh_region_note (rtx insn, rtx prev, rtx next)
4229 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
4230 unsigned int trap_count;
4231 rtx i;
4233 if (note == NULL)
4234 return;
4236 if (may_trap_p (PATTERN (insn)))
4237 trap_count = 1;
4238 else
4240 remove_note (insn, note);
4241 trap_count = 0;
4244 for (i = NEXT_INSN (prev); i != next; i = NEXT_INSN (i))
4245 if (INSN_P (i) && i != insn && may_trap_p (PATTERN (i)))
4247 trap_count++;
4248 REG_NOTES (i)
4249 = gen_rtx_EXPR_LIST (REG_EH_REGION, XEXP (note, 0), REG_NOTES (i));
4253 /* Reload pseudo-registers into hard regs around each insn as needed.
4254 Additional register load insns are output before the insn that needs it
4255 and perhaps store insns after insns that modify the reloaded pseudo reg.
4257 reg_last_reload_reg and reg_reloaded_contents keep track of
4258 which registers are already available in reload registers.
4259 We update these for the reloads that we perform,
4260 as the insns are scanned. */
4262 static void
4263 reload_as_needed (int live_known)
4265 struct insn_chain *chain;
4266 #if defined (AUTO_INC_DEC)
4267 int i;
4268 #endif
4269 rtx x;
4271 memset (spill_reg_rtx, 0, sizeof spill_reg_rtx);
4272 memset (spill_reg_store, 0, sizeof spill_reg_store);
4273 reg_last_reload_reg = XCNEWVEC (rtx, max_regno);
4274 INIT_REG_SET (&reg_has_output_reload);
4275 CLEAR_HARD_REG_SET (reg_reloaded_valid);
4276 CLEAR_HARD_REG_SET (reg_reloaded_call_part_clobbered);
4278 set_initial_elim_offsets ();
4280 for (chain = reload_insn_chain; chain; chain = chain->next)
4282 rtx prev = 0;
4283 rtx insn = chain->insn;
4284 rtx old_next = NEXT_INSN (insn);
4286 /* If we pass a label, copy the offsets from the label information
4287 into the current offsets of each elimination. */
4288 if (LABEL_P (insn))
4289 set_offsets_for_label (insn);
4291 else if (INSN_P (insn))
4293 regset_head regs_to_forget;
4294 INIT_REG_SET (&regs_to_forget);
4295 note_stores (PATTERN (insn), forget_old_reloads_1, &regs_to_forget);
4297 /* If this is a USE and CLOBBER of a MEM, ensure that any
4298 references to eliminable registers have been removed. */
4300 if ((GET_CODE (PATTERN (insn)) == USE
4301 || GET_CODE (PATTERN (insn)) == CLOBBER)
4302 && MEM_P (XEXP (PATTERN (insn), 0)))
4303 XEXP (XEXP (PATTERN (insn), 0), 0)
4304 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
4305 GET_MODE (XEXP (PATTERN (insn), 0)),
4306 NULL_RTX);
4308 /* If we need to do register elimination processing, do so.
4309 This might delete the insn, in which case we are done. */
4310 if ((num_eliminable || num_eliminable_invariants) && chain->need_elim)
4312 eliminate_regs_in_insn (insn, 1);
4313 if (NOTE_P (insn))
4315 update_eliminable_offsets ();
4316 CLEAR_REG_SET (&regs_to_forget);
4317 continue;
4321 /* If need_elim is nonzero but need_reload is zero, one might think
4322 that we could simply set n_reloads to 0. However, find_reloads
4323 could have done some manipulation of the insn (such as swapping
4324 commutative operands), and these manipulations are lost during
4325 the first pass for every insn that needs register elimination.
4326 So the actions of find_reloads must be redone here. */
4328 if (! chain->need_elim && ! chain->need_reload
4329 && ! chain->need_operand_change)
4330 n_reloads = 0;
4331 /* First find the pseudo regs that must be reloaded for this insn.
4332 This info is returned in the tables reload_... (see reload.h).
4333 Also modify the body of INSN by substituting RELOAD
4334 rtx's for those pseudo regs. */
4335 else
4337 CLEAR_REG_SET (&reg_has_output_reload);
4338 CLEAR_HARD_REG_SET (reg_is_output_reload);
4340 find_reloads (insn, 1, spill_indirect_levels, live_known,
4341 spill_reg_order);
4344 if (n_reloads > 0)
4346 rtx next = NEXT_INSN (insn);
4347 rtx p;
4349 prev = PREV_INSN (insn);
4351 /* Now compute which reload regs to reload them into. Perhaps
4352 reusing reload regs from previous insns, or else output
4353 load insns to reload them. Maybe output store insns too.
4354 Record the choices of reload reg in reload_reg_rtx. */
4355 choose_reload_regs (chain);
4357 /* Merge any reloads that we didn't combine for fear of
4358 increasing the number of spill registers needed but now
4359 discover can be safely merged. */
4360 if (SMALL_REGISTER_CLASSES)
4361 merge_assigned_reloads (insn);
4363 /* Generate the insns to reload operands into or out of
4364 their reload regs. */
4365 emit_reload_insns (chain);
4367 /* Substitute the chosen reload regs from reload_reg_rtx
4368 into the insn's body (or perhaps into the bodies of other
4369 load and store insn that we just made for reloading
4370 and that we moved the structure into). */
4371 subst_reloads (insn);
4373 /* Adjust the exception region notes for loads and stores. */
4374 if (flag_non_call_exceptions && !CALL_P (insn))
4375 fixup_eh_region_note (insn, prev, next);
4377 /* If this was an ASM, make sure that all the reload insns
4378 we have generated are valid. If not, give an error
4379 and delete them. */
4380 if (asm_noperands (PATTERN (insn)) >= 0)
4381 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
4382 if (p != insn && INSN_P (p)
4383 && GET_CODE (PATTERN (p)) != USE
4384 && (recog_memoized (p) < 0
4385 || (extract_insn (p), ! constrain_operands (1))))
4387 error_for_asm (insn,
4388 "%<asm%> operand requires "
4389 "impossible reload");
4390 delete_insn (p);
4394 if (num_eliminable && chain->need_elim)
4395 update_eliminable_offsets ();
4397 /* Any previously reloaded spilled pseudo reg, stored in this insn,
4398 is no longer validly lying around to save a future reload.
4399 Note that this does not detect pseudos that were reloaded
4400 for this insn in order to be stored in
4401 (obeying register constraints). That is correct; such reload
4402 registers ARE still valid. */
4403 forget_marked_reloads (&regs_to_forget);
4404 CLEAR_REG_SET (&regs_to_forget);
4406 /* There may have been CLOBBER insns placed after INSN. So scan
4407 between INSN and NEXT and use them to forget old reloads. */
4408 for (x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
4409 if (NONJUMP_INSN_P (x) && GET_CODE (PATTERN (x)) == CLOBBER)
4410 note_stores (PATTERN (x), forget_old_reloads_1, NULL);
4412 #ifdef AUTO_INC_DEC
4413 /* Likewise for regs altered by auto-increment in this insn.
4414 REG_INC notes have been changed by reloading:
4415 find_reloads_address_1 records substitutions for them,
4416 which have been performed by subst_reloads above. */
4417 for (i = n_reloads - 1; i >= 0; i--)
4419 rtx in_reg = rld[i].in_reg;
4420 if (in_reg)
4422 enum rtx_code code = GET_CODE (in_reg);
4423 /* PRE_INC / PRE_DEC will have the reload register ending up
4424 with the same value as the stack slot, but that doesn't
4425 hold true for POST_INC / POST_DEC. Either we have to
4426 convert the memory access to a true POST_INC / POST_DEC,
4427 or we can't use the reload register for inheritance. */
4428 if ((code == POST_INC || code == POST_DEC)
4429 && TEST_HARD_REG_BIT (reg_reloaded_valid,
4430 REGNO (rld[i].reg_rtx))
4431 /* Make sure it is the inc/dec pseudo, and not
4432 some other (e.g. output operand) pseudo. */
4433 && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4434 == REGNO (XEXP (in_reg, 0))))
4437 rtx reload_reg = rld[i].reg_rtx;
4438 enum machine_mode mode = GET_MODE (reload_reg);
4439 int n = 0;
4440 rtx p;
4442 for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p))
4444 /* We really want to ignore REG_INC notes here, so
4445 use PATTERN (p) as argument to reg_set_p . */
4446 if (reg_set_p (reload_reg, PATTERN (p)))
4447 break;
4448 n = count_occurrences (PATTERN (p), reload_reg, 0);
4449 if (! n)
4450 continue;
4451 if (n == 1)
4453 n = validate_replace_rtx (reload_reg,
4454 gen_rtx_fmt_e (code,
4455 mode,
4456 reload_reg),
4459 /* We must also verify that the constraints
4460 are met after the replacement. */
4461 extract_insn (p);
4462 if (n)
4463 n = constrain_operands (1);
4464 else
4465 break;
4467 /* If the constraints were not met, then
4468 undo the replacement. */
4469 if (!n)
4471 validate_replace_rtx (gen_rtx_fmt_e (code,
4472 mode,
4473 reload_reg),
4474 reload_reg, p);
4475 break;
4479 break;
4481 if (n == 1)
4483 REG_NOTES (p)
4484 = gen_rtx_EXPR_LIST (REG_INC, reload_reg,
4485 REG_NOTES (p));
4486 /* Mark this as having an output reload so that the
4487 REG_INC processing code below won't invalidate
4488 the reload for inheritance. */
4489 SET_HARD_REG_BIT (reg_is_output_reload,
4490 REGNO (reload_reg));
4491 SET_REGNO_REG_SET (&reg_has_output_reload,
4492 REGNO (XEXP (in_reg, 0)));
4494 else
4495 forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX,
4496 NULL);
4498 else if ((code == PRE_INC || code == PRE_DEC)
4499 && TEST_HARD_REG_BIT (reg_reloaded_valid,
4500 REGNO (rld[i].reg_rtx))
4501 /* Make sure it is the inc/dec pseudo, and not
4502 some other (e.g. output operand) pseudo. */
4503 && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4504 == REGNO (XEXP (in_reg, 0))))
4506 SET_HARD_REG_BIT (reg_is_output_reload,
4507 REGNO (rld[i].reg_rtx));
4508 SET_REGNO_REG_SET (&reg_has_output_reload,
4509 REGNO (XEXP (in_reg, 0)));
4513 /* If a pseudo that got a hard register is auto-incremented,
4514 we must purge records of copying it into pseudos without
4515 hard registers. */
4516 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
4517 if (REG_NOTE_KIND (x) == REG_INC)
4519 /* See if this pseudo reg was reloaded in this insn.
4520 If so, its last-reload info is still valid
4521 because it is based on this insn's reload. */
4522 for (i = 0; i < n_reloads; i++)
4523 if (rld[i].out == XEXP (x, 0))
4524 break;
4526 if (i == n_reloads)
4527 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX, NULL);
4529 #endif
4531 /* A reload reg's contents are unknown after a label. */
4532 if (LABEL_P (insn))
4533 CLEAR_HARD_REG_SET (reg_reloaded_valid);
4535 /* Don't assume a reload reg is still good after a call insn
4536 if it is a call-used reg, or if it contains a value that will
4537 be partially clobbered by the call. */
4538 else if (CALL_P (insn))
4540 AND_COMPL_HARD_REG_SET (reg_reloaded_valid, call_used_reg_set);
4541 AND_COMPL_HARD_REG_SET (reg_reloaded_valid, reg_reloaded_call_part_clobbered);
4545 /* Clean up. */
4546 free (reg_last_reload_reg);
4547 CLEAR_REG_SET (&reg_has_output_reload);
4550 /* Discard all record of any value reloaded from X,
4551 or reloaded in X from someplace else;
4552 unless X is an output reload reg of the current insn.
4554 X may be a hard reg (the reload reg)
4555 or it may be a pseudo reg that was reloaded from.
4557 When DATA is non-NULL just mark the registers in regset
4558 to be forgotten later. */
4560 static void
4561 forget_old_reloads_1 (rtx x, const_rtx ignored ATTRIBUTE_UNUSED,
4562 void *data)
4564 unsigned int regno;
4565 unsigned int nr;
4566 regset regs = (regset) data;
4568 /* note_stores does give us subregs of hard regs,
4569 subreg_regno_offset requires a hard reg. */
4570 while (GET_CODE (x) == SUBREG)
4572 /* We ignore the subreg offset when calculating the regno,
4573 because we are using the entire underlying hard register
4574 below. */
4575 x = SUBREG_REG (x);
4578 if (!REG_P (x))
4579 return;
4581 regno = REGNO (x);
4583 if (regno >= FIRST_PSEUDO_REGISTER)
4584 nr = 1;
4585 else
4587 unsigned int i;
4589 nr = hard_regno_nregs[regno][GET_MODE (x)];
4590 /* Storing into a spilled-reg invalidates its contents.
4591 This can happen if a block-local pseudo is allocated to that reg
4592 and it wasn't spilled because this block's total need is 0.
4593 Then some insn might have an optional reload and use this reg. */
4594 if (!regs)
4595 for (i = 0; i < nr; i++)
4596 /* But don't do this if the reg actually serves as an output
4597 reload reg in the current instruction. */
4598 if (n_reloads == 0
4599 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
4601 CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
4602 spill_reg_store[regno + i] = 0;
4606 if (regs)
4607 while (nr-- > 0)
4608 SET_REGNO_REG_SET (regs, regno + nr);
4609 else
4611 /* Since value of X has changed,
4612 forget any value previously copied from it. */
4614 while (nr-- > 0)
4615 /* But don't forget a copy if this is the output reload
4616 that establishes the copy's validity. */
4617 if (n_reloads == 0
4618 || !REGNO_REG_SET_P (&reg_has_output_reload, regno + nr))
4619 reg_last_reload_reg[regno + nr] = 0;
4623 /* Forget the reloads marked in regset by previous function. */
4624 static void
4625 forget_marked_reloads (regset regs)
4627 unsigned int reg;
4628 reg_set_iterator rsi;
4629 EXECUTE_IF_SET_IN_REG_SET (regs, 0, reg, rsi)
4631 if (reg < FIRST_PSEUDO_REGISTER
4632 /* But don't do this if the reg actually serves as an output
4633 reload reg in the current instruction. */
4634 && (n_reloads == 0
4635 || ! TEST_HARD_REG_BIT (reg_is_output_reload, reg)))
4637 CLEAR_HARD_REG_BIT (reg_reloaded_valid, reg);
4638 spill_reg_store[reg] = 0;
4640 if (n_reloads == 0
4641 || !REGNO_REG_SET_P (&reg_has_output_reload, reg))
4642 reg_last_reload_reg[reg] = 0;
4646 /* The following HARD_REG_SETs indicate when each hard register is
4647 used for a reload of various parts of the current insn. */
4649 /* If reg is unavailable for all reloads. */
4650 static HARD_REG_SET reload_reg_unavailable;
4651 /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4652 static HARD_REG_SET reload_reg_used;
4653 /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4654 static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4655 /* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I. */
4656 static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
4657 /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4658 static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4659 /* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I. */
4660 static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
4661 /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4662 static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4663 /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4664 static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4665 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4666 static HARD_REG_SET reload_reg_used_in_op_addr;
4667 /* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4668 static HARD_REG_SET reload_reg_used_in_op_addr_reload;
4669 /* If reg is in use for a RELOAD_FOR_INSN reload. */
4670 static HARD_REG_SET reload_reg_used_in_insn;
4671 /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4672 static HARD_REG_SET reload_reg_used_in_other_addr;
4674 /* If reg is in use as a reload reg for any sort of reload. */
4675 static HARD_REG_SET reload_reg_used_at_all;
4677 /* If reg is use as an inherited reload. We just mark the first register
4678 in the group. */
4679 static HARD_REG_SET reload_reg_used_for_inherit;
4681 /* Records which hard regs are used in any way, either as explicit use or
4682 by being allocated to a pseudo during any point of the current insn. */
4683 static HARD_REG_SET reg_used_in_insn;
4685 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4686 TYPE. MODE is used to indicate how many consecutive regs are
4687 actually used. */
4689 static void
4690 mark_reload_reg_in_use (unsigned int regno, int opnum, enum reload_type type,
4691 enum machine_mode mode)
4693 unsigned int nregs = hard_regno_nregs[regno][mode];
4694 unsigned int i;
4696 for (i = regno; i < nregs + regno; i++)
4698 switch (type)
4700 case RELOAD_OTHER:
4701 SET_HARD_REG_BIT (reload_reg_used, i);
4702 break;
4704 case RELOAD_FOR_INPUT_ADDRESS:
4705 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4706 break;
4708 case RELOAD_FOR_INPADDR_ADDRESS:
4709 SET_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4710 break;
4712 case RELOAD_FOR_OUTPUT_ADDRESS:
4713 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4714 break;
4716 case RELOAD_FOR_OUTADDR_ADDRESS:
4717 SET_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4718 break;
4720 case RELOAD_FOR_OPERAND_ADDRESS:
4721 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4722 break;
4724 case RELOAD_FOR_OPADDR_ADDR:
4725 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4726 break;
4728 case RELOAD_FOR_OTHER_ADDRESS:
4729 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4730 break;
4732 case RELOAD_FOR_INPUT:
4733 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4734 break;
4736 case RELOAD_FOR_OUTPUT:
4737 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4738 break;
4740 case RELOAD_FOR_INSN:
4741 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
4742 break;
4745 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4749 /* Similarly, but show REGNO is no longer in use for a reload. */
4751 static void
4752 clear_reload_reg_in_use (unsigned int regno, int opnum,
4753 enum reload_type type, enum machine_mode mode)
4755 unsigned int nregs = hard_regno_nregs[regno][mode];
4756 unsigned int start_regno, end_regno, r;
4757 int i;
4758 /* A complication is that for some reload types, inheritance might
4759 allow multiple reloads of the same types to share a reload register.
4760 We set check_opnum if we have to check only reloads with the same
4761 operand number, and check_any if we have to check all reloads. */
4762 int check_opnum = 0;
4763 int check_any = 0;
4764 HARD_REG_SET *used_in_set;
4766 switch (type)
4768 case RELOAD_OTHER:
4769 used_in_set = &reload_reg_used;
4770 break;
4772 case RELOAD_FOR_INPUT_ADDRESS:
4773 used_in_set = &reload_reg_used_in_input_addr[opnum];
4774 break;
4776 case RELOAD_FOR_INPADDR_ADDRESS:
4777 check_opnum = 1;
4778 used_in_set = &reload_reg_used_in_inpaddr_addr[opnum];
4779 break;
4781 case RELOAD_FOR_OUTPUT_ADDRESS:
4782 used_in_set = &reload_reg_used_in_output_addr[opnum];
4783 break;
4785 case RELOAD_FOR_OUTADDR_ADDRESS:
4786 check_opnum = 1;
4787 used_in_set = &reload_reg_used_in_outaddr_addr[opnum];
4788 break;
4790 case RELOAD_FOR_OPERAND_ADDRESS:
4791 used_in_set = &reload_reg_used_in_op_addr;
4792 break;
4794 case RELOAD_FOR_OPADDR_ADDR:
4795 check_any = 1;
4796 used_in_set = &reload_reg_used_in_op_addr_reload;
4797 break;
4799 case RELOAD_FOR_OTHER_ADDRESS:
4800 used_in_set = &reload_reg_used_in_other_addr;
4801 check_any = 1;
4802 break;
4804 case RELOAD_FOR_INPUT:
4805 used_in_set = &reload_reg_used_in_input[opnum];
4806 break;
4808 case RELOAD_FOR_OUTPUT:
4809 used_in_set = &reload_reg_used_in_output[opnum];
4810 break;
4812 case RELOAD_FOR_INSN:
4813 used_in_set = &reload_reg_used_in_insn;
4814 break;
4815 default:
4816 gcc_unreachable ();
4818 /* We resolve conflicts with remaining reloads of the same type by
4819 excluding the intervals of reload registers by them from the
4820 interval of freed reload registers. Since we only keep track of
4821 one set of interval bounds, we might have to exclude somewhat
4822 more than what would be necessary if we used a HARD_REG_SET here.
4823 But this should only happen very infrequently, so there should
4824 be no reason to worry about it. */
4826 start_regno = regno;
4827 end_regno = regno + nregs;
4828 if (check_opnum || check_any)
4830 for (i = n_reloads - 1; i >= 0; i--)
4832 if (rld[i].when_needed == type
4833 && (check_any || rld[i].opnum == opnum)
4834 && rld[i].reg_rtx)
4836 unsigned int conflict_start = true_regnum (rld[i].reg_rtx);
4837 unsigned int conflict_end
4838 = end_hard_regno (rld[i].mode, conflict_start);
4840 /* If there is an overlap with the first to-be-freed register,
4841 adjust the interval start. */
4842 if (conflict_start <= start_regno && conflict_end > start_regno)
4843 start_regno = conflict_end;
4844 /* Otherwise, if there is a conflict with one of the other
4845 to-be-freed registers, adjust the interval end. */
4846 if (conflict_start > start_regno && conflict_start < end_regno)
4847 end_regno = conflict_start;
4852 for (r = start_regno; r < end_regno; r++)
4853 CLEAR_HARD_REG_BIT (*used_in_set, r);
4856 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
4857 specified by OPNUM and TYPE. */
4859 static int
4860 reload_reg_free_p (unsigned int regno, int opnum, enum reload_type type)
4862 int i;
4864 /* In use for a RELOAD_OTHER means it's not available for anything. */
4865 if (TEST_HARD_REG_BIT (reload_reg_used, regno)
4866 || TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
4867 return 0;
4869 switch (type)
4871 case RELOAD_OTHER:
4872 /* In use for anything means we can't use it for RELOAD_OTHER. */
4873 if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4874 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4875 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
4876 || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4877 return 0;
4879 for (i = 0; i < reload_n_operands; i++)
4880 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4881 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4882 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4883 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4884 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4885 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4886 return 0;
4888 return 1;
4890 case RELOAD_FOR_INPUT:
4891 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4892 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4893 return 0;
4895 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4896 return 0;
4898 /* If it is used for some other input, can't use it. */
4899 for (i = 0; i < reload_n_operands; i++)
4900 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4901 return 0;
4903 /* If it is used in a later operand's address, can't use it. */
4904 for (i = opnum + 1; i < reload_n_operands; i++)
4905 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4906 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4907 return 0;
4909 return 1;
4911 case RELOAD_FOR_INPUT_ADDRESS:
4912 /* Can't use a register if it is used for an input address for this
4913 operand or used as an input in an earlier one. */
4914 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
4915 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4916 return 0;
4918 for (i = 0; i < opnum; i++)
4919 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4920 return 0;
4922 return 1;
4924 case RELOAD_FOR_INPADDR_ADDRESS:
4925 /* Can't use a register if it is used for an input address
4926 for this operand or used as an input in an earlier
4927 one. */
4928 if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4929 return 0;
4931 for (i = 0; i < opnum; i++)
4932 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4933 return 0;
4935 return 1;
4937 case RELOAD_FOR_OUTPUT_ADDRESS:
4938 /* Can't use a register if it is used for an output address for this
4939 operand or used as an output in this or a later operand. Note
4940 that multiple output operands are emitted in reverse order, so
4941 the conflicting ones are those with lower indices. */
4942 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4943 return 0;
4945 for (i = 0; i <= opnum; i++)
4946 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4947 return 0;
4949 return 1;
4951 case RELOAD_FOR_OUTADDR_ADDRESS:
4952 /* Can't use a register if it is used for an output address
4953 for this operand or used as an output in this or a
4954 later operand. Note that multiple output operands are
4955 emitted in reverse order, so the conflicting ones are
4956 those with lower indices. */
4957 if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4958 return 0;
4960 for (i = 0; i <= opnum; i++)
4961 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4962 return 0;
4964 return 1;
4966 case RELOAD_FOR_OPERAND_ADDRESS:
4967 for (i = 0; i < reload_n_operands; i++)
4968 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4969 return 0;
4971 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4972 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4974 case RELOAD_FOR_OPADDR_ADDR:
4975 for (i = 0; i < reload_n_operands; i++)
4976 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4977 return 0;
4979 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
4981 case RELOAD_FOR_OUTPUT:
4982 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4983 outputs, or an operand address for this or an earlier output.
4984 Note that multiple output operands are emitted in reverse order,
4985 so the conflicting ones are those with higher indices. */
4986 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4987 return 0;
4989 for (i = 0; i < reload_n_operands; i++)
4990 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4991 return 0;
4993 for (i = opnum; i < reload_n_operands; i++)
4994 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4995 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4996 return 0;
4998 return 1;
5000 case RELOAD_FOR_INSN:
5001 for (i = 0; i < reload_n_operands; i++)
5002 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
5003 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5004 return 0;
5006 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5007 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
5009 case RELOAD_FOR_OTHER_ADDRESS:
5010 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
5012 default:
5013 gcc_unreachable ();
5017 /* Return 1 if the value in reload reg REGNO, as used by a reload
5018 needed for the part of the insn specified by OPNUM and TYPE,
5019 is still available in REGNO at the end of the insn.
5021 We can assume that the reload reg was already tested for availability
5022 at the time it is needed, and we should not check this again,
5023 in case the reg has already been marked in use. */
5025 static int
5026 reload_reg_reaches_end_p (unsigned int regno, int opnum, enum reload_type type)
5028 int i;
5030 switch (type)
5032 case RELOAD_OTHER:
5033 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
5034 its value must reach the end. */
5035 return 1;
5037 /* If this use is for part of the insn,
5038 its value reaches if no subsequent part uses the same register.
5039 Just like the above function, don't try to do this with lots
5040 of fallthroughs. */
5042 case RELOAD_FOR_OTHER_ADDRESS:
5043 /* Here we check for everything else, since these don't conflict
5044 with anything else and everything comes later. */
5046 for (i = 0; i < reload_n_operands; i++)
5047 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5048 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5049 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
5050 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5051 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
5052 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5053 return 0;
5055 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5056 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
5057 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5058 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
5060 case RELOAD_FOR_INPUT_ADDRESS:
5061 case RELOAD_FOR_INPADDR_ADDRESS:
5062 /* Similar, except that we check only for this and subsequent inputs
5063 and the address of only subsequent inputs and we do not need
5064 to check for RELOAD_OTHER objects since they are known not to
5065 conflict. */
5067 for (i = opnum; i < reload_n_operands; i++)
5068 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5069 return 0;
5071 for (i = opnum + 1; i < reload_n_operands; i++)
5072 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5073 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
5074 return 0;
5076 for (i = 0; i < reload_n_operands; i++)
5077 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5078 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5079 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5080 return 0;
5082 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
5083 return 0;
5085 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5086 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5087 && !TEST_HARD_REG_BIT (reload_reg_used, regno));
5089 case RELOAD_FOR_INPUT:
5090 /* Similar to input address, except we start at the next operand for
5091 both input and input address and we do not check for
5092 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
5093 would conflict. */
5095 for (i = opnum + 1; i < reload_n_operands; i++)
5096 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5097 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
5098 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5099 return 0;
5101 /* ... fall through ... */
5103 case RELOAD_FOR_OPERAND_ADDRESS:
5104 /* Check outputs and their addresses. */
5106 for (i = 0; i < reload_n_operands; i++)
5107 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5108 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5109 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5110 return 0;
5112 return (!TEST_HARD_REG_BIT (reload_reg_used, regno));
5114 case RELOAD_FOR_OPADDR_ADDR:
5115 for (i = 0; i < reload_n_operands; i++)
5116 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5117 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5118 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5119 return 0;
5121 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5122 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5123 && !TEST_HARD_REG_BIT (reload_reg_used, regno));
5125 case RELOAD_FOR_INSN:
5126 /* These conflict with other outputs with RELOAD_OTHER. So
5127 we need only check for output addresses. */
5129 opnum = reload_n_operands;
5131 /* ... fall through ... */
5133 case RELOAD_FOR_OUTPUT:
5134 case RELOAD_FOR_OUTPUT_ADDRESS:
5135 case RELOAD_FOR_OUTADDR_ADDRESS:
5136 /* We already know these can't conflict with a later output. So the
5137 only thing to check are later output addresses.
5138 Note that multiple output operands are emitted in reverse order,
5139 so the conflicting ones are those with lower indices. */
5140 for (i = 0; i < opnum; i++)
5141 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5142 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
5143 return 0;
5145 return 1;
5147 default:
5148 gcc_unreachable ();
5152 /* Like reload_reg_reaches_end_p, but check that the condition holds for
5153 every register in the range [REGNO, REGNO + NREGS). */
5155 static bool
5156 reload_regs_reach_end_p (unsigned int regno, int nregs,
5157 int opnum, enum reload_type type)
5159 int i;
5161 for (i = 0; i < nregs; i++)
5162 if (!reload_reg_reaches_end_p (regno + i, opnum, type))
5163 return false;
5164 return true;
5168 /* Returns whether R1 and R2 are uniquely chained: the value of one
5169 is used by the other, and that value is not used by any other
5170 reload for this insn. This is used to partially undo the decision
5171 made in find_reloads when in the case of multiple
5172 RELOAD_FOR_OPERAND_ADDRESS reloads it converts all
5173 RELOAD_FOR_OPADDR_ADDR reloads into RELOAD_FOR_OPERAND_ADDRESS
5174 reloads. This code tries to avoid the conflict created by that
5175 change. It might be cleaner to explicitly keep track of which
5176 RELOAD_FOR_OPADDR_ADDR reload is associated with which
5177 RELOAD_FOR_OPERAND_ADDRESS reload, rather than to try to detect
5178 this after the fact. */
5179 static bool
5180 reloads_unique_chain_p (int r1, int r2)
5182 int i;
5184 /* We only check input reloads. */
5185 if (! rld[r1].in || ! rld[r2].in)
5186 return false;
5188 /* Avoid anything with output reloads. */
5189 if (rld[r1].out || rld[r2].out)
5190 return false;
5192 /* "chained" means one reload is a component of the other reload,
5193 not the same as the other reload. */
5194 if (rld[r1].opnum != rld[r2].opnum
5195 || rtx_equal_p (rld[r1].in, rld[r2].in)
5196 || rld[r1].optional || rld[r2].optional
5197 || ! (reg_mentioned_p (rld[r1].in, rld[r2].in)
5198 || reg_mentioned_p (rld[r2].in, rld[r1].in)))
5199 return false;
5201 for (i = 0; i < n_reloads; i ++)
5202 /* Look for input reloads that aren't our two */
5203 if (i != r1 && i != r2 && rld[i].in)
5205 /* If our reload is mentioned at all, it isn't a simple chain. */
5206 if (reg_mentioned_p (rld[r1].in, rld[i].in))
5207 return false;
5209 return true;
5213 /* The recursive function change all occurrences of WHAT in *WHERE
5214 onto REPL. */
5215 static void
5216 substitute (rtx *where, const_rtx what, rtx repl)
5218 const char *fmt;
5219 int i;
5220 enum rtx_code code;
5222 if (*where == 0)
5223 return;
5225 if (*where == what || rtx_equal_p (*where, what))
5227 *where = repl;
5228 return;
5231 code = GET_CODE (*where);
5232 fmt = GET_RTX_FORMAT (code);
5233 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5235 if (fmt[i] == 'E')
5237 int j;
5239 for (j = XVECLEN (*where, i) - 1; j >= 0; j--)
5240 substitute (&XVECEXP (*where, i, j), what, repl);
5242 else if (fmt[i] == 'e')
5243 substitute (&XEXP (*where, i), what, repl);
5247 /* The function returns TRUE if chain of reload R1 and R2 (in any
5248 order) can be evaluated without usage of intermediate register for
5249 the reload containing another reload. It is important to see
5250 gen_reload to understand what the function is trying to do. As an
5251 example, let us have reload chain
5253 r2: const
5254 r1: <something> + const
5256 and reload R2 got reload reg HR. The function returns true if
5257 there is a correct insn HR = HR + <something>. Otherwise,
5258 gen_reload will use intermediate register (and this is the reload
5259 reg for R1) to reload <something>.
5261 We need this function to find a conflict for chain reloads. In our
5262 example, if HR = HR + <something> is incorrect insn, then we cannot
5263 use HR as a reload register for R2. If we do use it then we get a
5264 wrong code:
5266 HR = const
5267 HR = <something>
5268 HR = HR + HR
5271 static bool
5272 gen_reload_chain_without_interm_reg_p (int r1, int r2)
5274 bool result;
5275 int regno, n, code;
5276 rtx out, in, tem, insn;
5277 rtx last = get_last_insn ();
5279 /* Make r2 a component of r1. */
5280 if (reg_mentioned_p (rld[r1].in, rld[r2].in))
5282 n = r1;
5283 r1 = r2;
5284 r2 = n;
5286 gcc_assert (reg_mentioned_p (rld[r2].in, rld[r1].in));
5287 regno = rld[r1].regno >= 0 ? rld[r1].regno : rld[r2].regno;
5288 gcc_assert (regno >= 0);
5289 out = gen_rtx_REG (rld[r1].mode, regno);
5290 in = copy_rtx (rld[r1].in);
5291 substitute (&in, rld[r2].in, gen_rtx_REG (rld[r2].mode, regno));
5293 /* If IN is a paradoxical SUBREG, remove it and try to put the
5294 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
5295 if (GET_CODE (in) == SUBREG
5296 && (GET_MODE_SIZE (GET_MODE (in))
5297 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
5298 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
5299 in = SUBREG_REG (in), out = tem;
5301 if (GET_CODE (in) == PLUS
5302 && (REG_P (XEXP (in, 0))
5303 || GET_CODE (XEXP (in, 0)) == SUBREG
5304 || MEM_P (XEXP (in, 0)))
5305 && (REG_P (XEXP (in, 1))
5306 || GET_CODE (XEXP (in, 1)) == SUBREG
5307 || CONSTANT_P (XEXP (in, 1))
5308 || MEM_P (XEXP (in, 1))))
5310 insn = emit_insn (gen_rtx_SET (VOIDmode, out, in));
5311 code = recog_memoized (insn);
5312 result = false;
5314 if (code >= 0)
5316 extract_insn (insn);
5317 /* We want constrain operands to treat this insn strictly in
5318 its validity determination, i.e., the way it would after
5319 reload has completed. */
5320 result = constrain_operands (1);
5323 delete_insns_since (last);
5324 return result;
5327 /* It looks like other cases in gen_reload are not possible for
5328 chain reloads or do need an intermediate hard registers. */
5329 return true;
5332 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
5333 Return 0 otherwise.
5335 This function uses the same algorithm as reload_reg_free_p above. */
5337 static int
5338 reloads_conflict (int r1, int r2)
5340 enum reload_type r1_type = rld[r1].when_needed;
5341 enum reload_type r2_type = rld[r2].when_needed;
5342 int r1_opnum = rld[r1].opnum;
5343 int r2_opnum = rld[r2].opnum;
5345 /* RELOAD_OTHER conflicts with everything. */
5346 if (r2_type == RELOAD_OTHER)
5347 return 1;
5349 /* Otherwise, check conflicts differently for each type. */
5351 switch (r1_type)
5353 case RELOAD_FOR_INPUT:
5354 return (r2_type == RELOAD_FOR_INSN
5355 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
5356 || r2_type == RELOAD_FOR_OPADDR_ADDR
5357 || r2_type == RELOAD_FOR_INPUT
5358 || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
5359 || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
5360 && r2_opnum > r1_opnum));
5362 case RELOAD_FOR_INPUT_ADDRESS:
5363 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
5364 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5366 case RELOAD_FOR_INPADDR_ADDRESS:
5367 return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
5368 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5370 case RELOAD_FOR_OUTPUT_ADDRESS:
5371 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
5372 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
5374 case RELOAD_FOR_OUTADDR_ADDRESS:
5375 return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
5376 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
5378 case RELOAD_FOR_OPERAND_ADDRESS:
5379 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
5380 || (r2_type == RELOAD_FOR_OPERAND_ADDRESS
5381 && (!reloads_unique_chain_p (r1, r2)
5382 || !gen_reload_chain_without_interm_reg_p (r1, r2))));
5384 case RELOAD_FOR_OPADDR_ADDR:
5385 return (r2_type == RELOAD_FOR_INPUT
5386 || r2_type == RELOAD_FOR_OPADDR_ADDR);
5388 case RELOAD_FOR_OUTPUT:
5389 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
5390 || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
5391 || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
5392 && r2_opnum >= r1_opnum));
5394 case RELOAD_FOR_INSN:
5395 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
5396 || r2_type == RELOAD_FOR_INSN
5397 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
5399 case RELOAD_FOR_OTHER_ADDRESS:
5400 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
5402 case RELOAD_OTHER:
5403 return 1;
5405 default:
5406 gcc_unreachable ();
5410 /* Indexed by reload number, 1 if incoming value
5411 inherited from previous insns. */
5412 static char reload_inherited[MAX_RELOADS];
5414 /* For an inherited reload, this is the insn the reload was inherited from,
5415 if we know it. Otherwise, this is 0. */
5416 static rtx reload_inheritance_insn[MAX_RELOADS];
5418 /* If nonzero, this is a place to get the value of the reload,
5419 rather than using reload_in. */
5420 static rtx reload_override_in[MAX_RELOADS];
5422 /* For each reload, the hard register number of the register used,
5423 or -1 if we did not need a register for this reload. */
5424 static int reload_spill_index[MAX_RELOADS];
5426 /* Index X is the value of rld[X].reg_rtx, adjusted for the input mode. */
5427 static rtx reload_reg_rtx_for_input[MAX_RELOADS];
5429 /* Index X is the value of rld[X].reg_rtx, adjusted for the output mode. */
5430 static rtx reload_reg_rtx_for_output[MAX_RELOADS];
5432 /* Subroutine of free_for_value_p, used to check a single register.
5433 START_REGNO is the starting regno of the full reload register
5434 (possibly comprising multiple hard registers) that we are considering. */
5436 static int
5437 reload_reg_free_for_value_p (int start_regno, int regno, int opnum,
5438 enum reload_type type, rtx value, rtx out,
5439 int reloadnum, int ignore_address_reloads)
5441 int time1;
5442 /* Set if we see an input reload that must not share its reload register
5443 with any new earlyclobber, but might otherwise share the reload
5444 register with an output or input-output reload. */
5445 int check_earlyclobber = 0;
5446 int i;
5447 int copy = 0;
5449 if (TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
5450 return 0;
5452 if (out == const0_rtx)
5454 copy = 1;
5455 out = NULL_RTX;
5458 /* We use some pseudo 'time' value to check if the lifetimes of the
5459 new register use would overlap with the one of a previous reload
5460 that is not read-only or uses a different value.
5461 The 'time' used doesn't have to be linear in any shape or form, just
5462 monotonic.
5463 Some reload types use different 'buckets' for each operand.
5464 So there are MAX_RECOG_OPERANDS different time values for each
5465 such reload type.
5466 We compute TIME1 as the time when the register for the prospective
5467 new reload ceases to be live, and TIME2 for each existing
5468 reload as the time when that the reload register of that reload
5469 becomes live.
5470 Where there is little to be gained by exact lifetime calculations,
5471 we just make conservative assumptions, i.e. a longer lifetime;
5472 this is done in the 'default:' cases. */
5473 switch (type)
5475 case RELOAD_FOR_OTHER_ADDRESS:
5476 /* RELOAD_FOR_OTHER_ADDRESS conflicts with RELOAD_OTHER reloads. */
5477 time1 = copy ? 0 : 1;
5478 break;
5479 case RELOAD_OTHER:
5480 time1 = copy ? 1 : MAX_RECOG_OPERANDS * 5 + 5;
5481 break;
5482 /* For each input, we may have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
5483 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT. By adding 0 / 1 / 2 ,
5484 respectively, to the time values for these, we get distinct time
5485 values. To get distinct time values for each operand, we have to
5486 multiply opnum by at least three. We round that up to four because
5487 multiply by four is often cheaper. */
5488 case RELOAD_FOR_INPADDR_ADDRESS:
5489 time1 = opnum * 4 + 2;
5490 break;
5491 case RELOAD_FOR_INPUT_ADDRESS:
5492 time1 = opnum * 4 + 3;
5493 break;
5494 case RELOAD_FOR_INPUT:
5495 /* All RELOAD_FOR_INPUT reloads remain live till the instruction
5496 executes (inclusive). */
5497 time1 = copy ? opnum * 4 + 4 : MAX_RECOG_OPERANDS * 4 + 3;
5498 break;
5499 case RELOAD_FOR_OPADDR_ADDR:
5500 /* opnum * 4 + 4
5501 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
5502 time1 = MAX_RECOG_OPERANDS * 4 + 1;
5503 break;
5504 case RELOAD_FOR_OPERAND_ADDRESS:
5505 /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
5506 is executed. */
5507 time1 = copy ? MAX_RECOG_OPERANDS * 4 + 2 : MAX_RECOG_OPERANDS * 4 + 3;
5508 break;
5509 case RELOAD_FOR_OUTADDR_ADDRESS:
5510 time1 = MAX_RECOG_OPERANDS * 4 + 4 + opnum;
5511 break;
5512 case RELOAD_FOR_OUTPUT_ADDRESS:
5513 time1 = MAX_RECOG_OPERANDS * 4 + 5 + opnum;
5514 break;
5515 default:
5516 time1 = MAX_RECOG_OPERANDS * 5 + 5;
5519 for (i = 0; i < n_reloads; i++)
5521 rtx reg = rld[i].reg_rtx;
5522 if (reg && REG_P (reg)
5523 && ((unsigned) regno - true_regnum (reg)
5524 <= hard_regno_nregs[REGNO (reg)][GET_MODE (reg)] - (unsigned) 1)
5525 && i != reloadnum)
5527 rtx other_input = rld[i].in;
5529 /* If the other reload loads the same input value, that
5530 will not cause a conflict only if it's loading it into
5531 the same register. */
5532 if (true_regnum (reg) != start_regno)
5533 other_input = NULL_RTX;
5534 if (! other_input || ! rtx_equal_p (other_input, value)
5535 || rld[i].out || out)
5537 int time2;
5538 switch (rld[i].when_needed)
5540 case RELOAD_FOR_OTHER_ADDRESS:
5541 time2 = 0;
5542 break;
5543 case RELOAD_FOR_INPADDR_ADDRESS:
5544 /* find_reloads makes sure that a
5545 RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is only used
5546 by at most one - the first -
5547 RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS . If the
5548 address reload is inherited, the address address reload
5549 goes away, so we can ignore this conflict. */
5550 if (type == RELOAD_FOR_INPUT_ADDRESS && reloadnum == i + 1
5551 && ignore_address_reloads
5552 /* Unless the RELOAD_FOR_INPUT is an auto_inc expression.
5553 Then the address address is still needed to store
5554 back the new address. */
5555 && ! rld[reloadnum].out)
5556 continue;
5557 /* Likewise, if a RELOAD_FOR_INPUT can inherit a value, its
5558 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS
5559 reloads go away. */
5560 if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
5561 && ignore_address_reloads
5562 /* Unless we are reloading an auto_inc expression. */
5563 && ! rld[reloadnum].out)
5564 continue;
5565 time2 = rld[i].opnum * 4 + 2;
5566 break;
5567 case RELOAD_FOR_INPUT_ADDRESS:
5568 if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
5569 && ignore_address_reloads
5570 && ! rld[reloadnum].out)
5571 continue;
5572 time2 = rld[i].opnum * 4 + 3;
5573 break;
5574 case RELOAD_FOR_INPUT:
5575 time2 = rld[i].opnum * 4 + 4;
5576 check_earlyclobber = 1;
5577 break;
5578 /* rld[i].opnum * 4 + 4 <= (MAX_RECOG_OPERAND - 1) * 4 + 4
5579 == MAX_RECOG_OPERAND * 4 */
5580 case RELOAD_FOR_OPADDR_ADDR:
5581 if (type == RELOAD_FOR_OPERAND_ADDRESS && reloadnum == i + 1
5582 && ignore_address_reloads
5583 && ! rld[reloadnum].out)
5584 continue;
5585 time2 = MAX_RECOG_OPERANDS * 4 + 1;
5586 break;
5587 case RELOAD_FOR_OPERAND_ADDRESS:
5588 time2 = MAX_RECOG_OPERANDS * 4 + 2;
5589 check_earlyclobber = 1;
5590 break;
5591 case RELOAD_FOR_INSN:
5592 time2 = MAX_RECOG_OPERANDS * 4 + 3;
5593 break;
5594 case RELOAD_FOR_OUTPUT:
5595 /* All RELOAD_FOR_OUTPUT reloads become live just after the
5596 instruction is executed. */
5597 time2 = MAX_RECOG_OPERANDS * 4 + 4;
5598 break;
5599 /* The first RELOAD_FOR_OUTADDR_ADDRESS reload conflicts with
5600 the RELOAD_FOR_OUTPUT reloads, so assign it the same time
5601 value. */
5602 case RELOAD_FOR_OUTADDR_ADDRESS:
5603 if (type == RELOAD_FOR_OUTPUT_ADDRESS && reloadnum == i + 1
5604 && ignore_address_reloads
5605 && ! rld[reloadnum].out)
5606 continue;
5607 time2 = MAX_RECOG_OPERANDS * 4 + 4 + rld[i].opnum;
5608 break;
5609 case RELOAD_FOR_OUTPUT_ADDRESS:
5610 time2 = MAX_RECOG_OPERANDS * 4 + 5 + rld[i].opnum;
5611 break;
5612 case RELOAD_OTHER:
5613 /* If there is no conflict in the input part, handle this
5614 like an output reload. */
5615 if (! rld[i].in || rtx_equal_p (other_input, value))
5617 time2 = MAX_RECOG_OPERANDS * 4 + 4;
5618 /* Earlyclobbered outputs must conflict with inputs. */
5619 if (earlyclobber_operand_p (rld[i].out))
5620 time2 = MAX_RECOG_OPERANDS * 4 + 3;
5622 break;
5624 time2 = 1;
5625 /* RELOAD_OTHER might be live beyond instruction execution,
5626 but this is not obvious when we set time2 = 1. So check
5627 here if there might be a problem with the new reload
5628 clobbering the register used by the RELOAD_OTHER. */
5629 if (out)
5630 return 0;
5631 break;
5632 default:
5633 return 0;
5635 if ((time1 >= time2
5636 && (! rld[i].in || rld[i].out
5637 || ! rtx_equal_p (other_input, value)))
5638 || (out && rld[reloadnum].out_reg
5639 && time2 >= MAX_RECOG_OPERANDS * 4 + 3))
5640 return 0;
5645 /* Earlyclobbered outputs must conflict with inputs. */
5646 if (check_earlyclobber && out && earlyclobber_operand_p (out))
5647 return 0;
5649 return 1;
5652 /* Return 1 if the value in reload reg REGNO, as used by a reload
5653 needed for the part of the insn specified by OPNUM and TYPE,
5654 may be used to load VALUE into it.
5656 MODE is the mode in which the register is used, this is needed to
5657 determine how many hard regs to test.
5659 Other read-only reloads with the same value do not conflict
5660 unless OUT is nonzero and these other reloads have to live while
5661 output reloads live.
5662 If OUT is CONST0_RTX, this is a special case: it means that the
5663 test should not be for using register REGNO as reload register, but
5664 for copying from register REGNO into the reload register.
5666 RELOADNUM is the number of the reload we want to load this value for;
5667 a reload does not conflict with itself.
5669 When IGNORE_ADDRESS_RELOADS is set, we can not have conflicts with
5670 reloads that load an address for the very reload we are considering.
5672 The caller has to make sure that there is no conflict with the return
5673 register. */
5675 static int
5676 free_for_value_p (int regno, enum machine_mode mode, int opnum,
5677 enum reload_type type, rtx value, rtx out, int reloadnum,
5678 int ignore_address_reloads)
5680 int nregs = hard_regno_nregs[regno][mode];
5681 while (nregs-- > 0)
5682 if (! reload_reg_free_for_value_p (regno, regno + nregs, opnum, type,
5683 value, out, reloadnum,
5684 ignore_address_reloads))
5685 return 0;
5686 return 1;
5689 /* Return nonzero if the rtx X is invariant over the current function. */
5690 /* ??? Actually, the places where we use this expect exactly what is
5691 tested here, and not everything that is function invariant. In
5692 particular, the frame pointer and arg pointer are special cased;
5693 pic_offset_table_rtx is not, and we must not spill these things to
5694 memory. */
5697 function_invariant_p (const_rtx x)
5699 if (CONSTANT_P (x))
5700 return 1;
5701 if (x == frame_pointer_rtx || x == arg_pointer_rtx)
5702 return 1;
5703 if (GET_CODE (x) == PLUS
5704 && (XEXP (x, 0) == frame_pointer_rtx || XEXP (x, 0) == arg_pointer_rtx)
5705 && CONSTANT_P (XEXP (x, 1)))
5706 return 1;
5707 return 0;
5710 /* Determine whether the reload reg X overlaps any rtx'es used for
5711 overriding inheritance. Return nonzero if so. */
5713 static int
5714 conflicts_with_override (rtx x)
5716 int i;
5717 for (i = 0; i < n_reloads; i++)
5718 if (reload_override_in[i]
5719 && reg_overlap_mentioned_p (x, reload_override_in[i]))
5720 return 1;
5721 return 0;
5724 /* Give an error message saying we failed to find a reload for INSN,
5725 and clear out reload R. */
5726 static void
5727 failed_reload (rtx insn, int r)
5729 if (asm_noperands (PATTERN (insn)) < 0)
5730 /* It's the compiler's fault. */
5731 fatal_insn ("could not find a spill register", insn);
5733 /* It's the user's fault; the operand's mode and constraint
5734 don't match. Disable this reload so we don't crash in final. */
5735 error_for_asm (insn,
5736 "%<asm%> operand constraint incompatible with operand size");
5737 rld[r].in = 0;
5738 rld[r].out = 0;
5739 rld[r].reg_rtx = 0;
5740 rld[r].optional = 1;
5741 rld[r].secondary_p = 1;
5744 /* I is the index in SPILL_REG_RTX of the reload register we are to allocate
5745 for reload R. If it's valid, get an rtx for it. Return nonzero if
5746 successful. */
5747 static int
5748 set_reload_reg (int i, int r)
5750 int regno;
5751 rtx reg = spill_reg_rtx[i];
5753 if (reg == 0 || GET_MODE (reg) != rld[r].mode)
5754 spill_reg_rtx[i] = reg
5755 = gen_rtx_REG (rld[r].mode, spill_regs[i]);
5757 regno = true_regnum (reg);
5759 /* Detect when the reload reg can't hold the reload mode.
5760 This used to be one `if', but Sequent compiler can't handle that. */
5761 if (HARD_REGNO_MODE_OK (regno, rld[r].mode))
5763 enum machine_mode test_mode = VOIDmode;
5764 if (rld[r].in)
5765 test_mode = GET_MODE (rld[r].in);
5766 /* If rld[r].in has VOIDmode, it means we will load it
5767 in whatever mode the reload reg has: to wit, rld[r].mode.
5768 We have already tested that for validity. */
5769 /* Aside from that, we need to test that the expressions
5770 to reload from or into have modes which are valid for this
5771 reload register. Otherwise the reload insns would be invalid. */
5772 if (! (rld[r].in != 0 && test_mode != VOIDmode
5773 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
5774 if (! (rld[r].out != 0
5775 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (rld[r].out))))
5777 /* The reg is OK. */
5778 last_spill_reg = i;
5780 /* Mark as in use for this insn the reload regs we use
5781 for this. */
5782 mark_reload_reg_in_use (spill_regs[i], rld[r].opnum,
5783 rld[r].when_needed, rld[r].mode);
5785 rld[r].reg_rtx = reg;
5786 reload_spill_index[r] = spill_regs[i];
5787 return 1;
5790 return 0;
5793 /* Find a spill register to use as a reload register for reload R.
5794 LAST_RELOAD is nonzero if this is the last reload for the insn being
5795 processed.
5797 Set rld[R].reg_rtx to the register allocated.
5799 We return 1 if successful, or 0 if we couldn't find a spill reg and
5800 we didn't change anything. */
5802 static int
5803 allocate_reload_reg (struct insn_chain *chain ATTRIBUTE_UNUSED, int r,
5804 int last_reload)
5806 int i, pass, count;
5808 /* If we put this reload ahead, thinking it is a group,
5809 then insist on finding a group. Otherwise we can grab a
5810 reg that some other reload needs.
5811 (That can happen when we have a 68000 DATA_OR_FP_REG
5812 which is a group of data regs or one fp reg.)
5813 We need not be so restrictive if there are no more reloads
5814 for this insn.
5816 ??? Really it would be nicer to have smarter handling
5817 for that kind of reg class, where a problem like this is normal.
5818 Perhaps those classes should be avoided for reloading
5819 by use of more alternatives. */
5821 int force_group = rld[r].nregs > 1 && ! last_reload;
5823 /* If we want a single register and haven't yet found one,
5824 take any reg in the right class and not in use.
5825 If we want a consecutive group, here is where we look for it.
5827 We use two passes so we can first look for reload regs to
5828 reuse, which are already in use for other reloads in this insn,
5829 and only then use additional registers.
5830 I think that maximizing reuse is needed to make sure we don't
5831 run out of reload regs. Suppose we have three reloads, and
5832 reloads A and B can share regs. These need two regs.
5833 Suppose A and B are given different regs.
5834 That leaves none for C. */
5835 for (pass = 0; pass < 2; pass++)
5837 /* I is the index in spill_regs.
5838 We advance it round-robin between insns to use all spill regs
5839 equally, so that inherited reloads have a chance
5840 of leapfrogging each other. */
5842 i = last_spill_reg;
5844 for (count = 0; count < n_spills; count++)
5846 int class = (int) rld[r].class;
5847 int regnum;
5849 i++;
5850 if (i >= n_spills)
5851 i -= n_spills;
5852 regnum = spill_regs[i];
5854 if ((reload_reg_free_p (regnum, rld[r].opnum,
5855 rld[r].when_needed)
5856 || (rld[r].in
5857 /* We check reload_reg_used to make sure we
5858 don't clobber the return register. */
5859 && ! TEST_HARD_REG_BIT (reload_reg_used, regnum)
5860 && free_for_value_p (regnum, rld[r].mode, rld[r].opnum,
5861 rld[r].when_needed, rld[r].in,
5862 rld[r].out, r, 1)))
5863 && TEST_HARD_REG_BIT (reg_class_contents[class], regnum)
5864 && HARD_REGNO_MODE_OK (regnum, rld[r].mode)
5865 /* Look first for regs to share, then for unshared. But
5866 don't share regs used for inherited reloads; they are
5867 the ones we want to preserve. */
5868 && (pass
5869 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
5870 regnum)
5871 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
5872 regnum))))
5874 int nr = hard_regno_nregs[regnum][rld[r].mode];
5875 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
5876 (on 68000) got us two FP regs. If NR is 1,
5877 we would reject both of them. */
5878 if (force_group)
5879 nr = rld[r].nregs;
5880 /* If we need only one reg, we have already won. */
5881 if (nr == 1)
5883 /* But reject a single reg if we demand a group. */
5884 if (force_group)
5885 continue;
5886 break;
5888 /* Otherwise check that as many consecutive regs as we need
5889 are available here. */
5890 while (nr > 1)
5892 int regno = regnum + nr - 1;
5893 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
5894 && spill_reg_order[regno] >= 0
5895 && reload_reg_free_p (regno, rld[r].opnum,
5896 rld[r].when_needed)))
5897 break;
5898 nr--;
5900 if (nr == 1)
5901 break;
5905 /* If we found something on pass 1, omit pass 2. */
5906 if (count < n_spills)
5907 break;
5910 /* We should have found a spill register by now. */
5911 if (count >= n_spills)
5912 return 0;
5914 /* I is the index in SPILL_REG_RTX of the reload register we are to
5915 allocate. Get an rtx for it and find its register number. */
5917 return set_reload_reg (i, r);
5920 /* Initialize all the tables needed to allocate reload registers.
5921 CHAIN is the insn currently being processed; SAVE_RELOAD_REG_RTX
5922 is the array we use to restore the reg_rtx field for every reload. */
5924 static void
5925 choose_reload_regs_init (struct insn_chain *chain, rtx *save_reload_reg_rtx)
5927 int i;
5929 for (i = 0; i < n_reloads; i++)
5930 rld[i].reg_rtx = save_reload_reg_rtx[i];
5932 memset (reload_inherited, 0, MAX_RELOADS);
5933 memset (reload_inheritance_insn, 0, MAX_RELOADS * sizeof (rtx));
5934 memset (reload_override_in, 0, MAX_RELOADS * sizeof (rtx));
5936 CLEAR_HARD_REG_SET (reload_reg_used);
5937 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
5938 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
5939 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
5940 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
5941 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
5943 CLEAR_HARD_REG_SET (reg_used_in_insn);
5945 HARD_REG_SET tmp;
5946 REG_SET_TO_HARD_REG_SET (tmp, &chain->live_throughout);
5947 IOR_HARD_REG_SET (reg_used_in_insn, tmp);
5948 REG_SET_TO_HARD_REG_SET (tmp, &chain->dead_or_set);
5949 IOR_HARD_REG_SET (reg_used_in_insn, tmp);
5950 compute_use_by_pseudos (&reg_used_in_insn, &chain->live_throughout);
5951 compute_use_by_pseudos (&reg_used_in_insn, &chain->dead_or_set);
5954 for (i = 0; i < reload_n_operands; i++)
5956 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
5957 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
5958 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
5959 CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
5960 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
5961 CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
5964 COMPL_HARD_REG_SET (reload_reg_unavailable, chain->used_spill_regs);
5966 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5968 for (i = 0; i < n_reloads; i++)
5969 /* If we have already decided to use a certain register,
5970 don't use it in another way. */
5971 if (rld[i].reg_rtx)
5972 mark_reload_reg_in_use (REGNO (rld[i].reg_rtx), rld[i].opnum,
5973 rld[i].when_needed, rld[i].mode);
5976 /* Assign hard reg targets for the pseudo-registers we must reload
5977 into hard regs for this insn.
5978 Also output the instructions to copy them in and out of the hard regs.
5980 For machines with register classes, we are responsible for
5981 finding a reload reg in the proper class. */
5983 static void
5984 choose_reload_regs (struct insn_chain *chain)
5986 rtx insn = chain->insn;
5987 int i, j;
5988 unsigned int max_group_size = 1;
5989 enum reg_class group_class = NO_REGS;
5990 int pass, win, inheritance;
5992 rtx save_reload_reg_rtx[MAX_RELOADS];
5994 /* In order to be certain of getting the registers we need,
5995 we must sort the reloads into order of increasing register class.
5996 Then our grabbing of reload registers will parallel the process
5997 that provided the reload registers.
5999 Also note whether any of the reloads wants a consecutive group of regs.
6000 If so, record the maximum size of the group desired and what
6001 register class contains all the groups needed by this insn. */
6003 for (j = 0; j < n_reloads; j++)
6005 reload_order[j] = j;
6006 if (rld[j].reg_rtx != NULL_RTX)
6008 gcc_assert (REG_P (rld[j].reg_rtx)
6009 && HARD_REGISTER_P (rld[j].reg_rtx));
6010 reload_spill_index[j] = REGNO (rld[j].reg_rtx);
6012 else
6013 reload_spill_index[j] = -1;
6015 if (rld[j].nregs > 1)
6017 max_group_size = MAX (rld[j].nregs, max_group_size);
6018 group_class
6019 = reg_class_superunion[(int) rld[j].class][(int) group_class];
6022 save_reload_reg_rtx[j] = rld[j].reg_rtx;
6025 if (n_reloads > 1)
6026 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
6028 /* If -O, try first with inheritance, then turning it off.
6029 If not -O, don't do inheritance.
6030 Using inheritance when not optimizing leads to paradoxes
6031 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
6032 because one side of the comparison might be inherited. */
6033 win = 0;
6034 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
6036 choose_reload_regs_init (chain, save_reload_reg_rtx);
6038 /* Process the reloads in order of preference just found.
6039 Beyond this point, subregs can be found in reload_reg_rtx.
6041 This used to look for an existing reloaded home for all of the
6042 reloads, and only then perform any new reloads. But that could lose
6043 if the reloads were done out of reg-class order because a later
6044 reload with a looser constraint might have an old home in a register
6045 needed by an earlier reload with a tighter constraint.
6047 To solve this, we make two passes over the reloads, in the order
6048 described above. In the first pass we try to inherit a reload
6049 from a previous insn. If there is a later reload that needs a
6050 class that is a proper subset of the class being processed, we must
6051 also allocate a spill register during the first pass.
6053 Then make a second pass over the reloads to allocate any reloads
6054 that haven't been given registers yet. */
6056 for (j = 0; j < n_reloads; j++)
6058 int r = reload_order[j];
6059 rtx search_equiv = NULL_RTX;
6061 /* Ignore reloads that got marked inoperative. */
6062 if (rld[r].out == 0 && rld[r].in == 0
6063 && ! rld[r].secondary_p)
6064 continue;
6066 /* If find_reloads chose to use reload_in or reload_out as a reload
6067 register, we don't need to chose one. Otherwise, try even if it
6068 found one since we might save an insn if we find the value lying
6069 around.
6070 Try also when reload_in is a pseudo without a hard reg. */
6071 if (rld[r].in != 0 && rld[r].reg_rtx != 0
6072 && (rtx_equal_p (rld[r].in, rld[r].reg_rtx)
6073 || (rtx_equal_p (rld[r].out, rld[r].reg_rtx)
6074 && !MEM_P (rld[r].in)
6075 && true_regnum (rld[r].in) < FIRST_PSEUDO_REGISTER)))
6076 continue;
6078 #if 0 /* No longer needed for correct operation.
6079 It might give better code, or might not; worth an experiment? */
6080 /* If this is an optional reload, we can't inherit from earlier insns
6081 until we are sure that any non-optional reloads have been allocated.
6082 The following code takes advantage of the fact that optional reloads
6083 are at the end of reload_order. */
6084 if (rld[r].optional != 0)
6085 for (i = 0; i < j; i++)
6086 if ((rld[reload_order[i]].out != 0
6087 || rld[reload_order[i]].in != 0
6088 || rld[reload_order[i]].secondary_p)
6089 && ! rld[reload_order[i]].optional
6090 && rld[reload_order[i]].reg_rtx == 0)
6091 allocate_reload_reg (chain, reload_order[i], 0);
6092 #endif
6094 /* First see if this pseudo is already available as reloaded
6095 for a previous insn. We cannot try to inherit for reloads
6096 that are smaller than the maximum number of registers needed
6097 for groups unless the register we would allocate cannot be used
6098 for the groups.
6100 We could check here to see if this is a secondary reload for
6101 an object that is already in a register of the desired class.
6102 This would avoid the need for the secondary reload register.
6103 But this is complex because we can't easily determine what
6104 objects might want to be loaded via this reload. So let a
6105 register be allocated here. In `emit_reload_insns' we suppress
6106 one of the loads in the case described above. */
6108 if (inheritance)
6110 int byte = 0;
6111 int regno = -1;
6112 enum machine_mode mode = VOIDmode;
6114 if (rld[r].in == 0)
6116 else if (REG_P (rld[r].in))
6118 regno = REGNO (rld[r].in);
6119 mode = GET_MODE (rld[r].in);
6121 else if (REG_P (rld[r].in_reg))
6123 regno = REGNO (rld[r].in_reg);
6124 mode = GET_MODE (rld[r].in_reg);
6126 else if (GET_CODE (rld[r].in_reg) == SUBREG
6127 && REG_P (SUBREG_REG (rld[r].in_reg)))
6129 regno = REGNO (SUBREG_REG (rld[r].in_reg));
6130 if (regno < FIRST_PSEUDO_REGISTER)
6131 regno = subreg_regno (rld[r].in_reg);
6132 else
6133 byte = SUBREG_BYTE (rld[r].in_reg);
6134 mode = GET_MODE (rld[r].in_reg);
6136 #ifdef AUTO_INC_DEC
6137 else if (GET_RTX_CLASS (GET_CODE (rld[r].in_reg)) == RTX_AUTOINC
6138 && REG_P (XEXP (rld[r].in_reg, 0)))
6140 regno = REGNO (XEXP (rld[r].in_reg, 0));
6141 mode = GET_MODE (XEXP (rld[r].in_reg, 0));
6142 rld[r].out = rld[r].in;
6144 #endif
6145 #if 0
6146 /* This won't work, since REGNO can be a pseudo reg number.
6147 Also, it takes much more hair to keep track of all the things
6148 that can invalidate an inherited reload of part of a pseudoreg. */
6149 else if (GET_CODE (rld[r].in) == SUBREG
6150 && REG_P (SUBREG_REG (rld[r].in)))
6151 regno = subreg_regno (rld[r].in);
6152 #endif
6154 if (regno >= 0
6155 && reg_last_reload_reg[regno] != 0
6156 #ifdef CANNOT_CHANGE_MODE_CLASS
6157 /* Verify that the register it's in can be used in
6158 mode MODE. */
6159 && !REG_CANNOT_CHANGE_MODE_P (REGNO (reg_last_reload_reg[regno]),
6160 GET_MODE (reg_last_reload_reg[regno]),
6161 mode)
6162 #endif
6165 enum reg_class class = rld[r].class, last_class;
6166 rtx last_reg = reg_last_reload_reg[regno];
6167 enum machine_mode need_mode;
6169 i = REGNO (last_reg);
6170 i += subreg_regno_offset (i, GET_MODE (last_reg), byte, mode);
6171 last_class = REGNO_REG_CLASS (i);
6173 if (byte == 0)
6174 need_mode = mode;
6175 else
6176 need_mode
6177 = smallest_mode_for_size (GET_MODE_BITSIZE (mode)
6178 + byte * BITS_PER_UNIT,
6179 GET_MODE_CLASS (mode));
6181 if ((GET_MODE_SIZE (GET_MODE (last_reg))
6182 >= GET_MODE_SIZE (need_mode))
6183 && reg_reloaded_contents[i] == regno
6184 && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
6185 && HARD_REGNO_MODE_OK (i, rld[r].mode)
6186 && (TEST_HARD_REG_BIT (reg_class_contents[(int) class], i)
6187 /* Even if we can't use this register as a reload
6188 register, we might use it for reload_override_in,
6189 if copying it to the desired class is cheap
6190 enough. */
6191 || ((REGISTER_MOVE_COST (mode, last_class, class)
6192 < MEMORY_MOVE_COST (mode, class, 1))
6193 && (secondary_reload_class (1, class, mode,
6194 last_reg)
6195 == NO_REGS)
6196 #ifdef SECONDARY_MEMORY_NEEDED
6197 && ! SECONDARY_MEMORY_NEEDED (last_class, class,
6198 mode)
6199 #endif
6202 && (rld[r].nregs == max_group_size
6203 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
6205 && free_for_value_p (i, rld[r].mode, rld[r].opnum,
6206 rld[r].when_needed, rld[r].in,
6207 const0_rtx, r, 1))
6209 /* If a group is needed, verify that all the subsequent
6210 registers still have their values intact. */
6211 int nr = hard_regno_nregs[i][rld[r].mode];
6212 int k;
6214 for (k = 1; k < nr; k++)
6215 if (reg_reloaded_contents[i + k] != regno
6216 || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))
6217 break;
6219 if (k == nr)
6221 int i1;
6222 int bad_for_class;
6224 last_reg = (GET_MODE (last_reg) == mode
6225 ? last_reg : gen_rtx_REG (mode, i));
6227 bad_for_class = 0;
6228 for (k = 0; k < nr; k++)
6229 bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].class],
6230 i+k);
6232 /* We found a register that contains the
6233 value we need. If this register is the
6234 same as an `earlyclobber' operand of the
6235 current insn, just mark it as a place to
6236 reload from since we can't use it as the
6237 reload register itself. */
6239 for (i1 = 0; i1 < n_earlyclobbers; i1++)
6240 if (reg_overlap_mentioned_for_reload_p
6241 (reg_last_reload_reg[regno],
6242 reload_earlyclobbers[i1]))
6243 break;
6245 if (i1 != n_earlyclobbers
6246 || ! (free_for_value_p (i, rld[r].mode,
6247 rld[r].opnum,
6248 rld[r].when_needed, rld[r].in,
6249 rld[r].out, r, 1))
6250 /* Don't use it if we'd clobber a pseudo reg. */
6251 || (TEST_HARD_REG_BIT (reg_used_in_insn, i)
6252 && rld[r].out
6253 && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
6254 /* Don't clobber the frame pointer. */
6255 || (i == HARD_FRAME_POINTER_REGNUM
6256 && frame_pointer_needed
6257 && rld[r].out)
6258 /* Don't really use the inherited spill reg
6259 if we need it wider than we've got it. */
6260 || (GET_MODE_SIZE (rld[r].mode)
6261 > GET_MODE_SIZE (mode))
6262 || bad_for_class
6264 /* If find_reloads chose reload_out as reload
6265 register, stay with it - that leaves the
6266 inherited register for subsequent reloads. */
6267 || (rld[r].out && rld[r].reg_rtx
6268 && rtx_equal_p (rld[r].out, rld[r].reg_rtx)))
6270 if (! rld[r].optional)
6272 reload_override_in[r] = last_reg;
6273 reload_inheritance_insn[r]
6274 = reg_reloaded_insn[i];
6277 else
6279 int k;
6280 /* We can use this as a reload reg. */
6281 /* Mark the register as in use for this part of
6282 the insn. */
6283 mark_reload_reg_in_use (i,
6284 rld[r].opnum,
6285 rld[r].when_needed,
6286 rld[r].mode);
6287 rld[r].reg_rtx = last_reg;
6288 reload_inherited[r] = 1;
6289 reload_inheritance_insn[r]
6290 = reg_reloaded_insn[i];
6291 reload_spill_index[r] = i;
6292 for (k = 0; k < nr; k++)
6293 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6294 i + k);
6301 /* Here's another way to see if the value is already lying around. */
6302 if (inheritance
6303 && rld[r].in != 0
6304 && ! reload_inherited[r]
6305 && rld[r].out == 0
6306 && (CONSTANT_P (rld[r].in)
6307 || GET_CODE (rld[r].in) == PLUS
6308 || REG_P (rld[r].in)
6309 || MEM_P (rld[r].in))
6310 && (rld[r].nregs == max_group_size
6311 || ! reg_classes_intersect_p (rld[r].class, group_class)))
6312 search_equiv = rld[r].in;
6313 /* If this is an output reload from a simple move insn, look
6314 if an equivalence for the input is available. */
6315 else if (inheritance && rld[r].in == 0 && rld[r].out != 0)
6317 rtx set = single_set (insn);
6319 if (set
6320 && rtx_equal_p (rld[r].out, SET_DEST (set))
6321 && CONSTANT_P (SET_SRC (set)))
6322 search_equiv = SET_SRC (set);
6325 if (search_equiv)
6327 rtx equiv
6328 = find_equiv_reg (search_equiv, insn, rld[r].class,
6329 -1, NULL, 0, rld[r].mode);
6330 int regno = 0;
6332 if (equiv != 0)
6334 if (REG_P (equiv))
6335 regno = REGNO (equiv);
6336 else
6338 /* This must be a SUBREG of a hard register.
6339 Make a new REG since this might be used in an
6340 address and not all machines support SUBREGs
6341 there. */
6342 gcc_assert (GET_CODE (equiv) == SUBREG);
6343 regno = subreg_regno (equiv);
6344 equiv = gen_rtx_REG (rld[r].mode, regno);
6345 /* If we choose EQUIV as the reload register, but the
6346 loop below decides to cancel the inheritance, we'll
6347 end up reloading EQUIV in rld[r].mode, not the mode
6348 it had originally. That isn't safe when EQUIV isn't
6349 available as a spill register since its value might
6350 still be live at this point. */
6351 for (i = regno; i < regno + (int) rld[r].nregs; i++)
6352 if (TEST_HARD_REG_BIT (reload_reg_unavailable, i))
6353 equiv = 0;
6357 /* If we found a spill reg, reject it unless it is free
6358 and of the desired class. */
6359 if (equiv != 0)
6361 int regs_used = 0;
6362 int bad_for_class = 0;
6363 int max_regno = regno + rld[r].nregs;
6365 for (i = regno; i < max_regno; i++)
6367 regs_used |= TEST_HARD_REG_BIT (reload_reg_used_at_all,
6369 bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].class],
6373 if ((regs_used
6374 && ! free_for_value_p (regno, rld[r].mode,
6375 rld[r].opnum, rld[r].when_needed,
6376 rld[r].in, rld[r].out, r, 1))
6377 || bad_for_class)
6378 equiv = 0;
6381 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, rld[r].mode))
6382 equiv = 0;
6384 /* We found a register that contains the value we need.
6385 If this register is the same as an `earlyclobber' operand
6386 of the current insn, just mark it as a place to reload from
6387 since we can't use it as the reload register itself. */
6389 if (equiv != 0)
6390 for (i = 0; i < n_earlyclobbers; i++)
6391 if (reg_overlap_mentioned_for_reload_p (equiv,
6392 reload_earlyclobbers[i]))
6394 if (! rld[r].optional)
6395 reload_override_in[r] = equiv;
6396 equiv = 0;
6397 break;
6400 /* If the equiv register we have found is explicitly clobbered
6401 in the current insn, it depends on the reload type if we
6402 can use it, use it for reload_override_in, or not at all.
6403 In particular, we then can't use EQUIV for a
6404 RELOAD_FOR_OUTPUT_ADDRESS reload. */
6406 if (equiv != 0)
6408 if (regno_clobbered_p (regno, insn, rld[r].mode, 2))
6409 switch (rld[r].when_needed)
6411 case RELOAD_FOR_OTHER_ADDRESS:
6412 case RELOAD_FOR_INPADDR_ADDRESS:
6413 case RELOAD_FOR_INPUT_ADDRESS:
6414 case RELOAD_FOR_OPADDR_ADDR:
6415 break;
6416 case RELOAD_OTHER:
6417 case RELOAD_FOR_INPUT:
6418 case RELOAD_FOR_OPERAND_ADDRESS:
6419 if (! rld[r].optional)
6420 reload_override_in[r] = equiv;
6421 /* Fall through. */
6422 default:
6423 equiv = 0;
6424 break;
6426 else if (regno_clobbered_p (regno, insn, rld[r].mode, 1))
6427 switch (rld[r].when_needed)
6429 case RELOAD_FOR_OTHER_ADDRESS:
6430 case RELOAD_FOR_INPADDR_ADDRESS:
6431 case RELOAD_FOR_INPUT_ADDRESS:
6432 case RELOAD_FOR_OPADDR_ADDR:
6433 case RELOAD_FOR_OPERAND_ADDRESS:
6434 case RELOAD_FOR_INPUT:
6435 break;
6436 case RELOAD_OTHER:
6437 if (! rld[r].optional)
6438 reload_override_in[r] = equiv;
6439 /* Fall through. */
6440 default:
6441 equiv = 0;
6442 break;
6446 /* If we found an equivalent reg, say no code need be generated
6447 to load it, and use it as our reload reg. */
6448 if (equiv != 0
6449 && (regno != HARD_FRAME_POINTER_REGNUM
6450 || !frame_pointer_needed))
6452 int nr = hard_regno_nregs[regno][rld[r].mode];
6453 int k;
6454 rld[r].reg_rtx = equiv;
6455 reload_inherited[r] = 1;
6457 /* If reg_reloaded_valid is not set for this register,
6458 there might be a stale spill_reg_store lying around.
6459 We must clear it, since otherwise emit_reload_insns
6460 might delete the store. */
6461 if (! TEST_HARD_REG_BIT (reg_reloaded_valid, regno))
6462 spill_reg_store[regno] = NULL_RTX;
6463 /* If any of the hard registers in EQUIV are spill
6464 registers, mark them as in use for this insn. */
6465 for (k = 0; k < nr; k++)
6467 i = spill_reg_order[regno + k];
6468 if (i >= 0)
6470 mark_reload_reg_in_use (regno, rld[r].opnum,
6471 rld[r].when_needed,
6472 rld[r].mode);
6473 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6474 regno + k);
6480 /* If we found a register to use already, or if this is an optional
6481 reload, we are done. */
6482 if (rld[r].reg_rtx != 0 || rld[r].optional != 0)
6483 continue;
6485 #if 0
6486 /* No longer needed for correct operation. Might or might
6487 not give better code on the average. Want to experiment? */
6489 /* See if there is a later reload that has a class different from our
6490 class that intersects our class or that requires less register
6491 than our reload. If so, we must allocate a register to this
6492 reload now, since that reload might inherit a previous reload
6493 and take the only available register in our class. Don't do this
6494 for optional reloads since they will force all previous reloads
6495 to be allocated. Also don't do this for reloads that have been
6496 turned off. */
6498 for (i = j + 1; i < n_reloads; i++)
6500 int s = reload_order[i];
6502 if ((rld[s].in == 0 && rld[s].out == 0
6503 && ! rld[s].secondary_p)
6504 || rld[s].optional)
6505 continue;
6507 if ((rld[s].class != rld[r].class
6508 && reg_classes_intersect_p (rld[r].class,
6509 rld[s].class))
6510 || rld[s].nregs < rld[r].nregs)
6511 break;
6514 if (i == n_reloads)
6515 continue;
6517 allocate_reload_reg (chain, r, j == n_reloads - 1);
6518 #endif
6521 /* Now allocate reload registers for anything non-optional that
6522 didn't get one yet. */
6523 for (j = 0; j < n_reloads; j++)
6525 int r = reload_order[j];
6527 /* Ignore reloads that got marked inoperative. */
6528 if (rld[r].out == 0 && rld[r].in == 0 && ! rld[r].secondary_p)
6529 continue;
6531 /* Skip reloads that already have a register allocated or are
6532 optional. */
6533 if (rld[r].reg_rtx != 0 || rld[r].optional)
6534 continue;
6536 if (! allocate_reload_reg (chain, r, j == n_reloads - 1))
6537 break;
6540 /* If that loop got all the way, we have won. */
6541 if (j == n_reloads)
6543 win = 1;
6544 break;
6547 /* Loop around and try without any inheritance. */
6550 if (! win)
6552 /* First undo everything done by the failed attempt
6553 to allocate with inheritance. */
6554 choose_reload_regs_init (chain, save_reload_reg_rtx);
6556 /* Some sanity tests to verify that the reloads found in the first
6557 pass are identical to the ones we have now. */
6558 gcc_assert (chain->n_reloads == n_reloads);
6560 for (i = 0; i < n_reloads; i++)
6562 if (chain->rld[i].regno < 0 || chain->rld[i].reg_rtx != 0)
6563 continue;
6564 gcc_assert (chain->rld[i].when_needed == rld[i].when_needed);
6565 for (j = 0; j < n_spills; j++)
6566 if (spill_regs[j] == chain->rld[i].regno)
6567 if (! set_reload_reg (j, i))
6568 failed_reload (chain->insn, i);
6572 /* If we thought we could inherit a reload, because it seemed that
6573 nothing else wanted the same reload register earlier in the insn,
6574 verify that assumption, now that all reloads have been assigned.
6575 Likewise for reloads where reload_override_in has been set. */
6577 /* If doing expensive optimizations, do one preliminary pass that doesn't
6578 cancel any inheritance, but removes reloads that have been needed only
6579 for reloads that we know can be inherited. */
6580 for (pass = flag_expensive_optimizations; pass >= 0; pass--)
6582 for (j = 0; j < n_reloads; j++)
6584 int r = reload_order[j];
6585 rtx check_reg;
6586 if (reload_inherited[r] && rld[r].reg_rtx)
6587 check_reg = rld[r].reg_rtx;
6588 else if (reload_override_in[r]
6589 && (REG_P (reload_override_in[r])
6590 || GET_CODE (reload_override_in[r]) == SUBREG))
6591 check_reg = reload_override_in[r];
6592 else
6593 continue;
6594 if (! free_for_value_p (true_regnum (check_reg), rld[r].mode,
6595 rld[r].opnum, rld[r].when_needed, rld[r].in,
6596 (reload_inherited[r]
6597 ? rld[r].out : const0_rtx),
6598 r, 1))
6600 if (pass)
6601 continue;
6602 reload_inherited[r] = 0;
6603 reload_override_in[r] = 0;
6605 /* If we can inherit a RELOAD_FOR_INPUT, or can use a
6606 reload_override_in, then we do not need its related
6607 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
6608 likewise for other reload types.
6609 We handle this by removing a reload when its only replacement
6610 is mentioned in reload_in of the reload we are going to inherit.
6611 A special case are auto_inc expressions; even if the input is
6612 inherited, we still need the address for the output. We can
6613 recognize them because they have RELOAD_OUT set to RELOAD_IN.
6614 If we succeeded removing some reload and we are doing a preliminary
6615 pass just to remove such reloads, make another pass, since the
6616 removal of one reload might allow us to inherit another one. */
6617 else if (rld[r].in
6618 && rld[r].out != rld[r].in
6619 && remove_address_replacements (rld[r].in) && pass)
6620 pass = 2;
6624 /* Now that reload_override_in is known valid,
6625 actually override reload_in. */
6626 for (j = 0; j < n_reloads; j++)
6627 if (reload_override_in[j])
6628 rld[j].in = reload_override_in[j];
6630 /* If this reload won't be done because it has been canceled or is
6631 optional and not inherited, clear reload_reg_rtx so other
6632 routines (such as subst_reloads) don't get confused. */
6633 for (j = 0; j < n_reloads; j++)
6634 if (rld[j].reg_rtx != 0
6635 && ((rld[j].optional && ! reload_inherited[j])
6636 || (rld[j].in == 0 && rld[j].out == 0
6637 && ! rld[j].secondary_p)))
6639 int regno = true_regnum (rld[j].reg_rtx);
6641 if (spill_reg_order[regno] >= 0)
6642 clear_reload_reg_in_use (regno, rld[j].opnum,
6643 rld[j].when_needed, rld[j].mode);
6644 rld[j].reg_rtx = 0;
6645 reload_spill_index[j] = -1;
6648 /* Record which pseudos and which spill regs have output reloads. */
6649 for (j = 0; j < n_reloads; j++)
6651 int r = reload_order[j];
6653 i = reload_spill_index[r];
6655 /* I is nonneg if this reload uses a register.
6656 If rld[r].reg_rtx is 0, this is an optional reload
6657 that we opted to ignore. */
6658 if (rld[r].out_reg != 0 && REG_P (rld[r].out_reg)
6659 && rld[r].reg_rtx != 0)
6661 int nregno = REGNO (rld[r].out_reg);
6662 int nr = 1;
6664 if (nregno < FIRST_PSEUDO_REGISTER)
6665 nr = hard_regno_nregs[nregno][rld[r].mode];
6667 while (--nr >= 0)
6668 SET_REGNO_REG_SET (&reg_has_output_reload,
6669 nregno + nr);
6671 if (i >= 0)
6673 nr = hard_regno_nregs[i][rld[r].mode];
6674 while (--nr >= 0)
6675 SET_HARD_REG_BIT (reg_is_output_reload, i + nr);
6678 gcc_assert (rld[r].when_needed == RELOAD_OTHER
6679 || rld[r].when_needed == RELOAD_FOR_OUTPUT
6680 || rld[r].when_needed == RELOAD_FOR_INSN);
6685 /* Deallocate the reload register for reload R. This is called from
6686 remove_address_replacements. */
6688 void
6689 deallocate_reload_reg (int r)
6691 int regno;
6693 if (! rld[r].reg_rtx)
6694 return;
6695 regno = true_regnum (rld[r].reg_rtx);
6696 rld[r].reg_rtx = 0;
6697 if (spill_reg_order[regno] >= 0)
6698 clear_reload_reg_in_use (regno, rld[r].opnum, rld[r].when_needed,
6699 rld[r].mode);
6700 reload_spill_index[r] = -1;
6703 /* If SMALL_REGISTER_CLASSES is nonzero, we may not have merged two
6704 reloads of the same item for fear that we might not have enough reload
6705 registers. However, normally they will get the same reload register
6706 and hence actually need not be loaded twice.
6708 Here we check for the most common case of this phenomenon: when we have
6709 a number of reloads for the same object, each of which were allocated
6710 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
6711 reload, and is not modified in the insn itself. If we find such,
6712 merge all the reloads and set the resulting reload to RELOAD_OTHER.
6713 This will not increase the number of spill registers needed and will
6714 prevent redundant code. */
6716 static void
6717 merge_assigned_reloads (rtx insn)
6719 int i, j;
6721 /* Scan all the reloads looking for ones that only load values and
6722 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
6723 assigned and not modified by INSN. */
6725 for (i = 0; i < n_reloads; i++)
6727 int conflicting_input = 0;
6728 int max_input_address_opnum = -1;
6729 int min_conflicting_input_opnum = MAX_RECOG_OPERANDS;
6731 if (rld[i].in == 0 || rld[i].when_needed == RELOAD_OTHER
6732 || rld[i].out != 0 || rld[i].reg_rtx == 0
6733 || reg_set_p (rld[i].reg_rtx, insn))
6734 continue;
6736 /* Look at all other reloads. Ensure that the only use of this
6737 reload_reg_rtx is in a reload that just loads the same value
6738 as we do. Note that any secondary reloads must be of the identical
6739 class since the values, modes, and result registers are the
6740 same, so we need not do anything with any secondary reloads. */
6742 for (j = 0; j < n_reloads; j++)
6744 if (i == j || rld[j].reg_rtx == 0
6745 || ! reg_overlap_mentioned_p (rld[j].reg_rtx,
6746 rld[i].reg_rtx))
6747 continue;
6749 if (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6750 && rld[j].opnum > max_input_address_opnum)
6751 max_input_address_opnum = rld[j].opnum;
6753 /* If the reload regs aren't exactly the same (e.g, different modes)
6754 or if the values are different, we can't merge this reload.
6755 But if it is an input reload, we might still merge
6756 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_OTHER_ADDRESS reloads. */
6758 if (! rtx_equal_p (rld[i].reg_rtx, rld[j].reg_rtx)
6759 || rld[j].out != 0 || rld[j].in == 0
6760 || ! rtx_equal_p (rld[i].in, rld[j].in))
6762 if (rld[j].when_needed != RELOAD_FOR_INPUT
6763 || ((rld[i].when_needed != RELOAD_FOR_INPUT_ADDRESS
6764 || rld[i].opnum > rld[j].opnum)
6765 && rld[i].when_needed != RELOAD_FOR_OTHER_ADDRESS))
6766 break;
6767 conflicting_input = 1;
6768 if (min_conflicting_input_opnum > rld[j].opnum)
6769 min_conflicting_input_opnum = rld[j].opnum;
6773 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
6774 we, in fact, found any matching reloads. */
6776 if (j == n_reloads
6777 && max_input_address_opnum <= min_conflicting_input_opnum)
6779 gcc_assert (rld[i].when_needed != RELOAD_FOR_OUTPUT);
6781 for (j = 0; j < n_reloads; j++)
6782 if (i != j && rld[j].reg_rtx != 0
6783 && rtx_equal_p (rld[i].reg_rtx, rld[j].reg_rtx)
6784 && (! conflicting_input
6785 || rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6786 || rld[j].when_needed == RELOAD_FOR_OTHER_ADDRESS))
6788 rld[i].when_needed = RELOAD_OTHER;
6789 rld[j].in = 0;
6790 reload_spill_index[j] = -1;
6791 transfer_replacements (i, j);
6794 /* If this is now RELOAD_OTHER, look for any reloads that
6795 load parts of this operand and set them to
6796 RELOAD_FOR_OTHER_ADDRESS if they were for inputs,
6797 RELOAD_OTHER for outputs. Note that this test is
6798 equivalent to looking for reloads for this operand
6799 number.
6801 We must take special care with RELOAD_FOR_OUTPUT_ADDRESS;
6802 it may share registers with a RELOAD_FOR_INPUT, so we can
6803 not change it to RELOAD_FOR_OTHER_ADDRESS. We should
6804 never need to, since we do not modify RELOAD_FOR_OUTPUT.
6806 It is possible that the RELOAD_FOR_OPERAND_ADDRESS
6807 instruction is assigned the same register as the earlier
6808 RELOAD_FOR_OTHER_ADDRESS instruction. Merging these two
6809 instructions will cause the RELOAD_FOR_OTHER_ADDRESS
6810 instruction to be deleted later on. */
6812 if (rld[i].when_needed == RELOAD_OTHER)
6813 for (j = 0; j < n_reloads; j++)
6814 if (rld[j].in != 0
6815 && rld[j].when_needed != RELOAD_OTHER
6816 && rld[j].when_needed != RELOAD_FOR_OTHER_ADDRESS
6817 && rld[j].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
6818 && rld[j].when_needed != RELOAD_FOR_OPERAND_ADDRESS
6819 && (! conflicting_input
6820 || rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6821 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
6822 && reg_overlap_mentioned_for_reload_p (rld[j].in,
6823 rld[i].in))
6825 int k;
6827 rld[j].when_needed
6828 = ((rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6829 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
6830 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER);
6832 /* Check to see if we accidentally converted two
6833 reloads that use the same reload register with
6834 different inputs to the same type. If so, the
6835 resulting code won't work. */
6836 if (rld[j].reg_rtx)
6837 for (k = 0; k < j; k++)
6838 gcc_assert (rld[k].in == 0 || rld[k].reg_rtx == 0
6839 || rld[k].when_needed != rld[j].when_needed
6840 || !rtx_equal_p (rld[k].reg_rtx,
6841 rld[j].reg_rtx)
6842 || rtx_equal_p (rld[k].in,
6843 rld[j].in));
6849 /* These arrays are filled by emit_reload_insns and its subroutines. */
6850 static rtx input_reload_insns[MAX_RECOG_OPERANDS];
6851 static rtx other_input_address_reload_insns = 0;
6852 static rtx other_input_reload_insns = 0;
6853 static rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
6854 static rtx inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6855 static rtx output_reload_insns[MAX_RECOG_OPERANDS];
6856 static rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
6857 static rtx outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6858 static rtx operand_reload_insns = 0;
6859 static rtx other_operand_reload_insns = 0;
6860 static rtx other_output_reload_insns[MAX_RECOG_OPERANDS];
6862 /* Values to be put in spill_reg_store are put here first. */
6863 static rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
6864 static HARD_REG_SET reg_reloaded_died;
6866 /* Check if *RELOAD_REG is suitable as an intermediate or scratch register
6867 of class NEW_CLASS with mode NEW_MODE. Or alternatively, if alt_reload_reg
6868 is nonzero, if that is suitable. On success, change *RELOAD_REG to the
6869 adjusted register, and return true. Otherwise, return false. */
6870 static bool
6871 reload_adjust_reg_for_temp (rtx *reload_reg, rtx alt_reload_reg,
6872 enum reg_class new_class,
6873 enum machine_mode new_mode)
6876 rtx reg;
6878 for (reg = *reload_reg; reg; reg = alt_reload_reg, alt_reload_reg = 0)
6880 unsigned regno = REGNO (reg);
6882 if (!TEST_HARD_REG_BIT (reg_class_contents[(int) new_class], regno))
6883 continue;
6884 if (GET_MODE (reg) != new_mode)
6886 if (!HARD_REGNO_MODE_OK (regno, new_mode))
6887 continue;
6888 if (hard_regno_nregs[regno][new_mode]
6889 > hard_regno_nregs[regno][GET_MODE (reg)])
6890 continue;
6891 reg = reload_adjust_reg_for_mode (reg, new_mode);
6893 *reload_reg = reg;
6894 return true;
6896 return false;
6899 /* Check if *RELOAD_REG is suitable as a scratch register for the reload
6900 pattern with insn_code ICODE, or alternatively, if alt_reload_reg is
6901 nonzero, if that is suitable. On success, change *RELOAD_REG to the
6902 adjusted register, and return true. Otherwise, return false. */
6903 static bool
6904 reload_adjust_reg_for_icode (rtx *reload_reg, rtx alt_reload_reg,
6905 enum insn_code icode)
6908 enum reg_class new_class = scratch_reload_class (icode);
6909 enum machine_mode new_mode = insn_data[(int) icode].operand[2].mode;
6911 return reload_adjust_reg_for_temp (reload_reg, alt_reload_reg,
6912 new_class, new_mode);
6915 /* Generate insns to perform reload RL, which is for the insn in CHAIN and
6916 has the number J. OLD contains the value to be used as input. */
6918 static void
6919 emit_input_reload_insns (struct insn_chain *chain, struct reload *rl,
6920 rtx old, int j)
6922 rtx insn = chain->insn;
6923 rtx reloadreg;
6924 rtx oldequiv_reg = 0;
6925 rtx oldequiv = 0;
6926 int special = 0;
6927 enum machine_mode mode;
6928 rtx *where;
6930 /* delete_output_reload is only invoked properly if old contains
6931 the original pseudo register. Since this is replaced with a
6932 hard reg when RELOAD_OVERRIDE_IN is set, see if we can
6933 find the pseudo in RELOAD_IN_REG. */
6934 if (reload_override_in[j]
6935 && REG_P (rl->in_reg))
6937 oldequiv = old;
6938 old = rl->in_reg;
6940 if (oldequiv == 0)
6941 oldequiv = old;
6942 else if (REG_P (oldequiv))
6943 oldequiv_reg = oldequiv;
6944 else if (GET_CODE (oldequiv) == SUBREG)
6945 oldequiv_reg = SUBREG_REG (oldequiv);
6947 reloadreg = reload_reg_rtx_for_input[j];
6948 mode = GET_MODE (reloadreg);
6950 /* If we are reloading from a register that was recently stored in
6951 with an output-reload, see if we can prove there was
6952 actually no need to store the old value in it. */
6954 if (optimize && REG_P (oldequiv)
6955 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6956 && spill_reg_store[REGNO (oldequiv)]
6957 && REG_P (old)
6958 && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (oldequiv)])
6959 || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
6960 rl->out_reg)))
6961 delete_output_reload (insn, j, REGNO (oldequiv), reloadreg);
6963 /* Encapsulate OLDEQUIV into the reload mode, then load RELOADREG from
6964 OLDEQUIV. */
6966 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
6967 oldequiv = SUBREG_REG (oldequiv);
6968 if (GET_MODE (oldequiv) != VOIDmode
6969 && mode != GET_MODE (oldequiv))
6970 oldequiv = gen_lowpart_SUBREG (mode, oldequiv);
6972 /* Switch to the right place to emit the reload insns. */
6973 switch (rl->when_needed)
6975 case RELOAD_OTHER:
6976 where = &other_input_reload_insns;
6977 break;
6978 case RELOAD_FOR_INPUT:
6979 where = &input_reload_insns[rl->opnum];
6980 break;
6981 case RELOAD_FOR_INPUT_ADDRESS:
6982 where = &input_address_reload_insns[rl->opnum];
6983 break;
6984 case RELOAD_FOR_INPADDR_ADDRESS:
6985 where = &inpaddr_address_reload_insns[rl->opnum];
6986 break;
6987 case RELOAD_FOR_OUTPUT_ADDRESS:
6988 where = &output_address_reload_insns[rl->opnum];
6989 break;
6990 case RELOAD_FOR_OUTADDR_ADDRESS:
6991 where = &outaddr_address_reload_insns[rl->opnum];
6992 break;
6993 case RELOAD_FOR_OPERAND_ADDRESS:
6994 where = &operand_reload_insns;
6995 break;
6996 case RELOAD_FOR_OPADDR_ADDR:
6997 where = &other_operand_reload_insns;
6998 break;
6999 case RELOAD_FOR_OTHER_ADDRESS:
7000 where = &other_input_address_reload_insns;
7001 break;
7002 default:
7003 gcc_unreachable ();
7006 push_to_sequence (*where);
7008 /* Auto-increment addresses must be reloaded in a special way. */
7009 if (rl->out && ! rl->out_reg)
7011 /* We are not going to bother supporting the case where a
7012 incremented register can't be copied directly from
7013 OLDEQUIV since this seems highly unlikely. */
7014 gcc_assert (rl->secondary_in_reload < 0);
7016 if (reload_inherited[j])
7017 oldequiv = reloadreg;
7019 old = XEXP (rl->in_reg, 0);
7021 if (optimize && REG_P (oldequiv)
7022 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
7023 && spill_reg_store[REGNO (oldequiv)]
7024 && REG_P (old)
7025 && (dead_or_set_p (insn,
7026 spill_reg_stored_to[REGNO (oldequiv)])
7027 || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
7028 old)))
7029 delete_output_reload (insn, j, REGNO (oldequiv), reloadreg);
7031 /* Prevent normal processing of this reload. */
7032 special = 1;
7033 /* Output a special code sequence for this case. */
7034 new_spill_reg_store[REGNO (reloadreg)]
7035 = inc_for_reload (reloadreg, oldequiv, rl->out,
7036 rl->inc);
7039 /* If we are reloading a pseudo-register that was set by the previous
7040 insn, see if we can get rid of that pseudo-register entirely
7041 by redirecting the previous insn into our reload register. */
7043 else if (optimize && REG_P (old)
7044 && REGNO (old) >= FIRST_PSEUDO_REGISTER
7045 && dead_or_set_p (insn, old)
7046 /* This is unsafe if some other reload
7047 uses the same reg first. */
7048 && ! conflicts_with_override (reloadreg)
7049 && free_for_value_p (REGNO (reloadreg), rl->mode, rl->opnum,
7050 rl->when_needed, old, rl->out, j, 0))
7052 rtx temp = PREV_INSN (insn);
7053 while (temp && NOTE_P (temp))
7054 temp = PREV_INSN (temp);
7055 if (temp
7056 && NONJUMP_INSN_P (temp)
7057 && GET_CODE (PATTERN (temp)) == SET
7058 && SET_DEST (PATTERN (temp)) == old
7059 /* Make sure we can access insn_operand_constraint. */
7060 && asm_noperands (PATTERN (temp)) < 0
7061 /* This is unsafe if operand occurs more than once in current
7062 insn. Perhaps some occurrences aren't reloaded. */
7063 && count_occurrences (PATTERN (insn), old, 0) == 1)
7065 rtx old = SET_DEST (PATTERN (temp));
7066 /* Store into the reload register instead of the pseudo. */
7067 SET_DEST (PATTERN (temp)) = reloadreg;
7069 /* Verify that resulting insn is valid. */
7070 extract_insn (temp);
7071 if (constrain_operands (1))
7073 /* If the previous insn is an output reload, the source is
7074 a reload register, and its spill_reg_store entry will
7075 contain the previous destination. This is now
7076 invalid. */
7077 if (REG_P (SET_SRC (PATTERN (temp)))
7078 && REGNO (SET_SRC (PATTERN (temp))) < FIRST_PSEUDO_REGISTER)
7080 spill_reg_store[REGNO (SET_SRC (PATTERN (temp)))] = 0;
7081 spill_reg_stored_to[REGNO (SET_SRC (PATTERN (temp)))] = 0;
7084 /* If these are the only uses of the pseudo reg,
7085 pretend for GDB it lives in the reload reg we used. */
7086 if (REG_N_DEATHS (REGNO (old)) == 1
7087 && REG_N_SETS (REGNO (old)) == 1)
7089 reg_renumber[REGNO (old)] = REGNO (reloadreg);
7090 if (flag_ira && optimize)
7091 /* Inform IRA about the change. */
7092 mark_allocation_change (REGNO (old));
7093 alter_reg (REGNO (old), -1, false);
7095 special = 1;
7097 else
7099 SET_DEST (PATTERN (temp)) = old;
7104 /* We can't do that, so output an insn to load RELOADREG. */
7106 /* If we have a secondary reload, pick up the secondary register
7107 and icode, if any. If OLDEQUIV and OLD are different or
7108 if this is an in-out reload, recompute whether or not we
7109 still need a secondary register and what the icode should
7110 be. If we still need a secondary register and the class or
7111 icode is different, go back to reloading from OLD if using
7112 OLDEQUIV means that we got the wrong type of register. We
7113 cannot have different class or icode due to an in-out reload
7114 because we don't make such reloads when both the input and
7115 output need secondary reload registers. */
7117 if (! special && rl->secondary_in_reload >= 0)
7119 rtx second_reload_reg = 0;
7120 rtx third_reload_reg = 0;
7121 int secondary_reload = rl->secondary_in_reload;
7122 rtx real_oldequiv = oldequiv;
7123 rtx real_old = old;
7124 rtx tmp;
7125 enum insn_code icode;
7126 enum insn_code tertiary_icode = CODE_FOR_nothing;
7128 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
7129 and similarly for OLD.
7130 See comments in get_secondary_reload in reload.c. */
7131 /* If it is a pseudo that cannot be replaced with its
7132 equivalent MEM, we must fall back to reload_in, which
7133 will have all the necessary substitutions registered.
7134 Likewise for a pseudo that can't be replaced with its
7135 equivalent constant.
7137 Take extra care for subregs of such pseudos. Note that
7138 we cannot use reg_equiv_mem in this case because it is
7139 not in the right mode. */
7141 tmp = oldequiv;
7142 if (GET_CODE (tmp) == SUBREG)
7143 tmp = SUBREG_REG (tmp);
7144 if (REG_P (tmp)
7145 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
7146 && (reg_equiv_memory_loc[REGNO (tmp)] != 0
7147 || reg_equiv_constant[REGNO (tmp)] != 0))
7149 if (! reg_equiv_mem[REGNO (tmp)]
7150 || num_not_at_initial_offset
7151 || GET_CODE (oldequiv) == SUBREG)
7152 real_oldequiv = rl->in;
7153 else
7154 real_oldequiv = reg_equiv_mem[REGNO (tmp)];
7157 tmp = old;
7158 if (GET_CODE (tmp) == SUBREG)
7159 tmp = SUBREG_REG (tmp);
7160 if (REG_P (tmp)
7161 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
7162 && (reg_equiv_memory_loc[REGNO (tmp)] != 0
7163 || reg_equiv_constant[REGNO (tmp)] != 0))
7165 if (! reg_equiv_mem[REGNO (tmp)]
7166 || num_not_at_initial_offset
7167 || GET_CODE (old) == SUBREG)
7168 real_old = rl->in;
7169 else
7170 real_old = reg_equiv_mem[REGNO (tmp)];
7173 second_reload_reg = rld[secondary_reload].reg_rtx;
7174 if (rld[secondary_reload].secondary_in_reload >= 0)
7176 int tertiary_reload = rld[secondary_reload].secondary_in_reload;
7178 third_reload_reg = rld[tertiary_reload].reg_rtx;
7179 tertiary_icode = rld[secondary_reload].secondary_in_icode;
7180 /* We'd have to add more code for quartary reloads. */
7181 gcc_assert (rld[tertiary_reload].secondary_in_reload < 0);
7183 icode = rl->secondary_in_icode;
7185 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
7186 || (rl->in != 0 && rl->out != 0))
7188 secondary_reload_info sri, sri2;
7189 enum reg_class new_class, new_t_class;
7191 sri.icode = CODE_FOR_nothing;
7192 sri.prev_sri = NULL;
7193 new_class = targetm.secondary_reload (1, real_oldequiv, rl->class,
7194 mode, &sri);
7196 if (new_class == NO_REGS && sri.icode == CODE_FOR_nothing)
7197 second_reload_reg = 0;
7198 else if (new_class == NO_REGS)
7200 if (reload_adjust_reg_for_icode (&second_reload_reg,
7201 third_reload_reg, sri.icode))
7202 icode = sri.icode, third_reload_reg = 0;
7203 else
7204 oldequiv = old, real_oldequiv = real_old;
7206 else if (sri.icode != CODE_FOR_nothing)
7207 /* We currently lack a way to express this in reloads. */
7208 gcc_unreachable ();
7209 else
7211 sri2.icode = CODE_FOR_nothing;
7212 sri2.prev_sri = &sri;
7213 new_t_class = targetm.secondary_reload (1, real_oldequiv,
7214 new_class, mode, &sri);
7215 if (new_t_class == NO_REGS && sri2.icode == CODE_FOR_nothing)
7217 if (reload_adjust_reg_for_temp (&second_reload_reg,
7218 third_reload_reg,
7219 new_class, mode))
7220 third_reload_reg = 0, tertiary_icode = sri2.icode;
7221 else
7222 oldequiv = old, real_oldequiv = real_old;
7224 else if (new_t_class == NO_REGS && sri2.icode != CODE_FOR_nothing)
7226 rtx intermediate = second_reload_reg;
7228 if (reload_adjust_reg_for_temp (&intermediate, NULL,
7229 new_class, mode)
7230 && reload_adjust_reg_for_icode (&third_reload_reg, NULL,
7231 sri2.icode))
7233 second_reload_reg = intermediate;
7234 tertiary_icode = sri2.icode;
7236 else
7237 oldequiv = old, real_oldequiv = real_old;
7239 else if (new_t_class != NO_REGS && sri2.icode == CODE_FOR_nothing)
7241 rtx intermediate = second_reload_reg;
7243 if (reload_adjust_reg_for_temp (&intermediate, NULL,
7244 new_class, mode)
7245 && reload_adjust_reg_for_temp (&third_reload_reg, NULL,
7246 new_t_class, mode))
7248 second_reload_reg = intermediate;
7249 tertiary_icode = sri2.icode;
7251 else
7252 oldequiv = old, real_oldequiv = real_old;
7254 else
7255 /* This could be handled more intelligently too. */
7256 oldequiv = old, real_oldequiv = real_old;
7260 /* If we still need a secondary reload register, check
7261 to see if it is being used as a scratch or intermediate
7262 register and generate code appropriately. If we need
7263 a scratch register, use REAL_OLDEQUIV since the form of
7264 the insn may depend on the actual address if it is
7265 a MEM. */
7267 if (second_reload_reg)
7269 if (icode != CODE_FOR_nothing)
7271 /* We'd have to add extra code to handle this case. */
7272 gcc_assert (!third_reload_reg);
7274 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
7275 second_reload_reg));
7276 special = 1;
7278 else
7280 /* See if we need a scratch register to load the
7281 intermediate register (a tertiary reload). */
7282 if (tertiary_icode != CODE_FOR_nothing)
7284 emit_insn ((GEN_FCN (tertiary_icode)
7285 (second_reload_reg, real_oldequiv,
7286 third_reload_reg)));
7288 else if (third_reload_reg)
7290 gen_reload (third_reload_reg, real_oldequiv,
7291 rl->opnum,
7292 rl->when_needed);
7293 gen_reload (second_reload_reg, third_reload_reg,
7294 rl->opnum,
7295 rl->when_needed);
7297 else
7298 gen_reload (second_reload_reg, real_oldequiv,
7299 rl->opnum,
7300 rl->when_needed);
7302 oldequiv = second_reload_reg;
7307 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
7309 rtx real_oldequiv = oldequiv;
7311 if ((REG_P (oldequiv)
7312 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
7313 && (reg_equiv_memory_loc[REGNO (oldequiv)] != 0
7314 || reg_equiv_constant[REGNO (oldequiv)] != 0))
7315 || (GET_CODE (oldequiv) == SUBREG
7316 && REG_P (SUBREG_REG (oldequiv))
7317 && (REGNO (SUBREG_REG (oldequiv))
7318 >= FIRST_PSEUDO_REGISTER)
7319 && ((reg_equiv_memory_loc
7320 [REGNO (SUBREG_REG (oldequiv))] != 0)
7321 || (reg_equiv_constant
7322 [REGNO (SUBREG_REG (oldequiv))] != 0)))
7323 || (CONSTANT_P (oldequiv)
7324 && (PREFERRED_RELOAD_CLASS (oldequiv,
7325 REGNO_REG_CLASS (REGNO (reloadreg)))
7326 == NO_REGS)))
7327 real_oldequiv = rl->in;
7328 gen_reload (reloadreg, real_oldequiv, rl->opnum,
7329 rl->when_needed);
7332 if (flag_non_call_exceptions)
7333 copy_eh_notes (insn, get_insns ());
7335 /* End this sequence. */
7336 *where = get_insns ();
7337 end_sequence ();
7339 /* Update reload_override_in so that delete_address_reloads_1
7340 can see the actual register usage. */
7341 if (oldequiv_reg)
7342 reload_override_in[j] = oldequiv;
7345 /* Generate insns to for the output reload RL, which is for the insn described
7346 by CHAIN and has the number J. */
7347 static void
7348 emit_output_reload_insns (struct insn_chain *chain, struct reload *rl,
7349 int j)
7351 rtx reloadreg;
7352 rtx insn = chain->insn;
7353 int special = 0;
7354 rtx old = rl->out;
7355 enum machine_mode mode;
7356 rtx p;
7357 rtx rl_reg_rtx;
7359 if (rl->when_needed == RELOAD_OTHER)
7360 start_sequence ();
7361 else
7362 push_to_sequence (output_reload_insns[rl->opnum]);
7364 rl_reg_rtx = reload_reg_rtx_for_output[j];
7365 mode = GET_MODE (rl_reg_rtx);
7367 reloadreg = rl_reg_rtx;
7369 /* If we need two reload regs, set RELOADREG to the intermediate
7370 one, since it will be stored into OLD. We might need a secondary
7371 register only for an input reload, so check again here. */
7373 if (rl->secondary_out_reload >= 0)
7375 rtx real_old = old;
7376 int secondary_reload = rl->secondary_out_reload;
7377 int tertiary_reload = rld[secondary_reload].secondary_out_reload;
7379 if (REG_P (old) && REGNO (old) >= FIRST_PSEUDO_REGISTER
7380 && reg_equiv_mem[REGNO (old)] != 0)
7381 real_old = reg_equiv_mem[REGNO (old)];
7383 if (secondary_reload_class (0, rl->class, mode, real_old) != NO_REGS)
7385 rtx second_reloadreg = reloadreg;
7386 reloadreg = rld[secondary_reload].reg_rtx;
7388 /* See if RELOADREG is to be used as a scratch register
7389 or as an intermediate register. */
7390 if (rl->secondary_out_icode != CODE_FOR_nothing)
7392 /* We'd have to add extra code to handle this case. */
7393 gcc_assert (tertiary_reload < 0);
7395 emit_insn ((GEN_FCN (rl->secondary_out_icode)
7396 (real_old, second_reloadreg, reloadreg)));
7397 special = 1;
7399 else
7401 /* See if we need both a scratch and intermediate reload
7402 register. */
7404 enum insn_code tertiary_icode
7405 = rld[secondary_reload].secondary_out_icode;
7407 /* We'd have to add more code for quartary reloads. */
7408 gcc_assert (tertiary_reload < 0
7409 || rld[tertiary_reload].secondary_out_reload < 0);
7411 if (GET_MODE (reloadreg) != mode)
7412 reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);
7414 if (tertiary_icode != CODE_FOR_nothing)
7416 rtx third_reloadreg = rld[tertiary_reload].reg_rtx;
7417 rtx tem;
7419 /* Copy primary reload reg to secondary reload reg.
7420 (Note that these have been swapped above, then
7421 secondary reload reg to OLD using our insn.) */
7423 /* If REAL_OLD is a paradoxical SUBREG, remove it
7424 and try to put the opposite SUBREG on
7425 RELOADREG. */
7426 if (GET_CODE (real_old) == SUBREG
7427 && (GET_MODE_SIZE (GET_MODE (real_old))
7428 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
7429 && 0 != (tem = gen_lowpart_common
7430 (GET_MODE (SUBREG_REG (real_old)),
7431 reloadreg)))
7432 real_old = SUBREG_REG (real_old), reloadreg = tem;
7434 gen_reload (reloadreg, second_reloadreg,
7435 rl->opnum, rl->when_needed);
7436 emit_insn ((GEN_FCN (tertiary_icode)
7437 (real_old, reloadreg, third_reloadreg)));
7438 special = 1;
7441 else
7443 /* Copy between the reload regs here and then to
7444 OUT later. */
7446 gen_reload (reloadreg, second_reloadreg,
7447 rl->opnum, rl->when_needed);
7448 if (tertiary_reload >= 0)
7450 rtx third_reloadreg = rld[tertiary_reload].reg_rtx;
7452 gen_reload (third_reloadreg, reloadreg,
7453 rl->opnum, rl->when_needed);
7454 reloadreg = third_reloadreg;
7461 /* Output the last reload insn. */
7462 if (! special)
7464 rtx set;
7466 /* Don't output the last reload if OLD is not the dest of
7467 INSN and is in the src and is clobbered by INSN. */
7468 if (! flag_expensive_optimizations
7469 || !REG_P (old)
7470 || !(set = single_set (insn))
7471 || rtx_equal_p (old, SET_DEST (set))
7472 || !reg_mentioned_p (old, SET_SRC (set))
7473 || !((REGNO (old) < FIRST_PSEUDO_REGISTER)
7474 && regno_clobbered_p (REGNO (old), insn, rl->mode, 0)))
7475 gen_reload (old, reloadreg, rl->opnum,
7476 rl->when_needed);
7479 /* Look at all insns we emitted, just to be safe. */
7480 for (p = get_insns (); p; p = NEXT_INSN (p))
7481 if (INSN_P (p))
7483 rtx pat = PATTERN (p);
7485 /* If this output reload doesn't come from a spill reg,
7486 clear any memory of reloaded copies of the pseudo reg.
7487 If this output reload comes from a spill reg,
7488 reg_has_output_reload will make this do nothing. */
7489 note_stores (pat, forget_old_reloads_1, NULL);
7491 if (reg_mentioned_p (rl_reg_rtx, pat))
7493 rtx set = single_set (insn);
7494 if (reload_spill_index[j] < 0
7495 && set
7496 && SET_SRC (set) == rl_reg_rtx)
7498 int src = REGNO (SET_SRC (set));
7500 reload_spill_index[j] = src;
7501 SET_HARD_REG_BIT (reg_is_output_reload, src);
7502 if (find_regno_note (insn, REG_DEAD, src))
7503 SET_HARD_REG_BIT (reg_reloaded_died, src);
7505 if (HARD_REGISTER_P (rl_reg_rtx))
7507 int s = rl->secondary_out_reload;
7508 set = single_set (p);
7509 /* If this reload copies only to the secondary reload
7510 register, the secondary reload does the actual
7511 store. */
7512 if (s >= 0 && set == NULL_RTX)
7513 /* We can't tell what function the secondary reload
7514 has and where the actual store to the pseudo is
7515 made; leave new_spill_reg_store alone. */
7517 else if (s >= 0
7518 && SET_SRC (set) == rl_reg_rtx
7519 && SET_DEST (set) == rld[s].reg_rtx)
7521 /* Usually the next instruction will be the
7522 secondary reload insn; if we can confirm
7523 that it is, setting new_spill_reg_store to
7524 that insn will allow an extra optimization. */
7525 rtx s_reg = rld[s].reg_rtx;
7526 rtx next = NEXT_INSN (p);
7527 rld[s].out = rl->out;
7528 rld[s].out_reg = rl->out_reg;
7529 set = single_set (next);
7530 if (set && SET_SRC (set) == s_reg
7531 && ! new_spill_reg_store[REGNO (s_reg)])
7533 SET_HARD_REG_BIT (reg_is_output_reload,
7534 REGNO (s_reg));
7535 new_spill_reg_store[REGNO (s_reg)] = next;
7538 else
7539 new_spill_reg_store[REGNO (rl_reg_rtx)] = p;
7544 if (rl->when_needed == RELOAD_OTHER)
7546 emit_insn (other_output_reload_insns[rl->opnum]);
7547 other_output_reload_insns[rl->opnum] = get_insns ();
7549 else
7550 output_reload_insns[rl->opnum] = get_insns ();
7552 if (flag_non_call_exceptions)
7553 copy_eh_notes (insn, get_insns ());
7555 end_sequence ();
7558 /* Do input reloading for reload RL, which is for the insn described by CHAIN
7559 and has the number J. */
7560 static void
7561 do_input_reload (struct insn_chain *chain, struct reload *rl, int j)
7563 rtx insn = chain->insn;
7564 rtx old = (rl->in && MEM_P (rl->in)
7565 ? rl->in_reg : rl->in);
7566 rtx reg_rtx = rl->reg_rtx;
7568 if (old && reg_rtx)
7570 enum machine_mode mode;
7572 /* Determine the mode to reload in.
7573 This is very tricky because we have three to choose from.
7574 There is the mode the insn operand wants (rl->inmode).
7575 There is the mode of the reload register RELOADREG.
7576 There is the intrinsic mode of the operand, which we could find
7577 by stripping some SUBREGs.
7578 It turns out that RELOADREG's mode is irrelevant:
7579 we can change that arbitrarily.
7581 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
7582 then the reload reg may not support QImode moves, so use SImode.
7583 If foo is in memory due to spilling a pseudo reg, this is safe,
7584 because the QImode value is in the least significant part of a
7585 slot big enough for a SImode. If foo is some other sort of
7586 memory reference, then it is impossible to reload this case,
7587 so previous passes had better make sure this never happens.
7589 Then consider a one-word union which has SImode and one of its
7590 members is a float, being fetched as (SUBREG:SF union:SI).
7591 We must fetch that as SFmode because we could be loading into
7592 a float-only register. In this case OLD's mode is correct.
7594 Consider an immediate integer: it has VOIDmode. Here we need
7595 to get a mode from something else.
7597 In some cases, there is a fourth mode, the operand's
7598 containing mode. If the insn specifies a containing mode for
7599 this operand, it overrides all others.
7601 I am not sure whether the algorithm here is always right,
7602 but it does the right things in those cases. */
7604 mode = GET_MODE (old);
7605 if (mode == VOIDmode)
7606 mode = rl->inmode;
7608 /* We cannot use gen_lowpart_common since it can do the wrong thing
7609 when REG_RTX has a multi-word mode. Note that REG_RTX must
7610 always be a REG here. */
7611 if (GET_MODE (reg_rtx) != mode)
7612 reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
7614 reload_reg_rtx_for_input[j] = reg_rtx;
7616 if (old != 0
7617 /* AUTO_INC reloads need to be handled even if inherited. We got an
7618 AUTO_INC reload if reload_out is set but reload_out_reg isn't. */
7619 && (! reload_inherited[j] || (rl->out && ! rl->out_reg))
7620 && ! rtx_equal_p (reg_rtx, old)
7621 && reg_rtx != 0)
7622 emit_input_reload_insns (chain, rld + j, old, j);
7624 /* When inheriting a wider reload, we have a MEM in rl->in,
7625 e.g. inheriting a SImode output reload for
7626 (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10))) */
7627 if (optimize && reload_inherited[j] && rl->in
7628 && MEM_P (rl->in)
7629 && MEM_P (rl->in_reg)
7630 && reload_spill_index[j] >= 0
7631 && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
7632 rl->in = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];
7634 /* If we are reloading a register that was recently stored in with an
7635 output-reload, see if we can prove there was
7636 actually no need to store the old value in it. */
7638 if (optimize
7639 && (reload_inherited[j] || reload_override_in[j])
7640 && reg_rtx
7641 && REG_P (reg_rtx)
7642 && spill_reg_store[REGNO (reg_rtx)] != 0
7643 #if 0
7644 /* There doesn't seem to be any reason to restrict this to pseudos
7645 and doing so loses in the case where we are copying from a
7646 register of the wrong class. */
7647 && !HARD_REGISTER_P (spill_reg_stored_to[REGNO (reg_rtx)])
7648 #endif
7649 /* The insn might have already some references to stackslots
7650 replaced by MEMs, while reload_out_reg still names the
7651 original pseudo. */
7652 && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (reg_rtx)])
7653 || rtx_equal_p (spill_reg_stored_to[REGNO (reg_rtx)], rl->out_reg)))
7654 delete_output_reload (insn, j, REGNO (reg_rtx), reg_rtx);
7657 /* Do output reloading for reload RL, which is for the insn described by
7658 CHAIN and has the number J.
7659 ??? At some point we need to support handling output reloads of
7660 JUMP_INSNs or insns that set cc0. */
7661 static void
7662 do_output_reload (struct insn_chain *chain, struct reload *rl, int j)
7664 rtx note, old;
7665 rtx insn = chain->insn;
7666 /* If this is an output reload that stores something that is
7667 not loaded in this same reload, see if we can eliminate a previous
7668 store. */
7669 rtx pseudo = rl->out_reg;
7670 rtx reg_rtx = rl->reg_rtx;
7672 if (rl->out && reg_rtx)
7674 enum machine_mode mode;
7676 /* Determine the mode to reload in.
7677 See comments above (for input reloading). */
7678 mode = GET_MODE (rl->out);
7679 if (mode == VOIDmode)
7681 /* VOIDmode should never happen for an output. */
7682 if (asm_noperands (PATTERN (insn)) < 0)
7683 /* It's the compiler's fault. */
7684 fatal_insn ("VOIDmode on an output", insn);
7685 error_for_asm (insn, "output operand is constant in %<asm%>");
7686 /* Prevent crash--use something we know is valid. */
7687 mode = word_mode;
7688 rl->out = gen_rtx_REG (mode, REGNO (reg_rtx));
7690 if (GET_MODE (reg_rtx) != mode)
7691 reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
7693 reload_reg_rtx_for_output[j] = reg_rtx;
7695 if (pseudo
7696 && optimize
7697 && REG_P (pseudo)
7698 && ! rtx_equal_p (rl->in_reg, pseudo)
7699 && REGNO (pseudo) >= FIRST_PSEUDO_REGISTER
7700 && reg_last_reload_reg[REGNO (pseudo)])
7702 int pseudo_no = REGNO (pseudo);
7703 int last_regno = REGNO (reg_last_reload_reg[pseudo_no]);
7705 /* We don't need to test full validity of last_regno for
7706 inherit here; we only want to know if the store actually
7707 matches the pseudo. */
7708 if (TEST_HARD_REG_BIT (reg_reloaded_valid, last_regno)
7709 && reg_reloaded_contents[last_regno] == pseudo_no
7710 && spill_reg_store[last_regno]
7711 && rtx_equal_p (pseudo, spill_reg_stored_to[last_regno]))
7712 delete_output_reload (insn, j, last_regno, reg_rtx);
7715 old = rl->out_reg;
7716 if (old == 0
7717 || reg_rtx == 0
7718 || rtx_equal_p (old, reg_rtx))
7719 return;
7721 /* An output operand that dies right away does need a reload,
7722 but need not be copied from it. Show the new location in the
7723 REG_UNUSED note. */
7724 if ((REG_P (old) || GET_CODE (old) == SCRATCH)
7725 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
7727 XEXP (note, 0) = reg_rtx;
7728 return;
7730 /* Likewise for a SUBREG of an operand that dies. */
7731 else if (GET_CODE (old) == SUBREG
7732 && REG_P (SUBREG_REG (old))
7733 && 0 != (note = find_reg_note (insn, REG_UNUSED,
7734 SUBREG_REG (old))))
7736 XEXP (note, 0) = gen_lowpart_common (GET_MODE (old), reg_rtx);
7737 return;
7739 else if (GET_CODE (old) == SCRATCH)
7740 /* If we aren't optimizing, there won't be a REG_UNUSED note,
7741 but we don't want to make an output reload. */
7742 return;
7744 /* If is a JUMP_INSN, we can't support output reloads yet. */
7745 gcc_assert (NONJUMP_INSN_P (insn));
7747 emit_output_reload_insns (chain, rld + j, j);
7750 /* A reload copies values of MODE from register SRC to register DEST.
7751 Return true if it can be treated for inheritance purposes like a
7752 group of reloads, each one reloading a single hard register. The
7753 caller has already checked that (reg:MODE SRC) and (reg:MODE DEST)
7754 occupy the same number of hard registers. */
7756 static bool
7757 inherit_piecemeal_p (int dest ATTRIBUTE_UNUSED,
7758 int src ATTRIBUTE_UNUSED,
7759 enum machine_mode mode ATTRIBUTE_UNUSED)
7761 #ifdef CANNOT_CHANGE_MODE_CLASS
7762 return (!REG_CANNOT_CHANGE_MODE_P (dest, mode, reg_raw_mode[dest])
7763 && !REG_CANNOT_CHANGE_MODE_P (src, mode, reg_raw_mode[src]));
7764 #else
7765 return true;
7766 #endif
7769 /* Output insns to reload values in and out of the chosen reload regs. */
7771 static void
7772 emit_reload_insns (struct insn_chain *chain)
7774 rtx insn = chain->insn;
7776 int j;
7778 CLEAR_HARD_REG_SET (reg_reloaded_died);
7780 for (j = 0; j < reload_n_operands; j++)
7781 input_reload_insns[j] = input_address_reload_insns[j]
7782 = inpaddr_address_reload_insns[j]
7783 = output_reload_insns[j] = output_address_reload_insns[j]
7784 = outaddr_address_reload_insns[j]
7785 = other_output_reload_insns[j] = 0;
7786 other_input_address_reload_insns = 0;
7787 other_input_reload_insns = 0;
7788 operand_reload_insns = 0;
7789 other_operand_reload_insns = 0;
7791 /* Dump reloads into the dump file. */
7792 if (dump_file)
7794 fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
7795 debug_reload_to_stream (dump_file);
7798 /* Now output the instructions to copy the data into and out of the
7799 reload registers. Do these in the order that the reloads were reported,
7800 since reloads of base and index registers precede reloads of operands
7801 and the operands may need the base and index registers reloaded. */
7803 for (j = 0; j < n_reloads; j++)
7805 if (rld[j].reg_rtx && HARD_REGISTER_P (rld[j].reg_rtx))
7807 unsigned int i;
7809 for (i = REGNO (rld[j].reg_rtx); i < END_REGNO (rld[j].reg_rtx); i++)
7810 new_spill_reg_store[i] = 0;
7813 do_input_reload (chain, rld + j, j);
7814 do_output_reload (chain, rld + j, j);
7817 /* Now write all the insns we made for reloads in the order expected by
7818 the allocation functions. Prior to the insn being reloaded, we write
7819 the following reloads:
7821 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
7823 RELOAD_OTHER reloads.
7825 For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
7826 by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
7827 RELOAD_FOR_INPUT reload for the operand.
7829 RELOAD_FOR_OPADDR_ADDRS reloads.
7831 RELOAD_FOR_OPERAND_ADDRESS reloads.
7833 After the insn being reloaded, we write the following:
7835 For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
7836 by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
7837 RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
7838 reloads for the operand. The RELOAD_OTHER output reloads are
7839 output in descending order by reload number. */
7841 emit_insn_before (other_input_address_reload_insns, insn);
7842 emit_insn_before (other_input_reload_insns, insn);
7844 for (j = 0; j < reload_n_operands; j++)
7846 emit_insn_before (inpaddr_address_reload_insns[j], insn);
7847 emit_insn_before (input_address_reload_insns[j], insn);
7848 emit_insn_before (input_reload_insns[j], insn);
7851 emit_insn_before (other_operand_reload_insns, insn);
7852 emit_insn_before (operand_reload_insns, insn);
7854 for (j = 0; j < reload_n_operands; j++)
7856 rtx x = emit_insn_after (outaddr_address_reload_insns[j], insn);
7857 x = emit_insn_after (output_address_reload_insns[j], x);
7858 x = emit_insn_after (output_reload_insns[j], x);
7859 emit_insn_after (other_output_reload_insns[j], x);
7862 /* For all the spill regs newly reloaded in this instruction,
7863 record what they were reloaded from, so subsequent instructions
7864 can inherit the reloads.
7866 Update spill_reg_store for the reloads of this insn.
7867 Copy the elements that were updated in the loop above. */
7869 for (j = 0; j < n_reloads; j++)
7871 int r = reload_order[j];
7872 int i = reload_spill_index[r];
7874 /* If this is a non-inherited input reload from a pseudo, we must
7875 clear any memory of a previous store to the same pseudo. Only do
7876 something if there will not be an output reload for the pseudo
7877 being reloaded. */
7878 if (rld[r].in_reg != 0
7879 && ! (reload_inherited[r] || reload_override_in[r]))
7881 rtx reg = rld[r].in_reg;
7883 if (GET_CODE (reg) == SUBREG)
7884 reg = SUBREG_REG (reg);
7886 if (REG_P (reg)
7887 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
7888 && !REGNO_REG_SET_P (&reg_has_output_reload, REGNO (reg)))
7890 int nregno = REGNO (reg);
7892 if (reg_last_reload_reg[nregno])
7894 int last_regno = REGNO (reg_last_reload_reg[nregno]);
7896 if (reg_reloaded_contents[last_regno] == nregno)
7897 spill_reg_store[last_regno] = 0;
7902 /* I is nonneg if this reload used a register.
7903 If rld[r].reg_rtx is 0, this is an optional reload
7904 that we opted to ignore. */
7906 if (i >= 0 && rld[r].reg_rtx != 0)
7908 int nr = hard_regno_nregs[i][GET_MODE (rld[r].reg_rtx)];
7909 int k;
7911 /* For a multi register reload, we need to check if all or part
7912 of the value lives to the end. */
7913 for (k = 0; k < nr; k++)
7914 if (reload_reg_reaches_end_p (i + k, rld[r].opnum,
7915 rld[r].when_needed))
7916 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
7918 /* Maybe the spill reg contains a copy of reload_out. */
7919 if (rld[r].out != 0
7920 && (REG_P (rld[r].out)
7921 #ifdef AUTO_INC_DEC
7922 || ! rld[r].out_reg
7923 #endif
7924 || REG_P (rld[r].out_reg)))
7926 rtx reg;
7927 enum machine_mode mode;
7928 int regno, nregs;
7930 reg = reload_reg_rtx_for_output[r];
7931 mode = GET_MODE (reg);
7932 regno = REGNO (reg);
7933 nregs = hard_regno_nregs[regno][mode];
7934 if (reload_regs_reach_end_p (regno, nregs, rld[r].opnum,
7935 rld[r].when_needed))
7937 rtx out = (REG_P (rld[r].out)
7938 ? rld[r].out
7939 : rld[r].out_reg
7940 ? rld[r].out_reg
7941 /* AUTO_INC */ : XEXP (rld[r].in_reg, 0));
7942 int out_regno = REGNO (out);
7943 int out_nregs = (!HARD_REGISTER_NUM_P (out_regno) ? 1
7944 : hard_regno_nregs[out_regno][mode]);
7945 bool piecemeal;
7947 spill_reg_store[regno] = new_spill_reg_store[regno];
7948 spill_reg_stored_to[regno] = out;
7949 reg_last_reload_reg[out_regno] = reg;
7951 piecemeal = (HARD_REGISTER_NUM_P (out_regno)
7952 && nregs == out_nregs
7953 && inherit_piecemeal_p (out_regno, regno, mode));
7955 /* If OUT_REGNO is a hard register, it may occupy more than
7956 one register. If it does, say what is in the
7957 rest of the registers assuming that both registers
7958 agree on how many words the object takes. If not,
7959 invalidate the subsequent registers. */
7961 if (HARD_REGISTER_NUM_P (out_regno))
7962 for (k = 1; k < out_nregs; k++)
7963 reg_last_reload_reg[out_regno + k]
7964 = (piecemeal ? regno_reg_rtx[regno + k] : 0);
7966 /* Now do the inverse operation. */
7967 for (k = 0; k < nregs; k++)
7969 CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
7970 reg_reloaded_contents[regno + k]
7971 = (!HARD_REGISTER_NUM_P (out_regno) || !piecemeal
7972 ? out_regno
7973 : out_regno + k);
7974 reg_reloaded_insn[regno + k] = insn;
7975 SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
7976 if (HARD_REGNO_CALL_PART_CLOBBERED (regno + k, mode))
7977 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
7978 regno + k);
7979 else
7980 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
7981 regno + k);
7985 /* Maybe the spill reg contains a copy of reload_in. Only do
7986 something if there will not be an output reload for
7987 the register being reloaded. */
7988 else if (rld[r].out_reg == 0
7989 && rld[r].in != 0
7990 && ((REG_P (rld[r].in)
7991 && !HARD_REGISTER_P (rld[r].in)
7992 && !REGNO_REG_SET_P (&reg_has_output_reload,
7993 REGNO (rld[r].in)))
7994 || (REG_P (rld[r].in_reg)
7995 && !REGNO_REG_SET_P (&reg_has_output_reload,
7996 REGNO (rld[r].in_reg))))
7997 && !reg_set_p (reload_reg_rtx_for_input[r], PATTERN (insn)))
7999 rtx reg;
8000 enum machine_mode mode;
8001 int regno, nregs;
8003 reg = reload_reg_rtx_for_input[r];
8004 mode = GET_MODE (reg);
8005 regno = REGNO (reg);
8006 nregs = hard_regno_nregs[regno][mode];
8007 if (reload_regs_reach_end_p (regno, nregs, rld[r].opnum,
8008 rld[r].when_needed))
8010 int in_regno;
8011 int in_nregs;
8012 rtx in;
8013 bool piecemeal;
8015 if (REG_P (rld[r].in)
8016 && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER)
8017 in = rld[r].in;
8018 else if (REG_P (rld[r].in_reg))
8019 in = rld[r].in_reg;
8020 else
8021 in = XEXP (rld[r].in_reg, 0);
8022 in_regno = REGNO (in);
8024 in_nregs = (!HARD_REGISTER_NUM_P (in_regno) ? 1
8025 : hard_regno_nregs[in_regno][mode]);
8027 reg_last_reload_reg[in_regno] = reg;
8029 piecemeal = (HARD_REGISTER_NUM_P (in_regno)
8030 && nregs == in_nregs
8031 && inherit_piecemeal_p (regno, in_regno, mode));
8033 if (HARD_REGISTER_NUM_P (in_regno))
8034 for (k = 1; k < in_nregs; k++)
8035 reg_last_reload_reg[in_regno + k]
8036 = (piecemeal ? regno_reg_rtx[regno + k] : 0);
8038 /* Unless we inherited this reload, show we haven't
8039 recently done a store.
8040 Previous stores of inherited auto_inc expressions
8041 also have to be discarded. */
8042 if (! reload_inherited[r]
8043 || (rld[r].out && ! rld[r].out_reg))
8044 spill_reg_store[regno] = 0;
8046 for (k = 0; k < nregs; k++)
8048 CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
8049 reg_reloaded_contents[regno + k]
8050 = (!HARD_REGISTER_NUM_P (in_regno) || !piecemeal
8051 ? in_regno
8052 : in_regno + k);
8053 reg_reloaded_insn[regno + k] = insn;
8054 SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
8055 if (HARD_REGNO_CALL_PART_CLOBBERED (regno + k, mode))
8056 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8057 regno + k);
8058 else
8059 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8060 regno + k);
8066 /* The following if-statement was #if 0'd in 1.34 (or before...).
8067 It's reenabled in 1.35 because supposedly nothing else
8068 deals with this problem. */
8070 /* If a register gets output-reloaded from a non-spill register,
8071 that invalidates any previous reloaded copy of it.
8072 But forget_old_reloads_1 won't get to see it, because
8073 it thinks only about the original insn. So invalidate it here.
8074 Also do the same thing for RELOAD_OTHER constraints where the
8075 output is discarded. */
8076 if (i < 0
8077 && ((rld[r].out != 0
8078 && (REG_P (rld[r].out)
8079 || (MEM_P (rld[r].out)
8080 && REG_P (rld[r].out_reg))))
8081 || (rld[r].out == 0 && rld[r].out_reg
8082 && REG_P (rld[r].out_reg))))
8084 rtx out = ((rld[r].out && REG_P (rld[r].out))
8085 ? rld[r].out : rld[r].out_reg);
8086 int out_regno = REGNO (out);
8087 enum machine_mode mode = GET_MODE (out);
8089 /* REG_RTX is now set or clobbered by the main instruction.
8090 As the comment above explains, forget_old_reloads_1 only
8091 sees the original instruction, and there is no guarantee
8092 that the original instruction also clobbered REG_RTX.
8093 For example, if find_reloads sees that the input side of
8094 a matched operand pair dies in this instruction, it may
8095 use the input register as the reload register.
8097 Calling forget_old_reloads_1 is a waste of effort if
8098 REG_RTX is also the output register.
8100 If we know that REG_RTX holds the value of a pseudo
8101 register, the code after the call will record that fact. */
8102 if (rld[r].reg_rtx && rld[r].reg_rtx != out)
8103 forget_old_reloads_1 (rld[r].reg_rtx, NULL_RTX, NULL);
8105 if (!HARD_REGISTER_NUM_P (out_regno))
8107 rtx src_reg, store_insn = NULL_RTX;
8109 reg_last_reload_reg[out_regno] = 0;
8111 /* If we can find a hard register that is stored, record
8112 the storing insn so that we may delete this insn with
8113 delete_output_reload. */
8114 src_reg = reload_reg_rtx_for_output[r];
8116 /* If this is an optional reload, try to find the source reg
8117 from an input reload. */
8118 if (! src_reg)
8120 rtx set = single_set (insn);
8121 if (set && SET_DEST (set) == rld[r].out)
8123 int k;
8125 src_reg = SET_SRC (set);
8126 store_insn = insn;
8127 for (k = 0; k < n_reloads; k++)
8129 if (rld[k].in == src_reg)
8131 src_reg = reload_reg_rtx_for_input[k];
8132 break;
8137 else
8138 store_insn = new_spill_reg_store[REGNO (src_reg)];
8139 if (src_reg && REG_P (src_reg)
8140 && REGNO (src_reg) < FIRST_PSEUDO_REGISTER)
8142 int src_regno, src_nregs, k;
8143 rtx note;
8145 gcc_assert (GET_MODE (src_reg) == mode);
8146 src_regno = REGNO (src_reg);
8147 src_nregs = hard_regno_nregs[src_regno][mode];
8148 /* The place where to find a death note varies with
8149 PRESERVE_DEATH_INFO_REGNO_P . The condition is not
8150 necessarily checked exactly in the code that moves
8151 notes, so just check both locations. */
8152 note = find_regno_note (insn, REG_DEAD, src_regno);
8153 if (! note && store_insn)
8154 note = find_regno_note (store_insn, REG_DEAD, src_regno);
8155 for (k = 0; k < src_nregs; k++)
8157 spill_reg_store[src_regno + k] = store_insn;
8158 spill_reg_stored_to[src_regno + k] = out;
8159 reg_reloaded_contents[src_regno + k] = out_regno;
8160 reg_reloaded_insn[src_regno + k] = store_insn;
8161 CLEAR_HARD_REG_BIT (reg_reloaded_dead, src_regno + k);
8162 SET_HARD_REG_BIT (reg_reloaded_valid, src_regno + k);
8163 if (HARD_REGNO_CALL_PART_CLOBBERED (src_regno + k,
8164 mode))
8165 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8166 src_regno + k);
8167 else
8168 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8169 src_regno + k);
8170 SET_HARD_REG_BIT (reg_is_output_reload, src_regno + k);
8171 if (note)
8172 SET_HARD_REG_BIT (reg_reloaded_died, src_regno);
8173 else
8174 CLEAR_HARD_REG_BIT (reg_reloaded_died, src_regno);
8176 reg_last_reload_reg[out_regno] = src_reg;
8177 /* We have to set reg_has_output_reload here, or else
8178 forget_old_reloads_1 will clear reg_last_reload_reg
8179 right away. */
8180 SET_REGNO_REG_SET (&reg_has_output_reload,
8181 out_regno);
8184 else
8186 int k, out_nregs = hard_regno_nregs[out_regno][mode];
8188 for (k = 0; k < out_nregs; k++)
8189 reg_last_reload_reg[out_regno + k] = 0;
8193 IOR_HARD_REG_SET (reg_reloaded_dead, reg_reloaded_died);
8196 /* Go through the motions to emit INSN and test if it is strictly valid.
8197 Return the emitted insn if valid, else return NULL. */
8199 static rtx
8200 emit_insn_if_valid_for_reload (rtx insn)
8202 rtx last = get_last_insn ();
8203 int code;
8205 insn = emit_insn (insn);
8206 code = recog_memoized (insn);
8208 if (code >= 0)
8210 extract_insn (insn);
8211 /* We want constrain operands to treat this insn strictly in its
8212 validity determination, i.e., the way it would after reload has
8213 completed. */
8214 if (constrain_operands (1))
8215 return insn;
8218 delete_insns_since (last);
8219 return NULL;
8222 /* Emit code to perform a reload from IN (which may be a reload register) to
8223 OUT (which may also be a reload register). IN or OUT is from operand
8224 OPNUM with reload type TYPE.
8226 Returns first insn emitted. */
8228 static rtx
8229 gen_reload (rtx out, rtx in, int opnum, enum reload_type type)
8231 rtx last = get_last_insn ();
8232 rtx tem;
8234 /* If IN is a paradoxical SUBREG, remove it and try to put the
8235 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
8236 if (GET_CODE (in) == SUBREG
8237 && (GET_MODE_SIZE (GET_MODE (in))
8238 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
8239 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
8240 in = SUBREG_REG (in), out = tem;
8241 else if (GET_CODE (out) == SUBREG
8242 && (GET_MODE_SIZE (GET_MODE (out))
8243 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
8244 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
8245 out = SUBREG_REG (out), in = tem;
8247 /* How to do this reload can get quite tricky. Normally, we are being
8248 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
8249 register that didn't get a hard register. In that case we can just
8250 call emit_move_insn.
8252 We can also be asked to reload a PLUS that adds a register or a MEM to
8253 another register, constant or MEM. This can occur during frame pointer
8254 elimination and while reloading addresses. This case is handled by
8255 trying to emit a single insn to perform the add. If it is not valid,
8256 we use a two insn sequence.
8258 Or we can be asked to reload an unary operand that was a fragment of
8259 an addressing mode, into a register. If it isn't recognized as-is,
8260 we try making the unop operand and the reload-register the same:
8261 (set reg:X (unop:X expr:Y))
8262 -> (set reg:Y expr:Y) (set reg:X (unop:X reg:Y)).
8264 Finally, we could be called to handle an 'o' constraint by putting
8265 an address into a register. In that case, we first try to do this
8266 with a named pattern of "reload_load_address". If no such pattern
8267 exists, we just emit a SET insn and hope for the best (it will normally
8268 be valid on machines that use 'o').
8270 This entire process is made complex because reload will never
8271 process the insns we generate here and so we must ensure that
8272 they will fit their constraints and also by the fact that parts of
8273 IN might be being reloaded separately and replaced with spill registers.
8274 Because of this, we are, in some sense, just guessing the right approach
8275 here. The one listed above seems to work.
8277 ??? At some point, this whole thing needs to be rethought. */
8279 if (GET_CODE (in) == PLUS
8280 && (REG_P (XEXP (in, 0))
8281 || GET_CODE (XEXP (in, 0)) == SUBREG
8282 || MEM_P (XEXP (in, 0)))
8283 && (REG_P (XEXP (in, 1))
8284 || GET_CODE (XEXP (in, 1)) == SUBREG
8285 || CONSTANT_P (XEXP (in, 1))
8286 || MEM_P (XEXP (in, 1))))
8288 /* We need to compute the sum of a register or a MEM and another
8289 register, constant, or MEM, and put it into the reload
8290 register. The best possible way of doing this is if the machine
8291 has a three-operand ADD insn that accepts the required operands.
8293 The simplest approach is to try to generate such an insn and see if it
8294 is recognized and matches its constraints. If so, it can be used.
8296 It might be better not to actually emit the insn unless it is valid,
8297 but we need to pass the insn as an operand to `recog' and
8298 `extract_insn' and it is simpler to emit and then delete the insn if
8299 not valid than to dummy things up. */
8301 rtx op0, op1, tem, insn;
8302 int code;
8304 op0 = find_replacement (&XEXP (in, 0));
8305 op1 = find_replacement (&XEXP (in, 1));
8307 /* Since constraint checking is strict, commutativity won't be
8308 checked, so we need to do that here to avoid spurious failure
8309 if the add instruction is two-address and the second operand
8310 of the add is the same as the reload reg, which is frequently
8311 the case. If the insn would be A = B + A, rearrange it so
8312 it will be A = A + B as constrain_operands expects. */
8314 if (REG_P (XEXP (in, 1))
8315 && REGNO (out) == REGNO (XEXP (in, 1)))
8316 tem = op0, op0 = op1, op1 = tem;
8318 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
8319 in = gen_rtx_PLUS (GET_MODE (in), op0, op1);
8321 insn = emit_insn_if_valid_for_reload (gen_rtx_SET (VOIDmode, out, in));
8322 if (insn)
8323 return insn;
8325 /* If that failed, we must use a conservative two-insn sequence.
8327 Use a move to copy one operand into the reload register. Prefer
8328 to reload a constant, MEM or pseudo since the move patterns can
8329 handle an arbitrary operand. If OP1 is not a constant, MEM or
8330 pseudo and OP1 is not a valid operand for an add instruction, then
8331 reload OP1.
8333 After reloading one of the operands into the reload register, add
8334 the reload register to the output register.
8336 If there is another way to do this for a specific machine, a
8337 DEFINE_PEEPHOLE should be specified that recognizes the sequence
8338 we emit below. */
8340 code = (int) optab_handler (add_optab, GET_MODE (out))->insn_code;
8342 if (CONSTANT_P (op1) || MEM_P (op1) || GET_CODE (op1) == SUBREG
8343 || (REG_P (op1)
8344 && REGNO (op1) >= FIRST_PSEUDO_REGISTER)
8345 || (code != CODE_FOR_nothing
8346 && ! ((*insn_data[code].operand[2].predicate)
8347 (op1, insn_data[code].operand[2].mode))))
8348 tem = op0, op0 = op1, op1 = tem;
8350 gen_reload (out, op0, opnum, type);
8352 /* If OP0 and OP1 are the same, we can use OUT for OP1.
8353 This fixes a problem on the 32K where the stack pointer cannot
8354 be used as an operand of an add insn. */
8356 if (rtx_equal_p (op0, op1))
8357 op1 = out;
8359 insn = emit_insn_if_valid_for_reload (gen_add2_insn (out, op1));
8360 if (insn)
8362 /* Add a REG_EQUIV note so that find_equiv_reg can find it. */
8363 set_unique_reg_note (insn, REG_EQUIV, in);
8364 return insn;
8367 /* If that failed, copy the address register to the reload register.
8368 Then add the constant to the reload register. */
8370 gcc_assert (!reg_overlap_mentioned_p (out, op0));
8371 gen_reload (out, op1, opnum, type);
8372 insn = emit_insn (gen_add2_insn (out, op0));
8373 set_unique_reg_note (insn, REG_EQUIV, in);
8376 #ifdef SECONDARY_MEMORY_NEEDED
8377 /* If we need a memory location to do the move, do it that way. */
8378 else if ((REG_P (in) || GET_CODE (in) == SUBREG)
8379 && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER
8380 && (REG_P (out) || GET_CODE (out) == SUBREG)
8381 && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
8382 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (reg_or_subregno (in)),
8383 REGNO_REG_CLASS (reg_or_subregno (out)),
8384 GET_MODE (out)))
8386 /* Get the memory to use and rewrite both registers to its mode. */
8387 rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
8389 if (GET_MODE (loc) != GET_MODE (out))
8390 out = gen_rtx_REG (GET_MODE (loc), REGNO (out));
8392 if (GET_MODE (loc) != GET_MODE (in))
8393 in = gen_rtx_REG (GET_MODE (loc), REGNO (in));
8395 gen_reload (loc, in, opnum, type);
8396 gen_reload (out, loc, opnum, type);
8398 #endif
8399 else if (REG_P (out) && UNARY_P (in))
8401 rtx insn;
8402 rtx op1;
8403 rtx out_moded;
8404 rtx set;
8406 op1 = find_replacement (&XEXP (in, 0));
8407 if (op1 != XEXP (in, 0))
8408 in = gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in), op1);
8410 /* First, try a plain SET. */
8411 set = emit_insn_if_valid_for_reload (gen_rtx_SET (VOIDmode, out, in));
8412 if (set)
8413 return set;
8415 /* If that failed, move the inner operand to the reload
8416 register, and try the same unop with the inner expression
8417 replaced with the reload register. */
8419 if (GET_MODE (op1) != GET_MODE (out))
8420 out_moded = gen_rtx_REG (GET_MODE (op1), REGNO (out));
8421 else
8422 out_moded = out;
8424 gen_reload (out_moded, op1, opnum, type);
8426 insn
8427 = gen_rtx_SET (VOIDmode, out,
8428 gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in),
8429 out_moded));
8430 insn = emit_insn_if_valid_for_reload (insn);
8431 if (insn)
8433 set_unique_reg_note (insn, REG_EQUIV, in);
8434 return insn;
8437 fatal_insn ("Failure trying to reload:", set);
8439 /* If IN is a simple operand, use gen_move_insn. */
8440 else if (OBJECT_P (in) || GET_CODE (in) == SUBREG)
8442 tem = emit_insn (gen_move_insn (out, in));
8443 /* IN may contain a LABEL_REF, if so add a REG_LABEL_OPERAND note. */
8444 mark_jump_label (in, tem, 0);
8447 #ifdef HAVE_reload_load_address
8448 else if (HAVE_reload_load_address)
8449 emit_insn (gen_reload_load_address (out, in));
8450 #endif
8452 /* Otherwise, just write (set OUT IN) and hope for the best. */
8453 else
8454 emit_insn (gen_rtx_SET (VOIDmode, out, in));
8456 /* Return the first insn emitted.
8457 We can not just return get_last_insn, because there may have
8458 been multiple instructions emitted. Also note that gen_move_insn may
8459 emit more than one insn itself, so we can not assume that there is one
8460 insn emitted per emit_insn_before call. */
8462 return last ? NEXT_INSN (last) : get_insns ();
8465 /* Delete a previously made output-reload whose result we now believe
8466 is not needed. First we double-check.
8468 INSN is the insn now being processed.
8469 LAST_RELOAD_REG is the hard register number for which we want to delete
8470 the last output reload.
8471 J is the reload-number that originally used REG. The caller has made
8472 certain that reload J doesn't use REG any longer for input.
8473 NEW_RELOAD_REG is reload register that reload J is using for REG. */
8475 static void
8476 delete_output_reload (rtx insn, int j, int last_reload_reg, rtx new_reload_reg)
8478 rtx output_reload_insn = spill_reg_store[last_reload_reg];
8479 rtx reg = spill_reg_stored_to[last_reload_reg];
8480 int k;
8481 int n_occurrences;
8482 int n_inherited = 0;
8483 rtx i1;
8484 rtx substed;
8486 /* It is possible that this reload has been only used to set another reload
8487 we eliminated earlier and thus deleted this instruction too. */
8488 if (INSN_DELETED_P (output_reload_insn))
8489 return;
8491 /* Get the raw pseudo-register referred to. */
8493 while (GET_CODE (reg) == SUBREG)
8494 reg = SUBREG_REG (reg);
8495 substed = reg_equiv_memory_loc[REGNO (reg)];
8497 /* This is unsafe if the operand occurs more often in the current
8498 insn than it is inherited. */
8499 for (k = n_reloads - 1; k >= 0; k--)
8501 rtx reg2 = rld[k].in;
8502 if (! reg2)
8503 continue;
8504 if (MEM_P (reg2) || reload_override_in[k])
8505 reg2 = rld[k].in_reg;
8506 #ifdef AUTO_INC_DEC
8507 if (rld[k].out && ! rld[k].out_reg)
8508 reg2 = XEXP (rld[k].in_reg, 0);
8509 #endif
8510 while (GET_CODE (reg2) == SUBREG)
8511 reg2 = SUBREG_REG (reg2);
8512 if (rtx_equal_p (reg2, reg))
8514 if (reload_inherited[k] || reload_override_in[k] || k == j)
8515 n_inherited++;
8516 else
8517 return;
8520 n_occurrences = count_occurrences (PATTERN (insn), reg, 0);
8521 if (CALL_P (insn) && CALL_INSN_FUNCTION_USAGE (insn))
8522 n_occurrences += count_occurrences (CALL_INSN_FUNCTION_USAGE (insn),
8523 reg, 0);
8524 if (substed)
8525 n_occurrences += count_occurrences (PATTERN (insn),
8526 eliminate_regs (substed, 0,
8527 NULL_RTX), 0);
8528 for (i1 = reg_equiv_alt_mem_list[REGNO (reg)]; i1; i1 = XEXP (i1, 1))
8530 gcc_assert (!rtx_equal_p (XEXP (i1, 0), substed));
8531 n_occurrences += count_occurrences (PATTERN (insn), XEXP (i1, 0), 0);
8533 if (n_occurrences > n_inherited)
8534 return;
8536 /* If the pseudo-reg we are reloading is no longer referenced
8537 anywhere between the store into it and here,
8538 and we're within the same basic block, then the value can only
8539 pass through the reload reg and end up here.
8540 Otherwise, give up--return. */
8541 for (i1 = NEXT_INSN (output_reload_insn);
8542 i1 != insn; i1 = NEXT_INSN (i1))
8544 if (NOTE_INSN_BASIC_BLOCK_P (i1))
8545 return;
8546 if ((NONJUMP_INSN_P (i1) || CALL_P (i1))
8547 && reg_mentioned_p (reg, PATTERN (i1)))
8549 /* If this is USE in front of INSN, we only have to check that
8550 there are no more references than accounted for by inheritance. */
8551 while (NONJUMP_INSN_P (i1) && GET_CODE (PATTERN (i1)) == USE)
8553 n_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
8554 i1 = NEXT_INSN (i1);
8556 if (n_occurrences <= n_inherited && i1 == insn)
8557 break;
8558 return;
8562 /* We will be deleting the insn. Remove the spill reg information. */
8563 for (k = hard_regno_nregs[last_reload_reg][GET_MODE (reg)]; k-- > 0; )
8565 spill_reg_store[last_reload_reg + k] = 0;
8566 spill_reg_stored_to[last_reload_reg + k] = 0;
8569 /* The caller has already checked that REG dies or is set in INSN.
8570 It has also checked that we are optimizing, and thus some
8571 inaccuracies in the debugging information are acceptable.
8572 So we could just delete output_reload_insn. But in some cases
8573 we can improve the debugging information without sacrificing
8574 optimization - maybe even improving the code: See if the pseudo
8575 reg has been completely replaced with reload regs. If so, delete
8576 the store insn and forget we had a stack slot for the pseudo. */
8577 if (rld[j].out != rld[j].in
8578 && REG_N_DEATHS (REGNO (reg)) == 1
8579 && REG_N_SETS (REGNO (reg)) == 1
8580 && REG_BASIC_BLOCK (REGNO (reg)) >= NUM_FIXED_BLOCKS
8581 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
8583 rtx i2;
8585 /* We know that it was used only between here and the beginning of
8586 the current basic block. (We also know that the last use before
8587 INSN was the output reload we are thinking of deleting, but never
8588 mind that.) Search that range; see if any ref remains. */
8589 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
8591 rtx set = single_set (i2);
8593 /* Uses which just store in the pseudo don't count,
8594 since if they are the only uses, they are dead. */
8595 if (set != 0 && SET_DEST (set) == reg)
8596 continue;
8597 if (LABEL_P (i2)
8598 || JUMP_P (i2))
8599 break;
8600 if ((NONJUMP_INSN_P (i2) || CALL_P (i2))
8601 && reg_mentioned_p (reg, PATTERN (i2)))
8603 /* Some other ref remains; just delete the output reload we
8604 know to be dead. */
8605 delete_address_reloads (output_reload_insn, insn);
8606 delete_insn (output_reload_insn);
8607 return;
8611 /* Delete the now-dead stores into this pseudo. Note that this
8612 loop also takes care of deleting output_reload_insn. */
8613 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
8615 rtx set = single_set (i2);
8617 if (set != 0 && SET_DEST (set) == reg)
8619 delete_address_reloads (i2, insn);
8620 delete_insn (i2);
8622 if (LABEL_P (i2)
8623 || JUMP_P (i2))
8624 break;
8627 /* For the debugging info, say the pseudo lives in this reload reg. */
8628 reg_renumber[REGNO (reg)] = REGNO (new_reload_reg);
8629 if (flag_ira && optimize)
8630 /* Inform IRA about the change. */
8631 mark_allocation_change (REGNO (reg));
8632 alter_reg (REGNO (reg), -1, false);
8634 else
8636 delete_address_reloads (output_reload_insn, insn);
8637 delete_insn (output_reload_insn);
8641 /* We are going to delete DEAD_INSN. Recursively delete loads of
8642 reload registers used in DEAD_INSN that are not used till CURRENT_INSN.
8643 CURRENT_INSN is being reloaded, so we have to check its reloads too. */
8644 static void
8645 delete_address_reloads (rtx dead_insn, rtx current_insn)
8647 rtx set = single_set (dead_insn);
8648 rtx set2, dst, prev, next;
8649 if (set)
8651 rtx dst = SET_DEST (set);
8652 if (MEM_P (dst))
8653 delete_address_reloads_1 (dead_insn, XEXP (dst, 0), current_insn);
8655 /* If we deleted the store from a reloaded post_{in,de}c expression,
8656 we can delete the matching adds. */
8657 prev = PREV_INSN (dead_insn);
8658 next = NEXT_INSN (dead_insn);
8659 if (! prev || ! next)
8660 return;
8661 set = single_set (next);
8662 set2 = single_set (prev);
8663 if (! set || ! set2
8664 || GET_CODE (SET_SRC (set)) != PLUS || GET_CODE (SET_SRC (set2)) != PLUS
8665 || GET_CODE (XEXP (SET_SRC (set), 1)) != CONST_INT
8666 || GET_CODE (XEXP (SET_SRC (set2), 1)) != CONST_INT)
8667 return;
8668 dst = SET_DEST (set);
8669 if (! rtx_equal_p (dst, SET_DEST (set2))
8670 || ! rtx_equal_p (dst, XEXP (SET_SRC (set), 0))
8671 || ! rtx_equal_p (dst, XEXP (SET_SRC (set2), 0))
8672 || (INTVAL (XEXP (SET_SRC (set), 1))
8673 != -INTVAL (XEXP (SET_SRC (set2), 1))))
8674 return;
8675 delete_related_insns (prev);
8676 delete_related_insns (next);
8679 /* Subfunction of delete_address_reloads: process registers found in X. */
8680 static void
8681 delete_address_reloads_1 (rtx dead_insn, rtx x, rtx current_insn)
8683 rtx prev, set, dst, i2;
8684 int i, j;
8685 enum rtx_code code = GET_CODE (x);
8687 if (code != REG)
8689 const char *fmt = GET_RTX_FORMAT (code);
8690 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8692 if (fmt[i] == 'e')
8693 delete_address_reloads_1 (dead_insn, XEXP (x, i), current_insn);
8694 else if (fmt[i] == 'E')
8696 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8697 delete_address_reloads_1 (dead_insn, XVECEXP (x, i, j),
8698 current_insn);
8701 return;
8704 if (spill_reg_order[REGNO (x)] < 0)
8705 return;
8707 /* Scan backwards for the insn that sets x. This might be a way back due
8708 to inheritance. */
8709 for (prev = PREV_INSN (dead_insn); prev; prev = PREV_INSN (prev))
8711 code = GET_CODE (prev);
8712 if (code == CODE_LABEL || code == JUMP_INSN)
8713 return;
8714 if (!INSN_P (prev))
8715 continue;
8716 if (reg_set_p (x, PATTERN (prev)))
8717 break;
8718 if (reg_referenced_p (x, PATTERN (prev)))
8719 return;
8721 if (! prev || INSN_UID (prev) < reload_first_uid)
8722 return;
8723 /* Check that PREV only sets the reload register. */
8724 set = single_set (prev);
8725 if (! set)
8726 return;
8727 dst = SET_DEST (set);
8728 if (!REG_P (dst)
8729 || ! rtx_equal_p (dst, x))
8730 return;
8731 if (! reg_set_p (dst, PATTERN (dead_insn)))
8733 /* Check if DST was used in a later insn -
8734 it might have been inherited. */
8735 for (i2 = NEXT_INSN (dead_insn); i2; i2 = NEXT_INSN (i2))
8737 if (LABEL_P (i2))
8738 break;
8739 if (! INSN_P (i2))
8740 continue;
8741 if (reg_referenced_p (dst, PATTERN (i2)))
8743 /* If there is a reference to the register in the current insn,
8744 it might be loaded in a non-inherited reload. If no other
8745 reload uses it, that means the register is set before
8746 referenced. */
8747 if (i2 == current_insn)
8749 for (j = n_reloads - 1; j >= 0; j--)
8750 if ((rld[j].reg_rtx == dst && reload_inherited[j])
8751 || reload_override_in[j] == dst)
8752 return;
8753 for (j = n_reloads - 1; j >= 0; j--)
8754 if (rld[j].in && rld[j].reg_rtx == dst)
8755 break;
8756 if (j >= 0)
8757 break;
8759 return;
8761 if (JUMP_P (i2))
8762 break;
8763 /* If DST is still live at CURRENT_INSN, check if it is used for
8764 any reload. Note that even if CURRENT_INSN sets DST, we still
8765 have to check the reloads. */
8766 if (i2 == current_insn)
8768 for (j = n_reloads - 1; j >= 0; j--)
8769 if ((rld[j].reg_rtx == dst && reload_inherited[j])
8770 || reload_override_in[j] == dst)
8771 return;
8772 /* ??? We can't finish the loop here, because dst might be
8773 allocated to a pseudo in this block if no reload in this
8774 block needs any of the classes containing DST - see
8775 spill_hard_reg. There is no easy way to tell this, so we
8776 have to scan till the end of the basic block. */
8778 if (reg_set_p (dst, PATTERN (i2)))
8779 break;
8782 delete_address_reloads_1 (prev, SET_SRC (set), current_insn);
8783 reg_reloaded_contents[REGNO (dst)] = -1;
8784 delete_insn (prev);
8787 /* Output reload-insns to reload VALUE into RELOADREG.
8788 VALUE is an autoincrement or autodecrement RTX whose operand
8789 is a register or memory location;
8790 so reloading involves incrementing that location.
8791 IN is either identical to VALUE, or some cheaper place to reload from.
8793 INC_AMOUNT is the number to increment or decrement by (always positive).
8794 This cannot be deduced from VALUE.
8796 Return the instruction that stores into RELOADREG. */
8798 static rtx
8799 inc_for_reload (rtx reloadreg, rtx in, rtx value, int inc_amount)
8801 /* REG or MEM to be copied and incremented. */
8802 rtx incloc = find_replacement (&XEXP (value, 0));
8803 /* Nonzero if increment after copying. */
8804 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC
8805 || GET_CODE (value) == POST_MODIFY);
8806 rtx last;
8807 rtx inc;
8808 rtx add_insn;
8809 int code;
8810 rtx store;
8811 rtx real_in = in == value ? incloc : in;
8813 /* No hard register is equivalent to this register after
8814 inc/dec operation. If REG_LAST_RELOAD_REG were nonzero,
8815 we could inc/dec that register as well (maybe even using it for
8816 the source), but I'm not sure it's worth worrying about. */
8817 if (REG_P (incloc))
8818 reg_last_reload_reg[REGNO (incloc)] = 0;
8820 if (GET_CODE (value) == PRE_MODIFY || GET_CODE (value) == POST_MODIFY)
8822 gcc_assert (GET_CODE (XEXP (value, 1)) == PLUS);
8823 inc = find_replacement (&XEXP (XEXP (value, 1), 1));
8825 else
8827 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
8828 inc_amount = -inc_amount;
8830 inc = GEN_INT (inc_amount);
8833 /* If this is post-increment, first copy the location to the reload reg. */
8834 if (post && real_in != reloadreg)
8835 emit_insn (gen_move_insn (reloadreg, real_in));
8837 if (in == value)
8839 /* See if we can directly increment INCLOC. Use a method similar to
8840 that in gen_reload. */
8842 last = get_last_insn ();
8843 add_insn = emit_insn (gen_rtx_SET (VOIDmode, incloc,
8844 gen_rtx_PLUS (GET_MODE (incloc),
8845 incloc, inc)));
8847 code = recog_memoized (add_insn);
8848 if (code >= 0)
8850 extract_insn (add_insn);
8851 if (constrain_operands (1))
8853 /* If this is a pre-increment and we have incremented the value
8854 where it lives, copy the incremented value to RELOADREG to
8855 be used as an address. */
8857 if (! post)
8858 emit_insn (gen_move_insn (reloadreg, incloc));
8860 return add_insn;
8863 delete_insns_since (last);
8866 /* If couldn't do the increment directly, must increment in RELOADREG.
8867 The way we do this depends on whether this is pre- or post-increment.
8868 For pre-increment, copy INCLOC to the reload register, increment it
8869 there, then save back. */
8871 if (! post)
8873 if (in != reloadreg)
8874 emit_insn (gen_move_insn (reloadreg, real_in));
8875 emit_insn (gen_add2_insn (reloadreg, inc));
8876 store = emit_insn (gen_move_insn (incloc, reloadreg));
8878 else
8880 /* Postincrement.
8881 Because this might be a jump insn or a compare, and because RELOADREG
8882 may not be available after the insn in an input reload, we must do
8883 the incrementation before the insn being reloaded for.
8885 We have already copied IN to RELOADREG. Increment the copy in
8886 RELOADREG, save that back, then decrement RELOADREG so it has
8887 the original value. */
8889 emit_insn (gen_add2_insn (reloadreg, inc));
8890 store = emit_insn (gen_move_insn (incloc, reloadreg));
8891 if (GET_CODE (inc) == CONST_INT)
8892 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-INTVAL (inc))));
8893 else
8894 emit_insn (gen_sub2_insn (reloadreg, inc));
8897 return store;
8900 #ifdef AUTO_INC_DEC
8901 static void
8902 add_auto_inc_notes (rtx insn, rtx x)
8904 enum rtx_code code = GET_CODE (x);
8905 const char *fmt;
8906 int i, j;
8908 if (code == MEM && auto_inc_p (XEXP (x, 0)))
8910 REG_NOTES (insn)
8911 = gen_rtx_EXPR_LIST (REG_INC, XEXP (XEXP (x, 0), 0), REG_NOTES (insn));
8912 return;
8915 /* Scan all the operand sub-expressions. */
8916 fmt = GET_RTX_FORMAT (code);
8917 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8919 if (fmt[i] == 'e')
8920 add_auto_inc_notes (insn, XEXP (x, i));
8921 else if (fmt[i] == 'E')
8922 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8923 add_auto_inc_notes (insn, XVECEXP (x, i, j));
8926 #endif
8928 /* Copy EH notes from an insn to its reloads. */
8929 static void
8930 copy_eh_notes (rtx insn, rtx x)
8932 rtx eh_note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
8933 if (eh_note)
8935 for (; x != 0; x = NEXT_INSN (x))
8937 if (may_trap_p (PATTERN (x)))
8938 REG_NOTES (x)
8939 = gen_rtx_EXPR_LIST (REG_EH_REGION, XEXP (eh_note, 0),
8940 REG_NOTES (x));
8945 /* This is used by reload pass, that does emit some instructions after
8946 abnormal calls moving basic block end, but in fact it wants to emit
8947 them on the edge. Looks for abnormal call edges, find backward the
8948 proper call and fix the damage.
8950 Similar handle instructions throwing exceptions internally. */
8951 void
8952 fixup_abnormal_edges (void)
8954 bool inserted = false;
8955 basic_block bb;
8957 FOR_EACH_BB (bb)
8959 edge e;
8960 edge_iterator ei;
8962 /* Look for cases we are interested in - calls or instructions causing
8963 exceptions. */
8964 FOR_EACH_EDGE (e, ei, bb->succs)
8966 if (e->flags & EDGE_ABNORMAL_CALL)
8967 break;
8968 if ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
8969 == (EDGE_ABNORMAL | EDGE_EH))
8970 break;
8972 if (e && !CALL_P (BB_END (bb))
8973 && !can_throw_internal (BB_END (bb)))
8975 rtx insn;
8977 /* Get past the new insns generated. Allow notes, as the insns
8978 may be already deleted. */
8979 insn = BB_END (bb);
8980 while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
8981 && !can_throw_internal (insn)
8982 && insn != BB_HEAD (bb))
8983 insn = PREV_INSN (insn);
8985 if (CALL_P (insn) || can_throw_internal (insn))
8987 rtx stop, next;
8989 stop = NEXT_INSN (BB_END (bb));
8990 BB_END (bb) = insn;
8991 insn = NEXT_INSN (insn);
8993 FOR_EACH_EDGE (e, ei, bb->succs)
8994 if (e->flags & EDGE_FALLTHRU)
8995 break;
8997 while (insn && insn != stop)
8999 next = NEXT_INSN (insn);
9000 if (INSN_P (insn))
9002 delete_insn (insn);
9004 /* Sometimes there's still the return value USE.
9005 If it's placed after a trapping call (i.e. that
9006 call is the last insn anyway), we have no fallthru
9007 edge. Simply delete this use and don't try to insert
9008 on the non-existent edge. */
9009 if (GET_CODE (PATTERN (insn)) != USE)
9011 /* We're not deleting it, we're moving it. */
9012 INSN_DELETED_P (insn) = 0;
9013 PREV_INSN (insn) = NULL_RTX;
9014 NEXT_INSN (insn) = NULL_RTX;
9016 insert_insn_on_edge (insn, e);
9017 inserted = true;
9020 else if (!BARRIER_P (insn))
9021 set_block_for_insn (insn, NULL);
9022 insn = next;
9026 /* It may be that we don't find any such trapping insn. In this
9027 case we discovered quite late that the insn that had been
9028 marked as can_throw_internal in fact couldn't trap at all.
9029 So we should in fact delete the EH edges out of the block. */
9030 else
9031 purge_dead_edges (bb);
9035 /* We've possibly turned single trapping insn into multiple ones. */
9036 if (flag_non_call_exceptions)
9038 sbitmap blocks;
9039 blocks = sbitmap_alloc (last_basic_block);
9040 sbitmap_ones (blocks);
9041 find_many_sub_basic_blocks (blocks);
9042 sbitmap_free (blocks);
9045 if (inserted)
9046 commit_edge_insertions ();
9048 #ifdef ENABLE_CHECKING
9049 /* Verify that we didn't turn one trapping insn into many, and that
9050 we found and corrected all of the problems wrt fixups on the
9051 fallthru edge. */
9052 verify_flow_info ();
9053 #endif