gcc/reload1.c (official-gcc.git)
1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
27 #include "machmode.h"
28 #include "hard-reg-set.h"
29 #include "rtl.h"
30 #include "tm_p.h"
31 #include "obstack.h"
32 #include "insn-config.h"
33 #include "flags.h"
34 #include "function.h"
35 #include "expr.h"
36 #include "optabs.h"
37 #include "regs.h"
38 #include "addresses.h"
39 #include "basic-block.h"
40 #include "reload.h"
41 #include "recog.h"
42 #include "output.h"
43 #include "real.h"
44 #include "toplev.h"
45 #include "except.h"
46 #include "tree.h"
47 #include "ira.h"
48 #include "df.h"
49 #include "target.h"
50 #include "emit-rtl.h"
52 /* This file contains the reload pass of the compiler, which is
53 run after register allocation has been done. It checks that
54 each insn is valid (operands required to be in registers really
55 are in registers of the proper class) and fixes up invalid ones
56 by copying values temporarily into registers for the insns
57 that need them.
59 The results of register allocation are described by the vector
60 reg_renumber; the insns still contain pseudo regs, but reg_renumber
61 can be used to find which hard reg, if any, a pseudo reg is in.
63 The technique we always use is to free up a few hard regs that are
64 called ``reload regs'', and for each place where a pseudo reg
65 must be in a hard reg, copy it temporarily into one of the reload regs.
67 Reload regs are allocated locally for every instruction that needs
68 reloads. When there are pseudos which are allocated to a register that
69 has been chosen as a reload reg, such pseudos must be ``spilled''.
70 This means that they go to other hard regs, or to stack slots if no other
71 available hard regs can be found. Spilling can invalidate more
72 insns, requiring additional need for reloads, so we must keep checking
73 until the process stabilizes.
75 For machines with different classes of registers, we must keep track
76 of the register class needed for each reload, and make sure that
77 we allocate enough reload registers of each class.
79 The file reload.c contains the code that checks one insn for
80 validity and reports the reloads that it needs. This file
81 is in charge of scanning the entire rtl code, accumulating the
82 reload needs, spilling, assigning reload registers to use for
83 fixing up each insn, and generating the new insns to copy values
84 into the reload registers. */
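/* Illustrative sketch only (not part of the original sources): the
   top-level control flow implemented by reload () later in this file is
   approximately

	for (;;)
	  {
	    set_initial_elim_offsets ();
	    set_initial_label_offsets ();
	    calculate_needs_all_insns (global);   -- find_reloads on each insn
	    select_reload_regs ();                -- may spill more pseudos
	    something_changed |= finish_spills (global);
	    if (! something_changed)
	      break;
	  }
	reload_as_needed (global);                -- emit the actual reload insns

   The real loop also handles stack-slot allocation, caller-save setup and
   register-elimination bookkeeping; this shows only the skeleton.  */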
86 /* During reload_as_needed, element N contains a REG rtx for the hard reg
87 into which reg N has been reloaded (perhaps for a previous insn). */
88 static rtx *reg_last_reload_reg;
90 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
91 for an output reload that stores into reg N. */
92 static regset_head reg_has_output_reload;
94 /* Indicates which hard regs are reload-registers for an output reload
95 in the current insn. */
96 static HARD_REG_SET reg_is_output_reload;
98 /* Element N is the constant value to which pseudo reg N is equivalent,
99 or zero if pseudo reg N is not equivalent to a constant.
100 find_reloads looks at this in order to replace pseudo reg N
101 with the constant it stands for. */
102 rtx *reg_equiv_constant;
104 /* Element N is an invariant value to which pseudo reg N is equivalent.
105 eliminate_regs_in_insn uses this to replace pseudos in particular
106 contexts. */
107 rtx *reg_equiv_invariant;
109 /* Element N is a memory location to which pseudo reg N is equivalent,
110 prior to any register elimination (such as frame pointer to stack
111 pointer). Depending on whether or not it is a valid address, this value
112 is transferred to either reg_equiv_address or reg_equiv_mem. */
113 rtx *reg_equiv_memory_loc;
 115 /* We allocate reg_equiv_memory_loc inside a GC-managed vector so that
 116    the garbage collector can keep track of what is inside. */
117 VEC(rtx,gc) *reg_equiv_memory_loc_vec;
119 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
120 This is used when the address is not valid as a memory address
121 (because its displacement is too big for the machine.) */
122 rtx *reg_equiv_address;
124 /* Element N is the memory slot to which pseudo reg N is equivalent,
125 or zero if pseudo reg N is not equivalent to a memory slot. */
126 rtx *reg_equiv_mem;
128 /* Element N is an EXPR_LIST of REG_EQUIVs containing MEMs with
129 alternate representations of the location of pseudo reg N. */
130 rtx *reg_equiv_alt_mem_list;
132 /* Widest width in which each pseudo reg is referred to (via subreg). */
133 static unsigned int *reg_max_ref_width;
135 /* Element N is the list of insns that initialized reg N from its equivalent
136 constant or memory slot. */
137 rtx *reg_equiv_init;
138 int reg_equiv_init_size;
140 /* Vector to remember old contents of reg_renumber before spilling. */
141 static short *reg_old_renumber;
143 /* During reload_as_needed, element N contains the last pseudo regno reloaded
144 into hard register N. If that pseudo reg occupied more than one register,
145 reg_reloaded_contents points to that pseudo for each spill register in
146 use; all of these must remain set for an inheritance to occur. */
147 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
149 /* During reload_as_needed, element N contains the insn for which
150 hard register N was last used. Its contents are significant only
151 when reg_reloaded_valid is set for this register. */
152 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
154 /* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid. */
155 static HARD_REG_SET reg_reloaded_valid;
156 /* Indicate if the register was dead at the end of the reload.
157 This is only valid if reg_reloaded_contents is set and valid. */
158 static HARD_REG_SET reg_reloaded_dead;
160 /* Indicate whether the register's current value is one that is not
161 safe to retain across a call, even for registers that are normally
162 call-saved. This is only meaningful for members of reg_reloaded_valid. */
163 static HARD_REG_SET reg_reloaded_call_part_clobbered;
165 /* Number of spill-regs so far; number of valid elements of spill_regs. */
166 static int n_spills;
168 /* In parallel with spill_regs, contains REG rtx's for those regs.
169 Holds the last rtx used for any given reg, or 0 if it has never
170 been used for spilling yet. This rtx is reused, provided it has
171 the proper mode. */
172 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
174 /* In parallel with spill_regs, contains nonzero for a spill reg
175 that was stored after the last time it was used.
176 The precise value is the insn generated to do the store. */
177 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
179 /* This is the register that was stored with spill_reg_store. This is a
180 copy of reload_out / reload_out_reg when the value was stored; if
181 reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg. */
182 static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER];
184 /* This table is the inverse mapping of spill_regs:
185 indexed by hard reg number,
186 it contains the position of that reg in spill_regs,
187 or -1 for something that is not in spill_regs.
189 ?!? This is no longer accurate. */
190 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
192 /* This reg set indicates registers that can't be used as spill registers for
193 the currently processed insn. These are the hard registers which are live
194 during the insn, but not allocated to pseudos, as well as fixed
195 registers. */
196 static HARD_REG_SET bad_spill_regs;
198 /* These are the hard registers that can't be used as spill register for any
199 insn. This includes registers used for user variables and registers that
200 we can't eliminate. A register that appears in this set also can't be used
201 to retry register allocation. */
202 static HARD_REG_SET bad_spill_regs_global;
204 /* Describes order of use of registers for reloading
205 of spilled pseudo-registers. `n_spills' is the number of
206 elements that are actually valid; new ones are added at the end.
208 Both spill_regs and spill_reg_order are used on two occasions:
209 once during find_reload_regs, where they keep track of the spill registers
210 for a single insn, but also during reload_as_needed where they show all
211 the registers ever used by reload. For the latter case, the information
212 is calculated during finish_spills. */
213 static short spill_regs[FIRST_PSEUDO_REGISTER];
215 /* This vector of reg sets indicates, for each pseudo, which hard registers
216 may not be used for retrying global allocation because the register was
217 formerly spilled from one of them. If we allowed reallocating a pseudo to
218 a register that it was already allocated to, reload might not
219 terminate. */
220 static HARD_REG_SET *pseudo_previous_regs;
222 /* This vector of reg sets indicates, for each pseudo, which hard
223 registers may not be used for retrying global allocation because they
224 are used as spill registers during one of the insns in which the
225 pseudo is live. */
226 static HARD_REG_SET *pseudo_forbidden_regs;
228 /* All hard regs that have been used as spill registers for any insn are
229 marked in this set. */
230 static HARD_REG_SET used_spill_regs;
232 /* Index of last register assigned as a spill register. We allocate in
233 a round-robin fashion. */
234 static int last_spill_reg;
236 /* Nonzero if indirect addressing is supported on the machine; this means
237 that spilling (REG n) does not require reloading it into a register in
238 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
239 value indicates the level of indirect addressing supported, e.g., two
240 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
241 a hard register. */
242 static char spill_indirect_levels;
244 /* Nonzero if indirect addressing is supported when the innermost MEM is
245 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
246 which these are valid is the same as spill_indirect_levels, above. */
247 char indirect_symref_ok;
249 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
250 char double_reg_address_ok;
252 /* Record the stack slot for each spilled hard register. */
253 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
255 /* Width allocated so far for that stack slot. */
256 static unsigned int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
258 /* Record which pseudos needed to be spilled. */
259 static regset_head spilled_pseudos;
261 /* Record which pseudos changed their allocation in finish_spills. */
262 static regset_head changed_allocation_pseudos;
264 /* Used for communication between order_regs_for_reload and count_pseudo.
265 Used to avoid counting one pseudo twice. */
266 static regset_head pseudos_counted;
268 /* First uid used by insns created by reload in this function.
269 Used in find_equiv_reg. */
270 int reload_first_uid;
272 /* Flag set by local-alloc or global-alloc if anything is live in
273 a call-clobbered reg across calls. */
274 int caller_save_needed;
276 /* Set to 1 while reload_as_needed is operating.
277 Required by some machines to handle any generated moves differently. */
278 int reload_in_progress = 0;
280 /* These arrays record the insn_code of insns that may be needed to
281 perform input and output reloads of special objects. They provide a
282 place to pass a scratch register. */
283 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
284 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
286 /* This obstack is used for allocation of rtl during register elimination.
287 The allocated storage can be freed once find_reloads has processed the
288 insn. */
289 static struct obstack reload_obstack;
291 /* Points to the beginning of the reload_obstack. All insn_chain structures
292 are allocated first. */
293 static char *reload_startobj;
295 /* The point after all insn_chain structures. Used to quickly deallocate
296 memory allocated in copy_reloads during calculate_needs_all_insns. */
297 static char *reload_firstobj;
299 /* This points before all local rtl generated by register elimination.
300 Used to quickly free all memory after processing one insn. */
301 static char *reload_insn_firstobj;
303 /* List of insn_chain instructions, one for every insn that reload needs to
304 examine. */
305 struct insn_chain *reload_insn_chain;
307 /* List of all insns needing reloads. */
308 static struct insn_chain *insns_need_reload;
310 /* This structure is used to record information about register eliminations.
311 Each array entry describes one possible way of eliminating a register
312 in favor of another. If there is more than one way of eliminating a
313 particular register, the most preferred should be specified first. */
315 struct elim_table
317 int from; /* Register number to be eliminated. */
318 int to; /* Register number used as replacement. */
319 HOST_WIDE_INT initial_offset; /* Initial difference between values. */
320 int can_eliminate; /* Nonzero if this elimination can be done. */
321 int can_eliminate_previous; /* Value returned by TARGET_CAN_ELIMINATE
322 target hook in previous scan over insns
323 made by reload. */
324 HOST_WIDE_INT offset; /* Current offset between the two regs. */
325 HOST_WIDE_INT previous_offset;/* Offset at end of previous insn. */
326 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
327 rtx from_rtx; /* REG rtx for the register to be eliminated.
328 We cannot simply compare the number since
329 we might then spuriously replace a hard
330 register corresponding to a pseudo
331 assigned to the reg to be eliminated. */
332 rtx to_rtx; /* REG rtx for the replacement. */
335 static struct elim_table *reg_eliminate = 0;
337 /* This is an intermediate structure to initialize the table. It has
338 exactly the members provided by ELIMINABLE_REGS. */
339 static const struct elim_table_1
341 const int from;
342 const int to;
343 } reg_eliminate_1[] =
345 /* If a set of eliminable registers was specified, define the table from it.
346 Otherwise, default to the normal case of the frame pointer being
347 replaced by the stack pointer. */
349 #ifdef ELIMINABLE_REGS
350 ELIMINABLE_REGS;
351 #else
352 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
353 #endif
355 #define NUM_ELIMINABLE_REGS ARRAY_SIZE (reg_eliminate_1)
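/* Purely for illustration (not part of the original sources), a target's
   ELIMINABLE_REGS usually looks something like

	#define ELIMINABLE_REGS					\
	  {{ ARG_POINTER_REGNUM,   STACK_POINTER_REGNUM },	\
	   { ARG_POINTER_REGNUM,   HARD_FRAME_POINTER_REGNUM },	\
	   { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM },	\
	   { FRAME_POINTER_REGNUM, HARD_FRAME_POINTER_REGNUM }}

   so reg_eliminate_1 ends up holding one {from, to} pair per candidate
   elimination, with the most preferred pair for a given FROM first.  */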
357 /* Record the number of pending eliminations that have an offset not equal
358 to their initial offset. If nonzero, we use a new copy of each
359 replacement result in any insns encountered. */
360 int num_not_at_initial_offset;
362 /* Count the number of registers that we may be able to eliminate. */
363 static int num_eliminable;
364 /* And the number of registers that are equivalent to a constant that
365 can be eliminated to frame_pointer / arg_pointer + constant. */
366 static int num_eliminable_invariants;
368 /* For each label, we record the offset of each elimination. If we reach
369 a label by more than one path and an offset differs, we cannot do the
370 elimination. This information is indexed by the difference of the
371 number of the label and the first label number. We can't offset the
372 pointer itself as this can cause problems on machines with segmented
373 memory. The first table is an array of flags that records whether we
374 have yet encountered a label and the second table is an array of arrays,
375 one entry in the latter array for each elimination. */
377 static int first_label_num;
378 static char *offsets_known_at;
379 static HOST_WIDE_INT (*offsets_at)[NUM_ELIMINABLE_REGS];
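/* Illustrative note (not in the original sources): the offset recorded for
   elimination number I at a CODE_LABEL L is addressed roughly as

	offsets_at[CODE_LABEL_NUMBER (L) - first_label_num][I]

   and offsets_known_at[CODE_LABEL_NUMBER (L) - first_label_num] records
   whether that row has been filled in at all.  */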
381 /* Number of labels in the current function. */
383 static int num_labels;
385 static void replace_pseudos_in (rtx *, enum machine_mode, rtx);
386 static void maybe_fix_stack_asms (void);
387 static void copy_reloads (struct insn_chain *);
388 static void calculate_needs_all_insns (int);
389 static int find_reg (struct insn_chain *, int);
390 static void find_reload_regs (struct insn_chain *);
391 static void select_reload_regs (void);
392 static void delete_caller_save_insns (void);
394 static void spill_failure (rtx, enum reg_class);
395 static void count_spilled_pseudo (int, int, int);
396 static void delete_dead_insn (rtx);
397 static void alter_reg (int, int, bool);
398 static void set_label_offsets (rtx, rtx, int);
399 static void check_eliminable_occurrences (rtx);
400 static void elimination_effects (rtx, enum machine_mode);
401 static int eliminate_regs_in_insn (rtx, int);
402 static void update_eliminable_offsets (void);
403 static void mark_not_eliminable (rtx, const_rtx, void *);
404 static void set_initial_elim_offsets (void);
405 static bool verify_initial_elim_offsets (void);
406 static void set_initial_label_offsets (void);
407 static void set_offsets_for_label (rtx);
408 static void init_elim_table (void);
409 static void update_eliminables (HARD_REG_SET *);
410 static void spill_hard_reg (unsigned int, int);
411 static int finish_spills (int);
412 static void scan_paradoxical_subregs (rtx);
413 static void count_pseudo (int);
414 static void order_regs_for_reload (struct insn_chain *);
415 static void reload_as_needed (int);
416 static void forget_old_reloads_1 (rtx, const_rtx, void *);
417 static void forget_marked_reloads (regset);
418 static int reload_reg_class_lower (const void *, const void *);
419 static void mark_reload_reg_in_use (unsigned int, int, enum reload_type,
420 enum machine_mode);
421 static void clear_reload_reg_in_use (unsigned int, int, enum reload_type,
422 enum machine_mode);
423 static int reload_reg_free_p (unsigned int, int, enum reload_type);
424 static int reload_reg_free_for_value_p (int, int, int, enum reload_type,
425 rtx, rtx, int, int);
426 static int free_for_value_p (int, enum machine_mode, int, enum reload_type,
427 rtx, rtx, int, int);
428 static int reload_reg_reaches_end_p (unsigned int, int, enum reload_type);
429 static int allocate_reload_reg (struct insn_chain *, int, int);
430 static int conflicts_with_override (rtx);
431 static void failed_reload (rtx, int);
432 static int set_reload_reg (int, int);
433 static void choose_reload_regs_init (struct insn_chain *, rtx *);
434 static void choose_reload_regs (struct insn_chain *);
435 static void merge_assigned_reloads (rtx);
436 static void emit_input_reload_insns (struct insn_chain *, struct reload *,
437 rtx, int);
438 static void emit_output_reload_insns (struct insn_chain *, struct reload *,
439 int);
440 static void do_input_reload (struct insn_chain *, struct reload *, int);
441 static void do_output_reload (struct insn_chain *, struct reload *, int);
442 static void emit_reload_insns (struct insn_chain *);
443 static void delete_output_reload (rtx, int, int, rtx);
444 static void delete_address_reloads (rtx, rtx);
445 static void delete_address_reloads_1 (rtx, rtx, rtx);
446 static rtx inc_for_reload (rtx, rtx, rtx, int);
447 #ifdef AUTO_INC_DEC
448 static void add_auto_inc_notes (rtx, rtx);
449 #endif
450 static void substitute (rtx *, const_rtx, rtx);
451 static bool gen_reload_chain_without_interm_reg_p (int, int);
452 static int reloads_conflict (int, int);
453 static rtx gen_reload (rtx, rtx, int, enum reload_type);
454 static rtx emit_insn_if_valid_for_reload (rtx);
456 /* Initialize the reload pass. This is called at the beginning of compilation
457 and may be called again if the target is reinitialized. */
459 void
460 init_reload (void)
462 int i;
464 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
465 Set spill_indirect_levels to the number of levels such addressing is
466 permitted, zero if it is not permitted at all. */
468 rtx tem
469 = gen_rtx_MEM (Pmode,
470 gen_rtx_PLUS (Pmode,
471 gen_rtx_REG (Pmode,
472 LAST_VIRTUAL_REGISTER + 1),
473 GEN_INT (4)));
474 spill_indirect_levels = 0;
476 while (memory_address_p (QImode, tem))
478 spill_indirect_levels++;
479 tem = gen_rtx_MEM (Pmode, tem);
482 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
484 tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
485 indirect_symref_ok = memory_address_p (QImode, tem);
487 /* See if reg+reg is a valid (and offsettable) address. */
489 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
491 tem = gen_rtx_PLUS (Pmode,
492 gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
493 gen_rtx_REG (Pmode, i));
495 /* This way, we make sure that reg+reg is an offsettable address. */
496 tem = plus_constant (tem, 4);
498 if (memory_address_p (QImode, tem))
500 double_reg_address_ok = 1;
501 break;
505 /* Initialize obstack for our rtl allocation. */
506 gcc_obstack_init (&reload_obstack);
507 reload_startobj = XOBNEWVAR (&reload_obstack, char, 0);
509 INIT_REG_SET (&spilled_pseudos);
510 INIT_REG_SET (&changed_allocation_pseudos);
511 INIT_REG_SET (&pseudos_counted);
514 /* List of insn chains that are currently unused. */
515 static struct insn_chain *unused_insn_chains = 0;
517 /* Allocate an empty insn_chain structure. */
518 struct insn_chain *
519 new_insn_chain (void)
521 struct insn_chain *c;
523 if (unused_insn_chains == 0)
525 c = XOBNEW (&reload_obstack, struct insn_chain);
526 INIT_REG_SET (&c->live_throughout);
527 INIT_REG_SET (&c->dead_or_set);
529 else
531 c = unused_insn_chains;
532 unused_insn_chains = c->next;
534 c->is_caller_save_insn = 0;
535 c->need_operand_change = 0;
536 c->need_reload = 0;
537 c->need_elim = 0;
538 return c;
541 /* Small utility function to set all regs in hard reg set TO which are
542 allocated to pseudos in regset FROM. */
544 void
545 compute_use_by_pseudos (HARD_REG_SET *to, regset from)
547 unsigned int regno;
548 reg_set_iterator rsi;
550 EXECUTE_IF_SET_IN_REG_SET (from, FIRST_PSEUDO_REGISTER, regno, rsi)
552 int r = reg_renumber[regno];
554 if (r < 0)
556 /* reload_combine uses the information from DF_LIVE_IN,
557 which might still contain registers that have not
558 actually been allocated since they have an
559 equivalence. */
560 gcc_assert (ira_conflicts_p || reload_completed);
562 else
563 add_to_hard_reg_set (to, PSEUDO_REGNO_MODE (regno), r);
567 /* Replace all pseudos found in LOC with their corresponding
568 equivalences. */
570 static void
571 replace_pseudos_in (rtx *loc, enum machine_mode mem_mode, rtx usage)
573 rtx x = *loc;
574 enum rtx_code code;
575 const char *fmt;
576 int i, j;
578 if (! x)
579 return;
581 code = GET_CODE (x);
582 if (code == REG)
584 unsigned int regno = REGNO (x);
586 if (regno < FIRST_PSEUDO_REGISTER)
587 return;
589 x = eliminate_regs (x, mem_mode, usage);
590 if (x != *loc)
592 *loc = x;
593 replace_pseudos_in (loc, mem_mode, usage);
594 return;
597 if (reg_equiv_constant[regno])
598 *loc = reg_equiv_constant[regno];
599 else if (reg_equiv_mem[regno])
600 *loc = reg_equiv_mem[regno];
601 else if (reg_equiv_address[regno])
602 *loc = gen_rtx_MEM (GET_MODE (x), reg_equiv_address[regno]);
603 else
605 gcc_assert (!REG_P (regno_reg_rtx[regno])
606 || REGNO (regno_reg_rtx[regno]) != regno);
607 *loc = regno_reg_rtx[regno];
610 return;
612 else if (code == MEM)
614 replace_pseudos_in (& XEXP (x, 0), GET_MODE (x), usage);
615 return;
618 /* Process each of our operands recursively. */
619 fmt = GET_RTX_FORMAT (code);
620 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
621 if (*fmt == 'e')
622 replace_pseudos_in (&XEXP (x, i), mem_mode, usage);
623 else if (*fmt == 'E')
624 for (j = 0; j < XVECLEN (x, i); j++)
625 replace_pseudos_in (& XVECEXP (x, i, j), mem_mode, usage);
628 /* Determine if the current function has an exception receiver block
 629    that reaches the exit block via non-exceptional edges. */
631 static bool
632 has_nonexceptional_receiver (void)
634 edge e;
635 edge_iterator ei;
636 basic_block *tos, *worklist, bb;
638 /* If we're not optimizing, then just err on the safe side. */
639 if (!optimize)
640 return true;
642 /* First determine which blocks can reach exit via normal paths. */
643 tos = worklist = XNEWVEC (basic_block, n_basic_blocks + 1);
645 FOR_EACH_BB (bb)
646 bb->flags &= ~BB_REACHABLE;
648 /* Place the exit block on our worklist. */
649 EXIT_BLOCK_PTR->flags |= BB_REACHABLE;
650 *tos++ = EXIT_BLOCK_PTR;
652 /* Iterate: find everything reachable from what we've already seen. */
653 while (tos != worklist)
655 bb = *--tos;
657 FOR_EACH_EDGE (e, ei, bb->preds)
658 if (!(e->flags & EDGE_ABNORMAL))
660 basic_block src = e->src;
662 if (!(src->flags & BB_REACHABLE))
664 src->flags |= BB_REACHABLE;
665 *tos++ = src;
669 free (worklist);
671 /* Now see if there's a reachable block with an exceptional incoming
672 edge. */
673 FOR_EACH_BB (bb)
674 if (bb->flags & BB_REACHABLE)
675 FOR_EACH_EDGE (e, ei, bb->preds)
676 if (e->flags & EDGE_ABNORMAL)
677 return true;
679 /* No exceptional block reached exit unexceptionally. */
680 return false;
684 /* Global variables used by reload and its subroutines. */
686 /* Set during calculate_needs if an insn needs register elimination. */
687 static int something_needs_elimination;
688 /* Set during calculate_needs if an insn needs an operand changed. */
689 static int something_needs_operands_changed;
691 /* Nonzero means we couldn't get enough spill regs. */
692 static int failure;
 694 /* Temporary array of pseudo-register numbers. */
695 static int *temp_pseudo_reg_arr;
697 /* Main entry point for the reload pass.
699 FIRST is the first insn of the function being compiled.
701 GLOBAL nonzero means we were called from global_alloc
702 and should attempt to reallocate any pseudoregs that we
703 displace from hard regs we will use for reloads.
704 If GLOBAL is zero, we do not have enough information to do that,
705 so any pseudo reg that is spilled must go to the stack.
707 Return value is nonzero if reload failed
708 and we must not do any more for this function. */
711 reload (rtx first, int global)
713 int i, n;
714 rtx insn;
715 struct elim_table *ep;
716 basic_block bb;
718 /* Make sure even insns with volatile mem refs are recognizable. */
719 init_recog ();
721 failure = 0;
723 reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
725 /* Make sure that the last insn in the chain
726 is not something that needs reloading. */
727 emit_note (NOTE_INSN_DELETED);
729 /* Enable find_equiv_reg to distinguish insns made by reload. */
730 reload_first_uid = get_max_uid ();
732 #ifdef SECONDARY_MEMORY_NEEDED
733 /* Initialize the secondary memory table. */
734 clear_secondary_mem ();
735 #endif
737 /* We don't have a stack slot for any spill reg yet. */
738 memset (spill_stack_slot, 0, sizeof spill_stack_slot);
739 memset (spill_stack_slot_width, 0, sizeof spill_stack_slot_width);
741 /* Initialize the save area information for caller-save, in case some
742 are needed. */
743 init_save_areas ();
745 /* Compute which hard registers are now in use
746 as homes for pseudo registers.
 747    This is done here rather than (e.g.) in global_alloc
748 because this point is reached even if not optimizing. */
749 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
750 mark_home_live (i);
752 /* A function that has a nonlocal label that can reach the exit
753 block via non-exceptional paths must save all call-saved
754 registers. */
755 if (cfun->has_nonlocal_label
756 && has_nonexceptional_receiver ())
757 crtl->saves_all_registers = 1;
759 if (crtl->saves_all_registers)
760 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
761 if (! call_used_regs[i] && ! fixed_regs[i] && ! LOCAL_REGNO (i))
762 df_set_regs_ever_live (i, true);
764 /* Find all the pseudo registers that didn't get hard regs
765 but do have known equivalent constants or memory slots.
766 These include parameters (known equivalent to parameter slots)
767 and cse'd or loop-moved constant memory addresses.
769 Record constant equivalents in reg_equiv_constant
770 so they will be substituted by find_reloads.
 771    Record memory equivalents in reg_equiv_memory_loc so they can
772 be substituted eventually by altering the REG-rtx's. */
774 reg_equiv_constant = XCNEWVEC (rtx, max_regno);
775 reg_equiv_invariant = XCNEWVEC (rtx, max_regno);
776 reg_equiv_mem = XCNEWVEC (rtx, max_regno);
777 reg_equiv_alt_mem_list = XCNEWVEC (rtx, max_regno);
778 reg_equiv_address = XCNEWVEC (rtx, max_regno);
779 reg_max_ref_width = XCNEWVEC (unsigned int, max_regno);
780 reg_old_renumber = XCNEWVEC (short, max_regno);
781 memcpy (reg_old_renumber, reg_renumber, max_regno * sizeof (short));
782 pseudo_forbidden_regs = XNEWVEC (HARD_REG_SET, max_regno);
783 pseudo_previous_regs = XCNEWVEC (HARD_REG_SET, max_regno);
785 CLEAR_HARD_REG_SET (bad_spill_regs_global);
787 /* Look for REG_EQUIV notes; record what each pseudo is equivalent
788 to. Also find all paradoxical subregs and find largest such for
789 each pseudo. */
791 num_eliminable_invariants = 0;
792 for (insn = first; insn; insn = NEXT_INSN (insn))
794 rtx set = single_set (insn);
796 /* We may introduce USEs that we want to remove at the end, so
797 we'll mark them with QImode. Make sure there are no
 798    previously-marked insns left by, say, regmove. */
799 if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
800 && GET_MODE (insn) != VOIDmode)
801 PUT_MODE (insn, VOIDmode);
803 if (NONDEBUG_INSN_P (insn))
804 scan_paradoxical_subregs (PATTERN (insn));
806 if (set != 0 && REG_P (SET_DEST (set)))
808 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
809 rtx x;
811 if (! note)
812 continue;
814 i = REGNO (SET_DEST (set));
815 x = XEXP (note, 0);
817 if (i <= LAST_VIRTUAL_REGISTER)
818 continue;
820 if (! function_invariant_p (x)
821 || ! flag_pic
822 /* A function invariant is often CONSTANT_P but may
823 include a register. We promise to only pass
824 CONSTANT_P objects to LEGITIMATE_PIC_OPERAND_P. */
825 || (CONSTANT_P (x)
826 && LEGITIMATE_PIC_OPERAND_P (x)))
828 /* It can happen that a REG_EQUIV note contains a MEM
829 that is not a legitimate memory operand. As later
830 stages of reload assume that all addresses found
831 in the reg_equiv_* arrays were originally legitimate,
832 we ignore such REG_EQUIV notes. */
833 if (memory_operand (x, VOIDmode))
835 /* Always unshare the equivalence, so we can
836 substitute into this insn without touching the
837 equivalence. */
838 reg_equiv_memory_loc[i] = copy_rtx (x);
840 else if (function_invariant_p (x))
842 if (GET_CODE (x) == PLUS)
844 /* This is PLUS of frame pointer and a constant,
845 and might be shared. Unshare it. */
846 reg_equiv_invariant[i] = copy_rtx (x);
847 num_eliminable_invariants++;
849 else if (x == frame_pointer_rtx || x == arg_pointer_rtx)
851 reg_equiv_invariant[i] = x;
852 num_eliminable_invariants++;
854 else if (LEGITIMATE_CONSTANT_P (x))
855 reg_equiv_constant[i] = x;
856 else
858 reg_equiv_memory_loc[i]
859 = force_const_mem (GET_MODE (SET_DEST (set)), x);
860 if (! reg_equiv_memory_loc[i])
861 reg_equiv_init[i] = NULL_RTX;
864 else
866 reg_equiv_init[i] = NULL_RTX;
867 continue;
870 else
871 reg_equiv_init[i] = NULL_RTX;
875 if (dump_file)
876 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
877 if (reg_equiv_init[i])
879 fprintf (dump_file, "init_insns for %u: ", i);
880 print_inline_rtx (dump_file, reg_equiv_init[i], 20);
881 fprintf (dump_file, "\n");
884 init_elim_table ();
886 first_label_num = get_first_label_num ();
887 num_labels = max_label_num () - first_label_num;
889 /* Allocate the tables used to store offset information at labels. */
890 /* We used to use alloca here, but the size of what it would try to
891 allocate would occasionally cause it to exceed the stack limit and
892 cause a core dump. */
893 offsets_known_at = XNEWVEC (char, num_labels);
 894   offsets_at = (HOST_WIDE_INT (*)[NUM_ELIMINABLE_REGS])
 895     xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (HOST_WIDE_INT));
896 /* Alter each pseudo-reg rtx to contain its hard reg number. Assign
897 stack slots to the pseudos that lack hard regs or equivalents.
898 Do not touch virtual registers. */
900 temp_pseudo_reg_arr = XNEWVEC (int, max_regno - LAST_VIRTUAL_REGISTER - 1);
901 for (n = 0, i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
902 temp_pseudo_reg_arr[n++] = i;
904 if (ira_conflicts_p)
905 /* Ask IRA to order pseudo-registers for better stack slot
906 sharing. */
907 ira_sort_regnos_for_alter_reg (temp_pseudo_reg_arr, n, reg_max_ref_width);
909 for (i = 0; i < n; i++)
910 alter_reg (temp_pseudo_reg_arr[i], -1, false);
912 /* If we have some registers we think can be eliminated, scan all insns to
913 see if there is an insn that sets one of these registers to something
914 other than itself plus a constant. If so, the register cannot be
915 eliminated. Doing this scan here eliminates an extra pass through the
916 main reload loop in the most common case where register elimination
917 cannot be done. */
918 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
919 if (INSN_P (insn))
920 note_stores (PATTERN (insn), mark_not_eliminable, NULL);
922 maybe_fix_stack_asms ();
924 insns_need_reload = 0;
925 something_needs_elimination = 0;
927 /* Initialize to -1, which means take the first spill register. */
928 last_spill_reg = -1;
930 /* Spill any hard regs that we know we can't eliminate. */
931 CLEAR_HARD_REG_SET (used_spill_regs);
932 /* There can be multiple ways to eliminate a register;
933 they should be listed adjacently.
934 Elimination for any register fails only if all possible ways fail. */
935 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; )
937 int from = ep->from;
938 int can_eliminate = 0;
941 can_eliminate |= ep->can_eliminate;
942 ep++;
944 while (ep < &reg_eliminate[NUM_ELIMINABLE_REGS] && ep->from == from);
945 if (! can_eliminate)
946 spill_hard_reg (from, 1);
949 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
950 if (frame_pointer_needed)
951 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, 1);
952 #endif
953 finish_spills (global);
955 /* From now on, we may need to generate moves differently. We may also
956 allow modifications of insns which cause them to not be recognized.
957 Any such modifications will be cleaned up during reload itself. */
958 reload_in_progress = 1;
960 /* This loop scans the entire function each go-round
961 and repeats until one repetition spills no additional hard regs. */
962 for (;;)
964 int something_changed;
965 int did_spill;
966 HOST_WIDE_INT starting_frame_size;
968 starting_frame_size = get_frame_size ();
970 set_initial_elim_offsets ();
971 set_initial_label_offsets ();
973 /* For each pseudo register that has an equivalent location defined,
974 try to eliminate any eliminable registers (such as the frame pointer)
975 assuming initial offsets for the replacement register, which
976 is the normal case.
978 If the resulting location is directly addressable, substitute
979 the MEM we just got directly for the old REG.
981 If it is not addressable but is a constant or the sum of a hard reg
982 and constant, it is probably not addressable because the constant is
983 out of range, in that case record the address; we will generate
984 hairy code to compute the address in a register each time it is
985 needed. Similarly if it is a hard register, but one that is not
986 valid as an address register.
988 If the location is not addressable, but does not have one of the
989 above forms, assign a stack slot. We have to do this to avoid the
990 potential of producing lots of reloads if, e.g., a location involves
991 a pseudo that didn't get a hard register and has an equivalent memory
992 location that also involves a pseudo that didn't get a hard register.
994 Perhaps at some point we will improve reload_when_needed handling
995 so this problem goes away. But that's very hairy. */
997 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
998 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
1000 rtx x = eliminate_regs (reg_equiv_memory_loc[i], VOIDmode,
1001 NULL_RTX);
1003 if (strict_memory_address_addr_space_p
1004 (GET_MODE (regno_reg_rtx[i]), XEXP (x, 0),
1005 MEM_ADDR_SPACE (x)))
1006 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
1007 else if (CONSTANT_P (XEXP (x, 0))
1008 || (REG_P (XEXP (x, 0))
1009 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
1010 || (GET_CODE (XEXP (x, 0)) == PLUS
1011 && REG_P (XEXP (XEXP (x, 0), 0))
1012 && (REGNO (XEXP (XEXP (x, 0), 0))
1013 < FIRST_PSEUDO_REGISTER)
1014 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
1015 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
1016 else
1018 /* Make a new stack slot. Then indicate that something
1019 changed so we go back and recompute offsets for
1020 eliminable registers because the allocation of memory
1021 below might change some offset. reg_equiv_{mem,address}
1022 will be set up for this pseudo on the next pass around
1023 the loop. */
1024 reg_equiv_memory_loc[i] = 0;
1025 reg_equiv_init[i] = 0;
1026 alter_reg (i, -1, true);
1030 if (caller_save_needed)
1031 setup_save_areas ();
1033 /* If we allocated another stack slot, redo elimination bookkeeping. */
1034 if (starting_frame_size != get_frame_size ())
1035 continue;
1036 if (starting_frame_size && crtl->stack_alignment_needed)
1038 /* If we have a stack frame, we must align it now. The
1039 stack size may be a part of the offset computation for
1040 register elimination. So if this changes the stack size,
1041 then repeat the elimination bookkeeping. We don't
1042 realign when there is no stack, as that will cause a
1043 stack frame when none is needed should
1044 STARTING_FRAME_OFFSET not be already aligned to
1045 STACK_BOUNDARY. */
1046 assign_stack_local (BLKmode, 0, crtl->stack_alignment_needed);
1047 if (starting_frame_size != get_frame_size ())
1048 continue;
1051 if (caller_save_needed)
1053 save_call_clobbered_regs ();
1054 /* That might have allocated new insn_chain structures. */
1055 reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
1058 calculate_needs_all_insns (global);
1060 if (! ira_conflicts_p)
1061 /* Don't do it for IRA. We need this info because we don't
1062 change live_throughout and dead_or_set for chains when IRA
1063 is used. */
1064 CLEAR_REG_SET (&spilled_pseudos);
1066 did_spill = 0;
1068 something_changed = 0;
1070 /* If we allocated any new memory locations, make another pass
1071 since it might have changed elimination offsets. */
1072 if (starting_frame_size != get_frame_size ())
1073 something_changed = 1;
1075 /* Even if the frame size remained the same, we might still have
1076 changed elimination offsets, e.g. if find_reloads called
1077 force_const_mem requiring the back end to allocate a constant
1078 pool base register that needs to be saved on the stack. */
1079 else if (!verify_initial_elim_offsets ())
1080 something_changed = 1;
1083 HARD_REG_SET to_spill;
1084 CLEAR_HARD_REG_SET (to_spill);
1085 update_eliminables (&to_spill);
1086 AND_COMPL_HARD_REG_SET (used_spill_regs, to_spill);
1088 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1089 if (TEST_HARD_REG_BIT (to_spill, i))
1091 spill_hard_reg (i, 1);
1092 did_spill = 1;
1094 /* Regardless of the state of spills, if we previously had
1095 a register that we thought we could eliminate, but now can
1096 not eliminate, we must run another pass.
1098 Consider pseudos which have an entry in reg_equiv_* which
1099 reference an eliminable register. We must make another pass
1100 to update reg_equiv_* so that we do not substitute in the
1101 old value from when we thought the elimination could be
1102 performed. */
1103 something_changed = 1;
1107 select_reload_regs ();
1108 if (failure)
1109 goto failed;
1111 if (insns_need_reload != 0 || did_spill)
1112 something_changed |= finish_spills (global);
1114 if (! something_changed)
1115 break;
1117 if (caller_save_needed)
1118 delete_caller_save_insns ();
1120 obstack_free (&reload_obstack, reload_firstobj);
1123 /* If global-alloc was run, notify it of any register eliminations we have
1124 done. */
1125 if (global)
1126 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1127 if (ep->can_eliminate)
1128 mark_elimination (ep->from, ep->to);
1130 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1131 If that insn didn't set the register (i.e., it copied the register to
1132 memory), just delete that insn instead of the equivalencing insn plus
1133 anything now dead. If we call delete_dead_insn on that insn, we may
1134 delete the insn that actually sets the register if the register dies
1135 there and that is incorrect. */
1137 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1139 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0)
1141 rtx list;
1142 for (list = reg_equiv_init[i]; list; list = XEXP (list, 1))
1144 rtx equiv_insn = XEXP (list, 0);
1146 /* If we already deleted the insn or if it may trap, we can't
1147 delete it. The latter case shouldn't happen, but can
1148 if an insn has a variable address, gets a REG_EH_REGION
1149 note added to it, and then gets converted into a load
1150 from a constant address. */
1151 if (NOTE_P (equiv_insn)
1152 || can_throw_internal (equiv_insn))
1154 else if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn)))
1155 delete_dead_insn (equiv_insn);
1156 else
1157 SET_INSN_DELETED (equiv_insn);
1162 /* Use the reload registers where necessary
1163 by generating move instructions to move the must-be-register
1164 values into or out of the reload registers. */
1166 if (insns_need_reload != 0 || something_needs_elimination
1167 || something_needs_operands_changed)
1169 HOST_WIDE_INT old_frame_size = get_frame_size ();
1171 reload_as_needed (global);
1173 gcc_assert (old_frame_size == get_frame_size ());
1175 gcc_assert (verify_initial_elim_offsets ());
1178 /* If we were able to eliminate the frame pointer, show that it is no
 1179    longer live at the start of any basic block. If it is live by
1180 virtue of being in a pseudo, that pseudo will be marked live
1181 and hence the frame pointer will be known to be live via that
1182 pseudo. */
1184 if (! frame_pointer_needed)
1185 FOR_EACH_BB (bb)
1186 bitmap_clear_bit (df_get_live_in (bb), HARD_FRAME_POINTER_REGNUM);
1188 /* Come here (with failure set nonzero) if we can't get enough spill
1189 regs. */
1190 failed:
1192 CLEAR_REG_SET (&changed_allocation_pseudos);
1193 CLEAR_REG_SET (&spilled_pseudos);
1194 reload_in_progress = 0;
1196 /* Now eliminate all pseudo regs by modifying them into
1197 their equivalent memory references.
1198 The REG-rtx's for the pseudos are modified in place,
1199 so all insns that used to refer to them now refer to memory.
1201 For a reg that has a reg_equiv_address, all those insns
1202 were changed by reloading so that no insns refer to it any longer;
1203 but the DECL_RTL of a variable decl may refer to it,
1204 and if so this causes the debugging info to mention the variable. */
1206 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1208 rtx addr = 0;
1210 if (reg_equiv_mem[i])
1211 addr = XEXP (reg_equiv_mem[i], 0);
1213 if (reg_equiv_address[i])
1214 addr = reg_equiv_address[i];
1216 if (addr)
1218 if (reg_renumber[i] < 0)
1220 rtx reg = regno_reg_rtx[i];
1222 REG_USERVAR_P (reg) = 0;
1223 PUT_CODE (reg, MEM);
1224 XEXP (reg, 0) = addr;
1225 if (reg_equiv_memory_loc[i])
1226 MEM_COPY_ATTRIBUTES (reg, reg_equiv_memory_loc[i]);
1227 else
1229 MEM_IN_STRUCT_P (reg) = MEM_SCALAR_P (reg) = 0;
1230 MEM_ATTRS (reg) = 0;
1232 MEM_NOTRAP_P (reg) = 1;
1234 else if (reg_equiv_mem[i])
1235 XEXP (reg_equiv_mem[i], 0) = addr;
1238 /* We don't want complex addressing modes in debug insns
1239 if simpler ones will do, so delegitimize equivalences
1240 in debug insns. */
1241 if (MAY_HAVE_DEBUG_INSNS && reg_renumber[i] < 0)
1243 rtx reg = regno_reg_rtx[i];
1244 rtx equiv = 0;
1245 df_ref use, next;
1247 if (reg_equiv_constant[i])
1248 equiv = reg_equiv_constant[i];
1249 else if (reg_equiv_invariant[i])
1250 equiv = reg_equiv_invariant[i];
1251 else if (reg && MEM_P (reg))
1252 equiv = targetm.delegitimize_address (reg);
1253 else if (reg && REG_P (reg) && (int)REGNO (reg) != i)
1254 equiv = reg;
1256 if (equiv == reg)
1257 continue;
1259 for (use = DF_REG_USE_CHAIN (i); use; use = next)
1261 insn = DF_REF_INSN (use);
1263 /* Make sure the next ref is for a different instruction,
1264 so that we're not affected by the rescan. */
1265 next = DF_REF_NEXT_REG (use);
1266 while (next && DF_REF_INSN (next) == insn)
1267 next = DF_REF_NEXT_REG (next);
1269 if (DEBUG_INSN_P (insn))
1271 if (!equiv)
1273 INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
1274 df_insn_rescan_debug_internal (insn);
1276 else
1277 INSN_VAR_LOCATION_LOC (insn)
1278 = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (insn),
1279 reg, equiv);
1285 /* We must set reload_completed now since the cleanup_subreg_operands call
1286 below will re-recognize each insn and reload may have generated insns
1287 which are only valid during and after reload. */
1288 reload_completed = 1;
1290 /* Make a pass over all the insns and delete all USEs which we inserted
1291 only to tag a REG_EQUAL note on them. Remove all REG_DEAD and REG_UNUSED
1292 notes. Delete all CLOBBER insns, except those that refer to the return
1293 value and the special mem:BLK CLOBBERs added to prevent the scheduler
1294 from misarranging variable-array code, and simplify (subreg (reg))
1295 operands. Strip and regenerate REG_INC notes that may have been moved
1296 around. */
1298 for (insn = first; insn; insn = NEXT_INSN (insn))
1299 if (INSN_P (insn))
1301 rtx *pnote;
1303 if (CALL_P (insn))
1304 replace_pseudos_in (& CALL_INSN_FUNCTION_USAGE (insn),
1305 VOIDmode, CALL_INSN_FUNCTION_USAGE (insn));
1307 if ((GET_CODE (PATTERN (insn)) == USE
1308 /* We mark with QImode USEs introduced by reload itself. */
1309 && (GET_MODE (insn) == QImode
1310 || find_reg_note (insn, REG_EQUAL, NULL_RTX)))
1311 || (GET_CODE (PATTERN (insn)) == CLOBBER
1312 && (!MEM_P (XEXP (PATTERN (insn), 0))
1313 || GET_MODE (XEXP (PATTERN (insn), 0)) != BLKmode
1314 || (GET_CODE (XEXP (XEXP (PATTERN (insn), 0), 0)) != SCRATCH
1315 && XEXP (XEXP (PATTERN (insn), 0), 0)
1316 != stack_pointer_rtx))
1317 && (!REG_P (XEXP (PATTERN (insn), 0))
1318 || ! REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))))
1320 delete_insn (insn);
1321 continue;
1324 /* Some CLOBBERs may survive until here and still reference unassigned
 1325    pseudos with a constant equivalent, which may in turn cause an ICE in later
1326 passes if the reference remains in place. */
1327 if (GET_CODE (PATTERN (insn)) == CLOBBER)
1328 replace_pseudos_in (& XEXP (PATTERN (insn), 0),
1329 VOIDmode, PATTERN (insn));
1331 /* Discard obvious no-ops, even without -O. This optimization
1332 is fast and doesn't interfere with debugging. */
1333 if (NONJUMP_INSN_P (insn)
1334 && GET_CODE (PATTERN (insn)) == SET
1335 && REG_P (SET_SRC (PATTERN (insn)))
1336 && REG_P (SET_DEST (PATTERN (insn)))
1337 && (REGNO (SET_SRC (PATTERN (insn)))
1338 == REGNO (SET_DEST (PATTERN (insn)))))
1340 delete_insn (insn);
1341 continue;
1344 pnote = &REG_NOTES (insn);
1345 while (*pnote != 0)
1347 if (REG_NOTE_KIND (*pnote) == REG_DEAD
1348 || REG_NOTE_KIND (*pnote) == REG_UNUSED
1349 || REG_NOTE_KIND (*pnote) == REG_INC)
1350 *pnote = XEXP (*pnote, 1);
1351 else
1352 pnote = &XEXP (*pnote, 1);
1355 #ifdef AUTO_INC_DEC
1356 add_auto_inc_notes (insn, PATTERN (insn));
1357 #endif
1359 /* Simplify (subreg (reg)) if it appears as an operand. */
1360 cleanup_subreg_operands (insn);
1362 /* Clean up invalid ASMs so that they don't confuse later passes.
1363 See PR 21299. */
1364 if (asm_noperands (PATTERN (insn)) >= 0)
1366 extract_insn (insn);
1367 if (!constrain_operands (1))
1369 error_for_asm (insn,
1370 "%<asm%> operand has impossible constraints");
1371 delete_insn (insn);
1372 continue;
1377 /* If we are doing generic stack checking, give a warning if this
1378 function's frame size is larger than we expect. */
1379 if (flag_stack_check == GENERIC_STACK_CHECK)
1381 HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
1382 static int verbose_warned = 0;
1384 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1385 if (df_regs_ever_live_p (i) && ! fixed_regs[i] && call_used_regs[i])
1386 size += UNITS_PER_WORD;
1388 if (size > STACK_CHECK_MAX_FRAME_SIZE)
1390 warning (0, "frame size too large for reliable stack checking");
1391 if (! verbose_warned)
1393 warning (0, "try reducing the number of local variables");
1394 verbose_warned = 1;
1399 /* Indicate that we no longer have known memory locations or constants. */
1400 if (reg_equiv_constant)
1401 free (reg_equiv_constant);
1402 if (reg_equiv_invariant)
1403 free (reg_equiv_invariant);
1404 reg_equiv_constant = 0;
1405 reg_equiv_invariant = 0;
1406 VEC_free (rtx, gc, reg_equiv_memory_loc_vec);
1407 reg_equiv_memory_loc = 0;
1409 free (temp_pseudo_reg_arr);
1411 if (offsets_known_at)
1412 free (offsets_known_at);
1413 if (offsets_at)
1414 free (offsets_at);
1416 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1417 if (reg_equiv_alt_mem_list[i])
1418 free_EXPR_LIST_list (&reg_equiv_alt_mem_list[i]);
1419 free (reg_equiv_alt_mem_list);
1421 free (reg_equiv_mem);
1422 reg_equiv_init = 0;
1423 free (reg_equiv_address);
1424 free (reg_max_ref_width);
1425 free (reg_old_renumber);
1426 free (pseudo_previous_regs);
1427 free (pseudo_forbidden_regs);
1429 CLEAR_HARD_REG_SET (used_spill_regs);
1430 for (i = 0; i < n_spills; i++)
1431 SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
1433 /* Free all the insn_chain structures at once. */
1434 obstack_free (&reload_obstack, reload_startobj);
1435 unused_insn_chains = 0;
1436 fixup_abnormal_edges ();
1438 /* Replacing pseudos with their memory equivalents might have
1439 created shared rtx. Subsequent passes would get confused
1440 by this, so unshare everything here. */
1441 unshare_all_rtl_again (first);
1443 #ifdef STACK_BOUNDARY
1444 /* init_emit has set the alignment of the hard frame pointer
1445 to STACK_BOUNDARY. It is very likely no longer valid if
1446 the hard frame pointer was used for register allocation. */
1447 if (!frame_pointer_needed)
1448 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = BITS_PER_UNIT;
1449 #endif
1451 return failure;
1454 /* Yet another special case. Unfortunately, reg-stack forces people to
1455 write incorrect clobbers in asm statements. These clobbers must not
1456 cause the register to appear in bad_spill_regs, otherwise we'll call
1457 fatal_insn later. We clear the corresponding regnos in the live
1458 register sets to avoid this.
1459 The whole thing is rather sick, I'm afraid. */
1461 static void
1462 maybe_fix_stack_asms (void)
1464 #ifdef STACK_REGS
1465 const char *constraints[MAX_RECOG_OPERANDS];
1466 enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
1467 struct insn_chain *chain;
1469 for (chain = reload_insn_chain; chain != 0; chain = chain->next)
1471 int i, noperands;
1472 HARD_REG_SET clobbered, allowed;
1473 rtx pat;
1475 if (! INSN_P (chain->insn)
1476 || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
1477 continue;
1478 pat = PATTERN (chain->insn);
1479 if (GET_CODE (pat) != PARALLEL)
1480 continue;
1482 CLEAR_HARD_REG_SET (clobbered);
1483 CLEAR_HARD_REG_SET (allowed);
1485 /* First, make a mask of all stack regs that are clobbered. */
1486 for (i = 0; i < XVECLEN (pat, 0); i++)
1488 rtx t = XVECEXP (pat, 0, i);
1489 if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
1490 SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
1493 /* Get the operand values and constraints out of the insn. */
1494 decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc,
1495 constraints, operand_mode, NULL);
1497 /* For every operand, see what registers are allowed. */
1498 for (i = 0; i < noperands; i++)
1500 const char *p = constraints[i];
1501 /* For every alternative, we compute the class of registers allowed
1502 for reloading in CLS, and merge its contents into the reg set
1503 ALLOWED. */
1504 int cls = (int) NO_REGS;
1506 for (;;)
1508 char c = *p;
1510 if (c == '\0' || c == ',' || c == '#')
1512 /* End of one alternative - mark the regs in the current
1513 class, and reset the class. */
1514 IOR_HARD_REG_SET (allowed, reg_class_contents[cls]);
1515 cls = NO_REGS;
1516 p++;
1517 if (c == '#')
1518 do {
1519 c = *p++;
1520 } while (c != '\0' && c != ',');
1521 if (c == '\0')
1522 break;
1523 continue;
1526 switch (c)
1528 case '=': case '+': case '*': case '%': case '?': case '!':
1529 case '0': case '1': case '2': case '3': case '4': case '<':
1530 case '>': case 'V': case 'o': case '&': case 'E': case 'F':
1531 case 's': case 'i': case 'n': case 'X': case 'I': case 'J':
1532 case 'K': case 'L': case 'M': case 'N': case 'O': case 'P':
1533 case TARGET_MEM_CONSTRAINT:
1534 break;
1536 case 'p':
1537 cls = (int) reg_class_subunion[cls]
1538 [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
1539 break;
1541 case 'g':
1542 case 'r':
1543 cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
1544 break;
1546 default:
1547 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
1548 cls = (int) reg_class_subunion[cls]
1549 [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
1550 else
1551 cls = (int) reg_class_subunion[cls]
1552 [(int) REG_CLASS_FROM_CONSTRAINT (c, p)];
1554 p += CONSTRAINT_LEN (c, p);
1557 /* Those of the registers which are clobbered, but allowed by the
1558 constraints, must be usable as reload registers. So clear them
1559 out of the life information. */
1560 AND_HARD_REG_SET (allowed, clobbered);
1561 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1562 if (TEST_HARD_REG_BIT (allowed, i))
1564 CLEAR_REGNO_REG_SET (&chain->live_throughout, i);
1565 CLEAR_REGNO_REG_SET (&chain->dead_or_set, i);
1569 #endif
1572 /* Copy the global variables n_reloads and rld into the corresponding elts
1573 of CHAIN. */
1574 static void
1575 copy_reloads (struct insn_chain *chain)
1577 chain->n_reloads = n_reloads;
1578 chain->rld = XOBNEWVEC (&reload_obstack, struct reload, n_reloads);
1579 memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1580 reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
1583 /* Walk the chain of insns, and determine for each whether it needs reloads
1584 and/or eliminations. Build the corresponding insns_need_reload list, and
1585 set something_needs_elimination as appropriate. */
1586 static void
1587 calculate_needs_all_insns (int global)
1589 struct insn_chain **pprev_reload = &insns_need_reload;
1590 struct insn_chain *chain, *next = 0;
1592 something_needs_elimination = 0;
1594 reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
1595 for (chain = reload_insn_chain; chain != 0; chain = next)
1597 rtx insn = chain->insn;
1599 next = chain->next;
1601 /* Clear out the shortcuts. */
1602 chain->n_reloads = 0;
1603 chain->need_elim = 0;
1604 chain->need_reload = 0;
1605 chain->need_operand_change = 0;
1607 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
1608 include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
1609 what effects this has on the known offsets at labels. */
1611 if (LABEL_P (insn) || JUMP_P (insn)
1612 || (INSN_P (insn) && REG_NOTES (insn) != 0))
1613 set_label_offsets (insn, insn, 0);
1615 if (INSN_P (insn))
1617 rtx old_body = PATTERN (insn);
1618 int old_code = INSN_CODE (insn);
1619 rtx old_notes = REG_NOTES (insn);
1620 int did_elimination = 0;
1621 int operands_changed = 0;
1622 rtx set = single_set (insn);
1624 /* Skip insns that only set an equivalence. */
1625 if (set && REG_P (SET_DEST (set))
1626 && reg_renumber[REGNO (SET_DEST (set))] < 0
1627 && (reg_equiv_constant[REGNO (SET_DEST (set))]
1628 || (reg_equiv_invariant[REGNO (SET_DEST (set))]))
1629 && reg_equiv_init[REGNO (SET_DEST (set))])
1630 continue;
1632 /* If needed, eliminate any eliminable registers. */
1633 if (num_eliminable || num_eliminable_invariants)
1634 did_elimination = eliminate_regs_in_insn (insn, 0);
1636 /* Analyze the instruction. */
1637 operands_changed = find_reloads (insn, 0, spill_indirect_levels,
1638 global, spill_reg_order);
1640 /* If a no-op set needs more than one reload, this is likely
1641 to be something that needs input address reloads. We
1642 can't get rid of this cleanly later, and it is of no use
1643 anyway, so discard it now.
1644 We only do this when expensive_optimizations is enabled,
1645 since this complements reload inheritance / output
1646 reload deletion, and it can make debugging harder. */
1647 if (flag_expensive_optimizations && n_reloads > 1)
1649 rtx set = single_set (insn);
1650 if (set
1651 &&
1652 ((SET_SRC (set) == SET_DEST (set)
1653 && REG_P (SET_SRC (set))
1654 && REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER)
1655 || (REG_P (SET_SRC (set)) && REG_P (SET_DEST (set))
1656 && reg_renumber[REGNO (SET_SRC (set))] < 0
1657 && reg_renumber[REGNO (SET_DEST (set))] < 0
1658 && reg_equiv_memory_loc[REGNO (SET_SRC (set))] != NULL
1659 && reg_equiv_memory_loc[REGNO (SET_DEST (set))] != NULL
1660 && rtx_equal_p (reg_equiv_memory_loc
1661 [REGNO (SET_SRC (set))],
1662 reg_equiv_memory_loc
1663 [REGNO (SET_DEST (set))]))))
1665 if (ira_conflicts_p)
1666 /* Inform IRA about the insn deletion. */
1667 ira_mark_memory_move_deletion (REGNO (SET_DEST (set)),
1668 REGNO (SET_SRC (set)));
1669 delete_insn (insn);
1670 /* Delete it from the reload chain. */
1671 if (chain->prev)
1672 chain->prev->next = next;
1673 else
1674 reload_insn_chain = next;
1675 if (next)
1676 next->prev = chain->prev;
1677 chain->next = unused_insn_chains;
1678 unused_insn_chains = chain;
1679 continue;
1682 if (num_eliminable)
1683 update_eliminable_offsets ();
1685 /* Remember for later shortcuts which insns had any reloads or
1686 register eliminations. */
1687 chain->need_elim = did_elimination;
1688 chain->need_reload = n_reloads > 0;
1689 chain->need_operand_change = operands_changed;
1691 /* Discard any register replacements done. */
1692 if (did_elimination)
1694 obstack_free (&reload_obstack, reload_insn_firstobj);
1695 PATTERN (insn) = old_body;
1696 INSN_CODE (insn) = old_code;
1697 REG_NOTES (insn) = old_notes;
1698 something_needs_elimination = 1;
1701 something_needs_operands_changed |= operands_changed;
1703 if (n_reloads != 0)
1705 copy_reloads (chain);
1706 *pprev_reload = chain;
1707 pprev_reload = &chain->next_need_reload;
1711 *pprev_reload = 0;
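/* A small worked example of the no-op-move case handled above, assuming a
   pseudo 100 that received no hard register: an insn such as

	(set (reg:SI 100) (reg:SI 100))

   or a copy between two spilled pseudos whose reg_equiv_memory_loc is the
   same slot would otherwise demand both an input and an output reload.
   With -fexpensive-optimizations the loop simply deletes the insn and
   unlinks its chain element instead.  */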
1714 /* Comparison function for qsort to decide which of two reloads
1715 should be handled first. *P1 and *P2 are the reload numbers. */
1717 static int
1718 reload_reg_class_lower (const void *r1p, const void *r2p)
1720 int r1 = *(const short *) r1p, r2 = *(const short *) r2p;
1721 int t;
1723 /* Consider required reloads before optional ones. */
1724 t = rld[r1].optional - rld[r2].optional;
1725 if (t != 0)
1726 return t;
1728 /* Count all solitary classes before non-solitary ones. */
1729 t = ((reg_class_size[(int) rld[r2].rclass] == 1)
1730 - (reg_class_size[(int) rld[r1].rclass] == 1));
1731 if (t != 0)
1732 return t;
1734 /* Aside from solitaires, consider all multi-reg groups first. */
1735 t = rld[r2].nregs - rld[r1].nregs;
1736 if (t != 0)
1737 return t;
1739 /* Consider reloads in order of increasing reg-class number. */
1740 t = (int) rld[r1].rclass - (int) rld[r2].rclass;
1741 if (t != 0)
1742 return t;
1744 /* If reloads are equally urgent, sort by reload number,
1745 so that the results of qsort leave nothing to chance. */
1746 return r1 - r2;
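/* Sketch of how this comparator is used (the actual call is in
   find_reload_regs below), on a hypothetical mix of reloads:

	qsort (reload_order, n_reloads, sizeof (short),
	       reload_reg_class_lower);

   places a required reload in a single-register class first, then a
   required two-register group in GENERAL_REGS, then a required
   one-register reload in GENERAL_REGS; optional reloads come last and
   ties are broken by reload number so the order is deterministic.  */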
1749 /* The cost of spilling each hard reg. */
1750 static int spill_cost[FIRST_PSEUDO_REGISTER];
1752 /* When spilling multiple hard registers, we use SPILL_COST for the first
1753 spilled hard reg and SPILL_ADD_COST for subsequent regs. SPILL_ADD_COST
1754 is recorded only for the first hard reg of a multi-reg pseudo. */
1755 static int spill_add_cost[FIRST_PSEUDO_REGISTER];
1757 /* Map of hard regno to pseudo regno currently occupying the hard
1758 reg. */
1759 static int hard_regno_to_pseudo_regno[FIRST_PSEUDO_REGISTER];
1761 /* Update the spill cost arrays, considering that pseudo REG is live. */
1763 static void
1764 count_pseudo (int reg)
1766 int freq = REG_FREQ (reg);
1767 int r = reg_renumber[reg];
1768 int nregs;
1770 if (REGNO_REG_SET_P (&pseudos_counted, reg)
1771 || REGNO_REG_SET_P (&spilled_pseudos, reg)
1772 /* Ignore spilled pseudo-registers which can be here only if IRA
1773 is used. */
1774 || (ira_conflicts_p && r < 0))
1775 return;
1777 SET_REGNO_REG_SET (&pseudos_counted, reg);
1779 gcc_assert (r >= 0);
1781 spill_add_cost[r] += freq;
1782 nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
1783 while (nregs-- > 0)
1785 hard_regno_to_pseudo_regno[r + nregs] = reg;
1786 spill_cost[r + nregs] += freq;
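/* A worked example with hypothetical numbers: if pseudo 200 has
   frequency 10 and sits in the two-register group {4,5}, the code above
   leaves

	spill_cost[4] == spill_cost[5] == 10
	spill_add_cost[4] == 10, spill_add_cost[5] == 0

   so find_reg later charges 10 (spill_cost[4] + spill_add_cost[5]) for
   taking the whole group, instead of counting the same pseudo twice.  */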
1790 /* Calculate the SPILL_COST and SPILL_ADD_COST arrays and determine the
1791 contents of BAD_SPILL_REGS for the insn described by CHAIN. */
1793 static void
1794 order_regs_for_reload (struct insn_chain *chain)
1796 unsigned i;
1797 HARD_REG_SET used_by_pseudos;
1798 HARD_REG_SET used_by_pseudos2;
1799 reg_set_iterator rsi;
1801 COPY_HARD_REG_SET (bad_spill_regs, fixed_reg_set);
1803 memset (spill_cost, 0, sizeof spill_cost);
1804 memset (spill_add_cost, 0, sizeof spill_add_cost);
1805 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1806 hard_regno_to_pseudo_regno[i] = -1;
1808 /* Count number of uses of each hard reg by pseudo regs allocated to it
1809 and then order them by decreasing use. First exclude hard registers
1810 that are live in or across this insn. */
1812 REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
1813 REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
1814 IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos);
1815 IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos2);
1817 /* Now find out which pseudos are allocated to each hard reg, and update
1818 the spill cost arrays. */
1819 CLEAR_REG_SET (&pseudos_counted);
1821 EXECUTE_IF_SET_IN_REG_SET
1822 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
1824 count_pseudo (i);
1826 EXECUTE_IF_SET_IN_REG_SET
1827 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
1829 count_pseudo (i);
1831 CLEAR_REG_SET (&pseudos_counted);
1834 /* Vector of reload-numbers showing the order in which the reloads should
1835 be processed. */
1836 static short reload_order[MAX_RELOADS];
1838 /* This is used to keep track of the spill regs used in one insn. */
1839 static HARD_REG_SET used_spill_regs_local;
1841 /* We decided to spill hard register SPILLED, which has a size of
1842 SPILLED_NREGS. Determine how pseudo REG, which is live during the insn,
1843 is affected. We will add it to SPILLED_PSEUDOS if necessary, and we will
1844 update SPILL_COST/SPILL_ADD_COST. */
1846 static void
1847 count_spilled_pseudo (int spilled, int spilled_nregs, int reg)
1849 int freq = REG_FREQ (reg);
1850 int r = reg_renumber[reg];
1851 int nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
1853 /* Ignore spilled pseudo-registers which can be here only if IRA is
1854 used. */
1855 if ((ira_conflicts_p && r < 0)
1856 || REGNO_REG_SET_P (&spilled_pseudos, reg)
1857 || spilled + spilled_nregs <= r || r + nregs <= spilled)
1858 return;
1860 SET_REGNO_REG_SET (&spilled_pseudos, reg);
1862 spill_add_cost[r] -= freq;
1863 while (nregs-- > 0)
1865 hard_regno_to_pseudo_regno[r + nregs] = -1;
1866 spill_cost[r + nregs] -= freq;
1870 /* Find reload register to use for reload number ORDER. */
1872 static int
1873 find_reg (struct insn_chain *chain, int order)
1875 int rnum = reload_order[order];
1876 struct reload *rl = rld + rnum;
1877 int best_cost = INT_MAX;
1878 int best_reg = -1;
1879 unsigned int i, j, n;
1880 int k;
1881 HARD_REG_SET not_usable;
1882 HARD_REG_SET used_by_other_reload;
1883 reg_set_iterator rsi;
1884 static int regno_pseudo_regs[FIRST_PSEUDO_REGISTER];
1885 static int best_regno_pseudo_regs[FIRST_PSEUDO_REGISTER];
1887 COPY_HARD_REG_SET (not_usable, bad_spill_regs);
1888 IOR_HARD_REG_SET (not_usable, bad_spill_regs_global);
1889 IOR_COMPL_HARD_REG_SET (not_usable, reg_class_contents[rl->rclass]);
1891 CLEAR_HARD_REG_SET (used_by_other_reload);
1892 for (k = 0; k < order; k++)
1894 int other = reload_order[k];
1896 if (rld[other].regno >= 0 && reloads_conflict (other, rnum))
1897 for (j = 0; j < rld[other].nregs; j++)
1898 SET_HARD_REG_BIT (used_by_other_reload, rld[other].regno + j);
1901 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1903 #ifdef REG_ALLOC_ORDER
1904 unsigned int regno = reg_alloc_order[i];
1905 #else
1906 unsigned int regno = i;
1907 #endif
1909 if (! TEST_HARD_REG_BIT (not_usable, regno)
1910 && ! TEST_HARD_REG_BIT (used_by_other_reload, regno)
1911 && HARD_REGNO_MODE_OK (regno, rl->mode))
1913 int this_cost = spill_cost[regno];
1914 int ok = 1;
1915 unsigned int this_nregs = hard_regno_nregs[regno][rl->mode];
1917 for (j = 1; j < this_nregs; j++)
1919 this_cost += spill_add_cost[regno + j];
1920 if ((TEST_HARD_REG_BIT (not_usable, regno + j))
1921 || TEST_HARD_REG_BIT (used_by_other_reload, regno + j))
1922 ok = 0;
1924 if (! ok)
1925 continue;
1927 if (ira_conflicts_p)
1929 /* Ask IRA to find a better pseudo-register for
1930 spilling. */
1931 for (n = j = 0; j < this_nregs; j++)
1933 int r = hard_regno_to_pseudo_regno[regno + j];
1935 if (r < 0)
1936 continue;
1937 if (n == 0 || regno_pseudo_regs[n - 1] != r)
1938 regno_pseudo_regs[n++] = r;
1940 regno_pseudo_regs[n++] = -1;
1941 if (best_reg < 0
1942 || ira_better_spill_reload_regno_p (regno_pseudo_regs,
1943 best_regno_pseudo_regs,
1944 rl->in, rl->out,
1945 chain->insn))
1947 best_reg = regno;
1948 for (j = 0;; j++)
1950 best_regno_pseudo_regs[j] = regno_pseudo_regs[j];
1951 if (regno_pseudo_regs[j] < 0)
1952 break;
1955 continue;
1958 if (rl->in && REG_P (rl->in) && REGNO (rl->in) == regno)
1959 this_cost--;
1960 if (rl->out && REG_P (rl->out) && REGNO (rl->out) == regno)
1961 this_cost--;
1962 if (this_cost < best_cost
1963 /* Among registers with equal cost, prefer caller-saved ones, or
1964 use REG_ALLOC_ORDER if it is defined. */
1965 || (this_cost == best_cost
1966 #ifdef REG_ALLOC_ORDER
1967 && (inv_reg_alloc_order[regno]
1968 < inv_reg_alloc_order[best_reg])
1969 #else
1970 && call_used_regs[regno]
1971 && ! call_used_regs[best_reg]
1972 #endif
1975 best_reg = regno;
1976 best_cost = this_cost;
1980 if (best_reg == -1)
1981 return 0;
1983 if (dump_file)
1984 fprintf (dump_file, "Using reg %d for reload %d\n", best_reg, rnum);
1986 rl->nregs = hard_regno_nregs[best_reg][rl->mode];
1987 rl->regno = best_reg;
1989 EXECUTE_IF_SET_IN_REG_SET
1990 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, j, rsi)
1992 count_spilled_pseudo (best_reg, rl->nregs, j);
1995 EXECUTE_IF_SET_IN_REG_SET
1996 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, j, rsi)
1998 count_spilled_pseudo (best_reg, rl->nregs, j);
2001 for (i = 0; i < rl->nregs; i++)
2003 gcc_assert (spill_cost[best_reg + i] == 0);
2004 gcc_assert (spill_add_cost[best_reg + i] == 0);
2005 gcc_assert (hard_regno_to_pseudo_regno[best_reg + i] == -1);
2006 SET_HARD_REG_BIT (used_spill_regs_local, best_reg + i);
2008 return 1;
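/* Note on the tie-breaking above: when two candidates have equal spill
   cost, the call-used (caller-saved) register is preferred over a
   call-saved one, or, on targets defining REG_ALLOC_ORDER, the register
   that comes earlier in that order wins, mirroring the allocator's own
   preference.  */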
2011 /* Find more reload regs to satisfy the remaining need of an insn, which
2012 is given by CHAIN.
2013 Do it by ascending class number, since otherwise a reg
2014 might be spilled for a big class and might fail to count
2015 for a smaller class even though it belongs to that class. */
2017 static void
2018 find_reload_regs (struct insn_chain *chain)
2020 int i;
2022 /* In order to be certain of getting the registers we need,
2023 we must sort the reloads into order of increasing register class.
2024 Then our grabbing of reload registers will parallel the process
2025 that provided the reload registers. */
2026 for (i = 0; i < chain->n_reloads; i++)
2028 /* Show whether this reload already has a hard reg. */
2029 if (chain->rld[i].reg_rtx)
2031 int regno = REGNO (chain->rld[i].reg_rtx);
2032 chain->rld[i].regno = regno;
2033 chain->rld[i].nregs
2034 = hard_regno_nregs[regno][GET_MODE (chain->rld[i].reg_rtx)];
2036 else
2037 chain->rld[i].regno = -1;
2038 reload_order[i] = i;
2041 n_reloads = chain->n_reloads;
2042 memcpy (rld, chain->rld, n_reloads * sizeof (struct reload));
2044 CLEAR_HARD_REG_SET (used_spill_regs_local);
2046 if (dump_file)
2047 fprintf (dump_file, "Spilling for insn %d.\n", INSN_UID (chain->insn));
2049 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
2051 /* Compute the order of preference for hard registers to spill. */
2053 order_regs_for_reload (chain);
2055 for (i = 0; i < n_reloads; i++)
2057 int r = reload_order[i];
2059 /* Ignore reloads that got marked inoperative. */
2060 if ((rld[r].out != 0 || rld[r].in != 0 || rld[r].secondary_p)
2061 && ! rld[r].optional
2062 && rld[r].regno == -1)
2063 if (! find_reg (chain, i))
2065 if (dump_file)
2066 fprintf (dump_file, "reload failure for reload %d\n", r);
2067 spill_failure (chain->insn, rld[r].rclass);
2068 failure = 1;
2069 return;
2073 COPY_HARD_REG_SET (chain->used_spill_regs, used_spill_regs_local);
2074 IOR_HARD_REG_SET (used_spill_regs, used_spill_regs_local);
2076 memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
2079 static void
2080 select_reload_regs (void)
2082 struct insn_chain *chain;
2084 /* Try to satisfy the needs for each insn. */
2085 for (chain = insns_need_reload; chain != 0;
2086 chain = chain->next_need_reload)
2087 find_reload_regs (chain);
2090 /* Delete all insns that were inserted by emit_caller_save_insns during
2091 this iteration. */
2092 static void
2093 delete_caller_save_insns (void)
2095 struct insn_chain *c = reload_insn_chain;
2097 while (c != 0)
2099 while (c != 0 && c->is_caller_save_insn)
2101 struct insn_chain *next = c->next;
2102 rtx insn = c->insn;
2104 if (c == reload_insn_chain)
2105 reload_insn_chain = next;
2106 delete_insn (insn);
2108 if (next)
2109 next->prev = c->prev;
2110 if (c->prev)
2111 c->prev->next = next;
2112 c->next = unused_insn_chains;
2113 unused_insn_chains = c;
2114 c = next;
2116 if (c != 0)
2117 c = c->next;
2121 /* Handle the failure to find a register to spill.
2122 INSN should be one of the insns which needed this particular spill reg. */
2124 static void
2125 spill_failure (rtx insn, enum reg_class rclass)
2127 if (asm_noperands (PATTERN (insn)) >= 0)
2128 error_for_asm (insn, "can't find a register in class %qs while "
2129 "reloading %<asm%>",
2130 reg_class_names[rclass]);
2131 else
2133 error ("unable to find a register to spill in class %qs",
2134 reg_class_names[rclass]);
2136 if (dump_file)
2138 fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
2139 debug_reload_to_stream (dump_file);
2141 fatal_insn ("this is the insn:", insn);
2145 /* Delete an unneeded INSN and any previous insns whose sole purpose is loading
2146 data that is dead in INSN. */
2148 static void
2149 delete_dead_insn (rtx insn)
2151 rtx prev = prev_real_insn (insn);
2152 rtx prev_dest;
2154 /* If the previous insn sets a register that dies in our insn, delete it
2155 too. */
2156 if (prev && GET_CODE (PATTERN (prev)) == SET
2157 && (prev_dest = SET_DEST (PATTERN (prev)), REG_P (prev_dest))
2158 && reg_mentioned_p (prev_dest, PATTERN (insn))
2159 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
2160 && ! side_effects_p (SET_SRC (PATTERN (prev))))
2161 delete_dead_insn (prev);
2163 SET_INSN_DELETED (insn);
2166 /* Modify the home of pseudo-reg I.
2167 The new home is present in reg_renumber[I].
2169 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2170 or it may be -1, meaning there is none or it is not relevant.
2171 This is used so that all pseudos spilled from a given hard reg
2172 can share one stack slot. */
2174 static void
2175 alter_reg (int i, int from_reg, bool dont_share_p)
2177 /* When outputting an inline function, this can happen
2178 for a reg that isn't actually used. */
2179 if (regno_reg_rtx[i] == 0)
2180 return;
2182 /* If the reg got changed to a MEM at rtl-generation time,
2183 ignore it. */
2184 if (!REG_P (regno_reg_rtx[i]))
2185 return;
2187 /* Modify the reg-rtx to contain the new hard reg
2188 number or else to contain its pseudo reg number. */
2189 SET_REGNO (regno_reg_rtx[i],
2190 reg_renumber[i] >= 0 ? reg_renumber[i] : i);
2192 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2193 allocate a stack slot for it. */
2195 if (reg_renumber[i] < 0
2196 && REG_N_REFS (i) > 0
2197 && reg_equiv_constant[i] == 0
2198 && (reg_equiv_invariant[i] == 0 || reg_equiv_init[i] == 0)
2199 && reg_equiv_memory_loc[i] == 0)
2201 rtx x = NULL_RTX;
2202 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2203 unsigned int inherent_size = PSEUDO_REGNO_BYTES (i);
2204 unsigned int inherent_align = GET_MODE_ALIGNMENT (mode);
2205 unsigned int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2206 unsigned int min_align = reg_max_ref_width[i] * BITS_PER_UNIT;
2207 int adjust = 0;
2209 if (ira_conflicts_p)
2211 /* Mark the spill for IRA. */
2212 SET_REGNO_REG_SET (&spilled_pseudos, i);
2213 if (!dont_share_p)
2214 x = ira_reuse_stack_slot (i, inherent_size, total_size);
2217 if (x)
2220 /* Each pseudo reg has an inherent size which comes from its own mode,
2221 and a total size which provides room for paradoxical subregs
2222 which refer to the pseudo reg in wider modes.
2224 We can use a slot already allocated if it provides both
2225 enough inherent space and enough total space.
2226 Otherwise, we allocate a new slot, making sure that it has no less
2227 inherent space, and no less total space, than the previous slot. */
2228 else if (from_reg == -1 || (!dont_share_p && ira_conflicts_p))
2230 rtx stack_slot;
2232 /* No known place to spill from => no slot to reuse. */
2233 x = assign_stack_local (mode, total_size,
2234 min_align > inherent_align
2235 || total_size > inherent_size ? -1 : 0);
2237 stack_slot = x;
2239 /* Cancel the big-endian correction done in assign_stack_local.
2240 Get the address of the beginning of the slot. This is so we
2241 can do a big-endian correction unconditionally below. */
2242 if (BYTES_BIG_ENDIAN)
2244 adjust = inherent_size - total_size;
2245 if (adjust)
2246 stack_slot
2247 = adjust_address_nv (x, mode_for_size (total_size
2248 * BITS_PER_UNIT,
2249 MODE_INT, 1),
2250 adjust);
2253 if (! dont_share_p && ira_conflicts_p)
2254 /* Inform IRA about allocation of a new stack slot. */
2255 ira_mark_new_stack_slot (stack_slot, i, total_size);
2258 /* Reuse a stack slot if possible. */
2259 else if (spill_stack_slot[from_reg] != 0
2260 && spill_stack_slot_width[from_reg] >= total_size
2261 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2262 >= inherent_size)
2263 && MEM_ALIGN (spill_stack_slot[from_reg]) >= min_align)
2264 x = spill_stack_slot[from_reg];
2266 /* Allocate a bigger slot. */
2267 else
2269 /* Compute maximum size needed, both for inherent size
2270 and for total size. */
2271 rtx stack_slot;
2273 if (spill_stack_slot[from_reg])
2275 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2276 > inherent_size)
2277 mode = GET_MODE (spill_stack_slot[from_reg]);
2278 if (spill_stack_slot_width[from_reg] > total_size)
2279 total_size = spill_stack_slot_width[from_reg];
2280 if (MEM_ALIGN (spill_stack_slot[from_reg]) > min_align)
2281 min_align = MEM_ALIGN (spill_stack_slot[from_reg]);
2284 /* Make a slot with that size. */
2285 x = assign_stack_local (mode, total_size,
2286 min_align > inherent_align
2287 || total_size > inherent_size ? -1 : 0);
2288 stack_slot = x;
2290 /* Cancel the big-endian correction done in assign_stack_local.
2291 Get the address of the beginning of the slot. This is so we
2292 can do a big-endian correction unconditionally below. */
2293 if (BYTES_BIG_ENDIAN)
2295 adjust = GET_MODE_SIZE (mode) - total_size;
2296 if (adjust)
2297 stack_slot
2298 = adjust_address_nv (x, mode_for_size (total_size
2299 * BITS_PER_UNIT,
2300 MODE_INT, 1),
2301 adjust);
2304 spill_stack_slot[from_reg] = stack_slot;
2305 spill_stack_slot_width[from_reg] = total_size;
2308 /* On a big endian machine, the "address" of the slot
2309 is the address of the low part that fits its inherent mode. */
2310 if (BYTES_BIG_ENDIAN && inherent_size < total_size)
2311 adjust += (total_size - inherent_size);
2313 /* If we have any adjustment to make, or if the stack slot is the
2314 wrong mode, make a new stack slot. */
2315 x = adjust_address_nv (x, GET_MODE (regno_reg_rtx[i]), adjust);
2317 /* Set all of the memory attributes as appropriate for a spill. */
2318 set_mem_attrs_for_spill (x);
2320 /* Save the stack slot for later. */
2321 reg_equiv_memory_loc[i] = x;
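/* A sizing example with hypothetical values: a 4-byte SImode pseudo whose
   widest reference is an 8-byte paradoxical subreg has inherent_size 4
   and total_size 8, so an existing 8-byte slot recorded for FROM_REG can
   be shared, while a 4-byte slot cannot and a larger one is allocated.  */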
2325 /* Mark the slots in regs_ever_live for the hard regs used by
2326 pseudo-reg number REGNO, accessed in MODE. */
2328 static void
2329 mark_home_live_1 (int regno, enum machine_mode mode)
2331 int i, lim;
2333 i = reg_renumber[regno];
2334 if (i < 0)
2335 return;
2336 lim = end_hard_regno (mode, i);
2337 while (i < lim)
2338 df_set_regs_ever_live (i++, true);
2341 /* Mark the slots in regs_ever_live for the hard regs
2342 used by pseudo-reg number REGNO. */
2344 void
2345 mark_home_live (int regno)
2347 if (reg_renumber[regno] >= 0)
2348 mark_home_live_1 (regno, PSEUDO_REGNO_MODE (regno));
2351 /* This function handles the tracking of elimination offsets around branches.
2353 X is a piece of RTL being scanned.
2355 INSN is the insn that it came from, if any.
2357 INITIAL_P is nonzero if we are to set the offset to be the initial
2358 offset and zero if we are setting the offset of the label to be the
2359 current offset. */
2361 static void
2362 set_label_offsets (rtx x, rtx insn, int initial_p)
2364 enum rtx_code code = GET_CODE (x);
2365 rtx tem;
2366 unsigned int i;
2367 struct elim_table *p;
2369 switch (code)
2371 case LABEL_REF:
2372 if (LABEL_REF_NONLOCAL_P (x))
2373 return;
2375 x = XEXP (x, 0);
2377 /* ... fall through ... */
2379 case CODE_LABEL:
2380 /* If we know nothing about this label, set the desired offsets. Note
2381 that this sets the offset at a label to be the offset before a label
2382 if we don't know anything about the label. This is not correct for
2383 the label after a BARRIER, but is the best guess we can make. If
2384 we guessed wrong, we will suppress an elimination that might have
2385 been possible had we been able to guess correctly. */
2387 if (! offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num])
2389 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2390 offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
2391 = (initial_p ? reg_eliminate[i].initial_offset
2392 : reg_eliminate[i].offset);
2393 offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num] = 1;
2396 /* Otherwise, if this is the definition of a label and it is
2397 preceded by a BARRIER, set our offsets to the known offset of
2398 that label. */
2400 else if (x == insn
2401 && (tem = prev_nonnote_insn (insn)) != 0
2402 && BARRIER_P (tem))
2403 set_offsets_for_label (insn);
2404 else
2405 /* If neither of the above cases is true, compare each offset
2406 with those previously recorded and suppress any eliminations
2407 where the offsets disagree. */
2409 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2410 if (offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
2411 != (initial_p ? reg_eliminate[i].initial_offset
2412 : reg_eliminate[i].offset))
2413 reg_eliminate[i].can_eliminate = 0;
2415 return;
2417 case JUMP_INSN:
2418 set_label_offsets (PATTERN (insn), insn, initial_p);
2420 /* ... fall through ... */
2422 case INSN:
2423 case CALL_INSN:
2424 /* Any labels mentioned in REG_LABEL_OPERAND notes can be branched
2425 to indirectly and hence must have all eliminations at their
2426 initial offsets. */
2427 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2428 if (REG_NOTE_KIND (tem) == REG_LABEL_OPERAND)
2429 set_label_offsets (XEXP (tem, 0), insn, 1);
2430 return;
2432 case PARALLEL:
2433 case ADDR_VEC:
2434 case ADDR_DIFF_VEC:
2435 /* Each of the labels in the parallel or address vector must be
2436 at their initial offsets. We want the first field for PARALLEL
2437 and ADDR_VEC and the second field for ADDR_DIFF_VEC. */
2439 for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2440 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2441 insn, initial_p);
2442 return;
2444 case SET:
2445 /* We only care about setting PC. If the source is not RETURN,
2446 IF_THEN_ELSE, or a label, disable any eliminations not at
2447 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2448 isn't one of those possibilities. For branches to a label,
2449 call ourselves recursively.
2451 Note that this can disable elimination unnecessarily when we have
2452 a non-local goto since it will look like a non-constant jump to
2453 someplace in the current function. This isn't a significant
2454 problem since such jumps will normally be when all elimination
2455 pairs are back to their initial offsets. */
2457 if (SET_DEST (x) != pc_rtx)
2458 return;
2460 switch (GET_CODE (SET_SRC (x)))
2462 case PC:
2463 case RETURN:
2464 return;
2466 case LABEL_REF:
2467 set_label_offsets (SET_SRC (x), insn, initial_p);
2468 return;
2470 case IF_THEN_ELSE:
2471 tem = XEXP (SET_SRC (x), 1);
2472 if (GET_CODE (tem) == LABEL_REF)
2473 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2474 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2475 break;
2477 tem = XEXP (SET_SRC (x), 2);
2478 if (GET_CODE (tem) == LABEL_REF)
2479 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2480 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2481 break;
2482 return;
2484 default:
2485 break;
2488 /* If we reach here, all eliminations must be at their initial
2489 offset because we are doing a jump to a variable address. */
2490 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2491 if (p->offset != p->initial_offset)
2492 p->can_eliminate = 0;
2493 break;
2495 default:
2496 break;
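/* An offset-mismatch example, with hypothetical offsets: if the fp-to-sp
   elimination has offset 16 on the fall-through path into a label but 32
   on a branch to the same label, the comparison above clears
   can_eliminate for that pair, since no single constant could stand in
   for the frame pointer at that label.  */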
2500 /* Scan X and replace any eliminable registers (such as fp) with a
2501 replacement (such as sp), plus an offset.
2503 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2504 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2505 MEM, we are allowed to replace a sum of a register and the constant zero
2506 with the register, which we cannot do outside a MEM. In addition, we need
2507 to record the fact that a register is referenced outside a MEM.
2509 If INSN is an insn, it is the insn containing X. If we replace a REG
2510 in a SET_DEST with an equivalent MEM and INSN is nonzero, write a
2511 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2512 the REG is being modified.
2514 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2515 That's used when we eliminate in expressions stored in notes.
2516 This means, do not set ref_outside_mem even if the reference
2517 is outside of MEMs.
2519 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
2520 replacements done assuming all offsets are at their initial values. If
2521 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2522 encounter, return the actual location so that find_reloads will do
2523 the proper thing. */
2525 static rtx
2526 eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
2527 bool may_use_invariant)
2529 enum rtx_code code = GET_CODE (x);
2530 struct elim_table *ep;
2531 int regno;
2532 rtx new_rtx;
2533 int i, j;
2534 const char *fmt;
2535 int copied = 0;
2537 if (! current_function_decl)
2538 return x;
2540 switch (code)
2542 case CONST_INT:
2543 case CONST_DOUBLE:
2544 case CONST_FIXED:
2545 case CONST_VECTOR:
2546 case CONST:
2547 case SYMBOL_REF:
2548 case CODE_LABEL:
2549 case PC:
2550 case CC0:
2551 case ASM_INPUT:
2552 case ADDR_VEC:
2553 case ADDR_DIFF_VEC:
2554 case RETURN:
2555 return x;
2557 case REG:
2558 regno = REGNO (x);
2560 /* First handle the case where we encounter a bare register that
2561 is eliminable. Replace it with a PLUS. */
2562 if (regno < FIRST_PSEUDO_REGISTER)
2564 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2565 ep++)
2566 if (ep->from_rtx == x && ep->can_eliminate)
2567 return plus_constant (ep->to_rtx, ep->previous_offset);
2570 else if (reg_renumber && reg_renumber[regno] < 0
2571 && reg_equiv_invariant && reg_equiv_invariant[regno])
2573 if (may_use_invariant || (insn && DEBUG_INSN_P (insn)))
2574 return eliminate_regs_1 (copy_rtx (reg_equiv_invariant[regno]),
2575 mem_mode, insn, true);
2576 /* There exists at least one use of REGNO that cannot be
2577 eliminated. Prevent the defining insn from being deleted. */
2578 reg_equiv_init[regno] = NULL_RTX;
2579 alter_reg (regno, -1, true);
2581 return x;
2583 /* You might think handling MINUS in a manner similar to PLUS is a
2584 good idea. It is not. It has been tried multiple times and every
2585 time the change has had to have been reverted.
2587 Other parts of reload know a PLUS is special (gen_reload for example)
2588 and require special code to handle a reloaded PLUS operand.
2590 Also consider backends where the flags register is clobbered by a
2591 MINUS, but we can emit a PLUS that does not clobber flags (IA-32,
2592 lea instruction comes to mind). If we try to reload a MINUS, we
2593 may kill the flags register that was holding a useful value.
2595 So, please before trying to handle MINUS, consider reload as a
2596 whole instead of this little section as well as the backend issues. */
2597 case PLUS:
2598 /* If this is the sum of an eliminable register and a constant, rework
2599 the sum. */
2600 if (REG_P (XEXP (x, 0))
2601 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2602 && CONSTANT_P (XEXP (x, 1)))
2604 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2605 ep++)
2606 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2608 /* The only time we want to replace a PLUS with a REG (this
2609 occurs when the constant operand of the PLUS is the negative
2610 of the offset) is when we are inside a MEM. We won't want
2611 to do so at other times because that would change the
2612 structure of the insn in a way that reload can't handle.
2613 We special-case the commonest situation in
2614 eliminate_regs_in_insn, so just replace a PLUS with a
2615 PLUS here, unless inside a MEM. */
2616 if (mem_mode != 0 && CONST_INT_P (XEXP (x, 1))
2617 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2618 return ep->to_rtx;
2619 else
2620 return gen_rtx_PLUS (Pmode, ep->to_rtx,
2621 plus_constant (XEXP (x, 1),
2622 ep->previous_offset));
2625 /* If the register is not eliminable, we are done since the other
2626 operand is a constant. */
2627 return x;
2630 /* If this is part of an address, we want to bring any constant to the
2631 outermost PLUS. We will do this by doing register replacement in
2632 our operands and seeing if a constant shows up in one of them.
2634 Note that there is no risk of modifying the structure of the insn,
2635 since we only get called for its operands, thus we are either
2636 modifying the address inside a MEM, or something like an address
2637 operand of a load-address insn. */
2640 rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true);
2641 rtx new1 = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true);
2643 if (reg_renumber && (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)))
2645 /* If one side is a PLUS and the other side is a pseudo that
2646 didn't get a hard register but has a reg_equiv_constant,
2647 we must replace the constant here since it may no longer
2648 be in the position of any operand. */
2649 if (GET_CODE (new0) == PLUS && REG_P (new1)
2650 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2651 && reg_renumber[REGNO (new1)] < 0
2652 && reg_equiv_constant != 0
2653 && reg_equiv_constant[REGNO (new1)] != 0)
2654 new1 = reg_equiv_constant[REGNO (new1)];
2655 else if (GET_CODE (new1) == PLUS && REG_P (new0)
2656 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2657 && reg_renumber[REGNO (new0)] < 0
2658 && reg_equiv_constant[REGNO (new0)] != 0)
2659 new0 = reg_equiv_constant[REGNO (new0)];
2661 new_rtx = form_sum (GET_MODE (x), new0, new1);
2663 /* As above, if we are not inside a MEM we do not want to
2664 turn a PLUS into something else. We might try to do so here
2665 for an addition of 0 if we aren't optimizing. */
2666 if (! mem_mode && GET_CODE (new_rtx) != PLUS)
2667 return gen_rtx_PLUS (GET_MODE (x), new_rtx, const0_rtx);
2668 else
2669 return new_rtx;
2672 return x;
2674 case MULT:
2675 /* If this is the product of an eliminable register and a
2676 constant, apply the distribute law and move the constant out
2677 so that we have (plus (mult ..) ..). This is needed in order
2678 to keep load-address insns valid. This case is pathological.
2679 We ignore the possibility of overflow here. */
2680 if (REG_P (XEXP (x, 0))
2681 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2682 && CONST_INT_P (XEXP (x, 1)))
2683 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2684 ep++)
2685 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2687 if (! mem_mode
2688 /* Refs inside notes or in DEBUG_INSNs don't count for
2689 this purpose. */
2690 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2691 || GET_CODE (insn) == INSN_LIST
2692 || DEBUG_INSN_P (insn))))
2693 ep->ref_outside_mem = 1;
2695 return
2696 plus_constant (gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
2697 ep->previous_offset * INTVAL (XEXP (x, 1)));
2700 /* ... fall through ... */
2702 case CALL:
2703 case COMPARE:
2704 /* See comments before PLUS about handling MINUS. */
2705 case MINUS:
2706 case DIV: case UDIV:
2707 case MOD: case UMOD:
2708 case AND: case IOR: case XOR:
2709 case ROTATERT: case ROTATE:
2710 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2711 case NE: case EQ:
2712 case GE: case GT: case GEU: case GTU:
2713 case LE: case LT: case LEU: case LTU:
2715 rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false);
2716 rtx new1 = XEXP (x, 1)
2717 ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, false) : 0;
2719 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2720 return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
2722 return x;
2724 case EXPR_LIST:
2725 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2726 if (XEXP (x, 0))
2728 new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true);
2729 if (new_rtx != XEXP (x, 0))
2731 /* If this is a REG_DEAD note, it is not valid anymore.
2732 Using the eliminated version could result in creating a
2733 REG_DEAD note for the stack or frame pointer. */
2734 if (REG_NOTE_KIND (x) == REG_DEAD)
2735 return (XEXP (x, 1)
2736 ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true)
2737 : NULL_RTX);
2739 x = alloc_reg_note (REG_NOTE_KIND (x), new_rtx, XEXP (x, 1));
2743 /* ... fall through ... */
2745 case INSN_LIST:
2746 /* Now do eliminations in the rest of the chain. If this was
2747 an EXPR_LIST, this might result in allocating more memory than is
2748 strictly needed, but it simplifies the code. */
2749 if (XEXP (x, 1))
2751 new_rtx = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true);
2752 if (new_rtx != XEXP (x, 1))
2753 return
2754 gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new_rtx);
2756 return x;
2758 case PRE_INC:
2759 case POST_INC:
2760 case PRE_DEC:
2761 case POST_DEC:
2762 /* We do not support elimination of a register that is modified.
2763 elimination_effects has already made sure that this does not
2764 happen. */
2765 return x;
2767 case PRE_MODIFY:
2768 case POST_MODIFY:
2769 /* We do not support elimination of a register that is modified.
2770 elimination_effects has already made sure that this does not
2771 happen. The only remaining case we need to consider here is
2772 that the increment value may be an eliminable register. */
2773 if (GET_CODE (XEXP (x, 1)) == PLUS
2774 && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
2776 rtx new_rtx = eliminate_regs_1 (XEXP (XEXP (x, 1), 1), mem_mode,
2777 insn, true);
2779 if (new_rtx != XEXP (XEXP (x, 1), 1))
2780 return gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (x, 0),
2781 gen_rtx_PLUS (GET_MODE (x),
2782 XEXP (x, 0), new_rtx));
2784 return x;
2786 case STRICT_LOW_PART:
2787 case NEG: case NOT:
2788 case SIGN_EXTEND: case ZERO_EXTEND:
2789 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2790 case FLOAT: case FIX:
2791 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2792 case ABS:
2793 case SQRT:
2794 case FFS:
2795 case CLZ:
2796 case CTZ:
2797 case POPCOUNT:
2798 case PARITY:
2799 case BSWAP:
2800 new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false);
2801 if (new_rtx != XEXP (x, 0))
2802 return gen_rtx_fmt_e (code, GET_MODE (x), new_rtx);
2803 return x;
2805 case SUBREG:
2806 /* Similar to above processing, but preserve SUBREG_BYTE.
2807 Convert (subreg (mem)) to (mem) if not paradoxical.
2808 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2809 pseudo didn't get a hard reg, we must replace this with the
2810 eliminated version of the memory location because push_reload
2811 may do the replacement in certain circumstances. */
2812 if (REG_P (SUBREG_REG (x))
2813 && (GET_MODE_SIZE (GET_MODE (x))
2814 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2815 && reg_equiv_memory_loc != 0
2816 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2818 new_rtx = SUBREG_REG (x);
2820 else
2821 new_rtx = eliminate_regs_1 (SUBREG_REG (x), mem_mode, insn, false);
2823 if (new_rtx != SUBREG_REG (x))
2825 int x_size = GET_MODE_SIZE (GET_MODE (x));
2826 int new_size = GET_MODE_SIZE (GET_MODE (new_rtx));
2828 if (MEM_P (new_rtx)
2829 && ((x_size < new_size
2830 #ifdef WORD_REGISTER_OPERATIONS
2831 /* On these machines, combine can create rtl of the form
2832 (set (subreg:m1 (reg:m2 R) 0) ...)
2833 where m1 < m2, and expects something interesting to
2834 happen to the entire word. Moreover, it will use the
2835 (reg:m2 R) later, expecting all bits to be preserved.
2836 So if the number of words is the same, preserve the
2837 subreg so that push_reload can see it. */
2838 && ! ((x_size - 1) / UNITS_PER_WORD
2839 == (new_size - 1) / UNITS_PER_WORD)
2840 #endif
2842 || x_size == new_size)
2844 return adjust_address_nv (new_rtx, GET_MODE (x), SUBREG_BYTE (x));
2845 else
2846 return gen_rtx_SUBREG (GET_MODE (x), new_rtx, SUBREG_BYTE (x));
2849 return x;
2851 case MEM:
2852 /* Our only special processing is to pass the mode of the MEM to our
2853 recursive call and copy the flags. While we are here, handle this
2854 case more efficiently. */
2855 return
2856 replace_equiv_address_nv (x,
2857 eliminate_regs_1 (XEXP (x, 0), GET_MODE (x),
2858 insn, true));
2860 case USE:
2861 /* Handle insn_list USE that a call to a pure function may generate. */
2862 new_rtx = eliminate_regs_1 (XEXP (x, 0), VOIDmode, insn, false);
2863 if (new_rtx != XEXP (x, 0))
2864 return gen_rtx_USE (GET_MODE (x), new_rtx);
2865 return x;
2867 case CLOBBER:
2868 gcc_assert (insn && DEBUG_INSN_P (insn));
2869 break;
2871 case ASM_OPERANDS:
2872 case SET:
2873 gcc_unreachable ();
2875 default:
2876 break;
2879 /* Process each of our operands recursively. If any have changed, make a
2880 copy of the rtx. */
2881 fmt = GET_RTX_FORMAT (code);
2882 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2884 if (*fmt == 'e')
2886 new_rtx = eliminate_regs_1 (XEXP (x, i), mem_mode, insn, false);
2887 if (new_rtx != XEXP (x, i) && ! copied)
2889 x = shallow_copy_rtx (x);
2890 copied = 1;
2892 XEXP (x, i) = new_rtx;
2894 else if (*fmt == 'E')
2896 int copied_vec = 0;
2897 for (j = 0; j < XVECLEN (x, i); j++)
2899 new_rtx = eliminate_regs_1 (XVECEXP (x, i, j), mem_mode, insn, false);
2900 if (new_rtx != XVECEXP (x, i, j) && ! copied_vec)
2902 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
2903 XVEC (x, i)->elem);
2904 if (! copied)
2906 x = shallow_copy_rtx (x);
2907 copied = 1;
2909 XVEC (x, i) = new_v;
2910 copied_vec = 1;
2912 XVECEXP (x, i, j) = new_rtx;
2917 return x;
2920 rtx
2921 eliminate_regs (rtx x, enum machine_mode mem_mode, rtx insn)
2923 return eliminate_regs_1 (x, mem_mode, insn, false);
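/* A minimal sketch of a typical call, assuming the fp-to-sp elimination
   is active with a previous offset of 16; the values and the SImode
   access are hypothetical:

	(mem:SI (plus (reg fp) (const_int 8)))

   comes back as

	(mem:SI (plus (reg sp) (const_int 24)))  */
#if 0
  rtx addr = plus_constant (frame_pointer_rtx, 8);	/* fp + 8 */
  rtx mem = gen_rtx_MEM (SImode, addr);			/* (mem:SI (plus fp 8)) */
  rtx elim = eliminate_regs (mem, VOIDmode, NULL_RTX);	/* (mem:SI (plus sp 24)) */
#endif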
2926 /* Scan rtx X for modifications of elimination target registers. Update
2927 the table of eliminables to reflect the changed state. MEM_MODE is
2928 the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM. */
2930 static void
2931 elimination_effects (rtx x, enum machine_mode mem_mode)
2933 enum rtx_code code = GET_CODE (x);
2934 struct elim_table *ep;
2935 int regno;
2936 int i, j;
2937 const char *fmt;
2939 switch (code)
2941 case CONST_INT:
2942 case CONST_DOUBLE:
2943 case CONST_FIXED:
2944 case CONST_VECTOR:
2945 case CONST:
2946 case SYMBOL_REF:
2947 case CODE_LABEL:
2948 case PC:
2949 case CC0:
2950 case ASM_INPUT:
2951 case ADDR_VEC:
2952 case ADDR_DIFF_VEC:
2953 case RETURN:
2954 return;
2956 case REG:
2957 regno = REGNO (x);
2959 /* First handle the case where we encounter a bare register that
2960 is eliminable; just note whether it is referenced outside a MEM. */
2961 if (regno < FIRST_PSEUDO_REGISTER)
2963 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2964 ep++)
2965 if (ep->from_rtx == x && ep->can_eliminate)
2967 if (! mem_mode)
2968 ep->ref_outside_mem = 1;
2969 return;
2973 else if (reg_renumber[regno] < 0 && reg_equiv_constant
2974 && reg_equiv_constant[regno]
2975 && ! function_invariant_p (reg_equiv_constant[regno]))
2976 elimination_effects (reg_equiv_constant[regno], mem_mode);
2977 return;
2979 case PRE_INC:
2980 case POST_INC:
2981 case PRE_DEC:
2982 case POST_DEC:
2983 case POST_MODIFY:
2984 case PRE_MODIFY:
2985 /* If we modify the source of an elimination rule, disable it. */
2986 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2987 if (ep->from_rtx == XEXP (x, 0))
2988 ep->can_eliminate = 0;
2990 /* If we modify the target of an elimination rule by adding a constant,
2991 update its offset. If we modify the target in any other way, we'll
2992 have to disable the rule as well. */
2993 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2994 if (ep->to_rtx == XEXP (x, 0))
2996 int size = GET_MODE_SIZE (mem_mode);
2998 /* If more bytes than MEM_MODE are pushed, account for them. */
2999 #ifdef PUSH_ROUNDING
3000 if (ep->to_rtx == stack_pointer_rtx)
3001 size = PUSH_ROUNDING (size);
3002 #endif
3003 if (code == PRE_DEC || code == POST_DEC)
3004 ep->offset += size;
3005 else if (code == PRE_INC || code == POST_INC)
3006 ep->offset -= size;
3007 else if (code == PRE_MODIFY || code == POST_MODIFY)
3009 if (GET_CODE (XEXP (x, 1)) == PLUS
3010 && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
3011 && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
3012 ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1));
3013 else
3014 ep->can_eliminate = 0;
3018 /* These two aren't unary operators. */
3019 if (code == POST_MODIFY || code == PRE_MODIFY)
3020 break;
3022 /* Fall through to generic unary operation case. */
3023 case STRICT_LOW_PART:
3024 case NEG: case NOT:
3025 case SIGN_EXTEND: case ZERO_EXTEND:
3026 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3027 case FLOAT: case FIX:
3028 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3029 case ABS:
3030 case SQRT:
3031 case FFS:
3032 case CLZ:
3033 case CTZ:
3034 case POPCOUNT:
3035 case PARITY:
3036 case BSWAP:
3037 elimination_effects (XEXP (x, 0), mem_mode);
3038 return;
3040 case SUBREG:
3041 if (REG_P (SUBREG_REG (x))
3042 && (GET_MODE_SIZE (GET_MODE (x))
3043 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3044 && reg_equiv_memory_loc != 0
3045 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
3046 return;
3048 elimination_effects (SUBREG_REG (x), mem_mode);
3049 return;
3051 case USE:
3052 /* If using a register that is the source of an elimination we still
3053 think can be performed, note it cannot be performed since we don't
3054 know how this register is used. */
3055 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3056 if (ep->from_rtx == XEXP (x, 0))
3057 ep->can_eliminate = 0;
3059 elimination_effects (XEXP (x, 0), mem_mode);
3060 return;
3062 case CLOBBER:
3063 /* If clobbering a register that is the replacement register for an
3064 elimination we still think can be performed, note that it cannot
3065 be performed. Otherwise, we need not be concerned about it. */
3066 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3067 if (ep->to_rtx == XEXP (x, 0))
3068 ep->can_eliminate = 0;
3070 elimination_effects (XEXP (x, 0), mem_mode);
3071 return;
3073 case SET:
3074 /* Check for setting a register that we know about. */
3075 if (REG_P (SET_DEST (x)))
3077 /* See if this is setting the replacement register for an
3078 elimination.
3080 If DEST is the hard frame pointer, we do nothing because we
3081 assume that all assignments to the frame pointer are for
3082 non-local gotos and are being done at a time when they are valid
3083 and do not disturb anything else. Some machines want to
3084 eliminate a fake argument pointer (or even a fake frame pointer)
3085 with either the real frame pointer or the stack pointer. Assignments to
3086 the hard frame pointer must not prevent this elimination. */
3088 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3089 ep++)
3090 if (ep->to_rtx == SET_DEST (x)
3091 && SET_DEST (x) != hard_frame_pointer_rtx)
3093 /* If it is being incremented, adjust the offset. Otherwise,
3094 this elimination can't be done. */
3095 rtx src = SET_SRC (x);
3097 if (GET_CODE (src) == PLUS
3098 && XEXP (src, 0) == SET_DEST (x)
3099 && CONST_INT_P (XEXP (src, 1)))
3100 ep->offset -= INTVAL (XEXP (src, 1));
3101 else
3102 ep->can_eliminate = 0;
3106 elimination_effects (SET_DEST (x), VOIDmode);
3107 elimination_effects (SET_SRC (x), VOIDmode);
3108 return;
3110 case MEM:
3111 /* Our only special processing is to pass the mode of the MEM to our
3112 recursive call. */
3113 elimination_effects (XEXP (x, 0), GET_MODE (x));
3114 return;
3116 default:
3117 break;
3120 fmt = GET_RTX_FORMAT (code);
3121 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3123 if (*fmt == 'e')
3124 elimination_effects (XEXP (x, i), mem_mode);
3125 else if (*fmt == 'E')
3126 for (j = 0; j < XVECLEN (x, i); j++)
3127 elimination_effects (XVECEXP (x, i, j), mem_mode);
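/* A push example, with a hypothetical 4-byte SImode push: an insn like

	(set (mem:SI (pre_dec (reg sp))) (reg 123))

   reaches the PRE_DEC case above with mem_mode SImode, so each
   elimination whose target is the stack pointer gets 4 (or
   PUSH_ROUNDING (4), where defined) added to its offset, keeping the
   recorded fp/sp displacement correct past the push.  */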
3131 /* Descend through rtx X and verify that no references to eliminable registers
3132 remain. If any do remain, mark the involved register as not
3133 eliminable. */
3135 static void
3136 check_eliminable_occurrences (rtx x)
3138 const char *fmt;
3139 int i;
3140 enum rtx_code code;
3142 if (x == 0)
3143 return;
3145 code = GET_CODE (x);
3147 if (code == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3149 struct elim_table *ep;
3151 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3152 if (ep->from_rtx == x)
3153 ep->can_eliminate = 0;
3154 return;
3157 fmt = GET_RTX_FORMAT (code);
3158 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3160 if (*fmt == 'e')
3161 check_eliminable_occurrences (XEXP (x, i));
3162 else if (*fmt == 'E')
3164 int j;
3165 for (j = 0; j < XVECLEN (x, i); j++)
3166 check_eliminable_occurrences (XVECEXP (x, i, j));
3171 /* Scan INSN and eliminate all eliminable registers in it.
3173 If REPLACE is nonzero, do the replacement destructively. Also
3174 delete the insn as dead if it is setting an eliminable register.
3176 If REPLACE is zero, do all our allocations in reload_obstack.
3178 If no eliminations were done and this insn doesn't require any elimination
3179 processing (these are not identical conditions: it might be updating sp,
3180 but not referencing fp; this needs to be seen during reload_as_needed so
3181 that the offset between fp and sp can be taken into consideration), zero
3182 is returned. Otherwise, 1 is returned. */
3184 static int
3185 eliminate_regs_in_insn (rtx insn, int replace)
3187 int icode = recog_memoized (insn);
3188 rtx old_body = PATTERN (insn);
3189 int insn_is_asm = asm_noperands (old_body) >= 0;
3190 rtx old_set = single_set (insn);
3191 rtx new_body;
3192 int val = 0;
3193 int i;
3194 rtx substed_operand[MAX_RECOG_OPERANDS];
3195 rtx orig_operand[MAX_RECOG_OPERANDS];
3196 struct elim_table *ep;
3197 rtx plus_src, plus_cst_src;
3199 if (! insn_is_asm && icode < 0)
3201 gcc_assert (GET_CODE (PATTERN (insn)) == USE
3202 || GET_CODE (PATTERN (insn)) == CLOBBER
3203 || GET_CODE (PATTERN (insn)) == ADDR_VEC
3204 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
3205 || GET_CODE (PATTERN (insn)) == ASM_INPUT
3206 || DEBUG_INSN_P (insn));
3207 if (DEBUG_INSN_P (insn))
3208 INSN_VAR_LOCATION_LOC (insn)
3209 = eliminate_regs (INSN_VAR_LOCATION_LOC (insn), VOIDmode, insn);
3210 return 0;
3213 if (old_set != 0 && REG_P (SET_DEST (old_set))
3214 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3216 /* Check for setting an eliminable register. */
3217 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3218 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3220 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3221 /* If this is setting the frame pointer register to the
3222 hardware frame pointer register and this is an elimination
3223 that will be done (tested above), this insn is really
3224 adjusting the frame pointer downward to compensate for
3225 the adjustment done before a nonlocal goto. */
3226 if (ep->from == FRAME_POINTER_REGNUM
3227 && ep->to == HARD_FRAME_POINTER_REGNUM)
3229 rtx base = SET_SRC (old_set);
3230 rtx base_insn = insn;
3231 HOST_WIDE_INT offset = 0;
3233 while (base != ep->to_rtx)
3235 rtx prev_insn, prev_set;
3237 if (GET_CODE (base) == PLUS
3238 && CONST_INT_P (XEXP (base, 1)))
3240 offset += INTVAL (XEXP (base, 1));
3241 base = XEXP (base, 0);
3243 else if ((prev_insn = prev_nonnote_insn (base_insn)) != 0
3244 && (prev_set = single_set (prev_insn)) != 0
3245 && rtx_equal_p (SET_DEST (prev_set), base))
3247 base = SET_SRC (prev_set);
3248 base_insn = prev_insn;
3250 else
3251 break;
3254 if (base == ep->to_rtx)
3256 rtx src
3257 = plus_constant (ep->to_rtx, offset - ep->offset);
3259 new_body = old_body;
3260 if (! replace)
3262 new_body = copy_insn (old_body);
3263 if (REG_NOTES (insn))
3264 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3266 PATTERN (insn) = new_body;
3267 old_set = single_set (insn);
3269 /* First see if this insn remains valid when we
3270 make the change. If not, keep the INSN_CODE
3271 the same and let reload fix it up. */
3272 validate_change (insn, &SET_SRC (old_set), src, 1);
3273 validate_change (insn, &SET_DEST (old_set),
3274 ep->to_rtx, 1);
3275 if (! apply_change_group ())
3277 SET_SRC (old_set) = src;
3278 SET_DEST (old_set) = ep->to_rtx;
3281 val = 1;
3282 goto done;
3285 #endif
3287 /* In this case this insn isn't serving a useful purpose. We
3288 will delete it in reload_as_needed once we know that this
3289 elimination is, in fact, being done.
3291 If REPLACE isn't set, we can't delete this insn, but needn't
3292 process it since it won't be used unless something changes. */
3293 if (replace)
3295 delete_dead_insn (insn);
3296 return 1;
3298 val = 1;
3299 goto done;
3303 /* We allow one special case which happens to work on all machines we
3304 currently support: a single set with the source or a REG_EQUAL
3305 note being a PLUS of an eliminable register and a constant. */
3306 plus_src = plus_cst_src = 0;
3307 if (old_set && REG_P (SET_DEST (old_set)))
3309 if (GET_CODE (SET_SRC (old_set)) == PLUS)
3310 plus_src = SET_SRC (old_set);
3311 /* First see if the source is of the form (plus (...) CST). */
3312 if (plus_src
3313 && CONST_INT_P (XEXP (plus_src, 1)))
3314 plus_cst_src = plus_src;
3315 else if (REG_P (SET_SRC (old_set))
3316 || plus_src)
3318 /* Otherwise, see if we have a REG_EQUAL note of the form
3319 (plus (...) CST). */
3320 rtx links;
3321 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
3323 if ((REG_NOTE_KIND (links) == REG_EQUAL
3324 || REG_NOTE_KIND (links) == REG_EQUIV)
3325 && GET_CODE (XEXP (links, 0)) == PLUS
3326 && CONST_INT_P (XEXP (XEXP (links, 0), 1)))
3328 plus_cst_src = XEXP (links, 0);
3329 break;
3334 /* Check that the first operand of the PLUS is a hard reg or
3335 the lowpart subreg of one. */
3336 if (plus_cst_src)
3338 rtx reg = XEXP (plus_cst_src, 0);
3339 if (GET_CODE (reg) == SUBREG && subreg_lowpart_p (reg))
3340 reg = SUBREG_REG (reg);
3342 if (!REG_P (reg) || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
3343 plus_cst_src = 0;
3346 if (plus_cst_src)
3348 rtx reg = XEXP (plus_cst_src, 0);
3349 HOST_WIDE_INT offset = INTVAL (XEXP (plus_cst_src, 1));
3351 if (GET_CODE (reg) == SUBREG)
3352 reg = SUBREG_REG (reg);
3354 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3355 if (ep->from_rtx == reg && ep->can_eliminate)
3357 rtx to_rtx = ep->to_rtx;
3358 offset += ep->offset;
3359 offset = trunc_int_for_mode (offset, GET_MODE (plus_cst_src));
3361 if (GET_CODE (XEXP (plus_cst_src, 0)) == SUBREG)
3362 to_rtx = gen_lowpart (GET_MODE (XEXP (plus_cst_src, 0)),
3363 to_rtx);
3364 /* If we have a nonzero offset, and the source is already
3365 a simple REG, the following transformation would
3366 increase the cost of the insn by replacing a simple REG
3367 with (plus (reg sp) CST). So try only when we already
3368 had a PLUS before. */
3369 if (offset == 0 || plus_src)
3371 rtx new_src = plus_constant (to_rtx, offset);
3373 new_body = old_body;
3374 if (! replace)
3376 new_body = copy_insn (old_body);
3377 if (REG_NOTES (insn))
3378 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3380 PATTERN (insn) = new_body;
3381 old_set = single_set (insn);
3383 /* First see if this insn remains valid when we make the
3384 change. If not, try to replace the whole pattern with
3385 a simple set (this may help if the original insn was a
3386 PARALLEL that was only recognized as single_set due to
3387 REG_UNUSED notes). If this isn't valid either, keep
3388 the INSN_CODE the same and let reload fix it up. */
3389 if (!validate_change (insn, &SET_SRC (old_set), new_src, 0))
3391 rtx new_pat = gen_rtx_SET (VOIDmode,
3392 SET_DEST (old_set), new_src);
3394 if (!validate_change (insn, &PATTERN (insn), new_pat, 0))
3395 SET_SRC (old_set) = new_src;
3398 else
3399 break;
3401 val = 1;
3402 /* This can't have an effect on elimination offsets, so skip right
3403 to the end. */
3404 goto done;
3408 /* Determine the effects of this insn on elimination offsets. */
3409 elimination_effects (old_body, VOIDmode);
3411 /* Eliminate all eliminable registers occurring in operands that
3412 can be handled by reload. */
3413 extract_insn (insn);
3414 for (i = 0; i < recog_data.n_operands; i++)
3416 orig_operand[i] = recog_data.operand[i];
3417 substed_operand[i] = recog_data.operand[i];
3419 /* For an asm statement, every operand is eliminable. */
3420 if (insn_is_asm || insn_data[icode].operand[i].eliminable)
3422 bool is_set_src, in_plus;
3424 /* Check for setting a register that we know about. */
3425 if (recog_data.operand_type[i] != OP_IN
3426 && REG_P (orig_operand[i]))
3428 /* If we are assigning to a register that can be eliminated, it
3429 must be as part of a PARALLEL, since the code above handles
3430 single SETs. We must indicate that we can no longer
3431 eliminate this reg. */
3432 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3433 ep++)
3434 if (ep->from_rtx == orig_operand[i])
3435 ep->can_eliminate = 0;
3438 /* Companion to the above plus substitution, we can allow
3439 invariants as the source of a plain move. */
3440 is_set_src = false;
3441 if (old_set && recog_data.operand_loc[i] == &SET_SRC (old_set))
3442 is_set_src = true;
3443 in_plus = false;
3444 if (plus_src
3445 && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
3446 || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
3447 in_plus = true;
3449 substed_operand[i]
3450 = eliminate_regs_1 (recog_data.operand[i], VOIDmode,
3451 replace ? insn : NULL_RTX,
3452 is_set_src || in_plus);
3453 if (substed_operand[i] != orig_operand[i])
3454 val = 1;
3455 /* Terminate the search in check_eliminable_occurrences at
3456 this point. */
3457 *recog_data.operand_loc[i] = 0;
3459 /* If an output operand changed from a REG to a MEM and INSN is an
3460 insn, write a CLOBBER insn. */
3461 if (recog_data.operand_type[i] != OP_IN
3462 && REG_P (orig_operand[i])
3463 && MEM_P (substed_operand[i])
3464 && replace)
3465 emit_insn_after (gen_clobber (orig_operand[i]), insn);
3469 for (i = 0; i < recog_data.n_dups; i++)
3470 *recog_data.dup_loc[i]
3471 = *recog_data.operand_loc[(int) recog_data.dup_num[i]];
3473 /* If any eliminable registers remain, they aren't eliminable anymore. */
3474 check_eliminable_occurrences (old_body);
3476 /* Substitute the operands; the new values are in the substed_operand
3477 array. */
3478 for (i = 0; i < recog_data.n_operands; i++)
3479 *recog_data.operand_loc[i] = substed_operand[i];
3480 for (i = 0; i < recog_data.n_dups; i++)
3481 *recog_data.dup_loc[i] = substed_operand[(int) recog_data.dup_num[i]];
3483 /* If we are replacing a body that was a (set X (plus Y Z)), try to
3484 re-recognize the insn. We do this in case we had a simple addition
3485 but now can do this as a load-address. This saves an insn in this
3486 common case.
3487 If re-recognition fails, the old insn code number will still be used,
3488 and some register operands may have changed into PLUS expressions.
3489 These will be handled by find_reloads by loading them into a register
3490 again. */
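/* Illustration (register numbers and offsets hypothetical): eliminating
   the frame pointer can turn

	(set (reg 0) (plus (reg fp) (const_int 8)))

   into

	(set (reg 0) (plus (reg sp) (const_int 24)))

   and re-recognizing the result may let the target match it directly,
   for instance as a load-address insn, instead of leaving it for
   find_reloads to fix up.  */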
3492 if (val)
3494 /* If we aren't replacing things permanently and we changed something,
3495 make another copy to ensure that all the RTL is new. Otherwise
3496 things can go wrong if find_reloads swaps commutative operands
3497 and one is inside RTL that has been copied while the other is not. */
3498 new_body = old_body;
3499 if (! replace)
3501 new_body = copy_insn (old_body);
3502 if (REG_NOTES (insn))
3503 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3505 PATTERN (insn) = new_body;
3507 /* If we had a move insn but now we don't, rerecognize it. This will
3508 cause spurious re-recognition if the old move had a PARALLEL since
3509 the new one still will, but we can't call single_set without
3510 having put NEW_BODY into the insn and the re-recognition won't
3511 hurt in this rare case. */
3512 /* ??? Why this huge if statement - why don't we just rerecognize the
3513 thing always? */
3514 if (! insn_is_asm
3515 && old_set != 0
3516 && ((REG_P (SET_SRC (old_set))
3517 && (GET_CODE (new_body) != SET
3518 || !REG_P (SET_SRC (new_body))))
3519 /* If this was a load from or store to memory, compare
3520 the MEM in recog_data.operand to the one in the insn.
3521 If they are not equal, then rerecognize the insn. */
3522 || (old_set != 0
3523 && ((MEM_P (SET_SRC (old_set))
3524 && SET_SRC (old_set) != recog_data.operand[1])
3525 || (MEM_P (SET_DEST (old_set))
3526 && SET_DEST (old_set) != recog_data.operand[0])))
3527 /* If this was an add insn before, rerecognize. */
3528 || GET_CODE (SET_SRC (old_set)) == PLUS))
3530 int new_icode = recog (PATTERN (insn), insn, 0);
3531 if (new_icode >= 0)
3532 INSN_CODE (insn) = new_icode;
3536 /* Restore the old body. If there were any changes to it, we made a copy
3537 of it while the changes were still in place, so we'll correctly return
3538 a modified insn below. */
3539 if (! replace)
3541 /* Restore the old body. */
3542 for (i = 0; i < recog_data.n_operands; i++)
3543 *recog_data.operand_loc[i] = orig_operand[i];
3544 for (i = 0; i < recog_data.n_dups; i++)
3545 *recog_data.dup_loc[i] = orig_operand[(int) recog_data.dup_num[i]];
3548 /* Update all elimination pairs to reflect the status after the current
3549 insn. The changes we make were determined by the earlier call to
3550 elimination_effects.
3552 We also detect cases where register elimination cannot be done,
3553 namely, if a register would be both changed and referenced outside a MEM
3554 in the resulting insn since such an insn is often undefined and, even if
3555 not, we cannot know what meaning will be given to it. Note that it is
3556 valid to have a register used in an address in an insn that changes it
3557 (presumably with a pre- or post-increment or decrement).
3559 If anything changes, return nonzero. */
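/* Illustration: if a single insn both changes an elimination offset
   (say, by adjusting the stack pointer) and also refers to the register
   being eliminated outside of a memory address, no single offset can be
   substituted for the whole insn, so the elimination is abandoned.  */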
3561 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3563 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3564 ep->can_eliminate = 0;
3566 ep->ref_outside_mem = 0;
3568 if (ep->previous_offset != ep->offset)
3569 val = 1;
3572 done:
3573 /* If we changed something, perform elimination in REG_NOTES. This is
3574 needed even when REPLACE is zero because a REG_DEAD note might refer
3575 to a register that we eliminate and could cause a different number
3576 of spill registers to be needed in the final reload pass than in
3577 the pre-passes. */
3578 if (val && REG_NOTES (insn) != 0)
3579 REG_NOTES (insn)
3580 = eliminate_regs_1 (REG_NOTES (insn), VOIDmode, REG_NOTES (insn), true);
3582 return val;
3585 /* Loop through all elimination pairs.
3586 Record each pair's current offset as its previous offset, and
3588 recalculate the number of pairs that are not at their initial
3589 offset. */
3591 static void
3592 update_eliminable_offsets (void)
3594 struct elim_table *ep;
3596 num_not_at_initial_offset = 0;
3597 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3599 ep->previous_offset = ep->offset;
3600 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3601 num_not_at_initial_offset++;
3605 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3606 replacement we currently believe is valid, mark it as not eliminable if X
3607 modifies DEST in any way other than by adding a constant integer to it.
3609 If DEST is the hard frame pointer, we do nothing because we assume that
3610 all assignments to the hard frame pointer are nonlocal gotos and are being
3611 done at a time when they are valid and do not disturb anything else.
3612 Some machines want to eliminate a fake argument pointer with either the
3613 frame or stack pointer. Assignments to the hard frame pointer must not
3614 prevent this elimination.
3616 Called via note_stores from reload before starting its passes to scan
3617 the insns of the function. */
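/* Illustration (register names symbolic): an assignment of the form

	(set (reg sp) (plus (reg sp) (const_int -16)))

   merely changes the elimination offset and leaves the elimination
   usable, whereas any other kind of assignment to an elimination
   target, such as

	(set (reg sp) (reg r1))

   makes the offset unknowable and disables every elimination whose
   to-register is sp.  */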
3619 static void
3620 mark_not_eliminable (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
3622 unsigned int i;
3624 /* A SUBREG of a hard register here is just changing its mode. We should
3625 not see a SUBREG of an eliminable hard register, but check just in
3626 case. */
3627 if (GET_CODE (dest) == SUBREG)
3628 dest = SUBREG_REG (dest);
3630 if (dest == hard_frame_pointer_rtx)
3631 return;
3633 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3634 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3635 && (GET_CODE (x) != SET
3636 || GET_CODE (SET_SRC (x)) != PLUS
3637 || XEXP (SET_SRC (x), 0) != dest
3638 || !CONST_INT_P (XEXP (SET_SRC (x), 1))))
3640 reg_eliminate[i].can_eliminate_previous
3641 = reg_eliminate[i].can_eliminate = 0;
3642 num_eliminable--;
3646 /* Verify that the initial elimination offsets did not change since the
3647 last call to set_initial_elim_offsets. This is used to catch cases
3648 where something illegal happened during reload_as_needed that could
3649 cause incorrect code to be generated if we did not check for it. */
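/* One plausible scenario (illustrative, not exhaustive): if additional
   stack space is allocated during reload_as_needed after the offsets
   have been applied, INITIAL_ELIMINATION_OFFSET may change, which would
   invalidate substitutions that were already made; this check detects
   such drift.  */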
3651 static bool
3652 verify_initial_elim_offsets (void)
3654 HOST_WIDE_INT t;
3656 if (!num_eliminable)
3657 return true;
3659 #ifdef ELIMINABLE_REGS
3661 struct elim_table *ep;
3663 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3665 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t);
3666 if (t != ep->initial_offset)
3667 return false;
3670 #else
3671 INITIAL_FRAME_POINTER_OFFSET (t);
3672 if (t != reg_eliminate[0].initial_offset)
3673 return false;
3674 #endif
3676 return true;
3679 /* Reset all offsets on eliminable registers to their initial values. */
3681 static void
3682 set_initial_elim_offsets (void)
3684 struct elim_table *ep = reg_eliminate;
3686 #ifdef ELIMINABLE_REGS
3687 for (; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3689 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
3690 ep->previous_offset = ep->offset = ep->initial_offset;
3692 #else
3693 INITIAL_FRAME_POINTER_OFFSET (ep->initial_offset);
3694 ep->previous_offset = ep->offset = ep->initial_offset;
3695 #endif
3697 num_not_at_initial_offset = 0;
3700 /* Subroutine of set_initial_label_offsets called via for_each_eh_label. */
3702 static void
3703 set_initial_eh_label_offset (rtx label)
3705 set_label_offsets (label, NULL_RTX, 1);
3708 /* Initialize the known label offsets.
3709 Set a known offset for each forced label to be at the initial offset
3710 of each elimination. We do this because we assume that all
3711 computed jumps occur from a location where each elimination is
3712 at its initial offset.
3713 For all other labels, show that we don't know the offsets. */
3715 static void
3716 set_initial_label_offsets (void)
3718 rtx x;
3719 memset (offsets_known_at, 0, num_labels);
3721 for (x = forced_labels; x; x = XEXP (x, 1))
3722 if (XEXP (x, 0))
3723 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
3725 for_each_eh_label (set_initial_eh_label_offset);
3728 /* Set all elimination offsets to the known values for the code label given
3729 by INSN. */
3731 static void
3732 set_offsets_for_label (rtx insn)
3734 unsigned int i;
3735 int label_nr = CODE_LABEL_NUMBER (insn);
3736 struct elim_table *ep;
3738 num_not_at_initial_offset = 0;
3739 for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++)
3741 ep->offset = ep->previous_offset
3742 = offsets_at[label_nr - first_label_num][i];
3743 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3744 num_not_at_initial_offset++;
3748 /* See if anything that happened changes which eliminations are valid.
3749 For example, on the SPARC, whether or not the frame pointer can
3750 be eliminated can depend on what registers have been used. We need
3751 not check some conditions again (such as flag_omit_frame_pointer)
3752 since they can't have changed. */
3754 static void
3755 update_eliminables (HARD_REG_SET *pset)
3757 int previous_frame_pointer_needed = frame_pointer_needed;
3758 struct elim_table *ep;
3760 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3761 if ((ep->from == HARD_FRAME_POINTER_REGNUM
3762 && targetm.frame_pointer_required ())
3763 #ifdef ELIMINABLE_REGS
3764 || ! targetm.can_eliminate (ep->from, ep->to)
3765 #endif
3767 ep->can_eliminate = 0;
3769 /* Look for the case where we have discovered that we can't replace
3770 register A with register B and that means that we will now be
3771 trying to replace register A with register C. This means we can
3772 no longer replace register C with register B and we need to disable
3773 such an elimination, if it exists. This occurs often with A == ap,
3774 B == sp, and C == fp. */
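/* Spelling out the ap/sp/fp example: once ap -> sp has been disabled,
   ap will be eliminated to fp instead; if fp -> sp were still performed,
   the combined effect would again be ap -> sp, which we just decided is
   invalid, so the fp -> sp elimination must be disabled as well.  */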
3776 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3778 struct elim_table *op;
3779 int new_to = -1;
3781 if (! ep->can_eliminate && ep->can_eliminate_previous)
3783 /* Find the current elimination for ep->from, if there is a
3784 new one. */
3785 for (op = reg_eliminate;
3786 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3787 if (op->from == ep->from && op->can_eliminate)
3789 new_to = op->to;
3790 break;
3793 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
3794 disable it. */
3795 for (op = reg_eliminate;
3796 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3797 if (op->from == new_to && op->to == ep->to)
3798 op->can_eliminate = 0;
3802 /* See if any registers that we thought we could eliminate the previous
3803 time are no longer eliminable. If so, something has changed and we
3804 must spill the register. Also, recompute the number of eliminable
3805 registers and see if the frame pointer is needed; it is if there is
3806 no elimination of the frame pointer that we can perform. */
3808 frame_pointer_needed = 1;
3809 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3811 if (ep->can_eliminate
3812 && ep->from == FRAME_POINTER_REGNUM
3813 && ep->to != HARD_FRAME_POINTER_REGNUM
3814 && (! SUPPORTS_STACK_ALIGNMENT
3815 || ! crtl->stack_realign_needed))
3816 frame_pointer_needed = 0;
3818 if (! ep->can_eliminate && ep->can_eliminate_previous)
3820 ep->can_eliminate_previous = 0;
3821 SET_HARD_REG_BIT (*pset, ep->from);
3822 num_eliminable--;
3826 /* If we didn't need a frame pointer last time, but we do now, spill
3827 the hard frame pointer. */
3828 if (frame_pointer_needed && ! previous_frame_pointer_needed)
3829 SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM);
3832 /* Return true if X is used as the target register of an elimination. */
3834 bool
3835 elimination_target_reg_p (rtx x)
3837 struct elim_table *ep;
3839 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3840 if (ep->to_rtx == x && ep->can_eliminate)
3841 return true;
3843 return false;
3846 /* Initialize the table of registers to eliminate.
3847 Pre-condition: global flag frame_pointer_needed has been set before
3848 calling this function. */
3850 static void
3851 init_elim_table (void)
3853 struct elim_table *ep;
3854 #ifdef ELIMINABLE_REGS
3855 const struct elim_table_1 *ep1;
3856 #endif
3858 if (!reg_eliminate)
3859 reg_eliminate = XCNEWVEC (struct elim_table, NUM_ELIMINABLE_REGS);
3861 num_eliminable = 0;
3863 #ifdef ELIMINABLE_REGS
3864 for (ep = reg_eliminate, ep1 = reg_eliminate_1;
3865 ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++, ep1++)
3867 ep->from = ep1->from;
3868 ep->to = ep1->to;
3869 ep->can_eliminate = ep->can_eliminate_previous
3870 = (targetm.can_eliminate (ep->from, ep->to)
3871 && ! (ep->to == STACK_POINTER_REGNUM
3872 && frame_pointer_needed
3873 && (! SUPPORTS_STACK_ALIGNMENT
3874 || ! stack_realign_fp)));
3876 #else
3877 reg_eliminate[0].from = reg_eliminate_1[0].from;
3878 reg_eliminate[0].to = reg_eliminate_1[0].to;
3879 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
3880 = ! frame_pointer_needed;
3881 #endif
3883 /* Count the number of eliminable registers and build the FROM and TO
3884 REG rtx's. Note that code in gen_rtx_REG will cause, e.g.,
3885 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
3886 We depend on this. */
3887 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3889 num_eliminable += ep->can_eliminate;
3890 ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
3891 ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
3895 /* Kick all pseudos out of hard register REGNO.
3897 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3898 because we found we can't eliminate some register. In that case, no pseudos
3899 are allowed to be in the register, even if they are only in a block that
3900 doesn't require spill registers, unlike the case when we are spilling this
3901 hard reg to produce another spill register.
3903 Any pseudos that need to be kicked out are recorded in spilled_pseudos. */
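/* Illustration (register numbers hypothetical): with 32-bit hard regs,
   a DImode pseudo allocated to hard regs 4-5 overlaps a spill of hard
   reg 5; the loop below catches it through end_hard_regno even though
   its reg_renumber entry is 4 rather than 5.  */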
3905 static void
3906 spill_hard_reg (unsigned int regno, int cant_eliminate)
3908 int i;
3910 if (cant_eliminate)
3912 SET_HARD_REG_BIT (bad_spill_regs_global, regno);
3913 df_set_regs_ever_live (regno, true);
3916 /* Spill every pseudo reg that was allocated to this reg
3917 or to something that overlaps this reg. */
3919 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3920 if (reg_renumber[i] >= 0
3921 && (unsigned int) reg_renumber[i] <= regno
3922 && end_hard_regno (PSEUDO_REGNO_MODE (i), reg_renumber[i]) > regno)
3923 SET_REGNO_REG_SET (&spilled_pseudos, i);
3926 /* After find_reload_regs has been run for all insns that need reloads,
3927 and/or spill_hard_regs was called, this function is used to actually
3928 spill pseudo registers and try to reallocate them. It also sets up the
3929 spill_regs array for use by choose_reload_regs. */
3931 static int
3932 finish_spills (int global)
3934 struct insn_chain *chain;
3935 int something_changed = 0;
3936 unsigned i;
3937 reg_set_iterator rsi;
3939 /* Build the spill_regs array for the function. */
3940 /* If there are some registers still to eliminate and one of the spill regs
3941 wasn't ever used before, additional stack space may have to be
3942 allocated to store this register. Thus, we may have changed the offset
3943 between the stack and frame pointers, so mark that something has changed.
3945 One might think that we need only set VAL to 1 if this is a call-used
3946 register. However, the set of registers that must be saved by the
3947 prologue is not identical to the call-used set. For example, the
3948 register used by the call insn for the return PC is a call-used register,
3949 but must be saved by the prologue. */
3951 n_spills = 0;
3952 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3953 if (TEST_HARD_REG_BIT (used_spill_regs, i))
3955 spill_reg_order[i] = n_spills;
3956 spill_regs[n_spills++] = i;
3957 if (num_eliminable && ! df_regs_ever_live_p (i))
3958 something_changed = 1;
3959 df_set_regs_ever_live (i, true);
3961 else
3962 spill_reg_order[i] = -1;
3964 EXECUTE_IF_SET_IN_REG_SET (&spilled_pseudos, FIRST_PSEUDO_REGISTER, i, rsi)
3965 if (! ira_conflicts_p || reg_renumber[i] >= 0)
3967 /* Record the current hard register the pseudo is allocated to
3968 in pseudo_previous_regs so we avoid reallocating it to the
3969 same hard reg in a later pass. */
3970 gcc_assert (reg_renumber[i] >= 0);
3972 SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]);
3973 /* Mark it as no longer having a hard register home. */
3974 reg_renumber[i] = -1;
3975 if (ira_conflicts_p)
3976 /* Inform IRA about the change. */
3977 ira_mark_allocation_change (i);
3978 /* We will need to scan everything again. */
3979 something_changed = 1;
3982 /* Retry global register allocation if possible. */
3983 if (global && ira_conflicts_p)
3985 unsigned int n;
3987 memset (pseudo_forbidden_regs, 0, max_regno * sizeof (HARD_REG_SET));
3988 /* For every insn that needs reloads, set the registers used as spill
3989 regs in pseudo_forbidden_regs for every pseudo live across the
3990 insn. */
3991 for (chain = insns_need_reload; chain; chain = chain->next_need_reload)
3993 EXECUTE_IF_SET_IN_REG_SET
3994 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
3996 IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
3997 chain->used_spill_regs);
3999 EXECUTE_IF_SET_IN_REG_SET
4000 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
4002 IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
4003 chain->used_spill_regs);
4007 /* Retry allocating the pseudos spilled in IRA and the
4008 reload. For each reg, merge the various reg sets that
4009 indicate which hard regs can't be used, and call
4010 ira_reassign_pseudos. */
4011 for (n = 0, i = FIRST_PSEUDO_REGISTER; i < (unsigned) max_regno; i++)
4012 if (reg_old_renumber[i] != reg_renumber[i])
4014 if (reg_renumber[i] < 0)
4015 temp_pseudo_reg_arr[n++] = i;
4016 else
4017 CLEAR_REGNO_REG_SET (&spilled_pseudos, i);
4019 if (ira_reassign_pseudos (temp_pseudo_reg_arr, n,
4020 bad_spill_regs_global,
4021 pseudo_forbidden_regs, pseudo_previous_regs,
4022 &spilled_pseudos))
4023 something_changed = 1;
4025 /* Fix up the register information in the insn chain.
4026 This involves deleting those of the spilled pseudos which did not get
4027 a new hard register home from the live_{before,after} sets. */
4028 for (chain = reload_insn_chain; chain; chain = chain->next)
4030 HARD_REG_SET used_by_pseudos;
4031 HARD_REG_SET used_by_pseudos2;
4033 if (! ira_conflicts_p)
4035 /* Don't do this for IRA because IRA and reload can still
4036 assign hard registers to the spilled pseudos on subsequent
4037 reload iterations. */
4038 AND_COMPL_REG_SET (&chain->live_throughout, &spilled_pseudos);
4039 AND_COMPL_REG_SET (&chain->dead_or_set, &spilled_pseudos);
4041 /* Mark any unallocated hard regs as available for spills. That
4042 makes inheritance work somewhat better. */
4043 if (chain->need_reload)
4045 REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
4046 REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
4047 IOR_HARD_REG_SET (used_by_pseudos, used_by_pseudos2);
4049 compute_use_by_pseudos (&used_by_pseudos, &chain->live_throughout);
4050 compute_use_by_pseudos (&used_by_pseudos, &chain->dead_or_set);
4051 /* The value of chain->used_spill_regs from the previous
4052 iteration may not be included in the value calculated here
4053 because caller-save insns may have been removed (see
4054 delete_caller_save_insns). */
4055 COMPL_HARD_REG_SET (chain->used_spill_regs, used_by_pseudos);
4056 AND_HARD_REG_SET (chain->used_spill_regs, used_spill_regs);
4060 CLEAR_REG_SET (&changed_allocation_pseudos);
4061 /* Let alter_reg modify the reg rtx's for the modified pseudos. */
4062 for (i = FIRST_PSEUDO_REGISTER; i < (unsigned)max_regno; i++)
4064 int regno = reg_renumber[i];
4065 if (reg_old_renumber[i] == regno)
4066 continue;
4068 SET_REGNO_REG_SET (&changed_allocation_pseudos, i);
4070 alter_reg (i, reg_old_renumber[i], false);
4071 reg_old_renumber[i] = regno;
4072 if (dump_file)
4074 if (regno == -1)
4075 fprintf (dump_file, " Register %d now on stack.\n\n", i);
4076 else
4077 fprintf (dump_file, " Register %d now in %d.\n\n",
4078 i, reg_renumber[i]);
4082 return something_changed;
4085 /* Find all paradoxical subregs within X and update reg_max_ref_width. */
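/* Illustration: on a 32-bit target, a reference such as
   (subreg:DI (reg:SI N) 0) is paradoxical; recording
   GET_MODE_SIZE (DImode) in reg_max_ref_width[N] allows a later stack
   slot for the pseudo to be made wide enough for the widest
   reference.  */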
4087 static void
4088 scan_paradoxical_subregs (rtx x)
4090 int i;
4091 const char *fmt;
4092 enum rtx_code code = GET_CODE (x);
4094 switch (code)
4096 case REG:
4097 case CONST_INT:
4098 case CONST:
4099 case SYMBOL_REF:
4100 case LABEL_REF:
4101 case CONST_DOUBLE:
4102 case CONST_FIXED:
4103 case CONST_VECTOR: /* shouldn't happen, but just in case. */
4104 case CC0:
4105 case PC:
4106 case USE:
4107 case CLOBBER:
4108 return;
4110 case SUBREG:
4111 if (REG_P (SUBREG_REG (x))
4112 && (GET_MODE_SIZE (GET_MODE (x))
4113 > reg_max_ref_width[REGNO (SUBREG_REG (x))]))
4115 reg_max_ref_width[REGNO (SUBREG_REG (x))]
4116 = GET_MODE_SIZE (GET_MODE (x));
4117 mark_home_live_1 (REGNO (SUBREG_REG (x)), GET_MODE (x));
4119 return;
4121 default:
4122 break;
4125 fmt = GET_RTX_FORMAT (code);
4126 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4128 if (fmt[i] == 'e')
4129 scan_paradoxical_subregs (XEXP (x, i));
4130 else if (fmt[i] == 'E')
4132 int j;
4133 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4134 scan_paradoxical_subregs (XVECEXP (x, i, j));
4139 /* A subroutine of reload_as_needed. If INSN has a REG_EH_REGION note,
4140 examine all of the reload insns between PREV and NEXT exclusive, and
4141 annotate all that may trap. */
4143 static void
4144 fixup_eh_region_note (rtx insn, rtx prev, rtx next)
4146 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
4147 if (note == NULL)
4148 return;
4149 if (!insn_could_throw_p (insn))
4150 remove_note (insn, note);
4151 copy_reg_eh_region_note_forward (note, NEXT_INSN (prev), next);
4154 /* Reload pseudo-registers into hard regs around each insn as needed.
4155 Additional register load insns are output before the insn that needs it
4156 and perhaps store insns after insns that modify the reloaded pseudo reg.
4158 reg_last_reload_reg and reg_reloaded_contents keep track of
4159 which registers are already available in reload registers.
4160 We update these for the reloads that we perform,
4161 as the insns are scanned. */
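/* Sketch of the effect (pseudo and spill register numbers hypothetical):
   if pseudo 42 lives in a stack slot and an insn needs it in a register,
   the pass emits roughly

	(set (reg R) (mem SLOT))	; input reload
	... the insn, with pseudo 42 replaced by (reg R) ...
	(set (mem SLOT) (reg R))	; output reload, if 42 is written

   where R is a spill register chosen by choose_reload_regs.  */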
4163 static void
4164 reload_as_needed (int live_known)
4166 struct insn_chain *chain;
4167 #if defined (AUTO_INC_DEC)
4168 int i;
4169 #endif
4170 rtx x;
4172 memset (spill_reg_rtx, 0, sizeof spill_reg_rtx);
4173 memset (spill_reg_store, 0, sizeof spill_reg_store);
4174 reg_last_reload_reg = XCNEWVEC (rtx, max_regno);
4175 INIT_REG_SET (&reg_has_output_reload);
4176 CLEAR_HARD_REG_SET (reg_reloaded_valid);
4177 CLEAR_HARD_REG_SET (reg_reloaded_call_part_clobbered);
4179 set_initial_elim_offsets ();
4181 for (chain = reload_insn_chain; chain; chain = chain->next)
4183 rtx prev = 0;
4184 rtx insn = chain->insn;
4185 rtx old_next = NEXT_INSN (insn);
4186 #ifdef AUTO_INC_DEC
4187 rtx old_prev = PREV_INSN (insn);
4188 #endif
4190 /* If we pass a label, copy the offsets from the label information
4191 into the current offsets of each elimination. */
4192 if (LABEL_P (insn))
4193 set_offsets_for_label (insn);
4195 else if (INSN_P (insn))
4197 regset_head regs_to_forget;
4198 INIT_REG_SET (&regs_to_forget);
4199 note_stores (PATTERN (insn), forget_old_reloads_1, &regs_to_forget);
4201 /* If this is a USE or CLOBBER of a MEM, ensure that any
4202 references to eliminable registers have been removed. */
4204 if ((GET_CODE (PATTERN (insn)) == USE
4205 || GET_CODE (PATTERN (insn)) == CLOBBER)
4206 && MEM_P (XEXP (PATTERN (insn), 0)))
4207 XEXP (XEXP (PATTERN (insn), 0), 0)
4208 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
4209 GET_MODE (XEXP (PATTERN (insn), 0)),
4210 NULL_RTX);
4212 /* If we need to do register elimination processing, do so.
4213 This might delete the insn, in which case we are done. */
4214 if ((num_eliminable || num_eliminable_invariants) && chain->need_elim)
4216 eliminate_regs_in_insn (insn, 1);
4217 if (NOTE_P (insn))
4219 update_eliminable_offsets ();
4220 CLEAR_REG_SET (&regs_to_forget);
4221 continue;
4225 /* If need_elim is nonzero but need_reload is zero, one might think
4226 that we could simply set n_reloads to 0. However, find_reloads
4227 could have done some manipulation of the insn (such as swapping
4228 commutative operands), and these manipulations are lost during
4229 the first pass for every insn that needs register elimination.
4230 So the actions of find_reloads must be redone here. */
4232 if (! chain->need_elim && ! chain->need_reload
4233 && ! chain->need_operand_change)
4234 n_reloads = 0;
4235 /* First find the pseudo regs that must be reloaded for this insn.
4236 This info is returned in the tables reload_... (see reload.h).
4237 Also modify the body of INSN by substituting RELOAD
4238 rtx's for those pseudo regs. */
4239 else
4241 CLEAR_REG_SET (&reg_has_output_reload);
4242 CLEAR_HARD_REG_SET (reg_is_output_reload);
4244 find_reloads (insn, 1, spill_indirect_levels, live_known,
4245 spill_reg_order);
4248 if (n_reloads > 0)
4250 rtx next = NEXT_INSN (insn);
4251 rtx p;
4253 prev = PREV_INSN (insn);
4255 /* Now compute which reload regs to reload them into. Perhaps
4256 reusing reload regs from previous insns, or else output
4257 load insns to reload them. Maybe output store insns too.
4258 Record the choices of reload reg in reload_reg_rtx. */
4259 choose_reload_regs (chain);
4261 /* Merge any reloads that we didn't combine for fear of
4262 increasing the number of spill registers needed but now
4263 discover can be safely merged. */
4264 if (SMALL_REGISTER_CLASSES)
4265 merge_assigned_reloads (insn);
4267 /* Generate the insns to reload operands into or out of
4268 their reload regs. */
4269 emit_reload_insns (chain);
4271 /* Substitute the chosen reload regs from reload_reg_rtx
4272 into the insn's body (or perhaps into the bodies of other
4273 load and store insns that we just made for reloading
4274 and that we moved the structure into). */
4275 subst_reloads (insn);
4277 /* Adjust the exception region notes for loads and stores. */
4278 if (flag_non_call_exceptions && !CALL_P (insn))
4279 fixup_eh_region_note (insn, prev, next);
4281 /* If this was an ASM, make sure that all the reload insns
4282 we have generated are valid. If not, give an error
4283 and delete them. */
4284 if (asm_noperands (PATTERN (insn)) >= 0)
4285 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
4286 if (p != insn && INSN_P (p)
4287 && GET_CODE (PATTERN (p)) != USE
4288 && (recog_memoized (p) < 0
4289 || (extract_insn (p), ! constrain_operands (1))))
4291 error_for_asm (insn,
4292 "%<asm%> operand requires "
4293 "impossible reload");
4294 delete_insn (p);
4298 if (num_eliminable && chain->need_elim)
4299 update_eliminable_offsets ();
4301 /* Any previously reloaded spilled pseudo reg, stored in this insn,
4302 is no longer validly lying around to save a future reload.
4303 Note that this does not detect pseudos that were reloaded
4304 for this insn in order to be stored into it
4305 (obeying register constraints). That is correct; such reload
4306 registers ARE still valid. */
4307 forget_marked_reloads (&regs_to_forget);
4308 CLEAR_REG_SET (&regs_to_forget);
4310 /* There may have been CLOBBER insns placed after INSN. So scan
4311 between INSN and NEXT and use them to forget old reloads. */
4312 for (x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
4313 if (NONJUMP_INSN_P (x) && GET_CODE (PATTERN (x)) == CLOBBER)
4314 note_stores (PATTERN (x), forget_old_reloads_1, NULL);
4316 #ifdef AUTO_INC_DEC
4317 /* Likewise for regs altered by auto-increment in this insn.
4318 REG_INC notes have been changed by reloading:
4319 find_reloads_address_1 records substitutions for them,
4320 which have been performed by subst_reloads above. */
4321 for (i = n_reloads - 1; i >= 0; i--)
4323 rtx in_reg = rld[i].in_reg;
4324 if (in_reg)
4326 enum rtx_code code = GET_CODE (in_reg);
4327 /* PRE_INC / PRE_DEC will have the reload register ending up
4328 with the same value as the stack slot, but that doesn't
4329 hold true for POST_INC / POST_DEC. Either we have to
4330 convert the memory access to a true POST_INC / POST_DEC,
4331 or we can't use the reload register for inheritance. */
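/* Illustration: for (mem (post_inc (reg X))) reloaded through hard reg
   HR, HR is loaded with the pre-increment value while X ends up holding
   the incremented one, so HR may only be inherited as X's value if the
   access really is converted below into a POST_INC of HR itself.  */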
4332 if ((code == POST_INC || code == POST_DEC)
4333 && TEST_HARD_REG_BIT (reg_reloaded_valid,
4334 REGNO (rld[i].reg_rtx))
4335 /* Make sure it is the inc/dec pseudo, and not
4336 some other (e.g. output operand) pseudo. */
4337 && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4338 == REGNO (XEXP (in_reg, 0))))
4341 rtx reload_reg = rld[i].reg_rtx;
4342 enum machine_mode mode = GET_MODE (reload_reg);
4343 int n = 0;
4344 rtx p;
4346 for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p))
4348 /* We really want to ignore REG_INC notes here, so
4349 use PATTERN (p) as argument to reg_set_p. */
4350 if (reg_set_p (reload_reg, PATTERN (p)))
4351 break;
4352 n = count_occurrences (PATTERN (p), reload_reg, 0);
4353 if (! n)
4354 continue;
4355 if (n == 1)
4357 rtx replace_reg
4358 = gen_rtx_fmt_e (code, mode, reload_reg);
4360 validate_replace_rtx_group (reload_reg,
4361 replace_reg, p);
4362 n = verify_changes (0);
4364 /* We must also verify that the constraints
4365 are met after the replacement. Make sure
4366 extract_insn is only called for an insn
4367 where the replacements were found to be
4368 valid so far. */
4369 if (n)
4371 extract_insn (p);
4372 n = constrain_operands (1);
4375 /* If the constraints were not met, then
4376 undo the replacement, else confirm it. */
4377 if (!n)
4378 cancel_changes (0);
4379 else
4380 confirm_change_group ();
4382 break;
4384 if (n == 1)
4386 add_reg_note (p, REG_INC, reload_reg);
4387 /* Mark this as having an output reload so that the
4388 REG_INC processing code below won't invalidate
4389 the reload for inheritance. */
4390 SET_HARD_REG_BIT (reg_is_output_reload,
4391 REGNO (reload_reg));
4392 SET_REGNO_REG_SET (&reg_has_output_reload,
4393 REGNO (XEXP (in_reg, 0)));
4395 else
4396 forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX,
4397 NULL);
4399 else if ((code == PRE_INC || code == PRE_DEC)
4400 && TEST_HARD_REG_BIT (reg_reloaded_valid,
4401 REGNO (rld[i].reg_rtx))
4402 /* Make sure it is the inc/dec pseudo, and not
4403 some other (e.g. output operand) pseudo. */
4404 && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4405 == REGNO (XEXP (in_reg, 0))))
4407 SET_HARD_REG_BIT (reg_is_output_reload,
4408 REGNO (rld[i].reg_rtx));
4409 SET_REGNO_REG_SET (&reg_has_output_reload,
4410 REGNO (XEXP (in_reg, 0)));
4412 else if (code == PRE_INC || code == PRE_DEC
4413 || code == POST_INC || code == POST_DEC)
4415 int in_regno = REGNO (XEXP (in_reg, 0));
4417 if (reg_last_reload_reg[in_regno] != NULL_RTX)
4419 int in_hard_regno;
4420 bool forget_p = true;
4422 in_hard_regno = REGNO (reg_last_reload_reg[in_regno]);
4423 if (TEST_HARD_REG_BIT (reg_reloaded_valid,
4424 in_hard_regno))
4426 for (x = old_prev ? NEXT_INSN (old_prev) : insn;
4427 x != old_next;
4428 x = NEXT_INSN (x))
4429 if (x == reg_reloaded_insn[in_hard_regno])
4431 forget_p = false;
4432 break;
4435 /* If for some reason we didn't set up
4436 reg_last_reload_reg in this insn,
4437 invalidate inheritance from previous
4438 insns for the incremented/decremented
4439 register. Such registers will not be in
4440 reg_has_output_reload. Also invalidate
4441 it if the corresponding element in
4442 reg_reloaded_insn has been
4443 invalidated. */
4444 if (forget_p)
4445 forget_old_reloads_1 (XEXP (in_reg, 0),
4446 NULL_RTX, NULL);
4451 /* If a pseudo that got a hard register is auto-incremented,
4452 we must purge records of copying it into pseudos without
4453 hard registers. */
4454 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
4455 if (REG_NOTE_KIND (x) == REG_INC)
4457 /* See if this pseudo reg was reloaded in this insn.
4458 If so, its last-reload info is still valid
4459 because it is based on this insn's reload. */
4460 for (i = 0; i < n_reloads; i++)
4461 if (rld[i].out == XEXP (x, 0))
4462 break;
4464 if (i == n_reloads)
4465 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX, NULL);
4467 #endif
4469 /* A reload reg's contents are unknown after a label. */
4470 if (LABEL_P (insn))
4471 CLEAR_HARD_REG_SET (reg_reloaded_valid);
4473 /* Don't assume a reload reg is still good after a call insn
4474 if it is a call-used reg, or if it contains a value that will
4475 be partially clobbered by the call. */
4476 else if (CALL_P (insn))
4478 AND_COMPL_HARD_REG_SET (reg_reloaded_valid, call_used_reg_set);
4479 AND_COMPL_HARD_REG_SET (reg_reloaded_valid, reg_reloaded_call_part_clobbered);
4483 /* Clean up. */
4484 free (reg_last_reload_reg);
4485 CLEAR_REG_SET (&reg_has_output_reload);
4488 /* Discard all record of any value reloaded from X,
4489 or reloaded in X from someplace else;
4490 unless X is an output reload reg of the current insn.
4492 X may be a hard reg (the reload reg)
4493 or it may be a pseudo reg that was reloaded from.
4495 When DATA is non-NULL just mark the registers in regset
4496 to be forgotten later. */
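/* Illustration (numbers hypothetical): if hard reg 3 is currently
   recorded as holding the value of pseudo 57 and the current insn
   stores into hard reg 3, that record must be dropped, unless reg 3 is
   the output reload register of this insn, in which case the store is
   exactly what establishes the value. Likewise, a store into pseudo 57
   itself clears reg_last_reload_reg[57].  */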
4498 static void
4499 forget_old_reloads_1 (rtx x, const_rtx ignored ATTRIBUTE_UNUSED,
4500 void *data)
4502 unsigned int regno;
4503 unsigned int nr;
4504 regset regs = (regset) data;
4506 /* note_stores does give us subregs of hard regs,
4507 subreg_regno_offset requires a hard reg. */
4508 while (GET_CODE (x) == SUBREG)
4510 /* We ignore the subreg offset when calculating the regno,
4511 because we are using the entire underlying hard register
4512 below. */
4513 x = SUBREG_REG (x);
4516 if (!REG_P (x))
4517 return;
4519 regno = REGNO (x);
4521 if (regno >= FIRST_PSEUDO_REGISTER)
4522 nr = 1;
4523 else
4525 unsigned int i;
4527 nr = hard_regno_nregs[regno][GET_MODE (x)];
4528 /* Storing into a spilled-reg invalidates its contents.
4529 This can happen if a block-local pseudo is allocated to that reg
4530 and it wasn't spilled because this block's total need is 0.
4531 Then some insn might have an optional reload and use this reg. */
4532 if (!regs)
4533 for (i = 0; i < nr; i++)
4534 /* But don't do this if the reg actually serves as an output
4535 reload reg in the current instruction. */
4536 if (n_reloads == 0
4537 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
4539 CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
4540 spill_reg_store[regno + i] = 0;
4544 if (regs)
4545 while (nr-- > 0)
4546 SET_REGNO_REG_SET (regs, regno + nr);
4547 else
4549 /* Since value of X has changed,
4550 forget any value previously copied from it. */
4552 while (nr-- > 0)
4553 /* But don't forget a copy if this is the output reload
4554 that establishes the copy's validity. */
4555 if (n_reloads == 0
4556 || !REGNO_REG_SET_P (&reg_has_output_reload, regno + nr))
4557 reg_last_reload_reg[regno + nr] = 0;
4561 /* Forget the reloads marked in REGS by the previous function. */
4562 static void
4563 forget_marked_reloads (regset regs)
4565 unsigned int reg;
4566 reg_set_iterator rsi;
4567 EXECUTE_IF_SET_IN_REG_SET (regs, 0, reg, rsi)
4569 if (reg < FIRST_PSEUDO_REGISTER
4570 /* But don't do this if the reg actually serves as an output
4571 reload reg in the current instruction. */
4572 && (n_reloads == 0
4573 || ! TEST_HARD_REG_BIT (reg_is_output_reload, reg)))
4575 CLEAR_HARD_REG_BIT (reg_reloaded_valid, reg);
4576 spill_reg_store[reg] = 0;
4578 if (n_reloads == 0
4579 || !REGNO_REG_SET_P (&reg_has_output_reload, reg))
4580 reg_last_reload_reg[reg] = 0;
4584 /* The following HARD_REG_SETs indicate when each hard register is
4585 used for a reload of various parts of the current insn. */
4587 /* If reg is unavailable for all reloads. */
4588 static HARD_REG_SET reload_reg_unavailable;
4589 /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4590 static HARD_REG_SET reload_reg_used;
4591 /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4592 static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4593 /* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I. */
4594 static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
4595 /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4596 static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4597 /* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I. */
4598 static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
4599 /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4600 static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4601 /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4602 static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4603 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4604 static HARD_REG_SET reload_reg_used_in_op_addr;
4605 /* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4606 static HARD_REG_SET reload_reg_used_in_op_addr_reload;
4607 /* If reg is in use for a RELOAD_FOR_INSN reload. */
4608 static HARD_REG_SET reload_reg_used_in_insn;
4609 /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4610 static HARD_REG_SET reload_reg_used_in_other_addr;
4612 /* If reg is in use as a reload reg for any sort of reload. */
4613 static HARD_REG_SET reload_reg_used_at_all;
4615 /* If reg is used as an inherited reload. We just mark the first register
4616 in the group. */
4617 static HARD_REG_SET reload_reg_used_for_inherit;
4619 /* Records which hard regs are used in any way, either as explicit use or
4620 by being allocated to a pseudo during any point of the current insn. */
4621 static HARD_REG_SET reg_used_in_insn;
4623 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4624 TYPE. MODE is used to indicate how many consecutive regs are
4625 actually used. */
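/* Illustration: on a typical 32-bit target, a DImode reload starting at
   hard reg REGNO has hard_regno_nregs[REGNO][DImode] == 2, so both
   REGNO and REGNO + 1 are marked below.  */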
4627 static void
4628 mark_reload_reg_in_use (unsigned int regno, int opnum, enum reload_type type,
4629 enum machine_mode mode)
4631 unsigned int nregs = hard_regno_nregs[regno][mode];
4632 unsigned int i;
4634 for (i = regno; i < nregs + regno; i++)
4636 switch (type)
4638 case RELOAD_OTHER:
4639 SET_HARD_REG_BIT (reload_reg_used, i);
4640 break;
4642 case RELOAD_FOR_INPUT_ADDRESS:
4643 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4644 break;
4646 case RELOAD_FOR_INPADDR_ADDRESS:
4647 SET_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4648 break;
4650 case RELOAD_FOR_OUTPUT_ADDRESS:
4651 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4652 break;
4654 case RELOAD_FOR_OUTADDR_ADDRESS:
4655 SET_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4656 break;
4658 case RELOAD_FOR_OPERAND_ADDRESS:
4659 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4660 break;
4662 case RELOAD_FOR_OPADDR_ADDR:
4663 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4664 break;
4666 case RELOAD_FOR_OTHER_ADDRESS:
4667 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4668 break;
4670 case RELOAD_FOR_INPUT:
4671 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4672 break;
4674 case RELOAD_FOR_OUTPUT:
4675 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4676 break;
4678 case RELOAD_FOR_INSN:
4679 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
4680 break;
4683 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4687 /* Similarly, but show REGNO is no longer in use for a reload. */
4689 static void
4690 clear_reload_reg_in_use (unsigned int regno, int opnum,
4691 enum reload_type type, enum machine_mode mode)
4693 unsigned int nregs = hard_regno_nregs[regno][mode];
4694 unsigned int start_regno, end_regno, r;
4695 int i;
4696 /* A complication is that for some reload types, inheritance might
4697 allow multiple reloads of the same types to share a reload register.
4698 We set check_opnum if we have to check only reloads with the same
4699 operand number, and check_any if we have to check all reloads. */
4700 int check_opnum = 0;
4701 int check_any = 0;
4702 HARD_REG_SET *used_in_set;
4704 switch (type)
4706 case RELOAD_OTHER:
4707 used_in_set = &reload_reg_used;
4708 break;
4710 case RELOAD_FOR_INPUT_ADDRESS:
4711 used_in_set = &reload_reg_used_in_input_addr[opnum];
4712 break;
4714 case RELOAD_FOR_INPADDR_ADDRESS:
4715 check_opnum = 1;
4716 used_in_set = &reload_reg_used_in_inpaddr_addr[opnum];
4717 break;
4719 case RELOAD_FOR_OUTPUT_ADDRESS:
4720 used_in_set = &reload_reg_used_in_output_addr[opnum];
4721 break;
4723 case RELOAD_FOR_OUTADDR_ADDRESS:
4724 check_opnum = 1;
4725 used_in_set = &reload_reg_used_in_outaddr_addr[opnum];
4726 break;
4728 case RELOAD_FOR_OPERAND_ADDRESS:
4729 used_in_set = &reload_reg_used_in_op_addr;
4730 break;
4732 case RELOAD_FOR_OPADDR_ADDR:
4733 check_any = 1;
4734 used_in_set = &reload_reg_used_in_op_addr_reload;
4735 break;
4737 case RELOAD_FOR_OTHER_ADDRESS:
4738 used_in_set = &reload_reg_used_in_other_addr;
4739 check_any = 1;
4740 break;
4742 case RELOAD_FOR_INPUT:
4743 used_in_set = &reload_reg_used_in_input[opnum];
4744 break;
4746 case RELOAD_FOR_OUTPUT:
4747 used_in_set = &reload_reg_used_in_output[opnum];
4748 break;
4750 case RELOAD_FOR_INSN:
4751 used_in_set = &reload_reg_used_in_insn;
4752 break;
4753 default:
4754 gcc_unreachable ();
4756 /* We resolve conflicts with remaining reloads of the same type by
4757 excluding the intervals of reload registers by them from the
4758 interval of freed reload registers. Since we only keep track of
4759 one set of interval bounds, we might have to exclude somewhat
4760 more than what would be necessary if we used a HARD_REG_SET here.
4761 But this should only happen very infrequently, so there should
4762 be no reason to worry about it. */
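/* Worked example (register numbers hypothetical): suppose we are
   freeing regs 3..5 (start_regno 3, end_regno 6) and another reload of
   the same type still occupies regs 4..5. That conflict interval
   starts inside ours, so end_regno is pulled back to 4 and only reg 3
   is actually cleared below.  */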
4764 start_regno = regno;
4765 end_regno = regno + nregs;
4766 if (check_opnum || check_any)
4768 for (i = n_reloads - 1; i >= 0; i--)
4770 if (rld[i].when_needed == type
4771 && (check_any || rld[i].opnum == opnum)
4772 && rld[i].reg_rtx)
4774 unsigned int conflict_start = true_regnum (rld[i].reg_rtx);
4775 unsigned int conflict_end
4776 = end_hard_regno (rld[i].mode, conflict_start);
4778 /* If there is an overlap with the first to-be-freed register,
4779 adjust the interval start. */
4780 if (conflict_start <= start_regno && conflict_end > start_regno)
4781 start_regno = conflict_end;
4782 /* Otherwise, if there is a conflict with one of the other
4783 to-be-freed registers, adjust the interval end. */
4784 if (conflict_start > start_regno && conflict_start < end_regno)
4785 end_regno = conflict_start;
4790 for (r = start_regno; r < end_regno; r++)
4791 CLEAR_HARD_REG_BIT (*used_in_set, r);
4794 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
4795 specified by OPNUM and TYPE. */
4797 static int
4798 reload_reg_free_p (unsigned int regno, int opnum, enum reload_type type)
4800 int i;
4802 /* In use for a RELOAD_OTHER means it's not available for anything. */
4803 if (TEST_HARD_REG_BIT (reload_reg_used, regno)
4804 || TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
4805 return 0;
4807 switch (type)
4809 case RELOAD_OTHER:
4810 /* In use for anything means we can't use it for RELOAD_OTHER. */
4811 if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4812 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4813 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
4814 || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4815 return 0;
4817 for (i = 0; i < reload_n_operands; i++)
4818 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4819 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4820 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4821 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4822 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4823 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4824 return 0;
4826 return 1;
4828 case RELOAD_FOR_INPUT:
4829 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4830 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4831 return 0;
4833 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4834 return 0;
4836 /* If it is used for some other input, can't use it. */
4837 for (i = 0; i < reload_n_operands; i++)
4838 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4839 return 0;
4841 /* If it is used in a later operand's address, can't use it. */
4842 for (i = opnum + 1; i < reload_n_operands; i++)
4843 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4844 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4845 return 0;
4847 return 1;
4849 case RELOAD_FOR_INPUT_ADDRESS:
4850 /* Can't use a register if it is used for an input address for this
4851 operand or used as an input in an earlier one. */
4852 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
4853 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4854 return 0;
4856 for (i = 0; i < opnum; i++)
4857 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4858 return 0;
4860 return 1;
4862 case RELOAD_FOR_INPADDR_ADDRESS:
4863 /* Can't use a register if it is used for an input address
4864 for this operand or used as an input in an earlier
4865 one. */
4866 if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4867 return 0;
4869 for (i = 0; i < opnum; i++)
4870 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4871 return 0;
4873 return 1;
4875 case RELOAD_FOR_OUTPUT_ADDRESS:
4876 /* Can't use a register if it is used for an output address for this
4877 operand or used as an output in this or a later operand. Note
4878 that multiple output operands are emitted in reverse order, so
4879 the conflicting ones are those with lower indices. */
4880 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4881 return 0;
4883 for (i = 0; i <= opnum; i++)
4884 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4885 return 0;
4887 return 1;
4889 case RELOAD_FOR_OUTADDR_ADDRESS:
4890 /* Can't use a register if it is used for an output address
4891 for this operand or used as an output in this or a
4892 later operand. Note that multiple output operands are
4893 emitted in reverse order, so the conflicting ones are
4894 those with lower indices. */
4895 if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4896 return 0;
4898 for (i = 0; i <= opnum; i++)
4899 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4900 return 0;
4902 return 1;
4904 case RELOAD_FOR_OPERAND_ADDRESS:
4905 for (i = 0; i < reload_n_operands; i++)
4906 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4907 return 0;
4909 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4910 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4912 case RELOAD_FOR_OPADDR_ADDR:
4913 for (i = 0; i < reload_n_operands; i++)
4914 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4915 return 0;
4917 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
4919 case RELOAD_FOR_OUTPUT:
4920 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4921 outputs, or an operand address for this or an earlier output.
4922 Note that multiple output operands are emitted in reverse order,
4923 so the conflicting ones are those with higher indices. */
4924 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4925 return 0;
4927 for (i = 0; i < reload_n_operands; i++)
4928 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4929 return 0;
4931 for (i = opnum; i < reload_n_operands; i++)
4932 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4933 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4934 return 0;
4936 return 1;
4938 case RELOAD_FOR_INSN:
4939 for (i = 0; i < reload_n_operands; i++)
4940 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4941 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4942 return 0;
4944 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4945 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4947 case RELOAD_FOR_OTHER_ADDRESS:
4948 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4950 default:
4951 gcc_unreachable ();
4955 /* Return 1 if the value in reload reg REGNO, as used by a reload
4956 needed for the part of the insn specified by OPNUM and TYPE,
4957 is still available in REGNO at the end of the insn.
4959 We can assume that the reload reg was already tested for availability
4960 at the time it is needed, and we should not check this again,
4961 in case the reg has already been marked in use. */
4963 static int
4964 reload_reg_reaches_end_p (unsigned int regno, int opnum, enum reload_type type)
4966 int i;
4968 switch (type)
4970 case RELOAD_OTHER:
4971 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4972 its value must reach the end. */
4973 return 1;
4975 /* If this use is for part of the insn,
4976 its value reaches if no subsequent part uses the same register.
4977 Just like the above function, don't try to do this with lots
4978 of fallthroughs. */
4980 case RELOAD_FOR_OTHER_ADDRESS:
4981 /* Here we check for everything else, since these don't conflict
4982 with anything else and everything comes later. */
4984 for (i = 0; i < reload_n_operands; i++)
4985 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4986 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4987 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4988 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4989 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4990 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4991 return 0;
4993 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4994 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
4995 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4996 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4998 case RELOAD_FOR_INPUT_ADDRESS:
4999 case RELOAD_FOR_INPADDR_ADDRESS:
5000 /* Similar, except that we check only for this and subsequent inputs
5001 and the address of only subsequent inputs and we do not need
5002 to check for RELOAD_OTHER objects since they are known not to
5003 conflict. */
5005 for (i = opnum; i < reload_n_operands; i++)
5006 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5007 return 0;
5009 for (i = opnum + 1; i < reload_n_operands; i++)
5010 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5011 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
5012 return 0;
5014 for (i = 0; i < reload_n_operands; i++)
5015 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5016 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5017 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5018 return 0;
5020 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
5021 return 0;
5023 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5024 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5025 && !TEST_HARD_REG_BIT (reload_reg_used, regno));
5027 case RELOAD_FOR_INPUT:
5028 /* Similar to input address, except we start at the next operand for
5029 both input and input address and we do not check for
5030 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
5031 would conflict. */
5033 for (i = opnum + 1; i < reload_n_operands; i++)
5034 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5035 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
5036 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5037 return 0;
5039 /* ... fall through ... */
5041 case RELOAD_FOR_OPERAND_ADDRESS:
5042 /* Check outputs and their addresses. */
5044 for (i = 0; i < reload_n_operands; i++)
5045 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5046 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5047 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5048 return 0;
5050 return (!TEST_HARD_REG_BIT (reload_reg_used, regno));
5052 case RELOAD_FOR_OPADDR_ADDR:
5053 for (i = 0; i < reload_n_operands; i++)
5054 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5055 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5056 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5057 return 0;
5059 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5060 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5061 && !TEST_HARD_REG_BIT (reload_reg_used, regno));
5063 case RELOAD_FOR_INSN:
5064 /* These conflict with other outputs with RELOAD_OTHER. So
5065 we need only check for output addresses. */
5067 opnum = reload_n_operands;
5069 /* ... fall through ... */
5071 case RELOAD_FOR_OUTPUT:
5072 case RELOAD_FOR_OUTPUT_ADDRESS:
5073 case RELOAD_FOR_OUTADDR_ADDRESS:
5074 /* We already know these can't conflict with a later output. So the
5075 only thing to check are later output addresses.
5076 Note that multiple output operands are emitted in reverse order,
5077 so the conflicting ones are those with lower indices. */
5078 for (i = 0; i < opnum; i++)
5079 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5080 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
5081 return 0;
5083 return 1;
5085 default:
5086 gcc_unreachable ();
5090 /* Like reload_reg_reaches_end_p, but check that the condition holds for
5091 every register in the range [REGNO, REGNO + NREGS). */
5093 static bool
5094 reload_regs_reach_end_p (unsigned int regno, int nregs,
5095 int opnum, enum reload_type type)
5097 int i;
5099 for (i = 0; i < nregs; i++)
5100 if (!reload_reg_reaches_end_p (regno + i, opnum, type))
5101 return false;
5102 return true;
5106 /* Returns whether R1 and R2 are uniquely chained: the value of one
5107 is used by the other, and that value is not used by any other
5108 reload for this insn. This is used to partially undo the decision
5109 made in find_reloads, which, in the case of multiple
5110 RELOAD_FOR_OPERAND_ADDRESS reloads, converts all
5111 RELOAD_FOR_OPADDR_ADDR reloads into RELOAD_FOR_OPERAND_ADDRESS
5112 reloads. This code tries to avoid the conflict created by that
5113 change. It might be cleaner to explicitly keep track of which
5114 RELOAD_FOR_OPADDR_ADDR reload is associated with which
5115 RELOAD_FOR_OPERAND_ADDRESS reload, rather than to try to detect
5116 this after the fact. */
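/* Illustration: a typical unique chain is a reload R1 of something like
   (plus (reg A) (const_int ...)) together with a reload R2 whose input
   is (reg A) itself; R2's value feeds only R1, so the two need not be
   treated as conflicting, provided gen_reload can still produce correct
   code (checked by gen_reload_chain_without_interm_reg_p).  */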
5117 static bool
5118 reloads_unique_chain_p (int r1, int r2)
5120 int i;
5122 /* We only check input reloads. */
5123 if (! rld[r1].in || ! rld[r2].in)
5124 return false;
5126 /* Avoid anything with output reloads. */
5127 if (rld[r1].out || rld[r2].out)
5128 return false;
5130 /* "chained" means one reload is a component of the other reload,
5131 not the same as the other reload. */
5132 if (rld[r1].opnum != rld[r2].opnum
5133 || rtx_equal_p (rld[r1].in, rld[r2].in)
5134 || rld[r1].optional || rld[r2].optional
5135 || ! (reg_mentioned_p (rld[r1].in, rld[r2].in)
5136 || reg_mentioned_p (rld[r2].in, rld[r1].in)))
5137 return false;
5139 for (i = 0; i < n_reloads; i ++)
5140 /* Look for input reloads that aren't our two. */
5141 if (i != r1 && i != r2 && rld[i].in)
5143 /* If our reload is mentioned at all, it isn't a simple chain. */
5144 if (reg_mentioned_p (rld[r1].in, rld[i].in))
5145 return false;
5147 return true;
5151 /* This recursive function changes all occurrences of WHAT in *WHERE
5152 to REPL. */
5153 static void
5154 substitute (rtx *where, const_rtx what, rtx repl)
5156 const char *fmt;
5157 int i;
5158 enum rtx_code code;
5160 if (*where == 0)
5161 return;
5163 if (*where == what || rtx_equal_p (*where, what))
5165 *where = repl;
5166 return;
5169 code = GET_CODE (*where);
5170 fmt = GET_RTX_FORMAT (code);
5171 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5173 if (fmt[i] == 'E')
5175 int j;
5177 for (j = XVECLEN (*where, i) - 1; j >= 0; j--)
5178 substitute (&XVECEXP (*where, i, j), what, repl);
5180 else if (fmt[i] == 'e')
5181 substitute (&XEXP (*where, i), what, repl);
5185 /* Return TRUE if the chain of reloads R1 and R2 (in either
5186 order) can be evaluated without using an intermediate register for
5187 the reload that contains the other reload. See
5188 gen_reload to understand what this function is trying to do. As an
5189 example, consider the reload chain
5191 r2: const
5192 r1: <something> + const
5194 where reload R2 got reload reg HR. The function returns true if
5195 there is a valid insn HR = HR + <something>. Otherwise,
5196 gen_reload will use an intermediate register (the reload
5197 reg for R1) to reload <something>.
5199 We need this function to find conflicts for chain reloads. In our
5200 example, if HR = HR + <something> is not a valid insn, then we cannot
5201 use HR as a reload register for R2. If we do use it then we get
5202 wrong code:
5204 HR = const
5205 HR = <something>
5206 HR = HR + HR
5209 static bool
5210 gen_reload_chain_without_interm_reg_p (int r1, int r2)
5212 bool result;
5213 int regno, n, code;
5214 rtx out, in, tem, insn;
5215 rtx last = get_last_insn ();
5217 /* Make r2 a component of r1. */
5218 if (reg_mentioned_p (rld[r1].in, rld[r2].in))
5220 n = r1;
5221 r1 = r2;
5222 r2 = n;
5224 gcc_assert (reg_mentioned_p (rld[r2].in, rld[r1].in));
5225 regno = rld[r1].regno >= 0 ? rld[r1].regno : rld[r2].regno;
5226 gcc_assert (regno >= 0);
5227 out = gen_rtx_REG (rld[r1].mode, regno);
5228 in = copy_rtx (rld[r1].in);
5229 substitute (&in, rld[r2].in, gen_rtx_REG (rld[r2].mode, regno));
5231 /* If IN is a paradoxical SUBREG, remove it and try to put the
5232 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
5233 if (GET_CODE (in) == SUBREG
5234 && (GET_MODE_SIZE (GET_MODE (in))
5235 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
5236 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
5237 in = SUBREG_REG (in), out = tem;
5239 if (GET_CODE (in) == PLUS
5240 && (REG_P (XEXP (in, 0))
5241 || GET_CODE (XEXP (in, 0)) == SUBREG
5242 || MEM_P (XEXP (in, 0)))
5243 && (REG_P (XEXP (in, 1))
5244 || GET_CODE (XEXP (in, 1)) == SUBREG
5245 || CONSTANT_P (XEXP (in, 1))
5246 || MEM_P (XEXP (in, 1))))
5248 insn = emit_insn (gen_rtx_SET (VOIDmode, out, in));
5249 code = recog_memoized (insn);
5250 result = false;
5252 if (code >= 0)
5254 extract_insn (insn);
5255 /* We want constrain operands to treat this insn strictly in
5256 its validity determination, i.e., the way it would after
5257 reload has completed. */
5258 result = constrain_operands (1);
5261 delete_insns_since (last);
5262 return result;
5265 /* It looks like other cases in gen_reload are not possible for
5266 chain reloads or do need an intermediate hard register. */
5267 return true;
5270 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
5271 Return 0 otherwise.
5273 This function uses the same algorithm as reload_reg_free_p above. */
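/* Editorial illustration of the table below (not an addition to the
   algorithm): for example, a RELOAD_FOR_INPUT reload for operand 0
   conflicts with a RELOAD_FOR_INPUT_ADDRESS reload for operand 1
   (r2_opnum > r1_opnum) but not with one for operand 0 itself, roughly
   because an operand's address reload dies once that operand has been
   loaded, while inputs of earlier operands are already live when the
   addresses of later operands are computed.  */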
5275 static int
5276 reloads_conflict (int r1, int r2)
5278 enum reload_type r1_type = rld[r1].when_needed;
5279 enum reload_type r2_type = rld[r2].when_needed;
5280 int r1_opnum = rld[r1].opnum;
5281 int r2_opnum = rld[r2].opnum;
5283 /* RELOAD_OTHER conflicts with everything. */
5284 if (r2_type == RELOAD_OTHER)
5285 return 1;
5287 /* Otherwise, check conflicts differently for each type. */
5289 switch (r1_type)
5291 case RELOAD_FOR_INPUT:
5292 return (r2_type == RELOAD_FOR_INSN
5293 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
5294 || r2_type == RELOAD_FOR_OPADDR_ADDR
5295 || r2_type == RELOAD_FOR_INPUT
5296 || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
5297 || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
5298 && r2_opnum > r1_opnum));
5300 case RELOAD_FOR_INPUT_ADDRESS:
5301 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
5302 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5304 case RELOAD_FOR_INPADDR_ADDRESS:
5305 return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
5306 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5308 case RELOAD_FOR_OUTPUT_ADDRESS:
5309 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
5310 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
5312 case RELOAD_FOR_OUTADDR_ADDRESS:
5313 return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
5314 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
5316 case RELOAD_FOR_OPERAND_ADDRESS:
5317 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
5318 || (r2_type == RELOAD_FOR_OPERAND_ADDRESS
5319 && (!reloads_unique_chain_p (r1, r2)
5320 || !gen_reload_chain_without_interm_reg_p (r1, r2))));
5322 case RELOAD_FOR_OPADDR_ADDR:
5323 return (r2_type == RELOAD_FOR_INPUT
5324 || r2_type == RELOAD_FOR_OPADDR_ADDR);
5326 case RELOAD_FOR_OUTPUT:
5327 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
5328 || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
5329 || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
5330 && r2_opnum >= r1_opnum));
5332 case RELOAD_FOR_INSN:
5333 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
5334 || r2_type == RELOAD_FOR_INSN
5335 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
5337 case RELOAD_FOR_OTHER_ADDRESS:
5338 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
5340 case RELOAD_OTHER:
5341 return 1;
5343 default:
5344 gcc_unreachable ();
5348 /* Indexed by reload number, 1 if incoming value
5349 inherited from previous insns. */
5350 static char reload_inherited[MAX_RELOADS];
5352 /* For an inherited reload, this is the insn the reload was inherited from,
5353 if we know it. Otherwise, this is 0. */
5354 static rtx reload_inheritance_insn[MAX_RELOADS];
5356 /* If nonzero, this is a place to get the value of the reload,
5357 rather than using reload_in. */
5358 static rtx reload_override_in[MAX_RELOADS];
5360 /* For each reload, the hard register number of the register used,
5361 or -1 if we did not need a register for this reload. */
5362 static int reload_spill_index[MAX_RELOADS];
5364 /* Index X is the value of rld[X].reg_rtx, adjusted for the input mode. */
5365 static rtx reload_reg_rtx_for_input[MAX_RELOADS];
5367 /* Index X is the value of rld[X].reg_rtx, adjusted for the output mode. */
5368 static rtx reload_reg_rtx_for_output[MAX_RELOADS];
5370 /* Subroutine of free_for_value_p, used to check a single register.
5371 START_REGNO is the starting regno of the full reload register
5372 (possibly comprising multiple hard registers) that we are considering. */
5374 static int
5375 reload_reg_free_for_value_p (int start_regno, int regno, int opnum,
5376 enum reload_type type, rtx value, rtx out,
5377 int reloadnum, int ignore_address_reloads)
5379 int time1;
5380 /* Set if we see an input reload that must not share its reload register
5381 with any new earlyclobber, but might otherwise share the reload
5382 register with an output or input-output reload. */
5383 int check_earlyclobber = 0;
5384 int i;
5385 int copy = 0;
5387 if (TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
5388 return 0;
5390 if (out == const0_rtx)
5392 copy = 1;
5393 out = NULL_RTX;
5396 /* We use some pseudo 'time' value to check if the lifetimes of the
5397 new register use would overlap with that of a previous reload
5398 that is not read-only or uses a different value.
5399 The 'time' used doesn't have to be linear in any shape or form, just
5400 monotonic.
5401 Some reload types use different 'buckets' for each operand.
5402 So there are MAX_RECOG_OPERANDS different time values for each
5403 such reload type.
5404 We compute TIME1 as the time when the register for the prospective
5405 new reload ceases to be live, and TIME2 for each existing
5406 reload as the time when the reload register of that reload
5407 becomes live.
5408 Where there is little to be gained by exact lifetime calculations,
5409 we just make conservative assumptions, i.e. a longer lifetime;
5410 this is done in the 'default:' cases. */
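/* Editorial illustration of the bucket scheme (derived from the case
   values below, not an independent specification): operand N's
   input-related reloads get times 4N+2 (INPADDR_ADDRESS), 4N+3
   (INPUT_ADDRESS) and 4N+4 (INPUT), so distinct operands never share a
   bucket.  The test near the end of the loop below then rejects the
   register when TIME1 >= TIME2, i.e. when the new reload is still live
   at the point where the existing reload's register becomes live,
   unless the existing reload merely loads the same value.  E.g. a
   prospective RELOAD_FOR_INPUT_ADDRESS for operand 1 (TIME1 = 7)
   conflicts with an existing RELOAD_FOR_INPUT for operand 0 (TIME2 = 4),
   while one for operand 0 (TIME1 = 3) does not conflict with an
   existing RELOAD_FOR_INPUT for operand 1 (TIME2 = 8).  */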
5411 switch (type)
5413 case RELOAD_FOR_OTHER_ADDRESS:
5414 /* RELOAD_FOR_OTHER_ADDRESS conflicts with RELOAD_OTHER reloads. */
5415 time1 = copy ? 0 : 1;
5416 break;
5417 case RELOAD_OTHER:
5418 time1 = copy ? 1 : MAX_RECOG_OPERANDS * 5 + 5;
5419 break;
5420 /* For each input, we may have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
5421 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT. By adding 0 / 1 / 2 ,
5422 respectively, to the time values for these, we get distinct time
5423 values. To get distinct time values for each operand, we have to
5424 multiply opnum by at least three. We round that up to four because
5425 multiplying by four is often cheaper. */
5426 case RELOAD_FOR_INPADDR_ADDRESS:
5427 time1 = opnum * 4 + 2;
5428 break;
5429 case RELOAD_FOR_INPUT_ADDRESS:
5430 time1 = opnum * 4 + 3;
5431 break;
5432 case RELOAD_FOR_INPUT:
5433 /* All RELOAD_FOR_INPUT reloads remain live till the instruction
5434 executes (inclusive). */
5435 time1 = copy ? opnum * 4 + 4 : MAX_RECOG_OPERANDS * 4 + 3;
5436 break;
5437 case RELOAD_FOR_OPADDR_ADDR:
5438 /* opnum * 4 + 4
5439 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
5440 time1 = MAX_RECOG_OPERANDS * 4 + 1;
5441 break;
5442 case RELOAD_FOR_OPERAND_ADDRESS:
5443 /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
5444 is executed. */
5445 time1 = copy ? MAX_RECOG_OPERANDS * 4 + 2 : MAX_RECOG_OPERANDS * 4 + 3;
5446 break;
5447 case RELOAD_FOR_OUTADDR_ADDRESS:
5448 time1 = MAX_RECOG_OPERANDS * 4 + 4 + opnum;
5449 break;
5450 case RELOAD_FOR_OUTPUT_ADDRESS:
5451 time1 = MAX_RECOG_OPERANDS * 4 + 5 + opnum;
5452 break;
5453 default:
5454 time1 = MAX_RECOG_OPERANDS * 5 + 5;
5457 for (i = 0; i < n_reloads; i++)
5459 rtx reg = rld[i].reg_rtx;
5460 if (reg && REG_P (reg)
5461 && ((unsigned) regno - true_regnum (reg)
5462 <= hard_regno_nregs[REGNO (reg)][GET_MODE (reg)] - (unsigned) 1)
5463 && i != reloadnum)
5465 rtx other_input = rld[i].in;
5467 /* If the other reload loads the same input value, that
5468 avoids a conflict only if it is loading it into
5469 the same register. */
5470 if (true_regnum (reg) != start_regno)
5471 other_input = NULL_RTX;
5472 if (! other_input || ! rtx_equal_p (other_input, value)
5473 || rld[i].out || out)
5475 int time2;
5476 switch (rld[i].when_needed)
5478 case RELOAD_FOR_OTHER_ADDRESS:
5479 time2 = 0;
5480 break;
5481 case RELOAD_FOR_INPADDR_ADDRESS:
5482 /* find_reloads makes sure that a
5483 RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is only used
5484 by at most one - the first -
5485 RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS. If the
5486 address reload is inherited, the address address reload
5487 goes away, so we can ignore this conflict. */
5488 if (type == RELOAD_FOR_INPUT_ADDRESS && reloadnum == i + 1
5489 && ignore_address_reloads
5490 /* Unless the RELOAD_FOR_INPUT is an auto_inc expression.
5491 Then the address address is still needed to store
5492 back the new address. */
5493 && ! rld[reloadnum].out)
5494 continue;
5495 /* Likewise, if a RELOAD_FOR_INPUT can inherit a value, its
5496 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS
5497 reloads go away. */
5498 if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
5499 && ignore_address_reloads
5500 /* Unless we are reloading an auto_inc expression. */
5501 && ! rld[reloadnum].out)
5502 continue;
5503 time2 = rld[i].opnum * 4 + 2;
5504 break;
5505 case RELOAD_FOR_INPUT_ADDRESS:
5506 if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
5507 && ignore_address_reloads
5508 && ! rld[reloadnum].out)
5509 continue;
5510 time2 = rld[i].opnum * 4 + 3;
5511 break;
5512 case RELOAD_FOR_INPUT:
5513 time2 = rld[i].opnum * 4 + 4;
5514 check_earlyclobber = 1;
5515 break;
5516 /* rld[i].opnum * 4 + 4 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4
5517 == MAX_RECOG_OPERANDS * 4 */
5518 case RELOAD_FOR_OPADDR_ADDR:
5519 if (type == RELOAD_FOR_OPERAND_ADDRESS && reloadnum == i + 1
5520 && ignore_address_reloads
5521 && ! rld[reloadnum].out)
5522 continue;
5523 time2 = MAX_RECOG_OPERANDS * 4 + 1;
5524 break;
5525 case RELOAD_FOR_OPERAND_ADDRESS:
5526 time2 = MAX_RECOG_OPERANDS * 4 + 2;
5527 check_earlyclobber = 1;
5528 break;
5529 case RELOAD_FOR_INSN:
5530 time2 = MAX_RECOG_OPERANDS * 4 + 3;
5531 break;
5532 case RELOAD_FOR_OUTPUT:
5533 /* All RELOAD_FOR_OUTPUT reloads become live just after the
5534 instruction is executed. */
5535 time2 = MAX_RECOG_OPERANDS * 4 + 4;
5536 break;
5537 /* The first RELOAD_FOR_OUTADDR_ADDRESS reload conflicts with
5538 the RELOAD_FOR_OUTPUT reloads, so assign it the same time
5539 value. */
5540 case RELOAD_FOR_OUTADDR_ADDRESS:
5541 if (type == RELOAD_FOR_OUTPUT_ADDRESS && reloadnum == i + 1
5542 && ignore_address_reloads
5543 && ! rld[reloadnum].out)
5544 continue;
5545 time2 = MAX_RECOG_OPERANDS * 4 + 4 + rld[i].opnum;
5546 break;
5547 case RELOAD_FOR_OUTPUT_ADDRESS:
5548 time2 = MAX_RECOG_OPERANDS * 4 + 5 + rld[i].opnum;
5549 break;
5550 case RELOAD_OTHER:
5551 /* If there is no conflict in the input part, handle this
5552 like an output reload. */
5553 if (! rld[i].in || rtx_equal_p (other_input, value))
5555 time2 = MAX_RECOG_OPERANDS * 4 + 4;
5556 /* Earlyclobbered outputs must conflict with inputs. */
5557 if (earlyclobber_operand_p (rld[i].out))
5558 time2 = MAX_RECOG_OPERANDS * 4 + 3;
5560 break;
5562 time2 = 1;
5563 /* RELOAD_OTHER might be live beyond instruction execution,
5564 but this is not obvious when we set time2 = 1. So check
5565 here if there might be a problem with the new reload
5566 clobbering the register used by the RELOAD_OTHER. */
5567 if (out)
5568 return 0;
5569 break;
5570 default:
5571 return 0;
5573 if ((time1 >= time2
5574 && (! rld[i].in || rld[i].out
5575 || ! rtx_equal_p (other_input, value)))
5576 || (out && rld[reloadnum].out_reg
5577 && time2 >= MAX_RECOG_OPERANDS * 4 + 3))
5578 return 0;
5583 /* Earlyclobbered outputs must conflict with inputs. */
5584 if (check_earlyclobber && out && earlyclobber_operand_p (out))
5585 return 0;
5587 return 1;
5590 /* Return 1 if the value in reload reg REGNO, as used by a reload
5591 needed for the part of the insn specified by OPNUM and TYPE,
5592 may be used to load VALUE into it.
5594 MODE is the mode in which the register is used, this is needed to
5595 determine how many hard regs to test.
5597 Other read-only reloads with the same value do not conflict
5598 unless OUT is nonzero and these other reloads have to live while
5599 output reloads live.
5600 If OUT is CONST0_RTX, this is a special case: it means that the
5601 test should not be for using register REGNO as reload register, but
5602 for copying from register REGNO into the reload register.
5604 RELOADNUM is the number of the reload we want to load this value for;
5605 a reload does not conflict with itself.
5607 When IGNORE_ADDRESS_RELOADS is set, we do not count conflicts with
5608 reloads that load an address for the very reload we are considering.
5610 The caller has to make sure that there is no conflict with the return
5611 register. */
5613 static int
5614 free_for_value_p (int regno, enum machine_mode mode, int opnum,
5615 enum reload_type type, rtx value, rtx out, int reloadnum,
5616 int ignore_address_reloads)
5618 int nregs = hard_regno_nregs[regno][mode];
5619 while (nregs-- > 0)
5620 if (! reload_reg_free_for_value_p (regno, regno + nregs, opnum, type,
5621 value, out, reloadnum,
5622 ignore_address_reloads))
5623 return 0;
5624 return 1;
5627 /* Return nonzero if the rtx X is invariant over the current function. */
5628 /* ??? Actually, the places where we use this expect exactly what is
5629 tested here, and not everything that is function invariant. In
5630 particular, the frame pointer and arg pointer are special cased;
5631 pic_offset_table_rtx is not, and we must not spill these things to
5632 memory. */
5635 function_invariant_p (const_rtx x)
5637 if (CONSTANT_P (x))
5638 return 1;
5639 if (x == frame_pointer_rtx || x == arg_pointer_rtx)
5640 return 1;
5641 if (GET_CODE (x) == PLUS
5642 && (XEXP (x, 0) == frame_pointer_rtx || XEXP (x, 0) == arg_pointer_rtx)
5643 && CONSTANT_P (XEXP (x, 1)))
5644 return 1;
5645 return 0;
5648 /* Determine whether the reload reg X overlaps any rtx'es used for
5649 overriding inheritance. Return nonzero if so. */
5651 static int
5652 conflicts_with_override (rtx x)
5654 int i;
5655 for (i = 0; i < n_reloads; i++)
5656 if (reload_override_in[i]
5657 && reg_overlap_mentioned_p (x, reload_override_in[i]))
5658 return 1;
5659 return 0;
5662 /* Give an error message saying we failed to find a reload for INSN,
5663 and clear out reload R. */
5664 static void
5665 failed_reload (rtx insn, int r)
5667 if (asm_noperands (PATTERN (insn)) < 0)
5668 /* It's the compiler's fault. */
5669 fatal_insn ("could not find a spill register", insn);
5671 /* It's the user's fault; the operand's mode and constraint
5672 don't match. Disable this reload so we don't crash in final. */
5673 error_for_asm (insn,
5674 "%<asm%> operand constraint incompatible with operand size");
5675 rld[r].in = 0;
5676 rld[r].out = 0;
5677 rld[r].reg_rtx = 0;
5678 rld[r].optional = 1;
5679 rld[r].secondary_p = 1;
5682 /* I is the index in SPILL_REG_RTX of the reload register we are to allocate
5683 for reload R. If it's valid, get an rtx for it. Return nonzero if
5684 successful. */
5685 static int
5686 set_reload_reg (int i, int r)
5688 int regno;
5689 rtx reg = spill_reg_rtx[i];
5691 if (reg == 0 || GET_MODE (reg) != rld[r].mode)
5692 spill_reg_rtx[i] = reg
5693 = gen_rtx_REG (rld[r].mode, spill_regs[i]);
5695 regno = true_regnum (reg);
5697 /* Detect when the reload reg can't hold the reload mode.
5698 This used to be one `if', but the Sequent compiler can't handle that. */
5699 if (HARD_REGNO_MODE_OK (regno, rld[r].mode))
5701 enum machine_mode test_mode = VOIDmode;
5702 if (rld[r].in)
5703 test_mode = GET_MODE (rld[r].in);
5704 /* If rld[r].in has VOIDmode, it means we will load it
5705 in whatever mode the reload reg has: to wit, rld[r].mode.
5706 We have already tested that for validity. */
5707 /* Aside from that, we need to test that the expressions
5708 to reload from or into have modes which are valid for this
5709 reload register. Otherwise the reload insns would be invalid. */
5710 if (! (rld[r].in != 0 && test_mode != VOIDmode
5711 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
5712 if (! (rld[r].out != 0
5713 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (rld[r].out))))
5715 /* The reg is OK. */
5716 last_spill_reg = i;
5718 /* Mark as in use for this insn the reload regs we use
5719 for this. */
5720 mark_reload_reg_in_use (spill_regs[i], rld[r].opnum,
5721 rld[r].when_needed, rld[r].mode);
5723 rld[r].reg_rtx = reg;
5724 reload_spill_index[r] = spill_regs[i];
5725 return 1;
5728 return 0;
5731 /* Find a spill register to use as a reload register for reload R.
5732 LAST_RELOAD is nonzero if this is the last reload for the insn being
5733 processed.
5735 Set rld[R].reg_rtx to the register allocated.
5737 We return 1 if successful, or 0 if we couldn't find a spill reg and
5738 we didn't change anything. */
5740 static int
5741 allocate_reload_reg (struct insn_chain *chain ATTRIBUTE_UNUSED, int r,
5742 int last_reload)
5744 int i, pass, count;
5746 /* If we put this reload ahead, thinking it is a group,
5747 then insist on finding a group. Otherwise we can grab a
5748 reg that some other reload needs.
5749 (That can happen when we have a 68000 DATA_OR_FP_REG
5750 which is a group of data regs or one fp reg.)
5751 We need not be so restrictive if there are no more reloads
5752 for this insn.
5754 ??? Really it would be nicer to have smarter handling
5755 for that kind of reg class, where a problem like this is normal.
5756 Perhaps those classes should be avoided for reloading
5757 by use of more alternatives. */
5759 int force_group = rld[r].nregs > 1 && ! last_reload;
5761 /* If we want a single register and haven't yet found one,
5762 take any reg in the right class and not in use.
5763 If we want a consecutive group, here is where we look for it.
5765 We use two passes so we can first look for reload regs to
5766 reuse, which are already in use for other reloads in this insn,
5767 and only then use additional registers.
5768 I think that maximizing reuse is needed to make sure we don't
5769 run out of reload regs. Suppose we have three reloads, and
5770 reloads A and B can share regs. These need two regs.
5771 Suppose A and B are given different regs.
5772 That leaves none for C. */
5773 for (pass = 0; pass < 2; pass++)
5775 /* I is the index in spill_regs.
5776 We advance it round-robin between insns to use all spill regs
5777 equally, so that inherited reloads have a chance
5778 of leapfrogging each other. */
5780 i = last_spill_reg;
5782 for (count = 0; count < n_spills; count++)
5784 int rclass = (int) rld[r].rclass;
5785 int regnum;
5787 i++;
5788 if (i >= n_spills)
5789 i -= n_spills;
5790 regnum = spill_regs[i];
5792 if ((reload_reg_free_p (regnum, rld[r].opnum,
5793 rld[r].when_needed)
5794 || (rld[r].in
5795 /* We check reload_reg_used to make sure we
5796 don't clobber the return register. */
5797 && ! TEST_HARD_REG_BIT (reload_reg_used, regnum)
5798 && free_for_value_p (regnum, rld[r].mode, rld[r].opnum,
5799 rld[r].when_needed, rld[r].in,
5800 rld[r].out, r, 1)))
5801 && TEST_HARD_REG_BIT (reg_class_contents[rclass], regnum)
5802 && HARD_REGNO_MODE_OK (regnum, rld[r].mode)
5803 /* Look first for regs to share, then for unshared. But
5804 don't share regs used for inherited reloads; they are
5805 the ones we want to preserve. */
5806 && (pass
5807 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
5808 regnum)
5809 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
5810 regnum))))
5812 int nr = hard_regno_nregs[regnum][rld[r].mode];
5813 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
5814 (on 68000) got us two FP regs. If NR is 1,
5815 we would reject both of them. */
5816 if (force_group)
5817 nr = rld[r].nregs;
5818 /* If we need only one reg, we have already won. */
5819 if (nr == 1)
5821 /* But reject a single reg if we demand a group. */
5822 if (force_group)
5823 continue;
5824 break;
5826 /* Otherwise check that as many consecutive regs as we need
5827 are available here. */
5828 while (nr > 1)
5830 int regno = regnum + nr - 1;
5831 if (!(TEST_HARD_REG_BIT (reg_class_contents[rclass], regno)
5832 && spill_reg_order[regno] >= 0
5833 && reload_reg_free_p (regno, rld[r].opnum,
5834 rld[r].when_needed)))
5835 break;
5836 nr--;
5838 if (nr == 1)
5839 break;
5843 /* If we found something on pass 1, omit pass 2. */
5844 if (count < n_spills)
5845 break;
5848 /* We should have found a spill register by now. */
5849 if (count >= n_spills)
5850 return 0;
5852 /* I is the index in SPILL_REG_RTX of the reload register we are to
5853 allocate. Get an rtx for it and find its register number. */
5855 return set_reload_reg (i, r);
5858 /* Initialize all the tables needed to allocate reload registers.
5859 CHAIN is the insn currently being processed; SAVE_RELOAD_REG_RTX
5860 is the array we use to restore the reg_rtx field for every reload. */
5862 static void
5863 choose_reload_regs_init (struct insn_chain *chain, rtx *save_reload_reg_rtx)
5865 int i;
5867 for (i = 0; i < n_reloads; i++)
5868 rld[i].reg_rtx = save_reload_reg_rtx[i];
5870 memset (reload_inherited, 0, MAX_RELOADS);
5871 memset (reload_inheritance_insn, 0, MAX_RELOADS * sizeof (rtx));
5872 memset (reload_override_in, 0, MAX_RELOADS * sizeof (rtx));
5874 CLEAR_HARD_REG_SET (reload_reg_used);
5875 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
5876 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
5877 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
5878 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
5879 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
5881 CLEAR_HARD_REG_SET (reg_used_in_insn);
5883 HARD_REG_SET tmp;
5884 REG_SET_TO_HARD_REG_SET (tmp, &chain->live_throughout);
5885 IOR_HARD_REG_SET (reg_used_in_insn, tmp);
5886 REG_SET_TO_HARD_REG_SET (tmp, &chain->dead_or_set);
5887 IOR_HARD_REG_SET (reg_used_in_insn, tmp);
5888 compute_use_by_pseudos (&reg_used_in_insn, &chain->live_throughout);
5889 compute_use_by_pseudos (&reg_used_in_insn, &chain->dead_or_set);
5892 for (i = 0; i < reload_n_operands; i++)
5894 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
5895 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
5896 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
5897 CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
5898 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
5899 CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
5902 COMPL_HARD_REG_SET (reload_reg_unavailable, chain->used_spill_regs);
5904 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5906 for (i = 0; i < n_reloads; i++)
5907 /* If we have already decided to use a certain register,
5908 don't use it in another way. */
5909 if (rld[i].reg_rtx)
5910 mark_reload_reg_in_use (REGNO (rld[i].reg_rtx), rld[i].opnum,
5911 rld[i].when_needed, rld[i].mode);
5914 /* Assign hard reg targets for the pseudo-registers we must reload
5915 into hard regs for this insn.
5916 Also output the instructions to copy them in and out of the hard regs.
5918 For machines with register classes, we are responsible for
5919 finding a reload reg in the proper class. */
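/* Editorial overview (reading aid only; the comments inside the
   function are authoritative): for each reload, in order of increasing
   register class, we first try to inherit a value that a previous insn
   left in a hard register, then look for an equivalent value already
   lying around (find_equiv_reg), and only then allocate a fresh spill
   register via allocate_reload_reg.  If allocation fails while
   inheritance is enabled, the whole process is retried with
   inheritance turned off.  */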
5921 static void
5922 choose_reload_regs (struct insn_chain *chain)
5924 rtx insn = chain->insn;
5925 int i, j;
5926 unsigned int max_group_size = 1;
5927 enum reg_class group_class = NO_REGS;
5928 int pass, win, inheritance;
5930 rtx save_reload_reg_rtx[MAX_RELOADS];
5932 /* In order to be certain of getting the registers we need,
5933 we must sort the reloads into order of increasing register class.
5934 Then our grabbing of reload registers will parallel the process
5935 that provided the reload registers.
5937 Also note whether any of the reloads wants a consecutive group of regs.
5938 If so, record the maximum size of the group desired and what
5939 register class contains all the groups needed by this insn. */
5941 for (j = 0; j < n_reloads; j++)
5943 reload_order[j] = j;
5944 if (rld[j].reg_rtx != NULL_RTX)
5946 gcc_assert (REG_P (rld[j].reg_rtx)
5947 && HARD_REGISTER_P (rld[j].reg_rtx));
5948 reload_spill_index[j] = REGNO (rld[j].reg_rtx);
5950 else
5951 reload_spill_index[j] = -1;
5953 if (rld[j].nregs > 1)
5955 max_group_size = MAX (rld[j].nregs, max_group_size);
5956 group_class
5957 = reg_class_superunion[(int) rld[j].rclass][(int) group_class];
5960 save_reload_reg_rtx[j] = rld[j].reg_rtx;
5963 if (n_reloads > 1)
5964 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
5966 /* If -O, try first with inheritance, then turning it off.
5967 If not -O, don't do inheritance.
5968 Using inheritance when not optimizing leads to paradoxes
5969 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5970 because one side of the comparison might be inherited. */
5971 win = 0;
5972 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
5974 choose_reload_regs_init (chain, save_reload_reg_rtx);
5976 /* Process the reloads in order of preference just found.
5977 Beyond this point, subregs can be found in reload_reg_rtx.
5979 This used to look for an existing reloaded home for all of the
5980 reloads, and only then perform any new reloads. But that could lose
5981 if the reloads were done out of reg-class order because a later
5982 reload with a looser constraint might have an old home in a register
5983 needed by an earlier reload with a tighter constraint.
5985 To solve this, we make two passes over the reloads, in the order
5986 described above. In the first pass we try to inherit a reload
5987 from a previous insn. If there is a later reload that needs a
5988 class that is a proper subset of the class being processed, we must
5989 also allocate a spill register during the first pass.
5991 Then make a second pass over the reloads to allocate any reloads
5992 that haven't been given registers yet. */
5994 for (j = 0; j < n_reloads; j++)
5996 int r = reload_order[j];
5997 rtx search_equiv = NULL_RTX;
5999 /* Ignore reloads that got marked inoperative. */
6000 if (rld[r].out == 0 && rld[r].in == 0
6001 && ! rld[r].secondary_p)
6002 continue;
6004 /* If find_reloads chose to use reload_in or reload_out as a reload
6005 register, we don't need to choose one. Otherwise, try even if it
6006 found one since we might save an insn if we find the value lying
6007 around.
6008 Try also when reload_in is a pseudo without a hard reg. */
6009 if (rld[r].in != 0 && rld[r].reg_rtx != 0
6010 && (rtx_equal_p (rld[r].in, rld[r].reg_rtx)
6011 || (rtx_equal_p (rld[r].out, rld[r].reg_rtx)
6012 && !MEM_P (rld[r].in)
6013 && true_regnum (rld[r].in) < FIRST_PSEUDO_REGISTER)))
6014 continue;
6016 #if 0 /* No longer needed for correct operation.
6017 It might give better code, or might not; worth an experiment? */
6018 /* If this is an optional reload, we can't inherit from earlier insns
6019 until we are sure that any non-optional reloads have been allocated.
6020 The following code takes advantage of the fact that optional reloads
6021 are at the end of reload_order. */
6022 if (rld[r].optional != 0)
6023 for (i = 0; i < j; i++)
6024 if ((rld[reload_order[i]].out != 0
6025 || rld[reload_order[i]].in != 0
6026 || rld[reload_order[i]].secondary_p)
6027 && ! rld[reload_order[i]].optional
6028 && rld[reload_order[i]].reg_rtx == 0)
6029 allocate_reload_reg (chain, reload_order[i], 0);
6030 #endif
6032 /* First see if this pseudo is already available as reloaded
6033 for a previous insn. We cannot try to inherit for reloads
6034 that are smaller than the maximum number of registers needed
6035 for groups unless the register we would allocate cannot be used
6036 for the groups.
6038 We could check here to see if this is a secondary reload for
6039 an object that is already in a register of the desired class.
6040 This would avoid the need for the secondary reload register.
6041 But this is complex because we can't easily determine what
6042 objects might want to be loaded via this reload. So let a
6043 register be allocated here. In `emit_reload_insns' we suppress
6044 one of the loads in the case described above. */
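/* Editorial example (hypothetical numbers): if a previous insn reloaded
   pseudo 42 into hard reg 3, and reg_reloaded_contents still records
   that hard reg 3 holds pseudo 42, the tests below let this insn reuse
   hard reg 3 directly instead of emitting another load -- that is what
   "inheriting" a reload means here.  */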
6046 if (inheritance)
6048 int byte = 0;
6049 int regno = -1;
6050 enum machine_mode mode = VOIDmode;
6052 if (rld[r].in == 0)
6054 else if (REG_P (rld[r].in))
6056 regno = REGNO (rld[r].in);
6057 mode = GET_MODE (rld[r].in);
6059 else if (REG_P (rld[r].in_reg))
6061 regno = REGNO (rld[r].in_reg);
6062 mode = GET_MODE (rld[r].in_reg);
6064 else if (GET_CODE (rld[r].in_reg) == SUBREG
6065 && REG_P (SUBREG_REG (rld[r].in_reg)))
6067 regno = REGNO (SUBREG_REG (rld[r].in_reg));
6068 if (regno < FIRST_PSEUDO_REGISTER)
6069 regno = subreg_regno (rld[r].in_reg);
6070 else
6071 byte = SUBREG_BYTE (rld[r].in_reg);
6072 mode = GET_MODE (rld[r].in_reg);
6074 #ifdef AUTO_INC_DEC
6075 else if (GET_RTX_CLASS (GET_CODE (rld[r].in_reg)) == RTX_AUTOINC
6076 && REG_P (XEXP (rld[r].in_reg, 0)))
6078 regno = REGNO (XEXP (rld[r].in_reg, 0));
6079 mode = GET_MODE (XEXP (rld[r].in_reg, 0));
6080 rld[r].out = rld[r].in;
6082 #endif
6083 #if 0
6084 /* This won't work, since REGNO can be a pseudo reg number.
6085 Also, it takes much more hair to keep track of all the things
6086 that can invalidate an inherited reload of part of a pseudoreg. */
6087 else if (GET_CODE (rld[r].in) == SUBREG
6088 && REG_P (SUBREG_REG (rld[r].in)))
6089 regno = subreg_regno (rld[r].in);
6090 #endif
6092 if (regno >= 0
6093 && reg_last_reload_reg[regno] != 0
6094 #ifdef CANNOT_CHANGE_MODE_CLASS
6095 /* Verify that the register it's in can be used in
6096 mode MODE. */
6097 && !REG_CANNOT_CHANGE_MODE_P (REGNO (reg_last_reload_reg[regno]),
6098 GET_MODE (reg_last_reload_reg[regno]),
6099 mode)
6100 #endif
6103 enum reg_class rclass = rld[r].rclass, last_class;
6104 rtx last_reg = reg_last_reload_reg[regno];
6105 enum machine_mode need_mode;
6107 i = REGNO (last_reg);
6108 i += subreg_regno_offset (i, GET_MODE (last_reg), byte, mode);
6109 last_class = REGNO_REG_CLASS (i);
6111 if (byte == 0)
6112 need_mode = mode;
6113 else
6114 need_mode
6115 = smallest_mode_for_size
6116 (GET_MODE_BITSIZE (mode) + byte * BITS_PER_UNIT,
6117 GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
6118 ? MODE_INT : GET_MODE_CLASS (mode));
6120 if ((GET_MODE_SIZE (GET_MODE (last_reg))
6121 >= GET_MODE_SIZE (need_mode))
6122 && reg_reloaded_contents[i] == regno
6123 && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
6124 && HARD_REGNO_MODE_OK (i, rld[r].mode)
6125 && (TEST_HARD_REG_BIT (reg_class_contents[(int) rclass], i)
6126 /* Even if we can't use this register as a reload
6127 register, we might use it for reload_override_in,
6128 if copying it to the desired class is cheap
6129 enough. */
6130 || ((REGISTER_MOVE_COST (mode, last_class, rclass)
6131 < MEMORY_MOVE_COST (mode, rclass, 1))
6132 && (secondary_reload_class (1, rclass, mode,
6133 last_reg)
6134 == NO_REGS)
6135 #ifdef SECONDARY_MEMORY_NEEDED
6136 && ! SECONDARY_MEMORY_NEEDED (last_class, rclass,
6137 mode)
6138 #endif
6141 && (rld[r].nregs == max_group_size
6142 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
6144 && free_for_value_p (i, rld[r].mode, rld[r].opnum,
6145 rld[r].when_needed, rld[r].in,
6146 const0_rtx, r, 1))
6148 /* If a group is needed, verify that all the subsequent
6149 registers still have their values intact. */
6150 int nr = hard_regno_nregs[i][rld[r].mode];
6151 int k;
6153 for (k = 1; k < nr; k++)
6154 if (reg_reloaded_contents[i + k] != regno
6155 || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))
6156 break;
6158 if (k == nr)
6160 int i1;
6161 int bad_for_class;
6163 last_reg = (GET_MODE (last_reg) == mode
6164 ? last_reg : gen_rtx_REG (mode, i));
6166 bad_for_class = 0;
6167 for (k = 0; k < nr; k++)
6168 bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].rclass],
6169 i+k);
6171 /* We found a register that contains the
6172 value we need. If this register is the
6173 same as an `earlyclobber' operand of the
6174 current insn, just mark it as a place to
6175 reload from since we can't use it as the
6176 reload register itself. */
6178 for (i1 = 0; i1 < n_earlyclobbers; i1++)
6179 if (reg_overlap_mentioned_for_reload_p
6180 (reg_last_reload_reg[regno],
6181 reload_earlyclobbers[i1]))
6182 break;
6184 if (i1 != n_earlyclobbers
6185 || ! (free_for_value_p (i, rld[r].mode,
6186 rld[r].opnum,
6187 rld[r].when_needed, rld[r].in,
6188 rld[r].out, r, 1))
6189 /* Don't use it if we'd clobber a pseudo reg. */
6190 || (TEST_HARD_REG_BIT (reg_used_in_insn, i)
6191 && rld[r].out
6192 && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
6193 /* Don't clobber the frame pointer. */
6194 || (i == HARD_FRAME_POINTER_REGNUM
6195 && frame_pointer_needed
6196 && rld[r].out)
6197 /* Don't really use the inherited spill reg
6198 if we need it wider than we've got it. */
6199 || (GET_MODE_SIZE (rld[r].mode)
6200 > GET_MODE_SIZE (mode))
6201 || bad_for_class
6203 /* If find_reloads chose reload_out as reload
6204 register, stay with it - that leaves the
6205 inherited register for subsequent reloads. */
6206 || (rld[r].out && rld[r].reg_rtx
6207 && rtx_equal_p (rld[r].out, rld[r].reg_rtx)))
6209 if (! rld[r].optional)
6211 reload_override_in[r] = last_reg;
6212 reload_inheritance_insn[r]
6213 = reg_reloaded_insn[i];
6216 else
6218 int k;
6219 /* We can use this as a reload reg. */
6220 /* Mark the register as in use for this part of
6221 the insn. */
6222 mark_reload_reg_in_use (i,
6223 rld[r].opnum,
6224 rld[r].when_needed,
6225 rld[r].mode);
6226 rld[r].reg_rtx = last_reg;
6227 reload_inherited[r] = 1;
6228 reload_inheritance_insn[r]
6229 = reg_reloaded_insn[i];
6230 reload_spill_index[r] = i;
6231 for (k = 0; k < nr; k++)
6232 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6233 i + k);
6240 /* Here's another way to see if the value is already lying around. */
6241 if (inheritance
6242 && rld[r].in != 0
6243 && ! reload_inherited[r]
6244 && rld[r].out == 0
6245 && (CONSTANT_P (rld[r].in)
6246 || GET_CODE (rld[r].in) == PLUS
6247 || REG_P (rld[r].in)
6248 || MEM_P (rld[r].in))
6249 && (rld[r].nregs == max_group_size
6250 || ! reg_classes_intersect_p (rld[r].rclass, group_class)))
6251 search_equiv = rld[r].in;
6252 /* If this is an output reload from a simple move insn, see
6253 if an equivalence for the input is available. */
6254 else if (inheritance && rld[r].in == 0 && rld[r].out != 0)
6256 rtx set = single_set (insn);
6258 if (set
6259 && rtx_equal_p (rld[r].out, SET_DEST (set))
6260 && CONSTANT_P (SET_SRC (set)))
6261 search_equiv = SET_SRC (set);
6264 if (search_equiv)
6266 rtx equiv
6267 = find_equiv_reg (search_equiv, insn, rld[r].rclass,
6268 -1, NULL, 0, rld[r].mode);
6269 int regno = 0;
6271 if (equiv != 0)
6273 if (REG_P (equiv))
6274 regno = REGNO (equiv);
6275 else
6277 /* This must be a SUBREG of a hard register.
6278 Make a new REG since this might be used in an
6279 address and not all machines support SUBREGs
6280 there. */
6281 gcc_assert (GET_CODE (equiv) == SUBREG);
6282 regno = subreg_regno (equiv);
6283 equiv = gen_rtx_REG (rld[r].mode, regno);
6284 /* If we choose EQUIV as the reload register, but the
6285 loop below decides to cancel the inheritance, we'll
6286 end up reloading EQUIV in rld[r].mode, not the mode
6287 it had originally. That isn't safe when EQUIV isn't
6288 available as a spill register since its value might
6289 still be live at this point. */
6290 for (i = regno; i < regno + (int) rld[r].nregs; i++)
6291 if (TEST_HARD_REG_BIT (reload_reg_unavailable, i))
6292 equiv = 0;
6296 /* If we found a spill reg, reject it unless it is free
6297 and of the desired class. */
6298 if (equiv != 0)
6300 int regs_used = 0;
6301 int bad_for_class = 0;
6302 int max_regno = regno + rld[r].nregs;
6304 for (i = regno; i < max_regno; i++)
6306 regs_used |= TEST_HARD_REG_BIT (reload_reg_used_at_all,
6308 bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].rclass],
6312 if ((regs_used
6313 && ! free_for_value_p (regno, rld[r].mode,
6314 rld[r].opnum, rld[r].when_needed,
6315 rld[r].in, rld[r].out, r, 1))
6316 || bad_for_class)
6317 equiv = 0;
6320 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, rld[r].mode))
6321 equiv = 0;
6323 /* We found a register that contains the value we need.
6324 If this register is the same as an `earlyclobber' operand
6325 of the current insn, just mark it as a place to reload from
6326 since we can't use it as the reload register itself. */
6328 if (equiv != 0)
6329 for (i = 0; i < n_earlyclobbers; i++)
6330 if (reg_overlap_mentioned_for_reload_p (equiv,
6331 reload_earlyclobbers[i]))
6333 if (! rld[r].optional)
6334 reload_override_in[r] = equiv;
6335 equiv = 0;
6336 break;
6339 /* If the equiv register we have found is explicitly clobbered
6340 in the current insn, it depends on the reload type if we
6341 can use it, use it for reload_override_in, or not at all.
6342 In particular, we then can't use EQUIV for a
6343 RELOAD_FOR_OUTPUT_ADDRESS reload. */
6345 if (equiv != 0)
6347 if (regno_clobbered_p (regno, insn, rld[r].mode, 2))
6348 switch (rld[r].when_needed)
6350 case RELOAD_FOR_OTHER_ADDRESS:
6351 case RELOAD_FOR_INPADDR_ADDRESS:
6352 case RELOAD_FOR_INPUT_ADDRESS:
6353 case RELOAD_FOR_OPADDR_ADDR:
6354 break;
6355 case RELOAD_OTHER:
6356 case RELOAD_FOR_INPUT:
6357 case RELOAD_FOR_OPERAND_ADDRESS:
6358 if (! rld[r].optional)
6359 reload_override_in[r] = equiv;
6360 /* Fall through. */
6361 default:
6362 equiv = 0;
6363 break;
6365 else if (regno_clobbered_p (regno, insn, rld[r].mode, 1))
6366 switch (rld[r].when_needed)
6368 case RELOAD_FOR_OTHER_ADDRESS:
6369 case RELOAD_FOR_INPADDR_ADDRESS:
6370 case RELOAD_FOR_INPUT_ADDRESS:
6371 case RELOAD_FOR_OPADDR_ADDR:
6372 case RELOAD_FOR_OPERAND_ADDRESS:
6373 case RELOAD_FOR_INPUT:
6374 break;
6375 case RELOAD_OTHER:
6376 if (! rld[r].optional)
6377 reload_override_in[r] = equiv;
6378 /* Fall through. */
6379 default:
6380 equiv = 0;
6381 break;
6385 /* If we found an equivalent reg, say no code need be generated
6386 to load it, and use it as our reload reg. */
6387 if (equiv != 0
6388 && (regno != HARD_FRAME_POINTER_REGNUM
6389 || !frame_pointer_needed))
6391 int nr = hard_regno_nregs[regno][rld[r].mode];
6392 int k;
6393 rld[r].reg_rtx = equiv;
6394 reload_spill_index[r] = regno;
6395 reload_inherited[r] = 1;
6397 /* If reg_reloaded_valid is not set for this register,
6398 there might be a stale spill_reg_store lying around.
6399 We must clear it, since otherwise emit_reload_insns
6400 might delete the store. */
6401 if (! TEST_HARD_REG_BIT (reg_reloaded_valid, regno))
6402 spill_reg_store[regno] = NULL_RTX;
6403 /* If any of the hard registers in EQUIV are spill
6404 registers, mark them as in use for this insn. */
6405 for (k = 0; k < nr; k++)
6407 i = spill_reg_order[regno + k];
6408 if (i >= 0)
6410 mark_reload_reg_in_use (regno, rld[r].opnum,
6411 rld[r].when_needed,
6412 rld[r].mode);
6413 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6414 regno + k);
6420 /* If we found a register to use already, or if this is an optional
6421 reload, we are done. */
6422 if (rld[r].reg_rtx != 0 || rld[r].optional != 0)
6423 continue;
6425 #if 0
6426 /* No longer needed for correct operation. Might or might
6427 not give better code on the average. Want to experiment? */
6429 /* See if there is a later reload that has a class different from our
6430 class that intersects our class or that requires less register
6431 than our reload. If so, we must allocate a register to this
6432 reload now, since that reload might inherit a previous reload
6433 and take the only available register in our class. Don't do this
6434 for optional reloads since they will force all previous reloads
6435 to be allocated. Also don't do this for reloads that have been
6436 turned off. */
6438 for (i = j + 1; i < n_reloads; i++)
6440 int s = reload_order[i];
6442 if ((rld[s].in == 0 && rld[s].out == 0
6443 && ! rld[s].secondary_p)
6444 || rld[s].optional)
6445 continue;
6447 if ((rld[s].rclass != rld[r].rclass
6448 && reg_classes_intersect_p (rld[r].rclass,
6449 rld[s].rclass))
6450 || rld[s].nregs < rld[r].nregs)
6451 break;
6454 if (i == n_reloads)
6455 continue;
6457 allocate_reload_reg (chain, r, j == n_reloads - 1);
6458 #endif
6461 /* Now allocate reload registers for anything non-optional that
6462 didn't get one yet. */
6463 for (j = 0; j < n_reloads; j++)
6465 int r = reload_order[j];
6467 /* Ignore reloads that got marked inoperative. */
6468 if (rld[r].out == 0 && rld[r].in == 0 && ! rld[r].secondary_p)
6469 continue;
6471 /* Skip reloads that already have a register allocated or are
6472 optional. */
6473 if (rld[r].reg_rtx != 0 || rld[r].optional)
6474 continue;
6476 if (! allocate_reload_reg (chain, r, j == n_reloads - 1))
6477 break;
6480 /* If that loop got all the way, we have won. */
6481 if (j == n_reloads)
6483 win = 1;
6484 break;
6487 /* Loop around and try without any inheritance. */
6490 if (! win)
6492 /* First undo everything done by the failed attempt
6493 to allocate with inheritance. */
6494 choose_reload_regs_init (chain, save_reload_reg_rtx);
6496 /* Some sanity tests to verify that the reloads found in the first
6497 pass are identical to the ones we have now. */
6498 gcc_assert (chain->n_reloads == n_reloads);
6500 for (i = 0; i < n_reloads; i++)
6502 if (chain->rld[i].regno < 0 || chain->rld[i].reg_rtx != 0)
6503 continue;
6504 gcc_assert (chain->rld[i].when_needed == rld[i].when_needed);
6505 for (j = 0; j < n_spills; j++)
6506 if (spill_regs[j] == chain->rld[i].regno)
6507 if (! set_reload_reg (j, i))
6508 failed_reload (chain->insn, i);
6512 /* If we thought we could inherit a reload, because it seemed that
6513 nothing else wanted the same reload register earlier in the insn,
6514 verify that assumption, now that all reloads have been assigned.
6515 Likewise for reloads where reload_override_in has been set. */
6517 /* If doing expensive optimizations, do one preliminary pass that doesn't
6518 cancel any inheritance, but removes reloads that have been needed only
6519 for reloads that we know can be inherited. */
6520 for (pass = flag_expensive_optimizations; pass >= 0; pass--)
6522 for (j = 0; j < n_reloads; j++)
6524 int r = reload_order[j];
6525 rtx check_reg;
6526 if (reload_inherited[r] && rld[r].reg_rtx)
6527 check_reg = rld[r].reg_rtx;
6528 else if (reload_override_in[r]
6529 && (REG_P (reload_override_in[r])
6530 || GET_CODE (reload_override_in[r]) == SUBREG))
6531 check_reg = reload_override_in[r];
6532 else
6533 continue;
6534 if (! free_for_value_p (true_regnum (check_reg), rld[r].mode,
6535 rld[r].opnum, rld[r].when_needed, rld[r].in,
6536 (reload_inherited[r]
6537 ? rld[r].out : const0_rtx),
6538 r, 1))
6540 if (pass)
6541 continue;
6542 reload_inherited[r] = 0;
6543 reload_override_in[r] = 0;
6545 /* If we can inherit a RELOAD_FOR_INPUT, or can use a
6546 reload_override_in, then we do not need its related
6547 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
6548 likewise for other reload types.
6549 We handle this by removing a reload when its only replacement
6550 is mentioned in reload_in of the reload we are going to inherit.
6551 A special case is auto_inc expressions; even if the input is
6552 inherited, we still need the address for the output. We can
6553 recognize them because they have RELOAD_OUT set to RELOAD_IN.
6554 If we succeeded removing some reload and we are doing a preliminary
6555 pass just to remove such reloads, make another pass, since the
6556 removal of one reload might allow us to inherit another one. */
6557 else if (rld[r].in
6558 && rld[r].out != rld[r].in
6559 && remove_address_replacements (rld[r].in) && pass)
6560 pass = 2;
6564 /* Now that reload_override_in is known valid,
6565 actually override reload_in. */
6566 for (j = 0; j < n_reloads; j++)
6567 if (reload_override_in[j])
6568 rld[j].in = reload_override_in[j];
6570 /* If this reload won't be done because it has been canceled or is
6571 optional and not inherited, clear reload_reg_rtx so other
6572 routines (such as subst_reloads) don't get confused. */
6573 for (j = 0; j < n_reloads; j++)
6574 if (rld[j].reg_rtx != 0
6575 && ((rld[j].optional && ! reload_inherited[j])
6576 || (rld[j].in == 0 && rld[j].out == 0
6577 && ! rld[j].secondary_p)))
6579 int regno = true_regnum (rld[j].reg_rtx);
6581 if (spill_reg_order[regno] >= 0)
6582 clear_reload_reg_in_use (regno, rld[j].opnum,
6583 rld[j].when_needed, rld[j].mode);
6584 rld[j].reg_rtx = 0;
6585 reload_spill_index[j] = -1;
6588 /* Record which pseudos and which spill regs have output reloads. */
6589 for (j = 0; j < n_reloads; j++)
6591 int r = reload_order[j];
6593 i = reload_spill_index[r];
6595 /* I is nonneg if this reload uses a register.
6596 If rld[r].reg_rtx is 0, this is an optional reload
6597 that we opted to ignore. */
6598 if (rld[r].out_reg != 0 && REG_P (rld[r].out_reg)
6599 && rld[r].reg_rtx != 0)
6601 int nregno = REGNO (rld[r].out_reg);
6602 int nr = 1;
6604 if (nregno < FIRST_PSEUDO_REGISTER)
6605 nr = hard_regno_nregs[nregno][rld[r].mode];
6607 while (--nr >= 0)
6608 SET_REGNO_REG_SET (&reg_has_output_reload,
6609 nregno + nr);
6611 if (i >= 0)
6613 nr = hard_regno_nregs[i][rld[r].mode];
6614 while (--nr >= 0)
6615 SET_HARD_REG_BIT (reg_is_output_reload, i + nr);
6618 gcc_assert (rld[r].when_needed == RELOAD_OTHER
6619 || rld[r].when_needed == RELOAD_FOR_OUTPUT
6620 || rld[r].when_needed == RELOAD_FOR_INSN);
6625 /* Deallocate the reload register for reload R. This is called from
6626 remove_address_replacements. */
6628 void
6629 deallocate_reload_reg (int r)
6631 int regno;
6633 if (! rld[r].reg_rtx)
6634 return;
6635 regno = true_regnum (rld[r].reg_rtx);
6636 rld[r].reg_rtx = 0;
6637 if (spill_reg_order[regno] >= 0)
6638 clear_reload_reg_in_use (regno, rld[r].opnum, rld[r].when_needed,
6639 rld[r].mode);
6640 reload_spill_index[r] = -1;
6643 /* If SMALL_REGISTER_CLASSES is nonzero, we may not have merged two
6644 reloads of the same item for fear that we might not have enough reload
6645 registers. However, normally they will get the same reload register
6646 and hence actually need not be loaded twice.
6648 Here we check for the most common case of this phenomenon: when we have
6649 a number of reloads for the same object, each of which was allocated
6650 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
6651 reload, and is not modified in the insn itself. If we find such,
6652 merge all the reloads and set the resulting reload to RELOAD_OTHER.
6653 This will not increase the number of spill registers needed and will
6654 prevent redundant code. */
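/* Editorial example (hypothetical, for illustration): if the same
   constant address is needed both by a RELOAD_FOR_INPUT_ADDRESS reload
   and by a RELOAD_FOR_OTHER_ADDRESS reload, and both were assigned the
   same reload register, the loop below folds them into a single
   RELOAD_OTHER reload and transfers the replacements, so the value is
   loaded only once.  */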
6656 static void
6657 merge_assigned_reloads (rtx insn)
6659 int i, j;
6661 /* Scan all the reloads looking for ones that only load values and
6662 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
6663 assigned and not modified by INSN. */
6665 for (i = 0; i < n_reloads; i++)
6667 int conflicting_input = 0;
6668 int max_input_address_opnum = -1;
6669 int min_conflicting_input_opnum = MAX_RECOG_OPERANDS;
6671 if (rld[i].in == 0 || rld[i].when_needed == RELOAD_OTHER
6672 || rld[i].out != 0 || rld[i].reg_rtx == 0
6673 || reg_set_p (rld[i].reg_rtx, insn))
6674 continue;
6676 /* Look at all other reloads. Ensure that the only use of this
6677 reload_reg_rtx is in a reload that just loads the same value
6678 as we do. Note that any secondary reloads must be of the identical
6679 class since the values, modes, and result registers are the
6680 same, so we need not do anything with any secondary reloads. */
6682 for (j = 0; j < n_reloads; j++)
6684 if (i == j || rld[j].reg_rtx == 0
6685 || ! reg_overlap_mentioned_p (rld[j].reg_rtx,
6686 rld[i].reg_rtx))
6687 continue;
6689 if (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6690 && rld[j].opnum > max_input_address_opnum)
6691 max_input_address_opnum = rld[j].opnum;
6693 /* If the reload regs aren't exactly the same (e.g., different modes)
6694 or if the values are different, we can't merge this reload.
6695 But if it is an input reload, we might still merge
6696 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_OTHER_ADDRESS reloads. */
6698 if (! rtx_equal_p (rld[i].reg_rtx, rld[j].reg_rtx)
6699 || rld[j].out != 0 || rld[j].in == 0
6700 || ! rtx_equal_p (rld[i].in, rld[j].in))
6702 if (rld[j].when_needed != RELOAD_FOR_INPUT
6703 || ((rld[i].when_needed != RELOAD_FOR_INPUT_ADDRESS
6704 || rld[i].opnum > rld[j].opnum)
6705 && rld[i].when_needed != RELOAD_FOR_OTHER_ADDRESS))
6706 break;
6707 conflicting_input = 1;
6708 if (min_conflicting_input_opnum > rld[j].opnum)
6709 min_conflicting_input_opnum = rld[j].opnum;
6713 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
6714 we, in fact, found any matching reloads. */
6716 if (j == n_reloads
6717 && max_input_address_opnum <= min_conflicting_input_opnum)
6719 gcc_assert (rld[i].when_needed != RELOAD_FOR_OUTPUT);
6721 for (j = 0; j < n_reloads; j++)
6722 if (i != j && rld[j].reg_rtx != 0
6723 && rtx_equal_p (rld[i].reg_rtx, rld[j].reg_rtx)
6724 && (! conflicting_input
6725 || rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6726 || rld[j].when_needed == RELOAD_FOR_OTHER_ADDRESS))
6728 rld[i].when_needed = RELOAD_OTHER;
6729 rld[j].in = 0;
6730 reload_spill_index[j] = -1;
6731 transfer_replacements (i, j);
6734 /* If this is now RELOAD_OTHER, look for any reloads that
6735 load parts of this operand and set them to
6736 RELOAD_FOR_OTHER_ADDRESS if they were for inputs,
6737 RELOAD_OTHER for outputs. Note that this test is
6738 equivalent to looking for reloads for this operand
6739 number.
6741 We must take special care with RELOAD_FOR_OUTPUT_ADDRESS;
6742 it may share registers with a RELOAD_FOR_INPUT, so we can
6743 not change it to RELOAD_FOR_OTHER_ADDRESS. We should
6744 never need to, since we do not modify RELOAD_FOR_OUTPUT.
6746 It is possible that the RELOAD_FOR_OPERAND_ADDRESS
6747 instruction is assigned the same register as the earlier
6748 RELOAD_FOR_OTHER_ADDRESS instruction. Merging these two
6749 instructions will cause the RELOAD_FOR_OTHER_ADDRESS
6750 instruction to be deleted later on. */
6752 if (rld[i].when_needed == RELOAD_OTHER)
6753 for (j = 0; j < n_reloads; j++)
6754 if (rld[j].in != 0
6755 && rld[j].when_needed != RELOAD_OTHER
6756 && rld[j].when_needed != RELOAD_FOR_OTHER_ADDRESS
6757 && rld[j].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
6758 && rld[j].when_needed != RELOAD_FOR_OPERAND_ADDRESS
6759 && (! conflicting_input
6760 || rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6761 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
6762 && reg_overlap_mentioned_for_reload_p (rld[j].in,
6763 rld[i].in))
6765 int k;
6767 rld[j].when_needed
6768 = ((rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6769 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
6770 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER);
6772 /* Check to see if we accidentally converted two
6773 reloads that use the same reload register with
6774 different inputs to the same type. If so, the
6775 resulting code won't work. */
6776 if (rld[j].reg_rtx)
6777 for (k = 0; k < j; k++)
6778 gcc_assert (rld[k].in == 0 || rld[k].reg_rtx == 0
6779 || rld[k].when_needed != rld[j].when_needed
6780 || !rtx_equal_p (rld[k].reg_rtx,
6781 rld[j].reg_rtx)
6782 || rtx_equal_p (rld[k].in,
6783 rld[j].in));
6789 /* These arrays are filled by emit_reload_insns and its subroutines. */
6790 static rtx input_reload_insns[MAX_RECOG_OPERANDS];
6791 static rtx other_input_address_reload_insns = 0;
6792 static rtx other_input_reload_insns = 0;
6793 static rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
6794 static rtx inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6795 static rtx output_reload_insns[MAX_RECOG_OPERANDS];
6796 static rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
6797 static rtx outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6798 static rtx operand_reload_insns = 0;
6799 static rtx other_operand_reload_insns = 0;
6800 static rtx other_output_reload_insns[MAX_RECOG_OPERANDS];
6802 /* Values to be put in spill_reg_store are put here first. */
6803 static rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
6804 static HARD_REG_SET reg_reloaded_died;
6806 /* Check if *RELOAD_REG is suitable as an intermediate or scratch register
6807 of class NEW_CLASS with mode NEW_MODE. Or alternatively, if ALT_RELOAD_REG
6808 is nonzero, check whether that is suitable. On success, change *RELOAD_REG to the
6809 adjusted register, and return true. Otherwise, return false. */
6810 static bool
6811 reload_adjust_reg_for_temp (rtx *reload_reg, rtx alt_reload_reg,
6812 enum reg_class new_class,
6813 enum machine_mode new_mode)
6816 rtx reg;
6818 for (reg = *reload_reg; reg; reg = alt_reload_reg, alt_reload_reg = 0)
6820 unsigned regno = REGNO (reg);
6822 if (!TEST_HARD_REG_BIT (reg_class_contents[(int) new_class], regno))
6823 continue;
6824 if (GET_MODE (reg) != new_mode)
6826 if (!HARD_REGNO_MODE_OK (regno, new_mode))
6827 continue;
6828 if (hard_regno_nregs[regno][new_mode]
6829 > hard_regno_nregs[regno][GET_MODE (reg)])
6830 continue;
6831 reg = reload_adjust_reg_for_mode (reg, new_mode);
6833 *reload_reg = reg;
6834 return true;
6836 return false;
6839 /* Check if *RELOAD_REG is suitable as a scratch register for the reload
6840 pattern with insn_code ICODE, or alternatively, if ALT_RELOAD_REG is
6841 nonzero, check whether that is suitable. On success, change *RELOAD_REG to the
6842 adjusted register, and return true. Otherwise, return false. */
6843 static bool
6844 reload_adjust_reg_for_icode (rtx *reload_reg, rtx alt_reload_reg,
6845 enum insn_code icode)
6848 enum reg_class new_class = scratch_reload_class (icode);
6849 enum machine_mode new_mode = insn_data[(int) icode].operand[2].mode;
6851 return reload_adjust_reg_for_temp (reload_reg, alt_reload_reg,
6852 new_class, new_mode);
6855 /* Generate insns to perform reload RL, which is for the insn in CHAIN and
6856 has the number J. OLD contains the value to be used as input. */
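/* Editorial outline (reading aid only): the code below picks OLDEQUIV,
   the rtx actually read from; selects which pending insn sequence to
   extend according to rl->when_needed; handles auto-increment reloads
   as a special case via inc_for_reload; and, when optimizing, tries to
   redirect the previous insn's SET_DEST straight into the reload
   register so the dying pseudo never needs a hard register of its
   own.  */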
6858 static void
6859 emit_input_reload_insns (struct insn_chain *chain, struct reload *rl,
6860 rtx old, int j)
6862 rtx insn = chain->insn;
6863 rtx reloadreg;
6864 rtx oldequiv_reg = 0;
6865 rtx oldequiv = 0;
6866 int special = 0;
6867 enum machine_mode mode;
6868 rtx *where;
6870 /* delete_output_reload is only invoked properly if old contains
6871 the original pseudo register. Since this is replaced with a
6872 hard reg when RELOAD_OVERRIDE_IN is set, see if we can
6873 find the pseudo in RELOAD_IN_REG. */
6874 if (reload_override_in[j]
6875 && REG_P (rl->in_reg))
6877 oldequiv = old;
6878 old = rl->in_reg;
6880 if (oldequiv == 0)
6881 oldequiv = old;
6882 else if (REG_P (oldequiv))
6883 oldequiv_reg = oldequiv;
6884 else if (GET_CODE (oldequiv) == SUBREG)
6885 oldequiv_reg = SUBREG_REG (oldequiv);
6887 reloadreg = reload_reg_rtx_for_input[j];
6888 mode = GET_MODE (reloadreg);
6890 /* If we are reloading from a register that was recently stored into
6891 by an output reload, see if we can prove there was
6892 actually no need to store the old value in it. */
6894 if (optimize && REG_P (oldequiv)
6895 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6896 && spill_reg_store[REGNO (oldequiv)]
6897 && REG_P (old)
6898 && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (oldequiv)])
6899 || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
6900 rl->out_reg)))
6901 delete_output_reload (insn, j, REGNO (oldequiv), reloadreg);
6903 /* Encapsulate OLDEQUIV into the reload mode, then load RELOADREG from
6904 OLDEQUIV. */
6906 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
6907 oldequiv = SUBREG_REG (oldequiv);
6908 if (GET_MODE (oldequiv) != VOIDmode
6909 && mode != GET_MODE (oldequiv))
6910 oldequiv = gen_lowpart_SUBREG (mode, oldequiv);
6912 /* Switch to the right place to emit the reload insns. */
6913 switch (rl->when_needed)
6915 case RELOAD_OTHER:
6916 where = &other_input_reload_insns;
6917 break;
6918 case RELOAD_FOR_INPUT:
6919 where = &input_reload_insns[rl->opnum];
6920 break;
6921 case RELOAD_FOR_INPUT_ADDRESS:
6922 where = &input_address_reload_insns[rl->opnum];
6923 break;
6924 case RELOAD_FOR_INPADDR_ADDRESS:
6925 where = &inpaddr_address_reload_insns[rl->opnum];
6926 break;
6927 case RELOAD_FOR_OUTPUT_ADDRESS:
6928 where = &output_address_reload_insns[rl->opnum];
6929 break;
6930 case RELOAD_FOR_OUTADDR_ADDRESS:
6931 where = &outaddr_address_reload_insns[rl->opnum];
6932 break;
6933 case RELOAD_FOR_OPERAND_ADDRESS:
6934 where = &operand_reload_insns;
6935 break;
6936 case RELOAD_FOR_OPADDR_ADDR:
6937 where = &other_operand_reload_insns;
6938 break;
6939 case RELOAD_FOR_OTHER_ADDRESS:
6940 where = &other_input_address_reload_insns;
6941 break;
6942 default:
6943 gcc_unreachable ();
6946 push_to_sequence (*where);
6948 /* Auto-increment addresses must be reloaded in a special way. */
6949 if (rl->out && ! rl->out_reg)
6951 /* We are not going to bother supporting the case where an
6952 incremented register can't be copied directly from
6953 OLDEQUIV since this seems highly unlikely. */
6954 gcc_assert (rl->secondary_in_reload < 0);
6956 if (reload_inherited[j])
6957 oldequiv = reloadreg;
6959 old = XEXP (rl->in_reg, 0);
6961 if (optimize && REG_P (oldequiv)
6962 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6963 && spill_reg_store[REGNO (oldequiv)]
6964 && REG_P (old)
6965 && (dead_or_set_p (insn,
6966 spill_reg_stored_to[REGNO (oldequiv)])
6967 || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
6968 old)))
6969 delete_output_reload (insn, j, REGNO (oldequiv), reloadreg);
6971 /* Prevent normal processing of this reload. */
6972 special = 1;
6973 /* Output a special code sequence for this case. */
6974 new_spill_reg_store[REGNO (reloadreg)]
6975 = inc_for_reload (reloadreg, oldequiv, rl->out,
6976 rl->inc);
6979 /* If we are reloading a pseudo-register that was set by the previous
6980 insn, see if we can get rid of that pseudo-register entirely
6981 by redirecting the previous insn into our reload register. */
6983 else if (optimize && REG_P (old)
6984 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6985 && dead_or_set_p (insn, old)
6986 /* This is unsafe if some other reload
6987 uses the same reg first. */
6988 && ! conflicts_with_override (reloadreg)
6989 && free_for_value_p (REGNO (reloadreg), rl->mode, rl->opnum,
6990 rl->when_needed, old, rl->out, j, 0))
6992 rtx temp = PREV_INSN (insn);
6993 while (temp && (NOTE_P (temp) || DEBUG_INSN_P (temp)))
6994 temp = PREV_INSN (temp);
6995 if (temp
6996 && NONJUMP_INSN_P (temp)
6997 && GET_CODE (PATTERN (temp)) == SET
6998 && SET_DEST (PATTERN (temp)) == old
6999 /* Make sure we can access insn_operand_constraint. */
7000 && asm_noperands (PATTERN (temp)) < 0
7001 /* This is unsafe if the operand occurs more than once in the
7002 current insn. Perhaps some occurrences aren't reloaded. */
7003 && count_occurrences (PATTERN (insn), old, 0) == 1)
7005 rtx old = SET_DEST (PATTERN (temp));
7006 /* Store into the reload register instead of the pseudo. */
7007 SET_DEST (PATTERN (temp)) = reloadreg;
7009 /* Verify that resulting insn is valid. */
7010 extract_insn (temp);
7011 if (constrain_operands (1))
7013 /* If the previous insn is an output reload, the source is
7014 a reload register, and its spill_reg_store entry will
7015 contain the previous destination. This is now
7016 invalid. */
7017 if (REG_P (SET_SRC (PATTERN (temp)))
7018 && REGNO (SET_SRC (PATTERN (temp))) < FIRST_PSEUDO_REGISTER)
7020 spill_reg_store[REGNO (SET_SRC (PATTERN (temp)))] = 0;
7021 spill_reg_stored_to[REGNO (SET_SRC (PATTERN (temp)))] = 0;
7024 /* If these are the only uses of the pseudo reg,
7025 pretend for GDB it lives in the reload reg we used. */
7026 if (REG_N_DEATHS (REGNO (old)) == 1
7027 && REG_N_SETS (REGNO (old)) == 1)
7029 reg_renumber[REGNO (old)] = REGNO (reloadreg);
7030 if (ira_conflicts_p)
7031 /* Inform IRA about the change. */
7032 ira_mark_allocation_change (REGNO (old));
7033 alter_reg (REGNO (old), -1, false);
7035 special = 1;
7037 /* Adjust any debug insns between temp and insn. */
7038 while ((temp = NEXT_INSN (temp)) != insn)
7039 if (DEBUG_INSN_P (temp))
7040 replace_rtx (PATTERN (temp), old, reloadreg);
7041 else
7042 gcc_assert (NOTE_P (temp));
7044 else
7046 SET_DEST (PATTERN (temp)) = old;
7051 /* We can't do that, so output an insn to load RELOADREG. */
7053 /* If we have a secondary reload, pick up the secondary register
7054 and icode, if any. If OLDEQUIV and OLD are different or
7055 if this is an in-out reload, recompute whether or not we
7056 still need a secondary register and what the icode should
7057 be. If we still need a secondary register and the class or
7058 icode is different, go back to reloading from OLD if using
7059 OLDEQUIV means that we got the wrong type of register. We
7060 cannot have different class or icode due to an in-out reload
7061 because we don't make such reloads when both the input and
7062 output need secondary reload registers. */
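/* Illustrative sketch (register names hypothetical, added as annotation):
   when both a secondary and a tertiary input reload survive the checks
   below, the value travels in stages,

       real_oldequiv -> third_reload_reg -> second_reload_reg -> reloadreg,

   each hop emitted by gen_reload or by the scratch-using icode chosen
   further down.  */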
7064 if (! special && rl->secondary_in_reload >= 0)
7066 rtx second_reload_reg = 0;
7067 rtx third_reload_reg = 0;
7068 int secondary_reload = rl->secondary_in_reload;
7069 rtx real_oldequiv = oldequiv;
7070 rtx real_old = old;
7071 rtx tmp;
7072 enum insn_code icode;
7073 enum insn_code tertiary_icode = CODE_FOR_nothing;
7075 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
7076 and similarly for OLD.
7077 See comments in get_secondary_reload in reload.c. */
7078 /* If it is a pseudo that cannot be replaced with its
7079 equivalent MEM, we must fall back to reload_in, which
7080 will have all the necessary substitutions registered.
7081 Likewise for a pseudo that can't be replaced with its
7082 equivalent constant.
7084 Take extra care for subregs of such pseudos. Note that
7085 we cannot use reg_equiv_mem in this case because it is
7086 not in the right mode. */
7088 tmp = oldequiv;
7089 if (GET_CODE (tmp) == SUBREG)
7090 tmp = SUBREG_REG (tmp);
7091 if (REG_P (tmp)
7092 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
7093 && (reg_equiv_memory_loc[REGNO (tmp)] != 0
7094 || reg_equiv_constant[REGNO (tmp)] != 0))
7096 if (! reg_equiv_mem[REGNO (tmp)]
7097 || num_not_at_initial_offset
7098 || GET_CODE (oldequiv) == SUBREG)
7099 real_oldequiv = rl->in;
7100 else
7101 real_oldequiv = reg_equiv_mem[REGNO (tmp)];
7104 tmp = old;
7105 if (GET_CODE (tmp) == SUBREG)
7106 tmp = SUBREG_REG (tmp);
7107 if (REG_P (tmp)
7108 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
7109 && (reg_equiv_memory_loc[REGNO (tmp)] != 0
7110 || reg_equiv_constant[REGNO (tmp)] != 0))
7112 if (! reg_equiv_mem[REGNO (tmp)]
7113 || num_not_at_initial_offset
7114 || GET_CODE (old) == SUBREG)
7115 real_old = rl->in;
7116 else
7117 real_old = reg_equiv_mem[REGNO (tmp)];
7120 second_reload_reg = rld[secondary_reload].reg_rtx;
7121 if (rld[secondary_reload].secondary_in_reload >= 0)
7123 int tertiary_reload = rld[secondary_reload].secondary_in_reload;
7125 third_reload_reg = rld[tertiary_reload].reg_rtx;
7126 tertiary_icode = rld[secondary_reload].secondary_in_icode;
7127 /* We'd have to add more code for quaternary reloads. */
7128 gcc_assert (rld[tertiary_reload].secondary_in_reload < 0);
7130 icode = rl->secondary_in_icode;
7132 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
7133 || (rl->in != 0 && rl->out != 0))
7135 secondary_reload_info sri, sri2;
7136 enum reg_class new_class, new_t_class;
7138 sri.icode = CODE_FOR_nothing;
7139 sri.prev_sri = NULL;
7140 new_class = targetm.secondary_reload (1, real_oldequiv, rl->rclass,
7141 mode, &sri);
7143 if (new_class == NO_REGS && sri.icode == CODE_FOR_nothing)
7144 second_reload_reg = 0;
7145 else if (new_class == NO_REGS)
7147 if (reload_adjust_reg_for_icode (&second_reload_reg,
7148 third_reload_reg,
7149 (enum insn_code) sri.icode))
7151 icode = (enum insn_code) sri.icode;
7152 third_reload_reg = 0;
7154 else
7156 oldequiv = old;
7157 real_oldequiv = real_old;
7160 else if (sri.icode != CODE_FOR_nothing)
7161 /* We currently lack a way to express this in reloads. */
7162 gcc_unreachable ();
7163 else
7165 sri2.icode = CODE_FOR_nothing;
7166 sri2.prev_sri = &sri;
7167 new_t_class = targetm.secondary_reload (1, real_oldequiv,
7168 new_class, mode, &sri);
7169 if (new_t_class == NO_REGS && sri2.icode == CODE_FOR_nothing)
7171 if (reload_adjust_reg_for_temp (&second_reload_reg,
7172 third_reload_reg,
7173 new_class, mode))
7175 third_reload_reg = 0;
7176 tertiary_icode = (enum insn_code) sri2.icode;
7178 else
7180 oldequiv = old;
7181 real_oldequiv = real_old;
7184 else if (new_t_class == NO_REGS && sri2.icode != CODE_FOR_nothing)
7186 rtx intermediate = second_reload_reg;
7188 if (reload_adjust_reg_for_temp (&intermediate, NULL,
7189 new_class, mode)
7190 && reload_adjust_reg_for_icode (&third_reload_reg, NULL,
7191 ((enum insn_code)
7192 sri2.icode)))
7194 second_reload_reg = intermediate;
7195 tertiary_icode = (enum insn_code) sri2.icode;
7197 else
7199 oldequiv = old;
7200 real_oldequiv = real_old;
7203 else if (new_t_class != NO_REGS && sri2.icode == CODE_FOR_nothing)
7205 rtx intermediate = second_reload_reg;
7207 if (reload_adjust_reg_for_temp (&intermediate, NULL,
7208 new_class, mode)
7209 && reload_adjust_reg_for_temp (&third_reload_reg, NULL,
7210 new_t_class, mode))
7212 second_reload_reg = intermediate;
7213 tertiary_icode = (enum insn_code) sri2.icode;
7215 else
7217 oldequiv = old;
7218 real_oldequiv = real_old;
7221 else
7223 /* This could be handled more intelligently too. */
7224 oldequiv = old;
7225 real_oldequiv = real_old;
7230 /* If we still need a secondary reload register, check
7231 to see if it is being used as a scratch or intermediate
7232 register and generate code appropriately. If we need
7233 a scratch register, use REAL_OLDEQUIV since the form of
7234 the insn may depend on the actual address if it is
7235 a MEM. */
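/* Illustration (pattern name hypothetical): when ICODE is set, a single
   secondary reload pattern such as

       reload_insi (reloadreg, real_oldequiv, second_reload_reg)

   performs the whole load, with SECOND_RELOAD_REG acting only as the
   scratch operand; otherwise the intermediate copies below are used.  */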
7237 if (second_reload_reg)
7239 if (icode != CODE_FOR_nothing)
7241 /* We'd have to add extra code to handle this case. */
7242 gcc_assert (!third_reload_reg);
7244 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
7245 second_reload_reg));
7246 special = 1;
7248 else
7250 /* See if we need a scratch register to load the
7251 intermediate register (a tertiary reload). */
7252 if (tertiary_icode != CODE_FOR_nothing)
7254 emit_insn ((GEN_FCN (tertiary_icode)
7255 (second_reload_reg, real_oldequiv,
7256 third_reload_reg)));
7258 else if (third_reload_reg)
7260 gen_reload (third_reload_reg, real_oldequiv,
7261 rl->opnum,
7262 rl->when_needed);
7263 gen_reload (second_reload_reg, third_reload_reg,
7264 rl->opnum,
7265 rl->when_needed);
7267 else
7268 gen_reload (second_reload_reg, real_oldequiv,
7269 rl->opnum,
7270 rl->when_needed);
7272 oldequiv = second_reload_reg;
7277 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
7279 rtx real_oldequiv = oldequiv;
7281 if ((REG_P (oldequiv)
7282 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
7283 && (reg_equiv_memory_loc[REGNO (oldequiv)] != 0
7284 || reg_equiv_constant[REGNO (oldequiv)] != 0))
7285 || (GET_CODE (oldequiv) == SUBREG
7286 && REG_P (SUBREG_REG (oldequiv))
7287 && (REGNO (SUBREG_REG (oldequiv))
7288 >= FIRST_PSEUDO_REGISTER)
7289 && ((reg_equiv_memory_loc
7290 [REGNO (SUBREG_REG (oldequiv))] != 0)
7291 || (reg_equiv_constant
7292 [REGNO (SUBREG_REG (oldequiv))] != 0)))
7293 || (CONSTANT_P (oldequiv)
7294 && (PREFERRED_RELOAD_CLASS (oldequiv,
7295 REGNO_REG_CLASS (REGNO (reloadreg)))
7296 == NO_REGS)))
7297 real_oldequiv = rl->in;
7298 gen_reload (reloadreg, real_oldequiv, rl->opnum,
7299 rl->when_needed);
7302 if (flag_non_call_exceptions)
7303 copy_reg_eh_region_note_forward (insn, get_insns (), NULL);
7305 /* End this sequence. */
7306 *where = get_insns ();
7307 end_sequence ();
7309 /* Update reload_override_in so that delete_address_reloads_1
7310 can see the actual register usage. */
7311 if (oldequiv_reg)
7312 reload_override_in[j] = oldequiv;
7315 /* Generate insns for the output reload RL, which is for the insn described
7316 by CHAIN and has the number J. */
7317 static void
7318 emit_output_reload_insns (struct insn_chain *chain, struct reload *rl,
7319 int j)
7321 rtx reloadreg;
7322 rtx insn = chain->insn;
7323 int special = 0;
7324 rtx old = rl->out;
7325 enum machine_mode mode;
7326 rtx p;
7327 rtx rl_reg_rtx;
7329 if (rl->when_needed == RELOAD_OTHER)
7330 start_sequence ();
7331 else
7332 push_to_sequence (output_reload_insns[rl->opnum]);
7334 rl_reg_rtx = reload_reg_rtx_for_output[j];
7335 mode = GET_MODE (rl_reg_rtx);
7337 reloadreg = rl_reg_rtx;
7339 /* If we need two reload regs, set RELOADREG to the intermediate
7340 one, since it will be stored into OLD. We might need a secondary
7341 register only for an input reload, so check again here. */
7343 if (rl->secondary_out_reload >= 0)
7345 rtx real_old = old;
7346 int secondary_reload = rl->secondary_out_reload;
7347 int tertiary_reload = rld[secondary_reload].secondary_out_reload;
7349 if (REG_P (old) && REGNO (old) >= FIRST_PSEUDO_REGISTER
7350 && reg_equiv_mem[REGNO (old)] != 0)
7351 real_old = reg_equiv_mem[REGNO (old)];
7353 if (secondary_reload_class (0, rl->rclass, mode, real_old) != NO_REGS)
7355 rtx second_reloadreg = reloadreg;
7356 reloadreg = rld[secondary_reload].reg_rtx;
7358 /* See if RELOADREG is to be used as a scratch register
7359 or as an intermediate register. */
7360 if (rl->secondary_out_icode != CODE_FOR_nothing)
7362 /* We'd have to add extra code to handle this case. */
7363 gcc_assert (tertiary_reload < 0);
7365 emit_insn ((GEN_FCN (rl->secondary_out_icode)
7366 (real_old, second_reloadreg, reloadreg)));
7367 special = 1;
7369 else
7371 /* See if we need both a scratch and intermediate reload
7372 register. */
7374 enum insn_code tertiary_icode
7375 = rld[secondary_reload].secondary_out_icode;
7377 /* We'd have to add more code for quaternary reloads. */
7378 gcc_assert (tertiary_reload < 0
7379 || rld[tertiary_reload].secondary_out_reload < 0);
7381 if (GET_MODE (reloadreg) != mode)
7382 reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);
7384 if (tertiary_icode != CODE_FOR_nothing)
7386 rtx third_reloadreg = rld[tertiary_reload].reg_rtx;
7387 rtx tem;
7389 /* Copy primary reload reg to secondary reload reg.
7390 (Note that these have been swapped above, then
7391 secondary reload reg to OLD using our insn.) */
7393 /* If REAL_OLD is a paradoxical SUBREG, remove it
7394 and try to put the opposite SUBREG on
7395 RELOADREG. */
7396 if (GET_CODE (real_old) == SUBREG
7397 && (GET_MODE_SIZE (GET_MODE (real_old))
7398 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
7399 && 0 != (tem = gen_lowpart_common
7400 (GET_MODE (SUBREG_REG (real_old)),
7401 reloadreg)))
7402 real_old = SUBREG_REG (real_old), reloadreg = tem;
7404 gen_reload (reloadreg, second_reloadreg,
7405 rl->opnum, rl->when_needed);
7406 emit_insn ((GEN_FCN (tertiary_icode)
7407 (real_old, reloadreg, third_reloadreg)));
7408 special = 1;
7411 else
7413 /* Copy between the reload regs here and then to
7414 OUT later. */
7416 gen_reload (reloadreg, second_reloadreg,
7417 rl->opnum, rl->when_needed);
7418 if (tertiary_reload >= 0)
7420 rtx third_reloadreg = rld[tertiary_reload].reg_rtx;
7422 gen_reload (third_reloadreg, reloadreg,
7423 rl->opnum, rl->when_needed);
7424 reloadreg = third_reloadreg;
7431 /* Output the last reload insn. */
7432 if (! special)
7434 rtx set;
7436 /* Don't output the last reload if OLD is not the destination of
7437 INSN, appears in its source, and is clobbered by INSN. */
7438 if (! flag_expensive_optimizations
7439 || !REG_P (old)
7440 || !(set = single_set (insn))
7441 || rtx_equal_p (old, SET_DEST (set))
7442 || !reg_mentioned_p (old, SET_SRC (set))
7443 || !((REGNO (old) < FIRST_PSEUDO_REGISTER)
7444 && regno_clobbered_p (REGNO (old), insn, rl->mode, 0)))
7445 gen_reload (old, reloadreg, rl->opnum,
7446 rl->when_needed);
7449 /* Look at all insns we emitted, just to be safe. */
7450 for (p = get_insns (); p; p = NEXT_INSN (p))
7451 if (INSN_P (p))
7453 rtx pat = PATTERN (p);
7455 /* If this output reload doesn't come from a spill reg,
7456 clear any memory of reloaded copies of the pseudo reg.
7457 If this output reload comes from a spill reg,
7458 reg_has_output_reload will make this do nothing. */
7459 note_stores (pat, forget_old_reloads_1, NULL);
7461 if (reg_mentioned_p (rl_reg_rtx, pat))
7463 rtx set = single_set (insn);
7464 if (reload_spill_index[j] < 0
7465 && set
7466 && SET_SRC (set) == rl_reg_rtx)
7468 int src = REGNO (SET_SRC (set));
7470 reload_spill_index[j] = src;
7471 SET_HARD_REG_BIT (reg_is_output_reload, src);
7472 if (find_regno_note (insn, REG_DEAD, src))
7473 SET_HARD_REG_BIT (reg_reloaded_died, src);
7475 if (HARD_REGISTER_P (rl_reg_rtx))
7477 int s = rl->secondary_out_reload;
7478 set = single_set (p);
7479 /* If this reload copies only to the secondary reload
7480 register, the secondary reload does the actual
7481 store. */
7482 if (s >= 0 && set == NULL_RTX)
7483 /* We can't tell what function the secondary reload
7484 has and where the actual store to the pseudo is
7485 made; leave new_spill_reg_store alone. */
7487 else if (s >= 0
7488 && SET_SRC (set) == rl_reg_rtx
7489 && SET_DEST (set) == rld[s].reg_rtx)
7491 /* Usually the next instruction will be the
7492 secondary reload insn; if we can confirm
7493 that it is, setting new_spill_reg_store to
7494 that insn will allow an extra optimization. */
7495 rtx s_reg = rld[s].reg_rtx;
7496 rtx next = NEXT_INSN (p);
7497 rld[s].out = rl->out;
7498 rld[s].out_reg = rl->out_reg;
7499 set = single_set (next);
7500 if (set && SET_SRC (set) == s_reg
7501 && ! new_spill_reg_store[REGNO (s_reg)])
7503 SET_HARD_REG_BIT (reg_is_output_reload,
7504 REGNO (s_reg));
7505 new_spill_reg_store[REGNO (s_reg)] = next;
7508 else
7509 new_spill_reg_store[REGNO (rl_reg_rtx)] = p;
7514 if (rl->when_needed == RELOAD_OTHER)
7516 emit_insn (other_output_reload_insns[rl->opnum]);
7517 other_output_reload_insns[rl->opnum] = get_insns ();
7519 else
7520 output_reload_insns[rl->opnum] = get_insns ();
7522 if (flag_non_call_exceptions)
7523 copy_reg_eh_region_note_forward (insn, get_insns (), NULL);
7525 end_sequence ();
7528 /* Do input reloading for reload RL, which is for the insn described by CHAIN
7529 and has the number J. */
7530 static void
7531 do_input_reload (struct insn_chain *chain, struct reload *rl, int j)
7533 rtx insn = chain->insn;
7534 rtx old = (rl->in && MEM_P (rl->in)
7535 ? rl->in_reg : rl->in);
7536 rtx reg_rtx = rl->reg_rtx;
7538 if (old && reg_rtx)
7540 enum machine_mode mode;
7542 /* Determine the mode to reload in.
7543 This is very tricky because we have three to choose from.
7544 There is the mode the insn operand wants (rl->inmode).
7545 There is the mode of the reload register RELOADREG.
7546 There is the intrinsic mode of the operand, which we could find
7547 by stripping some SUBREGs.
7548 It turns out that RELOADREG's mode is irrelevant:
7549 we can change that arbitrarily.
7551 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
7552 then the reload reg may not support QImode moves, so use SImode.
7553 If foo is in memory due to spilling a pseudo reg, this is safe,
7554 because the QImode value is in the least significant part of a
7555 slot big enough for a SImode. If foo is some other sort of
7556 memory reference, then it is impossible to reload this case,
7557 so previous passes had better make sure this never happens.
7559 Then consider a one-word union which has SImode and one of its
7560 members is a float, being fetched as (SUBREG:SF union:SI).
7561 We must fetch that as SFmode because we could be loading into
7562 a float-only register. In this case OLD's mode is correct.
7564 Consider an immediate integer: it has VOIDmode. Here we need
7565 to get a mode from something else.
7567 In some cases, there is a fourth mode, the operand's
7568 containing mode. If the insn specifies a containing mode for
7569 this operand, it overrides all others.
7571 I am not sure whether the algorithm here is always right,
7572 but it does the right things in those cases. */
7574 mode = GET_MODE (old);
7575 if (mode == VOIDmode)
7576 mode = rl->inmode;
7578 /* We cannot use gen_lowpart_common since it can do the wrong thing
7579 when REG_RTX has a multi-word mode. Note that REG_RTX must
7580 always be a REG here. */
7581 if (GET_MODE (reg_rtx) != mode)
7582 reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
7584 reload_reg_rtx_for_input[j] = reg_rtx;
7586 if (old != 0
7587 /* AUTO_INC reloads need to be handled even if inherited. We got an
7588 AUTO_INC reload if reload_out is set but reload_out_reg isn't. */
7589 && (! reload_inherited[j] || (rl->out && ! rl->out_reg))
7590 && ! rtx_equal_p (reg_rtx, old)
7591 && reg_rtx != 0)
7592 emit_input_reload_insns (chain, rld + j, old, j);
7594 /* When inheriting a wider reload, we have a MEM in rl->in,
7595 e.g. inheriting a SImode output reload for
7596 (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10))) */
7597 if (optimize && reload_inherited[j] && rl->in
7598 && MEM_P (rl->in)
7599 && MEM_P (rl->in_reg)
7600 && reload_spill_index[j] >= 0
7601 && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
7602 rl->in = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];
7604 /* If we are reloading a register that was recently stored in with an
7605 output-reload, see if we can prove there was
7606 actually no need to store the old value in it. */
7608 if (optimize
7609 && (reload_inherited[j] || reload_override_in[j])
7610 && reg_rtx
7611 && REG_P (reg_rtx)
7612 && spill_reg_store[REGNO (reg_rtx)] != 0
7613 #if 0
7614 /* There doesn't seem to be any reason to restrict this to pseudos
7615 and doing so loses in the case where we are copying from a
7616 register of the wrong class. */
7617 && !HARD_REGISTER_P (spill_reg_stored_to[REGNO (reg_rtx)])
7618 #endif
7619 /* The insn might have already some references to stackslots
7620 replaced by MEMs, while reload_out_reg still names the
7621 original pseudo. */
7622 && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (reg_rtx)])
7623 || rtx_equal_p (spill_reg_stored_to[REGNO (reg_rtx)], rl->out_reg)))
7624 delete_output_reload (insn, j, REGNO (reg_rtx), reg_rtx);
7627 /* Do output reloading for reload RL, which is for the insn described by
7628 CHAIN and has the number J.
7629 ??? At some point we need to support handling output reloads of
7630 JUMP_INSNs or insns that set cc0. */
7631 static void
7632 do_output_reload (struct insn_chain *chain, struct reload *rl, int j)
7634 rtx note, old;
7635 rtx insn = chain->insn;
7636 /* If this is an output reload that stores something that is
7637 not loaded in this same reload, see if we can eliminate a previous
7638 store. */
7639 rtx pseudo = rl->out_reg;
7640 rtx reg_rtx = rl->reg_rtx;
7642 if (rl->out && reg_rtx)
7644 enum machine_mode mode;
7646 /* Determine the mode to reload in.
7647 See comments above (for input reloading). */
7648 mode = GET_MODE (rl->out);
7649 if (mode == VOIDmode)
7651 /* VOIDmode should never happen for an output. */
7652 if (asm_noperands (PATTERN (insn)) < 0)
7653 /* It's the compiler's fault. */
7654 fatal_insn ("VOIDmode on an output", insn);
7655 error_for_asm (insn, "output operand is constant in %<asm%>");
7656 /* Prevent crash--use something we know is valid. */
7657 mode = word_mode;
7658 rl->out = gen_rtx_REG (mode, REGNO (reg_rtx));
7660 if (GET_MODE (reg_rtx) != mode)
7661 reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
7663 reload_reg_rtx_for_output[j] = reg_rtx;
7665 if (pseudo
7666 && optimize
7667 && REG_P (pseudo)
7668 && ! rtx_equal_p (rl->in_reg, pseudo)
7669 && REGNO (pseudo) >= FIRST_PSEUDO_REGISTER
7670 && reg_last_reload_reg[REGNO (pseudo)])
7672 int pseudo_no = REGNO (pseudo);
7673 int last_regno = REGNO (reg_last_reload_reg[pseudo_no]);
7675 /* We don't need to test full validity of last_regno for
7676 inherit here; we only want to know if the store actually
7677 matches the pseudo. */
7678 if (TEST_HARD_REG_BIT (reg_reloaded_valid, last_regno)
7679 && reg_reloaded_contents[last_regno] == pseudo_no
7680 && spill_reg_store[last_regno]
7681 && rtx_equal_p (pseudo, spill_reg_stored_to[last_regno]))
7682 delete_output_reload (insn, j, last_regno, reg_rtx);
7685 old = rl->out_reg;
7686 if (old == 0
7687 || reg_rtx == 0
7688 || rtx_equal_p (old, reg_rtx))
7689 return;
7691 /* An output operand that dies right away does need a reload,
7692 but need not be copied from it. Show the new location in the
7693 REG_UNUSED note. */
7694 if ((REG_P (old) || GET_CODE (old) == SCRATCH)
7695 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
7697 XEXP (note, 0) = reg_rtx;
7698 return;
7700 /* Likewise for a SUBREG of an operand that dies. */
7701 else if (GET_CODE (old) == SUBREG
7702 && REG_P (SUBREG_REG (old))
7703 && 0 != (note = find_reg_note (insn, REG_UNUSED,
7704 SUBREG_REG (old))))
7706 XEXP (note, 0) = gen_lowpart_common (GET_MODE (old), reg_rtx);
7707 return;
7709 else if (GET_CODE (old) == SCRATCH)
7710 /* If we aren't optimizing, there won't be a REG_UNUSED note,
7711 but we don't want to make an output reload. */
7712 return;
7714 /* If this is a JUMP_INSN, we can't support output reloads yet. */
7715 gcc_assert (NONJUMP_INSN_P (insn));
7717 emit_output_reload_insns (chain, rld + j, j);
7720 /* A reload copies values of MODE from register SRC to register DEST.
7721 Return true if it can be treated for inheritance purposes like a
7722 group of reloads, each one reloading a single hard register. The
7723 caller has already checked that (reg:MODE SRC) and (reg:MODE DEST)
7724 occupy the same number of hard registers. */
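/* Illustrative example (assumption: a target with word-sized registers):
   a DImode reload copied between two hard-register pairs can be recorded
   for inheritance as two independent single-register pieces, unless the
   CANNOT_CHANGE_MODE_CLASS machinery below forbids accessing one of the
   registers in its raw mode.  */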
7726 static bool
7727 inherit_piecemeal_p (int dest ATTRIBUTE_UNUSED,
7728 int src ATTRIBUTE_UNUSED,
7729 enum machine_mode mode ATTRIBUTE_UNUSED)
7731 #ifdef CANNOT_CHANGE_MODE_CLASS
7732 return (!REG_CANNOT_CHANGE_MODE_P (dest, mode, reg_raw_mode[dest])
7733 && !REG_CANNOT_CHANGE_MODE_P (src, mode, reg_raw_mode[src]));
7734 #else
7735 return true;
7736 #endif
7739 /* Output insns to reload values in and out of the chosen reload regs. */
7741 static void
7742 emit_reload_insns (struct insn_chain *chain)
7744 rtx insn = chain->insn;
7746 int j;
7748 CLEAR_HARD_REG_SET (reg_reloaded_died);
7750 for (j = 0; j < reload_n_operands; j++)
7751 input_reload_insns[j] = input_address_reload_insns[j]
7752 = inpaddr_address_reload_insns[j]
7753 = output_reload_insns[j] = output_address_reload_insns[j]
7754 = outaddr_address_reload_insns[j]
7755 = other_output_reload_insns[j] = 0;
7756 other_input_address_reload_insns = 0;
7757 other_input_reload_insns = 0;
7758 operand_reload_insns = 0;
7759 other_operand_reload_insns = 0;
7761 /* Dump reloads into the dump file. */
7762 if (dump_file)
7764 fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
7765 debug_reload_to_stream (dump_file);
7768 /* Now output the instructions to copy the data into and out of the
7769 reload registers. Do these in the order that the reloads were reported,
7770 since reloads of base and index registers precede reloads of operands
7771 and the operands may need the base and index registers reloaded. */
7773 for (j = 0; j < n_reloads; j++)
7775 if (rld[j].reg_rtx && HARD_REGISTER_P (rld[j].reg_rtx))
7777 unsigned int i;
7779 for (i = REGNO (rld[j].reg_rtx); i < END_REGNO (rld[j].reg_rtx); i++)
7780 new_spill_reg_store[i] = 0;
7783 do_input_reload (chain, rld + j, j);
7784 do_output_reload (chain, rld + j, j);
7787 /* Now write all the insns we made for reloads in the order expected by
7788 the allocation functions. Prior to the insn being reloaded, we write
7789 the following reloads:
7791 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
7793 RELOAD_OTHER reloads.
7795 For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
7796 by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
7797 RELOAD_FOR_INPUT reload for the operand.
7799 RELOAD_FOR_OPADDR_ADDRS reloads.
7801 RELOAD_FOR_OPERAND_ADDRESS reloads.
7803 After the insn being reloaded, we write the following:
7805 For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
7806 by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
7807 RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
7808 reloads for the operand. The RELOAD_OTHER output reloads are
7809 output in descending order by reload number. */
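/* Condensed illustration of the resulting stream, derived from the
   ordering described above (every reload group is optional):

       other-address loads, RELOAD_OTHER input loads,
       per-operand inpaddr / input-address / input loads,
       opaddr and operand-address loads,
       INSN,
       per-operand outaddr / output-address / output stores,
       RELOAD_OTHER output stores (descending reload number).  */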
7811 emit_insn_before (other_input_address_reload_insns, insn);
7812 emit_insn_before (other_input_reload_insns, insn);
7814 for (j = 0; j < reload_n_operands; j++)
7816 emit_insn_before (inpaddr_address_reload_insns[j], insn);
7817 emit_insn_before (input_address_reload_insns[j], insn);
7818 emit_insn_before (input_reload_insns[j], insn);
7821 emit_insn_before (other_operand_reload_insns, insn);
7822 emit_insn_before (operand_reload_insns, insn);
7824 for (j = 0; j < reload_n_operands; j++)
7826 rtx x = emit_insn_after (outaddr_address_reload_insns[j], insn);
7827 x = emit_insn_after (output_address_reload_insns[j], x);
7828 x = emit_insn_after (output_reload_insns[j], x);
7829 emit_insn_after (other_output_reload_insns[j], x);
7832 /* For all the spill regs newly reloaded in this instruction,
7833 record what they were reloaded from, so subsequent instructions
7834 can inherit the reloads.
7836 Update spill_reg_store for the reloads of this insn.
7837 Copy the elements that were updated in the loop above. */
7839 for (j = 0; j < n_reloads; j++)
7841 int r = reload_order[j];
7842 int i = reload_spill_index[r];
7844 /* If this is a non-inherited input reload from a pseudo, we must
7845 clear any memory of a previous store to the same pseudo. Only do
7846 something if there will not be an output reload for the pseudo
7847 being reloaded. */
7848 if (rld[r].in_reg != 0
7849 && ! (reload_inherited[r] || reload_override_in[r]))
7851 rtx reg = rld[r].in_reg;
7853 if (GET_CODE (reg) == SUBREG)
7854 reg = SUBREG_REG (reg);
7856 if (REG_P (reg)
7857 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
7858 && !REGNO_REG_SET_P (&reg_has_output_reload, REGNO (reg)))
7860 int nregno = REGNO (reg);
7862 if (reg_last_reload_reg[nregno])
7864 int last_regno = REGNO (reg_last_reload_reg[nregno]);
7866 if (reg_reloaded_contents[last_regno] == nregno)
7867 spill_reg_store[last_regno] = 0;
7872 /* I is nonneg if this reload used a register.
7873 If rld[r].reg_rtx is 0, this is an optional reload
7874 that we opted to ignore. */
7876 if (i >= 0 && rld[r].reg_rtx != 0)
7878 int nr = hard_regno_nregs[i][GET_MODE (rld[r].reg_rtx)];
7879 int k;
7881 /* For a multi register reload, we need to check if all or part
7882 of the value lives to the end. */
7883 for (k = 0; k < nr; k++)
7884 if (reload_reg_reaches_end_p (i + k, rld[r].opnum,
7885 rld[r].when_needed))
7886 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
7888 /* Maybe the spill reg contains a copy of reload_out. */
7889 if (rld[r].out != 0
7890 && (REG_P (rld[r].out)
7891 #ifdef AUTO_INC_DEC
7892 || ! rld[r].out_reg
7893 #endif
7894 || REG_P (rld[r].out_reg)))
7896 rtx reg;
7897 enum machine_mode mode;
7898 int regno, nregs;
7900 reg = reload_reg_rtx_for_output[r];
7901 mode = GET_MODE (reg);
7902 regno = REGNO (reg);
7903 nregs = hard_regno_nregs[regno][mode];
7904 if (reload_regs_reach_end_p (regno, nregs, rld[r].opnum,
7905 rld[r].when_needed))
7907 rtx out = (REG_P (rld[r].out)
7908 ? rld[r].out
7909 : rld[r].out_reg
7910 ? rld[r].out_reg
7911 /* AUTO_INC */ : XEXP (rld[r].in_reg, 0));
7912 int out_regno = REGNO (out);
7913 int out_nregs = (!HARD_REGISTER_NUM_P (out_regno) ? 1
7914 : hard_regno_nregs[out_regno][mode]);
7915 bool piecemeal;
7917 spill_reg_store[regno] = new_spill_reg_store[regno];
7918 spill_reg_stored_to[regno] = out;
7919 reg_last_reload_reg[out_regno] = reg;
7921 piecemeal = (HARD_REGISTER_NUM_P (out_regno)
7922 && nregs == out_nregs
7923 && inherit_piecemeal_p (out_regno, regno, mode));
7925 /* If OUT_REGNO is a hard register, it may occupy more than
7926 one register. If it does, say what is in the
7927 rest of the registers assuming that both registers
7928 agree on how many words the object takes. If not,
7929 invalidate the subsequent registers. */
7931 if (HARD_REGISTER_NUM_P (out_regno))
7932 for (k = 1; k < out_nregs; k++)
7933 reg_last_reload_reg[out_regno + k]
7934 = (piecemeal ? regno_reg_rtx[regno + k] : 0);
7936 /* Now do the inverse operation. */
7937 for (k = 0; k < nregs; k++)
7939 CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
7940 reg_reloaded_contents[regno + k]
7941 = (!HARD_REGISTER_NUM_P (out_regno) || !piecemeal
7942 ? out_regno
7943 : out_regno + k);
7944 reg_reloaded_insn[regno + k] = insn;
7945 SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
7946 if (HARD_REGNO_CALL_PART_CLOBBERED (regno + k, mode))
7947 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
7948 regno + k);
7949 else
7950 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
7951 regno + k);
7955 /* Maybe the spill reg contains a copy of reload_in. Only do
7956 something if there will not be an output reload for
7957 the register being reloaded. */
7958 else if (rld[r].out_reg == 0
7959 && rld[r].in != 0
7960 && ((REG_P (rld[r].in)
7961 && !HARD_REGISTER_P (rld[r].in)
7962 && !REGNO_REG_SET_P (&reg_has_output_reload,
7963 REGNO (rld[r].in)))
7964 || (REG_P (rld[r].in_reg)
7965 && !REGNO_REG_SET_P (&reg_has_output_reload,
7966 REGNO (rld[r].in_reg))))
7967 && !reg_set_p (reload_reg_rtx_for_input[r], PATTERN (insn)))
7969 rtx reg;
7970 enum machine_mode mode;
7971 int regno, nregs;
7973 reg = reload_reg_rtx_for_input[r];
7974 mode = GET_MODE (reg);
7975 regno = REGNO (reg);
7976 nregs = hard_regno_nregs[regno][mode];
7977 if (reload_regs_reach_end_p (regno, nregs, rld[r].opnum,
7978 rld[r].when_needed))
7980 int in_regno;
7981 int in_nregs;
7982 rtx in;
7983 bool piecemeal;
7985 if (REG_P (rld[r].in)
7986 && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER)
7987 in = rld[r].in;
7988 else if (REG_P (rld[r].in_reg))
7989 in = rld[r].in_reg;
7990 else
7991 in = XEXP (rld[r].in_reg, 0);
7992 in_regno = REGNO (in);
7994 in_nregs = (!HARD_REGISTER_NUM_P (in_regno) ? 1
7995 : hard_regno_nregs[in_regno][mode]);
7997 reg_last_reload_reg[in_regno] = reg;
7999 piecemeal = (HARD_REGISTER_NUM_P (in_regno)
8000 && nregs == in_nregs
8001 && inherit_piecemeal_p (regno, in_regno, mode));
8003 if (HARD_REGISTER_NUM_P (in_regno))
8004 for (k = 1; k < in_nregs; k++)
8005 reg_last_reload_reg[in_regno + k]
8006 = (piecemeal ? regno_reg_rtx[regno + k] : 0);
8008 /* Unless we inherited this reload, show we haven't
8009 recently done a store.
8010 Previous stores of inherited auto_inc expressions
8011 also have to be discarded. */
8012 if (! reload_inherited[r]
8013 || (rld[r].out && ! rld[r].out_reg))
8014 spill_reg_store[regno] = 0;
8016 for (k = 0; k < nregs; k++)
8018 CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
8019 reg_reloaded_contents[regno + k]
8020 = (!HARD_REGISTER_NUM_P (in_regno) || !piecemeal
8021 ? in_regno
8022 : in_regno + k);
8023 reg_reloaded_insn[regno + k] = insn;
8024 SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
8025 if (HARD_REGNO_CALL_PART_CLOBBERED (regno + k, mode))
8026 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8027 regno + k);
8028 else
8029 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8030 regno + k);
8036 /* The following if-statement was #if 0'd in 1.34 (or before...).
8037 It's reenabled in 1.35 because supposedly nothing else
8038 deals with this problem. */
8040 /* If a register gets output-reloaded from a non-spill register,
8041 that invalidates any previous reloaded copy of it.
8042 But forget_old_reloads_1 won't get to see it, because
8043 it thinks only about the original insn. So invalidate it here.
8044 Also do the same thing for RELOAD_OTHER constraints where the
8045 output is discarded. */
8046 if (i < 0
8047 && ((rld[r].out != 0
8048 && (REG_P (rld[r].out)
8049 || (MEM_P (rld[r].out)
8050 && REG_P (rld[r].out_reg))))
8051 || (rld[r].out == 0 && rld[r].out_reg
8052 && REG_P (rld[r].out_reg))))
8054 rtx out = ((rld[r].out && REG_P (rld[r].out))
8055 ? rld[r].out : rld[r].out_reg);
8056 int out_regno = REGNO (out);
8057 enum machine_mode mode = GET_MODE (out);
8059 /* REG_RTX is now set or clobbered by the main instruction.
8060 As the comment above explains, forget_old_reloads_1 only
8061 sees the original instruction, and there is no guarantee
8062 that the original instruction also clobbered REG_RTX.
8063 For example, if find_reloads sees that the input side of
8064 a matched operand pair dies in this instruction, it may
8065 use the input register as the reload register.
8067 Calling forget_old_reloads_1 is a waste of effort if
8068 REG_RTX is also the output register.
8070 If we know that REG_RTX holds the value of a pseudo
8071 register, the code after the call will record that fact. */
8072 if (rld[r].reg_rtx && rld[r].reg_rtx != out)
8073 forget_old_reloads_1 (rld[r].reg_rtx, NULL_RTX, NULL);
8075 if (!HARD_REGISTER_NUM_P (out_regno))
8077 rtx src_reg, store_insn = NULL_RTX;
8079 reg_last_reload_reg[out_regno] = 0;
8081 /* If we can find a hard register that is stored, record
8082 the storing insn so that we may delete this insn with
8083 delete_output_reload. */
8084 src_reg = reload_reg_rtx_for_output[r];
8086 /* If this is an optional reload, try to find the source reg
8087 from an input reload. */
8088 if (! src_reg)
8090 rtx set = single_set (insn);
8091 if (set && SET_DEST (set) == rld[r].out)
8093 int k;
8095 src_reg = SET_SRC (set);
8096 store_insn = insn;
8097 for (k = 0; k < n_reloads; k++)
8099 if (rld[k].in == src_reg)
8101 src_reg = reload_reg_rtx_for_input[k];
8102 break;
8107 else
8108 store_insn = new_spill_reg_store[REGNO (src_reg)];
8109 if (src_reg && REG_P (src_reg)
8110 && REGNO (src_reg) < FIRST_PSEUDO_REGISTER)
8112 int src_regno, src_nregs, k;
8113 rtx note;
8115 gcc_assert (GET_MODE (src_reg) == mode);
8116 src_regno = REGNO (src_reg);
8117 src_nregs = hard_regno_nregs[src_regno][mode];
8118 /* The place to find a death note varies with
8119 PRESERVE_DEATH_INFO_REGNO_P. The condition is not
8120 necessarily checked exactly in the code that moves
8121 notes, so just check both locations. */
8122 note = find_regno_note (insn, REG_DEAD, src_regno);
8123 if (! note && store_insn)
8124 note = find_regno_note (store_insn, REG_DEAD, src_regno);
8125 for (k = 0; k < src_nregs; k++)
8127 spill_reg_store[src_regno + k] = store_insn;
8128 spill_reg_stored_to[src_regno + k] = out;
8129 reg_reloaded_contents[src_regno + k] = out_regno;
8130 reg_reloaded_insn[src_regno + k] = store_insn;
8131 CLEAR_HARD_REG_BIT (reg_reloaded_dead, src_regno + k);
8132 SET_HARD_REG_BIT (reg_reloaded_valid, src_regno + k);
8133 if (HARD_REGNO_CALL_PART_CLOBBERED (src_regno + k,
8134 mode))
8135 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8136 src_regno + k);
8137 else
8138 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8139 src_regno + k);
8140 SET_HARD_REG_BIT (reg_is_output_reload, src_regno + k);
8141 if (note)
8142 SET_HARD_REG_BIT (reg_reloaded_died, src_regno);
8143 else
8144 CLEAR_HARD_REG_BIT (reg_reloaded_died, src_regno);
8146 reg_last_reload_reg[out_regno] = src_reg;
8147 /* We have to set reg_has_output_reload here, or else
8148 forget_old_reloads_1 will clear reg_last_reload_reg
8149 right away. */
8150 SET_REGNO_REG_SET (&reg_has_output_reload,
8151 out_regno);
8154 else
8156 int k, out_nregs = hard_regno_nregs[out_regno][mode];
8158 for (k = 0; k < out_nregs; k++)
8159 reg_last_reload_reg[out_regno + k] = 0;
8163 IOR_HARD_REG_SET (reg_reloaded_dead, reg_reloaded_died);
8166 /* Go through the motions to emit INSN and test if it is strictly valid.
8167 Return the emitted insn if valid, else return NULL. */
8169 static rtx
8170 emit_insn_if_valid_for_reload (rtx insn)
8172 rtx last = get_last_insn ();
8173 int code;
8175 insn = emit_insn (insn);
8176 code = recog_memoized (insn);
8178 if (code >= 0)
8180 extract_insn (insn);
8181 /* We want constrain operands to treat this insn strictly in its
8182 validity determination, i.e., the way it would after reload has
8183 completed. */
8184 if (constrain_operands (1))
8185 return insn;
8188 delete_insns_since (last);
8189 return NULL;
8192 /* Emit code to perform a reload from IN (which may be a reload register) to
8193 OUT (which may also be a reload register). IN or OUT is from operand
8194 OPNUM with reload type TYPE.
8196 Returns first insn emitted. */
8198 static rtx
8199 gen_reload (rtx out, rtx in, int opnum, enum reload_type type)
8201 rtx last = get_last_insn ();
8202 rtx tem;
8204 /* If IN is a paradoxical SUBREG, remove it and try to put the
8205 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
8206 if (GET_CODE (in) == SUBREG
8207 && (GET_MODE_SIZE (GET_MODE (in))
8208 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
8209 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
8210 in = SUBREG_REG (in), out = tem;
8211 else if (GET_CODE (out) == SUBREG
8212 && (GET_MODE_SIZE (GET_MODE (out))
8213 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
8214 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
8215 out = SUBREG_REG (out), in = tem;
8217 /* How to do this reload can get quite tricky. Normally, we are being
8218 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
8219 register that didn't get a hard register. In that case we can just
8220 call emit_move_insn.
8222 We can also be asked to reload a PLUS that adds a register or a MEM to
8223 another register, constant or MEM. This can occur during frame pointer
8224 elimination and while reloading addresses. This case is handled by
8225 trying to emit a single insn to perform the add. If it is not valid,
8226 we use a two insn sequence.
8228 Or we can be asked to reload a unary operand that was a fragment of
8229 an addressing mode, into a register. If it isn't recognized as-is,
8230 we try making the unop operand and the reload-register the same:
8231 (set reg:X (unop:X expr:Y))
8232 -> (set reg:Y expr:Y) (set reg:X (unop:X reg:Y)).
8234 Finally, we could be called to handle an 'o' constraint by putting
8235 an address into a register. In that case, we first try to do this
8236 with a named pattern of "reload_load_address". If no such pattern
8237 exists, we just emit a SET insn and hope for the best (it will normally
8238 be valid on machines that use 'o').
8240 This entire process is made complex both because reload will never
8241 process the insns we generate here (so we must ensure that they
8242 will fit their constraints) and because parts of IN might be being
8243 reloaded separately and replaced with spill registers.
8244 Because of this, we are, in some sense, just guessing the right approach
8245 here. The one listed above seems to work.
8247 ??? At some point, this whole thing needs to be rethought. */
8249 if (GET_CODE (in) == PLUS
8250 && (REG_P (XEXP (in, 0))
8251 || GET_CODE (XEXP (in, 0)) == SUBREG
8252 || MEM_P (XEXP (in, 0)))
8253 && (REG_P (XEXP (in, 1))
8254 || GET_CODE (XEXP (in, 1)) == SUBREG
8255 || CONSTANT_P (XEXP (in, 1))
8256 || MEM_P (XEXP (in, 1))))
8258 /* We need to compute the sum of a register or a MEM and another
8259 register, constant, or MEM, and put it into the reload
8260 register. The best possible way of doing this is if the machine
8261 has a three-operand ADD insn that accepts the required operands.
8263 The simplest approach is to try to generate such an insn and see if it
8264 is recognized and matches its constraints. If so, it can be used.
8266 It might be better not to actually emit the insn unless it is valid,
8267 but we need to pass the insn as an operand to `recog' and
8268 `extract_insn' and it is simpler to emit and then delete the insn if
8269 not valid than to dummy things up. */
8271 rtx op0, op1, tem, insn;
8272 int code;
8274 op0 = find_replacement (&XEXP (in, 0));
8275 op1 = find_replacement (&XEXP (in, 1));
8277 /* Since constraint checking is strict, commutativity won't be
8278 checked, so we need to do that here to avoid spurious failure
8279 if the add instruction is two-address and the second operand
8280 of the add is the same as the reload reg, which is frequently
8281 the case. If the insn would be A = B + A, rearrange it so
8282 it will be A = A + B as constrain_operands expects. */
8284 if (REG_P (XEXP (in, 1))
8285 && REGNO (out) == REGNO (XEXP (in, 1)))
8286 tem = op0, op0 = op1, op1 = tem;
8288 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
8289 in = gen_rtx_PLUS (GET_MODE (in), op0, op1);
8291 insn = emit_insn_if_valid_for_reload (gen_rtx_SET (VOIDmode, out, in));
8292 if (insn)
8293 return insn;
8295 /* If that failed, we must use a conservative two-insn sequence.
8297 Use a move to copy one operand into the reload register. Prefer
8298 to reload a constant, MEM or pseudo since the move patterns can
8299 handle an arbitrary operand. If OP1 is not a constant, MEM or
8300 pseudo and OP1 is not a valid operand for an add instruction, then
8301 reload OP1.
8303 After reloading one of the operands into the reload register, add
8304 the reload register to the output register.
8306 If there is another way to do this for a specific machine, a
8307 DEFINE_PEEPHOLE should be specified that recognizes the sequence
8308 we emit below. */
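/* Hedged illustration (register numbers hypothetical): for
   IN = (plus:SI (reg:SI fp) (const_int 64)) and OUT = (reg:SI 3),
   the fallback emits

       (set (reg:SI 3) (const_int 64))
       (set (reg:SI 3) (plus:SI (reg:SI 3) (reg:SI fp)))

   i.e. the constant is loaded first and the register is added
   afterwards by gen_add2_insn.  */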
8310 code = (int) optab_handler (add_optab, GET_MODE (out))->insn_code;
8312 if (CONSTANT_P (op1) || MEM_P (op1) || GET_CODE (op1) == SUBREG
8313 || (REG_P (op1)
8314 && REGNO (op1) >= FIRST_PSEUDO_REGISTER)
8315 || (code != CODE_FOR_nothing
8316 && ! ((*insn_data[code].operand[2].predicate)
8317 (op1, insn_data[code].operand[2].mode))))
8318 tem = op0, op0 = op1, op1 = tem;
8320 gen_reload (out, op0, opnum, type);
8322 /* If OP0 and OP1 are the same, we can use OUT for OP1.
8323 This fixes a problem on the 32K where the stack pointer cannot
8324 be used as an operand of an add insn. */
8326 if (rtx_equal_p (op0, op1))
8327 op1 = out;
8329 insn = emit_insn_if_valid_for_reload (gen_add2_insn (out, op1));
8330 if (insn)
8332 /* Add a REG_EQUIV note so that find_equiv_reg can find it. */
8333 set_unique_reg_note (insn, REG_EQUIV, in);
8334 return insn;
8337 /* If that failed, copy the address register to the reload register.
8338 Then add the constant to the reload register. */
8340 gcc_assert (!reg_overlap_mentioned_p (out, op0));
8341 gen_reload (out, op1, opnum, type);
8342 insn = emit_insn (gen_add2_insn (out, op0));
8343 set_unique_reg_note (insn, REG_EQUIV, in);
8346 #ifdef SECONDARY_MEMORY_NEEDED
8347 /* If we need a memory location to do the move, do it that way. */
8348 else if ((REG_P (in)
8349 || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
8350 && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER
8351 && (REG_P (out)
8352 || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
8353 && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
8354 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (reg_or_subregno (in)),
8355 REGNO_REG_CLASS (reg_or_subregno (out)),
8356 GET_MODE (out)))
8358 /* Get the memory to use and rewrite both registers to its mode. */
8359 rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
8361 if (GET_MODE (loc) != GET_MODE (out))
8362 out = gen_rtx_REG (GET_MODE (loc), REGNO (out));
8364 if (GET_MODE (loc) != GET_MODE (in))
8365 in = gen_rtx_REG (GET_MODE (loc), REGNO (in));
8367 gen_reload (loc, in, opnum, type);
8368 gen_reload (out, loc, opnum, type);
8370 #endif
8371 else if (REG_P (out) && UNARY_P (in))
8373 rtx insn;
8374 rtx op1;
8375 rtx out_moded;
8376 rtx set;
8378 op1 = find_replacement (&XEXP (in, 0));
8379 if (op1 != XEXP (in, 0))
8380 in = gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in), op1);
8382 /* First, try a plain SET. */
8383 set = emit_insn_if_valid_for_reload (gen_rtx_SET (VOIDmode, out, in));
8384 if (set)
8385 return set;
8387 /* If that failed, move the inner operand to the reload
8388 register, and try the same unop with the inner expression
8389 replaced with the reload register. */
8391 if (GET_MODE (op1) != GET_MODE (out))
8392 out_moded = gen_rtx_REG (GET_MODE (op1), REGNO (out));
8393 else
8394 out_moded = out;
8396 gen_reload (out_moded, op1, opnum, type);
8398 insn
8399 = gen_rtx_SET (VOIDmode, out,
8400 gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in),
8401 out_moded));
8402 insn = emit_insn_if_valid_for_reload (insn);
8403 if (insn)
8405 set_unique_reg_note (insn, REG_EQUIV, in);
8406 return insn;
8409 fatal_insn ("Failure trying to reload:", set);
8411 /* If IN is a simple operand, use gen_move_insn. */
8412 else if (OBJECT_P (in) || GET_CODE (in) == SUBREG)
8414 tem = emit_insn (gen_move_insn (out, in));
8415 /* IN may contain a LABEL_REF, if so add a REG_LABEL_OPERAND note. */
8416 mark_jump_label (in, tem, 0);
8419 #ifdef HAVE_reload_load_address
8420 else if (HAVE_reload_load_address)
8421 emit_insn (gen_reload_load_address (out, in));
8422 #endif
8424 /* Otherwise, just write (set OUT IN) and hope for the best. */
8425 else
8426 emit_insn (gen_rtx_SET (VOIDmode, out, in));
8428 /* Return the first insn emitted.
8429 We cannot just return get_last_insn, because there may have
8430 been multiple instructions emitted. Also note that gen_move_insn may
8431 emit more than one insn itself, so we cannot assume that there is one
8432 insn emitted per emit_insn_before call. */
8434 return last ? NEXT_INSN (last) : get_insns ();
8437 /* Delete a previously made output-reload whose result we now believe
8438 is not needed. First we double-check.
8440 INSN is the insn now being processed.
8441 LAST_RELOAD_REG is the hard register number for which we want to delete
8442 the last output reload.
8443 J is the reload-number that originally used REG. The caller has made
8444 certain that reload J doesn't use REG any longer for input.
8445 NEW_RELOAD_REG is the reload register that reload J is using for REG. */
8447 static void
8448 delete_output_reload (rtx insn, int j, int last_reload_reg, rtx new_reload_reg)
8450 rtx output_reload_insn = spill_reg_store[last_reload_reg];
8451 rtx reg = spill_reg_stored_to[last_reload_reg];
8452 int k;
8453 int n_occurrences;
8454 int n_inherited = 0;
8455 rtx i1;
8456 rtx substed;
8458 /* It is possible that this reload has only been used to set another reload
8459 we eliminated earlier and thus deleted this instruction too. */
8460 if (INSN_DELETED_P (output_reload_insn))
8461 return;
8463 /* Get the raw pseudo-register referred to. */
8465 while (GET_CODE (reg) == SUBREG)
8466 reg = SUBREG_REG (reg);
8467 substed = reg_equiv_memory_loc[REGNO (reg)];
8469 /* This is unsafe if the operand occurs more often in the current
8470 insn than it is inherited. */
8471 for (k = n_reloads - 1; k >= 0; k--)
8473 rtx reg2 = rld[k].in;
8474 if (! reg2)
8475 continue;
8476 if (MEM_P (reg2) || reload_override_in[k])
8477 reg2 = rld[k].in_reg;
8478 #ifdef AUTO_INC_DEC
8479 if (rld[k].out && ! rld[k].out_reg)
8480 reg2 = XEXP (rld[k].in_reg, 0);
8481 #endif
8482 while (GET_CODE (reg2) == SUBREG)
8483 reg2 = SUBREG_REG (reg2);
8484 if (rtx_equal_p (reg2, reg))
8486 if (reload_inherited[k] || reload_override_in[k] || k == j)
8487 n_inherited++;
8488 else
8489 return;
8492 n_occurrences = count_occurrences (PATTERN (insn), reg, 0);
8493 if (CALL_P (insn) && CALL_INSN_FUNCTION_USAGE (insn))
8494 n_occurrences += count_occurrences (CALL_INSN_FUNCTION_USAGE (insn),
8495 reg, 0);
8496 if (substed)
8497 n_occurrences += count_occurrences (PATTERN (insn),
8498 eliminate_regs (substed, VOIDmode,
8499 NULL_RTX), 0);
8500 for (i1 = reg_equiv_alt_mem_list[REGNO (reg)]; i1; i1 = XEXP (i1, 1))
8502 gcc_assert (!rtx_equal_p (XEXP (i1, 0), substed));
8503 n_occurrences += count_occurrences (PATTERN (insn), XEXP (i1, 0), 0);
8505 if (n_occurrences > n_inherited)
8506 return;
8508 /* If the pseudo-reg we are reloading is no longer referenced
8509 anywhere between the store into it and here,
8510 and we're within the same basic block, then the value can only
8511 pass through the reload reg and end up here.
8512 Otherwise, give up--return. */
8513 for (i1 = NEXT_INSN (output_reload_insn);
8514 i1 != insn; i1 = NEXT_INSN (i1))
8516 if (NOTE_INSN_BASIC_BLOCK_P (i1))
8517 return;
8518 if ((NONJUMP_INSN_P (i1) || CALL_P (i1))
8519 && reg_mentioned_p (reg, PATTERN (i1)))
8521 /* If this is a USE in front of INSN, we only have to check that
8522 there are no more references than accounted for by inheritance. */
8523 while (NONJUMP_INSN_P (i1) && GET_CODE (PATTERN (i1)) == USE)
8525 n_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
8526 i1 = NEXT_INSN (i1);
8528 if (n_occurrences <= n_inherited && i1 == insn)
8529 break;
8530 return;
8534 /* We will be deleting the insn. Remove the spill reg information. */
8535 for (k = hard_regno_nregs[last_reload_reg][GET_MODE (reg)]; k-- > 0; )
8537 spill_reg_store[last_reload_reg + k] = 0;
8538 spill_reg_stored_to[last_reload_reg + k] = 0;
8541 /* The caller has already checked that REG dies or is set in INSN.
8542 It has also checked that we are optimizing, and thus some
8543 inaccuracies in the debugging information are acceptable.
8544 So we could just delete output_reload_insn. But in some cases
8545 we can improve the debugging information without sacrificing
8546 optimization - maybe even improving the code: See if the pseudo
8547 reg has been completely replaced with reload regs. If so, delete
8548 the store insn and forget we had a stack slot for the pseudo. */
8549 if (rld[j].out != rld[j].in
8550 && REG_N_DEATHS (REGNO (reg)) == 1
8551 && REG_N_SETS (REGNO (reg)) == 1
8552 && REG_BASIC_BLOCK (REGNO (reg)) >= NUM_FIXED_BLOCKS
8553 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
8555 rtx i2;
8557 /* We know that it was used only between here and the beginning of
8558 the current basic block. (We also know that the last use before
8559 INSN was the output reload we are thinking of deleting, but never
8560 mind that.) Search that range; see if any ref remains. */
8561 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
8563 rtx set = single_set (i2);
8565 /* Uses which just store in the pseudo don't count,
8566 since if they are the only uses, they are dead. */
8567 if (set != 0 && SET_DEST (set) == reg)
8568 continue;
8569 if (LABEL_P (i2)
8570 || JUMP_P (i2))
8571 break;
8572 if ((NONJUMP_INSN_P (i2) || CALL_P (i2))
8573 && reg_mentioned_p (reg, PATTERN (i2)))
8575 /* Some other ref remains; just delete the output reload we
8576 know to be dead. */
8577 delete_address_reloads (output_reload_insn, insn);
8578 delete_insn (output_reload_insn);
8579 return;
8583 /* Delete the now-dead stores into this pseudo. Note that this
8584 loop also takes care of deleting output_reload_insn. */
8585 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
8587 rtx set = single_set (i2);
8589 if (set != 0 && SET_DEST (set) == reg)
8591 delete_address_reloads (i2, insn);
8592 delete_insn (i2);
8594 if (LABEL_P (i2)
8595 || JUMP_P (i2))
8596 break;
8599 /* For the debugging info, say the pseudo lives in this reload reg. */
8600 reg_renumber[REGNO (reg)] = REGNO (new_reload_reg);
8601 if (ira_conflicts_p)
8602 /* Inform IRA about the change. */
8603 ira_mark_allocation_change (REGNO (reg));
8604 alter_reg (REGNO (reg), -1, false);
8606 else
8608 delete_address_reloads (output_reload_insn, insn);
8609 delete_insn (output_reload_insn);
8613 /* We are going to delete DEAD_INSN. Recursively delete loads of
8614 reload registers used in DEAD_INSN that are not used till CURRENT_INSN.
8615 CURRENT_INSN is being reloaded, so we have to check its reloads too. */
8616 static void
8617 delete_address_reloads (rtx dead_insn, rtx current_insn)
8619 rtx set = single_set (dead_insn);
8620 rtx set2, dst, prev, next;
8621 if (set)
8623 rtx dst = SET_DEST (set);
8624 if (MEM_P (dst))
8625 delete_address_reloads_1 (dead_insn, XEXP (dst, 0), current_insn);
8627 /* If we deleted the store from a reloaded post_{in,de}c expression,
8628 we can delete the matching adds. */
8629 prev = PREV_INSN (dead_insn);
8630 next = NEXT_INSN (dead_insn);
8631 if (! prev || ! next)
8632 return;
8633 set = single_set (next);
8634 set2 = single_set (prev);
8635 if (! set || ! set2
8636 || GET_CODE (SET_SRC (set)) != PLUS || GET_CODE (SET_SRC (set2)) != PLUS
8637 || !CONST_INT_P (XEXP (SET_SRC (set), 1))
8638 || !CONST_INT_P (XEXP (SET_SRC (set2), 1)))
8639 return;
8640 dst = SET_DEST (set);
8641 if (! rtx_equal_p (dst, SET_DEST (set2))
8642 || ! rtx_equal_p (dst, XEXP (SET_SRC (set), 0))
8643 || ! rtx_equal_p (dst, XEXP (SET_SRC (set2), 0))
8644 || (INTVAL (XEXP (SET_SRC (set), 1))
8645 != -INTVAL (XEXP (SET_SRC (set2), 1))))
8646 return;
8647 delete_related_insns (prev);
8648 delete_related_insns (next);
8651 /* Subfunction of delete_address_reloads: process registers found in X. */
8652 static void
8653 delete_address_reloads_1 (rtx dead_insn, rtx x, rtx current_insn)
8655 rtx prev, set, dst, i2;
8656 int i, j;
8657 enum rtx_code code = GET_CODE (x);
8659 if (code != REG)
8661 const char *fmt = GET_RTX_FORMAT (code);
8662 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8664 if (fmt[i] == 'e')
8665 delete_address_reloads_1 (dead_insn, XEXP (x, i), current_insn);
8666 else if (fmt[i] == 'E')
8668 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8669 delete_address_reloads_1 (dead_insn, XVECEXP (x, i, j),
8670 current_insn);
8673 return;
8676 if (spill_reg_order[REGNO (x)] < 0)
8677 return;
8679 /* Scan backwards for the insn that sets X. Because of inheritance, this
8680 might be some distance back. */
8681 for (prev = PREV_INSN (dead_insn); prev; prev = PREV_INSN (prev))
8683 code = GET_CODE (prev);
8684 if (code == CODE_LABEL || code == JUMP_INSN)
8685 return;
8686 if (!INSN_P (prev))
8687 continue;
8688 if (reg_set_p (x, PATTERN (prev)))
8689 break;
8690 if (reg_referenced_p (x, PATTERN (prev)))
8691 return;
8693 if (! prev || INSN_UID (prev) < reload_first_uid)
8694 return;
8695 /* Check that PREV only sets the reload register. */
8696 set = single_set (prev);
8697 if (! set)
8698 return;
8699 dst = SET_DEST (set);
8700 if (!REG_P (dst)
8701 || ! rtx_equal_p (dst, x))
8702 return;
8703 if (! reg_set_p (dst, PATTERN (dead_insn)))
8705 /* Check if DST was used in a later insn -
8706 it might have been inherited. */
8707 for (i2 = NEXT_INSN (dead_insn); i2; i2 = NEXT_INSN (i2))
8709 if (LABEL_P (i2))
8710 break;
8711 if (! INSN_P (i2))
8712 continue;
8713 if (reg_referenced_p (dst, PATTERN (i2)))
8715 /* If there is a reference to the register in the current insn,
8716 it might be loaded in a non-inherited reload. If no other
8717 reload uses it, that means the register is set before it is
8718 referenced. */
8719 if (i2 == current_insn)
8721 for (j = n_reloads - 1; j >= 0; j--)
8722 if ((rld[j].reg_rtx == dst && reload_inherited[j])
8723 || reload_override_in[j] == dst)
8724 return;
8725 for (j = n_reloads - 1; j >= 0; j--)
8726 if (rld[j].in && rld[j].reg_rtx == dst)
8727 break;
8728 if (j >= 0)
8729 break;
8731 return;
8733 if (JUMP_P (i2))
8734 break;
8735 /* If DST is still live at CURRENT_INSN, check if it is used for
8736 any reload. Note that even if CURRENT_INSN sets DST, we still
8737 have to check the reloads. */
8738 if (i2 == current_insn)
8740 for (j = n_reloads - 1; j >= 0; j--)
8741 if ((rld[j].reg_rtx == dst && reload_inherited[j])
8742 || reload_override_in[j] == dst)
8743 return;
8744 /* ??? We can't finish the loop here, because DST might be
8745 allocated to a pseudo in this block if no reload in this
8746 block needs any of the classes containing DST (see
8747 spill_hard_reg). There is no easy way to tell this, so we
8748 have to scan until the end of the basic block. */
8750 if (reg_set_p (dst, PATTERN (i2)))
8751 break;
8754 delete_address_reloads_1 (prev, SET_SRC (set), current_insn);
8755 reg_reloaded_contents[REGNO (dst)] = -1;
8756 delete_insn (prev);
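/* Summary note (wording not from the original source): PREV, the insn that
   set reload register DST used by DEAD_INSN, is deleted only when the scans
   above prove that nothing else (no later insn, and no reload of
   CURRENT_INSN) still wants that value; in every other case the function
   returns early and leaves the setter alone.  */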
8759 /* Output reload-insns to reload VALUE into RELOADREG.
8760 VALUE is an autoincrement or autodecrement RTX whose operand
8761 is a register or memory location;
8762 so reloading involves incrementing that location.
8763 IN is either identical to VALUE, or some cheaper place to reload from.
8765 INC_AMOUNT is the number to increment or decrement by (always positive).
8766 This cannot be deduced from VALUE.
8768 Return the instruction that stores into RELOADREG. */
8770 static rtx
8771 inc_for_reload (rtx reloadreg, rtx in, rtx value, int inc_amount)
8773 /* REG or MEM to be copied and incremented. */
8774 rtx incloc = find_replacement (&XEXP (value, 0));
8775 /* Nonzero if increment after copying. */
8776 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC
8777 || GET_CODE (value) == POST_MODIFY);
8778 rtx last;
8779 rtx inc;
8780 rtx add_insn;
8781 int code;
8782 rtx store;
8783 rtx real_in = in == value ? incloc : in;
8785 /* No hard register is equivalent to this register after the
8786 inc/dec operation. If REG_LAST_RELOAD_REG were nonzero,
8787 we could inc/dec that register as well (maybe even using it for
8788 the source), but it's probably not worth worrying about. */
8789 if (REG_P (incloc))
8790 reg_last_reload_reg[REGNO (incloc)] = 0;
8792 if (GET_CODE (value) == PRE_MODIFY || GET_CODE (value) == POST_MODIFY)
8794 gcc_assert (GET_CODE (XEXP (value, 1)) == PLUS);
8795 inc = find_replacement (&XEXP (XEXP (value, 1), 1));
8797 else
8799 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
8800 inc_amount = -inc_amount;
8802 inc = GEN_INT (inc_amount);
8805 /* If this is post-increment, first copy the location to the reload reg. */
8806 if (post && real_in != reloadreg)
8807 emit_insn (gen_move_insn (reloadreg, real_in));
8809 if (in == value)
8811 /* See if we can directly increment INCLOC. Use a method similar to
8812 that in gen_reload. */
8814 last = get_last_insn ();
8815 add_insn = emit_insn (gen_rtx_SET (VOIDmode, incloc,
8816 gen_rtx_PLUS (GET_MODE (incloc),
8817 incloc, inc)));
8819 code = recog_memoized (add_insn);
8820 if (code >= 0)
8822 extract_insn (add_insn);
8823 if (constrain_operands (1))
8825 /* If this is a pre-increment and we have incremented the value
8826 where it lives, copy the incremented value to RELOADREG to
8827 be used as an address. */
8829 if (! post)
8830 emit_insn (gen_move_insn (reloadreg, incloc));
8832 return add_insn;
8835 delete_insns_since (last);
8838 /* If we couldn't do the increment directly, we must increment in
8839 RELOADREG. The way we do this depends on whether this is pre- or
8840 post-increment. For pre-increment, copy INCLOC to the reload
8841 register, increment it there, then save it back. */
8843 if (! post)
8845 if (in != reloadreg)
8846 emit_insn (gen_move_insn (reloadreg, real_in));
8847 emit_insn (gen_add2_insn (reloadreg, inc));
8848 store = emit_insn (gen_move_insn (incloc, reloadreg));
8850 else
8852 /* Postincrement.
8853 Because this might be a jump insn or a compare, and because RELOADREG
8854 may not be available after the insn in an input reload, we must do
8855 the incrementation before the insn for which the reload is being done.
8857 We have already copied IN to RELOADREG. Increment the copy in
8858 RELOADREG, save that back, then decrement RELOADREG so it has
8859 the original value. */
8861 emit_insn (gen_add2_insn (reloadreg, inc));
8862 store = emit_insn (gen_move_insn (incloc, reloadreg));
8863 if (CONST_INT_P (inc))
8864 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-INTVAL (inc))));
8865 else
8866 emit_insn (gen_sub2_insn (reloadreg, inc));
8869 return store;
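/* Illustrative sketch (hypothetical RTL; R, M and the constant 4 are just
   placeholders): when the add cannot be done directly in INCLOC, the
   fallback above emits, for a pre-increment,

       (set (reg R) (mem M))                        ;; copy INCLOC
       (set (reg R) (plus (reg R) (const_int 4)))
       (set (mem M) (reg R))                        ;; returned STORE

   and for a post-increment, where R must still hold the original value
   for the reloaded insn,

       (set (reg R) (plus (reg R) (const_int 4)))
       (set (mem M) (reg R))                        ;; returned STORE
       (set (reg R) (plus (reg R) (const_int -4)))  ;; restore R

   with the initial copy of IN into R having been emitted earlier.  */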
8872 #ifdef AUTO_INC_DEC
8873 static void
8874 add_auto_inc_notes (rtx insn, rtx x)
8876 enum rtx_code code = GET_CODE (x);
8877 const char *fmt;
8878 int i, j;
8880 if (code == MEM && auto_inc_p (XEXP (x, 0)))
8882 add_reg_note (insn, REG_INC, XEXP (XEXP (x, 0), 0));
8883 return;
8886 /* Scan all the operand sub-expressions. */
8887 fmt = GET_RTX_FORMAT (code);
8888 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8890 if (fmt[i] == 'e')
8891 add_auto_inc_notes (insn, XEXP (x, i));
8892 else if (fmt[i] == 'E')
8893 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8894 add_auto_inc_notes (insn, XVECEXP (x, i, j));
8897 #endif
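/* Illustrative example (hypothetical insn): for a pattern containing
   (mem (post_inc (reg 42))), add_auto_inc_notes attaches a REG_INC note
   for (reg 42) to the insn, recording that the register is modified as a
   side effect of the address calculation.  */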
8899 /* This is used by the reload pass, which emits some instructions after
8900 abnormal calls that move the basic block end, when it really wants to
8901 emit them on the edge. Look for abnormal call edges, search backward
8902 for the proper call, and fix the damage.
8904 Instructions that throw exceptions internally are handled similarly. */
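/* Illustrative scenario (assumed, not spelled out in the original comment):
   reload emits restore insns after a call that ends a basic block with an
   abnormal or EH edge, i.e. after BB_END, where they are skipped on the
   abnormal path. The code below walks back to the call (or the insn that
   can throw), makes it BB_END again, and re-inserts the stranded insns on
   the fallthru edge via insert_insn_on_edge.  */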
8905 void
8906 fixup_abnormal_edges (void)
8908 bool inserted = false;
8909 basic_block bb;
8911 FOR_EACH_BB (bb)
8913 edge e;
8914 edge_iterator ei;
8916 /* Look for cases we are interested in - calls or instructions causing
8917 exceptions. */
8918 FOR_EACH_EDGE (e, ei, bb->succs)
8920 if (e->flags & EDGE_ABNORMAL_CALL)
8921 break;
8922 if ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
8923 == (EDGE_ABNORMAL | EDGE_EH))
8924 break;
8926 if (e && !CALL_P (BB_END (bb))
8927 && !can_throw_internal (BB_END (bb)))
8929 rtx insn;
8931 /* Get past the new insns generated. Allow notes, as the insns
8932 may already have been deleted. */
8933 insn = BB_END (bb);
8934 while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
8935 && !can_throw_internal (insn)
8936 && insn != BB_HEAD (bb))
8937 insn = PREV_INSN (insn);
8939 if (CALL_P (insn) || can_throw_internal (insn))
8941 rtx stop, next;
8943 stop = NEXT_INSN (BB_END (bb));
8944 BB_END (bb) = insn;
8945 insn = NEXT_INSN (insn);
8947 FOR_EACH_EDGE (e, ei, bb->succs)
8948 if (e->flags & EDGE_FALLTHRU)
8949 break;
8951 while (insn && insn != stop)
8953 next = NEXT_INSN (insn);
8954 if (INSN_P (insn))
8956 delete_insn (insn);
8958 /* Sometimes there's still the return value USE.
8959 If it's placed after a trapping call (i.e. that
8960 call is the last insn anyway), we have no fallthru
8961 edge. Simply delete this use and don't try to insert
8962 on the non-existent edge. */
8963 if (GET_CODE (PATTERN (insn)) != USE)
8965 /* We're not deleting it, we're moving it. */
8966 INSN_DELETED_P (insn) = 0;
8967 PREV_INSN (insn) = NULL_RTX;
8968 NEXT_INSN (insn) = NULL_RTX;
8970 insert_insn_on_edge (insn, e);
8971 inserted = true;
8974 else if (!BARRIER_P (insn))
8975 set_block_for_insn (insn, NULL);
8976 insn = next;
8980 /* It may be that we don't find any such trapping insn. In this
8981 case we discovered quite late that the insn that had been
8982 marked as can_throw_internal in fact couldn't trap at all.
8983 So we should delete the EH edges out of the block. */
8984 else
8985 purge_dead_edges (bb);
8989 /* We've possibly turned a single trapping insn into multiple ones. */
8990 if (flag_non_call_exceptions)
8992 sbitmap blocks;
8993 blocks = sbitmap_alloc (last_basic_block);
8994 sbitmap_ones (blocks);
8995 find_many_sub_basic_blocks (blocks);
8996 sbitmap_free (blocks);
8999 if (inserted)
9000 commit_edge_insertions ();
9002 #ifdef ENABLE_CHECKING
9003 /* Verify that we didn't turn one trapping insn into many, and that
9004 we found and corrected all of the problems wrt fixups on the
9005 fallthru edge. */
9006 verify_flow_info ();
9007 #endif