Enable dumping of alias graphs.
[official-gcc/Ramakrishna.git] / gcc / reload1.c
blobd5cd37ce0bd09765c0f769ebd5745b5e3ad28ae9
1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
27 #include "machmode.h"
28 #include "hard-reg-set.h"
29 #include "rtl.h"
30 #include "tm_p.h"
31 #include "obstack.h"
32 #include "insn-config.h"
33 #include "flags.h"
34 #include "function.h"
35 #include "expr.h"
36 #include "optabs.h"
37 #include "regs.h"
38 #include "addresses.h"
39 #include "basic-block.h"
40 #include "reload.h"
41 #include "recog.h"
42 #include "output.h"
43 #include "real.h"
44 #include "toplev.h"
45 #include "except.h"
46 #include "tree.h"
47 #include "ira.h"
48 #include "df.h"
49 #include "target.h"
50 #include "emit-rtl.h"
52 /* This file contains the reload pass of the compiler, which is
53 run after register allocation has been done. It checks that
54 each insn is valid (operands required to be in registers really
55 are in registers of the proper class) and fixes up invalid ones
56 by copying values temporarily into registers for the insns
57 that need them.
59 The results of register allocation are described by the vector
60 reg_renumber; the insns still contain pseudo regs, but reg_renumber
61 can be used to find which hard reg, if any, a pseudo reg is in.
63 The technique we always use is to free up a few hard regs that are
64 called ``reload regs'', and for each place where a pseudo reg
65 must be in a hard reg, copy it temporarily into one of the reload regs.
67 Reload regs are allocated locally for every instruction that needs
68 reloads. When there are pseudos which are allocated to a register that
69 has been chosen as a reload reg, such pseudos must be ``spilled''.
70 This means that they go to other hard regs, or to stack slots if no other
71 available hard regs can be found. Spilling can invalidate more
72 insns, requiring additional need for reloads, so we must keep checking
73 until the process stabilizes.
75 For machines with different classes of registers, we must keep track
76 of the register class needed for each reload, and make sure that
77 we allocate enough reload registers of each class.
79 The file reload.c contains the code that checks one insn for
80 validity and reports the reloads that it needs. This file
81 is in charge of scanning the entire rtl code, accumulating the
82 reload needs, spilling, assigning reload registers to use for
83 fixing up each insn, and generating the new insns to copy values
84 into the reload registers. */
86 /* During reload_as_needed, element N contains a REG rtx for the hard reg
87 into which reg N has been reloaded (perhaps for a previous insn). */
88 static rtx *reg_last_reload_reg;
90 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
91 for an output reload that stores into reg N. */
92 static regset_head reg_has_output_reload;
94 /* Indicates which hard regs are reload-registers for an output reload
95 in the current insn. */
96 static HARD_REG_SET reg_is_output_reload;
98 /* Element N is the constant value to which pseudo reg N is equivalent,
99 or zero if pseudo reg N is not equivalent to a constant.
100 find_reloads looks at this in order to replace pseudo reg N
101 with the constant it stands for. */
102 rtx *reg_equiv_constant;
104 /* Element N is an invariant value to which pseudo reg N is equivalent.
105 eliminate_regs_in_insn uses this to replace pseudos in particular
106 contexts. */
107 rtx *reg_equiv_invariant;
109 /* Element N is a memory location to which pseudo reg N is equivalent,
110 prior to any register elimination (such as frame pointer to stack
111 pointer). Depending on whether or not it is a valid address, this value
112 is transferred to either reg_equiv_address or reg_equiv_mem. */
113 rtx *reg_equiv_memory_loc;
115 /* We allocate reg_equiv_memory_loc inside a varray so that the garbage
116 collector can keep track of what is inside. */
117 VEC(rtx,gc) *reg_equiv_memory_loc_vec;
119 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
120 This is used when the address is not valid as a memory address
121 (because its displacement is too big for the machine.) */
122 rtx *reg_equiv_address;
124 /* Element N is the memory slot to which pseudo reg N is equivalent,
125 or zero if pseudo reg N is not equivalent to a memory slot. */
126 rtx *reg_equiv_mem;
128 /* Element N is an EXPR_LIST of REG_EQUIVs containing MEMs with
129 alternate representations of the location of pseudo reg N. */
130 rtx *reg_equiv_alt_mem_list;
132 /* Widest width in which each pseudo reg is referred to (via subreg). */
133 static unsigned int *reg_max_ref_width;
135 /* Element N is the list of insns that initialized reg N from its equivalent
136 constant or memory slot. */
137 rtx *reg_equiv_init;
138 int reg_equiv_init_size;
140 /* Vector to remember old contents of reg_renumber before spilling. */
141 static short *reg_old_renumber;
143 /* During reload_as_needed, element N contains the last pseudo regno reloaded
144 into hard register N. If that pseudo reg occupied more than one register,
145 reg_reloaded_contents points to that pseudo for each spill register in
146 use; all of these must remain set for an inheritance to occur. */
147 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
149 /* During reload_as_needed, element N contains the insn for which
150 hard register N was last used. Its contents are significant only
151 when reg_reloaded_valid is set for this register. */
152 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
154 /* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid. */
155 static HARD_REG_SET reg_reloaded_valid;
156 /* Indicate if the register was dead at the end of the reload.
157 This is only valid if reg_reloaded_contents is set and valid. */
158 static HARD_REG_SET reg_reloaded_dead;
160 /* Indicate whether the register's current value is one that is not
161 safe to retain across a call, even for registers that are normally
162 call-saved. This is only meaningful for members of reg_reloaded_valid. */
163 static HARD_REG_SET reg_reloaded_call_part_clobbered;
165 /* Number of spill-regs so far; number of valid elements of spill_regs. */
166 static int n_spills;
168 /* In parallel with spill_regs, contains REG rtx's for those regs.
169 Holds the last rtx used for any given reg, or 0 if it has never
170 been used for spilling yet. This rtx is reused, provided it has
171 the proper mode. */
172 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
174 /* In parallel with spill_regs, contains nonzero for a spill reg
175 that was stored after the last time it was used.
176 The precise value is the insn generated to do the store. */
177 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
179 /* This is the register that was stored with spill_reg_store. This is a
180 copy of reload_out / reload_out_reg when the value was stored; if
181 reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg. */
182 static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER];
184 /* This table is the inverse mapping of spill_regs:
185 indexed by hard reg number,
186 it contains the position of that reg in spill_regs,
187 or -1 for something that is not in spill_regs.
189 ?!? This is no longer accurate. */
190 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
192 /* This reg set indicates registers that can't be used as spill registers for
193 the currently processed insn. These are the hard registers which are live
194 during the insn, but not allocated to pseudos, as well as fixed
195 registers. */
196 static HARD_REG_SET bad_spill_regs;
198 /* These are the hard registers that can't be used as spill register for any
199 insn. This includes registers used for user variables and registers that
200 we can't eliminate. A register that appears in this set also can't be used
201 to retry register allocation. */
202 static HARD_REG_SET bad_spill_regs_global;
204 /* Describes order of use of registers for reloading
205 of spilled pseudo-registers. `n_spills' is the number of
206 elements that are actually valid; new ones are added at the end.
208 Both spill_regs and spill_reg_order are used on two occasions:
209 once during find_reload_regs, where they keep track of the spill registers
210 for a single insn, but also during reload_as_needed where they show all
211 the registers ever used by reload. For the latter case, the information
212 is calculated during finish_spills. */
213 static short spill_regs[FIRST_PSEUDO_REGISTER];
215 /* This vector of reg sets indicates, for each pseudo, which hard registers
216 may not be used for retrying global allocation because the register was
217 formerly spilled from one of them. If we allowed reallocating a pseudo to
218 a register that it was already allocated to, reload might not
219 terminate. */
220 static HARD_REG_SET *pseudo_previous_regs;
222 /* This vector of reg sets indicates, for each pseudo, which hard
223 registers may not be used for retrying global allocation because they
224 are used as spill registers during one of the insns in which the
225 pseudo is live. */
226 static HARD_REG_SET *pseudo_forbidden_regs;
228 /* All hard regs that have been used as spill registers for any insn are
229 marked in this set. */
230 static HARD_REG_SET used_spill_regs;
232 /* Index of last register assigned as a spill register. We allocate in
233 a round-robin fashion. */
234 static int last_spill_reg;
236 /* Nonzero if indirect addressing is supported on the machine; this means
237 that spilling (REG n) does not require reloading it into a register in
238 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
239 value indicates the level of indirect addressing supported, e.g., two
240 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
241 a hard register. */
242 static char spill_indirect_levels;
244 /* Nonzero if indirect addressing is supported when the innermost MEM is
245 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
246 which these are valid is the same as spill_indirect_levels, above. */
247 char indirect_symref_ok;
249 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
250 char double_reg_address_ok;
252 /* Record the stack slot for each spilled hard register. */
253 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
255 /* Width allocated so far for that stack slot. */
256 static unsigned int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
258 /* Record which pseudos needed to be spilled. */
259 static regset_head spilled_pseudos;
261 /* Record which pseudos changed their allocation in finish_spills. */
262 static regset_head changed_allocation_pseudos;
264 /* Used for communication between order_regs_for_reload and count_pseudo.
265 Used to avoid counting one pseudo twice. */
266 static regset_head pseudos_counted;
268 /* First uid used by insns created by reload in this function.
269 Used in find_equiv_reg. */
270 int reload_first_uid;
272 /* Flag set by local-alloc or global-alloc if anything is live in
273 a call-clobbered reg across calls. */
274 int caller_save_needed;
276 /* Set to 1 while reload_as_needed is operating.
277 Required by some machines to handle any generated moves differently. */
278 int reload_in_progress = 0;
280 /* These arrays record the insn_code of insns that may be needed to
281 perform input and output reloads of special objects. They provide a
282 place to pass a scratch register. */
283 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
284 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
286 /* This obstack is used for allocation of rtl during register elimination.
287 The allocated storage can be freed once find_reloads has processed the
288 insn. */
289 static struct obstack reload_obstack;
291 /* Points to the beginning of the reload_obstack. All insn_chain structures
292 are allocated first. */
293 static char *reload_startobj;
295 /* The point after all insn_chain structures. Used to quickly deallocate
296 memory allocated in copy_reloads during calculate_needs_all_insns. */
297 static char *reload_firstobj;
299 /* This points before all local rtl generated by register elimination.
300 Used to quickly free all memory after processing one insn. */
301 static char *reload_insn_firstobj;
303 /* List of insn_chain instructions, one for every insn that reload needs to
304 examine. */
305 struct insn_chain *reload_insn_chain;
307 /* List of all insns needing reloads. */
308 static struct insn_chain *insns_need_reload;
310 /* This structure is used to record information about register eliminations.
311 Each array entry describes one possible way of eliminating a register
312 in favor of another. If there is more than one way of eliminating a
313 particular register, the most preferred should be specified first. */
315 struct elim_table
317 int from; /* Register number to be eliminated. */
318 int to; /* Register number used as replacement. */
319 HOST_WIDE_INT initial_offset; /* Initial difference between values. */
320 int can_eliminate; /* Nonzero if this elimination can be done. */
321 int can_eliminate_previous; /* Value returned by TARGET_CAN_ELIMINATE
322 target hook in previous scan over insns
323 made by reload. */
324 HOST_WIDE_INT offset; /* Current offset between the two regs. */
325 HOST_WIDE_INT previous_offset;/* Offset at end of previous insn. */
326 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
327 rtx from_rtx; /* REG rtx for the register to be eliminated.
328 We cannot simply compare the number since
329 we might then spuriously replace a hard
330 register corresponding to a pseudo
331 assigned to the reg to be eliminated. */
332 rtx to_rtx; /* REG rtx for the replacement. */
335 static struct elim_table *reg_eliminate = 0;
337 /* This is an intermediate structure to initialize the table. It has
338 exactly the members provided by ELIMINABLE_REGS. */
339 static const struct elim_table_1
341 const int from;
342 const int to;
343 } reg_eliminate_1[] =
345 /* If a set of eliminable registers was specified, define the table from it.
346 Otherwise, default to the normal case of the frame pointer being
347 replaced by the stack pointer. */
349 #ifdef ELIMINABLE_REGS
350 ELIMINABLE_REGS;
351 #else
352 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
353 #endif
355 #define NUM_ELIMINABLE_REGS ARRAY_SIZE (reg_eliminate_1)
357 /* Record the number of pending eliminations that have an offset not equal
358 to their initial offset. If nonzero, we use a new copy of each
359 replacement result in any insns encountered. */
360 int num_not_at_initial_offset;
362 /* Count the number of registers that we may be able to eliminate. */
363 static int num_eliminable;
364 /* And the number of registers that are equivalent to a constant that
365 can be eliminated to frame_pointer / arg_pointer + constant. */
366 static int num_eliminable_invariants;
368 /* For each label, we record the offset of each elimination. If we reach
369 a label by more than one path and an offset differs, we cannot do the
370 elimination. This information is indexed by the difference of the
371 number of the label and the first label number. We can't offset the
372 pointer itself as this can cause problems on machines with segmented
373 memory. The first table is an array of flags that records whether we
374 have yet encountered a label and the second table is an array of arrays,
375 one entry in the latter array for each elimination. */
377 static int first_label_num;
378 static char *offsets_known_at;
379 static HOST_WIDE_INT (*offsets_at)[NUM_ELIMINABLE_REGS];
381 /* Number of labels in the current function. */
383 static int num_labels;
385 static void replace_pseudos_in (rtx *, enum machine_mode, rtx);
386 static void maybe_fix_stack_asms (void);
387 static void copy_reloads (struct insn_chain *);
388 static void calculate_needs_all_insns (int);
389 static int find_reg (struct insn_chain *, int);
390 static void find_reload_regs (struct insn_chain *);
391 static void select_reload_regs (void);
392 static void delete_caller_save_insns (void);
394 static void spill_failure (rtx, enum reg_class);
395 static void count_spilled_pseudo (int, int, int);
396 static void delete_dead_insn (rtx);
397 static void alter_reg (int, int, bool);
398 static void set_label_offsets (rtx, rtx, int);
399 static void check_eliminable_occurrences (rtx);
400 static void elimination_effects (rtx, enum machine_mode);
401 static int eliminate_regs_in_insn (rtx, int);
402 static void update_eliminable_offsets (void);
403 static void mark_not_eliminable (rtx, const_rtx, void *);
404 static void set_initial_elim_offsets (void);
405 static bool verify_initial_elim_offsets (void);
406 static void set_initial_label_offsets (void);
407 static void set_offsets_for_label (rtx);
408 static void init_elim_table (void);
409 static void update_eliminables (HARD_REG_SET *);
410 static void spill_hard_reg (unsigned int, int);
411 static int finish_spills (int);
412 static void scan_paradoxical_subregs (rtx);
413 static void count_pseudo (int);
414 static void order_regs_for_reload (struct insn_chain *);
415 static void reload_as_needed (int);
416 static void forget_old_reloads_1 (rtx, const_rtx, void *);
417 static void forget_marked_reloads (regset);
418 static int reload_reg_class_lower (const void *, const void *);
419 static void mark_reload_reg_in_use (unsigned int, int, enum reload_type,
420 enum machine_mode);
421 static void clear_reload_reg_in_use (unsigned int, int, enum reload_type,
422 enum machine_mode);
423 static int reload_reg_free_p (unsigned int, int, enum reload_type);
424 static int reload_reg_free_for_value_p (int, int, int, enum reload_type,
425 rtx, rtx, int, int);
426 static int free_for_value_p (int, enum machine_mode, int, enum reload_type,
427 rtx, rtx, int, int);
428 static int reload_reg_reaches_end_p (unsigned int, int, enum reload_type);
429 static int allocate_reload_reg (struct insn_chain *, int, int);
430 static int conflicts_with_override (rtx);
431 static void failed_reload (rtx, int);
432 static int set_reload_reg (int, int);
433 static void choose_reload_regs_init (struct insn_chain *, rtx *);
434 static void choose_reload_regs (struct insn_chain *);
435 static void merge_assigned_reloads (rtx);
436 static void emit_input_reload_insns (struct insn_chain *, struct reload *,
437 rtx, int);
438 static void emit_output_reload_insns (struct insn_chain *, struct reload *,
439 int);
440 static void do_input_reload (struct insn_chain *, struct reload *, int);
441 static void do_output_reload (struct insn_chain *, struct reload *, int);
442 static void emit_reload_insns (struct insn_chain *);
443 static void delete_output_reload (rtx, int, int, rtx);
444 static void delete_address_reloads (rtx, rtx);
445 static void delete_address_reloads_1 (rtx, rtx, rtx);
446 static rtx inc_for_reload (rtx, rtx, rtx, int);
447 #ifdef AUTO_INC_DEC
448 static void add_auto_inc_notes (rtx, rtx);
449 #endif
450 static void copy_eh_notes (rtx, rtx);
451 static void substitute (rtx *, const_rtx, rtx);
452 static bool gen_reload_chain_without_interm_reg_p (int, int);
453 static int reloads_conflict (int, int);
454 static rtx gen_reload (rtx, rtx, int, enum reload_type);
455 static rtx emit_insn_if_valid_for_reload (rtx);
457 /* Initialize the reload pass. This is called at the beginning of compilation
458 and may be called again if the target is reinitialized. */
460 void
461 init_reload (void)
463 int i;
465 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
466 Set spill_indirect_levels to the number of levels such addressing is
467 permitted, zero if it is not permitted at all. */
469 rtx tem
470 = gen_rtx_MEM (Pmode,
471 gen_rtx_PLUS (Pmode,
472 gen_rtx_REG (Pmode,
473 LAST_VIRTUAL_REGISTER + 1),
474 GEN_INT (4)));
475 spill_indirect_levels = 0;
477 while (memory_address_p (QImode, tem))
479 spill_indirect_levels++;
480 tem = gen_rtx_MEM (Pmode, tem);
483 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
485 tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
486 indirect_symref_ok = memory_address_p (QImode, tem);
488 /* See if reg+reg is a valid (and offsettable) address. */
490 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
492 tem = gen_rtx_PLUS (Pmode,
493 gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
494 gen_rtx_REG (Pmode, i));
496 /* This way, we make sure that reg+reg is an offsettable address. */
497 tem = plus_constant (tem, 4);
499 if (memory_address_p (QImode, tem))
501 double_reg_address_ok = 1;
502 break;
506 /* Initialize obstack for our rtl allocation. */
507 gcc_obstack_init (&reload_obstack);
508 reload_startobj = XOBNEWVAR (&reload_obstack, char, 0);
510 INIT_REG_SET (&spilled_pseudos);
511 INIT_REG_SET (&changed_allocation_pseudos);
512 INIT_REG_SET (&pseudos_counted);
515 /* List of insn chains that are currently unused. */
516 static struct insn_chain *unused_insn_chains = 0;
518 /* Allocate an empty insn_chain structure. */
519 struct insn_chain *
520 new_insn_chain (void)
522 struct insn_chain *c;
524 if (unused_insn_chains == 0)
526 c = XOBNEW (&reload_obstack, struct insn_chain);
527 INIT_REG_SET (&c->live_throughout);
528 INIT_REG_SET (&c->dead_or_set);
530 else
532 c = unused_insn_chains;
533 unused_insn_chains = c->next;
535 c->is_caller_save_insn = 0;
536 c->need_operand_change = 0;
537 c->need_reload = 0;
538 c->need_elim = 0;
539 return c;
542 /* Small utility function to set all regs in hard reg set TO which are
543 allocated to pseudos in regset FROM. */
545 void
546 compute_use_by_pseudos (HARD_REG_SET *to, regset from)
548 unsigned int regno;
549 reg_set_iterator rsi;
551 EXECUTE_IF_SET_IN_REG_SET (from, FIRST_PSEUDO_REGISTER, regno, rsi)
553 int r = reg_renumber[regno];
555 if (r < 0)
557 /* reload_combine uses the information from DF_LIVE_IN,
558 which might still contain registers that have not
559 actually been allocated since they have an
560 equivalence. */
561 gcc_assert (ira_conflicts_p || reload_completed);
563 else
564 add_to_hard_reg_set (to, PSEUDO_REGNO_MODE (regno), r);
568 /* Replace all pseudos found in LOC with their corresponding
569 equivalences. */
571 static void
572 replace_pseudos_in (rtx *loc, enum machine_mode mem_mode, rtx usage)
574 rtx x = *loc;
575 enum rtx_code code;
576 const char *fmt;
577 int i, j;
579 if (! x)
580 return;
582 code = GET_CODE (x);
583 if (code == REG)
585 unsigned int regno = REGNO (x);
587 if (regno < FIRST_PSEUDO_REGISTER)
588 return;
590 x = eliminate_regs (x, mem_mode, usage);
591 if (x != *loc)
593 *loc = x;
594 replace_pseudos_in (loc, mem_mode, usage);
595 return;
598 if (reg_equiv_constant[regno])
599 *loc = reg_equiv_constant[regno];
600 else if (reg_equiv_mem[regno])
601 *loc = reg_equiv_mem[regno];
602 else if (reg_equiv_address[regno])
603 *loc = gen_rtx_MEM (GET_MODE (x), reg_equiv_address[regno]);
604 else
606 gcc_assert (!REG_P (regno_reg_rtx[regno])
607 || REGNO (regno_reg_rtx[regno]) != regno);
608 *loc = regno_reg_rtx[regno];
611 return;
613 else if (code == MEM)
615 replace_pseudos_in (& XEXP (x, 0), GET_MODE (x), usage);
616 return;
619 /* Process each of our operands recursively. */
620 fmt = GET_RTX_FORMAT (code);
621 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
622 if (*fmt == 'e')
623 replace_pseudos_in (&XEXP (x, i), mem_mode, usage);
624 else if (*fmt == 'E')
625 for (j = 0; j < XVECLEN (x, i); j++)
626 replace_pseudos_in (& XVECEXP (x, i, j), mem_mode, usage);
629 /* Determine if the current function has an exception receiver block
630 that reaches the exit block via non-exceptional edges */
632 static bool
633 has_nonexceptional_receiver (void)
635 edge e;
636 edge_iterator ei;
637 basic_block *tos, *worklist, bb;
639 /* If we're not optimizing, then just err on the safe side. */
640 if (!optimize)
641 return true;
643 /* First determine which blocks can reach exit via normal paths. */
644 tos = worklist = XNEWVEC (basic_block, n_basic_blocks + 1);
646 FOR_EACH_BB (bb)
647 bb->flags &= ~BB_REACHABLE;
649 /* Place the exit block on our worklist. */
650 EXIT_BLOCK_PTR->flags |= BB_REACHABLE;
651 *tos++ = EXIT_BLOCK_PTR;
653 /* Iterate: find everything reachable from what we've already seen. */
654 while (tos != worklist)
656 bb = *--tos;
658 FOR_EACH_EDGE (e, ei, bb->preds)
659 if (!(e->flags & EDGE_ABNORMAL))
661 basic_block src = e->src;
663 if (!(src->flags & BB_REACHABLE))
665 src->flags |= BB_REACHABLE;
666 *tos++ = src;
670 free (worklist);
672 /* Now see if there's a reachable block with an exceptional incoming
673 edge. */
674 FOR_EACH_BB (bb)
675 if (bb->flags & BB_REACHABLE)
676 FOR_EACH_EDGE (e, ei, bb->preds)
677 if (e->flags & EDGE_ABNORMAL)
678 return true;
680 /* No exceptional block reached exit unexceptionally. */
681 return false;
685 /* Global variables used by reload and its subroutines. */
687 /* Set during calculate_needs if an insn needs register elimination. */
688 static int something_needs_elimination;
689 /* Set during calculate_needs if an insn needs an operand changed. */
690 static int something_needs_operands_changed;
692 /* Nonzero means we couldn't get enough spill regs. */
693 static int failure;
695 /* Temporary array of pseudo-register number. */
696 static int *temp_pseudo_reg_arr;
698 /* Main entry point for the reload pass.
700 FIRST is the first insn of the function being compiled.
702 GLOBAL nonzero means we were called from global_alloc
703 and should attempt to reallocate any pseudoregs that we
704 displace from hard regs we will use for reloads.
705 If GLOBAL is zero, we do not have enough information to do that,
706 so any pseudo reg that is spilled must go to the stack.
708 Return value is nonzero if reload failed
709 and we must not do any more for this function. */
712 reload (rtx first, int global)
714 int i, n;
715 rtx insn;
716 struct elim_table *ep;
717 basic_block bb;
719 /* Make sure even insns with volatile mem refs are recognizable. */
720 init_recog ();
722 failure = 0;
724 reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
726 /* Make sure that the last insn in the chain
727 is not something that needs reloading. */
728 emit_note (NOTE_INSN_DELETED);
730 /* Enable find_equiv_reg to distinguish insns made by reload. */
731 reload_first_uid = get_max_uid ();
733 #ifdef SECONDARY_MEMORY_NEEDED
734 /* Initialize the secondary memory table. */
735 clear_secondary_mem ();
736 #endif
738 /* We don't have a stack slot for any spill reg yet. */
739 memset (spill_stack_slot, 0, sizeof spill_stack_slot);
740 memset (spill_stack_slot_width, 0, sizeof spill_stack_slot_width);
742 /* Initialize the save area information for caller-save, in case some
743 are needed. */
744 init_save_areas ();
746 /* Compute which hard registers are now in use
747 as homes for pseudo registers.
748 This is done here rather than (eg) in global_alloc
749 because this point is reached even if not optimizing. */
750 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
751 mark_home_live (i);
753 /* A function that has a nonlocal label that can reach the exit
754 block via non-exceptional paths must save all call-saved
755 registers. */
756 if (cfun->has_nonlocal_label
757 && has_nonexceptional_receiver ())
758 crtl->saves_all_registers = 1;
760 if (crtl->saves_all_registers)
761 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
762 if (! call_used_regs[i] && ! fixed_regs[i] && ! LOCAL_REGNO (i))
763 df_set_regs_ever_live (i, true);
765 /* Find all the pseudo registers that didn't get hard regs
766 but do have known equivalent constants or memory slots.
767 These include parameters (known equivalent to parameter slots)
768 and cse'd or loop-moved constant memory addresses.
770 Record constant equivalents in reg_equiv_constant
771 so they will be substituted by find_reloads.
772 Record memory equivalents in reg_mem_equiv so they can
773 be substituted eventually by altering the REG-rtx's. */
775 reg_equiv_constant = XCNEWVEC (rtx, max_regno);
776 reg_equiv_invariant = XCNEWVEC (rtx, max_regno);
777 reg_equiv_mem = XCNEWVEC (rtx, max_regno);
778 reg_equiv_alt_mem_list = XCNEWVEC (rtx, max_regno);
779 reg_equiv_address = XCNEWVEC (rtx, max_regno);
780 reg_max_ref_width = XCNEWVEC (unsigned int, max_regno);
781 reg_old_renumber = XCNEWVEC (short, max_regno);
782 memcpy (reg_old_renumber, reg_renumber, max_regno * sizeof (short));
783 pseudo_forbidden_regs = XNEWVEC (HARD_REG_SET, max_regno);
784 pseudo_previous_regs = XCNEWVEC (HARD_REG_SET, max_regno);
786 CLEAR_HARD_REG_SET (bad_spill_regs_global);
788 /* Look for REG_EQUIV notes; record what each pseudo is equivalent
789 to. Also find all paradoxical subregs and find largest such for
790 each pseudo. */
792 num_eliminable_invariants = 0;
793 for (insn = first; insn; insn = NEXT_INSN (insn))
795 rtx set = single_set (insn);
797 /* We may introduce USEs that we want to remove at the end, so
798 we'll mark them with QImode. Make sure there are no
799 previously-marked insns left by say regmove. */
800 if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
801 && GET_MODE (insn) != VOIDmode)
802 PUT_MODE (insn, VOIDmode);
804 if (NONDEBUG_INSN_P (insn))
805 scan_paradoxical_subregs (PATTERN (insn));
807 if (set != 0 && REG_P (SET_DEST (set)))
809 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
810 rtx x;
812 if (! note)
813 continue;
815 i = REGNO (SET_DEST (set));
816 x = XEXP (note, 0);
818 if (i <= LAST_VIRTUAL_REGISTER)
819 continue;
821 if (! function_invariant_p (x)
822 || ! flag_pic
823 /* A function invariant is often CONSTANT_P but may
824 include a register. We promise to only pass
825 CONSTANT_P objects to LEGITIMATE_PIC_OPERAND_P. */
826 || (CONSTANT_P (x)
827 && LEGITIMATE_PIC_OPERAND_P (x)))
829 /* It can happen that a REG_EQUIV note contains a MEM
830 that is not a legitimate memory operand. As later
831 stages of reload assume that all addresses found
832 in the reg_equiv_* arrays were originally legitimate,
833 we ignore such REG_EQUIV notes. */
834 if (memory_operand (x, VOIDmode))
836 /* Always unshare the equivalence, so we can
837 substitute into this insn without touching the
838 equivalence. */
839 reg_equiv_memory_loc[i] = copy_rtx (x);
841 else if (function_invariant_p (x))
843 if (GET_CODE (x) == PLUS)
845 /* This is PLUS of frame pointer and a constant,
846 and might be shared. Unshare it. */
847 reg_equiv_invariant[i] = copy_rtx (x);
848 num_eliminable_invariants++;
850 else if (x == frame_pointer_rtx || x == arg_pointer_rtx)
852 reg_equiv_invariant[i] = x;
853 num_eliminable_invariants++;
855 else if (LEGITIMATE_CONSTANT_P (x))
856 reg_equiv_constant[i] = x;
857 else
859 reg_equiv_memory_loc[i]
860 = force_const_mem (GET_MODE (SET_DEST (set)), x);
861 if (! reg_equiv_memory_loc[i])
862 reg_equiv_init[i] = NULL_RTX;
865 else
867 reg_equiv_init[i] = NULL_RTX;
868 continue;
871 else
872 reg_equiv_init[i] = NULL_RTX;
876 if (dump_file)
877 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
878 if (reg_equiv_init[i])
880 fprintf (dump_file, "init_insns for %u: ", i);
881 print_inline_rtx (dump_file, reg_equiv_init[i], 20);
882 fprintf (dump_file, "\n");
885 init_elim_table ();
887 first_label_num = get_first_label_num ();
888 num_labels = max_label_num () - first_label_num;
890 /* Allocate the tables used to store offset information at labels. */
891 /* We used to use alloca here, but the size of what it would try to
892 allocate would occasionally cause it to exceed the stack limit and
893 cause a core dump. */
894 offsets_known_at = XNEWVEC (char, num_labels);
895 offsets_at = (HOST_WIDE_INT (*)[NUM_ELIMINABLE_REGS]) xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (HOST_WIDE_INT));
897 /* Alter each pseudo-reg rtx to contain its hard reg number. Assign
898 stack slots to the pseudos that lack hard regs or equivalents.
899 Do not touch virtual registers. */
901 temp_pseudo_reg_arr = XNEWVEC (int, max_regno - LAST_VIRTUAL_REGISTER - 1);
902 for (n = 0, i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
903 temp_pseudo_reg_arr[n++] = i;
905 if (ira_conflicts_p)
906 /* Ask IRA to order pseudo-registers for better stack slot
907 sharing. */
908 ira_sort_regnos_for_alter_reg (temp_pseudo_reg_arr, n, reg_max_ref_width);
910 for (i = 0; i < n; i++)
911 alter_reg (temp_pseudo_reg_arr[i], -1, false);
913 /* If we have some registers we think can be eliminated, scan all insns to
914 see if there is an insn that sets one of these registers to something
915 other than itself plus a constant. If so, the register cannot be
916 eliminated. Doing this scan here eliminates an extra pass through the
917 main reload loop in the most common case where register elimination
918 cannot be done. */
919 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
920 if (INSN_P (insn))
921 note_stores (PATTERN (insn), mark_not_eliminable, NULL);
923 maybe_fix_stack_asms ();
925 insns_need_reload = 0;
926 something_needs_elimination = 0;
928 /* Initialize to -1, which means take the first spill register. */
929 last_spill_reg = -1;
931 /* Spill any hard regs that we know we can't eliminate. */
932 CLEAR_HARD_REG_SET (used_spill_regs);
933 /* There can be multiple ways to eliminate a register;
934 they should be listed adjacently.
935 Elimination for any register fails only if all possible ways fail. */
936 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; )
938 int from = ep->from;
939 int can_eliminate = 0;
942 can_eliminate |= ep->can_eliminate;
943 ep++;
945 while (ep < &reg_eliminate[NUM_ELIMINABLE_REGS] && ep->from == from);
946 if (! can_eliminate)
947 spill_hard_reg (from, 1);
950 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
951 if (frame_pointer_needed)
952 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, 1);
953 #endif
954 finish_spills (global);
956 /* From now on, we may need to generate moves differently. We may also
957 allow modifications of insns which cause them to not be recognized.
958 Any such modifications will be cleaned up during reload itself. */
959 reload_in_progress = 1;
961 /* This loop scans the entire function each go-round
962 and repeats until one repetition spills no additional hard regs. */
963 for (;;)
965 int something_changed;
966 int did_spill;
967 HOST_WIDE_INT starting_frame_size;
969 starting_frame_size = get_frame_size ();
971 set_initial_elim_offsets ();
972 set_initial_label_offsets ();
974 /* For each pseudo register that has an equivalent location defined,
975 try to eliminate any eliminable registers (such as the frame pointer)
976 assuming initial offsets for the replacement register, which
977 is the normal case.
979 If the resulting location is directly addressable, substitute
980 the MEM we just got directly for the old REG.
982 If it is not addressable but is a constant or the sum of a hard reg
983 and constant, it is probably not addressable because the constant is
984 out of range, in that case record the address; we will generate
985 hairy code to compute the address in a register each time it is
986 needed. Similarly if it is a hard register, but one that is not
987 valid as an address register.
989 If the location is not addressable, but does not have one of the
990 above forms, assign a stack slot. We have to do this to avoid the
991 potential of producing lots of reloads if, e.g., a location involves
992 a pseudo that didn't get a hard register and has an equivalent memory
993 location that also involves a pseudo that didn't get a hard register.
995 Perhaps at some point we will improve reload_when_needed handling
996 so this problem goes away. But that's very hairy. */
998 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
999 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
1001 rtx x = eliminate_regs (reg_equiv_memory_loc[i], VOIDmode,
1002 NULL_RTX);
1004 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
1005 XEXP (x, 0)))
1006 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
1007 else if (CONSTANT_P (XEXP (x, 0))
1008 || (REG_P (XEXP (x, 0))
1009 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
1010 || (GET_CODE (XEXP (x, 0)) == PLUS
1011 && REG_P (XEXP (XEXP (x, 0), 0))
1012 && (REGNO (XEXP (XEXP (x, 0), 0))
1013 < FIRST_PSEUDO_REGISTER)
1014 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
1015 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
1016 else
1018 /* Make a new stack slot. Then indicate that something
1019 changed so we go back and recompute offsets for
1020 eliminable registers because the allocation of memory
1021 below might change some offset. reg_equiv_{mem,address}
1022 will be set up for this pseudo on the next pass around
1023 the loop. */
1024 reg_equiv_memory_loc[i] = 0;
1025 reg_equiv_init[i] = 0;
1026 alter_reg (i, -1, true);
1030 if (caller_save_needed)
1031 setup_save_areas ();
1033 /* If we allocated another stack slot, redo elimination bookkeeping. */
1034 if (starting_frame_size != get_frame_size ())
1035 continue;
1036 if (starting_frame_size && crtl->stack_alignment_needed)
1038 /* If we have a stack frame, we must align it now. The
1039 stack size may be a part of the offset computation for
1040 register elimination. So if this changes the stack size,
1041 then repeat the elimination bookkeeping. We don't
1042 realign when there is no stack, as that will cause a
1043 stack frame when none is needed should
1044 STARTING_FRAME_OFFSET not be already aligned to
1045 STACK_BOUNDARY. */
1046 assign_stack_local (BLKmode, 0, crtl->stack_alignment_needed);
1047 if (starting_frame_size != get_frame_size ())
1048 continue;
1051 if (caller_save_needed)
1053 save_call_clobbered_regs ();
1054 /* That might have allocated new insn_chain structures. */
1055 reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
1058 calculate_needs_all_insns (global);
1060 if (! ira_conflicts_p)
1061 /* Don't do it for IRA. We need this info because we don't
1062 change live_throughout and dead_or_set for chains when IRA
1063 is used. */
1064 CLEAR_REG_SET (&spilled_pseudos);
1066 did_spill = 0;
1068 something_changed = 0;
1070 /* If we allocated any new memory locations, make another pass
1071 since it might have changed elimination offsets. */
1072 if (starting_frame_size != get_frame_size ())
1073 something_changed = 1;
1075 /* Even if the frame size remained the same, we might still have
1076 changed elimination offsets, e.g. if find_reloads called
1077 force_const_mem requiring the back end to allocate a constant
1078 pool base register that needs to be saved on the stack. */
1079 else if (!verify_initial_elim_offsets ())
1080 something_changed = 1;
1083 HARD_REG_SET to_spill;
1084 CLEAR_HARD_REG_SET (to_spill);
1085 update_eliminables (&to_spill);
1086 AND_COMPL_HARD_REG_SET (used_spill_regs, to_spill);
1088 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1089 if (TEST_HARD_REG_BIT (to_spill, i))
1091 spill_hard_reg (i, 1);
1092 did_spill = 1;
1094 /* Regardless of the state of spills, if we previously had
1095 a register that we thought we could eliminate, but now can
1096 not eliminate, we must run another pass.
1098 Consider pseudos which have an entry in reg_equiv_* which
1099 reference an eliminable register. We must make another pass
1100 to update reg_equiv_* so that we do not substitute in the
1101 old value from when we thought the elimination could be
1102 performed. */
1103 something_changed = 1;
1107 select_reload_regs ();
1108 if (failure)
1109 goto failed;
1111 if (insns_need_reload != 0 || did_spill)
1112 something_changed |= finish_spills (global);
1114 if (! something_changed)
1115 break;
1117 if (caller_save_needed)
1118 delete_caller_save_insns ();
1120 obstack_free (&reload_obstack, reload_firstobj);
1123 /* If global-alloc was run, notify it of any register eliminations we have
1124 done. */
1125 if (global)
1126 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1127 if (ep->can_eliminate)
1128 mark_elimination (ep->from, ep->to);
1130 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1131 If that insn didn't set the register (i.e., it copied the register to
1132 memory), just delete that insn instead of the equivalencing insn plus
1133 anything now dead. If we call delete_dead_insn on that insn, we may
1134 delete the insn that actually sets the register if the register dies
1135 there and that is incorrect. */
1137 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1139 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0)
1141 rtx list;
1142 for (list = reg_equiv_init[i]; list; list = XEXP (list, 1))
1144 rtx equiv_insn = XEXP (list, 0);
1146 /* If we already deleted the insn or if it may trap, we can't
1147 delete it. The latter case shouldn't happen, but can
1148 if an insn has a variable address, gets a REG_EH_REGION
1149 note added to it, and then gets converted into a load
1150 from a constant address. */
1151 if (NOTE_P (equiv_insn)
1152 || can_throw_internal (equiv_insn))
1154 else if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn)))
1155 delete_dead_insn (equiv_insn);
1156 else
1157 SET_INSN_DELETED (equiv_insn);
1162 /* Use the reload registers where necessary
1163 by generating move instructions to move the must-be-register
1164 values into or out of the reload registers. */
1166 if (insns_need_reload != 0 || something_needs_elimination
1167 || something_needs_operands_changed)
1169 HOST_WIDE_INT old_frame_size = get_frame_size ();
1171 reload_as_needed (global);
1173 gcc_assert (old_frame_size == get_frame_size ());
1175 gcc_assert (verify_initial_elim_offsets ());
1178 /* If we were able to eliminate the frame pointer, show that it is no
1179 longer live at the start of any basic block. If it ls live by
1180 virtue of being in a pseudo, that pseudo will be marked live
1181 and hence the frame pointer will be known to be live via that
1182 pseudo. */
1184 if (! frame_pointer_needed)
1185 FOR_EACH_BB (bb)
1186 bitmap_clear_bit (df_get_live_in (bb), HARD_FRAME_POINTER_REGNUM);
1188 /* Come here (with failure set nonzero) if we can't get enough spill
1189 regs. */
1190 failed:
1192 CLEAR_REG_SET (&changed_allocation_pseudos);
1193 CLEAR_REG_SET (&spilled_pseudos);
1194 reload_in_progress = 0;
1196 /* Now eliminate all pseudo regs by modifying them into
1197 their equivalent memory references.
1198 The REG-rtx's for the pseudos are modified in place,
1199 so all insns that used to refer to them now refer to memory.
1201 For a reg that has a reg_equiv_address, all those insns
1202 were changed by reloading so that no insns refer to it any longer;
1203 but the DECL_RTL of a variable decl may refer to it,
1204 and if so this causes the debugging info to mention the variable. */
1206 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1208 rtx addr = 0;
1210 if (reg_equiv_mem[i])
1211 addr = XEXP (reg_equiv_mem[i], 0);
1213 if (reg_equiv_address[i])
1214 addr = reg_equiv_address[i];
1216 if (addr)
1218 if (reg_renumber[i] < 0)
1220 rtx reg = regno_reg_rtx[i];
1222 REG_USERVAR_P (reg) = 0;
1223 PUT_CODE (reg, MEM);
1224 XEXP (reg, 0) = addr;
1225 if (reg_equiv_memory_loc[i])
1226 MEM_COPY_ATTRIBUTES (reg, reg_equiv_memory_loc[i]);
1227 else
1229 MEM_IN_STRUCT_P (reg) = MEM_SCALAR_P (reg) = 0;
1230 MEM_ATTRS (reg) = 0;
1232 MEM_NOTRAP_P (reg) = 1;
1234 else if (reg_equiv_mem[i])
1235 XEXP (reg_equiv_mem[i], 0) = addr;
1238 /* We don't want complex addressing modes in debug insns
1239 if simpler ones will do, so delegitimize equivalences
1240 in debug insns. */
1241 if (MAY_HAVE_DEBUG_INSNS && reg_renumber[i] < 0)
1243 rtx reg = regno_reg_rtx[i];
1244 rtx equiv = 0;
1245 df_ref use;
1247 if (reg_equiv_constant[i])
1248 equiv = reg_equiv_constant[i];
1249 else if (reg_equiv_invariant[i])
1250 equiv = reg_equiv_invariant[i];
1251 else if (reg && MEM_P (reg))
1253 equiv = targetm.delegitimize_address (reg);
1254 if (equiv == reg)
1255 equiv = 0;
1257 else if (reg && REG_P (reg) && (int)REGNO (reg) != i)
1258 equiv = reg;
1260 if (equiv)
1261 for (use = DF_REG_USE_CHAIN (i); use;
1262 use = DF_REF_NEXT_REG (use))
1263 if (DEBUG_INSN_P (DF_REF_INSN (use)))
1265 rtx *loc = DF_REF_LOC (use);
1266 rtx x = *loc;
1268 if (x == reg)
1269 *loc = copy_rtx (equiv);
1270 else if (GET_CODE (x) == SUBREG
1271 && SUBREG_REG (x) == reg)
1272 *loc = simplify_gen_subreg (GET_MODE (x), equiv,
1273 GET_MODE (reg),
1274 SUBREG_BYTE (x));
1275 else
1276 gcc_unreachable ();
1281 /* We must set reload_completed now since the cleanup_subreg_operands call
1282 below will re-recognize each insn and reload may have generated insns
1283 which are only valid during and after reload. */
1284 reload_completed = 1;
1286 /* Make a pass over all the insns and delete all USEs which we inserted
1287 only to tag a REG_EQUAL note on them. Remove all REG_DEAD and REG_UNUSED
1288 notes. Delete all CLOBBER insns, except those that refer to the return
1289 value and the special mem:BLK CLOBBERs added to prevent the scheduler
1290 from misarranging variable-array code, and simplify (subreg (reg))
1291 operands. Strip and regenerate REG_INC notes that may have been moved
1292 around. */
1294 for (insn = first; insn; insn = NEXT_INSN (insn))
1295 if (INSN_P (insn))
1297 rtx *pnote;
1299 if (CALL_P (insn))
1300 replace_pseudos_in (& CALL_INSN_FUNCTION_USAGE (insn),
1301 VOIDmode, CALL_INSN_FUNCTION_USAGE (insn));
1303 if ((GET_CODE (PATTERN (insn)) == USE
1304 /* We mark with QImode USEs introduced by reload itself. */
1305 && (GET_MODE (insn) == QImode
1306 || find_reg_note (insn, REG_EQUAL, NULL_RTX)))
1307 || (GET_CODE (PATTERN (insn)) == CLOBBER
1308 && (!MEM_P (XEXP (PATTERN (insn), 0))
1309 || GET_MODE (XEXP (PATTERN (insn), 0)) != BLKmode
1310 || (GET_CODE (XEXP (XEXP (PATTERN (insn), 0), 0)) != SCRATCH
1311 && XEXP (XEXP (PATTERN (insn), 0), 0)
1312 != stack_pointer_rtx))
1313 && (!REG_P (XEXP (PATTERN (insn), 0))
1314 || ! REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))))
1316 delete_insn (insn);
1317 continue;
1320 /* Some CLOBBERs may survive until here and still reference unassigned
1321 pseudos with const equivalent, which may in turn cause ICE in later
1322 passes if the reference remains in place. */
1323 if (GET_CODE (PATTERN (insn)) == CLOBBER)
1324 replace_pseudos_in (& XEXP (PATTERN (insn), 0),
1325 VOIDmode, PATTERN (insn));
1327 /* Discard obvious no-ops, even without -O. This optimization
1328 is fast and doesn't interfere with debugging. */
1329 if (NONJUMP_INSN_P (insn)
1330 && GET_CODE (PATTERN (insn)) == SET
1331 && REG_P (SET_SRC (PATTERN (insn)))
1332 && REG_P (SET_DEST (PATTERN (insn)))
1333 && (REGNO (SET_SRC (PATTERN (insn)))
1334 == REGNO (SET_DEST (PATTERN (insn)))))
1336 delete_insn (insn);
1337 continue;
1340 pnote = &REG_NOTES (insn);
1341 while (*pnote != 0)
1343 if (REG_NOTE_KIND (*pnote) == REG_DEAD
1344 || REG_NOTE_KIND (*pnote) == REG_UNUSED
1345 || REG_NOTE_KIND (*pnote) == REG_INC)
1346 *pnote = XEXP (*pnote, 1);
1347 else
1348 pnote = &XEXP (*pnote, 1);
1351 #ifdef AUTO_INC_DEC
1352 add_auto_inc_notes (insn, PATTERN (insn));
1353 #endif
1355 /* Simplify (subreg (reg)) if it appears as an operand. */
1356 cleanup_subreg_operands (insn);
1358 /* Clean up invalid ASMs so that they don't confuse later passes.
1359 See PR 21299. */
1360 if (asm_noperands (PATTERN (insn)) >= 0)
1362 extract_insn (insn);
1363 if (!constrain_operands (1))
1365 error_for_asm (insn,
1366 "%<asm%> operand has impossible constraints");
1367 delete_insn (insn);
1368 continue;
1373 /* If we are doing generic stack checking, give a warning if this
1374 function's frame size is larger than we expect. */
1375 if (flag_stack_check == GENERIC_STACK_CHECK)
1377 HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
1378 static int verbose_warned = 0;
1380 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1381 if (df_regs_ever_live_p (i) && ! fixed_regs[i] && call_used_regs[i])
1382 size += UNITS_PER_WORD;
1384 if (size > STACK_CHECK_MAX_FRAME_SIZE)
1386 warning (0, "frame size too large for reliable stack checking");
1387 if (! verbose_warned)
1389 warning (0, "try reducing the number of local variables");
1390 verbose_warned = 1;
1395 /* Indicate that we no longer have known memory locations or constants. */
1396 if (reg_equiv_constant)
1397 free (reg_equiv_constant);
1398 if (reg_equiv_invariant)
1399 free (reg_equiv_invariant);
1400 reg_equiv_constant = 0;
1401 reg_equiv_invariant = 0;
1402 VEC_free (rtx, gc, reg_equiv_memory_loc_vec);
1403 reg_equiv_memory_loc = 0;
1405 free (temp_pseudo_reg_arr);
1407 if (offsets_known_at)
1408 free (offsets_known_at);
1409 if (offsets_at)
1410 free (offsets_at);
1412 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1413 if (reg_equiv_alt_mem_list[i])
1414 free_EXPR_LIST_list (&reg_equiv_alt_mem_list[i]);
1415 free (reg_equiv_alt_mem_list);
1417 free (reg_equiv_mem);
1418 reg_equiv_init = 0;
1419 free (reg_equiv_address);
1420 free (reg_max_ref_width);
1421 free (reg_old_renumber);
1422 free (pseudo_previous_regs);
1423 free (pseudo_forbidden_regs);
1425 CLEAR_HARD_REG_SET (used_spill_regs);
1426 for (i = 0; i < n_spills; i++)
1427 SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
1429 /* Free all the insn_chain structures at once. */
1430 obstack_free (&reload_obstack, reload_startobj);
1431 unused_insn_chains = 0;
1432 fixup_abnormal_edges ();
1434 /* Replacing pseudos with their memory equivalents might have
1435 created shared rtx. Subsequent passes would get confused
1436 by this, so unshare everything here. */
1437 unshare_all_rtl_again (first);
1439 #ifdef STACK_BOUNDARY
1440 /* init_emit has set the alignment of the hard frame pointer
1441 to STACK_BOUNDARY. It is very likely no longer valid if
1442 the hard frame pointer was used for register allocation. */
1443 if (!frame_pointer_needed)
1444 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = BITS_PER_UNIT;
1445 #endif
1447 return failure;
1450 /* Yet another special case. Unfortunately, reg-stack forces people to
1451 write incorrect clobbers in asm statements. These clobbers must not
1452 cause the register to appear in bad_spill_regs, otherwise we'll call
1453 fatal_insn later. We clear the corresponding regnos in the live
1454 register sets to avoid this.
1455 The whole thing is rather sick, I'm afraid. */
1457 static void
1458 maybe_fix_stack_asms (void)
1460 #ifdef STACK_REGS
1461 const char *constraints[MAX_RECOG_OPERANDS];
1462 enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
1463 struct insn_chain *chain;
1465 for (chain = reload_insn_chain; chain != 0; chain = chain->next)
1467 int i, noperands;
1468 HARD_REG_SET clobbered, allowed;
1469 rtx pat;
1471 if (! INSN_P (chain->insn)
1472 || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
1473 continue;
1474 pat = PATTERN (chain->insn);
1475 if (GET_CODE (pat) != PARALLEL)
1476 continue;
1478 CLEAR_HARD_REG_SET (clobbered);
1479 CLEAR_HARD_REG_SET (allowed);
1481 /* First, make a mask of all stack regs that are clobbered. */
1482 for (i = 0; i < XVECLEN (pat, 0); i++)
1484 rtx t = XVECEXP (pat, 0, i);
1485 if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
1486 SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
1489 /* Get the operand values and constraints out of the insn. */
1490 decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc,
1491 constraints, operand_mode, NULL);
1493 /* For every operand, see what registers are allowed. */
1494 for (i = 0; i < noperands; i++)
1496 const char *p = constraints[i];
1497 /* For every alternative, we compute the class of registers allowed
1498 for reloading in CLS, and merge its contents into the reg set
1499 ALLOWED. */
1500 int cls = (int) NO_REGS;
1502 for (;;)
1504 char c = *p;
1506 if (c == '\0' || c == ',' || c == '#')
1508 /* End of one alternative - mark the regs in the current
1509 class, and reset the class. */
1510 IOR_HARD_REG_SET (allowed, reg_class_contents[cls]);
1511 cls = NO_REGS;
1512 p++;
1513 if (c == '#')
1514 do {
1515 c = *p++;
1516 } while (c != '\0' && c != ',');
1517 if (c == '\0')
1518 break;
1519 continue;
1522 switch (c)
1524 case '=': case '+': case '*': case '%': case '?': case '!':
1525 case '0': case '1': case '2': case '3': case '4': case '<':
1526 case '>': case 'V': case 'o': case '&': case 'E': case 'F':
1527 case 's': case 'i': case 'n': case 'X': case 'I': case 'J':
1528 case 'K': case 'L': case 'M': case 'N': case 'O': case 'P':
1529 case TARGET_MEM_CONSTRAINT:
1530 break;
1532 case 'p':
1533 cls = (int) reg_class_subunion[cls]
1534 [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
1535 break;
1537 case 'g':
1538 case 'r':
1539 cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
1540 break;
1542 default:
1543 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
1544 cls = (int) reg_class_subunion[cls]
1545 [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
1546 else
1547 cls = (int) reg_class_subunion[cls]
1548 [(int) REG_CLASS_FROM_CONSTRAINT (c, p)];
1550 p += CONSTRAINT_LEN (c, p);
1553 /* Those of the registers which are clobbered, but allowed by the
1554 constraints, must be usable as reload registers. So clear them
1555 out of the life information. */
1556 AND_HARD_REG_SET (allowed, clobbered);
1557 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1558 if (TEST_HARD_REG_BIT (allowed, i))
1560 CLEAR_REGNO_REG_SET (&chain->live_throughout, i);
1561 CLEAR_REGNO_REG_SET (&chain->dead_or_set, i);
1565 #endif
1568 /* Copy the global variables n_reloads and rld into the corresponding elts
1569 of CHAIN. */
1570 static void
1571 copy_reloads (struct insn_chain *chain)
1573 chain->n_reloads = n_reloads;
1574 chain->rld = XOBNEWVEC (&reload_obstack, struct reload, n_reloads);
1575 memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1576 reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
1579 /* Walk the chain of insns, and determine for each whether it needs reloads
1580 and/or eliminations. Build the corresponding insns_need_reload list, and
1581 set something_needs_elimination as appropriate. */
1582 static void
1583 calculate_needs_all_insns (int global)
1585 struct insn_chain **pprev_reload = &insns_need_reload;
1586 struct insn_chain *chain, *next = 0;
1588 something_needs_elimination = 0;
1590 reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
1591 for (chain = reload_insn_chain; chain != 0; chain = next)
1593 rtx insn = chain->insn;
1595 next = chain->next;
1597 /* Clear out the shortcuts. */
1598 chain->n_reloads = 0;
1599 chain->need_elim = 0;
1600 chain->need_reload = 0;
1601 chain->need_operand_change = 0;
1603 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
1604 include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
1605 what effects this has on the known offsets at labels. */
1607 if (LABEL_P (insn) || JUMP_P (insn)
1608 || (INSN_P (insn) && REG_NOTES (insn) != 0))
1609 set_label_offsets (insn, insn, 0);
1611 if (INSN_P (insn))
1613 rtx old_body = PATTERN (insn);
1614 int old_code = INSN_CODE (insn);
1615 rtx old_notes = REG_NOTES (insn);
1616 int did_elimination = 0;
1617 int operands_changed = 0;
1618 rtx set = single_set (insn);
1620 /* Skip insns that only set an equivalence. */
1621 if (set && REG_P (SET_DEST (set))
1622 && reg_renumber[REGNO (SET_DEST (set))] < 0
1623 && (reg_equiv_constant[REGNO (SET_DEST (set))]
1624 || (reg_equiv_invariant[REGNO (SET_DEST (set))]))
1625 && reg_equiv_init[REGNO (SET_DEST (set))])
1626 continue;
1628 /* If needed, eliminate any eliminable registers. */
1629 if (num_eliminable || num_eliminable_invariants)
1630 did_elimination = eliminate_regs_in_insn (insn, 0);
1632 /* Analyze the instruction. */
1633 operands_changed = find_reloads (insn, 0, spill_indirect_levels,
1634 global, spill_reg_order);
1636 /* If a no-op set needs more than one reload, this is likely
1637 to be something that needs input address reloads. We
1638 can't get rid of this cleanly later, and it is of no use
1639 anyway, so discard it now.
1640 We only do this when expensive_optimizations is enabled,
1641 since this complements reload inheritance / output
1642 reload deletion, and it can make debugging harder. */
1643 if (flag_expensive_optimizations && n_reloads > 1)
1645 rtx set = single_set (insn);
1646 if (set
1648 ((SET_SRC (set) == SET_DEST (set)
1649 && REG_P (SET_SRC (set))
1650 && REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER)
1651 || (REG_P (SET_SRC (set)) && REG_P (SET_DEST (set))
1652 && reg_renumber[REGNO (SET_SRC (set))] < 0
1653 && reg_renumber[REGNO (SET_DEST (set))] < 0
1654 && reg_equiv_memory_loc[REGNO (SET_SRC (set))] != NULL
1655 && reg_equiv_memory_loc[REGNO (SET_DEST (set))] != NULL
1656 && rtx_equal_p (reg_equiv_memory_loc
1657 [REGNO (SET_SRC (set))],
1658 reg_equiv_memory_loc
1659 [REGNO (SET_DEST (set))]))))
1661 if (ira_conflicts_p)
1662 /* Inform IRA about the insn deletion. */
1663 ira_mark_memory_move_deletion (REGNO (SET_DEST (set)),
1664 REGNO (SET_SRC (set)));
1665 delete_insn (insn);
1666 /* Delete it from the reload chain. */
1667 if (chain->prev)
1668 chain->prev->next = next;
1669 else
1670 reload_insn_chain = next;
1671 if (next)
1672 next->prev = chain->prev;
1673 chain->next = unused_insn_chains;
1674 unused_insn_chains = chain;
1675 continue;
1678 if (num_eliminable)
1679 update_eliminable_offsets ();
1681 /* Remember for later shortcuts which insns had any reloads or
1682 register eliminations. */
1683 chain->need_elim = did_elimination;
1684 chain->need_reload = n_reloads > 0;
1685 chain->need_operand_change = operands_changed;
1687 /* Discard any register replacements done. */
1688 if (did_elimination)
1690 obstack_free (&reload_obstack, reload_insn_firstobj);
1691 PATTERN (insn) = old_body;
1692 INSN_CODE (insn) = old_code;
1693 REG_NOTES (insn) = old_notes;
1694 something_needs_elimination = 1;
1697 something_needs_operands_changed |= operands_changed;
1699 if (n_reloads != 0)
1701 copy_reloads (chain);
1702 *pprev_reload = chain;
1703 pprev_reload = &chain->next_need_reload;
1707 *pprev_reload = 0;
1710 /* Comparison function for qsort to decide which of two reloads
1711 should be handled first. *P1 and *P2 are the reload numbers. */
1713 static int
1714 reload_reg_class_lower (const void *r1p, const void *r2p)
1716 int r1 = *(const short *) r1p, r2 = *(const short *) r2p;
1717 int t;
1719 /* Consider required reloads before optional ones. */
1720 t = rld[r1].optional - rld[r2].optional;
1721 if (t != 0)
1722 return t;
1724 /* Count all solitary classes before non-solitary ones. */
1725 t = ((reg_class_size[(int) rld[r2].rclass] == 1)
1726 - (reg_class_size[(int) rld[r1].rclass] == 1));
1727 if (t != 0)
1728 return t;
1730 /* Aside from solitaires, consider all multi-reg groups first. */
1731 t = rld[r2].nregs - rld[r1].nregs;
1732 if (t != 0)
1733 return t;
1735 /* Consider reloads in order of increasing reg-class number. */
1736 t = (int) rld[r1].rclass - (int) rld[r2].rclass;
1737 if (t != 0)
1738 return t;
1740 /* If reloads are equally urgent, sort by reload number,
1741 so that the results of qsort leave nothing to chance. */
1742 return r1 - r2;
1745 /* The cost of spilling each hard reg. */
1746 static int spill_cost[FIRST_PSEUDO_REGISTER];
1748 /* When spilling multiple hard registers, we use SPILL_COST for the first
1749 spilled hard reg and SPILL_ADD_COST for subsequent regs. SPILL_ADD_COST
1750 only the first hard reg for a multi-reg pseudo. */
1751 static int spill_add_cost[FIRST_PSEUDO_REGISTER];
1753 /* Map of hard regno to pseudo regno currently occupying the hard
1754 reg. */
1755 static int hard_regno_to_pseudo_regno[FIRST_PSEUDO_REGISTER];
1757 /* Update the spill cost arrays, considering that pseudo REG is live. */
1759 static void
1760 count_pseudo (int reg)
1762 int freq = REG_FREQ (reg);
1763 int r = reg_renumber[reg];
1764 int nregs;
1766 if (REGNO_REG_SET_P (&pseudos_counted, reg)
1767 || REGNO_REG_SET_P (&spilled_pseudos, reg)
1768 /* Ignore spilled pseudo-registers which can be here only if IRA
1769 is used. */
1770 || (ira_conflicts_p && r < 0))
1771 return;
1773 SET_REGNO_REG_SET (&pseudos_counted, reg);
1775 gcc_assert (r >= 0);
1777 spill_add_cost[r] += freq;
1778 nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
1779 while (nregs-- > 0)
1781 hard_regno_to_pseudo_regno[r + nregs] = reg;
1782 spill_cost[r + nregs] += freq;
1786 /* Calculate the SPILL_COST and SPILL_ADD_COST arrays and determine the
1787 contents of BAD_SPILL_REGS for the insn described by CHAIN. */
1789 static void
1790 order_regs_for_reload (struct insn_chain *chain)
1792 unsigned i;
1793 HARD_REG_SET used_by_pseudos;
1794 HARD_REG_SET used_by_pseudos2;
1795 reg_set_iterator rsi;
1797 COPY_HARD_REG_SET (bad_spill_regs, fixed_reg_set);
1799 memset (spill_cost, 0, sizeof spill_cost);
1800 memset (spill_add_cost, 0, sizeof spill_add_cost);
1801 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1802 hard_regno_to_pseudo_regno[i] = -1;
1804 /* Count number of uses of each hard reg by pseudo regs allocated to it
1805 and then order them by decreasing use. First exclude hard registers
1806 that are live in or across this insn. */
1808 REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
1809 REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
1810 IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos);
1811 IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos2);
1813 /* Now find out which pseudos are allocated to it, and update
1814 hard_reg_n_uses. */
1815 CLEAR_REG_SET (&pseudos_counted);
1817 EXECUTE_IF_SET_IN_REG_SET
1818 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
1820 count_pseudo (i);
1822 EXECUTE_IF_SET_IN_REG_SET
1823 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
1825 count_pseudo (i);
1827 CLEAR_REG_SET (&pseudos_counted);
1830 /* Vector of reload-numbers showing the order in which the reloads should
1831 be processed. */
1832 static short reload_order[MAX_RELOADS];
1834 /* This is used to keep track of the spill regs used in one insn. */
1835 static HARD_REG_SET used_spill_regs_local;
1837 /* We decided to spill hard register SPILLED, which has a size of
1838 SPILLED_NREGS. Determine how pseudo REG, which is live during the insn,
1839 is affected. We will add it to SPILLED_PSEUDOS if necessary, and we will
1840 update SPILL_COST/SPILL_ADD_COST. */
1842 static void
1843 count_spilled_pseudo (int spilled, int spilled_nregs, int reg)
1845 int freq = REG_FREQ (reg);
1846 int r = reg_renumber[reg];
1847 int nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
1849 /* Ignore spilled pseudo-registers which can be here only if IRA is
1850 used. */
1851 if ((ira_conflicts_p && r < 0)
1852 || REGNO_REG_SET_P (&spilled_pseudos, reg)
1853 || spilled + spilled_nregs <= r || r + nregs <= spilled)
1854 return;
1856 SET_REGNO_REG_SET (&spilled_pseudos, reg);
1858 spill_add_cost[r] -= freq;
1859 while (nregs-- > 0)
1861 hard_regno_to_pseudo_regno[r + nregs] = -1;
1862 spill_cost[r + nregs] -= freq;
1866 /* Find reload register to use for reload number ORDER. */
1868 static int
1869 find_reg (struct insn_chain *chain, int order)
1871 int rnum = reload_order[order];
1872 struct reload *rl = rld + rnum;
1873 int best_cost = INT_MAX;
1874 int best_reg = -1;
1875 unsigned int i, j, n;
1876 int k;
1877 HARD_REG_SET not_usable;
1878 HARD_REG_SET used_by_other_reload;
1879 reg_set_iterator rsi;
1880 static int regno_pseudo_regs[FIRST_PSEUDO_REGISTER];
1881 static int best_regno_pseudo_regs[FIRST_PSEUDO_REGISTER];
1883 COPY_HARD_REG_SET (not_usable, bad_spill_regs);
1884 IOR_HARD_REG_SET (not_usable, bad_spill_regs_global);
1885 IOR_COMPL_HARD_REG_SET (not_usable, reg_class_contents[rl->rclass]);
1887 CLEAR_HARD_REG_SET (used_by_other_reload);
1888 for (k = 0; k < order; k++)
1890 int other = reload_order[k];
1892 if (rld[other].regno >= 0 && reloads_conflict (other, rnum))
1893 for (j = 0; j < rld[other].nregs; j++)
1894 SET_HARD_REG_BIT (used_by_other_reload, rld[other].regno + j);
1897 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1899 #ifdef REG_ALLOC_ORDER
1900 unsigned int regno = reg_alloc_order[i];
1901 #else
1902 unsigned int regno = i;
1903 #endif
1905 if (! TEST_HARD_REG_BIT (not_usable, regno)
1906 && ! TEST_HARD_REG_BIT (used_by_other_reload, regno)
1907 && HARD_REGNO_MODE_OK (regno, rl->mode))
1909 int this_cost = spill_cost[regno];
1910 int ok = 1;
1911 unsigned int this_nregs = hard_regno_nregs[regno][rl->mode];
1913 for (j = 1; j < this_nregs; j++)
1915 this_cost += spill_add_cost[regno + j];
1916 if ((TEST_HARD_REG_BIT (not_usable, regno + j))
1917 || TEST_HARD_REG_BIT (used_by_other_reload, regno + j))
1918 ok = 0;
1920 if (! ok)
1921 continue;
1923 if (ira_conflicts_p)
1925 /* Ask IRA to find a better pseudo-register for
1926 spilling. */
1927 for (n = j = 0; j < this_nregs; j++)
1929 int r = hard_regno_to_pseudo_regno[regno + j];
1931 if (r < 0)
1932 continue;
1933 if (n == 0 || regno_pseudo_regs[n - 1] != r)
1934 regno_pseudo_regs[n++] = r;
1936 regno_pseudo_regs[n++] = -1;
1937 if (best_reg < 0
1938 || ira_better_spill_reload_regno_p (regno_pseudo_regs,
1939 best_regno_pseudo_regs,
1940 rl->in, rl->out,
1941 chain->insn))
1943 best_reg = regno;
1944 for (j = 0;; j++)
1946 best_regno_pseudo_regs[j] = regno_pseudo_regs[j];
1947 if (regno_pseudo_regs[j] < 0)
1948 break;
1951 continue;
1954 if (rl->in && REG_P (rl->in) && REGNO (rl->in) == regno)
1955 this_cost--;
1956 if (rl->out && REG_P (rl->out) && REGNO (rl->out) == regno)
1957 this_cost--;
1958 if (this_cost < best_cost
1959 /* Among registers with equal cost, prefer caller-saved ones, or
1960 use REG_ALLOC_ORDER if it is defined. */
1961 || (this_cost == best_cost
1962 #ifdef REG_ALLOC_ORDER
1963 && (inv_reg_alloc_order[regno]
1964 < inv_reg_alloc_order[best_reg])
1965 #else
1966 && call_used_regs[regno]
1967 && ! call_used_regs[best_reg]
1968 #endif
1971 best_reg = regno;
1972 best_cost = this_cost;
1976 if (best_reg == -1)
1977 return 0;
1979 if (dump_file)
1980 fprintf (dump_file, "Using reg %d for reload %d\n", best_reg, rnum);
1982 rl->nregs = hard_regno_nregs[best_reg][rl->mode];
1983 rl->regno = best_reg;
1985 EXECUTE_IF_SET_IN_REG_SET
1986 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, j, rsi)
1988 count_spilled_pseudo (best_reg, rl->nregs, j);
1991 EXECUTE_IF_SET_IN_REG_SET
1992 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, j, rsi)
1994 count_spilled_pseudo (best_reg, rl->nregs, j);
1997 for (i = 0; i < rl->nregs; i++)
1999 gcc_assert (spill_cost[best_reg + i] == 0);
2000 gcc_assert (spill_add_cost[best_reg + i] == 0);
2001 gcc_assert (hard_regno_to_pseudo_regno[best_reg + i] == -1);
2002 SET_HARD_REG_BIT (used_spill_regs_local, best_reg + i);
2004 return 1;
2007 /* Find more reload regs to satisfy the remaining need of an insn, which
2008 is given by CHAIN.
2009 Do it by ascending class number, since otherwise a reg
2010 might be spilled for a big class and might fail to count
2011 for a smaller class even though it belongs to that class. */
2013 static void
2014 find_reload_regs (struct insn_chain *chain)
2016 int i;
2018 /* In order to be certain of getting the registers we need,
2019 we must sort the reloads into order of increasing register class.
2020 Then our grabbing of reload registers will parallel the process
2021 that provided the reload registers. */
2022 for (i = 0; i < chain->n_reloads; i++)
2024 /* Show whether this reload already has a hard reg. */
2025 if (chain->rld[i].reg_rtx)
2027 int regno = REGNO (chain->rld[i].reg_rtx);
2028 chain->rld[i].regno = regno;
2029 chain->rld[i].nregs
2030 = hard_regno_nregs[regno][GET_MODE (chain->rld[i].reg_rtx)];
2032 else
2033 chain->rld[i].regno = -1;
2034 reload_order[i] = i;
2037 n_reloads = chain->n_reloads;
2038 memcpy (rld, chain->rld, n_reloads * sizeof (struct reload));
2040 CLEAR_HARD_REG_SET (used_spill_regs_local);
2042 if (dump_file)
2043 fprintf (dump_file, "Spilling for insn %d.\n", INSN_UID (chain->insn));
2045 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
2047 /* Compute the order of preference for hard registers to spill. */
2049 order_regs_for_reload (chain);
2051 for (i = 0; i < n_reloads; i++)
2053 int r = reload_order[i];
2055 /* Ignore reloads that got marked inoperative. */
2056 if ((rld[r].out != 0 || rld[r].in != 0 || rld[r].secondary_p)
2057 && ! rld[r].optional
2058 && rld[r].regno == -1)
2059 if (! find_reg (chain, i))
2061 if (dump_file)
2062 fprintf (dump_file, "reload failure for reload %d\n", r);
2063 spill_failure (chain->insn, rld[r].rclass);
2064 failure = 1;
2065 return;
2069 COPY_HARD_REG_SET (chain->used_spill_regs, used_spill_regs_local);
2070 IOR_HARD_REG_SET (used_spill_regs, used_spill_regs_local);
2072 memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
2075 static void
2076 select_reload_regs (void)
2078 struct insn_chain *chain;
2080 /* Try to satisfy the needs for each insn. */
2081 for (chain = insns_need_reload; chain != 0;
2082 chain = chain->next_need_reload)
2083 find_reload_regs (chain);
2086 /* Delete all insns that were inserted by emit_caller_save_insns during
2087 this iteration. */
2088 static void
2089 delete_caller_save_insns (void)
2091 struct insn_chain *c = reload_insn_chain;
2093 while (c != 0)
2095 while (c != 0 && c->is_caller_save_insn)
2097 struct insn_chain *next = c->next;
2098 rtx insn = c->insn;
2100 if (c == reload_insn_chain)
2101 reload_insn_chain = next;
2102 delete_insn (insn);
2104 if (next)
2105 next->prev = c->prev;
2106 if (c->prev)
2107 c->prev->next = next;
2108 c->next = unused_insn_chains;
2109 unused_insn_chains = c;
2110 c = next;
2112 if (c != 0)
2113 c = c->next;
2117 /* Handle the failure to find a register to spill.
2118 INSN should be one of the insns which needed this particular spill reg. */
2120 static void
2121 spill_failure (rtx insn, enum reg_class rclass)
2123 if (asm_noperands (PATTERN (insn)) >= 0)
2124 error_for_asm (insn, "can't find a register in class %qs while "
2125 "reloading %<asm%>",
2126 reg_class_names[rclass]);
2127 else
2129 error ("unable to find a register to spill in class %qs",
2130 reg_class_names[rclass]);
2132 if (dump_file)
2134 fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
2135 debug_reload_to_stream (dump_file);
2137 fatal_insn ("this is the insn:", insn);
2141 /* Delete an unneeded INSN and any previous insns who sole purpose is loading
2142 data that is dead in INSN. */
2144 static void
2145 delete_dead_insn (rtx insn)
2147 rtx prev = prev_real_insn (insn);
2148 rtx prev_dest;
2150 /* If the previous insn sets a register that dies in our insn, delete it
2151 too. */
2152 if (prev && GET_CODE (PATTERN (prev)) == SET
2153 && (prev_dest = SET_DEST (PATTERN (prev)), REG_P (prev_dest))
2154 && reg_mentioned_p (prev_dest, PATTERN (insn))
2155 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
2156 && ! side_effects_p (SET_SRC (PATTERN (prev))))
2157 delete_dead_insn (prev);
2159 SET_INSN_DELETED (insn);
2162 /* Modify the home of pseudo-reg I.
2163 The new home is present in reg_renumber[I].
2165 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2166 or it may be -1, meaning there is none or it is not relevant.
2167 This is used so that all pseudos spilled from a given hard reg
2168 can share one stack slot. */
2170 static void
2171 alter_reg (int i, int from_reg, bool dont_share_p)
2173 /* When outputting an inline function, this can happen
2174 for a reg that isn't actually used. */
2175 if (regno_reg_rtx[i] == 0)
2176 return;
2178 /* If the reg got changed to a MEM at rtl-generation time,
2179 ignore it. */
2180 if (!REG_P (regno_reg_rtx[i]))
2181 return;
2183 /* Modify the reg-rtx to contain the new hard reg
2184 number or else to contain its pseudo reg number. */
2185 SET_REGNO (regno_reg_rtx[i],
2186 reg_renumber[i] >= 0 ? reg_renumber[i] : i);
2188 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2189 allocate a stack slot for it. */
2191 if (reg_renumber[i] < 0
2192 && REG_N_REFS (i) > 0
2193 && reg_equiv_constant[i] == 0
2194 && (reg_equiv_invariant[i] == 0 || reg_equiv_init[i] == 0)
2195 && reg_equiv_memory_loc[i] == 0)
2197 rtx x = NULL_RTX;
2198 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2199 unsigned int inherent_size = PSEUDO_REGNO_BYTES (i);
2200 unsigned int inherent_align = GET_MODE_ALIGNMENT (mode);
2201 unsigned int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2202 unsigned int min_align = reg_max_ref_width[i] * BITS_PER_UNIT;
2203 int adjust = 0;
2205 if (ira_conflicts_p)
2207 /* Mark the spill for IRA. */
2208 SET_REGNO_REG_SET (&spilled_pseudos, i);
2209 if (!dont_share_p)
2210 x = ira_reuse_stack_slot (i, inherent_size, total_size);
2213 if (x)
2216 /* Each pseudo reg has an inherent size which comes from its own mode,
2217 and a total size which provides room for paradoxical subregs
2218 which refer to the pseudo reg in wider modes.
2220 We can use a slot already allocated if it provides both
2221 enough inherent space and enough total space.
2222 Otherwise, we allocate a new slot, making sure that it has no less
2223 inherent space, and no less total space, then the previous slot. */
2224 else if (from_reg == -1 || (!dont_share_p && ira_conflicts_p))
2226 rtx stack_slot;
2228 /* No known place to spill from => no slot to reuse. */
2229 x = assign_stack_local (mode, total_size,
2230 min_align > inherent_align
2231 || total_size > inherent_size ? -1 : 0);
2233 stack_slot = x;
2235 /* Cancel the big-endian correction done in assign_stack_local.
2236 Get the address of the beginning of the slot. This is so we
2237 can do a big-endian correction unconditionally below. */
2238 if (BYTES_BIG_ENDIAN)
2240 adjust = inherent_size - total_size;
2241 if (adjust)
2242 stack_slot
2243 = adjust_address_nv (x, mode_for_size (total_size
2244 * BITS_PER_UNIT,
2245 MODE_INT, 1),
2246 adjust);
2249 if (! dont_share_p && ira_conflicts_p)
2250 /* Inform IRA about allocation a new stack slot. */
2251 ira_mark_new_stack_slot (stack_slot, i, total_size);
2254 /* Reuse a stack slot if possible. */
2255 else if (spill_stack_slot[from_reg] != 0
2256 && spill_stack_slot_width[from_reg] >= total_size
2257 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2258 >= inherent_size)
2259 && MEM_ALIGN (spill_stack_slot[from_reg]) >= min_align)
2260 x = spill_stack_slot[from_reg];
2262 /* Allocate a bigger slot. */
2263 else
2265 /* Compute maximum size needed, both for inherent size
2266 and for total size. */
2267 rtx stack_slot;
2269 if (spill_stack_slot[from_reg])
2271 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2272 > inherent_size)
2273 mode = GET_MODE (spill_stack_slot[from_reg]);
2274 if (spill_stack_slot_width[from_reg] > total_size)
2275 total_size = spill_stack_slot_width[from_reg];
2276 if (MEM_ALIGN (spill_stack_slot[from_reg]) > min_align)
2277 min_align = MEM_ALIGN (spill_stack_slot[from_reg]);
2280 /* Make a slot with that size. */
2281 x = assign_stack_local (mode, total_size,
2282 min_align > inherent_align
2283 || total_size > inherent_size ? -1 : 0);
2284 stack_slot = x;
2286 /* Cancel the big-endian correction done in assign_stack_local.
2287 Get the address of the beginning of the slot. This is so we
2288 can do a big-endian correction unconditionally below. */
2289 if (BYTES_BIG_ENDIAN)
2291 adjust = GET_MODE_SIZE (mode) - total_size;
2292 if (adjust)
2293 stack_slot
2294 = adjust_address_nv (x, mode_for_size (total_size
2295 * BITS_PER_UNIT,
2296 MODE_INT, 1),
2297 adjust);
2300 spill_stack_slot[from_reg] = stack_slot;
2301 spill_stack_slot_width[from_reg] = total_size;
2304 /* On a big endian machine, the "address" of the slot
2305 is the address of the low part that fits its inherent mode. */
2306 if (BYTES_BIG_ENDIAN && inherent_size < total_size)
2307 adjust += (total_size - inherent_size);
2309 /* If we have any adjustment to make, or if the stack slot is the
2310 wrong mode, make a new stack slot. */
2311 x = adjust_address_nv (x, GET_MODE (regno_reg_rtx[i]), adjust);
2313 /* Set all of the memory attributes as appropriate for a spill. */
2314 set_mem_attrs_for_spill (x);
2316 /* Save the stack slot for later. */
2317 reg_equiv_memory_loc[i] = x;
2321 /* Mark the slots in regs_ever_live for the hard regs used by
2322 pseudo-reg number REGNO, accessed in MODE. */
2324 static void
2325 mark_home_live_1 (int regno, enum machine_mode mode)
2327 int i, lim;
2329 i = reg_renumber[regno];
2330 if (i < 0)
2331 return;
2332 lim = end_hard_regno (mode, i);
2333 while (i < lim)
2334 df_set_regs_ever_live(i++, true);
2337 /* Mark the slots in regs_ever_live for the hard regs
2338 used by pseudo-reg number REGNO. */
2340 void
2341 mark_home_live (int regno)
2343 if (reg_renumber[regno] >= 0)
2344 mark_home_live_1 (regno, PSEUDO_REGNO_MODE (regno));
2347 /* This function handles the tracking of elimination offsets around branches.
2349 X is a piece of RTL being scanned.
2351 INSN is the insn that it came from, if any.
2353 INITIAL_P is nonzero if we are to set the offset to be the initial
2354 offset and zero if we are setting the offset of the label to be the
2355 current offset. */
2357 static void
2358 set_label_offsets (rtx x, rtx insn, int initial_p)
2360 enum rtx_code code = GET_CODE (x);
2361 rtx tem;
2362 unsigned int i;
2363 struct elim_table *p;
2365 switch (code)
2367 case LABEL_REF:
2368 if (LABEL_REF_NONLOCAL_P (x))
2369 return;
2371 x = XEXP (x, 0);
2373 /* ... fall through ... */
2375 case CODE_LABEL:
2376 /* If we know nothing about this label, set the desired offsets. Note
2377 that this sets the offset at a label to be the offset before a label
2378 if we don't know anything about the label. This is not correct for
2379 the label after a BARRIER, but is the best guess we can make. If
2380 we guessed wrong, we will suppress an elimination that might have
2381 been possible had we been able to guess correctly. */
2383 if (! offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num])
2385 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2386 offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
2387 = (initial_p ? reg_eliminate[i].initial_offset
2388 : reg_eliminate[i].offset);
2389 offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num] = 1;
2392 /* Otherwise, if this is the definition of a label and it is
2393 preceded by a BARRIER, set our offsets to the known offset of
2394 that label. */
2396 else if (x == insn
2397 && (tem = prev_nonnote_insn (insn)) != 0
2398 && BARRIER_P (tem))
2399 set_offsets_for_label (insn);
2400 else
2401 /* If neither of the above cases is true, compare each offset
2402 with those previously recorded and suppress any eliminations
2403 where the offsets disagree. */
2405 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2406 if (offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
2407 != (initial_p ? reg_eliminate[i].initial_offset
2408 : reg_eliminate[i].offset))
2409 reg_eliminate[i].can_eliminate = 0;
2411 return;
2413 case JUMP_INSN:
2414 set_label_offsets (PATTERN (insn), insn, initial_p);
2416 /* ... fall through ... */
2418 case INSN:
2419 case CALL_INSN:
2420 /* Any labels mentioned in REG_LABEL_OPERAND notes can be branched
2421 to indirectly and hence must have all eliminations at their
2422 initial offsets. */
2423 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2424 if (REG_NOTE_KIND (tem) == REG_LABEL_OPERAND)
2425 set_label_offsets (XEXP (tem, 0), insn, 1);
2426 return;
2428 case PARALLEL:
2429 case ADDR_VEC:
2430 case ADDR_DIFF_VEC:
2431 /* Each of the labels in the parallel or address vector must be
2432 at their initial offsets. We want the first field for PARALLEL
2433 and ADDR_VEC and the second field for ADDR_DIFF_VEC. */
2435 for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2436 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2437 insn, initial_p);
2438 return;
2440 case SET:
2441 /* We only care about setting PC. If the source is not RETURN,
2442 IF_THEN_ELSE, or a label, disable any eliminations not at
2443 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2444 isn't one of those possibilities. For branches to a label,
2445 call ourselves recursively.
2447 Note that this can disable elimination unnecessarily when we have
2448 a non-local goto since it will look like a non-constant jump to
2449 someplace in the current function. This isn't a significant
2450 problem since such jumps will normally be when all elimination
2451 pairs are back to their initial offsets. */
2453 if (SET_DEST (x) != pc_rtx)
2454 return;
2456 switch (GET_CODE (SET_SRC (x)))
2458 case PC:
2459 case RETURN:
2460 return;
2462 case LABEL_REF:
2463 set_label_offsets (SET_SRC (x), insn, initial_p);
2464 return;
2466 case IF_THEN_ELSE:
2467 tem = XEXP (SET_SRC (x), 1);
2468 if (GET_CODE (tem) == LABEL_REF)
2469 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2470 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2471 break;
2473 tem = XEXP (SET_SRC (x), 2);
2474 if (GET_CODE (tem) == LABEL_REF)
2475 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2476 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2477 break;
2478 return;
2480 default:
2481 break;
2484 /* If we reach here, all eliminations must be at their initial
2485 offset because we are doing a jump to a variable address. */
2486 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2487 if (p->offset != p->initial_offset)
2488 p->can_eliminate = 0;
2489 break;
2491 default:
2492 break;
2496 /* Scan X and replace any eliminable registers (such as fp) with a
2497 replacement (such as sp), plus an offset.
2499 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2500 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2501 MEM, we are allowed to replace a sum of a register and the constant zero
2502 with the register, which we cannot do outside a MEM. In addition, we need
2503 to record the fact that a register is referenced outside a MEM.
2505 If INSN is an insn, it is the insn containing X. If we replace a REG
2506 in a SET_DEST with an equivalent MEM and INSN is nonzero, write a
2507 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2508 the REG is being modified.
2510 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2511 That's used when we eliminate in expressions stored in notes.
2512 This means, do not set ref_outside_mem even if the reference
2513 is outside of MEMs.
2515 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2516 replacements done assuming all offsets are at their initial values. If
2517 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2518 encounter, return the actual location so that find_reloads will do
2519 the proper thing. */
2521 static rtx
2522 eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
2523 bool may_use_invariant)
2525 enum rtx_code code = GET_CODE (x);
2526 struct elim_table *ep;
2527 int regno;
2528 rtx new_rtx;
2529 int i, j;
2530 const char *fmt;
2531 int copied = 0;
2533 if (! current_function_decl)
2534 return x;
2536 switch (code)
2538 case CONST_INT:
2539 case CONST_DOUBLE:
2540 case CONST_FIXED:
2541 case CONST_VECTOR:
2542 case CONST:
2543 case SYMBOL_REF:
2544 case CODE_LABEL:
2545 case PC:
2546 case CC0:
2547 case ASM_INPUT:
2548 case ADDR_VEC:
2549 case ADDR_DIFF_VEC:
2550 case RETURN:
2551 return x;
2553 case REG:
2554 regno = REGNO (x);
2556 /* First handle the case where we encounter a bare register that
2557 is eliminable. Replace it with a PLUS. */
2558 if (regno < FIRST_PSEUDO_REGISTER)
2560 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2561 ep++)
2562 if (ep->from_rtx == x && ep->can_eliminate)
2563 return plus_constant (ep->to_rtx, ep->previous_offset);
2566 else if (reg_renumber && reg_renumber[regno] < 0
2567 && reg_equiv_invariant && reg_equiv_invariant[regno])
2569 if (may_use_invariant)
2570 return eliminate_regs_1 (copy_rtx (reg_equiv_invariant[regno]),
2571 mem_mode, insn, true);
2572 /* There exists at least one use of REGNO that cannot be
2573 eliminated. Prevent the defining insn from being deleted. */
2574 reg_equiv_init[regno] = NULL_RTX;
2575 alter_reg (regno, -1, true);
2577 return x;
2579 /* You might think handling MINUS in a manner similar to PLUS is a
2580 good idea. It is not. It has been tried multiple times and every
2581 time the change has had to have been reverted.
2583 Other parts of reload know a PLUS is special (gen_reload for example)
2584 and require special code to handle code a reloaded PLUS operand.
2586 Also consider backends where the flags register is clobbered by a
2587 MINUS, but we can emit a PLUS that does not clobber flags (IA-32,
2588 lea instruction comes to mind). If we try to reload a MINUS, we
2589 may kill the flags register that was holding a useful value.
2591 So, please before trying to handle MINUS, consider reload as a
2592 whole instead of this little section as well as the backend issues. */
2593 case PLUS:
2594 /* If this is the sum of an eliminable register and a constant, rework
2595 the sum. */
2596 if (REG_P (XEXP (x, 0))
2597 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2598 && CONSTANT_P (XEXP (x, 1)))
2600 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2601 ep++)
2602 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2604 /* The only time we want to replace a PLUS with a REG (this
2605 occurs when the constant operand of the PLUS is the negative
2606 of the offset) is when we are inside a MEM. We won't want
2607 to do so at other times because that would change the
2608 structure of the insn in a way that reload can't handle.
2609 We special-case the commonest situation in
2610 eliminate_regs_in_insn, so just replace a PLUS with a
2611 PLUS here, unless inside a MEM. */
2612 if (mem_mode != 0 && CONST_INT_P (XEXP (x, 1))
2613 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2614 return ep->to_rtx;
2615 else
2616 return gen_rtx_PLUS (Pmode, ep->to_rtx,
2617 plus_constant (XEXP (x, 1),
2618 ep->previous_offset));
2621 /* If the register is not eliminable, we are done since the other
2622 operand is a constant. */
2623 return x;
2626 /* If this is part of an address, we want to bring any constant to the
2627 outermost PLUS. We will do this by doing register replacement in
2628 our operands and seeing if a constant shows up in one of them.
2630 Note that there is no risk of modifying the structure of the insn,
2631 since we only get called for its operands, thus we are either
2632 modifying the address inside a MEM, or something like an address
2633 operand of a load-address insn. */
2636 rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true);
2637 rtx new1 = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true);
2639 if (reg_renumber && (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)))
2641 /* If one side is a PLUS and the other side is a pseudo that
2642 didn't get a hard register but has a reg_equiv_constant,
2643 we must replace the constant here since it may no longer
2644 be in the position of any operand. */
2645 if (GET_CODE (new0) == PLUS && REG_P (new1)
2646 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2647 && reg_renumber[REGNO (new1)] < 0
2648 && reg_equiv_constant != 0
2649 && reg_equiv_constant[REGNO (new1)] != 0)
2650 new1 = reg_equiv_constant[REGNO (new1)];
2651 else if (GET_CODE (new1) == PLUS && REG_P (new0)
2652 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2653 && reg_renumber[REGNO (new0)] < 0
2654 && reg_equiv_constant[REGNO (new0)] != 0)
2655 new0 = reg_equiv_constant[REGNO (new0)];
2657 new_rtx = form_sum (new0, new1);
2659 /* As above, if we are not inside a MEM we do not want to
2660 turn a PLUS into something else. We might try to do so here
2661 for an addition of 0 if we aren't optimizing. */
2662 if (! mem_mode && GET_CODE (new_rtx) != PLUS)
2663 return gen_rtx_PLUS (GET_MODE (x), new_rtx, const0_rtx);
2664 else
2665 return new_rtx;
2668 return x;
2670 case MULT:
2671 /* If this is the product of an eliminable register and a
2672 constant, apply the distribute law and move the constant out
2673 so that we have (plus (mult ..) ..). This is needed in order
2674 to keep load-address insns valid. This case is pathological.
2675 We ignore the possibility of overflow here. */
2676 if (REG_P (XEXP (x, 0))
2677 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2678 && CONST_INT_P (XEXP (x, 1)))
2679 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2680 ep++)
2681 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2683 if (! mem_mode
2684 /* Refs inside notes don't count for this purpose. */
2685 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2686 || GET_CODE (insn) == INSN_LIST)))
2687 ep->ref_outside_mem = 1;
2689 return
2690 plus_constant (gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
2691 ep->previous_offset * INTVAL (XEXP (x, 1)));
2694 /* ... fall through ... */
2696 case CALL:
2697 case COMPARE:
2698 /* See comments before PLUS about handling MINUS. */
2699 case MINUS:
2700 case DIV: case UDIV:
2701 case MOD: case UMOD:
2702 case AND: case IOR: case XOR:
2703 case ROTATERT: case ROTATE:
2704 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2705 case NE: case EQ:
2706 case GE: case GT: case GEU: case GTU:
2707 case LE: case LT: case LEU: case LTU:
2709 rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false);
2710 rtx new1 = XEXP (x, 1)
2711 ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, false) : 0;
2713 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2714 return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
2716 return x;
2718 case EXPR_LIST:
2719 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2720 if (XEXP (x, 0))
2722 new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true);
2723 if (new_rtx != XEXP (x, 0))
2725 /* If this is a REG_DEAD note, it is not valid anymore.
2726 Using the eliminated version could result in creating a
2727 REG_DEAD note for the stack or frame pointer. */
2728 if (REG_NOTE_KIND (x) == REG_DEAD)
2729 return (XEXP (x, 1)
2730 ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true)
2731 : NULL_RTX);
2733 x = alloc_reg_note (REG_NOTE_KIND (x), new_rtx, XEXP (x, 1));
2737 /* ... fall through ... */
2739 case INSN_LIST:
2740 /* Now do eliminations in the rest of the chain. If this was
2741 an EXPR_LIST, this might result in allocating more memory than is
2742 strictly needed, but it simplifies the code. */
2743 if (XEXP (x, 1))
2745 new_rtx = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true);
2746 if (new_rtx != XEXP (x, 1))
2747 return
2748 gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new_rtx);
2750 return x;
2752 case PRE_INC:
2753 case POST_INC:
2754 case PRE_DEC:
2755 case POST_DEC:
2756 /* We do not support elimination of a register that is modified.
2757 elimination_effects has already make sure that this does not
2758 happen. */
2759 return x;
2761 case PRE_MODIFY:
2762 case POST_MODIFY:
2763 /* We do not support elimination of a register that is modified.
2764 elimination_effects has already make sure that this does not
2765 happen. The only remaining case we need to consider here is
2766 that the increment value may be an eliminable register. */
2767 if (GET_CODE (XEXP (x, 1)) == PLUS
2768 && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
2770 rtx new_rtx = eliminate_regs_1 (XEXP (XEXP (x, 1), 1), mem_mode,
2771 insn, true);
2773 if (new_rtx != XEXP (XEXP (x, 1), 1))
2774 return gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (x, 0),
2775 gen_rtx_PLUS (GET_MODE (x),
2776 XEXP (x, 0), new_rtx));
2778 return x;
2780 case STRICT_LOW_PART:
2781 case NEG: case NOT:
2782 case SIGN_EXTEND: case ZERO_EXTEND:
2783 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2784 case FLOAT: case FIX:
2785 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2786 case ABS:
2787 case SQRT:
2788 case FFS:
2789 case CLZ:
2790 case CTZ:
2791 case POPCOUNT:
2792 case PARITY:
2793 case BSWAP:
2794 new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false);
2795 if (new_rtx != XEXP (x, 0))
2796 return gen_rtx_fmt_e (code, GET_MODE (x), new_rtx);
2797 return x;
2799 case SUBREG:
2800 /* Similar to above processing, but preserve SUBREG_BYTE.
2801 Convert (subreg (mem)) to (mem) if not paradoxical.
2802 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2803 pseudo didn't get a hard reg, we must replace this with the
2804 eliminated version of the memory location because push_reload
2805 may do the replacement in certain circumstances. */
2806 if (REG_P (SUBREG_REG (x))
2807 && (GET_MODE_SIZE (GET_MODE (x))
2808 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2809 && reg_equiv_memory_loc != 0
2810 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2812 new_rtx = SUBREG_REG (x);
2814 else
2815 new_rtx = eliminate_regs_1 (SUBREG_REG (x), mem_mode, insn, false);
2817 if (new_rtx != SUBREG_REG (x))
2819 int x_size = GET_MODE_SIZE (GET_MODE (x));
2820 int new_size = GET_MODE_SIZE (GET_MODE (new_rtx));
2822 if (MEM_P (new_rtx)
2823 && ((x_size < new_size
2824 #ifdef WORD_REGISTER_OPERATIONS
2825 /* On these machines, combine can create rtl of the form
2826 (set (subreg:m1 (reg:m2 R) 0) ...)
2827 where m1 < m2, and expects something interesting to
2828 happen to the entire word. Moreover, it will use the
2829 (reg:m2 R) later, expecting all bits to be preserved.
2830 So if the number of words is the same, preserve the
2831 subreg so that push_reload can see it. */
2832 && ! ((x_size - 1) / UNITS_PER_WORD
2833 == (new_size -1 ) / UNITS_PER_WORD)
2834 #endif
2836 || x_size == new_size)
2838 return adjust_address_nv (new_rtx, GET_MODE (x), SUBREG_BYTE (x));
2839 else
2840 return gen_rtx_SUBREG (GET_MODE (x), new_rtx, SUBREG_BYTE (x));
2843 return x;
2845 case MEM:
2846 /* Our only special processing is to pass the mode of the MEM to our
2847 recursive call and copy the flags. While we are here, handle this
2848 case more efficiently. */
2849 return
2850 replace_equiv_address_nv (x,
2851 eliminate_regs_1 (XEXP (x, 0), GET_MODE (x),
2852 insn, true));
2854 case USE:
2855 /* Handle insn_list USE that a call to a pure function may generate. */
2856 new_rtx = eliminate_regs_1 (XEXP (x, 0), VOIDmode, insn, false);
2857 if (new_rtx != XEXP (x, 0))
2858 return gen_rtx_USE (GET_MODE (x), new_rtx);
2859 return x;
2861 case CLOBBER:
2862 case ASM_OPERANDS:
2863 case SET:
2864 gcc_unreachable ();
2866 default:
2867 break;
2870 /* Process each of our operands recursively. If any have changed, make a
2871 copy of the rtx. */
2872 fmt = GET_RTX_FORMAT (code);
2873 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2875 if (*fmt == 'e')
2877 new_rtx = eliminate_regs_1 (XEXP (x, i), mem_mode, insn, false);
2878 if (new_rtx != XEXP (x, i) && ! copied)
2880 x = shallow_copy_rtx (x);
2881 copied = 1;
2883 XEXP (x, i) = new_rtx;
2885 else if (*fmt == 'E')
2887 int copied_vec = 0;
2888 for (j = 0; j < XVECLEN (x, i); j++)
2890 new_rtx = eliminate_regs_1 (XVECEXP (x, i, j), mem_mode, insn, false);
2891 if (new_rtx != XVECEXP (x, i, j) && ! copied_vec)
2893 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
2894 XVEC (x, i)->elem);
2895 if (! copied)
2897 x = shallow_copy_rtx (x);
2898 copied = 1;
2900 XVEC (x, i) = new_v;
2901 copied_vec = 1;
2903 XVECEXP (x, i, j) = new_rtx;
2908 return x;
2912 eliminate_regs (rtx x, enum machine_mode mem_mode, rtx insn)
2914 return eliminate_regs_1 (x, mem_mode, insn, false);
2917 /* Scan rtx X for modifications of elimination target registers. Update
2918 the table of eliminables to reflect the changed state. MEM_MODE is
2919 the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM. */
2921 static void
2922 elimination_effects (rtx x, enum machine_mode mem_mode)
2924 enum rtx_code code = GET_CODE (x);
2925 struct elim_table *ep;
2926 int regno;
2927 int i, j;
2928 const char *fmt;
2930 switch (code)
2932 case CONST_INT:
2933 case CONST_DOUBLE:
2934 case CONST_FIXED:
2935 case CONST_VECTOR:
2936 case CONST:
2937 case SYMBOL_REF:
2938 case CODE_LABEL:
2939 case PC:
2940 case CC0:
2941 case ASM_INPUT:
2942 case ADDR_VEC:
2943 case ADDR_DIFF_VEC:
2944 case RETURN:
2945 return;
2947 case REG:
2948 regno = REGNO (x);
2950 /* First handle the case where we encounter a bare register that
2951 is eliminable. Replace it with a PLUS. */
2952 if (regno < FIRST_PSEUDO_REGISTER)
2954 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2955 ep++)
2956 if (ep->from_rtx == x && ep->can_eliminate)
2958 if (! mem_mode)
2959 ep->ref_outside_mem = 1;
2960 return;
2964 else if (reg_renumber[regno] < 0 && reg_equiv_constant
2965 && reg_equiv_constant[regno]
2966 && ! function_invariant_p (reg_equiv_constant[regno]))
2967 elimination_effects (reg_equiv_constant[regno], mem_mode);
2968 return;
2970 case PRE_INC:
2971 case POST_INC:
2972 case PRE_DEC:
2973 case POST_DEC:
2974 case POST_MODIFY:
2975 case PRE_MODIFY:
2976 /* If we modify the source of an elimination rule, disable it. */
2977 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2978 if (ep->from_rtx == XEXP (x, 0))
2979 ep->can_eliminate = 0;
2981 /* If we modify the target of an elimination rule by adding a constant,
2982 update its offset. If we modify the target in any other way, we'll
2983 have to disable the rule as well. */
2984 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2985 if (ep->to_rtx == XEXP (x, 0))
2987 int size = GET_MODE_SIZE (mem_mode);
2989 /* If more bytes than MEM_MODE are pushed, account for them. */
2990 #ifdef PUSH_ROUNDING
2991 if (ep->to_rtx == stack_pointer_rtx)
2992 size = PUSH_ROUNDING (size);
2993 #endif
2994 if (code == PRE_DEC || code == POST_DEC)
2995 ep->offset += size;
2996 else if (code == PRE_INC || code == POST_INC)
2997 ep->offset -= size;
2998 else if (code == PRE_MODIFY || code == POST_MODIFY)
3000 if (GET_CODE (XEXP (x, 1)) == PLUS
3001 && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
3002 && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
3003 ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1));
3004 else
3005 ep->can_eliminate = 0;
3009 /* These two aren't unary operators. */
3010 if (code == POST_MODIFY || code == PRE_MODIFY)
3011 break;
3013 /* Fall through to generic unary operation case. */
3014 case STRICT_LOW_PART:
3015 case NEG: case NOT:
3016 case SIGN_EXTEND: case ZERO_EXTEND:
3017 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3018 case FLOAT: case FIX:
3019 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3020 case ABS:
3021 case SQRT:
3022 case FFS:
3023 case CLZ:
3024 case CTZ:
3025 case POPCOUNT:
3026 case PARITY:
3027 case BSWAP:
3028 elimination_effects (XEXP (x, 0), mem_mode);
3029 return;
3031 case SUBREG:
3032 if (REG_P (SUBREG_REG (x))
3033 && (GET_MODE_SIZE (GET_MODE (x))
3034 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3035 && reg_equiv_memory_loc != 0
3036 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
3037 return;
3039 elimination_effects (SUBREG_REG (x), mem_mode);
3040 return;
3042 case USE:
3043 /* If using a register that is the source of an eliminate we still
3044 think can be performed, note it cannot be performed since we don't
3045 know how this register is used. */
3046 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3047 if (ep->from_rtx == XEXP (x, 0))
3048 ep->can_eliminate = 0;
3050 elimination_effects (XEXP (x, 0), mem_mode);
3051 return;
3053 case CLOBBER:
3054 /* If clobbering a register that is the replacement register for an
3055 elimination we still think can be performed, note that it cannot
3056 be performed. Otherwise, we need not be concerned about it. */
3057 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3058 if (ep->to_rtx == XEXP (x, 0))
3059 ep->can_eliminate = 0;
3061 elimination_effects (XEXP (x, 0), mem_mode);
3062 return;
3064 case SET:
3065 /* Check for setting a register that we know about. */
3066 if (REG_P (SET_DEST (x)))
3068 /* See if this is setting the replacement register for an
3069 elimination.
3071 If DEST is the hard frame pointer, we do nothing because we
3072 assume that all assignments to the frame pointer are for
3073 non-local gotos and are being done at a time when they are valid
3074 and do not disturb anything else. Some machines want to
3075 eliminate a fake argument pointer (or even a fake frame pointer)
3076 with either the real frame or the stack pointer. Assignments to
3077 the hard frame pointer must not prevent this elimination. */
3079 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3080 ep++)
3081 if (ep->to_rtx == SET_DEST (x)
3082 && SET_DEST (x) != hard_frame_pointer_rtx)
3084 /* If it is being incremented, adjust the offset. Otherwise,
3085 this elimination can't be done. */
3086 rtx src = SET_SRC (x);
3088 if (GET_CODE (src) == PLUS
3089 && XEXP (src, 0) == SET_DEST (x)
3090 && CONST_INT_P (XEXP (src, 1)))
3091 ep->offset -= INTVAL (XEXP (src, 1));
3092 else
3093 ep->can_eliminate = 0;
3097 elimination_effects (SET_DEST (x), VOIDmode);
3098 elimination_effects (SET_SRC (x), VOIDmode);
3099 return;
3101 case MEM:
3102 /* Our only special processing is to pass the mode of the MEM to our
3103 recursive call. */
3104 elimination_effects (XEXP (x, 0), GET_MODE (x));
3105 return;
3107 default:
3108 break;
3111 fmt = GET_RTX_FORMAT (code);
3112 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3114 if (*fmt == 'e')
3115 elimination_effects (XEXP (x, i), mem_mode);
3116 else if (*fmt == 'E')
3117 for (j = 0; j < XVECLEN (x, i); j++)
3118 elimination_effects (XVECEXP (x, i, j), mem_mode);
3122 /* Descend through rtx X and verify that no references to eliminable registers
3123 remain. If any do remain, mark the involved register as not
3124 eliminable. */
3126 static void
3127 check_eliminable_occurrences (rtx x)
3129 const char *fmt;
3130 int i;
3131 enum rtx_code code;
3133 if (x == 0)
3134 return;
3136 code = GET_CODE (x);
3138 if (code == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3140 struct elim_table *ep;
3142 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3143 if (ep->from_rtx == x)
3144 ep->can_eliminate = 0;
3145 return;
3148 fmt = GET_RTX_FORMAT (code);
3149 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3151 if (*fmt == 'e')
3152 check_eliminable_occurrences (XEXP (x, i));
3153 else if (*fmt == 'E')
3155 int j;
3156 for (j = 0; j < XVECLEN (x, i); j++)
3157 check_eliminable_occurrences (XVECEXP (x, i, j));
3162 /* Scan INSN and eliminate all eliminable registers in it.
3164 If REPLACE is nonzero, do the replacement destructively. Also
3165 delete the insn as dead it if it is setting an eliminable register.
3167 If REPLACE is zero, do all our allocations in reload_obstack.
3169 If no eliminations were done and this insn doesn't require any elimination
3170 processing (these are not identical conditions: it might be updating sp,
3171 but not referencing fp; this needs to be seen during reload_as_needed so
3172 that the offset between fp and sp can be taken into consideration), zero
3173 is returned. Otherwise, 1 is returned. */
3175 static int
3176 eliminate_regs_in_insn (rtx insn, int replace)
3178 int icode = recog_memoized (insn);
3179 rtx old_body = PATTERN (insn);
3180 int insn_is_asm = asm_noperands (old_body) >= 0;
3181 rtx old_set = single_set (insn);
3182 rtx new_body;
3183 int val = 0;
3184 int i;
3185 rtx substed_operand[MAX_RECOG_OPERANDS];
3186 rtx orig_operand[MAX_RECOG_OPERANDS];
3187 struct elim_table *ep;
3188 rtx plus_src, plus_cst_src;
3190 if (! insn_is_asm && icode < 0)
3192 gcc_assert (GET_CODE (PATTERN (insn)) == USE
3193 || GET_CODE (PATTERN (insn)) == CLOBBER
3194 || GET_CODE (PATTERN (insn)) == ADDR_VEC
3195 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
3196 || GET_CODE (PATTERN (insn)) == ASM_INPUT
3197 || DEBUG_INSN_P (insn));
3198 return 0;
3201 if (old_set != 0 && REG_P (SET_DEST (old_set))
3202 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3204 /* Check for setting an eliminable register. */
3205 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3206 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3208 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3209 /* If this is setting the frame pointer register to the
3210 hardware frame pointer register and this is an elimination
3211 that will be done (tested above), this insn is really
3212 adjusting the frame pointer downward to compensate for
3213 the adjustment done before a nonlocal goto. */
3214 if (ep->from == FRAME_POINTER_REGNUM
3215 && ep->to == HARD_FRAME_POINTER_REGNUM)
3217 rtx base = SET_SRC (old_set);
3218 rtx base_insn = insn;
3219 HOST_WIDE_INT offset = 0;
3221 while (base != ep->to_rtx)
3223 rtx prev_insn, prev_set;
3225 if (GET_CODE (base) == PLUS
3226 && CONST_INT_P (XEXP (base, 1)))
3228 offset += INTVAL (XEXP (base, 1));
3229 base = XEXP (base, 0);
3231 else if ((prev_insn = prev_nonnote_insn (base_insn)) != 0
3232 && (prev_set = single_set (prev_insn)) != 0
3233 && rtx_equal_p (SET_DEST (prev_set), base))
3235 base = SET_SRC (prev_set);
3236 base_insn = prev_insn;
3238 else
3239 break;
3242 if (base == ep->to_rtx)
3244 rtx src
3245 = plus_constant (ep->to_rtx, offset - ep->offset);
3247 new_body = old_body;
3248 if (! replace)
3250 new_body = copy_insn (old_body);
3251 if (REG_NOTES (insn))
3252 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3254 PATTERN (insn) = new_body;
3255 old_set = single_set (insn);
3257 /* First see if this insn remains valid when we
3258 make the change. If not, keep the INSN_CODE
3259 the same and let reload fit it up. */
3260 validate_change (insn, &SET_SRC (old_set), src, 1);
3261 validate_change (insn, &SET_DEST (old_set),
3262 ep->to_rtx, 1);
3263 if (! apply_change_group ())
3265 SET_SRC (old_set) = src;
3266 SET_DEST (old_set) = ep->to_rtx;
3269 val = 1;
3270 goto done;
3273 #endif
3275 /* In this case this insn isn't serving a useful purpose. We
3276 will delete it in reload_as_needed once we know that this
3277 elimination is, in fact, being done.
3279 If REPLACE isn't set, we can't delete this insn, but needn't
3280 process it since it won't be used unless something changes. */
3281 if (replace)
3283 delete_dead_insn (insn);
3284 return 1;
3286 val = 1;
3287 goto done;
3291 /* We allow one special case which happens to work on all machines we
3292 currently support: a single set with the source or a REG_EQUAL
3293 note being a PLUS of an eliminable register and a constant. */
3294 plus_src = plus_cst_src = 0;
3295 if (old_set && REG_P (SET_DEST (old_set)))
3297 if (GET_CODE (SET_SRC (old_set)) == PLUS)
3298 plus_src = SET_SRC (old_set);
3299 /* First see if the source is of the form (plus (...) CST). */
3300 if (plus_src
3301 && CONST_INT_P (XEXP (plus_src, 1)))
3302 plus_cst_src = plus_src;
3303 else if (REG_P (SET_SRC (old_set))
3304 || plus_src)
3306 /* Otherwise, see if we have a REG_EQUAL note of the form
3307 (plus (...) CST). */
3308 rtx links;
3309 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
3311 if ((REG_NOTE_KIND (links) == REG_EQUAL
3312 || REG_NOTE_KIND (links) == REG_EQUIV)
3313 && GET_CODE (XEXP (links, 0)) == PLUS
3314 && CONST_INT_P (XEXP (XEXP (links, 0), 1)))
3316 plus_cst_src = XEXP (links, 0);
3317 break;
3322 /* Check that the first operand of the PLUS is a hard reg or
3323 the lowpart subreg of one. */
3324 if (plus_cst_src)
3326 rtx reg = XEXP (plus_cst_src, 0);
3327 if (GET_CODE (reg) == SUBREG && subreg_lowpart_p (reg))
3328 reg = SUBREG_REG (reg);
3330 if (!REG_P (reg) || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
3331 plus_cst_src = 0;
3334 if (plus_cst_src)
3336 rtx reg = XEXP (plus_cst_src, 0);
3337 HOST_WIDE_INT offset = INTVAL (XEXP (plus_cst_src, 1));
3339 if (GET_CODE (reg) == SUBREG)
3340 reg = SUBREG_REG (reg);
3342 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3343 if (ep->from_rtx == reg && ep->can_eliminate)
3345 rtx to_rtx = ep->to_rtx;
3346 offset += ep->offset;
3347 offset = trunc_int_for_mode (offset, GET_MODE (plus_cst_src));
3349 if (GET_CODE (XEXP (plus_cst_src, 0)) == SUBREG)
3350 to_rtx = gen_lowpart (GET_MODE (XEXP (plus_cst_src, 0)),
3351 to_rtx);
3352 /* If we have a nonzero offset, and the source is already
3353 a simple REG, the following transformation would
3354 increase the cost of the insn by replacing a simple REG
3355 with (plus (reg sp) CST). So try only when we already
3356 had a PLUS before. */
3357 if (offset == 0 || plus_src)
3359 rtx new_src = plus_constant (to_rtx, offset);
3361 new_body = old_body;
3362 if (! replace)
3364 new_body = copy_insn (old_body);
3365 if (REG_NOTES (insn))
3366 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3368 PATTERN (insn) = new_body;
3369 old_set = single_set (insn);
3371 /* First see if this insn remains valid when we make the
3372 change. If not, try to replace the whole pattern with
3373 a simple set (this may help if the original insn was a
3374 PARALLEL that was only recognized as single_set due to
3375 REG_UNUSED notes). If this isn't valid either, keep
3376 the INSN_CODE the same and let reload fix it up. */
3377 if (!validate_change (insn, &SET_SRC (old_set), new_src, 0))
3379 rtx new_pat = gen_rtx_SET (VOIDmode,
3380 SET_DEST (old_set), new_src);
3382 if (!validate_change (insn, &PATTERN (insn), new_pat, 0))
3383 SET_SRC (old_set) = new_src;
3386 else
3387 break;
3389 val = 1;
3390 /* This can't have an effect on elimination offsets, so skip right
3391 to the end. */
3392 goto done;
3396 /* Determine the effects of this insn on elimination offsets. */
3397 elimination_effects (old_body, VOIDmode);
3399 /* Eliminate all eliminable registers occurring in operands that
3400 can be handled by reload. */
3401 extract_insn (insn);
3402 for (i = 0; i < recog_data.n_operands; i++)
3404 orig_operand[i] = recog_data.operand[i];
3405 substed_operand[i] = recog_data.operand[i];
3407 /* For an asm statement, every operand is eliminable. */
3408 if (insn_is_asm || insn_data[icode].operand[i].eliminable)
3410 bool is_set_src, in_plus;
3412 /* Check for setting a register that we know about. */
3413 if (recog_data.operand_type[i] != OP_IN
3414 && REG_P (orig_operand[i]))
3416 /* If we are assigning to a register that can be eliminated, it
3417 must be as part of a PARALLEL, since the code above handles
3418 single SETs. We must indicate that we can no longer
3419 eliminate this reg. */
3420 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3421 ep++)
3422 if (ep->from_rtx == orig_operand[i])
3423 ep->can_eliminate = 0;
3426 /* Companion to the above plus substitution, we can allow
3427 invariants as the source of a plain move. */
3428 is_set_src = false;
3429 if (old_set && recog_data.operand_loc[i] == &SET_SRC (old_set))
3430 is_set_src = true;
3431 in_plus = false;
3432 if (plus_src
3433 && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
3434 || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
3435 in_plus = true;
3437 substed_operand[i]
3438 = eliminate_regs_1 (recog_data.operand[i], VOIDmode,
3439 replace ? insn : NULL_RTX,
3440 is_set_src || in_plus);
3441 if (substed_operand[i] != orig_operand[i])
3442 val = 1;
3443 /* Terminate the search in check_eliminable_occurrences at
3444 this point. */
3445 *recog_data.operand_loc[i] = 0;
3447 /* If an output operand changed from a REG to a MEM and INSN is an
3448 insn, write a CLOBBER insn. */
3449 if (recog_data.operand_type[i] != OP_IN
3450 && REG_P (orig_operand[i])
3451 && MEM_P (substed_operand[i])
3452 && replace)
3453 emit_insn_after (gen_clobber (orig_operand[i]), insn);
3457 for (i = 0; i < recog_data.n_dups; i++)
3458 *recog_data.dup_loc[i]
3459 = *recog_data.operand_loc[(int) recog_data.dup_num[i]];
3461 /* If any eliminable remain, they aren't eliminable anymore. */
3462 check_eliminable_occurrences (old_body);
3464 /* Substitute the operands; the new values are in the substed_operand
3465 array. */
3466 for (i = 0; i < recog_data.n_operands; i++)
3467 *recog_data.operand_loc[i] = substed_operand[i];
3468 for (i = 0; i < recog_data.n_dups; i++)
3469 *recog_data.dup_loc[i] = substed_operand[(int) recog_data.dup_num[i]];
3471 /* If we are replacing a body that was a (set X (plus Y Z)), try to
3472 re-recognize the insn. We do this in case we had a simple addition
3473 but now can do this as a load-address. This saves an insn in this
3474 common case.
3475 If re-recognition fails, the old insn code number will still be used,
3476 and some register operands may have changed into PLUS expressions.
3477 These will be handled by find_reloads by loading them into a register
3478 again. */
3480 if (val)
3482 /* If we aren't replacing things permanently and we changed something,
3483 make another copy to ensure that all the RTL is new. Otherwise
3484 things can go wrong if find_reload swaps commutative operands
3485 and one is inside RTL that has been copied while the other is not. */
3486 new_body = old_body;
3487 if (! replace)
3489 new_body = copy_insn (old_body);
3490 if (REG_NOTES (insn))
3491 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3493 PATTERN (insn) = new_body;
3495 /* If we had a move insn but now we don't, rerecognize it. This will
3496 cause spurious re-recognition if the old move had a PARALLEL since
3497 the new one still will, but we can't call single_set without
3498 having put NEW_BODY into the insn and the re-recognition won't
3499 hurt in this rare case. */
3500 /* ??? Why this huge if statement - why don't we just rerecognize the
3501 thing always? */
3502 if (! insn_is_asm
3503 && old_set != 0
3504 && ((REG_P (SET_SRC (old_set))
3505 && (GET_CODE (new_body) != SET
3506 || !REG_P (SET_SRC (new_body))))
3507 /* If this was a load from or store to memory, compare
3508 the MEM in recog_data.operand to the one in the insn.
3509 If they are not equal, then rerecognize the insn. */
3510 || (old_set != 0
3511 && ((MEM_P (SET_SRC (old_set))
3512 && SET_SRC (old_set) != recog_data.operand[1])
3513 || (MEM_P (SET_DEST (old_set))
3514 && SET_DEST (old_set) != recog_data.operand[0])))
3515 /* If this was an add insn before, rerecognize. */
3516 || GET_CODE (SET_SRC (old_set)) == PLUS))
3518 int new_icode = recog (PATTERN (insn), insn, 0);
3519 if (new_icode >= 0)
3520 INSN_CODE (insn) = new_icode;
3524 /* Restore the old body. If there were any changes to it, we made a copy
3525 of it while the changes were still in place, so we'll correctly return
3526 a modified insn below. */
3527 if (! replace)
3529 /* Restore the old body. */
3530 for (i = 0; i < recog_data.n_operands; i++)
3531 *recog_data.operand_loc[i] = orig_operand[i];
3532 for (i = 0; i < recog_data.n_dups; i++)
3533 *recog_data.dup_loc[i] = orig_operand[(int) recog_data.dup_num[i]];
3536 /* Update all elimination pairs to reflect the status after the current
3537 insn. The changes we make were determined by the earlier call to
3538 elimination_effects.
3540 We also detect cases where register elimination cannot be done,
3541 namely, if a register would be both changed and referenced outside a MEM
3542 in the resulting insn since such an insn is often undefined and, even if
3543 not, we cannot know what meaning will be given to it. Note that it is
3544 valid to have a register used in an address in an insn that changes it
3545 (presumably with a pre- or post-increment or decrement).
3547 If anything changes, return nonzero. */
3549 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3551 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3552 ep->can_eliminate = 0;
3554 ep->ref_outside_mem = 0;
3556 if (ep->previous_offset != ep->offset)
3557 val = 1;
3560 done:
3561 /* If we changed something, perform elimination in REG_NOTES. This is
3562 needed even when REPLACE is zero because a REG_DEAD note might refer
3563 to a register that we eliminate and could cause a different number
3564 of spill registers to be needed in the final reload pass than in
3565 the pre-passes. */
3566 if (val && REG_NOTES (insn) != 0)
3567 REG_NOTES (insn)
3568 = eliminate_regs_1 (REG_NOTES (insn), VOIDmode, REG_NOTES (insn), true);
3570 return val;
3573 /* Loop through all elimination pairs.
3574 Recalculate the number not at initial offset.
3576 Compute the maximum offset (minimum offset if the stack does not
3577 grow downward) for each elimination pair. */
3579 static void
3580 update_eliminable_offsets (void)
3582 struct elim_table *ep;
3584 num_not_at_initial_offset = 0;
3585 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3587 ep->previous_offset = ep->offset;
3588 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3589 num_not_at_initial_offset++;
3593 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3594 replacement we currently believe is valid, mark it as not eliminable if X
3595 modifies DEST in any way other than by adding a constant integer to it.
3597 If DEST is the frame pointer, we do nothing because we assume that
3598 all assignments to the hard frame pointer are nonlocal gotos and are being
3599 done at a time when they are valid and do not disturb anything else.
3600 Some machines want to eliminate a fake argument pointer with either the
3601 frame or stack pointer. Assignments to the hard frame pointer must not
3602 prevent this elimination.
3604 Called via note_stores from reload before starting its passes to scan
3605 the insns of the function. */
3607 static void
3608 mark_not_eliminable (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
3610 unsigned int i;
3612 /* A SUBREG of a hard register here is just changing its mode. We should
3613 not see a SUBREG of an eliminable hard register, but check just in
3614 case. */
3615 if (GET_CODE (dest) == SUBREG)
3616 dest = SUBREG_REG (dest);
3618 if (dest == hard_frame_pointer_rtx)
3619 return;
3621 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3622 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3623 && (GET_CODE (x) != SET
3624 || GET_CODE (SET_SRC (x)) != PLUS
3625 || XEXP (SET_SRC (x), 0) != dest
3626 || !CONST_INT_P (XEXP (SET_SRC (x), 1))))
3628 reg_eliminate[i].can_eliminate_previous
3629 = reg_eliminate[i].can_eliminate = 0;
3630 num_eliminable--;
3634 /* Verify that the initial elimination offsets did not change since the
3635 last call to set_initial_elim_offsets. This is used to catch cases
3636 where something illegal happened during reload_as_needed that could
3637 cause incorrect code to be generated if we did not check for it. */
3639 static bool
3640 verify_initial_elim_offsets (void)
3642 HOST_WIDE_INT t;
3644 if (!num_eliminable)
3645 return true;
3647 #ifdef ELIMINABLE_REGS
3649 struct elim_table *ep;
3651 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3653 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t);
3654 if (t != ep->initial_offset)
3655 return false;
3658 #else
3659 INITIAL_FRAME_POINTER_OFFSET (t);
3660 if (t != reg_eliminate[0].initial_offset)
3661 return false;
3662 #endif
3664 return true;
3667 /* Reset all offsets on eliminable registers to their initial values. */
3669 static void
3670 set_initial_elim_offsets (void)
3672 struct elim_table *ep = reg_eliminate;
3674 #ifdef ELIMINABLE_REGS
3675 for (; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3677 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
3678 ep->previous_offset = ep->offset = ep->initial_offset;
3680 #else
3681 INITIAL_FRAME_POINTER_OFFSET (ep->initial_offset);
3682 ep->previous_offset = ep->offset = ep->initial_offset;
3683 #endif
3685 num_not_at_initial_offset = 0;
3688 /* Subroutine of set_initial_label_offsets called via for_each_eh_label. */
3690 static void
3691 set_initial_eh_label_offset (rtx label)
3693 set_label_offsets (label, NULL_RTX, 1);
3696 /* Initialize the known label offsets.
3697 Set a known offset for each forced label to be at the initial offset
3698 of each elimination. We do this because we assume that all
3699 computed jumps occur from a location where each elimination is
3700 at its initial offset.
3701 For all other labels, show that we don't know the offsets. */
3703 static void
3704 set_initial_label_offsets (void)
3706 rtx x;
3707 memset (offsets_known_at, 0, num_labels);
3709 for (x = forced_labels; x; x = XEXP (x, 1))
3710 if (XEXP (x, 0))
3711 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
3713 for_each_eh_label (set_initial_eh_label_offset);
3716 /* Set all elimination offsets to the known values for the code label given
3717 by INSN. */
3719 static void
3720 set_offsets_for_label (rtx insn)
3722 unsigned int i;
3723 int label_nr = CODE_LABEL_NUMBER (insn);
3724 struct elim_table *ep;
3726 num_not_at_initial_offset = 0;
3727 for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++)
3729 ep->offset = ep->previous_offset
3730 = offsets_at[label_nr - first_label_num][i];
3731 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3732 num_not_at_initial_offset++;
3736 /* See if anything that happened changes which eliminations are valid.
3737 For example, on the SPARC, whether or not the frame pointer can
3738 be eliminated can depend on what registers have been used. We need
3739 not check some conditions again (such as flag_omit_frame_pointer)
3740 since they can't have changed. */
3742 static void
3743 update_eliminables (HARD_REG_SET *pset)
3745 int previous_frame_pointer_needed = frame_pointer_needed;
3746 struct elim_table *ep;
3748 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3749 if ((ep->from == HARD_FRAME_POINTER_REGNUM
3750 && targetm.frame_pointer_required ())
3751 #ifdef ELIMINABLE_REGS
3752 || ! targetm.can_eliminate (ep->from, ep->to)
3753 #endif
3755 ep->can_eliminate = 0;
3757 /* Look for the case where we have discovered that we can't replace
3758 register A with register B and that means that we will now be
3759 trying to replace register A with register C. This means we can
3760 no longer replace register C with register B and we need to disable
3761 such an elimination, if it exists. This occurs often with A == ap,
3762 B == sp, and C == fp. */
3764 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3766 struct elim_table *op;
3767 int new_to = -1;
3769 if (! ep->can_eliminate && ep->can_eliminate_previous)
3771 /* Find the current elimination for ep->from, if there is a
3772 new one. */
3773 for (op = reg_eliminate;
3774 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3775 if (op->from == ep->from && op->can_eliminate)
3777 new_to = op->to;
3778 break;
3781 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
3782 disable it. */
3783 for (op = reg_eliminate;
3784 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3785 if (op->from == new_to && op->to == ep->to)
3786 op->can_eliminate = 0;
3790 /* See if any registers that we thought we could eliminate the previous
3791 time are no longer eliminable. If so, something has changed and we
3792 must spill the register. Also, recompute the number of eliminable
3793 registers and see if the frame pointer is needed; it is if there is
3794 no elimination of the frame pointer that we can perform. */
3796 frame_pointer_needed = 1;
3797 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3799 if (ep->can_eliminate
3800 && ep->from == FRAME_POINTER_REGNUM
3801 && ep->to != HARD_FRAME_POINTER_REGNUM
3802 && (! SUPPORTS_STACK_ALIGNMENT
3803 || ! crtl->stack_realign_needed))
3804 frame_pointer_needed = 0;
3806 if (! ep->can_eliminate && ep->can_eliminate_previous)
3808 ep->can_eliminate_previous = 0;
3809 SET_HARD_REG_BIT (*pset, ep->from);
3810 num_eliminable--;
3814 /* If we didn't need a frame pointer last time, but we do now, spill
3815 the hard frame pointer. */
3816 if (frame_pointer_needed && ! previous_frame_pointer_needed)
3817 SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM);
3820 /* Return true if X is used as the target register of an elimination. */
3822 bool
3823 elimination_target_reg_p (rtx x)
3825 struct elim_table *ep;
3827 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3828 if (ep->to_rtx == x && ep->can_eliminate)
3829 return true;
3831 return false;
3834 /* Initialize the table of registers to eliminate.
3835 Pre-condition: global flag frame_pointer_needed has been set before
3836 calling this function. */
3838 static void
3839 init_elim_table (void)
3841 struct elim_table *ep;
3842 #ifdef ELIMINABLE_REGS
3843 const struct elim_table_1 *ep1;
3844 #endif
3846 if (!reg_eliminate)
3847 reg_eliminate = XCNEWVEC (struct elim_table, NUM_ELIMINABLE_REGS);
3849 num_eliminable = 0;
3851 #ifdef ELIMINABLE_REGS
3852 for (ep = reg_eliminate, ep1 = reg_eliminate_1;
3853 ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++, ep1++)
3855 ep->from = ep1->from;
3856 ep->to = ep1->to;
3857 ep->can_eliminate = ep->can_eliminate_previous
3858 = (targetm.can_eliminate (ep->from, ep->to)
3859 && ! (ep->to == STACK_POINTER_REGNUM
3860 && frame_pointer_needed
3861 && (! SUPPORTS_STACK_ALIGNMENT
3862 || ! stack_realign_fp)));
3864 #else
3865 reg_eliminate[0].from = reg_eliminate_1[0].from;
3866 reg_eliminate[0].to = reg_eliminate_1[0].to;
3867 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
3868 = ! frame_pointer_needed;
3869 #endif
3871 /* Count the number of eliminable registers and build the FROM and TO
3872 REG rtx's. Note that code in gen_rtx_REG will cause, e.g.,
3873 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
3874 We depend on this. */
3875 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3877 num_eliminable += ep->can_eliminate;
3878 ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
3879 ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
3883 /* Kick all pseudos out of hard register REGNO.
3885 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3886 because we found we can't eliminate some register. In the case, no pseudos
3887 are allowed to be in the register, even if they are only in a block that
3888 doesn't require spill registers, unlike the case when we are spilling this
3889 hard reg to produce another spill register.
3891 Return nonzero if any pseudos needed to be kicked out. */
3893 static void
3894 spill_hard_reg (unsigned int regno, int cant_eliminate)
3896 int i;
3898 if (cant_eliminate)
3900 SET_HARD_REG_BIT (bad_spill_regs_global, regno);
3901 df_set_regs_ever_live (regno, true);
3904 /* Spill every pseudo reg that was allocated to this reg
3905 or to something that overlaps this reg. */
3907 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3908 if (reg_renumber[i] >= 0
3909 && (unsigned int) reg_renumber[i] <= regno
3910 && end_hard_regno (PSEUDO_REGNO_MODE (i), reg_renumber[i]) > regno)
3911 SET_REGNO_REG_SET (&spilled_pseudos, i);
3914 /* After find_reload_regs has been run for all insn that need reloads,
3915 and/or spill_hard_regs was called, this function is used to actually
3916 spill pseudo registers and try to reallocate them. It also sets up the
3917 spill_regs array for use by choose_reload_regs. */
3919 static int
3920 finish_spills (int global)
3922 struct insn_chain *chain;
3923 int something_changed = 0;
3924 unsigned i;
3925 reg_set_iterator rsi;
3927 /* Build the spill_regs array for the function. */
3928 /* If there are some registers still to eliminate and one of the spill regs
3929 wasn't ever used before, additional stack space may have to be
3930 allocated to store this register. Thus, we may have changed the offset
3931 between the stack and frame pointers, so mark that something has changed.
3933 One might think that we need only set VAL to 1 if this is a call-used
3934 register. However, the set of registers that must be saved by the
3935 prologue is not identical to the call-used set. For example, the
3936 register used by the call insn for the return PC is a call-used register,
3937 but must be saved by the prologue. */
3939 n_spills = 0;
3940 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3941 if (TEST_HARD_REG_BIT (used_spill_regs, i))
3943 spill_reg_order[i] = n_spills;
3944 spill_regs[n_spills++] = i;
3945 if (num_eliminable && ! df_regs_ever_live_p (i))
3946 something_changed = 1;
3947 df_set_regs_ever_live (i, true);
3949 else
3950 spill_reg_order[i] = -1;
3952 EXECUTE_IF_SET_IN_REG_SET (&spilled_pseudos, FIRST_PSEUDO_REGISTER, i, rsi)
3953 if (! ira_conflicts_p || reg_renumber[i] >= 0)
3955 /* Record the current hard register the pseudo is allocated to
3956 in pseudo_previous_regs so we avoid reallocating it to the
3957 same hard reg in a later pass. */
3958 gcc_assert (reg_renumber[i] >= 0);
3960 SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]);
3961 /* Mark it as no longer having a hard register home. */
3962 reg_renumber[i] = -1;
3963 if (ira_conflicts_p)
3964 /* Inform IRA about the change. */
3965 ira_mark_allocation_change (i);
3966 /* We will need to scan everything again. */
3967 something_changed = 1;
3970 /* Retry global register allocation if possible. */
3971 if (global && ira_conflicts_p)
3973 unsigned int n;
3975 memset (pseudo_forbidden_regs, 0, max_regno * sizeof (HARD_REG_SET));
3976 /* For every insn that needs reloads, set the registers used as spill
3977 regs in pseudo_forbidden_regs for every pseudo live across the
3978 insn. */
3979 for (chain = insns_need_reload; chain; chain = chain->next_need_reload)
3981 EXECUTE_IF_SET_IN_REG_SET
3982 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
3984 IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
3985 chain->used_spill_regs);
3987 EXECUTE_IF_SET_IN_REG_SET
3988 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
3990 IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
3991 chain->used_spill_regs);
3995 /* Retry allocating the pseudos spilled in IRA and the
3996 reload. For each reg, merge the various reg sets that
3997 indicate which hard regs can't be used, and call
3998 ira_reassign_pseudos. */
3999 for (n = 0, i = FIRST_PSEUDO_REGISTER; i < (unsigned) max_regno; i++)
4000 if (reg_old_renumber[i] != reg_renumber[i])
4002 if (reg_renumber[i] < 0)
4003 temp_pseudo_reg_arr[n++] = i;
4004 else
4005 CLEAR_REGNO_REG_SET (&spilled_pseudos, i);
4007 if (ira_reassign_pseudos (temp_pseudo_reg_arr, n,
4008 bad_spill_regs_global,
4009 pseudo_forbidden_regs, pseudo_previous_regs,
4010 &spilled_pseudos))
4011 something_changed = 1;
4013 /* Fix up the register information in the insn chain.
4014 This involves deleting those of the spilled pseudos which did not get
4015 a new hard register home from the live_{before,after} sets. */
4016 for (chain = reload_insn_chain; chain; chain = chain->next)
4018 HARD_REG_SET used_by_pseudos;
4019 HARD_REG_SET used_by_pseudos2;
4021 if (! ira_conflicts_p)
4023 /* Don't do it for IRA because IRA and the reload still can
4024 assign hard registers to the spilled pseudos on next
4025 reload iterations. */
4026 AND_COMPL_REG_SET (&chain->live_throughout, &spilled_pseudos);
4027 AND_COMPL_REG_SET (&chain->dead_or_set, &spilled_pseudos);
4029 /* Mark any unallocated hard regs as available for spills. That
4030 makes inheritance work somewhat better. */
4031 if (chain->need_reload)
4033 REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
4034 REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
4035 IOR_HARD_REG_SET (used_by_pseudos, used_by_pseudos2);
4037 compute_use_by_pseudos (&used_by_pseudos, &chain->live_throughout);
4038 compute_use_by_pseudos (&used_by_pseudos, &chain->dead_or_set);
4039 /* Value of chain->used_spill_regs from previous iteration
4040 may be not included in the value calculated here because
4041 of possible removing caller-saves insns (see function
4042 delete_caller_save_insns. */
4043 COMPL_HARD_REG_SET (chain->used_spill_regs, used_by_pseudos);
4044 AND_HARD_REG_SET (chain->used_spill_regs, used_spill_regs);
4048 CLEAR_REG_SET (&changed_allocation_pseudos);
4049 /* Let alter_reg modify the reg rtx's for the modified pseudos. */
4050 for (i = FIRST_PSEUDO_REGISTER; i < (unsigned)max_regno; i++)
4052 int regno = reg_renumber[i];
4053 if (reg_old_renumber[i] == regno)
4054 continue;
4056 SET_REGNO_REG_SET (&changed_allocation_pseudos, i);
4058 alter_reg (i, reg_old_renumber[i], false);
4059 reg_old_renumber[i] = regno;
4060 if (dump_file)
4062 if (regno == -1)
4063 fprintf (dump_file, " Register %d now on stack.\n\n", i);
4064 else
4065 fprintf (dump_file, " Register %d now in %d.\n\n",
4066 i, reg_renumber[i]);
4070 return something_changed;
4073 /* Find all paradoxical subregs within X and update reg_max_ref_width. */
4075 static void
4076 scan_paradoxical_subregs (rtx x)
4078 int i;
4079 const char *fmt;
4080 enum rtx_code code = GET_CODE (x);
4082 switch (code)
4084 case REG:
4085 case CONST_INT:
4086 case CONST:
4087 case SYMBOL_REF:
4088 case LABEL_REF:
4089 case CONST_DOUBLE:
4090 case CONST_FIXED:
4091 case CONST_VECTOR: /* shouldn't happen, but just in case. */
4092 case CC0:
4093 case PC:
4094 case USE:
4095 case CLOBBER:
4096 return;
4098 case SUBREG:
4099 if (REG_P (SUBREG_REG (x))
4100 && (GET_MODE_SIZE (GET_MODE (x))
4101 > reg_max_ref_width[REGNO (SUBREG_REG (x))]))
4103 reg_max_ref_width[REGNO (SUBREG_REG (x))]
4104 = GET_MODE_SIZE (GET_MODE (x));
4105 mark_home_live_1 (REGNO (SUBREG_REG (x)), GET_MODE (x));
4107 return;
4109 default:
4110 break;
4113 fmt = GET_RTX_FORMAT (code);
4114 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4116 if (fmt[i] == 'e')
4117 scan_paradoxical_subregs (XEXP (x, i));
4118 else if (fmt[i] == 'E')
4120 int j;
4121 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4122 scan_paradoxical_subregs (XVECEXP (x, i, j));
4127 /* A subroutine of reload_as_needed. If INSN has a REG_EH_REGION note,
4128 examine all of the reload insns between PREV and NEXT exclusive, and
4129 annotate all that may trap. */
4131 static void
4132 fixup_eh_region_note (rtx insn, rtx prev, rtx next)
4134 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
4135 rtx i;
4137 if (note == NULL)
4138 return;
4140 if (! may_trap_p (PATTERN (insn)))
4141 remove_note (insn, note);
4143 for (i = NEXT_INSN (prev); i != next; i = NEXT_INSN (i))
4144 if (INSN_P (i) && i != insn && may_trap_p (PATTERN (i)))
4145 add_reg_note (i, REG_EH_REGION, XEXP (note, 0));
4148 /* Reload pseudo-registers into hard regs around each insn as needed.
4149 Additional register load insns are output before the insn that needs it
4150 and perhaps store insns after insns that modify the reloaded pseudo reg.
4152 reg_last_reload_reg and reg_reloaded_contents keep track of
4153 which registers are already available in reload registers.
4154 We update these for the reloads that we perform,
4155 as the insns are scanned. */
4157 static void
4158 reload_as_needed (int live_known)
4160 struct insn_chain *chain;
4161 #if defined (AUTO_INC_DEC)
4162 int i;
4163 #endif
4164 rtx x;
4166 memset (spill_reg_rtx, 0, sizeof spill_reg_rtx);
4167 memset (spill_reg_store, 0, sizeof spill_reg_store);
4168 reg_last_reload_reg = XCNEWVEC (rtx, max_regno);
4169 INIT_REG_SET (&reg_has_output_reload);
4170 CLEAR_HARD_REG_SET (reg_reloaded_valid);
4171 CLEAR_HARD_REG_SET (reg_reloaded_call_part_clobbered);
4173 set_initial_elim_offsets ();
4175 for (chain = reload_insn_chain; chain; chain = chain->next)
4177 rtx prev = 0;
4178 rtx insn = chain->insn;
4179 rtx old_next = NEXT_INSN (insn);
4180 #ifdef AUTO_INC_DEC
4181 rtx old_prev = PREV_INSN (insn);
4182 #endif
4184 /* If we pass a label, copy the offsets from the label information
4185 into the current offsets of each elimination. */
4186 if (LABEL_P (insn))
4187 set_offsets_for_label (insn);
4189 else if (INSN_P (insn))
4191 regset_head regs_to_forget;
4192 INIT_REG_SET (&regs_to_forget);
4193 note_stores (PATTERN (insn), forget_old_reloads_1, &regs_to_forget);
4195 /* If this is a USE and CLOBBER of a MEM, ensure that any
4196 references to eliminable registers have been removed. */
4198 if ((GET_CODE (PATTERN (insn)) == USE
4199 || GET_CODE (PATTERN (insn)) == CLOBBER)
4200 && MEM_P (XEXP (PATTERN (insn), 0)))
4201 XEXP (XEXP (PATTERN (insn), 0), 0)
4202 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
4203 GET_MODE (XEXP (PATTERN (insn), 0)),
4204 NULL_RTX);
4206 /* If we need to do register elimination processing, do so.
4207 This might delete the insn, in which case we are done. */
4208 if ((num_eliminable || num_eliminable_invariants) && chain->need_elim)
4210 eliminate_regs_in_insn (insn, 1);
4211 if (NOTE_P (insn))
4213 update_eliminable_offsets ();
4214 CLEAR_REG_SET (&regs_to_forget);
4215 continue;
4219 /* If need_elim is nonzero but need_reload is zero, one might think
4220 that we could simply set n_reloads to 0. However, find_reloads
4221 could have done some manipulation of the insn (such as swapping
4222 commutative operands), and these manipulations are lost during
4223 the first pass for every insn that needs register elimination.
4224 So the actions of find_reloads must be redone here. */
4226 if (! chain->need_elim && ! chain->need_reload
4227 && ! chain->need_operand_change)
4228 n_reloads = 0;
4229 /* First find the pseudo regs that must be reloaded for this insn.
4230 This info is returned in the tables reload_... (see reload.h).
4231 Also modify the body of INSN by substituting RELOAD
4232 rtx's for those pseudo regs. */
4233 else
4235 CLEAR_REG_SET (&reg_has_output_reload);
4236 CLEAR_HARD_REG_SET (reg_is_output_reload);
4238 find_reloads (insn, 1, spill_indirect_levels, live_known,
4239 spill_reg_order);
4242 if (n_reloads > 0)
4244 rtx next = NEXT_INSN (insn);
4245 rtx p;
4247 prev = PREV_INSN (insn);
4249 /* Now compute which reload regs to reload them into. Perhaps
4250 reusing reload regs from previous insns, or else output
4251 load insns to reload them. Maybe output store insns too.
4252 Record the choices of reload reg in reload_reg_rtx. */
4253 choose_reload_regs (chain);
4255 /* Merge any reloads that we didn't combine for fear of
4256 increasing the number of spill registers needed but now
4257 discover can be safely merged. */
4258 if (SMALL_REGISTER_CLASSES)
4259 merge_assigned_reloads (insn);
4261 /* Generate the insns to reload operands into or out of
4262 their reload regs. */
4263 emit_reload_insns (chain);
4265 /* Substitute the chosen reload regs from reload_reg_rtx
4266 into the insn's body (or perhaps into the bodies of other
4267 load and store insn that we just made for reloading
4268 and that we moved the structure into). */
4269 subst_reloads (insn);
4271 /* Adjust the exception region notes for loads and stores. */
4272 if (flag_non_call_exceptions && !CALL_P (insn))
4273 fixup_eh_region_note (insn, prev, next);
4275 /* If this was an ASM, make sure that all the reload insns
4276 we have generated are valid. If not, give an error
4277 and delete them. */
4278 if (asm_noperands (PATTERN (insn)) >= 0)
4279 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
4280 if (p != insn && INSN_P (p)
4281 && GET_CODE (PATTERN (p)) != USE
4282 && (recog_memoized (p) < 0
4283 || (extract_insn (p), ! constrain_operands (1))))
4285 error_for_asm (insn,
4286 "%<asm%> operand requires "
4287 "impossible reload");
4288 delete_insn (p);
4292 if (num_eliminable && chain->need_elim)
4293 update_eliminable_offsets ();
4295 /* Any previously reloaded spilled pseudo reg, stored in this insn,
4296 is no longer validly lying around to save a future reload.
4297 Note that this does not detect pseudos that were reloaded
4298 for this insn in order to be stored in
4299 (obeying register constraints). That is correct; such reload
4300 registers ARE still valid. */
4301 forget_marked_reloads (&regs_to_forget);
4302 CLEAR_REG_SET (&regs_to_forget);
4304 /* There may have been CLOBBER insns placed after INSN. So scan
4305 between INSN and NEXT and use them to forget old reloads. */
4306 for (x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
4307 if (NONJUMP_INSN_P (x) && GET_CODE (PATTERN (x)) == CLOBBER)
4308 note_stores (PATTERN (x), forget_old_reloads_1, NULL);
4310 #ifdef AUTO_INC_DEC
4311 /* Likewise for regs altered by auto-increment in this insn.
4312 REG_INC notes have been changed by reloading:
4313 find_reloads_address_1 records substitutions for them,
4314 which have been performed by subst_reloads above. */
4315 for (i = n_reloads - 1; i >= 0; i--)
4317 rtx in_reg = rld[i].in_reg;
4318 if (in_reg)
4320 enum rtx_code code = GET_CODE (in_reg);
4321 /* PRE_INC / PRE_DEC will have the reload register ending up
4322 with the same value as the stack slot, but that doesn't
4323 hold true for POST_INC / POST_DEC. Either we have to
4324 convert the memory access to a true POST_INC / POST_DEC,
4325 or we can't use the reload register for inheritance. */
4326 if ((code == POST_INC || code == POST_DEC)
4327 && TEST_HARD_REG_BIT (reg_reloaded_valid,
4328 REGNO (rld[i].reg_rtx))
4329 /* Make sure it is the inc/dec pseudo, and not
4330 some other (e.g. output operand) pseudo. */
4331 && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4332 == REGNO (XEXP (in_reg, 0))))
4335 rtx reload_reg = rld[i].reg_rtx;
4336 enum machine_mode mode = GET_MODE (reload_reg);
4337 int n = 0;
4338 rtx p;
4340 for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p))
4342 /* We really want to ignore REG_INC notes here, so
4343 use PATTERN (p) as argument to reg_set_p . */
4344 if (reg_set_p (reload_reg, PATTERN (p)))
4345 break;
4346 n = count_occurrences (PATTERN (p), reload_reg, 0);
4347 if (! n)
4348 continue;
4349 if (n == 1)
4351 rtx replace_reg
4352 = gen_rtx_fmt_e (code, mode, reload_reg);
4354 validate_replace_rtx_group (reload_reg,
4355 replace_reg, p);
4356 n = verify_changes (0);
4358 /* We must also verify that the constraints
4359 are met after the replacement. Make sure
4360 extract_insn is only called for an insn
4361 where the replacements were found to be
4362 valid so far. */
4363 if (n)
4365 extract_insn (p);
4366 n = constrain_operands (1);
4369 /* If the constraints were not met, then
4370 undo the replacement, else confirm it. */
4371 if (!n)
4372 cancel_changes (0);
4373 else
4374 confirm_change_group ();
4376 break;
4378 if (n == 1)
4380 add_reg_note (p, REG_INC, reload_reg);
4381 /* Mark this as having an output reload so that the
4382 REG_INC processing code below won't invalidate
4383 the reload for inheritance. */
4384 SET_HARD_REG_BIT (reg_is_output_reload,
4385 REGNO (reload_reg));
4386 SET_REGNO_REG_SET (&reg_has_output_reload,
4387 REGNO (XEXP (in_reg, 0)));
4389 else
4390 forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX,
4391 NULL);
4393 else if ((code == PRE_INC || code == PRE_DEC)
4394 && TEST_HARD_REG_BIT (reg_reloaded_valid,
4395 REGNO (rld[i].reg_rtx))
4396 /* Make sure it is the inc/dec pseudo, and not
4397 some other (e.g. output operand) pseudo. */
4398 && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4399 == REGNO (XEXP (in_reg, 0))))
4401 SET_HARD_REG_BIT (reg_is_output_reload,
4402 REGNO (rld[i].reg_rtx));
4403 SET_REGNO_REG_SET (&reg_has_output_reload,
4404 REGNO (XEXP (in_reg, 0)));
4406 else if (code == PRE_INC || code == PRE_DEC
4407 || code == POST_INC || code == POST_DEC)
4409 int in_regno = REGNO (XEXP (in_reg, 0));
4411 if (reg_last_reload_reg[in_regno] != NULL_RTX)
4413 int in_hard_regno;
4414 bool forget_p = true;
4416 in_hard_regno = REGNO (reg_last_reload_reg[in_regno]);
4417 if (TEST_HARD_REG_BIT (reg_reloaded_valid,
4418 in_hard_regno))
4420 for (x = old_prev ? NEXT_INSN (old_prev) : insn;
4421 x != old_next;
4422 x = NEXT_INSN (x))
4423 if (x == reg_reloaded_insn[in_hard_regno])
4425 forget_p = false;
4426 break;
4429 /* If for some reasons, we didn't set up
4430 reg_last_reload_reg in this insn,
4431 invalidate inheritance from previous
4432 insns for the incremented/decremented
4433 register. Such registers will be not in
4434 reg_has_output_reload. Invalidate it
4435 also if the corresponding element in
4436 reg_reloaded_insn is also
4437 invalidated. */
4438 if (forget_p)
4439 forget_old_reloads_1 (XEXP (in_reg, 0),
4440 NULL_RTX, NULL);
4445 /* If a pseudo that got a hard register is auto-incremented,
4446 we must purge records of copying it into pseudos without
4447 hard registers. */
4448 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
4449 if (REG_NOTE_KIND (x) == REG_INC)
4451 /* See if this pseudo reg was reloaded in this insn.
4452 If so, its last-reload info is still valid
4453 because it is based on this insn's reload. */
4454 for (i = 0; i < n_reloads; i++)
4455 if (rld[i].out == XEXP (x, 0))
4456 break;
4458 if (i == n_reloads)
4459 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX, NULL);
4461 #endif
4463 /* A reload reg's contents are unknown after a label. */
4464 if (LABEL_P (insn))
4465 CLEAR_HARD_REG_SET (reg_reloaded_valid);
4467 /* Don't assume a reload reg is still good after a call insn
4468 if it is a call-used reg, or if it contains a value that will
4469 be partially clobbered by the call. */
4470 else if (CALL_P (insn))
4472 AND_COMPL_HARD_REG_SET (reg_reloaded_valid, call_used_reg_set);
4473 AND_COMPL_HARD_REG_SET (reg_reloaded_valid, reg_reloaded_call_part_clobbered);
4477 /* Clean up. */
4478 free (reg_last_reload_reg);
4479 CLEAR_REG_SET (&reg_has_output_reload);
4482 /* Discard all record of any value reloaded from X,
4483 or reloaded in X from someplace else;
4484 unless X is an output reload reg of the current insn.
4486 X may be a hard reg (the reload reg)
4487 or it may be a pseudo reg that was reloaded from.
4489 When DATA is non-NULL just mark the registers in regset
4490 to be forgotten later. */
4492 static void
4493 forget_old_reloads_1 (rtx x, const_rtx ignored ATTRIBUTE_UNUSED,
4494 void *data)
4496 unsigned int regno;
4497 unsigned int nr;
4498 regset regs = (regset) data;
4500 /* note_stores does give us subregs of hard regs,
4501 subreg_regno_offset requires a hard reg. */
4502 while (GET_CODE (x) == SUBREG)
4504 /* We ignore the subreg offset when calculating the regno,
4505 because we are using the entire underlying hard register
4506 below. */
4507 x = SUBREG_REG (x);
4510 if (!REG_P (x))
4511 return;
4513 regno = REGNO (x);
4515 if (regno >= FIRST_PSEUDO_REGISTER)
4516 nr = 1;
4517 else
4519 unsigned int i;
4521 nr = hard_regno_nregs[regno][GET_MODE (x)];
4522 /* Storing into a spilled-reg invalidates its contents.
4523 This can happen if a block-local pseudo is allocated to that reg
4524 and it wasn't spilled because this block's total need is 0.
4525 Then some insn might have an optional reload and use this reg. */
4526 if (!regs)
4527 for (i = 0; i < nr; i++)
4528 /* But don't do this if the reg actually serves as an output
4529 reload reg in the current instruction. */
4530 if (n_reloads == 0
4531 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
4533 CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
4534 spill_reg_store[regno + i] = 0;
4538 if (regs)
4539 while (nr-- > 0)
4540 SET_REGNO_REG_SET (regs, regno + nr);
4541 else
4543 /* Since value of X has changed,
4544 forget any value previously copied from it. */
4546 while (nr-- > 0)
4547 /* But don't forget a copy if this is the output reload
4548 that establishes the copy's validity. */
4549 if (n_reloads == 0
4550 || !REGNO_REG_SET_P (&reg_has_output_reload, regno + nr))
4551 reg_last_reload_reg[regno + nr] = 0;
4555 /* Forget the reloads marked in regset by previous function. */
4556 static void
4557 forget_marked_reloads (regset regs)
4559 unsigned int reg;
4560 reg_set_iterator rsi;
4561 EXECUTE_IF_SET_IN_REG_SET (regs, 0, reg, rsi)
4563 if (reg < FIRST_PSEUDO_REGISTER
4564 /* But don't do this if the reg actually serves as an output
4565 reload reg in the current instruction. */
4566 && (n_reloads == 0
4567 || ! TEST_HARD_REG_BIT (reg_is_output_reload, reg)))
4569 CLEAR_HARD_REG_BIT (reg_reloaded_valid, reg);
4570 spill_reg_store[reg] = 0;
4572 if (n_reloads == 0
4573 || !REGNO_REG_SET_P (&reg_has_output_reload, reg))
4574 reg_last_reload_reg[reg] = 0;
4578 /* The following HARD_REG_SETs indicate when each hard register is
4579 used for a reload of various parts of the current insn. */
4581 /* If reg is unavailable for all reloads. */
4582 static HARD_REG_SET reload_reg_unavailable;
4583 /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4584 static HARD_REG_SET reload_reg_used;
4585 /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4586 static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4587 /* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I. */
4588 static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
4589 /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4590 static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4591 /* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I. */
4592 static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
4593 /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4594 static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4595 /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4596 static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4597 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4598 static HARD_REG_SET reload_reg_used_in_op_addr;
4599 /* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4600 static HARD_REG_SET reload_reg_used_in_op_addr_reload;
4601 /* If reg is in use for a RELOAD_FOR_INSN reload. */
4602 static HARD_REG_SET reload_reg_used_in_insn;
4603 /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4604 static HARD_REG_SET reload_reg_used_in_other_addr;
4606 /* If reg is in use as a reload reg for any sort of reload. */
4607 static HARD_REG_SET reload_reg_used_at_all;
4609 /* If reg is use as an inherited reload. We just mark the first register
4610 in the group. */
4611 static HARD_REG_SET reload_reg_used_for_inherit;
4613 /* Records which hard regs are used in any way, either as explicit use or
4614 by being allocated to a pseudo during any point of the current insn. */
4615 static HARD_REG_SET reg_used_in_insn;
4617 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4618 TYPE. MODE is used to indicate how many consecutive regs are
4619 actually used. */
4621 static void
4622 mark_reload_reg_in_use (unsigned int regno, int opnum, enum reload_type type,
4623 enum machine_mode mode)
4625 unsigned int nregs = hard_regno_nregs[regno][mode];
4626 unsigned int i;
4628 for (i = regno; i < nregs + regno; i++)
4630 switch (type)
4632 case RELOAD_OTHER:
4633 SET_HARD_REG_BIT (reload_reg_used, i);
4634 break;
4636 case RELOAD_FOR_INPUT_ADDRESS:
4637 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4638 break;
4640 case RELOAD_FOR_INPADDR_ADDRESS:
4641 SET_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4642 break;
4644 case RELOAD_FOR_OUTPUT_ADDRESS:
4645 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4646 break;
4648 case RELOAD_FOR_OUTADDR_ADDRESS:
4649 SET_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4650 break;
4652 case RELOAD_FOR_OPERAND_ADDRESS:
4653 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4654 break;
4656 case RELOAD_FOR_OPADDR_ADDR:
4657 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4658 break;
4660 case RELOAD_FOR_OTHER_ADDRESS:
4661 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4662 break;
4664 case RELOAD_FOR_INPUT:
4665 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4666 break;
4668 case RELOAD_FOR_OUTPUT:
4669 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4670 break;
4672 case RELOAD_FOR_INSN:
4673 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
4674 break;
4677 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4681 /* Similarly, but show REGNO is no longer in use for a reload. */
4683 static void
4684 clear_reload_reg_in_use (unsigned int regno, int opnum,
4685 enum reload_type type, enum machine_mode mode)
4687 unsigned int nregs = hard_regno_nregs[regno][mode];
4688 unsigned int start_regno, end_regno, r;
4689 int i;
4690 /* A complication is that for some reload types, inheritance might
4691 allow multiple reloads of the same types to share a reload register.
4692 We set check_opnum if we have to check only reloads with the same
4693 operand number, and check_any if we have to check all reloads. */
4694 int check_opnum = 0;
4695 int check_any = 0;
4696 HARD_REG_SET *used_in_set;
4698 switch (type)
4700 case RELOAD_OTHER:
4701 used_in_set = &reload_reg_used;
4702 break;
4704 case RELOAD_FOR_INPUT_ADDRESS:
4705 used_in_set = &reload_reg_used_in_input_addr[opnum];
4706 break;
4708 case RELOAD_FOR_INPADDR_ADDRESS:
4709 check_opnum = 1;
4710 used_in_set = &reload_reg_used_in_inpaddr_addr[opnum];
4711 break;
4713 case RELOAD_FOR_OUTPUT_ADDRESS:
4714 used_in_set = &reload_reg_used_in_output_addr[opnum];
4715 break;
4717 case RELOAD_FOR_OUTADDR_ADDRESS:
4718 check_opnum = 1;
4719 used_in_set = &reload_reg_used_in_outaddr_addr[opnum];
4720 break;
4722 case RELOAD_FOR_OPERAND_ADDRESS:
4723 used_in_set = &reload_reg_used_in_op_addr;
4724 break;
4726 case RELOAD_FOR_OPADDR_ADDR:
4727 check_any = 1;
4728 used_in_set = &reload_reg_used_in_op_addr_reload;
4729 break;
4731 case RELOAD_FOR_OTHER_ADDRESS:
4732 used_in_set = &reload_reg_used_in_other_addr;
4733 check_any = 1;
4734 break;
4736 case RELOAD_FOR_INPUT:
4737 used_in_set = &reload_reg_used_in_input[opnum];
4738 break;
4740 case RELOAD_FOR_OUTPUT:
4741 used_in_set = &reload_reg_used_in_output[opnum];
4742 break;
4744 case RELOAD_FOR_INSN:
4745 used_in_set = &reload_reg_used_in_insn;
4746 break;
4747 default:
4748 gcc_unreachable ();
4750 /* We resolve conflicts with remaining reloads of the same type by
4751 excluding the intervals of reload registers by them from the
4752 interval of freed reload registers. Since we only keep track of
4753 one set of interval bounds, we might have to exclude somewhat
4754 more than what would be necessary if we used a HARD_REG_SET here.
4755 But this should only happen very infrequently, so there should
4756 be no reason to worry about it. */
4758 start_regno = regno;
4759 end_regno = regno + nregs;
4760 if (check_opnum || check_any)
4762 for (i = n_reloads - 1; i >= 0; i--)
4764 if (rld[i].when_needed == type
4765 && (check_any || rld[i].opnum == opnum)
4766 && rld[i].reg_rtx)
4768 unsigned int conflict_start = true_regnum (rld[i].reg_rtx);
4769 unsigned int conflict_end
4770 = end_hard_regno (rld[i].mode, conflict_start);
4772 /* If there is an overlap with the first to-be-freed register,
4773 adjust the interval start. */
4774 if (conflict_start <= start_regno && conflict_end > start_regno)
4775 start_regno = conflict_end;
4776 /* Otherwise, if there is a conflict with one of the other
4777 to-be-freed registers, adjust the interval end. */
4778 if (conflict_start > start_regno && conflict_start < end_regno)
4779 end_regno = conflict_start;
4784 for (r = start_regno; r < end_regno; r++)
4785 CLEAR_HARD_REG_BIT (*used_in_set, r);
4788 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
4789 specified by OPNUM and TYPE. */
4791 static int
4792 reload_reg_free_p (unsigned int regno, int opnum, enum reload_type type)
4794 int i;
4796 /* In use for a RELOAD_OTHER means it's not available for anything. */
4797 if (TEST_HARD_REG_BIT (reload_reg_used, regno)
4798 || TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
4799 return 0;
4801 switch (type)
4803 case RELOAD_OTHER:
4804 /* In use for anything means we can't use it for RELOAD_OTHER. */
4805 if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4806 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4807 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
4808 || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4809 return 0;
4811 for (i = 0; i < reload_n_operands; i++)
4812 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4813 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4814 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4815 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4816 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4817 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4818 return 0;
4820 return 1;
4822 case RELOAD_FOR_INPUT:
4823 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4824 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4825 return 0;
4827 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4828 return 0;
4830 /* If it is used for some other input, can't use it. */
4831 for (i = 0; i < reload_n_operands; i++)
4832 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4833 return 0;
4835 /* If it is used in a later operand's address, can't use it. */
4836 for (i = opnum + 1; i < reload_n_operands; i++)
4837 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4838 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4839 return 0;
4841 return 1;
4843 case RELOAD_FOR_INPUT_ADDRESS:
4844 /* Can't use a register if it is used for an input address for this
4845 operand or used as an input in an earlier one. */
4846 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
4847 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4848 return 0;
4850 for (i = 0; i < opnum; i++)
4851 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4852 return 0;
4854 return 1;
4856 case RELOAD_FOR_INPADDR_ADDRESS:
4857 /* Can't use a register if it is used for an input address
4858 for this operand or used as an input in an earlier
4859 one. */
4860 if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4861 return 0;
4863 for (i = 0; i < opnum; i++)
4864 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4865 return 0;
4867 return 1;
4869 case RELOAD_FOR_OUTPUT_ADDRESS:
4870 /* Can't use a register if it is used for an output address for this
4871 operand or used as an output in this or a later operand. Note
4872 that multiple output operands are emitted in reverse order, so
4873 the conflicting ones are those with lower indices. */
4874 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4875 return 0;
4877 for (i = 0; i <= opnum; i++)
4878 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4879 return 0;
4881 return 1;
4883 case RELOAD_FOR_OUTADDR_ADDRESS:
4884 /* Can't use a register if it is used for an output address
4885 for this operand or used as an output in this or a
4886 later operand. Note that multiple output operands are
4887 emitted in reverse order, so the conflicting ones are
4888 those with lower indices. */
4889 if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4890 return 0;
4892 for (i = 0; i <= opnum; i++)
4893 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4894 return 0;
4896 return 1;
4898 case RELOAD_FOR_OPERAND_ADDRESS:
4899 for (i = 0; i < reload_n_operands; i++)
4900 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4901 return 0;
4903 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4904 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4906 case RELOAD_FOR_OPADDR_ADDR:
4907 for (i = 0; i < reload_n_operands; i++)
4908 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4909 return 0;
4911 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
4913 case RELOAD_FOR_OUTPUT:
4914 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4915 outputs, or an operand address for this or an earlier output.
4916 Note that multiple output operands are emitted in reverse order,
4917 so the conflicting ones are those with higher indices. */
4918 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4919 return 0;
4921 for (i = 0; i < reload_n_operands; i++)
4922 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4923 return 0;
4925 for (i = opnum; i < reload_n_operands; i++)
4926 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4927 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4928 return 0;
4930 return 1;
4932 case RELOAD_FOR_INSN:
4933 for (i = 0; i < reload_n_operands; i++)
4934 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4935 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4936 return 0;
4938 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4939 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4941 case RELOAD_FOR_OTHER_ADDRESS:
4942 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4944 default:
4945 gcc_unreachable ();
4949 /* Return 1 if the value in reload reg REGNO, as used by a reload
4950 needed for the part of the insn specified by OPNUM and TYPE,
4951 is still available in REGNO at the end of the insn.
4953 We can assume that the reload reg was already tested for availability
4954 at the time it is needed, and we should not check this again,
4955 in case the reg has already been marked in use. */
4957 static int
4958 reload_reg_reaches_end_p (unsigned int regno, int opnum, enum reload_type type)
4960 int i;
4962 switch (type)
4964 case RELOAD_OTHER:
4965 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4966 its value must reach the end. */
4967 return 1;
4969 /* If this use is for part of the insn,
4970 its value reaches if no subsequent part uses the same register.
4971 Just like the above function, don't try to do this with lots
4972 of fallthroughs. */
4974 case RELOAD_FOR_OTHER_ADDRESS:
4975 /* Here we check for everything else, since these don't conflict
4976 with anything else and everything comes later. */
4978 for (i = 0; i < reload_n_operands; i++)
4979 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4980 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4981 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4982 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4983 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4984 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4985 return 0;
4987 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4988 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
4989 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4990 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4992 case RELOAD_FOR_INPUT_ADDRESS:
4993 case RELOAD_FOR_INPADDR_ADDRESS:
4994 /* Similar, except that we check only for this and subsequent inputs
4995 and the address of only subsequent inputs and we do not need
4996 to check for RELOAD_OTHER objects since they are known not to
4997 conflict. */
4999 for (i = opnum; i < reload_n_operands; i++)
5000 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5001 return 0;
5003 for (i = opnum + 1; i < reload_n_operands; i++)
5004 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5005 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
5006 return 0;
5008 for (i = 0; i < reload_n_operands; i++)
5009 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5010 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5011 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5012 return 0;
5014 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
5015 return 0;
5017 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5018 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5019 && !TEST_HARD_REG_BIT (reload_reg_used, regno));
5021 case RELOAD_FOR_INPUT:
5022 /* Similar to input address, except we start at the next operand for
5023 both input and input address and we do not check for
5024 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
5025 would conflict. */
5027 for (i = opnum + 1; i < reload_n_operands; i++)
5028 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5029 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
5030 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5031 return 0;
5033 /* ... fall through ... */
5035 case RELOAD_FOR_OPERAND_ADDRESS:
5036 /* Check outputs and their addresses. */
5038 for (i = 0; i < reload_n_operands; i++)
5039 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5040 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5041 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5042 return 0;
5044 return (!TEST_HARD_REG_BIT (reload_reg_used, regno));
5046 case RELOAD_FOR_OPADDR_ADDR:
5047 for (i = 0; i < reload_n_operands; i++)
5048 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5049 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5050 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5051 return 0;
5053 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5054 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5055 && !TEST_HARD_REG_BIT (reload_reg_used, regno));
5057 case RELOAD_FOR_INSN:
5058 /* These conflict with other outputs with RELOAD_OTHER. So
5059 we need only check for output addresses. */
5061 opnum = reload_n_operands;
5063 /* ... fall through ... */
5065 case RELOAD_FOR_OUTPUT:
5066 case RELOAD_FOR_OUTPUT_ADDRESS:
5067 case RELOAD_FOR_OUTADDR_ADDRESS:
5068 /* We already know these can't conflict with a later output. So the
5069 only thing to check are later output addresses.
5070 Note that multiple output operands are emitted in reverse order,
5071 so the conflicting ones are those with lower indices. */
5072 for (i = 0; i < opnum; i++)
5073 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5074 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
5075 return 0;
5077 return 1;
5079 default:
5080 gcc_unreachable ();
5084 /* Like reload_reg_reaches_end_p, but check that the condition holds for
5085 every register in the range [REGNO, REGNO + NREGS). */
5087 static bool
5088 reload_regs_reach_end_p (unsigned int regno, int nregs,
5089 int opnum, enum reload_type type)
5091 int i;
5093 for (i = 0; i < nregs; i++)
5094 if (!reload_reg_reaches_end_p (regno + i, opnum, type))
5095 return false;
5096 return true;
5100 /* Returns whether R1 and R2 are uniquely chained: the value of one
5101 is used by the other, and that value is not used by any other
5102 reload for this insn. This is used to partially undo the decision
5103 made in find_reloads when in the case of multiple
5104 RELOAD_FOR_OPERAND_ADDRESS reloads it converts all
5105 RELOAD_FOR_OPADDR_ADDR reloads into RELOAD_FOR_OPERAND_ADDRESS
5106 reloads. This code tries to avoid the conflict created by that
5107 change. It might be cleaner to explicitly keep track of which
5108 RELOAD_FOR_OPADDR_ADDR reload is associated with which
5109 RELOAD_FOR_OPERAND_ADDRESS reload, rather than to try to detect
5110 this after the fact. */
5111 static bool
5112 reloads_unique_chain_p (int r1, int r2)
5114 int i;
5116 /* We only check input reloads. */
5117 if (! rld[r1].in || ! rld[r2].in)
5118 return false;
5120 /* Avoid anything with output reloads. */
5121 if (rld[r1].out || rld[r2].out)
5122 return false;
5124 /* "chained" means one reload is a component of the other reload,
5125 not the same as the other reload. */
5126 if (rld[r1].opnum != rld[r2].opnum
5127 || rtx_equal_p (rld[r1].in, rld[r2].in)
5128 || rld[r1].optional || rld[r2].optional
5129 || ! (reg_mentioned_p (rld[r1].in, rld[r2].in)
5130 || reg_mentioned_p (rld[r2].in, rld[r1].in)))
5131 return false;
5133 for (i = 0; i < n_reloads; i ++)
5134 /* Look for input reloads that aren't our two */
5135 if (i != r1 && i != r2 && rld[i].in)
5137 /* If our reload is mentioned at all, it isn't a simple chain. */
5138 if (reg_mentioned_p (rld[r1].in, rld[i].in))
5139 return false;
5141 return true;
5145 /* The recursive function change all occurrences of WHAT in *WHERE
5146 onto REPL. */
5147 static void
5148 substitute (rtx *where, const_rtx what, rtx repl)
5150 const char *fmt;
5151 int i;
5152 enum rtx_code code;
5154 if (*where == 0)
5155 return;
5157 if (*where == what || rtx_equal_p (*where, what))
5159 *where = repl;
5160 return;
5163 code = GET_CODE (*where);
5164 fmt = GET_RTX_FORMAT (code);
5165 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5167 if (fmt[i] == 'E')
5169 int j;
5171 for (j = XVECLEN (*where, i) - 1; j >= 0; j--)
5172 substitute (&XVECEXP (*where, i, j), what, repl);
5174 else if (fmt[i] == 'e')
5175 substitute (&XEXP (*where, i), what, repl);
5179 /* The function returns TRUE if chain of reload R1 and R2 (in any
5180 order) can be evaluated without usage of intermediate register for
5181 the reload containing another reload. It is important to see
5182 gen_reload to understand what the function is trying to do. As an
5183 example, let us have reload chain
5185 r2: const
5186 r1: <something> + const
5188 and reload R2 got reload reg HR. The function returns true if
5189 there is a correct insn HR = HR + <something>. Otherwise,
5190 gen_reload will use intermediate register (and this is the reload
5191 reg for R1) to reload <something>.
5193 We need this function to find a conflict for chain reloads. In our
5194 example, if HR = HR + <something> is incorrect insn, then we cannot
5195 use HR as a reload register for R2. If we do use it then we get a
5196 wrong code:
5198 HR = const
5199 HR = <something>
5200 HR = HR + HR
5203 static bool
5204 gen_reload_chain_without_interm_reg_p (int r1, int r2)
5206 bool result;
5207 int regno, n, code;
5208 rtx out, in, tem, insn;
5209 rtx last = get_last_insn ();
5211 /* Make r2 a component of r1. */
5212 if (reg_mentioned_p (rld[r1].in, rld[r2].in))
5214 n = r1;
5215 r1 = r2;
5216 r2 = n;
5218 gcc_assert (reg_mentioned_p (rld[r2].in, rld[r1].in));
5219 regno = rld[r1].regno >= 0 ? rld[r1].regno : rld[r2].regno;
5220 gcc_assert (regno >= 0);
5221 out = gen_rtx_REG (rld[r1].mode, regno);
5222 in = copy_rtx (rld[r1].in);
5223 substitute (&in, rld[r2].in, gen_rtx_REG (rld[r2].mode, regno));
5225 /* If IN is a paradoxical SUBREG, remove it and try to put the
5226 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
5227 if (GET_CODE (in) == SUBREG
5228 && (GET_MODE_SIZE (GET_MODE (in))
5229 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
5230 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
5231 in = SUBREG_REG (in), out = tem;
5233 if (GET_CODE (in) == PLUS
5234 && (REG_P (XEXP (in, 0))
5235 || GET_CODE (XEXP (in, 0)) == SUBREG
5236 || MEM_P (XEXP (in, 0)))
5237 && (REG_P (XEXP (in, 1))
5238 || GET_CODE (XEXP (in, 1)) == SUBREG
5239 || CONSTANT_P (XEXP (in, 1))
5240 || MEM_P (XEXP (in, 1))))
5242 insn = emit_insn (gen_rtx_SET (VOIDmode, out, in));
5243 code = recog_memoized (insn);
5244 result = false;
5246 if (code >= 0)
5248 extract_insn (insn);
5249 /* We want constrain operands to treat this insn strictly in
5250 its validity determination, i.e., the way it would after
5251 reload has completed. */
5252 result = constrain_operands (1);
5255 delete_insns_since (last);
5256 return result;
5259 /* It looks like other cases in gen_reload are not possible for
5260 chain reloads or do need an intermediate hard registers. */
5261 return true;
5264 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
5265 Return 0 otherwise.
5267 This function uses the same algorithm as reload_reg_free_p above. */
5269 static int
5270 reloads_conflict (int r1, int r2)
5272 enum reload_type r1_type = rld[r1].when_needed;
5273 enum reload_type r2_type = rld[r2].when_needed;
5274 int r1_opnum = rld[r1].opnum;
5275 int r2_opnum = rld[r2].opnum;
5277 /* RELOAD_OTHER conflicts with everything. */
5278 if (r2_type == RELOAD_OTHER)
5279 return 1;
5281 /* Otherwise, check conflicts differently for each type. */
5283 switch (r1_type)
5285 case RELOAD_FOR_INPUT:
5286 return (r2_type == RELOAD_FOR_INSN
5287 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
5288 || r2_type == RELOAD_FOR_OPADDR_ADDR
5289 || r2_type == RELOAD_FOR_INPUT
5290 || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
5291 || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
5292 && r2_opnum > r1_opnum));
5294 case RELOAD_FOR_INPUT_ADDRESS:
5295 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
5296 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5298 case RELOAD_FOR_INPADDR_ADDRESS:
5299 return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
5300 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5302 case RELOAD_FOR_OUTPUT_ADDRESS:
5303 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
5304 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
5306 case RELOAD_FOR_OUTADDR_ADDRESS:
5307 return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
5308 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
5310 case RELOAD_FOR_OPERAND_ADDRESS:
5311 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
5312 || (r2_type == RELOAD_FOR_OPERAND_ADDRESS
5313 && (!reloads_unique_chain_p (r1, r2)
5314 || !gen_reload_chain_without_interm_reg_p (r1, r2))));
5316 case RELOAD_FOR_OPADDR_ADDR:
5317 return (r2_type == RELOAD_FOR_INPUT
5318 || r2_type == RELOAD_FOR_OPADDR_ADDR);
5320 case RELOAD_FOR_OUTPUT:
5321 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
5322 || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
5323 || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
5324 && r2_opnum >= r1_opnum));
5326 case RELOAD_FOR_INSN:
5327 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
5328 || r2_type == RELOAD_FOR_INSN
5329 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
5331 case RELOAD_FOR_OTHER_ADDRESS:
5332 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
5334 case RELOAD_OTHER:
5335 return 1;
5337 default:
5338 gcc_unreachable ();
5342 /* Indexed by reload number, 1 if incoming value
5343 inherited from previous insns. */
5344 static char reload_inherited[MAX_RELOADS];
5346 /* For an inherited reload, this is the insn the reload was inherited from,
5347 if we know it. Otherwise, this is 0. */
5348 static rtx reload_inheritance_insn[MAX_RELOADS];
5350 /* If nonzero, this is a place to get the value of the reload,
5351 rather than using reload_in. */
5352 static rtx reload_override_in[MAX_RELOADS];
5354 /* For each reload, the hard register number of the register used,
5355 or -1 if we did not need a register for this reload. */
5356 static int reload_spill_index[MAX_RELOADS];
5358 /* Index X is the value of rld[X].reg_rtx, adjusted for the input mode. */
5359 static rtx reload_reg_rtx_for_input[MAX_RELOADS];
5361 /* Index X is the value of rld[X].reg_rtx, adjusted for the output mode. */
5362 static rtx reload_reg_rtx_for_output[MAX_RELOADS];
5364 /* Subroutine of free_for_value_p, used to check a single register.
5365 START_REGNO is the starting regno of the full reload register
5366 (possibly comprising multiple hard registers) that we are considering. */
5368 static int
5369 reload_reg_free_for_value_p (int start_regno, int regno, int opnum,
5370 enum reload_type type, rtx value, rtx out,
5371 int reloadnum, int ignore_address_reloads)
5373 int time1;
5374 /* Set if we see an input reload that must not share its reload register
5375 with any new earlyclobber, but might otherwise share the reload
5376 register with an output or input-output reload. */
5377 int check_earlyclobber = 0;
5378 int i;
5379 int copy = 0;
5381 if (TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
5382 return 0;
5384 if (out == const0_rtx)
5386 copy = 1;
5387 out = NULL_RTX;
5390 /* We use some pseudo 'time' value to check if the lifetimes of the
5391 new register use would overlap with the one of a previous reload
5392 that is not read-only or uses a different value.
5393 The 'time' used doesn't have to be linear in any shape or form, just
5394 monotonic.
5395 Some reload types use different 'buckets' for each operand.
5396 So there are MAX_RECOG_OPERANDS different time values for each
5397 such reload type.
5398 We compute TIME1 as the time when the register for the prospective
5399 new reload ceases to be live, and TIME2 for each existing
5400 reload as the time when that the reload register of that reload
5401 becomes live.
5402 Where there is little to be gained by exact lifetime calculations,
5403 we just make conservative assumptions, i.e. a longer lifetime;
5404 this is done in the 'default:' cases. */
5405 switch (type)
5407 case RELOAD_FOR_OTHER_ADDRESS:
5408 /* RELOAD_FOR_OTHER_ADDRESS conflicts with RELOAD_OTHER reloads. */
5409 time1 = copy ? 0 : 1;
5410 break;
5411 case RELOAD_OTHER:
5412 time1 = copy ? 1 : MAX_RECOG_OPERANDS * 5 + 5;
5413 break;
5414 /* For each input, we may have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
5415 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT. By adding 0 / 1 / 2 ,
5416 respectively, to the time values for these, we get distinct time
5417 values. To get distinct time values for each operand, we have to
5418 multiply opnum by at least three. We round that up to four because
5419 multiply by four is often cheaper. */
5420 case RELOAD_FOR_INPADDR_ADDRESS:
5421 time1 = opnum * 4 + 2;
5422 break;
5423 case RELOAD_FOR_INPUT_ADDRESS:
5424 time1 = opnum * 4 + 3;
5425 break;
5426 case RELOAD_FOR_INPUT:
5427 /* All RELOAD_FOR_INPUT reloads remain live till the instruction
5428 executes (inclusive). */
5429 time1 = copy ? opnum * 4 + 4 : MAX_RECOG_OPERANDS * 4 + 3;
5430 break;
5431 case RELOAD_FOR_OPADDR_ADDR:
5432 /* opnum * 4 + 4
5433 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
5434 time1 = MAX_RECOG_OPERANDS * 4 + 1;
5435 break;
5436 case RELOAD_FOR_OPERAND_ADDRESS:
5437 /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
5438 is executed. */
5439 time1 = copy ? MAX_RECOG_OPERANDS * 4 + 2 : MAX_RECOG_OPERANDS * 4 + 3;
5440 break;
5441 case RELOAD_FOR_OUTADDR_ADDRESS:
5442 time1 = MAX_RECOG_OPERANDS * 4 + 4 + opnum;
5443 break;
5444 case RELOAD_FOR_OUTPUT_ADDRESS:
5445 time1 = MAX_RECOG_OPERANDS * 4 + 5 + opnum;
5446 break;
5447 default:
5448 time1 = MAX_RECOG_OPERANDS * 5 + 5;
5451 for (i = 0; i < n_reloads; i++)
5453 rtx reg = rld[i].reg_rtx;
5454 if (reg && REG_P (reg)
5455 && ((unsigned) regno - true_regnum (reg)
5456 <= hard_regno_nregs[REGNO (reg)][GET_MODE (reg)] - (unsigned) 1)
5457 && i != reloadnum)
5459 rtx other_input = rld[i].in;
5461 /* If the other reload loads the same input value, that
5462 will not cause a conflict only if it's loading it into
5463 the same register. */
5464 if (true_regnum (reg) != start_regno)
5465 other_input = NULL_RTX;
5466 if (! other_input || ! rtx_equal_p (other_input, value)
5467 || rld[i].out || out)
5469 int time2;
5470 switch (rld[i].when_needed)
5472 case RELOAD_FOR_OTHER_ADDRESS:
5473 time2 = 0;
5474 break;
5475 case RELOAD_FOR_INPADDR_ADDRESS:
5476 /* find_reloads makes sure that a
5477 RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is only used
5478 by at most one - the first -
5479 RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS . If the
5480 address reload is inherited, the address address reload
5481 goes away, so we can ignore this conflict. */
5482 if (type == RELOAD_FOR_INPUT_ADDRESS && reloadnum == i + 1
5483 && ignore_address_reloads
5484 /* Unless the RELOAD_FOR_INPUT is an auto_inc expression.
5485 Then the address address is still needed to store
5486 back the new address. */
5487 && ! rld[reloadnum].out)
5488 continue;
5489 /* Likewise, if a RELOAD_FOR_INPUT can inherit a value, its
5490 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS
5491 reloads go away. */
5492 if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
5493 && ignore_address_reloads
5494 /* Unless we are reloading an auto_inc expression. */
5495 && ! rld[reloadnum].out)
5496 continue;
5497 time2 = rld[i].opnum * 4 + 2;
5498 break;
5499 case RELOAD_FOR_INPUT_ADDRESS:
5500 if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
5501 && ignore_address_reloads
5502 && ! rld[reloadnum].out)
5503 continue;
5504 time2 = rld[i].opnum * 4 + 3;
5505 break;
5506 case RELOAD_FOR_INPUT:
5507 time2 = rld[i].opnum * 4 + 4;
5508 check_earlyclobber = 1;
5509 break;
5510 /* rld[i].opnum * 4 + 4 <= (MAX_RECOG_OPERAND - 1) * 4 + 4
5511 == MAX_RECOG_OPERAND * 4 */
5512 case RELOAD_FOR_OPADDR_ADDR:
5513 if (type == RELOAD_FOR_OPERAND_ADDRESS && reloadnum == i + 1
5514 && ignore_address_reloads
5515 && ! rld[reloadnum].out)
5516 continue;
5517 time2 = MAX_RECOG_OPERANDS * 4 + 1;
5518 break;
5519 case RELOAD_FOR_OPERAND_ADDRESS:
5520 time2 = MAX_RECOG_OPERANDS * 4 + 2;
5521 check_earlyclobber = 1;
5522 break;
5523 case RELOAD_FOR_INSN:
5524 time2 = MAX_RECOG_OPERANDS * 4 + 3;
5525 break;
5526 case RELOAD_FOR_OUTPUT:
5527 /* All RELOAD_FOR_OUTPUT reloads become live just after the
5528 instruction is executed. */
5529 time2 = MAX_RECOG_OPERANDS * 4 + 4;
5530 break;
5531 /* The first RELOAD_FOR_OUTADDR_ADDRESS reload conflicts with
5532 the RELOAD_FOR_OUTPUT reloads, so assign it the same time
5533 value. */
5534 case RELOAD_FOR_OUTADDR_ADDRESS:
5535 if (type == RELOAD_FOR_OUTPUT_ADDRESS && reloadnum == i + 1
5536 && ignore_address_reloads
5537 && ! rld[reloadnum].out)
5538 continue;
5539 time2 = MAX_RECOG_OPERANDS * 4 + 4 + rld[i].opnum;
5540 break;
5541 case RELOAD_FOR_OUTPUT_ADDRESS:
5542 time2 = MAX_RECOG_OPERANDS * 4 + 5 + rld[i].opnum;
5543 break;
5544 case RELOAD_OTHER:
5545 /* If there is no conflict in the input part, handle this
5546 like an output reload. */
5547 if (! rld[i].in || rtx_equal_p (other_input, value))
5549 time2 = MAX_RECOG_OPERANDS * 4 + 4;
5550 /* Earlyclobbered outputs must conflict with inputs. */
5551 if (earlyclobber_operand_p (rld[i].out))
5552 time2 = MAX_RECOG_OPERANDS * 4 + 3;
5554 break;
5556 time2 = 1;
5557 /* RELOAD_OTHER might be live beyond instruction execution,
5558 but this is not obvious when we set time2 = 1. So check
5559 here if there might be a problem with the new reload
5560 clobbering the register used by the RELOAD_OTHER. */
5561 if (out)
5562 return 0;
5563 break;
5564 default:
5565 return 0;
5567 if ((time1 >= time2
5568 && (! rld[i].in || rld[i].out
5569 || ! rtx_equal_p (other_input, value)))
5570 || (out && rld[reloadnum].out_reg
5571 && time2 >= MAX_RECOG_OPERANDS * 4 + 3))
5572 return 0;
5577 /* Earlyclobbered outputs must conflict with inputs. */
5578 if (check_earlyclobber && out && earlyclobber_operand_p (out))
5579 return 0;
5581 return 1;
5584 /* Return 1 if the value in reload reg REGNO, as used by a reload
5585 needed for the part of the insn specified by OPNUM and TYPE,
5586 may be used to load VALUE into it.
5588 MODE is the mode in which the register is used, this is needed to
5589 determine how many hard regs to test.
5591 Other read-only reloads with the same value do not conflict
5592 unless OUT is nonzero and these other reloads have to live while
5593 output reloads live.
5594 If OUT is CONST0_RTX, this is a special case: it means that the
5595 test should not be for using register REGNO as reload register, but
5596 for copying from register REGNO into the reload register.
5598 RELOADNUM is the number of the reload we want to load this value for;
5599 a reload does not conflict with itself.
5601 When IGNORE_ADDRESS_RELOADS is set, we can not have conflicts with
5602 reloads that load an address for the very reload we are considering.
5604 The caller has to make sure that there is no conflict with the return
5605 register. */
5607 static int
5608 free_for_value_p (int regno, enum machine_mode mode, int opnum,
5609 enum reload_type type, rtx value, rtx out, int reloadnum,
5610 int ignore_address_reloads)
5612 int nregs = hard_regno_nregs[regno][mode];
5613 while (nregs-- > 0)
5614 if (! reload_reg_free_for_value_p (regno, regno + nregs, opnum, type,
5615 value, out, reloadnum,
5616 ignore_address_reloads))
5617 return 0;
5618 return 1;
5621 /* Return nonzero if the rtx X is invariant over the current function. */
5622 /* ??? Actually, the places where we use this expect exactly what is
5623 tested here, and not everything that is function invariant. In
5624 particular, the frame pointer and arg pointer are special cased;
5625 pic_offset_table_rtx is not, and we must not spill these things to
5626 memory. */
5629 function_invariant_p (const_rtx x)
5631 if (CONSTANT_P (x))
5632 return 1;
5633 if (x == frame_pointer_rtx || x == arg_pointer_rtx)
5634 return 1;
5635 if (GET_CODE (x) == PLUS
5636 && (XEXP (x, 0) == frame_pointer_rtx || XEXP (x, 0) == arg_pointer_rtx)
5637 && CONSTANT_P (XEXP (x, 1)))
5638 return 1;
5639 return 0;
5642 /* Determine whether the reload reg X overlaps any rtx'es used for
5643 overriding inheritance. Return nonzero if so. */
5645 static int
5646 conflicts_with_override (rtx x)
5648 int i;
5649 for (i = 0; i < n_reloads; i++)
5650 if (reload_override_in[i]
5651 && reg_overlap_mentioned_p (x, reload_override_in[i]))
5652 return 1;
5653 return 0;
5656 /* Give an error message saying we failed to find a reload for INSN,
5657 and clear out reload R. */
5658 static void
5659 failed_reload (rtx insn, int r)
5661 if (asm_noperands (PATTERN (insn)) < 0)
5662 /* It's the compiler's fault. */
5663 fatal_insn ("could not find a spill register", insn);
5665 /* It's the user's fault; the operand's mode and constraint
5666 don't match. Disable this reload so we don't crash in final. */
5667 error_for_asm (insn,
5668 "%<asm%> operand constraint incompatible with operand size");
5669 rld[r].in = 0;
5670 rld[r].out = 0;
5671 rld[r].reg_rtx = 0;
5672 rld[r].optional = 1;
5673 rld[r].secondary_p = 1;
5676 /* I is the index in SPILL_REG_RTX of the reload register we are to allocate
5677 for reload R. If it's valid, get an rtx for it. Return nonzero if
5678 successful. */
5679 static int
5680 set_reload_reg (int i, int r)
5682 int regno;
5683 rtx reg = spill_reg_rtx[i];
5685 if (reg == 0 || GET_MODE (reg) != rld[r].mode)
5686 spill_reg_rtx[i] = reg
5687 = gen_rtx_REG (rld[r].mode, spill_regs[i]);
5689 regno = true_regnum (reg);
5691 /* Detect when the reload reg can't hold the reload mode.
5692 This used to be one `if', but Sequent compiler can't handle that. */
5693 if (HARD_REGNO_MODE_OK (regno, rld[r].mode))
5695 enum machine_mode test_mode = VOIDmode;
5696 if (rld[r].in)
5697 test_mode = GET_MODE (rld[r].in);
5698 /* If rld[r].in has VOIDmode, it means we will load it
5699 in whatever mode the reload reg has: to wit, rld[r].mode.
5700 We have already tested that for validity. */
5701 /* Aside from that, we need to test that the expressions
5702 to reload from or into have modes which are valid for this
5703 reload register. Otherwise the reload insns would be invalid. */
5704 if (! (rld[r].in != 0 && test_mode != VOIDmode
5705 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
5706 if (! (rld[r].out != 0
5707 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (rld[r].out))))
5709 /* The reg is OK. */
5710 last_spill_reg = i;
5712 /* Mark as in use for this insn the reload regs we use
5713 for this. */
5714 mark_reload_reg_in_use (spill_regs[i], rld[r].opnum,
5715 rld[r].when_needed, rld[r].mode);
5717 rld[r].reg_rtx = reg;
5718 reload_spill_index[r] = spill_regs[i];
5719 return 1;
5722 return 0;
5725 /* Find a spill register to use as a reload register for reload R.
5726 LAST_RELOAD is nonzero if this is the last reload for the insn being
5727 processed.
5729 Set rld[R].reg_rtx to the register allocated.
5731 We return 1 if successful, or 0 if we couldn't find a spill reg and
5732 we didn't change anything. */
5734 static int
5735 allocate_reload_reg (struct insn_chain *chain ATTRIBUTE_UNUSED, int r,
5736 int last_reload)
5738 int i, pass, count;
5740 /* If we put this reload ahead, thinking it is a group,
5741 then insist on finding a group. Otherwise we can grab a
5742 reg that some other reload needs.
5743 (That can happen when we have a 68000 DATA_OR_FP_REG
5744 which is a group of data regs or one fp reg.)
5745 We need not be so restrictive if there are no more reloads
5746 for this insn.
5748 ??? Really it would be nicer to have smarter handling
5749 for that kind of reg class, where a problem like this is normal.
5750 Perhaps those classes should be avoided for reloading
5751 by use of more alternatives. */
5753 int force_group = rld[r].nregs > 1 && ! last_reload;
5755 /* If we want a single register and haven't yet found one,
5756 take any reg in the right class and not in use.
5757 If we want a consecutive group, here is where we look for it.
5759 We use two passes so we can first look for reload regs to
5760 reuse, which are already in use for other reloads in this insn,
5761 and only then use additional registers.
5762 I think that maximizing reuse is needed to make sure we don't
5763 run out of reload regs. Suppose we have three reloads, and
5764 reloads A and B can share regs. These need two regs.
5765 Suppose A and B are given different regs.
5766 That leaves none for C. */
5767 for (pass = 0; pass < 2; pass++)
5769 /* I is the index in spill_regs.
5770 We advance it round-robin between insns to use all spill regs
5771 equally, so that inherited reloads have a chance
5772 of leapfrogging each other. */
5774 i = last_spill_reg;
5776 for (count = 0; count < n_spills; count++)
5778 int rclass = (int) rld[r].rclass;
5779 int regnum;
5781 i++;
5782 if (i >= n_spills)
5783 i -= n_spills;
5784 regnum = spill_regs[i];
5786 if ((reload_reg_free_p (regnum, rld[r].opnum,
5787 rld[r].when_needed)
5788 || (rld[r].in
5789 /* We check reload_reg_used to make sure we
5790 don't clobber the return register. */
5791 && ! TEST_HARD_REG_BIT (reload_reg_used, regnum)
5792 && free_for_value_p (regnum, rld[r].mode, rld[r].opnum,
5793 rld[r].when_needed, rld[r].in,
5794 rld[r].out, r, 1)))
5795 && TEST_HARD_REG_BIT (reg_class_contents[rclass], regnum)
5796 && HARD_REGNO_MODE_OK (regnum, rld[r].mode)
5797 /* Look first for regs to share, then for unshared. But
5798 don't share regs used for inherited reloads; they are
5799 the ones we want to preserve. */
5800 && (pass
5801 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
5802 regnum)
5803 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
5804 regnum))))
5806 int nr = hard_regno_nregs[regnum][rld[r].mode];
5807 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
5808 (on 68000) got us two FP regs. If NR is 1,
5809 we would reject both of them. */
5810 if (force_group)
5811 nr = rld[r].nregs;
5812 /* If we need only one reg, we have already won. */
5813 if (nr == 1)
5815 /* But reject a single reg if we demand a group. */
5816 if (force_group)
5817 continue;
5818 break;
5820 /* Otherwise check that as many consecutive regs as we need
5821 are available here. */
5822 while (nr > 1)
5824 int regno = regnum + nr - 1;
5825 if (!(TEST_HARD_REG_BIT (reg_class_contents[rclass], regno)
5826 && spill_reg_order[regno] >= 0
5827 && reload_reg_free_p (regno, rld[r].opnum,
5828 rld[r].when_needed)))
5829 break;
5830 nr--;
5832 if (nr == 1)
5833 break;
5837 /* If we found something on pass 1, omit pass 2. */
5838 if (count < n_spills)
5839 break;
5842 /* We should have found a spill register by now. */
5843 if (count >= n_spills)
5844 return 0;
5846 /* I is the index in SPILL_REG_RTX of the reload register we are to
5847 allocate. Get an rtx for it and find its register number. */
5849 return set_reload_reg (i, r);
5852 /* Initialize all the tables needed to allocate reload registers.
5853 CHAIN is the insn currently being processed; SAVE_RELOAD_REG_RTX
5854 is the array we use to restore the reg_rtx field for every reload. */
5856 static void
5857 choose_reload_regs_init (struct insn_chain *chain, rtx *save_reload_reg_rtx)
5859 int i;
5861 for (i = 0; i < n_reloads; i++)
5862 rld[i].reg_rtx = save_reload_reg_rtx[i];
5864 memset (reload_inherited, 0, MAX_RELOADS);
5865 memset (reload_inheritance_insn, 0, MAX_RELOADS * sizeof (rtx));
5866 memset (reload_override_in, 0, MAX_RELOADS * sizeof (rtx));
5868 CLEAR_HARD_REG_SET (reload_reg_used);
5869 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
5870 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
5871 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
5872 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
5873 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
5875 CLEAR_HARD_REG_SET (reg_used_in_insn);
5877 HARD_REG_SET tmp;
5878 REG_SET_TO_HARD_REG_SET (tmp, &chain->live_throughout);
5879 IOR_HARD_REG_SET (reg_used_in_insn, tmp);
5880 REG_SET_TO_HARD_REG_SET (tmp, &chain->dead_or_set);
5881 IOR_HARD_REG_SET (reg_used_in_insn, tmp);
5882 compute_use_by_pseudos (&reg_used_in_insn, &chain->live_throughout);
5883 compute_use_by_pseudos (&reg_used_in_insn, &chain->dead_or_set);
5886 for (i = 0; i < reload_n_operands; i++)
5888 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
5889 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
5890 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
5891 CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
5892 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
5893 CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
5896 COMPL_HARD_REG_SET (reload_reg_unavailable, chain->used_spill_regs);
5898 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5900 for (i = 0; i < n_reloads; i++)
5901 /* If we have already decided to use a certain register,
5902 don't use it in another way. */
5903 if (rld[i].reg_rtx)
5904 mark_reload_reg_in_use (REGNO (rld[i].reg_rtx), rld[i].opnum,
5905 rld[i].when_needed, rld[i].mode);
5908 /* Assign hard reg targets for the pseudo-registers we must reload
5909 into hard regs for this insn.
5910 Also output the instructions to copy them in and out of the hard regs.
5912 For machines with register classes, we are responsible for
5913 finding a reload reg in the proper class. */
5915 static void
5916 choose_reload_regs (struct insn_chain *chain)
5918 rtx insn = chain->insn;
5919 int i, j;
5920 unsigned int max_group_size = 1;
5921 enum reg_class group_class = NO_REGS;
5922 int pass, win, inheritance;
5924 rtx save_reload_reg_rtx[MAX_RELOADS];
5926 /* In order to be certain of getting the registers we need,
5927 we must sort the reloads into order of increasing register class.
5928 Then our grabbing of reload registers will parallel the process
5929 that provided the reload registers.
5931 Also note whether any of the reloads wants a consecutive group of regs.
5932 If so, record the maximum size of the group desired and what
5933 register class contains all the groups needed by this insn. */
5935 for (j = 0; j < n_reloads; j++)
5937 reload_order[j] = j;
5938 if (rld[j].reg_rtx != NULL_RTX)
5940 gcc_assert (REG_P (rld[j].reg_rtx)
5941 && HARD_REGISTER_P (rld[j].reg_rtx));
5942 reload_spill_index[j] = REGNO (rld[j].reg_rtx);
5944 else
5945 reload_spill_index[j] = -1;
5947 if (rld[j].nregs > 1)
5949 max_group_size = MAX (rld[j].nregs, max_group_size);
5950 group_class
5951 = reg_class_superunion[(int) rld[j].rclass][(int) group_class];
5954 save_reload_reg_rtx[j] = rld[j].reg_rtx;
5957 if (n_reloads > 1)
5958 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
5960 /* If -O, try first with inheritance, then turning it off.
5961 If not -O, don't do inheritance.
5962 Using inheritance when not optimizing leads to paradoxes
5963 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5964 because one side of the comparison might be inherited. */
5965 win = 0;
5966 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
5968 choose_reload_regs_init (chain, save_reload_reg_rtx);
5970 /* Process the reloads in order of preference just found.
5971 Beyond this point, subregs can be found in reload_reg_rtx.
5973 This used to look for an existing reloaded home for all of the
5974 reloads, and only then perform any new reloads. But that could lose
5975 if the reloads were done out of reg-class order because a later
5976 reload with a looser constraint might have an old home in a register
5977 needed by an earlier reload with a tighter constraint.
5979 To solve this, we make two passes over the reloads, in the order
5980 described above. In the first pass we try to inherit a reload
5981 from a previous insn. If there is a later reload that needs a
5982 class that is a proper subset of the class being processed, we must
5983 also allocate a spill register during the first pass.
5985 Then make a second pass over the reloads to allocate any reloads
5986 that haven't been given registers yet. */
5988 for (j = 0; j < n_reloads; j++)
5990 int r = reload_order[j];
5991 rtx search_equiv = NULL_RTX;
5993 /* Ignore reloads that got marked inoperative. */
5994 if (rld[r].out == 0 && rld[r].in == 0
5995 && ! rld[r].secondary_p)
5996 continue;
5998 /* If find_reloads chose to use reload_in or reload_out as a reload
5999 register, we don't need to chose one. Otherwise, try even if it
6000 found one since we might save an insn if we find the value lying
6001 around.
6002 Try also when reload_in is a pseudo without a hard reg. */
6003 if (rld[r].in != 0 && rld[r].reg_rtx != 0
6004 && (rtx_equal_p (rld[r].in, rld[r].reg_rtx)
6005 || (rtx_equal_p (rld[r].out, rld[r].reg_rtx)
6006 && !MEM_P (rld[r].in)
6007 && true_regnum (rld[r].in) < FIRST_PSEUDO_REGISTER)))
6008 continue;
6010 #if 0 /* No longer needed for correct operation.
6011 It might give better code, or might not; worth an experiment? */
6012 /* If this is an optional reload, we can't inherit from earlier insns
6013 until we are sure that any non-optional reloads have been allocated.
6014 The following code takes advantage of the fact that optional reloads
6015 are at the end of reload_order. */
6016 if (rld[r].optional != 0)
6017 for (i = 0; i < j; i++)
6018 if ((rld[reload_order[i]].out != 0
6019 || rld[reload_order[i]].in != 0
6020 || rld[reload_order[i]].secondary_p)
6021 && ! rld[reload_order[i]].optional
6022 && rld[reload_order[i]].reg_rtx == 0)
6023 allocate_reload_reg (chain, reload_order[i], 0);
6024 #endif
6026 /* First see if this pseudo is already available as reloaded
6027 for a previous insn. We cannot try to inherit for reloads
6028 that are smaller than the maximum number of registers needed
6029 for groups unless the register we would allocate cannot be used
6030 for the groups.
6032 We could check here to see if this is a secondary reload for
6033 an object that is already in a register of the desired class.
6034 This would avoid the need for the secondary reload register.
6035 But this is complex because we can't easily determine what
6036 objects might want to be loaded via this reload. So let a
6037 register be allocated here. In `emit_reload_insns' we suppress
6038 one of the loads in the case described above. */
6040 if (inheritance)
6042 int byte = 0;
6043 int regno = -1;
6044 enum machine_mode mode = VOIDmode;
6046 if (rld[r].in == 0)
6048 else if (REG_P (rld[r].in))
6050 regno = REGNO (rld[r].in);
6051 mode = GET_MODE (rld[r].in);
6053 else if (REG_P (rld[r].in_reg))
6055 regno = REGNO (rld[r].in_reg);
6056 mode = GET_MODE (rld[r].in_reg);
6058 else if (GET_CODE (rld[r].in_reg) == SUBREG
6059 && REG_P (SUBREG_REG (rld[r].in_reg)))
6061 regno = REGNO (SUBREG_REG (rld[r].in_reg));
6062 if (regno < FIRST_PSEUDO_REGISTER)
6063 regno = subreg_regno (rld[r].in_reg);
6064 else
6065 byte = SUBREG_BYTE (rld[r].in_reg);
6066 mode = GET_MODE (rld[r].in_reg);
6068 #ifdef AUTO_INC_DEC
6069 else if (GET_RTX_CLASS (GET_CODE (rld[r].in_reg)) == RTX_AUTOINC
6070 && REG_P (XEXP (rld[r].in_reg, 0)))
6072 regno = REGNO (XEXP (rld[r].in_reg, 0));
6073 mode = GET_MODE (XEXP (rld[r].in_reg, 0));
6074 rld[r].out = rld[r].in;
6076 #endif
6077 #if 0
6078 /* This won't work, since REGNO can be a pseudo reg number.
6079 Also, it takes much more hair to keep track of all the things
6080 that can invalidate an inherited reload of part of a pseudoreg. */
6081 else if (GET_CODE (rld[r].in) == SUBREG
6082 && REG_P (SUBREG_REG (rld[r].in)))
6083 regno = subreg_regno (rld[r].in);
6084 #endif
6086 if (regno >= 0
6087 && reg_last_reload_reg[regno] != 0
6088 #ifdef CANNOT_CHANGE_MODE_CLASS
6089 /* Verify that the register it's in can be used in
6090 mode MODE. */
6091 && !REG_CANNOT_CHANGE_MODE_P (REGNO (reg_last_reload_reg[regno]),
6092 GET_MODE (reg_last_reload_reg[regno]),
6093 mode)
6094 #endif
6097 enum reg_class rclass = rld[r].rclass, last_class;
6098 rtx last_reg = reg_last_reload_reg[regno];
6099 enum machine_mode need_mode;
6101 i = REGNO (last_reg);
6102 i += subreg_regno_offset (i, GET_MODE (last_reg), byte, mode);
6103 last_class = REGNO_REG_CLASS (i);
6105 if (byte == 0)
6106 need_mode = mode;
6107 else
6108 need_mode
6109 = smallest_mode_for_size
6110 (GET_MODE_BITSIZE (mode) + byte * BITS_PER_UNIT,
6111 GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
6112 ? MODE_INT : GET_MODE_CLASS (mode));
6114 if ((GET_MODE_SIZE (GET_MODE (last_reg))
6115 >= GET_MODE_SIZE (need_mode))
6116 && reg_reloaded_contents[i] == regno
6117 && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
6118 && HARD_REGNO_MODE_OK (i, rld[r].mode)
6119 && (TEST_HARD_REG_BIT (reg_class_contents[(int) rclass], i)
6120 /* Even if we can't use this register as a reload
6121 register, we might use it for reload_override_in,
6122 if copying it to the desired class is cheap
6123 enough. */
6124 || ((REGISTER_MOVE_COST (mode, last_class, rclass)
6125 < MEMORY_MOVE_COST (mode, rclass, 1))
6126 && (secondary_reload_class (1, rclass, mode,
6127 last_reg)
6128 == NO_REGS)
6129 #ifdef SECONDARY_MEMORY_NEEDED
6130 && ! SECONDARY_MEMORY_NEEDED (last_class, rclass,
6131 mode)
6132 #endif
6135 && (rld[r].nregs == max_group_size
6136 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
6138 && free_for_value_p (i, rld[r].mode, rld[r].opnum,
6139 rld[r].when_needed, rld[r].in,
6140 const0_rtx, r, 1))
6142 /* If a group is needed, verify that all the subsequent
6143 registers still have their values intact. */
6144 int nr = hard_regno_nregs[i][rld[r].mode];
6145 int k;
6147 for (k = 1; k < nr; k++)
6148 if (reg_reloaded_contents[i + k] != regno
6149 || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))
6150 break;
6152 if (k == nr)
6154 int i1;
6155 int bad_for_class;
6157 last_reg = (GET_MODE (last_reg) == mode
6158 ? last_reg : gen_rtx_REG (mode, i));
6160 bad_for_class = 0;
6161 for (k = 0; k < nr; k++)
6162 bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].rclass],
6163 i+k);
6165 /* We found a register that contains the
6166 value we need. If this register is the
6167 same as an `earlyclobber' operand of the
6168 current insn, just mark it as a place to
6169 reload from since we can't use it as the
6170 reload register itself. */
6172 for (i1 = 0; i1 < n_earlyclobbers; i1++)
6173 if (reg_overlap_mentioned_for_reload_p
6174 (reg_last_reload_reg[regno],
6175 reload_earlyclobbers[i1]))
6176 break;
6178 if (i1 != n_earlyclobbers
6179 || ! (free_for_value_p (i, rld[r].mode,
6180 rld[r].opnum,
6181 rld[r].when_needed, rld[r].in,
6182 rld[r].out, r, 1))
6183 /* Don't use it if we'd clobber a pseudo reg. */
6184 || (TEST_HARD_REG_BIT (reg_used_in_insn, i)
6185 && rld[r].out
6186 && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
6187 /* Don't clobber the frame pointer. */
6188 || (i == HARD_FRAME_POINTER_REGNUM
6189 && frame_pointer_needed
6190 && rld[r].out)
6191 /* Don't really use the inherited spill reg
6192 if we need it wider than we've got it. */
6193 || (GET_MODE_SIZE (rld[r].mode)
6194 > GET_MODE_SIZE (mode))
6195 || bad_for_class
6197 /* If find_reloads chose reload_out as reload
6198 register, stay with it - that leaves the
6199 inherited register for subsequent reloads. */
6200 || (rld[r].out && rld[r].reg_rtx
6201 && rtx_equal_p (rld[r].out, rld[r].reg_rtx)))
6203 if (! rld[r].optional)
6205 reload_override_in[r] = last_reg;
6206 reload_inheritance_insn[r]
6207 = reg_reloaded_insn[i];
6210 else
6212 int k;
6213 /* We can use this as a reload reg. */
6214 /* Mark the register as in use for this part of
6215 the insn. */
6216 mark_reload_reg_in_use (i,
6217 rld[r].opnum,
6218 rld[r].when_needed,
6219 rld[r].mode);
6220 rld[r].reg_rtx = last_reg;
6221 reload_inherited[r] = 1;
6222 reload_inheritance_insn[r]
6223 = reg_reloaded_insn[i];
6224 reload_spill_index[r] = i;
6225 for (k = 0; k < nr; k++)
6226 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6227 i + k);
6234 /* Here's another way to see if the value is already lying around. */
6235 if (inheritance
6236 && rld[r].in != 0
6237 && ! reload_inherited[r]
6238 && rld[r].out == 0
6239 && (CONSTANT_P (rld[r].in)
6240 || GET_CODE (rld[r].in) == PLUS
6241 || REG_P (rld[r].in)
6242 || MEM_P (rld[r].in))
6243 && (rld[r].nregs == max_group_size
6244 || ! reg_classes_intersect_p (rld[r].rclass, group_class)))
6245 search_equiv = rld[r].in;
6246 /* If this is an output reload from a simple move insn, look
6247 if an equivalence for the input is available. */
6248 else if (inheritance && rld[r].in == 0 && rld[r].out != 0)
6250 rtx set = single_set (insn);
6252 if (set
6253 && rtx_equal_p (rld[r].out, SET_DEST (set))
6254 && CONSTANT_P (SET_SRC (set)))
6255 search_equiv = SET_SRC (set);
6258 if (search_equiv)
6260 rtx equiv
6261 = find_equiv_reg (search_equiv, insn, rld[r].rclass,
6262 -1, NULL, 0, rld[r].mode);
6263 int regno = 0;
6265 if (equiv != 0)
6267 if (REG_P (equiv))
6268 regno = REGNO (equiv);
6269 else
6271 /* This must be a SUBREG of a hard register.
6272 Make a new REG since this might be used in an
6273 address and not all machines support SUBREGs
6274 there. */
6275 gcc_assert (GET_CODE (equiv) == SUBREG);
6276 regno = subreg_regno (equiv);
6277 equiv = gen_rtx_REG (rld[r].mode, regno);
6278 /* If we choose EQUIV as the reload register, but the
6279 loop below decides to cancel the inheritance, we'll
6280 end up reloading EQUIV in rld[r].mode, not the mode
6281 it had originally. That isn't safe when EQUIV isn't
6282 available as a spill register since its value might
6283 still be live at this point. */
6284 for (i = regno; i < regno + (int) rld[r].nregs; i++)
6285 if (TEST_HARD_REG_BIT (reload_reg_unavailable, i))
6286 equiv = 0;
6290 /* If we found a spill reg, reject it unless it is free
6291 and of the desired class. */
6292 if (equiv != 0)
6294 int regs_used = 0;
6295 int bad_for_class = 0;
6296 int max_regno = regno + rld[r].nregs;
6298 for (i = regno; i < max_regno; i++)
6300 regs_used |= TEST_HARD_REG_BIT (reload_reg_used_at_all,
6302 bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].rclass],
6306 if ((regs_used
6307 && ! free_for_value_p (regno, rld[r].mode,
6308 rld[r].opnum, rld[r].when_needed,
6309 rld[r].in, rld[r].out, r, 1))
6310 || bad_for_class)
6311 equiv = 0;
6314 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, rld[r].mode))
6315 equiv = 0;
6317 /* We found a register that contains the value we need.
6318 If this register is the same as an `earlyclobber' operand
6319 of the current insn, just mark it as a place to reload from
6320 since we can't use it as the reload register itself. */
6322 if (equiv != 0)
6323 for (i = 0; i < n_earlyclobbers; i++)
6324 if (reg_overlap_mentioned_for_reload_p (equiv,
6325 reload_earlyclobbers[i]))
6327 if (! rld[r].optional)
6328 reload_override_in[r] = equiv;
6329 equiv = 0;
6330 break;
6333 /* If the equiv register we have found is explicitly clobbered
6334 in the current insn, it depends on the reload type if we
6335 can use it, use it for reload_override_in, or not at all.
6336 In particular, we then can't use EQUIV for a
6337 RELOAD_FOR_OUTPUT_ADDRESS reload. */
6339 if (equiv != 0)
6341 if (regno_clobbered_p (regno, insn, rld[r].mode, 2))
6342 switch (rld[r].when_needed)
6344 case RELOAD_FOR_OTHER_ADDRESS:
6345 case RELOAD_FOR_INPADDR_ADDRESS:
6346 case RELOAD_FOR_INPUT_ADDRESS:
6347 case RELOAD_FOR_OPADDR_ADDR:
6348 break;
6349 case RELOAD_OTHER:
6350 case RELOAD_FOR_INPUT:
6351 case RELOAD_FOR_OPERAND_ADDRESS:
6352 if (! rld[r].optional)
6353 reload_override_in[r] = equiv;
6354 /* Fall through. */
6355 default:
6356 equiv = 0;
6357 break;
6359 else if (regno_clobbered_p (regno, insn, rld[r].mode, 1))
6360 switch (rld[r].when_needed)
6362 case RELOAD_FOR_OTHER_ADDRESS:
6363 case RELOAD_FOR_INPADDR_ADDRESS:
6364 case RELOAD_FOR_INPUT_ADDRESS:
6365 case RELOAD_FOR_OPADDR_ADDR:
6366 case RELOAD_FOR_OPERAND_ADDRESS:
6367 case RELOAD_FOR_INPUT:
6368 break;
6369 case RELOAD_OTHER:
6370 if (! rld[r].optional)
6371 reload_override_in[r] = equiv;
6372 /* Fall through. */
6373 default:
6374 equiv = 0;
6375 break;
6379 /* If we found an equivalent reg, say no code need be generated
6380 to load it, and use it as our reload reg. */
6381 if (equiv != 0
6382 && (regno != HARD_FRAME_POINTER_REGNUM
6383 || !frame_pointer_needed))
6385 int nr = hard_regno_nregs[regno][rld[r].mode];
6386 int k;
6387 rld[r].reg_rtx = equiv;
6388 reload_spill_index[r] = regno;
6389 reload_inherited[r] = 1;
6391 /* If reg_reloaded_valid is not set for this register,
6392 there might be a stale spill_reg_store lying around.
6393 We must clear it, since otherwise emit_reload_insns
6394 might delete the store. */
6395 if (! TEST_HARD_REG_BIT (reg_reloaded_valid, regno))
6396 spill_reg_store[regno] = NULL_RTX;
6397 /* If any of the hard registers in EQUIV are spill
6398 registers, mark them as in use for this insn. */
6399 for (k = 0; k < nr; k++)
6401 i = spill_reg_order[regno + k];
6402 if (i >= 0)
6404 mark_reload_reg_in_use (regno, rld[r].opnum,
6405 rld[r].when_needed,
6406 rld[r].mode);
6407 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6408 regno + k);
6414 /* If we found a register to use already, or if this is an optional
6415 reload, we are done. */
6416 if (rld[r].reg_rtx != 0 || rld[r].optional != 0)
6417 continue;
6419 #if 0
6420 /* No longer needed for correct operation. Might or might
6421 not give better code on the average. Want to experiment? */
6423 /* See if there is a later reload that has a class different from our
6424 class that intersects our class or that requires less register
6425 than our reload. If so, we must allocate a register to this
6426 reload now, since that reload might inherit a previous reload
6427 and take the only available register in our class. Don't do this
6428 for optional reloads since they will force all previous reloads
6429 to be allocated. Also don't do this for reloads that have been
6430 turned off. */
6432 for (i = j + 1; i < n_reloads; i++)
6434 int s = reload_order[i];
6436 if ((rld[s].in == 0 && rld[s].out == 0
6437 && ! rld[s].secondary_p)
6438 || rld[s].optional)
6439 continue;
6441 if ((rld[s].rclass != rld[r].rclass
6442 && reg_classes_intersect_p (rld[r].rclass,
6443 rld[s].rclass))
6444 || rld[s].nregs < rld[r].nregs)
6445 break;
6448 if (i == n_reloads)
6449 continue;
6451 allocate_reload_reg (chain, r, j == n_reloads - 1);
6452 #endif
6455 /* Now allocate reload registers for anything non-optional that
6456 didn't get one yet. */
6457 for (j = 0; j < n_reloads; j++)
6459 int r = reload_order[j];
6461 /* Ignore reloads that got marked inoperative. */
6462 if (rld[r].out == 0 && rld[r].in == 0 && ! rld[r].secondary_p)
6463 continue;
6465 /* Skip reloads that already have a register allocated or are
6466 optional. */
6467 if (rld[r].reg_rtx != 0 || rld[r].optional)
6468 continue;
6470 if (! allocate_reload_reg (chain, r, j == n_reloads - 1))
6471 break;
6474 /* If that loop got all the way, we have won. */
6475 if (j == n_reloads)
6477 win = 1;
6478 break;
6481 /* Loop around and try without any inheritance. */
6484 if (! win)
6486 /* First undo everything done by the failed attempt
6487 to allocate with inheritance. */
6488 choose_reload_regs_init (chain, save_reload_reg_rtx);
6490 /* Some sanity tests to verify that the reloads found in the first
6491 pass are identical to the ones we have now. */
6492 gcc_assert (chain->n_reloads == n_reloads);
6494 for (i = 0; i < n_reloads; i++)
6496 if (chain->rld[i].regno < 0 || chain->rld[i].reg_rtx != 0)
6497 continue;
6498 gcc_assert (chain->rld[i].when_needed == rld[i].when_needed);
6499 for (j = 0; j < n_spills; j++)
6500 if (spill_regs[j] == chain->rld[i].regno)
6501 if (! set_reload_reg (j, i))
6502 failed_reload (chain->insn, i);
6506 /* If we thought we could inherit a reload, because it seemed that
6507 nothing else wanted the same reload register earlier in the insn,
6508 verify that assumption, now that all reloads have been assigned.
6509 Likewise for reloads where reload_override_in has been set. */
6511 /* If doing expensive optimizations, do one preliminary pass that doesn't
6512 cancel any inheritance, but removes reloads that have been needed only
6513 for reloads that we know can be inherited. */
6514 for (pass = flag_expensive_optimizations; pass >= 0; pass--)
6516 for (j = 0; j < n_reloads; j++)
6518 int r = reload_order[j];
6519 rtx check_reg;
6520 if (reload_inherited[r] && rld[r].reg_rtx)
6521 check_reg = rld[r].reg_rtx;
6522 else if (reload_override_in[r]
6523 && (REG_P (reload_override_in[r])
6524 || GET_CODE (reload_override_in[r]) == SUBREG))
6525 check_reg = reload_override_in[r];
6526 else
6527 continue;
6528 if (! free_for_value_p (true_regnum (check_reg), rld[r].mode,
6529 rld[r].opnum, rld[r].when_needed, rld[r].in,
6530 (reload_inherited[r]
6531 ? rld[r].out : const0_rtx),
6532 r, 1))
6534 if (pass)
6535 continue;
6536 reload_inherited[r] = 0;
6537 reload_override_in[r] = 0;
6539 /* If we can inherit a RELOAD_FOR_INPUT, or can use a
6540 reload_override_in, then we do not need its related
6541 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
6542 likewise for other reload types.
6543 We handle this by removing a reload when its only replacement
6544 is mentioned in reload_in of the reload we are going to inherit.
6545 A special case are auto_inc expressions; even if the input is
6546 inherited, we still need the address for the output. We can
6547 recognize them because they have RELOAD_OUT set to RELOAD_IN.
6548 If we succeeded removing some reload and we are doing a preliminary
6549 pass just to remove such reloads, make another pass, since the
6550 removal of one reload might allow us to inherit another one. */
6551 else if (rld[r].in
6552 && rld[r].out != rld[r].in
6553 && remove_address_replacements (rld[r].in) && pass)
6554 pass = 2;
6558 /* Now that reload_override_in is known valid,
6559 actually override reload_in. */
6560 for (j = 0; j < n_reloads; j++)
6561 if (reload_override_in[j])
6562 rld[j].in = reload_override_in[j];
6564 /* If this reload won't be done because it has been canceled or is
6565 optional and not inherited, clear reload_reg_rtx so other
6566 routines (such as subst_reloads) don't get confused. */
6567 for (j = 0; j < n_reloads; j++)
6568 if (rld[j].reg_rtx != 0
6569 && ((rld[j].optional && ! reload_inherited[j])
6570 || (rld[j].in == 0 && rld[j].out == 0
6571 && ! rld[j].secondary_p)))
6573 int regno = true_regnum (rld[j].reg_rtx);
6575 if (spill_reg_order[regno] >= 0)
6576 clear_reload_reg_in_use (regno, rld[j].opnum,
6577 rld[j].when_needed, rld[j].mode);
6578 rld[j].reg_rtx = 0;
6579 reload_spill_index[j] = -1;
6582 /* Record which pseudos and which spill regs have output reloads. */
6583 for (j = 0; j < n_reloads; j++)
6585 int r = reload_order[j];
6587 i = reload_spill_index[r];
6589 /* I is nonneg if this reload uses a register.
6590 If rld[r].reg_rtx is 0, this is an optional reload
6591 that we opted to ignore. */
6592 if (rld[r].out_reg != 0 && REG_P (rld[r].out_reg)
6593 && rld[r].reg_rtx != 0)
6595 int nregno = REGNO (rld[r].out_reg);
6596 int nr = 1;
6598 if (nregno < FIRST_PSEUDO_REGISTER)
6599 nr = hard_regno_nregs[nregno][rld[r].mode];
6601 while (--nr >= 0)
6602 SET_REGNO_REG_SET (&reg_has_output_reload,
6603 nregno + nr);
6605 if (i >= 0)
6607 nr = hard_regno_nregs[i][rld[r].mode];
6608 while (--nr >= 0)
6609 SET_HARD_REG_BIT (reg_is_output_reload, i + nr);
6612 gcc_assert (rld[r].when_needed == RELOAD_OTHER
6613 || rld[r].when_needed == RELOAD_FOR_OUTPUT
6614 || rld[r].when_needed == RELOAD_FOR_INSN);
6619 /* Deallocate the reload register for reload R. This is called from
6620 remove_address_replacements. */
6622 void
6623 deallocate_reload_reg (int r)
6625 int regno;
6627 if (! rld[r].reg_rtx)
6628 return;
6629 regno = true_regnum (rld[r].reg_rtx);
6630 rld[r].reg_rtx = 0;
6631 if (spill_reg_order[regno] >= 0)
6632 clear_reload_reg_in_use (regno, rld[r].opnum, rld[r].when_needed,
6633 rld[r].mode);
6634 reload_spill_index[r] = -1;
6637 /* If SMALL_REGISTER_CLASSES is nonzero, we may not have merged two
6638 reloads of the same item for fear that we might not have enough reload
6639 registers. However, normally they will get the same reload register
6640 and hence actually need not be loaded twice.
6642 Here we check for the most common case of this phenomenon: when we have
6643 a number of reloads for the same object, each of which were allocated
6644 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
6645 reload, and is not modified in the insn itself. If we find such,
6646 merge all the reloads and set the resulting reload to RELOAD_OTHER.
6647 This will not increase the number of spill registers needed and will
6648 prevent redundant code. */
6650 static void
6651 merge_assigned_reloads (rtx insn)
6653 int i, j;
6655 /* Scan all the reloads looking for ones that only load values and
6656 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
6657 assigned and not modified by INSN. */
6659 for (i = 0; i < n_reloads; i++)
6661 int conflicting_input = 0;
6662 int max_input_address_opnum = -1;
6663 int min_conflicting_input_opnum = MAX_RECOG_OPERANDS;
6665 if (rld[i].in == 0 || rld[i].when_needed == RELOAD_OTHER
6666 || rld[i].out != 0 || rld[i].reg_rtx == 0
6667 || reg_set_p (rld[i].reg_rtx, insn))
6668 continue;
6670 /* Look at all other reloads. Ensure that the only use of this
6671 reload_reg_rtx is in a reload that just loads the same value
6672 as we do. Note that any secondary reloads must be of the identical
6673 class since the values, modes, and result registers are the
6674 same, so we need not do anything with any secondary reloads. */
6676 for (j = 0; j < n_reloads; j++)
6678 if (i == j || rld[j].reg_rtx == 0
6679 || ! reg_overlap_mentioned_p (rld[j].reg_rtx,
6680 rld[i].reg_rtx))
6681 continue;
6683 if (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6684 && rld[j].opnum > max_input_address_opnum)
6685 max_input_address_opnum = rld[j].opnum;
6687 /* If the reload regs aren't exactly the same (e.g, different modes)
6688 or if the values are different, we can't merge this reload.
6689 But if it is an input reload, we might still merge
6690 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_OTHER_ADDRESS reloads. */
6692 if (! rtx_equal_p (rld[i].reg_rtx, rld[j].reg_rtx)
6693 || rld[j].out != 0 || rld[j].in == 0
6694 || ! rtx_equal_p (rld[i].in, rld[j].in))
6696 if (rld[j].when_needed != RELOAD_FOR_INPUT
6697 || ((rld[i].when_needed != RELOAD_FOR_INPUT_ADDRESS
6698 || rld[i].opnum > rld[j].opnum)
6699 && rld[i].when_needed != RELOAD_FOR_OTHER_ADDRESS))
6700 break;
6701 conflicting_input = 1;
6702 if (min_conflicting_input_opnum > rld[j].opnum)
6703 min_conflicting_input_opnum = rld[j].opnum;
6707 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
6708 we, in fact, found any matching reloads. */
6710 if (j == n_reloads
6711 && max_input_address_opnum <= min_conflicting_input_opnum)
6713 gcc_assert (rld[i].when_needed != RELOAD_FOR_OUTPUT);
6715 for (j = 0; j < n_reloads; j++)
6716 if (i != j && rld[j].reg_rtx != 0
6717 && rtx_equal_p (rld[i].reg_rtx, rld[j].reg_rtx)
6718 && (! conflicting_input
6719 || rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6720 || rld[j].when_needed == RELOAD_FOR_OTHER_ADDRESS))
6722 rld[i].when_needed = RELOAD_OTHER;
6723 rld[j].in = 0;
6724 reload_spill_index[j] = -1;
6725 transfer_replacements (i, j);
6728 /* If this is now RELOAD_OTHER, look for any reloads that
6729 load parts of this operand and set them to
6730 RELOAD_FOR_OTHER_ADDRESS if they were for inputs,
6731 RELOAD_OTHER for outputs. Note that this test is
6732 equivalent to looking for reloads for this operand
6733 number.
6735 We must take special care with RELOAD_FOR_OUTPUT_ADDRESS;
6736 it may share registers with a RELOAD_FOR_INPUT, so we can
6737 not change it to RELOAD_FOR_OTHER_ADDRESS. We should
6738 never need to, since we do not modify RELOAD_FOR_OUTPUT.
6740 It is possible that the RELOAD_FOR_OPERAND_ADDRESS
6741 instruction is assigned the same register as the earlier
6742 RELOAD_FOR_OTHER_ADDRESS instruction. Merging these two
6743 instructions will cause the RELOAD_FOR_OTHER_ADDRESS
6744 instruction to be deleted later on. */
6746 if (rld[i].when_needed == RELOAD_OTHER)
6747 for (j = 0; j < n_reloads; j++)
6748 if (rld[j].in != 0
6749 && rld[j].when_needed != RELOAD_OTHER
6750 && rld[j].when_needed != RELOAD_FOR_OTHER_ADDRESS
6751 && rld[j].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
6752 && rld[j].when_needed != RELOAD_FOR_OPERAND_ADDRESS
6753 && (! conflicting_input
6754 || rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6755 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
6756 && reg_overlap_mentioned_for_reload_p (rld[j].in,
6757 rld[i].in))
6759 int k;
6761 rld[j].when_needed
6762 = ((rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6763 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
6764 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER);
6766 /* Check to see if we accidentally converted two
6767 reloads that use the same reload register with
6768 different inputs to the same type. If so, the
6769 resulting code won't work. */
6770 if (rld[j].reg_rtx)
6771 for (k = 0; k < j; k++)
6772 gcc_assert (rld[k].in == 0 || rld[k].reg_rtx == 0
6773 || rld[k].when_needed != rld[j].when_needed
6774 || !rtx_equal_p (rld[k].reg_rtx,
6775 rld[j].reg_rtx)
6776 || rtx_equal_p (rld[k].in,
6777 rld[j].in));
6783 /* These arrays are filled by emit_reload_insns and its subroutines. */
6784 static rtx input_reload_insns[MAX_RECOG_OPERANDS];
6785 static rtx other_input_address_reload_insns = 0;
6786 static rtx other_input_reload_insns = 0;
6787 static rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
6788 static rtx inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6789 static rtx output_reload_insns[MAX_RECOG_OPERANDS];
6790 static rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
6791 static rtx outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6792 static rtx operand_reload_insns = 0;
6793 static rtx other_operand_reload_insns = 0;
6794 static rtx other_output_reload_insns[MAX_RECOG_OPERANDS];
6796 /* Values to be put in spill_reg_store are put here first. */
6797 static rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
6798 static HARD_REG_SET reg_reloaded_died;
6800 /* Check if *RELOAD_REG is suitable as an intermediate or scratch register
6801 of class NEW_CLASS with mode NEW_MODE. Or alternatively, if alt_reload_reg
6802 is nonzero, if that is suitable. On success, change *RELOAD_REG to the
6803 adjusted register, and return true. Otherwise, return false. */
6804 static bool
6805 reload_adjust_reg_for_temp (rtx *reload_reg, rtx alt_reload_reg,
6806 enum reg_class new_class,
6807 enum machine_mode new_mode)
6810 rtx reg;
6812 for (reg = *reload_reg; reg; reg = alt_reload_reg, alt_reload_reg = 0)
6814 unsigned regno = REGNO (reg);
6816 if (!TEST_HARD_REG_BIT (reg_class_contents[(int) new_class], regno))
6817 continue;
6818 if (GET_MODE (reg) != new_mode)
6820 if (!HARD_REGNO_MODE_OK (regno, new_mode))
6821 continue;
6822 if (hard_regno_nregs[regno][new_mode]
6823 > hard_regno_nregs[regno][GET_MODE (reg)])
6824 continue;
6825 reg = reload_adjust_reg_for_mode (reg, new_mode);
6827 *reload_reg = reg;
6828 return true;
6830 return false;
6833 /* Check if *RELOAD_REG is suitable as a scratch register for the reload
6834 pattern with insn_code ICODE, or alternatively, if alt_reload_reg is
6835 nonzero, if that is suitable. On success, change *RELOAD_REG to the
6836 adjusted register, and return true. Otherwise, return false. */
6837 static bool
6838 reload_adjust_reg_for_icode (rtx *reload_reg, rtx alt_reload_reg,
6839 enum insn_code icode)
6842 enum reg_class new_class = scratch_reload_class (icode);
6843 enum machine_mode new_mode = insn_data[(int) icode].operand[2].mode;
6845 return reload_adjust_reg_for_temp (reload_reg, alt_reload_reg,
6846 new_class, new_mode);
6849 /* Generate insns to perform reload RL, which is for the insn in CHAIN and
6850 has the number J. OLD contains the value to be used as input. */
6852 static void
6853 emit_input_reload_insns (struct insn_chain *chain, struct reload *rl,
6854 rtx old, int j)
6856 rtx insn = chain->insn;
6857 rtx reloadreg;
6858 rtx oldequiv_reg = 0;
6859 rtx oldequiv = 0;
6860 int special = 0;
6861 enum machine_mode mode;
6862 rtx *where;
6864 /* delete_output_reload is only invoked properly if old contains
6865 the original pseudo register. Since this is replaced with a
6866 hard reg when RELOAD_OVERRIDE_IN is set, see if we can
6867 find the pseudo in RELOAD_IN_REG. */
6868 if (reload_override_in[j]
6869 && REG_P (rl->in_reg))
6871 oldequiv = old;
6872 old = rl->in_reg;
6874 if (oldequiv == 0)
6875 oldequiv = old;
6876 else if (REG_P (oldequiv))
6877 oldequiv_reg = oldequiv;
6878 else if (GET_CODE (oldequiv) == SUBREG)
6879 oldequiv_reg = SUBREG_REG (oldequiv);
6881 reloadreg = reload_reg_rtx_for_input[j];
6882 mode = GET_MODE (reloadreg);
6884 /* If we are reloading from a register that was recently stored in
6885 with an output-reload, see if we can prove there was
6886 actually no need to store the old value in it. */
6888 if (optimize && REG_P (oldequiv)
6889 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6890 && spill_reg_store[REGNO (oldequiv)]
6891 && REG_P (old)
6892 && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (oldequiv)])
6893 || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
6894 rl->out_reg)))
6895 delete_output_reload (insn, j, REGNO (oldequiv), reloadreg);
6897 /* Encapsulate OLDEQUIV into the reload mode, then load RELOADREG from
6898 OLDEQUIV. */
6900 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
6901 oldequiv = SUBREG_REG (oldequiv);
6902 if (GET_MODE (oldequiv) != VOIDmode
6903 && mode != GET_MODE (oldequiv))
6904 oldequiv = gen_lowpart_SUBREG (mode, oldequiv);
6906 /* Switch to the right place to emit the reload insns. */
6907 switch (rl->when_needed)
6909 case RELOAD_OTHER:
6910 where = &other_input_reload_insns;
6911 break;
6912 case RELOAD_FOR_INPUT:
6913 where = &input_reload_insns[rl->opnum];
6914 break;
6915 case RELOAD_FOR_INPUT_ADDRESS:
6916 where = &input_address_reload_insns[rl->opnum];
6917 break;
6918 case RELOAD_FOR_INPADDR_ADDRESS:
6919 where = &inpaddr_address_reload_insns[rl->opnum];
6920 break;
6921 case RELOAD_FOR_OUTPUT_ADDRESS:
6922 where = &output_address_reload_insns[rl->opnum];
6923 break;
6924 case RELOAD_FOR_OUTADDR_ADDRESS:
6925 where = &outaddr_address_reload_insns[rl->opnum];
6926 break;
6927 case RELOAD_FOR_OPERAND_ADDRESS:
6928 where = &operand_reload_insns;
6929 break;
6930 case RELOAD_FOR_OPADDR_ADDR:
6931 where = &other_operand_reload_insns;
6932 break;
6933 case RELOAD_FOR_OTHER_ADDRESS:
6934 where = &other_input_address_reload_insns;
6935 break;
6936 default:
6937 gcc_unreachable ();
6940 push_to_sequence (*where);
6942 /* Auto-increment addresses must be reloaded in a special way. */
6943 if (rl->out && ! rl->out_reg)
6945 /* We are not going to bother supporting the case where a
6946 incremented register can't be copied directly from
6947 OLDEQUIV since this seems highly unlikely. */
6948 gcc_assert (rl->secondary_in_reload < 0);
6950 if (reload_inherited[j])
6951 oldequiv = reloadreg;
6953 old = XEXP (rl->in_reg, 0);
6955 if (optimize && REG_P (oldequiv)
6956 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6957 && spill_reg_store[REGNO (oldequiv)]
6958 && REG_P (old)
6959 && (dead_or_set_p (insn,
6960 spill_reg_stored_to[REGNO (oldequiv)])
6961 || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
6962 old)))
6963 delete_output_reload (insn, j, REGNO (oldequiv), reloadreg);
6965 /* Prevent normal processing of this reload. */
6966 special = 1;
6967 /* Output a special code sequence for this case. */
6968 new_spill_reg_store[REGNO (reloadreg)]
6969 = inc_for_reload (reloadreg, oldequiv, rl->out,
6970 rl->inc);
6973 /* If we are reloading a pseudo-register that was set by the previous
6974 insn, see if we can get rid of that pseudo-register entirely
6975 by redirecting the previous insn into our reload register. */
6977 else if (optimize && REG_P (old)
6978 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6979 && dead_or_set_p (insn, old)
6980 /* This is unsafe if some other reload
6981 uses the same reg first. */
6982 && ! conflicts_with_override (reloadreg)
6983 && free_for_value_p (REGNO (reloadreg), rl->mode, rl->opnum,
6984 rl->when_needed, old, rl->out, j, 0))
6986 rtx temp = PREV_INSN (insn);
6987 while (temp && (NOTE_P (temp) || DEBUG_INSN_P (temp)))
6988 temp = PREV_INSN (temp);
6989 if (temp
6990 && NONJUMP_INSN_P (temp)
6991 && GET_CODE (PATTERN (temp)) == SET
6992 && SET_DEST (PATTERN (temp)) == old
6993 /* Make sure we can access insn_operand_constraint. */
6994 && asm_noperands (PATTERN (temp)) < 0
6995 /* This is unsafe if operand occurs more than once in current
6996 insn. Perhaps some occurrences aren't reloaded. */
6997 && count_occurrences (PATTERN (insn), old, 0) == 1)
6999 rtx old = SET_DEST (PATTERN (temp));
7000 /* Store into the reload register instead of the pseudo. */
7001 SET_DEST (PATTERN (temp)) = reloadreg;
7003 /* Verify that resulting insn is valid. */
7004 extract_insn (temp);
7005 if (constrain_operands (1))
7007 /* If the previous insn is an output reload, the source is
7008 a reload register, and its spill_reg_store entry will
7009 contain the previous destination. This is now
7010 invalid. */
7011 if (REG_P (SET_SRC (PATTERN (temp)))
7012 && REGNO (SET_SRC (PATTERN (temp))) < FIRST_PSEUDO_REGISTER)
7014 spill_reg_store[REGNO (SET_SRC (PATTERN (temp)))] = 0;
7015 spill_reg_stored_to[REGNO (SET_SRC (PATTERN (temp)))] = 0;
7018 /* If these are the only uses of the pseudo reg,
7019 pretend for GDB it lives in the reload reg we used. */
7020 if (REG_N_DEATHS (REGNO (old)) == 1
7021 && REG_N_SETS (REGNO (old)) == 1)
7023 reg_renumber[REGNO (old)] = REGNO (reloadreg);
7024 if (ira_conflicts_p)
7025 /* Inform IRA about the change. */
7026 ira_mark_allocation_change (REGNO (old));
7027 alter_reg (REGNO (old), -1, false);
7029 special = 1;
7031 /* Adjust any debug insns between temp and insn. */
7032 while ((temp = NEXT_INSN (temp)) != insn)
7033 if (DEBUG_INSN_P (temp))
7034 replace_rtx (PATTERN (temp), old, reloadreg);
7035 else
7036 gcc_assert (NOTE_P (temp));
7038 else
7040 SET_DEST (PATTERN (temp)) = old;
7045 /* We can't do that, so output an insn to load RELOADREG. */
7047 /* If we have a secondary reload, pick up the secondary register
7048 and icode, if any. If OLDEQUIV and OLD are different or
7049 if this is an in-out reload, recompute whether or not we
7050 still need a secondary register and what the icode should
7051 be. If we still need a secondary register and the class or
7052 icode is different, go back to reloading from OLD if using
7053 OLDEQUIV means that we got the wrong type of register. We
7054 cannot have different class or icode due to an in-out reload
7055 because we don't make such reloads when both the input and
7056 output need secondary reload registers. */
7058 if (! special && rl->secondary_in_reload >= 0)
7060 rtx second_reload_reg = 0;
7061 rtx third_reload_reg = 0;
7062 int secondary_reload = rl->secondary_in_reload;
7063 rtx real_oldequiv = oldequiv;
7064 rtx real_old = old;
7065 rtx tmp;
7066 enum insn_code icode;
7067 enum insn_code tertiary_icode = CODE_FOR_nothing;
7069 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
7070 and similarly for OLD.
7071 See comments in get_secondary_reload in reload.c. */
7072 /* If it is a pseudo that cannot be replaced with its
7073 equivalent MEM, we must fall back to reload_in, which
7074 will have all the necessary substitutions registered.
7075 Likewise for a pseudo that can't be replaced with its
7076 equivalent constant.
7078 Take extra care for subregs of such pseudos. Note that
7079 we cannot use reg_equiv_mem in this case because it is
7080 not in the right mode. */
7082 tmp = oldequiv;
7083 if (GET_CODE (tmp) == SUBREG)
7084 tmp = SUBREG_REG (tmp);
7085 if (REG_P (tmp)
7086 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
7087 && (reg_equiv_memory_loc[REGNO (tmp)] != 0
7088 || reg_equiv_constant[REGNO (tmp)] != 0))
7090 if (! reg_equiv_mem[REGNO (tmp)]
7091 || num_not_at_initial_offset
7092 || GET_CODE (oldequiv) == SUBREG)
7093 real_oldequiv = rl->in;
7094 else
7095 real_oldequiv = reg_equiv_mem[REGNO (tmp)];
7098 tmp = old;
7099 if (GET_CODE (tmp) == SUBREG)
7100 tmp = SUBREG_REG (tmp);
7101 if (REG_P (tmp)
7102 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
7103 && (reg_equiv_memory_loc[REGNO (tmp)] != 0
7104 || reg_equiv_constant[REGNO (tmp)] != 0))
7106 if (! reg_equiv_mem[REGNO (tmp)]
7107 || num_not_at_initial_offset
7108 || GET_CODE (old) == SUBREG)
7109 real_old = rl->in;
7110 else
7111 real_old = reg_equiv_mem[REGNO (tmp)];
7114 second_reload_reg = rld[secondary_reload].reg_rtx;
7115 if (rld[secondary_reload].secondary_in_reload >= 0)
7117 int tertiary_reload = rld[secondary_reload].secondary_in_reload;
7119 third_reload_reg = rld[tertiary_reload].reg_rtx;
7120 tertiary_icode = rld[secondary_reload].secondary_in_icode;
7121 /* We'd have to add more code for quartary reloads. */
7122 gcc_assert (rld[tertiary_reload].secondary_in_reload < 0);
7124 icode = rl->secondary_in_icode;
7126 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
7127 || (rl->in != 0 && rl->out != 0))
7129 secondary_reload_info sri, sri2;
7130 enum reg_class new_class, new_t_class;
7132 sri.icode = CODE_FOR_nothing;
7133 sri.prev_sri = NULL;
7134 new_class = targetm.secondary_reload (1, real_oldequiv, rl->rclass,
7135 mode, &sri);
7137 if (new_class == NO_REGS && sri.icode == CODE_FOR_nothing)
7138 second_reload_reg = 0;
7139 else if (new_class == NO_REGS)
7141 if (reload_adjust_reg_for_icode (&second_reload_reg,
7142 third_reload_reg,
7143 (enum insn_code) sri.icode))
7145 icode = (enum insn_code) sri.icode;
7146 third_reload_reg = 0;
7148 else
7150 oldequiv = old;
7151 real_oldequiv = real_old;
7154 else if (sri.icode != CODE_FOR_nothing)
7155 /* We currently lack a way to express this in reloads. */
7156 gcc_unreachable ();
7157 else
7159 sri2.icode = CODE_FOR_nothing;
7160 sri2.prev_sri = &sri;
7161 new_t_class = targetm.secondary_reload (1, real_oldequiv,
7162 new_class, mode, &sri);
7163 if (new_t_class == NO_REGS && sri2.icode == CODE_FOR_nothing)
7165 if (reload_adjust_reg_for_temp (&second_reload_reg,
7166 third_reload_reg,
7167 new_class, mode))
7169 third_reload_reg = 0;
7170 tertiary_icode = (enum insn_code) sri2.icode;
7172 else
7174 oldequiv = old;
7175 real_oldequiv = real_old;
7178 else if (new_t_class == NO_REGS && sri2.icode != CODE_FOR_nothing)
7180 rtx intermediate = second_reload_reg;
7182 if (reload_adjust_reg_for_temp (&intermediate, NULL,
7183 new_class, mode)
7184 && reload_adjust_reg_for_icode (&third_reload_reg, NULL,
7185 ((enum insn_code)
7186 sri2.icode)))
7188 second_reload_reg = intermediate;
7189 tertiary_icode = (enum insn_code) sri2.icode;
7191 else
7193 oldequiv = old;
7194 real_oldequiv = real_old;
7197 else if (new_t_class != NO_REGS && sri2.icode == CODE_FOR_nothing)
7199 rtx intermediate = second_reload_reg;
7201 if (reload_adjust_reg_for_temp (&intermediate, NULL,
7202 new_class, mode)
7203 && reload_adjust_reg_for_temp (&third_reload_reg, NULL,
7204 new_t_class, mode))
7206 second_reload_reg = intermediate;
7207 tertiary_icode = (enum insn_code) sri2.icode;
7209 else
7211 oldequiv = old;
7212 real_oldequiv = real_old;
7215 else
7217 /* This could be handled more intelligently too. */
7218 oldequiv = old;
7219 real_oldequiv = real_old;
7224 /* If we still need a secondary reload register, check
7225 to see if it is being used as a scratch or intermediate
7226 register and generate code appropriately. If we need
7227 a scratch register, use REAL_OLDEQUIV since the form of
7228 the insn may depend on the actual address if it is
7229 a MEM. */
7231 if (second_reload_reg)
7233 if (icode != CODE_FOR_nothing)
7235 /* We'd have to add extra code to handle this case. */
7236 gcc_assert (!third_reload_reg);
7238 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
7239 second_reload_reg));
7240 special = 1;
7242 else
7244 /* See if we need a scratch register to load the
7245 intermediate register (a tertiary reload). */
7246 if (tertiary_icode != CODE_FOR_nothing)
7248 emit_insn ((GEN_FCN (tertiary_icode)
7249 (second_reload_reg, real_oldequiv,
7250 third_reload_reg)));
7252 else if (third_reload_reg)
7254 gen_reload (third_reload_reg, real_oldequiv,
7255 rl->opnum,
7256 rl->when_needed);
7257 gen_reload (second_reload_reg, third_reload_reg,
7258 rl->opnum,
7259 rl->when_needed);
7261 else
7262 gen_reload (second_reload_reg, real_oldequiv,
7263 rl->opnum,
7264 rl->when_needed);
7266 oldequiv = second_reload_reg;
7271 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
7273 rtx real_oldequiv = oldequiv;
7275 if ((REG_P (oldequiv)
7276 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
7277 && (reg_equiv_memory_loc[REGNO (oldequiv)] != 0
7278 || reg_equiv_constant[REGNO (oldequiv)] != 0))
7279 || (GET_CODE (oldequiv) == SUBREG
7280 && REG_P (SUBREG_REG (oldequiv))
7281 && (REGNO (SUBREG_REG (oldequiv))
7282 >= FIRST_PSEUDO_REGISTER)
7283 && ((reg_equiv_memory_loc
7284 [REGNO (SUBREG_REG (oldequiv))] != 0)
7285 || (reg_equiv_constant
7286 [REGNO (SUBREG_REG (oldequiv))] != 0)))
7287 || (CONSTANT_P (oldequiv)
7288 && (PREFERRED_RELOAD_CLASS (oldequiv,
7289 REGNO_REG_CLASS (REGNO (reloadreg)))
7290 == NO_REGS)))
7291 real_oldequiv = rl->in;
7292 gen_reload (reloadreg, real_oldequiv, rl->opnum,
7293 rl->when_needed);
7296 if (flag_non_call_exceptions)
7297 copy_eh_notes (insn, get_insns ());
7299 /* End this sequence. */
7300 *where = get_insns ();
7301 end_sequence ();
7303 /* Update reload_override_in so that delete_address_reloads_1
7304 can see the actual register usage. */
7305 if (oldequiv_reg)
7306 reload_override_in[j] = oldequiv;
7309 /* Generate insns to for the output reload RL, which is for the insn described
7310 by CHAIN and has the number J. */
7311 static void
7312 emit_output_reload_insns (struct insn_chain *chain, struct reload *rl,
7313 int j)
7315 rtx reloadreg;
7316 rtx insn = chain->insn;
7317 int special = 0;
7318 rtx old = rl->out;
7319 enum machine_mode mode;
7320 rtx p;
7321 rtx rl_reg_rtx;
7323 if (rl->when_needed == RELOAD_OTHER)
7324 start_sequence ();
7325 else
7326 push_to_sequence (output_reload_insns[rl->opnum]);
7328 rl_reg_rtx = reload_reg_rtx_for_output[j];
7329 mode = GET_MODE (rl_reg_rtx);
7331 reloadreg = rl_reg_rtx;
7333 /* If we need two reload regs, set RELOADREG to the intermediate
7334 one, since it will be stored into OLD. We might need a secondary
7335 register only for an input reload, so check again here. */
7337 if (rl->secondary_out_reload >= 0)
7339 rtx real_old = old;
7340 int secondary_reload = rl->secondary_out_reload;
7341 int tertiary_reload = rld[secondary_reload].secondary_out_reload;
7343 if (REG_P (old) && REGNO (old) >= FIRST_PSEUDO_REGISTER
7344 && reg_equiv_mem[REGNO (old)] != 0)
7345 real_old = reg_equiv_mem[REGNO (old)];
7347 if (secondary_reload_class (0, rl->rclass, mode, real_old) != NO_REGS)
7349 rtx second_reloadreg = reloadreg;
7350 reloadreg = rld[secondary_reload].reg_rtx;
7352 /* See if RELOADREG is to be used as a scratch register
7353 or as an intermediate register. */
7354 if (rl->secondary_out_icode != CODE_FOR_nothing)
7356 /* We'd have to add extra code to handle this case. */
7357 gcc_assert (tertiary_reload < 0);
7359 emit_insn ((GEN_FCN (rl->secondary_out_icode)
7360 (real_old, second_reloadreg, reloadreg)));
7361 special = 1;
7363 else
7365 /* See if we need both a scratch and intermediate reload
7366 register. */
7368 enum insn_code tertiary_icode
7369 = rld[secondary_reload].secondary_out_icode;
7371 /* We'd have to add more code for quartary reloads. */
7372 gcc_assert (tertiary_reload < 0
7373 || rld[tertiary_reload].secondary_out_reload < 0);
7375 if (GET_MODE (reloadreg) != mode)
7376 reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);
7378 if (tertiary_icode != CODE_FOR_nothing)
7380 rtx third_reloadreg = rld[tertiary_reload].reg_rtx;
7381 rtx tem;
7383 /* Copy primary reload reg to secondary reload reg.
7384 (Note that these have been swapped above, then
7385 secondary reload reg to OLD using our insn.) */
7387 /* If REAL_OLD is a paradoxical SUBREG, remove it
7388 and try to put the opposite SUBREG on
7389 RELOADREG. */
7390 if (GET_CODE (real_old) == SUBREG
7391 && (GET_MODE_SIZE (GET_MODE (real_old))
7392 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
7393 && 0 != (tem = gen_lowpart_common
7394 (GET_MODE (SUBREG_REG (real_old)),
7395 reloadreg)))
7396 real_old = SUBREG_REG (real_old), reloadreg = tem;
7398 gen_reload (reloadreg, second_reloadreg,
7399 rl->opnum, rl->when_needed);
7400 emit_insn ((GEN_FCN (tertiary_icode)
7401 (real_old, reloadreg, third_reloadreg)));
7402 special = 1;
7405 else
7407 /* Copy between the reload regs here and then to
7408 OUT later. */
7410 gen_reload (reloadreg, second_reloadreg,
7411 rl->opnum, rl->when_needed);
7412 if (tertiary_reload >= 0)
7414 rtx third_reloadreg = rld[tertiary_reload].reg_rtx;
7416 gen_reload (third_reloadreg, reloadreg,
7417 rl->opnum, rl->when_needed);
7418 reloadreg = third_reloadreg;
7425 /* Output the last reload insn. */
7426 if (! special)
7428 rtx set;
7430 /* Don't output the last reload if OLD is not the dest of
7431 INSN and is in the src and is clobbered by INSN. */
7432 if (! flag_expensive_optimizations
7433 || !REG_P (old)
7434 || !(set = single_set (insn))
7435 || rtx_equal_p (old, SET_DEST (set))
7436 || !reg_mentioned_p (old, SET_SRC (set))
7437 || !((REGNO (old) < FIRST_PSEUDO_REGISTER)
7438 && regno_clobbered_p (REGNO (old), insn, rl->mode, 0)))
7439 gen_reload (old, reloadreg, rl->opnum,
7440 rl->when_needed);
7443 /* Look at all insns we emitted, just to be safe. */
7444 for (p = get_insns (); p; p = NEXT_INSN (p))
7445 if (INSN_P (p))
7447 rtx pat = PATTERN (p);
7449 /* If this output reload doesn't come from a spill reg,
7450 clear any memory of reloaded copies of the pseudo reg.
7451 If this output reload comes from a spill reg,
7452 reg_has_output_reload will make this do nothing. */
7453 note_stores (pat, forget_old_reloads_1, NULL);
7455 if (reg_mentioned_p (rl_reg_rtx, pat))
7457 rtx set = single_set (insn);
7458 if (reload_spill_index[j] < 0
7459 && set
7460 && SET_SRC (set) == rl_reg_rtx)
7462 int src = REGNO (SET_SRC (set));
7464 reload_spill_index[j] = src;
7465 SET_HARD_REG_BIT (reg_is_output_reload, src);
7466 if (find_regno_note (insn, REG_DEAD, src))
7467 SET_HARD_REG_BIT (reg_reloaded_died, src);
7469 if (HARD_REGISTER_P (rl_reg_rtx))
7471 int s = rl->secondary_out_reload;
7472 set = single_set (p);
7473 /* If this reload copies only to the secondary reload
7474 register, the secondary reload does the actual
7475 store. */
7476 if (s >= 0 && set == NULL_RTX)
7477 /* We can't tell what function the secondary reload
7478 has and where the actual store to the pseudo is
7479 made; leave new_spill_reg_store alone. */
7481 else if (s >= 0
7482 && SET_SRC (set) == rl_reg_rtx
7483 && SET_DEST (set) == rld[s].reg_rtx)
7485 /* Usually the next instruction will be the
7486 secondary reload insn; if we can confirm
7487 that it is, setting new_spill_reg_store to
7488 that insn will allow an extra optimization. */
7489 rtx s_reg = rld[s].reg_rtx;
7490 rtx next = NEXT_INSN (p);
7491 rld[s].out = rl->out;
7492 rld[s].out_reg = rl->out_reg;
7493 set = single_set (next);
7494 if (set && SET_SRC (set) == s_reg
7495 && ! new_spill_reg_store[REGNO (s_reg)])
7497 SET_HARD_REG_BIT (reg_is_output_reload,
7498 REGNO (s_reg));
7499 new_spill_reg_store[REGNO (s_reg)] = next;
7502 else
7503 new_spill_reg_store[REGNO (rl_reg_rtx)] = p;
7508 if (rl->when_needed == RELOAD_OTHER)
7510 emit_insn (other_output_reload_insns[rl->opnum]);
7511 other_output_reload_insns[rl->opnum] = get_insns ();
7513 else
7514 output_reload_insns[rl->opnum] = get_insns ();
7516 if (flag_non_call_exceptions)
7517 copy_eh_notes (insn, get_insns ());
7519 end_sequence ();
7522 /* Do input reloading for reload RL, which is for the insn described by CHAIN
7523 and has the number J. */
7524 static void
7525 do_input_reload (struct insn_chain *chain, struct reload *rl, int j)
7527 rtx insn = chain->insn;
7528 rtx old = (rl->in && MEM_P (rl->in)
7529 ? rl->in_reg : rl->in);
7530 rtx reg_rtx = rl->reg_rtx;
7532 if (old && reg_rtx)
7534 enum machine_mode mode;
7536 /* Determine the mode to reload in.
7537 This is very tricky because we have three to choose from.
7538 There is the mode the insn operand wants (rl->inmode).
7539 There is the mode of the reload register RELOADREG.
7540 There is the intrinsic mode of the operand, which we could find
7541 by stripping some SUBREGs.
7542 It turns out that RELOADREG's mode is irrelevant:
7543 we can change that arbitrarily.
7545 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
7546 then the reload reg may not support QImode moves, so use SImode.
7547 If foo is in memory due to spilling a pseudo reg, this is safe,
7548 because the QImode value is in the least significant part of a
7549 slot big enough for a SImode. If foo is some other sort of
7550 memory reference, then it is impossible to reload this case,
7551 so previous passes had better make sure this never happens.
7553 Then consider a one-word union which has SImode and one of its
7554 members is a float, being fetched as (SUBREG:SF union:SI).
7555 We must fetch that as SFmode because we could be loading into
7556 a float-only register. In this case OLD's mode is correct.
7558 Consider an immediate integer: it has VOIDmode. Here we need
7559 to get a mode from something else.
7561 In some cases, there is a fourth mode, the operand's
7562 containing mode. If the insn specifies a containing mode for
7563 this operand, it overrides all others.
7565 I am not sure whether the algorithm here is always right,
7566 but it does the right things in those cases. */
7568 mode = GET_MODE (old);
7569 if (mode == VOIDmode)
7570 mode = rl->inmode;
7572 /* We cannot use gen_lowpart_common since it can do the wrong thing
7573 when REG_RTX has a multi-word mode. Note that REG_RTX must
7574 always be a REG here. */
7575 if (GET_MODE (reg_rtx) != mode)
7576 reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
7578 reload_reg_rtx_for_input[j] = reg_rtx;
7580 if (old != 0
7581 /* AUTO_INC reloads need to be handled even if inherited. We got an
7582 AUTO_INC reload if reload_out is set but reload_out_reg isn't. */
7583 && (! reload_inherited[j] || (rl->out && ! rl->out_reg))
7584 && ! rtx_equal_p (reg_rtx, old)
7585 && reg_rtx != 0)
7586 emit_input_reload_insns (chain, rld + j, old, j);
7588 /* When inheriting a wider reload, we have a MEM in rl->in,
7589 e.g. inheriting a SImode output reload for
7590 (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10))) */
7591 if (optimize && reload_inherited[j] && rl->in
7592 && MEM_P (rl->in)
7593 && MEM_P (rl->in_reg)
7594 && reload_spill_index[j] >= 0
7595 && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
7596 rl->in = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];
7598 /* If we are reloading a register that was recently stored in with an
7599 output-reload, see if we can prove there was
7600 actually no need to store the old value in it. */
7602 if (optimize
7603 && (reload_inherited[j] || reload_override_in[j])
7604 && reg_rtx
7605 && REG_P (reg_rtx)
7606 && spill_reg_store[REGNO (reg_rtx)] != 0
7607 #if 0
7608 /* There doesn't seem to be any reason to restrict this to pseudos
7609 and doing so loses in the case where we are copying from a
7610 register of the wrong class. */
7611 && !HARD_REGISTER_P (spill_reg_stored_to[REGNO (reg_rtx)])
7612 #endif
7613 /* The insn might have already some references to stackslots
7614 replaced by MEMs, while reload_out_reg still names the
7615 original pseudo. */
7616 && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (reg_rtx)])
7617 || rtx_equal_p (spill_reg_stored_to[REGNO (reg_rtx)], rl->out_reg)))
7618 delete_output_reload (insn, j, REGNO (reg_rtx), reg_rtx);
7621 /* Do output reloading for reload RL, which is for the insn described by
7622 CHAIN and has the number J.
7623 ??? At some point we need to support handling output reloads of
7624 JUMP_INSNs or insns that set cc0. */
7625 static void
7626 do_output_reload (struct insn_chain *chain, struct reload *rl, int j)
7628 rtx note, old;
7629 rtx insn = chain->insn;
7630 /* If this is an output reload that stores something that is
7631 not loaded in this same reload, see if we can eliminate a previous
7632 store. */
7633 rtx pseudo = rl->out_reg;
7634 rtx reg_rtx = rl->reg_rtx;
7636 if (rl->out && reg_rtx)
7638 enum machine_mode mode;
7640 /* Determine the mode to reload in.
7641 See comments above (for input reloading). */
7642 mode = GET_MODE (rl->out);
7643 if (mode == VOIDmode)
7645 /* VOIDmode should never happen for an output. */
7646 if (asm_noperands (PATTERN (insn)) < 0)
7647 /* It's the compiler's fault. */
7648 fatal_insn ("VOIDmode on an output", insn);
7649 error_for_asm (insn, "output operand is constant in %<asm%>");
7650 /* Prevent crash--use something we know is valid. */
7651 mode = word_mode;
7652 rl->out = gen_rtx_REG (mode, REGNO (reg_rtx));
7654 if (GET_MODE (reg_rtx) != mode)
7655 reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
7657 reload_reg_rtx_for_output[j] = reg_rtx;
7659 if (pseudo
7660 && optimize
7661 && REG_P (pseudo)
7662 && ! rtx_equal_p (rl->in_reg, pseudo)
7663 && REGNO (pseudo) >= FIRST_PSEUDO_REGISTER
7664 && reg_last_reload_reg[REGNO (pseudo)])
7666 int pseudo_no = REGNO (pseudo);
7667 int last_regno = REGNO (reg_last_reload_reg[pseudo_no]);
7669 /* We don't need to test full validity of last_regno for
7670 inherit here; we only want to know if the store actually
7671 matches the pseudo. */
7672 if (TEST_HARD_REG_BIT (reg_reloaded_valid, last_regno)
7673 && reg_reloaded_contents[last_regno] == pseudo_no
7674 && spill_reg_store[last_regno]
7675 && rtx_equal_p (pseudo, spill_reg_stored_to[last_regno]))
7676 delete_output_reload (insn, j, last_regno, reg_rtx);
7679 old = rl->out_reg;
7680 if (old == 0
7681 || reg_rtx == 0
7682 || rtx_equal_p (old, reg_rtx))
7683 return;
7685 /* An output operand that dies right away does need a reload,
7686 but need not be copied from it. Show the new location in the
7687 REG_UNUSED note. */
7688 if ((REG_P (old) || GET_CODE (old) == SCRATCH)
7689 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
7691 XEXP (note, 0) = reg_rtx;
7692 return;
7694 /* Likewise for a SUBREG of an operand that dies. */
7695 else if (GET_CODE (old) == SUBREG
7696 && REG_P (SUBREG_REG (old))
7697 && 0 != (note = find_reg_note (insn, REG_UNUSED,
7698 SUBREG_REG (old))))
7700 XEXP (note, 0) = gen_lowpart_common (GET_MODE (old), reg_rtx);
7701 return;
7703 else if (GET_CODE (old) == SCRATCH)
7704 /* If we aren't optimizing, there won't be a REG_UNUSED note,
7705 but we don't want to make an output reload. */
7706 return;
7708 /* If is a JUMP_INSN, we can't support output reloads yet. */
7709 gcc_assert (NONJUMP_INSN_P (insn));
7711 emit_output_reload_insns (chain, rld + j, j);
7714 /* A reload copies values of MODE from register SRC to register DEST.
7715 Return true if it can be treated for inheritance purposes like a
7716 group of reloads, each one reloading a single hard register. The
7717 caller has already checked that (reg:MODE SRC) and (reg:MODE DEST)
7718 occupy the same number of hard registers. */
7720 static bool
7721 inherit_piecemeal_p (int dest ATTRIBUTE_UNUSED,
7722 int src ATTRIBUTE_UNUSED,
7723 enum machine_mode mode ATTRIBUTE_UNUSED)
7725 #ifdef CANNOT_CHANGE_MODE_CLASS
7726 return (!REG_CANNOT_CHANGE_MODE_P (dest, mode, reg_raw_mode[dest])
7727 && !REG_CANNOT_CHANGE_MODE_P (src, mode, reg_raw_mode[src]));
7728 #else
7729 return true;
7730 #endif
7733 /* Output insns to reload values in and out of the chosen reload regs. */
7735 static void
7736 emit_reload_insns (struct insn_chain *chain)
7738 rtx insn = chain->insn;
7740 int j;
7742 CLEAR_HARD_REG_SET (reg_reloaded_died);
7744 for (j = 0; j < reload_n_operands; j++)
7745 input_reload_insns[j] = input_address_reload_insns[j]
7746 = inpaddr_address_reload_insns[j]
7747 = output_reload_insns[j] = output_address_reload_insns[j]
7748 = outaddr_address_reload_insns[j]
7749 = other_output_reload_insns[j] = 0;
7750 other_input_address_reload_insns = 0;
7751 other_input_reload_insns = 0;
7752 operand_reload_insns = 0;
7753 other_operand_reload_insns = 0;
7755 /* Dump reloads into the dump file. */
7756 if (dump_file)
7758 fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
7759 debug_reload_to_stream (dump_file);
7762 /* Now output the instructions to copy the data into and out of the
7763 reload registers. Do these in the order that the reloads were reported,
7764 since reloads of base and index registers precede reloads of operands
7765 and the operands may need the base and index registers reloaded. */
7767 for (j = 0; j < n_reloads; j++)
7769 if (rld[j].reg_rtx && HARD_REGISTER_P (rld[j].reg_rtx))
7771 unsigned int i;
7773 for (i = REGNO (rld[j].reg_rtx); i < END_REGNO (rld[j].reg_rtx); i++)
7774 new_spill_reg_store[i] = 0;
7777 do_input_reload (chain, rld + j, j);
7778 do_output_reload (chain, rld + j, j);
7781 /* Now write all the insns we made for reloads in the order expected by
7782 the allocation functions. Prior to the insn being reloaded, we write
7783 the following reloads:
7785 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
7787 RELOAD_OTHER reloads.
7789 For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
7790 by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
7791 RELOAD_FOR_INPUT reload for the operand.
7793 RELOAD_FOR_OPADDR_ADDRS reloads.
7795 RELOAD_FOR_OPERAND_ADDRESS reloads.
7797 After the insn being reloaded, we write the following:
7799 For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
7800 by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
7801 RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
7802 reloads for the operand. The RELOAD_OTHER output reloads are
7803 output in descending order by reload number. */
7805 emit_insn_before (other_input_address_reload_insns, insn);
7806 emit_insn_before (other_input_reload_insns, insn);
7808 for (j = 0; j < reload_n_operands; j++)
7810 emit_insn_before (inpaddr_address_reload_insns[j], insn);
7811 emit_insn_before (input_address_reload_insns[j], insn);
7812 emit_insn_before (input_reload_insns[j], insn);
7815 emit_insn_before (other_operand_reload_insns, insn);
7816 emit_insn_before (operand_reload_insns, insn);
7818 for (j = 0; j < reload_n_operands; j++)
7820 rtx x = emit_insn_after (outaddr_address_reload_insns[j], insn);
7821 x = emit_insn_after (output_address_reload_insns[j], x);
7822 x = emit_insn_after (output_reload_insns[j], x);
7823 emit_insn_after (other_output_reload_insns[j], x);
7826 /* For all the spill regs newly reloaded in this instruction,
7827 record what they were reloaded from, so subsequent instructions
7828 can inherit the reloads.
7830 Update spill_reg_store for the reloads of this insn.
7831 Copy the elements that were updated in the loop above. */
7833 for (j = 0; j < n_reloads; j++)
7835 int r = reload_order[j];
7836 int i = reload_spill_index[r];
7838 /* If this is a non-inherited input reload from a pseudo, we must
7839 clear any memory of a previous store to the same pseudo. Only do
7840 something if there will not be an output reload for the pseudo
7841 being reloaded. */
7842 if (rld[r].in_reg != 0
7843 && ! (reload_inherited[r] || reload_override_in[r]))
7845 rtx reg = rld[r].in_reg;
7847 if (GET_CODE (reg) == SUBREG)
7848 reg = SUBREG_REG (reg);
7850 if (REG_P (reg)
7851 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
7852 && !REGNO_REG_SET_P (&reg_has_output_reload, REGNO (reg)))
7854 int nregno = REGNO (reg);
7856 if (reg_last_reload_reg[nregno])
7858 int last_regno = REGNO (reg_last_reload_reg[nregno]);
7860 if (reg_reloaded_contents[last_regno] == nregno)
7861 spill_reg_store[last_regno] = 0;
7866 /* I is nonneg if this reload used a register.
7867 If rld[r].reg_rtx is 0, this is an optional reload
7868 that we opted to ignore. */
7870 if (i >= 0 && rld[r].reg_rtx != 0)
7872 int nr = hard_regno_nregs[i][GET_MODE (rld[r].reg_rtx)];
7873 int k;
7875 /* For a multi register reload, we need to check if all or part
7876 of the value lives to the end. */
7877 for (k = 0; k < nr; k++)
7878 if (reload_reg_reaches_end_p (i + k, rld[r].opnum,
7879 rld[r].when_needed))
7880 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
7882 /* Maybe the spill reg contains a copy of reload_out. */
7883 if (rld[r].out != 0
7884 && (REG_P (rld[r].out)
7885 #ifdef AUTO_INC_DEC
7886 || ! rld[r].out_reg
7887 #endif
7888 || REG_P (rld[r].out_reg)))
7890 rtx reg;
7891 enum machine_mode mode;
7892 int regno, nregs;
7894 reg = reload_reg_rtx_for_output[r];
7895 mode = GET_MODE (reg);
7896 regno = REGNO (reg);
7897 nregs = hard_regno_nregs[regno][mode];
7898 if (reload_regs_reach_end_p (regno, nregs, rld[r].opnum,
7899 rld[r].when_needed))
7901 rtx out = (REG_P (rld[r].out)
7902 ? rld[r].out
7903 : rld[r].out_reg
7904 ? rld[r].out_reg
7905 /* AUTO_INC */ : XEXP (rld[r].in_reg, 0));
7906 int out_regno = REGNO (out);
7907 int out_nregs = (!HARD_REGISTER_NUM_P (out_regno) ? 1
7908 : hard_regno_nregs[out_regno][mode]);
7909 bool piecemeal;
7911 spill_reg_store[regno] = new_spill_reg_store[regno];
7912 spill_reg_stored_to[regno] = out;
7913 reg_last_reload_reg[out_regno] = reg;
7915 piecemeal = (HARD_REGISTER_NUM_P (out_regno)
7916 && nregs == out_nregs
7917 && inherit_piecemeal_p (out_regno, regno, mode));
7919 /* If OUT_REGNO is a hard register, it may occupy more than
7920 one register. If it does, say what is in the
7921 rest of the registers assuming that both registers
7922 agree on how many words the object takes. If not,
7923 invalidate the subsequent registers. */
7925 if (HARD_REGISTER_NUM_P (out_regno))
7926 for (k = 1; k < out_nregs; k++)
7927 reg_last_reload_reg[out_regno + k]
7928 = (piecemeal ? regno_reg_rtx[regno + k] : 0);
7930 /* Now do the inverse operation. */
7931 for (k = 0; k < nregs; k++)
7933 CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
7934 reg_reloaded_contents[regno + k]
7935 = (!HARD_REGISTER_NUM_P (out_regno) || !piecemeal
7936 ? out_regno
7937 : out_regno + k);
7938 reg_reloaded_insn[regno + k] = insn;
7939 SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
7940 if (HARD_REGNO_CALL_PART_CLOBBERED (regno + k, mode))
7941 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
7942 regno + k);
7943 else
7944 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
7945 regno + k);
7949 /* Maybe the spill reg contains a copy of reload_in. Only do
7950 something if there will not be an output reload for
7951 the register being reloaded. */
7952 else if (rld[r].out_reg == 0
7953 && rld[r].in != 0
7954 && ((REG_P (rld[r].in)
7955 && !HARD_REGISTER_P (rld[r].in)
7956 && !REGNO_REG_SET_P (&reg_has_output_reload,
7957 REGNO (rld[r].in)))
7958 || (REG_P (rld[r].in_reg)
7959 && !REGNO_REG_SET_P (&reg_has_output_reload,
7960 REGNO (rld[r].in_reg))))
7961 && !reg_set_p (reload_reg_rtx_for_input[r], PATTERN (insn)))
7963 rtx reg;
7964 enum machine_mode mode;
7965 int regno, nregs;
7967 reg = reload_reg_rtx_for_input[r];
7968 mode = GET_MODE (reg);
7969 regno = REGNO (reg);
7970 nregs = hard_regno_nregs[regno][mode];
7971 if (reload_regs_reach_end_p (regno, nregs, rld[r].opnum,
7972 rld[r].when_needed))
7974 int in_regno;
7975 int in_nregs;
7976 rtx in;
7977 bool piecemeal;
7979 if (REG_P (rld[r].in)
7980 && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER)
7981 in = rld[r].in;
7982 else if (REG_P (rld[r].in_reg))
7983 in = rld[r].in_reg;
7984 else
7985 in = XEXP (rld[r].in_reg, 0);
7986 in_regno = REGNO (in);
7988 in_nregs = (!HARD_REGISTER_NUM_P (in_regno) ? 1
7989 : hard_regno_nregs[in_regno][mode]);
7991 reg_last_reload_reg[in_regno] = reg;
7993 piecemeal = (HARD_REGISTER_NUM_P (in_regno)
7994 && nregs == in_nregs
7995 && inherit_piecemeal_p (regno, in_regno, mode));
7997 if (HARD_REGISTER_NUM_P (in_regno))
7998 for (k = 1; k < in_nregs; k++)
7999 reg_last_reload_reg[in_regno + k]
8000 = (piecemeal ? regno_reg_rtx[regno + k] : 0);
8002 /* Unless we inherited this reload, show we haven't
8003 recently done a store.
8004 Previous stores of inherited auto_inc expressions
8005 also have to be discarded. */
8006 if (! reload_inherited[r]
8007 || (rld[r].out && ! rld[r].out_reg))
8008 spill_reg_store[regno] = 0;
8010 for (k = 0; k < nregs; k++)
8012 CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
8013 reg_reloaded_contents[regno + k]
8014 = (!HARD_REGISTER_NUM_P (in_regno) || !piecemeal
8015 ? in_regno
8016 : in_regno + k);
8017 reg_reloaded_insn[regno + k] = insn;
8018 SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
8019 if (HARD_REGNO_CALL_PART_CLOBBERED (regno + k, mode))
8020 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8021 regno + k);
8022 else
8023 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8024 regno + k);
8030 /* The following if-statement was #if 0'd in 1.34 (or before...).
8031 It's reenabled in 1.35 because supposedly nothing else
8032 deals with this problem. */
8034 /* If a register gets output-reloaded from a non-spill register,
8035 that invalidates any previous reloaded copy of it.
8036 But forget_old_reloads_1 won't get to see it, because
8037 it thinks only about the original insn. So invalidate it here.
8038 Also do the same thing for RELOAD_OTHER constraints where the
8039 output is discarded. */
8040 if (i < 0
8041 && ((rld[r].out != 0
8042 && (REG_P (rld[r].out)
8043 || (MEM_P (rld[r].out)
8044 && REG_P (rld[r].out_reg))))
8045 || (rld[r].out == 0 && rld[r].out_reg
8046 && REG_P (rld[r].out_reg))))
8048 rtx out = ((rld[r].out && REG_P (rld[r].out))
8049 ? rld[r].out : rld[r].out_reg);
8050 int out_regno = REGNO (out);
8051 enum machine_mode mode = GET_MODE (out);
8053 /* REG_RTX is now set or clobbered by the main instruction.
8054 As the comment above explains, forget_old_reloads_1 only
8055 sees the original instruction, and there is no guarantee
8056 that the original instruction also clobbered REG_RTX.
8057 For example, if find_reloads sees that the input side of
8058 a matched operand pair dies in this instruction, it may
8059 use the input register as the reload register.
8061 Calling forget_old_reloads_1 is a waste of effort if
8062 REG_RTX is also the output register.
8064 If we know that REG_RTX holds the value of a pseudo
8065 register, the code after the call will record that fact. */
8066 if (rld[r].reg_rtx && rld[r].reg_rtx != out)
8067 forget_old_reloads_1 (rld[r].reg_rtx, NULL_RTX, NULL);
8069 if (!HARD_REGISTER_NUM_P (out_regno))
8071 rtx src_reg, store_insn = NULL_RTX;
8073 reg_last_reload_reg[out_regno] = 0;
8075 /* If we can find a hard register that is stored, record
8076 the storing insn so that we may delete this insn with
8077 delete_output_reload. */
8078 src_reg = reload_reg_rtx_for_output[r];
8080 /* If this is an optional reload, try to find the source reg
8081 from an input reload. */
8082 if (! src_reg)
8084 rtx set = single_set (insn);
8085 if (set && SET_DEST (set) == rld[r].out)
8087 int k;
8089 src_reg = SET_SRC (set);
8090 store_insn = insn;
8091 for (k = 0; k < n_reloads; k++)
8093 if (rld[k].in == src_reg)
8095 src_reg = reload_reg_rtx_for_input[k];
8096 break;
8101 else
8102 store_insn = new_spill_reg_store[REGNO (src_reg)];
8103 if (src_reg && REG_P (src_reg)
8104 && REGNO (src_reg) < FIRST_PSEUDO_REGISTER)
8106 int src_regno, src_nregs, k;
8107 rtx note;
8109 gcc_assert (GET_MODE (src_reg) == mode);
8110 src_regno = REGNO (src_reg);
8111 src_nregs = hard_regno_nregs[src_regno][mode];
8112 /* The place where to find a death note varies with
8113 PRESERVE_DEATH_INFO_REGNO_P . The condition is not
8114 necessarily checked exactly in the code that moves
8115 notes, so just check both locations. */
8116 note = find_regno_note (insn, REG_DEAD, src_regno);
8117 if (! note && store_insn)
8118 note = find_regno_note (store_insn, REG_DEAD, src_regno);
8119 for (k = 0; k < src_nregs; k++)
8121 spill_reg_store[src_regno + k] = store_insn;
8122 spill_reg_stored_to[src_regno + k] = out;
8123 reg_reloaded_contents[src_regno + k] = out_regno;
8124 reg_reloaded_insn[src_regno + k] = store_insn;
8125 CLEAR_HARD_REG_BIT (reg_reloaded_dead, src_regno + k);
8126 SET_HARD_REG_BIT (reg_reloaded_valid, src_regno + k);
8127 if (HARD_REGNO_CALL_PART_CLOBBERED (src_regno + k,
8128 mode))
8129 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8130 src_regno + k);
8131 else
8132 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8133 src_regno + k);
8134 SET_HARD_REG_BIT (reg_is_output_reload, src_regno + k);
8135 if (note)
8136 SET_HARD_REG_BIT (reg_reloaded_died, src_regno);
8137 else
8138 CLEAR_HARD_REG_BIT (reg_reloaded_died, src_regno);
8140 reg_last_reload_reg[out_regno] = src_reg;
8141 /* We have to set reg_has_output_reload here, or else
8142 forget_old_reloads_1 will clear reg_last_reload_reg
8143 right away. */
8144 SET_REGNO_REG_SET (&reg_has_output_reload,
8145 out_regno);
8148 else
8150 int k, out_nregs = hard_regno_nregs[out_regno][mode];
8152 for (k = 0; k < out_nregs; k++)
8153 reg_last_reload_reg[out_regno + k] = 0;
8157 IOR_HARD_REG_SET (reg_reloaded_dead, reg_reloaded_died);
8160 /* Go through the motions to emit INSN and test if it is strictly valid.
8161 Return the emitted insn if valid, else return NULL. */
8163 static rtx
8164 emit_insn_if_valid_for_reload (rtx insn)
8166 rtx last = get_last_insn ();
8167 int code;
8169 insn = emit_insn (insn);
8170 code = recog_memoized (insn);
8172 if (code >= 0)
8174 extract_insn (insn);
8175 /* We want constrain operands to treat this insn strictly in its
8176 validity determination, i.e., the way it would after reload has
8177 completed. */
8178 if (constrain_operands (1))
8179 return insn;
8182 delete_insns_since (last);
8183 return NULL;
8186 /* Emit code to perform a reload from IN (which may be a reload register) to
8187 OUT (which may also be a reload register). IN or OUT is from operand
8188 OPNUM with reload type TYPE.
8190 Returns first insn emitted. */
8192 static rtx
8193 gen_reload (rtx out, rtx in, int opnum, enum reload_type type)
8195 rtx last = get_last_insn ();
8196 rtx tem;
8198 /* If IN is a paradoxical SUBREG, remove it and try to put the
8199 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
8200 if (GET_CODE (in) == SUBREG
8201 && (GET_MODE_SIZE (GET_MODE (in))
8202 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
8203 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
8204 in = SUBREG_REG (in), out = tem;
8205 else if (GET_CODE (out) == SUBREG
8206 && (GET_MODE_SIZE (GET_MODE (out))
8207 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
8208 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
8209 out = SUBREG_REG (out), in = tem;
8211 /* How to do this reload can get quite tricky. Normally, we are being
8212 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
8213 register that didn't get a hard register. In that case we can just
8214 call emit_move_insn.
8216 We can also be asked to reload a PLUS that adds a register or a MEM to
8217 another register, constant or MEM. This can occur during frame pointer
8218 elimination and while reloading addresses. This case is handled by
8219 trying to emit a single insn to perform the add. If it is not valid,
8220 we use a two insn sequence.
8222 Or we can be asked to reload an unary operand that was a fragment of
8223 an addressing mode, into a register. If it isn't recognized as-is,
8224 we try making the unop operand and the reload-register the same:
8225 (set reg:X (unop:X expr:Y))
8226 -> (set reg:Y expr:Y) (set reg:X (unop:X reg:Y)).
8228 Finally, we could be called to handle an 'o' constraint by putting
8229 an address into a register. In that case, we first try to do this
8230 with a named pattern of "reload_load_address". If no such pattern
8231 exists, we just emit a SET insn and hope for the best (it will normally
8232 be valid on machines that use 'o').
8234 This entire process is made complex because reload will never
8235 process the insns we generate here and so we must ensure that
8236 they will fit their constraints and also by the fact that parts of
8237 IN might be being reloaded separately and replaced with spill registers.
8238 Because of this, we are, in some sense, just guessing the right approach
8239 here. The one listed above seems to work.
8241 ??? At some point, this whole thing needs to be rethought. */
8243 if (GET_CODE (in) == PLUS
8244 && (REG_P (XEXP (in, 0))
8245 || GET_CODE (XEXP (in, 0)) == SUBREG
8246 || MEM_P (XEXP (in, 0)))
8247 && (REG_P (XEXP (in, 1))
8248 || GET_CODE (XEXP (in, 1)) == SUBREG
8249 || CONSTANT_P (XEXP (in, 1))
8250 || MEM_P (XEXP (in, 1))))
8252 /* We need to compute the sum of a register or a MEM and another
8253 register, constant, or MEM, and put it into the reload
8254 register. The best possible way of doing this is if the machine
8255 has a three-operand ADD insn that accepts the required operands.
8257 The simplest approach is to try to generate such an insn and see if it
8258 is recognized and matches its constraints. If so, it can be used.
8260 It might be better not to actually emit the insn unless it is valid,
8261 but we need to pass the insn as an operand to `recog' and
8262 `extract_insn' and it is simpler to emit and then delete the insn if
8263 not valid than to dummy things up. */
8265 rtx op0, op1, tem, insn;
8266 int code;
8268 op0 = find_replacement (&XEXP (in, 0));
8269 op1 = find_replacement (&XEXP (in, 1));
8271 /* Since constraint checking is strict, commutativity won't be
8272 checked, so we need to do that here to avoid spurious failure
8273 if the add instruction is two-address and the second operand
8274 of the add is the same as the reload reg, which is frequently
8275 the case. If the insn would be A = B + A, rearrange it so
8276 it will be A = A + B as constrain_operands expects. */
8278 if (REG_P (XEXP (in, 1))
8279 && REGNO (out) == REGNO (XEXP (in, 1)))
8280 tem = op0, op0 = op1, op1 = tem;
8282 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
8283 in = gen_rtx_PLUS (GET_MODE (in), op0, op1);
8285 insn = emit_insn_if_valid_for_reload (gen_rtx_SET (VOIDmode, out, in));
8286 if (insn)
8287 return insn;
8289 /* If that failed, we must use a conservative two-insn sequence.
8291 Use a move to copy one operand into the reload register. Prefer
8292 to reload a constant, MEM or pseudo since the move patterns can
8293 handle an arbitrary operand. If OP1 is not a constant, MEM or
8294 pseudo and OP1 is not a valid operand for an add instruction, then
8295 reload OP1.
8297 After reloading one of the operands into the reload register, add
8298 the reload register to the output register.
8300 If there is another way to do this for a specific machine, a
8301 DEFINE_PEEPHOLE should be specified that recognizes the sequence
8302 we emit below. */
8304 code = (int) optab_handler (add_optab, GET_MODE (out))->insn_code;
8306 if (CONSTANT_P (op1) || MEM_P (op1) || GET_CODE (op1) == SUBREG
8307 || (REG_P (op1)
8308 && REGNO (op1) >= FIRST_PSEUDO_REGISTER)
8309 || (code != CODE_FOR_nothing
8310 && ! ((*insn_data[code].operand[2].predicate)
8311 (op1, insn_data[code].operand[2].mode))))
8312 tem = op0, op0 = op1, op1 = tem;
8314 gen_reload (out, op0, opnum, type);
8316 /* If OP0 and OP1 are the same, we can use OUT for OP1.
8317 This fixes a problem on the 32K where the stack pointer cannot
8318 be used as an operand of an add insn. */
8320 if (rtx_equal_p (op0, op1))
8321 op1 = out;
8323 insn = emit_insn_if_valid_for_reload (gen_add2_insn (out, op1));
8324 if (insn)
8326 /* Add a REG_EQUIV note so that find_equiv_reg can find it. */
8327 set_unique_reg_note (insn, REG_EQUIV, in);
8328 return insn;
8331 /* If that failed, copy the address register to the reload register.
8332 Then add the constant to the reload register. */
8334 gcc_assert (!reg_overlap_mentioned_p (out, op0));
8335 gen_reload (out, op1, opnum, type);
8336 insn = emit_insn (gen_add2_insn (out, op0));
8337 set_unique_reg_note (insn, REG_EQUIV, in);
8340 #ifdef SECONDARY_MEMORY_NEEDED
8341 /* If we need a memory location to do the move, do it that way. */
8342 else if ((REG_P (in)
8343 || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
8344 && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER
8345 && (REG_P (out)
8346 || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
8347 && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
8348 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (reg_or_subregno (in)),
8349 REGNO_REG_CLASS (reg_or_subregno (out)),
8350 GET_MODE (out)))
8352 /* Get the memory to use and rewrite both registers to its mode. */
8353 rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
8355 if (GET_MODE (loc) != GET_MODE (out))
8356 out = gen_rtx_REG (GET_MODE (loc), REGNO (out));
8358 if (GET_MODE (loc) != GET_MODE (in))
8359 in = gen_rtx_REG (GET_MODE (loc), REGNO (in));
8361 gen_reload (loc, in, opnum, type);
8362 gen_reload (out, loc, opnum, type);
8364 #endif
8365 else if (REG_P (out) && UNARY_P (in))
8367 rtx insn;
8368 rtx op1;
8369 rtx out_moded;
8370 rtx set;
8372 op1 = find_replacement (&XEXP (in, 0));
8373 if (op1 != XEXP (in, 0))
8374 in = gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in), op1);
8376 /* First, try a plain SET. */
8377 set = emit_insn_if_valid_for_reload (gen_rtx_SET (VOIDmode, out, in));
8378 if (set)
8379 return set;
8381 /* If that failed, move the inner operand to the reload
8382 register, and try the same unop with the inner expression
8383 replaced with the reload register. */
8385 if (GET_MODE (op1) != GET_MODE (out))
8386 out_moded = gen_rtx_REG (GET_MODE (op1), REGNO (out));
8387 else
8388 out_moded = out;
8390 gen_reload (out_moded, op1, opnum, type);
8392 insn
8393 = gen_rtx_SET (VOIDmode, out,
8394 gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in),
8395 out_moded));
8396 insn = emit_insn_if_valid_for_reload (insn);
8397 if (insn)
8399 set_unique_reg_note (insn, REG_EQUIV, in);
8400 return insn;
8403 fatal_insn ("Failure trying to reload:", set);
8405 /* If IN is a simple operand, use gen_move_insn. */
8406 else if (OBJECT_P (in) || GET_CODE (in) == SUBREG)
8408 tem = emit_insn (gen_move_insn (out, in));
8409 /* IN may contain a LABEL_REF, if so add a REG_LABEL_OPERAND note. */
8410 mark_jump_label (in, tem, 0);
8413 #ifdef HAVE_reload_load_address
8414 else if (HAVE_reload_load_address)
8415 emit_insn (gen_reload_load_address (out, in));
8416 #endif
8418 /* Otherwise, just write (set OUT IN) and hope for the best. */
8419 else
8420 emit_insn (gen_rtx_SET (VOIDmode, out, in));
8422 /* Return the first insn emitted.
8423 We can not just return get_last_insn, because there may have
8424 been multiple instructions emitted. Also note that gen_move_insn may
8425 emit more than one insn itself, so we can not assume that there is one
8426 insn emitted per emit_insn_before call. */
8428 return last ? NEXT_INSN (last) : get_insns ();
8431 /* Delete a previously made output-reload whose result we now believe
8432 is not needed. First we double-check.
8434 INSN is the insn now being processed.
8435 LAST_RELOAD_REG is the hard register number for which we want to delete
8436 the last output reload.
8437 J is the reload-number that originally used REG. The caller has made
8438 certain that reload J doesn't use REG any longer for input.
8439 NEW_RELOAD_REG is reload register that reload J is using for REG. */
8441 static void
8442 delete_output_reload (rtx insn, int j, int last_reload_reg, rtx new_reload_reg)
8444 rtx output_reload_insn = spill_reg_store[last_reload_reg];
8445 rtx reg = spill_reg_stored_to[last_reload_reg];
8446 int k;
8447 int n_occurrences;
8448 int n_inherited = 0;
8449 rtx i1;
8450 rtx substed;
8452 /* It is possible that this reload has been only used to set another reload
8453 we eliminated earlier and thus deleted this instruction too. */
8454 if (INSN_DELETED_P (output_reload_insn))
8455 return;
8457 /* Get the raw pseudo-register referred to. */
8459 while (GET_CODE (reg) == SUBREG)
8460 reg = SUBREG_REG (reg);
8461 substed = reg_equiv_memory_loc[REGNO (reg)];
8463 /* This is unsafe if the operand occurs more often in the current
8464 insn than it is inherited. */
8465 for (k = n_reloads - 1; k >= 0; k--)
8467 rtx reg2 = rld[k].in;
8468 if (! reg2)
8469 continue;
8470 if (MEM_P (reg2) || reload_override_in[k])
8471 reg2 = rld[k].in_reg;
8472 #ifdef AUTO_INC_DEC
8473 if (rld[k].out && ! rld[k].out_reg)
8474 reg2 = XEXP (rld[k].in_reg, 0);
8475 #endif
8476 while (GET_CODE (reg2) == SUBREG)
8477 reg2 = SUBREG_REG (reg2);
8478 if (rtx_equal_p (reg2, reg))
8480 if (reload_inherited[k] || reload_override_in[k] || k == j)
8481 n_inherited++;
8482 else
8483 return;
8486 n_occurrences = count_occurrences (PATTERN (insn), reg, 0);
8487 if (CALL_P (insn) && CALL_INSN_FUNCTION_USAGE (insn))
8488 n_occurrences += count_occurrences (CALL_INSN_FUNCTION_USAGE (insn),
8489 reg, 0);
8490 if (substed)
8491 n_occurrences += count_occurrences (PATTERN (insn),
8492 eliminate_regs (substed, VOIDmode,
8493 NULL_RTX), 0);
8494 for (i1 = reg_equiv_alt_mem_list[REGNO (reg)]; i1; i1 = XEXP (i1, 1))
8496 gcc_assert (!rtx_equal_p (XEXP (i1, 0), substed));
8497 n_occurrences += count_occurrences (PATTERN (insn), XEXP (i1, 0), 0);
8499 if (n_occurrences > n_inherited)
8500 return;
8502 /* If the pseudo-reg we are reloading is no longer referenced
8503 anywhere between the store into it and here,
8504 and we're within the same basic block, then the value can only
8505 pass through the reload reg and end up here.
8506 Otherwise, give up--return. */
8507 for (i1 = NEXT_INSN (output_reload_insn);
8508 i1 != insn; i1 = NEXT_INSN (i1))
8510 if (NOTE_INSN_BASIC_BLOCK_P (i1))
8511 return;
8512 if ((NONJUMP_INSN_P (i1) || CALL_P (i1))
8513 && reg_mentioned_p (reg, PATTERN (i1)))
8515 /* If this is USE in front of INSN, we only have to check that
8516 there are no more references than accounted for by inheritance. */
8517 while (NONJUMP_INSN_P (i1) && GET_CODE (PATTERN (i1)) == USE)
8519 n_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
8520 i1 = NEXT_INSN (i1);
8522 if (n_occurrences <= n_inherited && i1 == insn)
8523 break;
8524 return;
8528 /* We will be deleting the insn. Remove the spill reg information. */
8529 for (k = hard_regno_nregs[last_reload_reg][GET_MODE (reg)]; k-- > 0; )
8531 spill_reg_store[last_reload_reg + k] = 0;
8532 spill_reg_stored_to[last_reload_reg + k] = 0;
8535 /* The caller has already checked that REG dies or is set in INSN.
8536 It has also checked that we are optimizing, and thus some
8537 inaccuracies in the debugging information are acceptable.
8538 So we could just delete output_reload_insn. But in some cases
8539 we can improve the debugging information without sacrificing
8540 optimization - maybe even improving the code: See if the pseudo
8541 reg has been completely replaced with reload regs. If so, delete
8542 the store insn and forget we had a stack slot for the pseudo. */
8543 if (rld[j].out != rld[j].in
8544 && REG_N_DEATHS (REGNO (reg)) == 1
8545 && REG_N_SETS (REGNO (reg)) == 1
8546 && REG_BASIC_BLOCK (REGNO (reg)) >= NUM_FIXED_BLOCKS
8547 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
8549 rtx i2;
8551 /* We know that it was used only between here and the beginning of
8552 the current basic block. (We also know that the last use before
8553 INSN was the output reload we are thinking of deleting, but never
8554 mind that.) Search that range; see if any ref remains. */
8555 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
8557 rtx set = single_set (i2);
8559 /* Uses which just store in the pseudo don't count,
8560 since if they are the only uses, they are dead. */
8561 if (set != 0 && SET_DEST (set) == reg)
8562 continue;
8563 if (LABEL_P (i2)
8564 || JUMP_P (i2))
8565 break;
8566 if ((NONJUMP_INSN_P (i2) || CALL_P (i2))
8567 && reg_mentioned_p (reg, PATTERN (i2)))
8569 /* Some other ref remains; just delete the output reload we
8570 know to be dead. */
8571 delete_address_reloads (output_reload_insn, insn);
8572 delete_insn (output_reload_insn);
8573 return;
8577 /* Delete the now-dead stores into this pseudo. Note that this
8578 loop also takes care of deleting output_reload_insn. */
8579 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
8581 rtx set = single_set (i2);
8583 if (set != 0 && SET_DEST (set) == reg)
8585 delete_address_reloads (i2, insn);
8586 delete_insn (i2);
8588 if (LABEL_P (i2)
8589 || JUMP_P (i2))
8590 break;
8593 /* For the debugging info, say the pseudo lives in this reload reg. */
8594 reg_renumber[REGNO (reg)] = REGNO (new_reload_reg);
8595 if (ira_conflicts_p)
8596 /* Inform IRA about the change. */
8597 ira_mark_allocation_change (REGNO (reg));
8598 alter_reg (REGNO (reg), -1, false);
8600 else
8602 delete_address_reloads (output_reload_insn, insn);
8603 delete_insn (output_reload_insn);
8607 /* We are going to delete DEAD_INSN. Recursively delete loads of
8608 reload registers used in DEAD_INSN that are not used till CURRENT_INSN.
8609 CURRENT_INSN is being reloaded, so we have to check its reloads too. */
8610 static void
8611 delete_address_reloads (rtx dead_insn, rtx current_insn)
8613 rtx set = single_set (dead_insn);
8614 rtx set2, dst, prev, next;
8615 if (set)
8617 rtx dst = SET_DEST (set);
8618 if (MEM_P (dst))
8619 delete_address_reloads_1 (dead_insn, XEXP (dst, 0), current_insn);
8621 /* If we deleted the store from a reloaded post_{in,de}c expression,
8622 we can delete the matching adds. */
8623 prev = PREV_INSN (dead_insn);
8624 next = NEXT_INSN (dead_insn);
8625 if (! prev || ! next)
8626 return;
8627 set = single_set (next);
8628 set2 = single_set (prev);
8629 if (! set || ! set2
8630 || GET_CODE (SET_SRC (set)) != PLUS || GET_CODE (SET_SRC (set2)) != PLUS
8631 || !CONST_INT_P (XEXP (SET_SRC (set), 1))
8632 || !CONST_INT_P (XEXP (SET_SRC (set2), 1)))
8633 return;
8634 dst = SET_DEST (set);
8635 if (! rtx_equal_p (dst, SET_DEST (set2))
8636 || ! rtx_equal_p (dst, XEXP (SET_SRC (set), 0))
8637 || ! rtx_equal_p (dst, XEXP (SET_SRC (set2), 0))
8638 || (INTVAL (XEXP (SET_SRC (set), 1))
8639 != -INTVAL (XEXP (SET_SRC (set2), 1))))
8640 return;
8641 delete_related_insns (prev);
8642 delete_related_insns (next);
8645 /* Subfunction of delete_address_reloads: process registers found in X. */
8646 static void
8647 delete_address_reloads_1 (rtx dead_insn, rtx x, rtx current_insn)
8649 rtx prev, set, dst, i2;
8650 int i, j;
8651 enum rtx_code code = GET_CODE (x);
8653 if (code != REG)
8655 const char *fmt = GET_RTX_FORMAT (code);
8656 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8658 if (fmt[i] == 'e')
8659 delete_address_reloads_1 (dead_insn, XEXP (x, i), current_insn);
8660 else if (fmt[i] == 'E')
8662 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8663 delete_address_reloads_1 (dead_insn, XVECEXP (x, i, j),
8664 current_insn);
8667 return;
8670 if (spill_reg_order[REGNO (x)] < 0)
8671 return;
8673 /* Scan backwards for the insn that sets x. This might be a way back due
8674 to inheritance. */
8675 for (prev = PREV_INSN (dead_insn); prev; prev = PREV_INSN (prev))
8677 code = GET_CODE (prev);
8678 if (code == CODE_LABEL || code == JUMP_INSN)
8679 return;
8680 if (!INSN_P (prev))
8681 continue;
8682 if (reg_set_p (x, PATTERN (prev)))
8683 break;
8684 if (reg_referenced_p (x, PATTERN (prev)))
8685 return;
8687 if (! prev || INSN_UID (prev) < reload_first_uid)
8688 return;
8689 /* Check that PREV only sets the reload register. */
8690 set = single_set (prev);
8691 if (! set)
8692 return;
8693 dst = SET_DEST (set);
8694 if (!REG_P (dst)
8695 || ! rtx_equal_p (dst, x))
8696 return;
8697 if (! reg_set_p (dst, PATTERN (dead_insn)))
8699 /* Check if DST was used in a later insn -
8700 it might have been inherited. */
8701 for (i2 = NEXT_INSN (dead_insn); i2; i2 = NEXT_INSN (i2))
8703 if (LABEL_P (i2))
8704 break;
8705 if (! INSN_P (i2))
8706 continue;
8707 if (reg_referenced_p (dst, PATTERN (i2)))
8709 /* If there is a reference to the register in the current insn,
8710 it might be loaded in a non-inherited reload. If no other
8711 reload uses it, that means the register is set before
8712 referenced. */
8713 if (i2 == current_insn)
8715 for (j = n_reloads - 1; j >= 0; j--)
8716 if ((rld[j].reg_rtx == dst && reload_inherited[j])
8717 || reload_override_in[j] == dst)
8718 return;
8719 for (j = n_reloads - 1; j >= 0; j--)
8720 if (rld[j].in && rld[j].reg_rtx == dst)
8721 break;
8722 if (j >= 0)
8723 break;
8725 return;
8727 if (JUMP_P (i2))
8728 break;
8729 /* If DST is still live at CURRENT_INSN, check if it is used for
8730 any reload. Note that even if CURRENT_INSN sets DST, we still
8731 have to check the reloads. */
8732 if (i2 == current_insn)
8734 for (j = n_reloads - 1; j >= 0; j--)
8735 if ((rld[j].reg_rtx == dst && reload_inherited[j])
8736 || reload_override_in[j] == dst)
8737 return;
8738 /* ??? We can't finish the loop here, because dst might be
8739 allocated to a pseudo in this block if no reload in this
8740 block needs any of the classes containing DST - see
8741 spill_hard_reg. There is no easy way to tell this, so we
8742 have to scan till the end of the basic block. */
8744 if (reg_set_p (dst, PATTERN (i2)))
8745 break;
8748 delete_address_reloads_1 (prev, SET_SRC (set), current_insn);
8749 reg_reloaded_contents[REGNO (dst)] = -1;
8750 delete_insn (prev);
8753 /* Output reload-insns to reload VALUE into RELOADREG.
8754 VALUE is an autoincrement or autodecrement RTX whose operand
8755 is a register or memory location;
8756 so reloading involves incrementing that location.
8757 IN is either identical to VALUE, or some cheaper place to reload from.
8759 INC_AMOUNT is the number to increment or decrement by (always positive).
8760 This cannot be deduced from VALUE.
8762 Return the instruction that stores into RELOADREG. */
8764 static rtx
8765 inc_for_reload (rtx reloadreg, rtx in, rtx value, int inc_amount)
8767 /* REG or MEM to be copied and incremented. */
8768 rtx incloc = find_replacement (&XEXP (value, 0));
8769 /* Nonzero if increment after copying. */
8770 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC
8771 || GET_CODE (value) == POST_MODIFY);
8772 rtx last;
8773 rtx inc;
8774 rtx add_insn;
8775 int code;
8776 rtx store;
8777 rtx real_in = in == value ? incloc : in;
8779 /* No hard register is equivalent to this register after
8780 inc/dec operation. If REG_LAST_RELOAD_REG were nonzero,
8781 we could inc/dec that register as well (maybe even using it for
8782 the source), but I'm not sure it's worth worrying about. */
8783 if (REG_P (incloc))
8784 reg_last_reload_reg[REGNO (incloc)] = 0;
8786 if (GET_CODE (value) == PRE_MODIFY || GET_CODE (value) == POST_MODIFY)
8788 gcc_assert (GET_CODE (XEXP (value, 1)) == PLUS);
8789 inc = find_replacement (&XEXP (XEXP (value, 1), 1));
8791 else
8793 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
8794 inc_amount = -inc_amount;
8796 inc = GEN_INT (inc_amount);
8799 /* If this is post-increment, first copy the location to the reload reg. */
8800 if (post && real_in != reloadreg)
8801 emit_insn (gen_move_insn (reloadreg, real_in));
8803 if (in == value)
8805 /* See if we can directly increment INCLOC. Use a method similar to
8806 that in gen_reload. */
8808 last = get_last_insn ();
8809 add_insn = emit_insn (gen_rtx_SET (VOIDmode, incloc,
8810 gen_rtx_PLUS (GET_MODE (incloc),
8811 incloc, inc)));
8813 code = recog_memoized (add_insn);
8814 if (code >= 0)
8816 extract_insn (add_insn);
8817 if (constrain_operands (1))
8819 /* If this is a pre-increment and we have incremented the value
8820 where it lives, copy the incremented value to RELOADREG to
8821 be used as an address. */
8823 if (! post)
8824 emit_insn (gen_move_insn (reloadreg, incloc));
8826 return add_insn;
8829 delete_insns_since (last);
8832 /* If couldn't do the increment directly, must increment in RELOADREG.
8833 The way we do this depends on whether this is pre- or post-increment.
8834 For pre-increment, copy INCLOC to the reload register, increment it
8835 there, then save back. */
8837 if (! post)
8839 if (in != reloadreg)
8840 emit_insn (gen_move_insn (reloadreg, real_in));
8841 emit_insn (gen_add2_insn (reloadreg, inc));
8842 store = emit_insn (gen_move_insn (incloc, reloadreg));
8844 else
8846 /* Postincrement.
8847 Because this might be a jump insn or a compare, and because RELOADREG
8848 may not be available after the insn in an input reload, we must do
8849 the incrementation before the insn being reloaded for.
8851 We have already copied IN to RELOADREG. Increment the copy in
8852 RELOADREG, save that back, then decrement RELOADREG so it has
8853 the original value. */
8855 emit_insn (gen_add2_insn (reloadreg, inc));
8856 store = emit_insn (gen_move_insn (incloc, reloadreg));
8857 if (CONST_INT_P (inc))
8858 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-INTVAL (inc))));
8859 else
8860 emit_insn (gen_sub2_insn (reloadreg, inc));
8863 return store;
8866 #ifdef AUTO_INC_DEC
8867 static void
8868 add_auto_inc_notes (rtx insn, rtx x)
8870 enum rtx_code code = GET_CODE (x);
8871 const char *fmt;
8872 int i, j;
8874 if (code == MEM && auto_inc_p (XEXP (x, 0)))
8876 add_reg_note (insn, REG_INC, XEXP (XEXP (x, 0), 0));
8877 return;
8880 /* Scan all the operand sub-expressions. */
8881 fmt = GET_RTX_FORMAT (code);
8882 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8884 if (fmt[i] == 'e')
8885 add_auto_inc_notes (insn, XEXP (x, i));
8886 else if (fmt[i] == 'E')
8887 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8888 add_auto_inc_notes (insn, XVECEXP (x, i, j));
8891 #endif
8893 /* Copy EH notes from an insn to its reloads. */
8894 static void
8895 copy_eh_notes (rtx insn, rtx x)
8897 rtx eh_note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
8898 if (eh_note)
8900 for (; x != 0; x = NEXT_INSN (x))
8902 if (may_trap_p (PATTERN (x)))
8903 add_reg_note (x, REG_EH_REGION, XEXP (eh_note, 0));
8908 /* This is used by reload pass, that does emit some instructions after
8909 abnormal calls moving basic block end, but in fact it wants to emit
8910 them on the edge. Looks for abnormal call edges, find backward the
8911 proper call and fix the damage.
8913 Similar handle instructions throwing exceptions internally. */
8914 void
8915 fixup_abnormal_edges (void)
8917 bool inserted = false;
8918 basic_block bb;
8920 FOR_EACH_BB (bb)
8922 edge e;
8923 edge_iterator ei;
8925 /* Look for cases we are interested in - calls or instructions causing
8926 exceptions. */
8927 FOR_EACH_EDGE (e, ei, bb->succs)
8929 if (e->flags & EDGE_ABNORMAL_CALL)
8930 break;
8931 if ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
8932 == (EDGE_ABNORMAL | EDGE_EH))
8933 break;
8935 if (e && !CALL_P (BB_END (bb))
8936 && !can_throw_internal (BB_END (bb)))
8938 rtx insn;
8940 /* Get past the new insns generated. Allow notes, as the insns
8941 may be already deleted. */
8942 insn = BB_END (bb);
8943 while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
8944 && !can_throw_internal (insn)
8945 && insn != BB_HEAD (bb))
8946 insn = PREV_INSN (insn);
8948 if (CALL_P (insn) || can_throw_internal (insn))
8950 rtx stop, next;
8952 stop = NEXT_INSN (BB_END (bb));
8953 BB_END (bb) = insn;
8954 insn = NEXT_INSN (insn);
8956 FOR_EACH_EDGE (e, ei, bb->succs)
8957 if (e->flags & EDGE_FALLTHRU)
8958 break;
8960 while (insn && insn != stop)
8962 next = NEXT_INSN (insn);
8963 if (INSN_P (insn))
8965 delete_insn (insn);
8967 /* Sometimes there's still the return value USE.
8968 If it's placed after a trapping call (i.e. that
8969 call is the last insn anyway), we have no fallthru
8970 edge. Simply delete this use and don't try to insert
8971 on the non-existent edge. */
8972 if (GET_CODE (PATTERN (insn)) != USE)
8974 /* We're not deleting it, we're moving it. */
8975 INSN_DELETED_P (insn) = 0;
8976 PREV_INSN (insn) = NULL_RTX;
8977 NEXT_INSN (insn) = NULL_RTX;
8979 insert_insn_on_edge (insn, e);
8980 inserted = true;
8983 else if (!BARRIER_P (insn))
8984 set_block_for_insn (insn, NULL);
8985 insn = next;
8989 /* It may be that we don't find any such trapping insn. In this
8990 case we discovered quite late that the insn that had been
8991 marked as can_throw_internal in fact couldn't trap at all.
8992 So we should in fact delete the EH edges out of the block. */
8993 else
8994 purge_dead_edges (bb);
8998 /* We've possibly turned single trapping insn into multiple ones. */
8999 if (flag_non_call_exceptions)
9001 sbitmap blocks;
9002 blocks = sbitmap_alloc (last_basic_block);
9003 sbitmap_ones (blocks);
9004 find_many_sub_basic_blocks (blocks);
9005 sbitmap_free (blocks);
9008 if (inserted)
9009 commit_edge_insertions ();
9011 #ifdef ENABLE_CHECKING
9012 /* Verify that we didn't turn one trapping insn into many, and that
9013 we found and corrected all of the problems wrt fixups on the
9014 fallthru edge. */
9015 verify_flow_info ();
9016 #endif