/* Provenance (web-scrape residue, preserved):
   official-gcc.git / gcc / reload1.c
   blob 48191a2a00fbf102bf211d0887ffad4565eba849
   unrelated commit title captured by the scraper:
   "Retry rdrand if the carry flag isn't valid."  */
1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
27 #include "machmode.h"
28 #include "hard-reg-set.h"
29 #include "rtl-error.h"
30 #include "tm_p.h"
31 #include "obstack.h"
32 #include "insn-config.h"
33 #include "flags.h"
34 #include "function.h"
35 #include "expr.h"
36 #include "optabs.h"
37 #include "regs.h"
38 #include "addresses.h"
39 #include "basic-block.h"
40 #include "df.h"
41 #include "reload.h"
42 #include "recog.h"
43 #include "output.h"
44 #include "except.h"
45 #include "tree.h"
46 #include "ira.h"
47 #include "target.h"
48 #include "emit-rtl.h"
50 /* This file contains the reload pass of the compiler, which is
51 run after register allocation has been done. It checks that
52 each insn is valid (operands required to be in registers really
53 are in registers of the proper class) and fixes up invalid ones
54 by copying values temporarily into registers for the insns
55 that need them.
57 The results of register allocation are described by the vector
58 reg_renumber; the insns still contain pseudo regs, but reg_renumber
59 can be used to find which hard reg, if any, a pseudo reg is in.
61 The technique we always use is to free up a few hard regs that are
62 called ``reload regs'', and for each place where a pseudo reg
63 must be in a hard reg, copy it temporarily into one of the reload regs.
65 Reload regs are allocated locally for every instruction that needs
66 reloads. When there are pseudos which are allocated to a register that
67 has been chosen as a reload reg, such pseudos must be ``spilled''.
68 This means that they go to other hard regs, or to stack slots if no other
69 available hard regs can be found. Spilling can invalidate more
70 insns, requiring additional need for reloads, so we must keep checking
71 until the process stabilizes.
73 For machines with different classes of registers, we must keep track
74 of the register class needed for each reload, and make sure that
75 we allocate enough reload registers of each class.
77 The file reload.c contains the code that checks one insn for
78 validity and reports the reloads that it needs. This file
79 is in charge of scanning the entire rtl code, accumulating the
80 reload needs, spilling, assigning reload registers to use for
81 fixing up each insn, and generating the new insns to copy values
82 into the reload registers. */
84 /* During reload_as_needed, element N contains a REG rtx for the hard reg
85 into which reg N has been reloaded (perhaps for a previous insn). */
86 static rtx *reg_last_reload_reg;
88 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
89 for an output reload that stores into reg N. */
90 static regset_head reg_has_output_reload;
92 /* Indicates which hard regs are reload-registers for an output reload
93 in the current insn. */
94 static HARD_REG_SET reg_is_output_reload;
96 /* Element N is the constant value to which pseudo reg N is equivalent,
97 or zero if pseudo reg N is not equivalent to a constant.
98 find_reloads looks at this in order to replace pseudo reg N
99 with the constant it stands for. */
100 rtx *reg_equiv_constant;
102 /* Element N is an invariant value to which pseudo reg N is equivalent.
103 eliminate_regs_in_insn uses this to replace pseudos in particular
104 contexts. */
105 rtx *reg_equiv_invariant;
107 /* Element N is a memory location to which pseudo reg N is equivalent,
108 prior to any register elimination (such as frame pointer to stack
109 pointer). Depending on whether or not it is a valid address, this value
110 is transferred to either reg_equiv_address or reg_equiv_mem. */
111 rtx *reg_equiv_memory_loc;
113 /* We allocate reg_equiv_memory_loc inside a varray so that the garbage
114 collector can keep track of what is inside. */
115 VEC(rtx,gc) *reg_equiv_memory_loc_vec;
117 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
118 This is used when the address is not valid as a memory address
119 (because its displacement is too big for the machine.) */
120 rtx *reg_equiv_address;
122 /* Element N is the memory slot to which pseudo reg N is equivalent,
123 or zero if pseudo reg N is not equivalent to a memory slot. */
124 rtx *reg_equiv_mem;
126 /* Element N is an EXPR_LIST of REG_EQUIVs containing MEMs with
127 alternate representations of the location of pseudo reg N. */
128 rtx *reg_equiv_alt_mem_list;
130 /* Widest width in which each pseudo reg is referred to (via subreg). */
131 static unsigned int *reg_max_ref_width;
133 /* Element N is the list of insns that initialized reg N from its equivalent
134 constant or memory slot. */
135 rtx *reg_equiv_init;
136 int reg_equiv_init_size;
138 /* Vector to remember old contents of reg_renumber before spilling. */
139 static short *reg_old_renumber;
141 /* During reload_as_needed, element N contains the last pseudo regno reloaded
142 into hard register N. If that pseudo reg occupied more than one register,
143 reg_reloaded_contents points to that pseudo for each spill register in
144 use; all of these must remain set for an inheritance to occur. */
145 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
147 /* During reload_as_needed, element N contains the insn for which
148 hard register N was last used. Its contents are significant only
149 when reg_reloaded_valid is set for this register. */
150 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
152 /* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid. */
153 static HARD_REG_SET reg_reloaded_valid;
154 /* Indicate if the register was dead at the end of the reload.
155 This is only valid if reg_reloaded_contents is set and valid. */
156 static HARD_REG_SET reg_reloaded_dead;
158 /* Indicate whether the register's current value is one that is not
159 safe to retain across a call, even for registers that are normally
160 call-saved. This is only meaningful for members of reg_reloaded_valid. */
161 static HARD_REG_SET reg_reloaded_call_part_clobbered;
163 /* Number of spill-regs so far; number of valid elements of spill_regs. */
164 static int n_spills;
166 /* In parallel with spill_regs, contains REG rtx's for those regs.
167 Holds the last rtx used for any given reg, or 0 if it has never
168 been used for spilling yet. This rtx is reused, provided it has
169 the proper mode. */
170 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
172 /* In parallel with spill_regs, contains nonzero for a spill reg
173 that was stored after the last time it was used.
174 The precise value is the insn generated to do the store. */
175 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
177 /* This is the register that was stored with spill_reg_store. This is a
178 copy of reload_out / reload_out_reg when the value was stored; if
179 reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg. */
180 static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER];
182 /* This table is the inverse mapping of spill_regs:
183 indexed by hard reg number,
184 it contains the position of that reg in spill_regs,
185 or -1 for something that is not in spill_regs.
187 ?!? This is no longer accurate. */
188 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
190 /* This reg set indicates registers that can't be used as spill registers for
191 the currently processed insn. These are the hard registers which are live
192 during the insn, but not allocated to pseudos, as well as fixed
193 registers. */
194 static HARD_REG_SET bad_spill_regs;
196 /* These are the hard registers that can't be used as spill register for any
197 insn. This includes registers used for user variables and registers that
198 we can't eliminate. A register that appears in this set also can't be used
199 to retry register allocation. */
200 static HARD_REG_SET bad_spill_regs_global;
202 /* Describes order of use of registers for reloading
203 of spilled pseudo-registers. `n_spills' is the number of
204 elements that are actually valid; new ones are added at the end.
206 Both spill_regs and spill_reg_order are used on two occasions:
207 once during find_reload_regs, where they keep track of the spill registers
208 for a single insn, but also during reload_as_needed where they show all
209 the registers ever used by reload. For the latter case, the information
210 is calculated during finish_spills. */
211 static short spill_regs[FIRST_PSEUDO_REGISTER];
213 /* This vector of reg sets indicates, for each pseudo, which hard registers
214 may not be used for retrying global allocation because the register was
215 formerly spilled from one of them. If we allowed reallocating a pseudo to
216 a register that it was already allocated to, reload might not
217 terminate. */
218 static HARD_REG_SET *pseudo_previous_regs;
220 /* This vector of reg sets indicates, for each pseudo, which hard
221 registers may not be used for retrying global allocation because they
222 are used as spill registers during one of the insns in which the
223 pseudo is live. */
224 static HARD_REG_SET *pseudo_forbidden_regs;
226 /* All hard regs that have been used as spill registers for any insn are
227 marked in this set. */
228 static HARD_REG_SET used_spill_regs;
230 /* Index of last register assigned as a spill register. We allocate in
231 a round-robin fashion. */
232 static int last_spill_reg;
234 /* Nonzero if indirect addressing is supported on the machine; this means
235 that spilling (REG n) does not require reloading it into a register in
236 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
237 value indicates the level of indirect addressing supported, e.g., two
238 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
239 a hard register. */
240 static char spill_indirect_levels;
242 /* Nonzero if indirect addressing is supported when the innermost MEM is
243 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
244 which these are valid is the same as spill_indirect_levels, above. */
245 char indirect_symref_ok;
247 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
248 char double_reg_address_ok;
250 /* Record the stack slot for each spilled hard register. */
251 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
253 /* Width allocated so far for that stack slot. */
254 static unsigned int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
256 /* Record which pseudos needed to be spilled. */
257 static regset_head spilled_pseudos;
259 /* Record which pseudos changed their allocation in finish_spills. */
260 static regset_head changed_allocation_pseudos;
262 /* Used for communication between order_regs_for_reload and count_pseudo.
263 Used to avoid counting one pseudo twice. */
264 static regset_head pseudos_counted;
266 /* First uid used by insns created by reload in this function.
267 Used in find_equiv_reg. */
268 int reload_first_uid;
270 /* Flag set by local-alloc or global-alloc if anything is live in
271 a call-clobbered reg across calls. */
272 int caller_save_needed;
274 /* Set to 1 while reload_as_needed is operating.
275 Required by some machines to handle any generated moves differently. */
276 int reload_in_progress = 0;
278 /* This obstack is used for allocation of rtl during register elimination.
279 The allocated storage can be freed once find_reloads has processed the
280 insn. */
281 static struct obstack reload_obstack;
283 /* Points to the beginning of the reload_obstack. All insn_chain structures
284 are allocated first. */
285 static char *reload_startobj;
287 /* The point after all insn_chain structures. Used to quickly deallocate
288 memory allocated in copy_reloads during calculate_needs_all_insns. */
289 static char *reload_firstobj;
291 /* This points before all local rtl generated by register elimination.
292 Used to quickly free all memory after processing one insn. */
293 static char *reload_insn_firstobj;
295 /* List of insn_chain instructions, one for every insn that reload needs to
296 examine. */
297 struct insn_chain *reload_insn_chain;
299 /* List of all insns needing reloads. */
300 static struct insn_chain *insns_need_reload;
302 /* This structure is used to record information about register eliminations.
303 Each array entry describes one possible way of eliminating a register
304 in favor of another. If there is more than one way of eliminating a
305 particular register, the most preferred should be specified first. */
307 struct elim_table
309 int from; /* Register number to be eliminated. */
310 int to; /* Register number used as replacement. */
311 HOST_WIDE_INT initial_offset; /* Initial difference between values. */
312 int can_eliminate; /* Nonzero if this elimination can be done. */
313 int can_eliminate_previous; /* Value returned by TARGET_CAN_ELIMINATE
314 target hook in previous scan over insns
315 made by reload. */
316 HOST_WIDE_INT offset; /* Current offset between the two regs. */
317 HOST_WIDE_INT previous_offset;/* Offset at end of previous insn. */
318 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
319 rtx from_rtx; /* REG rtx for the register to be eliminated.
320 We cannot simply compare the number since
321 we might then spuriously replace a hard
322 register corresponding to a pseudo
323 assigned to the reg to be eliminated. */
324 rtx to_rtx; /* REG rtx for the replacement. */
327 static struct elim_table *reg_eliminate = 0;
329 /* This is an intermediate structure to initialize the table. It has
330 exactly the members provided by ELIMINABLE_REGS. */
331 static const struct elim_table_1
333 const int from;
334 const int to;
335 } reg_eliminate_1[] =
337 /* If a set of eliminable registers was specified, define the table from it.
338 Otherwise, default to the normal case of the frame pointer being
339 replaced by the stack pointer. */
341 #ifdef ELIMINABLE_REGS
342 ELIMINABLE_REGS;
343 #else
344 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
345 #endif
347 #define NUM_ELIMINABLE_REGS ARRAY_SIZE (reg_eliminate_1)
349 /* Record the number of pending eliminations that have an offset not equal
350 to their initial offset. If nonzero, we use a new copy of each
351 replacement result in any insns encountered. */
352 int num_not_at_initial_offset;
354 /* Count the number of registers that we may be able to eliminate. */
355 static int num_eliminable;
356 /* And the number of registers that are equivalent to a constant that
357 can be eliminated to frame_pointer / arg_pointer + constant. */
358 static int num_eliminable_invariants;
360 /* For each label, we record the offset of each elimination. If we reach
361 a label by more than one path and an offset differs, we cannot do the
362 elimination. This information is indexed by the difference of the
363 number of the label and the first label number. We can't offset the
364 pointer itself as this can cause problems on machines with segmented
365 memory. The first table is an array of flags that records whether we
366 have yet encountered a label and the second table is an array of arrays,
367 one entry in the latter array for each elimination. */
369 static int first_label_num;
370 static char *offsets_known_at;
371 static HOST_WIDE_INT (*offsets_at)[NUM_ELIMINABLE_REGS];
373 /* Stack of addresses where an rtx has been changed. We can undo the
374 changes by popping items off the stack and restoring the original
375 value at each location.
377 We use this simplistic undo capability rather than copy_rtx as copy_rtx
378 will not make a deep copy of a normally sharable rtx, such as
379 (const (plus (symbol_ref) (const_int))). If such an expression appears
380 as R1 in gen_reload_chain_without_interm_reg_p, then a shared
381 rtx expression would be changed. See PR 42431. */
383 typedef rtx *rtx_p;
384 DEF_VEC_P(rtx_p);
385 DEF_VEC_ALLOC_P(rtx_p,heap);
386 static VEC(rtx_p,heap) *substitute_stack;
388 /* Number of labels in the current function. */
390 static int num_labels;
392 static void replace_pseudos_in (rtx *, enum machine_mode, rtx);
393 static void maybe_fix_stack_asms (void);
394 static void copy_reloads (struct insn_chain *);
395 static void calculate_needs_all_insns (int);
396 static int find_reg (struct insn_chain *, int);
397 static void find_reload_regs (struct insn_chain *);
398 static void select_reload_regs (void);
399 static void delete_caller_save_insns (void);
401 static void spill_failure (rtx, enum reg_class);
402 static void count_spilled_pseudo (int, int, int);
403 static void delete_dead_insn (rtx);
404 static void alter_reg (int, int, bool);
405 static void set_label_offsets (rtx, rtx, int);
406 static void check_eliminable_occurrences (rtx);
407 static void elimination_effects (rtx, enum machine_mode);
408 static rtx eliminate_regs_1 (rtx, enum machine_mode, rtx, bool, bool);
409 static int eliminate_regs_in_insn (rtx, int);
410 static void update_eliminable_offsets (void);
411 static void mark_not_eliminable (rtx, const_rtx, void *);
412 static void set_initial_elim_offsets (void);
413 static bool verify_initial_elim_offsets (void);
414 static void set_initial_label_offsets (void);
415 static void set_offsets_for_label (rtx);
416 static void init_eliminable_invariants (rtx, bool);
417 static void init_elim_table (void);
418 static void free_reg_equiv (void);
419 static void update_eliminables (HARD_REG_SET *);
420 static void elimination_costs_in_insn (rtx);
421 static void spill_hard_reg (unsigned int, int);
422 static int finish_spills (int);
423 static void scan_paradoxical_subregs (rtx);
424 static void count_pseudo (int);
425 static void order_regs_for_reload (struct insn_chain *);
426 static void reload_as_needed (int);
427 static void forget_old_reloads_1 (rtx, const_rtx, void *);
428 static void forget_marked_reloads (regset);
429 static int reload_reg_class_lower (const void *, const void *);
430 static void mark_reload_reg_in_use (unsigned int, int, enum reload_type,
431 enum machine_mode);
432 static void clear_reload_reg_in_use (unsigned int, int, enum reload_type,
433 enum machine_mode);
434 static int reload_reg_free_p (unsigned int, int, enum reload_type);
435 static int reload_reg_free_for_value_p (int, int, int, enum reload_type,
436 rtx, rtx, int, int);
437 static int free_for_value_p (int, enum machine_mode, int, enum reload_type,
438 rtx, rtx, int, int);
439 static int reload_reg_reaches_end_p (unsigned int, int, enum reload_type);
440 static int allocate_reload_reg (struct insn_chain *, int, int);
441 static int conflicts_with_override (rtx);
442 static void failed_reload (rtx, int);
443 static int set_reload_reg (int, int);
444 static void choose_reload_regs_init (struct insn_chain *, rtx *);
445 static void choose_reload_regs (struct insn_chain *);
446 static void merge_assigned_reloads (rtx);
447 static void emit_input_reload_insns (struct insn_chain *, struct reload *,
448 rtx, int);
449 static void emit_output_reload_insns (struct insn_chain *, struct reload *,
450 int);
451 static void do_input_reload (struct insn_chain *, struct reload *, int);
452 static void do_output_reload (struct insn_chain *, struct reload *, int);
453 static void emit_reload_insns (struct insn_chain *);
454 static void delete_output_reload (rtx, int, int, rtx);
455 static void delete_address_reloads (rtx, rtx);
456 static void delete_address_reloads_1 (rtx, rtx, rtx);
457 static rtx inc_for_reload (rtx, rtx, rtx, int);
458 #ifdef AUTO_INC_DEC
459 static void add_auto_inc_notes (rtx, rtx);
460 #endif
461 static void substitute (rtx *, const_rtx, rtx);
462 static bool gen_reload_chain_without_interm_reg_p (int, int);
463 static int reloads_conflict (int, int);
464 static rtx gen_reload (rtx, rtx, int, enum reload_type);
465 static rtx emit_insn_if_valid_for_reload (rtx);
467 /* Initialize the reload pass. This is called at the beginning of compilation
468 and may be called again if the target is reinitialized. */
470 void
471 init_reload (void)
473 int i;
475 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
476 Set spill_indirect_levels to the number of levels such addressing is
477 permitted, zero if it is not permitted at all. */
479 rtx tem
480 = gen_rtx_MEM (Pmode,
481 gen_rtx_PLUS (Pmode,
482 gen_rtx_REG (Pmode,
483 LAST_VIRTUAL_REGISTER + 1),
484 GEN_INT (4)));
485 spill_indirect_levels = 0;
487 while (memory_address_p (QImode, tem))
489 spill_indirect_levels++;
490 tem = gen_rtx_MEM (Pmode, tem);
493 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
495 tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
496 indirect_symref_ok = memory_address_p (QImode, tem);
498 /* See if reg+reg is a valid (and offsettable) address. */
500 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
502 tem = gen_rtx_PLUS (Pmode,
503 gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
504 gen_rtx_REG (Pmode, i));
506 /* This way, we make sure that reg+reg is an offsettable address. */
507 tem = plus_constant (tem, 4);
509 if (memory_address_p (QImode, tem))
511 double_reg_address_ok = 1;
512 break;
516 /* Initialize obstack for our rtl allocation. */
517 gcc_obstack_init (&reload_obstack);
518 reload_startobj = XOBNEWVAR (&reload_obstack, char, 0);
520 INIT_REG_SET (&spilled_pseudos);
521 INIT_REG_SET (&changed_allocation_pseudos);
522 INIT_REG_SET (&pseudos_counted);
525 /* List of insn chains that are currently unused. */
526 static struct insn_chain *unused_insn_chains = 0;
528 /* Allocate an empty insn_chain structure. */
529 struct insn_chain *
530 new_insn_chain (void)
532 struct insn_chain *c;
534 if (unused_insn_chains == 0)
536 c = XOBNEW (&reload_obstack, struct insn_chain);
537 INIT_REG_SET (&c->live_throughout);
538 INIT_REG_SET (&c->dead_or_set);
540 else
542 c = unused_insn_chains;
543 unused_insn_chains = c->next;
545 c->is_caller_save_insn = 0;
546 c->need_operand_change = 0;
547 c->need_reload = 0;
548 c->need_elim = 0;
549 return c;
552 /* Small utility function to set all regs in hard reg set TO which are
553 allocated to pseudos in regset FROM. */
555 void
556 compute_use_by_pseudos (HARD_REG_SET *to, regset from)
558 unsigned int regno;
559 reg_set_iterator rsi;
561 EXECUTE_IF_SET_IN_REG_SET (from, FIRST_PSEUDO_REGISTER, regno, rsi)
563 int r = reg_renumber[regno];
565 if (r < 0)
567 /* reload_combine uses the information from DF_LIVE_IN,
568 which might still contain registers that have not
569 actually been allocated since they have an
570 equivalence. */
571 gcc_assert (ira_conflicts_p || reload_completed);
573 else
574 add_to_hard_reg_set (to, PSEUDO_REGNO_MODE (regno), r);
578 /* Replace all pseudos found in LOC with their corresponding
579 equivalences. */
581 static void
582 replace_pseudos_in (rtx *loc, enum machine_mode mem_mode, rtx usage)
584 rtx x = *loc;
585 enum rtx_code code;
586 const char *fmt;
587 int i, j;
589 if (! x)
590 return;
592 code = GET_CODE (x);
593 if (code == REG)
595 unsigned int regno = REGNO (x);
597 if (regno < FIRST_PSEUDO_REGISTER)
598 return;
600 x = eliminate_regs (x, mem_mode, usage);
601 if (x != *loc)
603 *loc = x;
604 replace_pseudos_in (loc, mem_mode, usage);
605 return;
608 if (reg_equiv_constant[regno])
609 *loc = reg_equiv_constant[regno];
610 else if (reg_equiv_mem[regno])
611 *loc = reg_equiv_mem[regno];
612 else if (reg_equiv_address[regno])
613 *loc = gen_rtx_MEM (GET_MODE (x), reg_equiv_address[regno]);
614 else
616 gcc_assert (!REG_P (regno_reg_rtx[regno])
617 || REGNO (regno_reg_rtx[regno]) != regno);
618 *loc = regno_reg_rtx[regno];
621 return;
623 else if (code == MEM)
625 replace_pseudos_in (& XEXP (x, 0), GET_MODE (x), usage);
626 return;
629 /* Process each of our operands recursively. */
630 fmt = GET_RTX_FORMAT (code);
631 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
632 if (*fmt == 'e')
633 replace_pseudos_in (&XEXP (x, i), mem_mode, usage);
634 else if (*fmt == 'E')
635 for (j = 0; j < XVECLEN (x, i); j++)
636 replace_pseudos_in (& XVECEXP (x, i, j), mem_mode, usage);
639 /* Determine if the current function has an exception receiver block
640 that reaches the exit block via non-exceptional edges */
642 static bool
643 has_nonexceptional_receiver (void)
645 edge e;
646 edge_iterator ei;
647 basic_block *tos, *worklist, bb;
649 /* If we're not optimizing, then just err on the safe side. */
650 if (!optimize)
651 return true;
653 /* First determine which blocks can reach exit via normal paths. */
654 tos = worklist = XNEWVEC (basic_block, n_basic_blocks + 1);
656 FOR_EACH_BB (bb)
657 bb->flags &= ~BB_REACHABLE;
659 /* Place the exit block on our worklist. */
660 EXIT_BLOCK_PTR->flags |= BB_REACHABLE;
661 *tos++ = EXIT_BLOCK_PTR;
663 /* Iterate: find everything reachable from what we've already seen. */
664 while (tos != worklist)
666 bb = *--tos;
668 FOR_EACH_EDGE (e, ei, bb->preds)
669 if (!(e->flags & EDGE_ABNORMAL))
671 basic_block src = e->src;
673 if (!(src->flags & BB_REACHABLE))
675 src->flags |= BB_REACHABLE;
676 *tos++ = src;
680 free (worklist);
682 /* Now see if there's a reachable block with an exceptional incoming
683 edge. */
684 FOR_EACH_BB (bb)
685 if (bb->flags & BB_REACHABLE)
686 FOR_EACH_EDGE (e, ei, bb->preds)
687 if (e->flags & EDGE_ABNORMAL)
688 return true;
690 /* No exceptional block reached exit unexceptionally. */
691 return false;
695 /* Global variables used by reload and its subroutines. */
697 /* The current basic block while in calculate_elim_costs_all_insns. */
698 static basic_block elim_bb;
700 /* Set during calculate_needs if an insn needs register elimination. */
701 static int something_needs_elimination;
702 /* Set during calculate_needs if an insn needs an operand changed. */
703 static int something_needs_operands_changed;
704 /* Set by alter_regs if we spilled a register to the stack. */
705 static bool something_was_spilled;
707 /* Nonzero means we couldn't get enough spill regs. */
708 static int failure;
710 /* Temporary array of pseudo-register number. */
711 static int *temp_pseudo_reg_arr;
713 /* Main entry point for the reload pass.
715 FIRST is the first insn of the function being compiled.
717 GLOBAL nonzero means we were called from global_alloc
718 and should attempt to reallocate any pseudoregs that we
719 displace from hard regs we will use for reloads.
720 If GLOBAL is zero, we do not have enough information to do that,
721 so any pseudo reg that is spilled must go to the stack.
723 Return value is nonzero if reload failed
724 and we must not do any more for this function. */
727 reload (rtx first, int global)
729 int i, n;
730 rtx insn;
731 struct elim_table *ep;
732 basic_block bb;
734 /* Make sure even insns with volatile mem refs are recognizable. */
735 init_recog ();
737 failure = 0;
739 reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
741 /* Make sure that the last insn in the chain
742 is not something that needs reloading. */
743 emit_note (NOTE_INSN_DELETED);
745 /* Enable find_equiv_reg to distinguish insns made by reload. */
746 reload_first_uid = get_max_uid ();
748 #ifdef SECONDARY_MEMORY_NEEDED
749 /* Initialize the secondary memory table. */
750 clear_secondary_mem ();
751 #endif
753 /* We don't have a stack slot for any spill reg yet. */
754 memset (spill_stack_slot, 0, sizeof spill_stack_slot);
755 memset (spill_stack_slot_width, 0, sizeof spill_stack_slot_width);
757 /* Initialize the save area information for caller-save, in case some
758 are needed. */
759 init_save_areas ();
761 /* Compute which hard registers are now in use
762 as homes for pseudo registers.
763 This is done here rather than (eg) in global_alloc
764 because this point is reached even if not optimizing. */
765 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
766 mark_home_live (i);
768 /* A function that has a nonlocal label that can reach the exit
769 block via non-exceptional paths must save all call-saved
770 registers. */
771 if (cfun->has_nonlocal_label
772 && has_nonexceptional_receiver ())
773 crtl->saves_all_registers = 1;
775 if (crtl->saves_all_registers)
776 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
777 if (! call_used_regs[i] && ! fixed_regs[i] && ! LOCAL_REGNO (i))
778 df_set_regs_ever_live (i, true);
780 reg_old_renumber = XCNEWVEC (short, max_regno);
781 memcpy (reg_old_renumber, reg_renumber, max_regno * sizeof (short));
782 pseudo_forbidden_regs = XNEWVEC (HARD_REG_SET, max_regno);
783 pseudo_previous_regs = XCNEWVEC (HARD_REG_SET, max_regno);
785 CLEAR_HARD_REG_SET (bad_spill_regs_global);
787 init_eliminable_invariants (first, true);
788 init_elim_table ();
790 /* Alter each pseudo-reg rtx to contain its hard reg number. Assign
791 stack slots to the pseudos that lack hard regs or equivalents.
792 Do not touch virtual registers. */
794 temp_pseudo_reg_arr = XNEWVEC (int, max_regno - LAST_VIRTUAL_REGISTER - 1);
795 for (n = 0, i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
796 temp_pseudo_reg_arr[n++] = i;
798 if (ira_conflicts_p)
799 /* Ask IRA to order pseudo-registers for better stack slot
800 sharing. */
801 ira_sort_regnos_for_alter_reg (temp_pseudo_reg_arr, n, reg_max_ref_width);
803 for (i = 0; i < n; i++)
804 alter_reg (temp_pseudo_reg_arr[i], -1, false);
806 /* If we have some registers we think can be eliminated, scan all insns to
807 see if there is an insn that sets one of these registers to something
808 other than itself plus a constant. If so, the register cannot be
809 eliminated. Doing this scan here eliminates an extra pass through the
810 main reload loop in the most common case where register elimination
811 cannot be done. */
812 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
813 if (INSN_P (insn))
814 note_stores (PATTERN (insn), mark_not_eliminable, NULL);
816 maybe_fix_stack_asms ();
818 insns_need_reload = 0;
819 something_needs_elimination = 0;
821 /* Initialize to -1, which means take the first spill register. */
822 last_spill_reg = -1;
824 /* Spill any hard regs that we know we can't eliminate. */
825 CLEAR_HARD_REG_SET (used_spill_regs);
826 /* There can be multiple ways to eliminate a register;
827 they should be listed adjacently.
828 Elimination for any register fails only if all possible ways fail. */
829 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; )
831 int from = ep->from;
832 int can_eliminate = 0;
835 can_eliminate |= ep->can_eliminate;
836 ep++;
838 while (ep < &reg_eliminate[NUM_ELIMINABLE_REGS] && ep->from == from);
839 if (! can_eliminate)
840 spill_hard_reg (from, 1);
843 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
844 if (frame_pointer_needed)
845 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, 1);
846 #endif
847 finish_spills (global);
849 /* From now on, we may need to generate moves differently. We may also
850 allow modifications of insns which cause them to not be recognized.
851 Any such modifications will be cleaned up during reload itself. */
852 reload_in_progress = 1;
854 /* This loop scans the entire function each go-round
855 and repeats until one repetition spills no additional hard regs. */
856 for (;;)
858 int something_changed;
859 int did_spill;
860 HOST_WIDE_INT starting_frame_size;
862 starting_frame_size = get_frame_size ();
863 something_was_spilled = false;
865 set_initial_elim_offsets ();
866 set_initial_label_offsets ();
868 /* For each pseudo register that has an equivalent location defined,
869 try to eliminate any eliminable registers (such as the frame pointer)
870 assuming initial offsets for the replacement register, which
871 is the normal case.
873 If the resulting location is directly addressable, substitute
874 the MEM we just got directly for the old REG.
876 If it is not addressable but is a constant or the sum of a hard reg
877 and constant, it is probably not addressable because the constant is
878 out of range, in that case record the address; we will generate
879 hairy code to compute the address in a register each time it is
880 needed. Similarly if it is a hard register, but one that is not
881 valid as an address register.
883 If the location is not addressable, but does not have one of the
884 above forms, assign a stack slot. We have to do this to avoid the
885 potential of producing lots of reloads if, e.g., a location involves
886 a pseudo that didn't get a hard register and has an equivalent memory
887 location that also involves a pseudo that didn't get a hard register.
889 Perhaps at some point we will improve reload_when_needed handling
890 so this problem goes away. But that's very hairy. */
892 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
893 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
895 rtx x = eliminate_regs (reg_equiv_memory_loc[i], VOIDmode,
896 NULL_RTX);
898 if (strict_memory_address_addr_space_p
899 (GET_MODE (regno_reg_rtx[i]), XEXP (x, 0),
900 MEM_ADDR_SPACE (x)))
901 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
902 else if (CONSTANT_P (XEXP (x, 0))
903 || (REG_P (XEXP (x, 0))
904 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
905 || (GET_CODE (XEXP (x, 0)) == PLUS
906 && REG_P (XEXP (XEXP (x, 0), 0))
907 && (REGNO (XEXP (XEXP (x, 0), 0))
908 < FIRST_PSEUDO_REGISTER)
909 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
910 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
911 else
913 /* Make a new stack slot. Then indicate that something
914 changed so we go back and recompute offsets for
915 eliminable registers because the allocation of memory
916 below might change some offset. reg_equiv_{mem,address}
917 will be set up for this pseudo on the next pass around
918 the loop. */
919 reg_equiv_memory_loc[i] = 0;
920 reg_equiv_init[i] = 0;
921 alter_reg (i, -1, true);
925 if (caller_save_needed)
926 setup_save_areas ();
928 /* If we allocated another stack slot, redo elimination bookkeeping. */
929 if (something_was_spilled || starting_frame_size != get_frame_size ())
930 continue;
931 if (starting_frame_size && crtl->stack_alignment_needed)
933 /* If we have a stack frame, we must align it now. The
934 stack size may be a part of the offset computation for
935 register elimination. So if this changes the stack size,
936 then repeat the elimination bookkeeping. We don't
937 realign when there is no stack, as that will cause a
938 stack frame when none is needed should
939 STARTING_FRAME_OFFSET not be already aligned to
940 STACK_BOUNDARY. */
941 assign_stack_local (BLKmode, 0, crtl->stack_alignment_needed);
942 if (starting_frame_size != get_frame_size ())
943 continue;
946 if (caller_save_needed)
948 save_call_clobbered_regs ();
949 /* That might have allocated new insn_chain structures. */
950 reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
953 calculate_needs_all_insns (global);
955 if (! ira_conflicts_p)
956 /* Don't do it for IRA. We need this info because we don't
957 change live_throughout and dead_or_set for chains when IRA
958 is used. */
959 CLEAR_REG_SET (&spilled_pseudos);
961 did_spill = 0;
963 something_changed = 0;
965 /* If we allocated any new memory locations, make another pass
966 since it might have changed elimination offsets. */
967 if (something_was_spilled || starting_frame_size != get_frame_size ())
968 something_changed = 1;
970 /* Even if the frame size remained the same, we might still have
971 changed elimination offsets, e.g. if find_reloads called
972 force_const_mem requiring the back end to allocate a constant
973 pool base register that needs to be saved on the stack. */
974 else if (!verify_initial_elim_offsets ())
975 something_changed = 1;
978 HARD_REG_SET to_spill;
979 CLEAR_HARD_REG_SET (to_spill);
980 update_eliminables (&to_spill);
981 AND_COMPL_HARD_REG_SET (used_spill_regs, to_spill);
983 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
984 if (TEST_HARD_REG_BIT (to_spill, i))
986 spill_hard_reg (i, 1);
987 did_spill = 1;
989 /* Regardless of the state of spills, if we previously had
990 a register that we thought we could eliminate, but now can
991 not eliminate, we must run another pass.
993 Consider pseudos which have an entry in reg_equiv_* which
994 reference an eliminable register. We must make another pass
995 to update reg_equiv_* so that we do not substitute in the
996 old value from when we thought the elimination could be
997 performed. */
998 something_changed = 1;
1002 select_reload_regs ();
1003 if (failure)
1004 goto failed;
1006 if (insns_need_reload != 0 || did_spill)
1007 something_changed |= finish_spills (global);
1009 if (! something_changed)
1010 break;
1012 if (caller_save_needed)
1013 delete_caller_save_insns ();
1015 obstack_free (&reload_obstack, reload_firstobj);
1018 /* If global-alloc was run, notify it of any register eliminations we have
1019 done. */
1020 if (global)
1021 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1022 if (ep->can_eliminate)
1023 mark_elimination (ep->from, ep->to);
1025 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1026 If that insn didn't set the register (i.e., it copied the register to
1027 memory), just delete that insn instead of the equivalencing insn plus
1028 anything now dead. If we call delete_dead_insn on that insn, we may
1029 delete the insn that actually sets the register if the register dies
1030 there and that is incorrect. */
1032 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1034 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0)
1036 rtx list;
1037 for (list = reg_equiv_init[i]; list; list = XEXP (list, 1))
1039 rtx equiv_insn = XEXP (list, 0);
1041 /* If we already deleted the insn or if it may trap, we can't
1042 delete it. The latter case shouldn't happen, but can
1043 if an insn has a variable address, gets a REG_EH_REGION
1044 note added to it, and then gets converted into a load
1045 from a constant address. */
1046 if (NOTE_P (equiv_insn)
1047 || can_throw_internal (equiv_insn))
1049 else if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn)))
1050 delete_dead_insn (equiv_insn);
1051 else
1052 SET_INSN_DELETED (equiv_insn);
1057 /* Use the reload registers where necessary
1058 by generating move instructions to move the must-be-register
1059 values into or out of the reload registers. */
1061 if (insns_need_reload != 0 || something_needs_elimination
1062 || something_needs_operands_changed)
1064 HOST_WIDE_INT old_frame_size = get_frame_size ();
1066 reload_as_needed (global);
1068 gcc_assert (old_frame_size == get_frame_size ());
1070 gcc_assert (verify_initial_elim_offsets ());
1073 /* If we were able to eliminate the frame pointer, show that it is no
1074 longer live at the start of any basic block. If it ls live by
1075 virtue of being in a pseudo, that pseudo will be marked live
1076 and hence the frame pointer will be known to be live via that
1077 pseudo. */
1079 if (! frame_pointer_needed)
1080 FOR_EACH_BB (bb)
1081 bitmap_clear_bit (df_get_live_in (bb), HARD_FRAME_POINTER_REGNUM);
1083 /* Come here (with failure set nonzero) if we can't get enough spill
1084 regs. */
1085 failed:
1087 CLEAR_REG_SET (&changed_allocation_pseudos);
1088 CLEAR_REG_SET (&spilled_pseudos);
1089 reload_in_progress = 0;
1091 /* Now eliminate all pseudo regs by modifying them into
1092 their equivalent memory references.
1093 The REG-rtx's for the pseudos are modified in place,
1094 so all insns that used to refer to them now refer to memory.
1096 For a reg that has a reg_equiv_address, all those insns
1097 were changed by reloading so that no insns refer to it any longer;
1098 but the DECL_RTL of a variable decl may refer to it,
1099 and if so this causes the debugging info to mention the variable. */
1101 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1103 rtx addr = 0;
1105 if (reg_equiv_mem[i])
1106 addr = XEXP (reg_equiv_mem[i], 0);
1108 if (reg_equiv_address[i])
1109 addr = reg_equiv_address[i];
1111 if (addr)
1113 if (reg_renumber[i] < 0)
1115 rtx reg = regno_reg_rtx[i];
1117 REG_USERVAR_P (reg) = 0;
1118 PUT_CODE (reg, MEM);
1119 XEXP (reg, 0) = addr;
1120 if (reg_equiv_memory_loc[i])
1121 MEM_COPY_ATTRIBUTES (reg, reg_equiv_memory_loc[i]);
1122 else
1124 MEM_IN_STRUCT_P (reg) = MEM_SCALAR_P (reg) = 0;
1125 MEM_ATTRS (reg) = 0;
1127 MEM_NOTRAP_P (reg) = 1;
1129 else if (reg_equiv_mem[i])
1130 XEXP (reg_equiv_mem[i], 0) = addr;
1133 /* We don't want complex addressing modes in debug insns
1134 if simpler ones will do, so delegitimize equivalences
1135 in debug insns. */
1136 if (MAY_HAVE_DEBUG_INSNS && reg_renumber[i] < 0)
1138 rtx reg = regno_reg_rtx[i];
1139 rtx equiv = 0;
1140 df_ref use, next;
1142 if (reg_equiv_constant[i])
1143 equiv = reg_equiv_constant[i];
1144 else if (reg_equiv_invariant[i])
1145 equiv = reg_equiv_invariant[i];
1146 else if (reg && MEM_P (reg))
1147 equiv = targetm.delegitimize_address (reg);
1148 else if (reg && REG_P (reg) && (int)REGNO (reg) != i)
1149 equiv = reg;
1151 if (equiv == reg)
1152 continue;
1154 for (use = DF_REG_USE_CHAIN (i); use; use = next)
1156 insn = DF_REF_INSN (use);
1158 /* Make sure the next ref is for a different instruction,
1159 so that we're not affected by the rescan. */
1160 next = DF_REF_NEXT_REG (use);
1161 while (next && DF_REF_INSN (next) == insn)
1162 next = DF_REF_NEXT_REG (next);
1164 if (DEBUG_INSN_P (insn))
1166 if (!equiv)
1168 INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
1169 df_insn_rescan_debug_internal (insn);
1171 else
1172 INSN_VAR_LOCATION_LOC (insn)
1173 = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (insn),
1174 reg, equiv);
1180 /* We must set reload_completed now since the cleanup_subreg_operands call
1181 below will re-recognize each insn and reload may have generated insns
1182 which are only valid during and after reload. */
1183 reload_completed = 1;
1185 /* Make a pass over all the insns and delete all USEs which we inserted
1186 only to tag a REG_EQUAL note on them. Remove all REG_DEAD and REG_UNUSED
1187 notes. Delete all CLOBBER insns, except those that refer to the return
1188 value and the special mem:BLK CLOBBERs added to prevent the scheduler
1189 from misarranging variable-array code, and simplify (subreg (reg))
1190 operands. Strip and regenerate REG_INC notes that may have been moved
1191 around. */
1193 for (insn = first; insn; insn = NEXT_INSN (insn))
1194 if (INSN_P (insn))
1196 rtx *pnote;
1198 if (CALL_P (insn))
1199 replace_pseudos_in (& CALL_INSN_FUNCTION_USAGE (insn),
1200 VOIDmode, CALL_INSN_FUNCTION_USAGE (insn));
1202 if ((GET_CODE (PATTERN (insn)) == USE
1203 /* We mark with QImode USEs introduced by reload itself. */
1204 && (GET_MODE (insn) == QImode
1205 || find_reg_note (insn, REG_EQUAL, NULL_RTX)))
1206 || (GET_CODE (PATTERN (insn)) == CLOBBER
1207 && (!MEM_P (XEXP (PATTERN (insn), 0))
1208 || GET_MODE (XEXP (PATTERN (insn), 0)) != BLKmode
1209 || (GET_CODE (XEXP (XEXP (PATTERN (insn), 0), 0)) != SCRATCH
1210 && XEXP (XEXP (PATTERN (insn), 0), 0)
1211 != stack_pointer_rtx))
1212 && (!REG_P (XEXP (PATTERN (insn), 0))
1213 || ! REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))))
1215 delete_insn (insn);
1216 continue;
1219 /* Some CLOBBERs may survive until here and still reference unassigned
1220 pseudos with const equivalent, which may in turn cause ICE in later
1221 passes if the reference remains in place. */
1222 if (GET_CODE (PATTERN (insn)) == CLOBBER)
1223 replace_pseudos_in (& XEXP (PATTERN (insn), 0),
1224 VOIDmode, PATTERN (insn));
1226 /* Discard obvious no-ops, even without -O. This optimization
1227 is fast and doesn't interfere with debugging. */
1228 if (NONJUMP_INSN_P (insn)
1229 && GET_CODE (PATTERN (insn)) == SET
1230 && REG_P (SET_SRC (PATTERN (insn)))
1231 && REG_P (SET_DEST (PATTERN (insn)))
1232 && (REGNO (SET_SRC (PATTERN (insn)))
1233 == REGNO (SET_DEST (PATTERN (insn)))))
1235 delete_insn (insn);
1236 continue;
1239 pnote = &REG_NOTES (insn);
1240 while (*pnote != 0)
1242 if (REG_NOTE_KIND (*pnote) == REG_DEAD
1243 || REG_NOTE_KIND (*pnote) == REG_UNUSED
1244 || REG_NOTE_KIND (*pnote) == REG_INC)
1245 *pnote = XEXP (*pnote, 1);
1246 else
1247 pnote = &XEXP (*pnote, 1);
1250 #ifdef AUTO_INC_DEC
1251 add_auto_inc_notes (insn, PATTERN (insn));
1252 #endif
1254 /* Simplify (subreg (reg)) if it appears as an operand. */
1255 cleanup_subreg_operands (insn);
1257 /* Clean up invalid ASMs so that they don't confuse later passes.
1258 See PR 21299. */
1259 if (asm_noperands (PATTERN (insn)) >= 0)
1261 extract_insn (insn);
1262 if (!constrain_operands (1))
1264 error_for_asm (insn,
1265 "%<asm%> operand has impossible constraints");
1266 delete_insn (insn);
1267 continue;
1272 /* If we are doing generic stack checking, give a warning if this
1273 function's frame size is larger than we expect. */
1274 if (flag_stack_check == GENERIC_STACK_CHECK)
1276 HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
1277 static int verbose_warned = 0;
1279 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1280 if (df_regs_ever_live_p (i) && ! fixed_regs[i] && call_used_regs[i])
1281 size += UNITS_PER_WORD;
1283 if (size > STACK_CHECK_MAX_FRAME_SIZE)
1285 warning (0, "frame size too large for reliable stack checking");
1286 if (! verbose_warned)
1288 warning (0, "try reducing the number of local variables");
1289 verbose_warned = 1;
1294 free (temp_pseudo_reg_arr);
1296 /* Indicate that we no longer have known memory locations or constants. */
1297 free_reg_equiv ();
1298 reg_equiv_init = 0;
1299 free (reg_max_ref_width);
1300 free (reg_old_renumber);
1301 free (pseudo_previous_regs);
1302 free (pseudo_forbidden_regs);
1304 CLEAR_HARD_REG_SET (used_spill_regs);
1305 for (i = 0; i < n_spills; i++)
1306 SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
1308 /* Free all the insn_chain structures at once. */
1309 obstack_free (&reload_obstack, reload_startobj);
1310 unused_insn_chains = 0;
1311 fixup_abnormal_edges ();
1313 /* Replacing pseudos with their memory equivalents might have
1314 created shared rtx. Subsequent passes would get confused
1315 by this, so unshare everything here. */
1316 unshare_all_rtl_again (first);
1318 #ifdef STACK_BOUNDARY
1319 /* init_emit has set the alignment of the hard frame pointer
1320 to STACK_BOUNDARY. It is very likely no longer valid if
1321 the hard frame pointer was used for register allocation. */
1322 if (!frame_pointer_needed)
1323 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = BITS_PER_UNIT;
1324 #endif
1326 VEC_free (rtx_p, heap, substitute_stack);
1328 return failure;
/* Yet another special case.  Unfortunately, reg-stack forces people to
   write incorrect clobbers in asm statements.  These clobbers must not
   cause the register to appear in bad_spill_regs, otherwise we'll call
   fatal_insn later.  We clear the corresponding regnos in the live
   register sets to avoid this.
   The whole thing is rather sick, I'm afraid.  */

static void
maybe_fix_stack_asms (void)
{
#ifdef STACK_REGS
  const char *constraints[MAX_RECOG_OPERANDS];
  enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
  struct insn_chain *chain;

  for (chain = reload_insn_chain; chain != 0; chain = chain->next)
    {
      int i, noperands;
      HARD_REG_SET clobbered, allowed;
      rtx pat;

      /* Only asm insns are of interest; clobbers can only appear in a
	 PARALLEL together with the ASM_OPERANDS.  */
      if (! INSN_P (chain->insn)
	  || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
	continue;
      pat = PATTERN (chain->insn);
      if (GET_CODE (pat) != PARALLEL)
	continue;

      CLEAR_HARD_REG_SET (clobbered);
      CLEAR_HARD_REG_SET (allowed);

      /* First, make a mask of all stack regs that are clobbered.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
	{
	  rtx t = XVECEXP (pat, 0, i);
	  if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
	    SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
	}

      /* Get the operand values and constraints out of the insn.  */
      decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc,
			   constraints, operand_mode, NULL);

      /* For every operand, see what registers are allowed.  */
      for (i = 0; i < noperands; i++)
	{
	  const char *p = constraints[i];
	  /* For every alternative, we compute the class of registers allowed
	     for reloading in CLS, and merge its contents into the reg set
	     ALLOWED.  */
	  int cls = (int) NO_REGS;

	  for (;;)
	    {
	      char c = *p;

	      if (c == '\0' || c == ',' || c == '#')
		{
		  /* End of one alternative - mark the regs in the current
		     class, and reset the class.  */
		  IOR_HARD_REG_SET (allowed, reg_class_contents[cls]);
		  cls = NO_REGS;
		  p++;
		  /* '#' introduces an alternative that is ignored here;
		     skip forward to the next ',' or end of string.  */
		  if (c == '#')
		    do {
		      c = *p++;
		    } while (c != '\0' && c != ',');
		  if (c == '\0')
		    break;
		  continue;
		}

	      switch (c)
		{
		/* Modifiers and non-register constraints contribute no
		   register class.  */
		case '=': case '+': case '*': case '%': case '?': case '!':
		case '0': case '1': case '2': case '3': case '4': case '<':
		case '>': case 'V': case 'o': case '&': case 'E': case 'F':
		case 's': case 'i': case 'n': case 'X': case 'I': case 'J':
		case 'K': case 'L': case 'M': case 'N': case 'O': case 'P':
		case TARGET_MEM_CONSTRAINT:
		  break;

		case 'p':
		  cls = (int) reg_class_subunion[cls]
		    [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
		  break;

		case 'g':
		case 'r':
		  cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
		  break;

		default:
		  if (EXTRA_ADDRESS_CONSTRAINT (c, p))
		    cls = (int) reg_class_subunion[cls]
		      [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
		  else
		    cls = (int) reg_class_subunion[cls]
		      [(int) REG_CLASS_FROM_CONSTRAINT (c, p)];
		}
	      /* Constraint letters may be multi-character; advance by the
		 full length of this one.  */
	      p += CONSTRAINT_LEN (c, p);
	    }
	}
      /* Those of the registers which are clobbered, but allowed by the
	 constraints, must be usable as reload registers.  So clear them
	 out of the life information.  */
      AND_HARD_REG_SET (allowed, clobbered);
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (TEST_HARD_REG_BIT (allowed, i))
	  {
	    CLEAR_REGNO_REG_SET (&chain->live_throughout, i);
	    CLEAR_REGNO_REG_SET (&chain->dead_or_set, i);
	  }
    }
#endif
}
/* Copy the global variables n_reloads and rld into the corresponding elts
   of CHAIN.  */

static void
copy_reloads (struct insn_chain *chain)
{
  /* Snapshot the reload descriptors computed by find_reloads into
     obstack-allocated storage owned by CHAIN.  */
  chain->n_reloads = n_reloads;
  chain->rld = XOBNEWVEC (&reload_obstack, struct reload, n_reloads);
  memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
  /* Reset the marker so per-insn temporary obstack data allocated after
     this point can be freed separately from the copy just made.  */
  reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
}
/* Walk the chain of insns, and determine for each whether it needs reloads
   and/or eliminations.  Build the corresponding insns_need_reload list, and
   set something_needs_elimination as appropriate.  */

static void
calculate_needs_all_insns (int global)
{
  struct insn_chain **pprev_reload = &insns_need_reload;
  struct insn_chain *chain, *next = 0;

  something_needs_elimination = 0;

  reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
  for (chain = reload_insn_chain; chain != 0; chain = next)
    {
      rtx insn = chain->insn;

      /* Fetch NEXT now, since this chain element may be unlinked and
	 recycled below when a no-op move is deleted.  */
      next = chain->next;

      /* Clear out the shortcuts.  */
      chain->n_reloads = 0;
      chain->need_elim = 0;
      chain->need_reload = 0;
      chain->need_operand_change = 0;

      /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
	 include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
	 what effects this has on the known offsets at labels.  */

      if (LABEL_P (insn) || JUMP_P (insn)
	  || (INSN_P (insn) && REG_NOTES (insn) != 0))
	set_label_offsets (insn, insn, 0);

      if (INSN_P (insn))
	{
	  rtx old_body = PATTERN (insn);
	  int old_code = INSN_CODE (insn);
	  rtx old_notes = REG_NOTES (insn);
	  int did_elimination = 0;
	  int operands_changed = 0;
	  rtx set = single_set (insn);

	  /* Skip insns that only set an equivalence.  */
	  if (set && REG_P (SET_DEST (set))
	      && reg_renumber[REGNO (SET_DEST (set))] < 0
	      && (reg_equiv_constant[REGNO (SET_DEST (set))]
		  || (reg_equiv_invariant[REGNO (SET_DEST (set))]))
	      && reg_equiv_init[REGNO (SET_DEST (set))])
	    continue;

	  /* If needed, eliminate any eliminable registers.  */
	  if (num_eliminable || num_eliminable_invariants)
	    did_elimination = eliminate_regs_in_insn (insn, 0);

	  /* Analyze the instruction.  */
	  operands_changed = find_reloads (insn, 0, spill_indirect_levels,
					   global, spill_reg_order);

	  /* If a no-op set needs more than one reload, this is likely
	     to be something that needs input address reloads.  We
	     can't get rid of this cleanly later, and it is of no use
	     anyway, so discard it now.
	     We only do this when expensive_optimizations is enabled,
	     since this complements reload inheritance / output
	     reload deletion, and it can make debugging harder.  */
	  if (flag_expensive_optimizations && n_reloads > 1)
	    {
	      rtx set = single_set (insn);
	      if (set
		  &&
		  ((SET_SRC (set) == SET_DEST (set)
		    && REG_P (SET_SRC (set))
		    && REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER)
		   || (REG_P (SET_SRC (set)) && REG_P (SET_DEST (set))
		       && reg_renumber[REGNO (SET_SRC (set))] < 0
		       && reg_renumber[REGNO (SET_DEST (set))] < 0
		       && reg_equiv_memory_loc[REGNO (SET_SRC (set))] != NULL
		       && reg_equiv_memory_loc[REGNO (SET_DEST (set))] != NULL
		       && rtx_equal_p (reg_equiv_memory_loc
				       [REGNO (SET_SRC (set))],
				       reg_equiv_memory_loc
				       [REGNO (SET_DEST (set))]))))
		{
		  if (ira_conflicts_p)
		    /* Inform IRA about the insn deletion.  */
		    ira_mark_memory_move_deletion (REGNO (SET_DEST (set)),
						   REGNO (SET_SRC (set)));
		  delete_insn (insn);
		  /* Delete it from the reload chain.  */
		  if (chain->prev)
		    chain->prev->next = next;
		  else
		    reload_insn_chain = next;
		  if (next)
		    next->prev = chain->prev;
		  chain->next = unused_insn_chains;
		  unused_insn_chains = chain;
		  continue;
		}
	    }
	  if (num_eliminable)
	    update_eliminable_offsets ();

	  /* Remember for later shortcuts which insns had any reloads or
	     register eliminations.  */
	  chain->need_elim = did_elimination;
	  chain->need_reload = n_reloads > 0;
	  chain->need_operand_change = operands_changed;

	  /* Discard any register replacements done.  */
	  if (did_elimination)
	    {
	      obstack_free (&reload_obstack, reload_insn_firstobj);
	      PATTERN (insn) = old_body;
	      INSN_CODE (insn) = old_code;
	      REG_NOTES (insn) = old_notes;
	      something_needs_elimination = 1;
	    }

	  something_needs_operands_changed |= operands_changed;

	  if (n_reloads != 0)
	    {
	      copy_reloads (chain);
	      *pprev_reload = chain;
	      pprev_reload = &chain->next_need_reload;
	    }
	}
    }
  /* Terminate the insns_need_reload list.  */
  *pprev_reload = 0;
}
/* This function is called from the register allocator to set up estimates
   for the cost of eliminating pseudos which have REG_EQUIV equivalences to
   an invariant.  The structure is similar to calculate_needs_all_insns.  */

void
calculate_elim_costs_all_insns (void)
{
  int *reg_equiv_init_cost;
  basic_block bb;
  int i;

  reg_equiv_init_cost = XCNEWVEC (int, max_regno);
  init_elim_table ();
  init_eliminable_invariants (get_insns (), false);

  set_initial_elim_offsets ();
  set_initial_label_offsets ();

  FOR_EACH_BB (bb)
    {
      rtx insn;
      elim_bb = bb;

      FOR_BB_INSNS (bb, insn)
	{
	  /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
	     include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
	     what effects this has on the known offsets at labels.  */

	  if (LABEL_P (insn) || JUMP_P (insn)
	      || (INSN_P (insn) && REG_NOTES (insn) != 0))
	    set_label_offsets (insn, insn, 0);

	  if (INSN_P (insn))
	    {
	      rtx set = single_set (insn);

	      /* Skip insns that only set an equivalence.  For those,
		 estimate the cost of keeping the equivalencing insn:
		 the cost of its (eliminated) source, weighted by the
		 block's execution frequency.  */
	      if (set && REG_P (SET_DEST (set))
		  && reg_renumber[REGNO (SET_DEST (set))] < 0
		  && (reg_equiv_constant[REGNO (SET_DEST (set))]
		      || (reg_equiv_invariant[REGNO (SET_DEST (set))])))
		{
		  unsigned regno = REGNO (SET_DEST (set));
		  rtx init = reg_equiv_init[regno];
		  if (init)
		    {
		      rtx t = eliminate_regs_1 (SET_SRC (set), VOIDmode, insn,
						false, true);
		      int cost = rtx_cost (t, SET,
					   optimize_bb_for_speed_p (bb));
		      int freq = REG_FREQ_FROM_BB (bb);

		      reg_equiv_init_cost[regno] = cost * freq;
		      continue;
		    }
		}
	      /* If needed, eliminate any eliminable registers.  */
	      if (num_eliminable || num_eliminable_invariants)
		elimination_costs_in_insn (insn);

	      if (num_eliminable)
		update_eliminable_offsets ();
	    }
	}
    }

  /* Report the accumulated costs to IRA.  A pseudo whose equivalence
     survives gets its initial-setting cost as an adjustment; one whose
     equivalence could not be used gets a zero adjustment.  */
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      if (reg_equiv_invariant[i])
	{
	  if (reg_equiv_init[i])
	    {
	      int cost = reg_equiv_init_cost[i];
	      if (dump_file)
		fprintf (dump_file,
			 "Reg %d has equivalence, initial gains %d\n", i, cost);
	      if (cost != 0)
		ira_adjust_equiv_reg_cost (i, cost);
	    }
	  else
	    {
	      if (dump_file)
		fprintf (dump_file,
			 "Reg %d had equivalence, but can't be eliminated\n",
			 i);
	      ira_adjust_equiv_reg_cost (i, 0);
	    }
	}
    }

  free_reg_equiv ();
  free (reg_equiv_init_cost);
}
1685 /* Comparison function for qsort to decide which of two reloads
1686 should be handled first. *P1 and *P2 are the reload numbers. */
1688 static int
1689 reload_reg_class_lower (const void *r1p, const void *r2p)
1691 int r1 = *(const short *) r1p, r2 = *(const short *) r2p;
1692 int t;
1694 /* Consider required reloads before optional ones. */
1695 t = rld[r1].optional - rld[r2].optional;
1696 if (t != 0)
1697 return t;
1699 /* Count all solitary classes before non-solitary ones. */
1700 t = ((reg_class_size[(int) rld[r2].rclass] == 1)
1701 - (reg_class_size[(int) rld[r1].rclass] == 1));
1702 if (t != 0)
1703 return t;
1705 /* Aside from solitaires, consider all multi-reg groups first. */
1706 t = rld[r2].nregs - rld[r1].nregs;
1707 if (t != 0)
1708 return t;
1710 /* Consider reloads in order of increasing reg-class number. */
1711 t = (int) rld[r1].rclass - (int) rld[r2].rclass;
1712 if (t != 0)
1713 return t;
1715 /* If reloads are equally urgent, sort by reload number,
1716 so that the results of qsort leave nothing to chance. */
1717 return r1 - r2;
/* The cost of spilling each hard reg.  */
static int spill_cost[FIRST_PSEUDO_REGISTER];

/* When spilling multiple hard registers, we use SPILL_COST for the first
   spilled hard reg and SPILL_ADD_COST for subsequent regs.  SPILL_ADD_COST
   is incremented only for the first hard reg of a multi-reg pseudo.  */
static int spill_add_cost[FIRST_PSEUDO_REGISTER];

/* Map of hard regno to pseudo regno currently occupying the hard
   reg (-1 when unoccupied; maintained by count_pseudo and
   count_spilled_pseudo).  */
static int hard_regno_to_pseudo_regno[FIRST_PSEUDO_REGISTER];
1732 /* Update the spill cost arrays, considering that pseudo REG is live. */
1734 static void
1735 count_pseudo (int reg)
1737 int freq = REG_FREQ (reg);
1738 int r = reg_renumber[reg];
1739 int nregs;
1741 if (REGNO_REG_SET_P (&pseudos_counted, reg)
1742 || REGNO_REG_SET_P (&spilled_pseudos, reg)
1743 /* Ignore spilled pseudo-registers which can be here only if IRA
1744 is used. */
1745 || (ira_conflicts_p && r < 0))
1746 return;
1748 SET_REGNO_REG_SET (&pseudos_counted, reg);
1750 gcc_assert (r >= 0);
1752 spill_add_cost[r] += freq;
1753 nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
1754 while (nregs-- > 0)
1756 hard_regno_to_pseudo_regno[r + nregs] = reg;
1757 spill_cost[r + nregs] += freq;
/* Calculate the SPILL_COST and SPILL_ADD_COST arrays and determine the
   contents of BAD_SPILL_REGS for the insn described by CHAIN.  */

static void
order_regs_for_reload (struct insn_chain *chain)
{
  unsigned i;
  HARD_REG_SET used_by_pseudos;
  HARD_REG_SET used_by_pseudos2;
  reg_set_iterator rsi;

  /* Fixed registers can never be used for spilling.  */
  COPY_HARD_REG_SET (bad_spill_regs, fixed_reg_set);

  /* Reset the per-insn cost arrays and the hard-reg occupancy map.  */
  memset (spill_cost, 0, sizeof spill_cost);
  memset (spill_add_cost, 0, sizeof spill_add_cost);
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    hard_regno_to_pseudo_regno[i] = -1;

  /* Count number of uses of each hard reg by pseudo regs allocated to it
     and then order them by decreasing use.  First exclude hard registers
     that are live in or across this insn.  */

  REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
  REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
  IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos);
  IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos2);

  /* Now find out which pseudos are allocated to it, and update
     hard_reg_n_uses.  */
  CLEAR_REG_SET (&pseudos_counted);

  EXECUTE_IF_SET_IN_REG_SET
    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
    {
      count_pseudo (i);
    }
  EXECUTE_IF_SET_IN_REG_SET
    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
    {
      count_pseudo (i);
    }
  CLEAR_REG_SET (&pseudos_counted);
}
/* Vector of reload-numbers showing the order in which the reloads should
   be processed (sorted via reload_reg_class_lower).  */
static short reload_order[MAX_RELOADS];

/* This is used to keep track of the spill regs used in one insn.  */
static HARD_REG_SET used_spill_regs_local;
/* We decided to spill hard register SPILLED, which has a size of
   SPILLED_NREGS.  Determine how pseudo REG, which is live during the insn,
   is affected.  We will add it to SPILLED_PSEUDOS if necessary, and we will
   update SPILL_COST/SPILL_ADD_COST.  */

static void
count_spilled_pseudo (int spilled, int spilled_nregs, int reg)
{
  int freq = REG_FREQ (reg);
  int r = reg_renumber[reg];
  int nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];

  /* Ignore spilled pseudo-registers which can be here only if IRA is
     used.  Also ignore pseudos already marked spilled, and pseudos
     whose hard-reg range [r, r+nregs) does not overlap the spilled
     range [spilled, spilled+spilled_nregs).  */
  if ((ira_conflicts_p && r < 0)
      || REGNO_REG_SET_P (&spilled_pseudos, reg)
      || spilled + spilled_nregs <= r || r + nregs <= spilled)
    return;

  SET_REGNO_REG_SET (&spilled_pseudos, reg);

  /* Undo the contribution count_pseudo made for this pseudo.  */
  spill_add_cost[r] -= freq;
  while (nregs-- > 0)
    {
      hard_regno_to_pseudo_regno[r + nregs] = -1;
      spill_cost[r + nregs] -= freq;
    }
}
1841 /* Find reload register to use for reload number ORDER. */
1843 static int
1844 find_reg (struct insn_chain *chain, int order)
1846 int rnum = reload_order[order];
1847 struct reload *rl = rld + rnum;
1848 int best_cost = INT_MAX;
1849 int best_reg = -1;
1850 unsigned int i, j, n;
1851 int k;
1852 HARD_REG_SET not_usable;
1853 HARD_REG_SET used_by_other_reload;
1854 reg_set_iterator rsi;
1855 static int regno_pseudo_regs[FIRST_PSEUDO_REGISTER];
1856 static int best_regno_pseudo_regs[FIRST_PSEUDO_REGISTER];
1858 COPY_HARD_REG_SET (not_usable, bad_spill_regs);
1859 IOR_HARD_REG_SET (not_usable, bad_spill_regs_global);
1860 IOR_COMPL_HARD_REG_SET (not_usable, reg_class_contents[rl->rclass]);
1862 CLEAR_HARD_REG_SET (used_by_other_reload);
1863 for (k = 0; k < order; k++)
1865 int other = reload_order[k];
1867 if (rld[other].regno >= 0 && reloads_conflict (other, rnum))
1868 for (j = 0; j < rld[other].nregs; j++)
1869 SET_HARD_REG_BIT (used_by_other_reload, rld[other].regno + j);
1872 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1874 #ifdef REG_ALLOC_ORDER
1875 unsigned int regno = reg_alloc_order[i];
1876 #else
1877 unsigned int regno = i;
1878 #endif
1880 if (! TEST_HARD_REG_BIT (not_usable, regno)
1881 && ! TEST_HARD_REG_BIT (used_by_other_reload, regno)
1882 && HARD_REGNO_MODE_OK (regno, rl->mode))
1884 int this_cost = spill_cost[regno];
1885 int ok = 1;
1886 unsigned int this_nregs = hard_regno_nregs[regno][rl->mode];
1888 for (j = 1; j < this_nregs; j++)
1890 this_cost += spill_add_cost[regno + j];
1891 if ((TEST_HARD_REG_BIT (not_usable, regno + j))
1892 || TEST_HARD_REG_BIT (used_by_other_reload, regno + j))
1893 ok = 0;
1895 if (! ok)
1896 continue;
1898 if (ira_conflicts_p)
1900 /* Ask IRA to find a better pseudo-register for
1901 spilling. */
1902 for (n = j = 0; j < this_nregs; j++)
1904 int r = hard_regno_to_pseudo_regno[regno + j];
1906 if (r < 0)
1907 continue;
1908 if (n == 0 || regno_pseudo_regs[n - 1] != r)
1909 regno_pseudo_regs[n++] = r;
1911 regno_pseudo_regs[n++] = -1;
1912 if (best_reg < 0
1913 || ira_better_spill_reload_regno_p (regno_pseudo_regs,
1914 best_regno_pseudo_regs,
1915 rl->in, rl->out,
1916 chain->insn))
1918 best_reg = regno;
1919 for (j = 0;; j++)
1921 best_regno_pseudo_regs[j] = regno_pseudo_regs[j];
1922 if (regno_pseudo_regs[j] < 0)
1923 break;
1926 continue;
1929 if (rl->in && REG_P (rl->in) && REGNO (rl->in) == regno)
1930 this_cost--;
1931 if (rl->out && REG_P (rl->out) && REGNO (rl->out) == regno)
1932 this_cost--;
1933 if (this_cost < best_cost
1934 /* Among registers with equal cost, prefer caller-saved ones, or
1935 use REG_ALLOC_ORDER if it is defined. */
1936 || (this_cost == best_cost
1937 #ifdef REG_ALLOC_ORDER
1938 && (inv_reg_alloc_order[regno]
1939 < inv_reg_alloc_order[best_reg])
1940 #else
1941 && call_used_regs[regno]
1942 && ! call_used_regs[best_reg]
1943 #endif
1946 best_reg = regno;
1947 best_cost = this_cost;
1951 if (best_reg == -1)
1952 return 0;
1954 if (dump_file)
1955 fprintf (dump_file, "Using reg %d for reload %d\n", best_reg, rnum);
1957 rl->nregs = hard_regno_nregs[best_reg][rl->mode];
1958 rl->regno = best_reg;
1960 EXECUTE_IF_SET_IN_REG_SET
1961 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, j, rsi)
1963 count_spilled_pseudo (best_reg, rl->nregs, j);
1966 EXECUTE_IF_SET_IN_REG_SET
1967 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, j, rsi)
1969 count_spilled_pseudo (best_reg, rl->nregs, j);
1972 for (i = 0; i < rl->nregs; i++)
1974 gcc_assert (spill_cost[best_reg + i] == 0);
1975 gcc_assert (spill_add_cost[best_reg + i] == 0);
1976 gcc_assert (hard_regno_to_pseudo_regno[best_reg + i] == -1);
1977 SET_HARD_REG_BIT (used_spill_regs_local, best_reg + i);
1979 return 1;
1982 /* Find more reload regs to satisfy the remaining need of an insn, which
1983 is given by CHAIN.
1984 Do it by ascending class number, since otherwise a reg
1985 might be spilled for a big class and might fail to count
1986 for a smaller class even though it belongs to that class. */
1988 static void
1989 find_reload_regs (struct insn_chain *chain)
1991 int i;
1993 /* In order to be certain of getting the registers we need,
1994 we must sort the reloads into order of increasing register class.
1995 Then our grabbing of reload registers will parallel the process
1996 that provided the reload registers. */
1997 for (i = 0; i < chain->n_reloads; i++)
1999 /* Show whether this reload already has a hard reg. */
2000 if (chain->rld[i].reg_rtx)
2002 int regno = REGNO (chain->rld[i].reg_rtx);
2003 chain->rld[i].regno = regno;
2004 chain->rld[i].nregs
2005 = hard_regno_nregs[regno][GET_MODE (chain->rld[i].reg_rtx)];
2007 else
2008 chain->rld[i].regno = -1;
2009 reload_order[i] = i;
2012 n_reloads = chain->n_reloads;
2013 memcpy (rld, chain->rld, n_reloads * sizeof (struct reload));
2015 CLEAR_HARD_REG_SET (used_spill_regs_local);
2017 if (dump_file)
2018 fprintf (dump_file, "Spilling for insn %d.\n", INSN_UID (chain->insn));
2020 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
2022 /* Compute the order of preference for hard registers to spill. */
2024 order_regs_for_reload (chain);
2026 for (i = 0; i < n_reloads; i++)
2028 int r = reload_order[i];
2030 /* Ignore reloads that got marked inoperative. */
2031 if ((rld[r].out != 0 || rld[r].in != 0 || rld[r].secondary_p)
2032 && ! rld[r].optional
2033 && rld[r].regno == -1)
2034 if (! find_reg (chain, i))
2036 if (dump_file)
2037 fprintf (dump_file, "reload failure for reload %d\n", r);
2038 spill_failure (chain->insn, rld[r].rclass);
2039 failure = 1;
2040 return;
2044 COPY_HARD_REG_SET (chain->used_spill_regs, used_spill_regs_local);
2045 IOR_HARD_REG_SET (used_spill_regs, used_spill_regs_local);
2047 memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
2050 static void
2051 select_reload_regs (void)
2053 struct insn_chain *chain;
2055 /* Try to satisfy the needs for each insn. */
2056 for (chain = insns_need_reload; chain != 0;
2057 chain = chain->next_need_reload)
2058 find_reload_regs (chain);
2061 /* Delete all insns that were inserted by emit_caller_save_insns during
2062 this iteration. */
2063 static void
2064 delete_caller_save_insns (void)
2066 struct insn_chain *c = reload_insn_chain;
2068 while (c != 0)
2070 while (c != 0 && c->is_caller_save_insn)
2072 struct insn_chain *next = c->next;
2073 rtx insn = c->insn;
2075 if (c == reload_insn_chain)
2076 reload_insn_chain = next;
2077 delete_insn (insn);
2079 if (next)
2080 next->prev = c->prev;
2081 if (c->prev)
2082 c->prev->next = next;
2083 c->next = unused_insn_chains;
2084 unused_insn_chains = c;
2085 c = next;
2087 if (c != 0)
2088 c = c->next;
2092 /* Handle the failure to find a register to spill.
2093 INSN should be one of the insns which needed this particular spill reg. */
2095 static void
2096 spill_failure (rtx insn, enum reg_class rclass)
2098 if (asm_noperands (PATTERN (insn)) >= 0)
2099 error_for_asm (insn, "can't find a register in class %qs while "
2100 "reloading %<asm%>",
2101 reg_class_names[rclass]);
2102 else
2104 error ("unable to find a register to spill in class %qs",
2105 reg_class_names[rclass]);
2107 if (dump_file)
2109 fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
2110 debug_reload_to_stream (dump_file);
2112 fatal_insn ("this is the insn:", insn);
2116 /* Delete an unneeded INSN and any previous insns who sole purpose is loading
2117 data that is dead in INSN. */
2119 static void
2120 delete_dead_insn (rtx insn)
2122 rtx prev = prev_real_insn (insn);
2123 rtx prev_dest;
2125 /* If the previous insn sets a register that dies in our insn, delete it
2126 too. */
2127 if (prev && GET_CODE (PATTERN (prev)) == SET
2128 && (prev_dest = SET_DEST (PATTERN (prev)), REG_P (prev_dest))
2129 && reg_mentioned_p (prev_dest, PATTERN (insn))
2130 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
2131 && ! side_effects_p (SET_SRC (PATTERN (prev))))
2132 delete_dead_insn (prev);
2134 SET_INSN_DELETED (insn);
2137 /* Modify the home of pseudo-reg I.
2138 The new home is present in reg_renumber[I].
2140 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2141 or it may be -1, meaning there is none or it is not relevant.
2142 This is used so that all pseudos spilled from a given hard reg
2143 can share one stack slot. */
2145 static void
2146 alter_reg (int i, int from_reg, bool dont_share_p)
2148 /* When outputting an inline function, this can happen
2149 for a reg that isn't actually used. */
2150 if (regno_reg_rtx[i] == 0)
2151 return;
2153 /* If the reg got changed to a MEM at rtl-generation time,
2154 ignore it. */
2155 if (!REG_P (regno_reg_rtx[i]))
2156 return;
2158 /* Modify the reg-rtx to contain the new hard reg
2159 number or else to contain its pseudo reg number. */
2160 SET_REGNO (regno_reg_rtx[i],
2161 reg_renumber[i] >= 0 ? reg_renumber[i] : i);
2163 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2164 allocate a stack slot for it. */
2166 if (reg_renumber[i] < 0
2167 && REG_N_REFS (i) > 0
2168 && reg_equiv_constant[i] == 0
2169 && (reg_equiv_invariant[i] == 0 || reg_equiv_init[i] == 0)
2170 && reg_equiv_memory_loc[i] == 0)
2172 rtx x = NULL_RTX;
2173 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2174 unsigned int inherent_size = PSEUDO_REGNO_BYTES (i);
2175 unsigned int inherent_align = GET_MODE_ALIGNMENT (mode);
2176 unsigned int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2177 unsigned int min_align = reg_max_ref_width[i] * BITS_PER_UNIT;
2178 int adjust = 0;
2180 something_was_spilled = true;
2182 if (ira_conflicts_p)
2184 /* Mark the spill for IRA. */
2185 SET_REGNO_REG_SET (&spilled_pseudos, i);
2186 if (!dont_share_p)
2187 x = ira_reuse_stack_slot (i, inherent_size, total_size);
2190 if (x)
2193 /* Each pseudo reg has an inherent size which comes from its own mode,
2194 and a total size which provides room for paradoxical subregs
2195 which refer to the pseudo reg in wider modes.
2197 We can use a slot already allocated if it provides both
2198 enough inherent space and enough total space.
2199 Otherwise, we allocate a new slot, making sure that it has no less
2200 inherent space, and no less total space, then the previous slot. */
2201 else if (from_reg == -1 || (!dont_share_p && ira_conflicts_p))
2203 rtx stack_slot;
2205 /* No known place to spill from => no slot to reuse. */
2206 x = assign_stack_local (mode, total_size,
2207 min_align > inherent_align
2208 || total_size > inherent_size ? -1 : 0);
2210 stack_slot = x;
2212 /* Cancel the big-endian correction done in assign_stack_local.
2213 Get the address of the beginning of the slot. This is so we
2214 can do a big-endian correction unconditionally below. */
2215 if (BYTES_BIG_ENDIAN)
2217 adjust = inherent_size - total_size;
2218 if (adjust)
2219 stack_slot
2220 = adjust_address_nv (x, mode_for_size (total_size
2221 * BITS_PER_UNIT,
2222 MODE_INT, 1),
2223 adjust);
2226 if (! dont_share_p && ira_conflicts_p)
2227 /* Inform IRA about allocation a new stack slot. */
2228 ira_mark_new_stack_slot (stack_slot, i, total_size);
2231 /* Reuse a stack slot if possible. */
2232 else if (spill_stack_slot[from_reg] != 0
2233 && spill_stack_slot_width[from_reg] >= total_size
2234 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2235 >= inherent_size)
2236 && MEM_ALIGN (spill_stack_slot[from_reg]) >= min_align)
2237 x = spill_stack_slot[from_reg];
2239 /* Allocate a bigger slot. */
2240 else
2242 /* Compute maximum size needed, both for inherent size
2243 and for total size. */
2244 rtx stack_slot;
2246 if (spill_stack_slot[from_reg])
2248 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2249 > inherent_size)
2250 mode = GET_MODE (spill_stack_slot[from_reg]);
2251 if (spill_stack_slot_width[from_reg] > total_size)
2252 total_size = spill_stack_slot_width[from_reg];
2253 if (MEM_ALIGN (spill_stack_slot[from_reg]) > min_align)
2254 min_align = MEM_ALIGN (spill_stack_slot[from_reg]);
2257 /* Make a slot with that size. */
2258 x = assign_stack_local (mode, total_size,
2259 min_align > inherent_align
2260 || total_size > inherent_size ? -1 : 0);
2261 stack_slot = x;
2263 /* Cancel the big-endian correction done in assign_stack_local.
2264 Get the address of the beginning of the slot. This is so we
2265 can do a big-endian correction unconditionally below. */
2266 if (BYTES_BIG_ENDIAN)
2268 adjust = GET_MODE_SIZE (mode) - total_size;
2269 if (adjust)
2270 stack_slot
2271 = adjust_address_nv (x, mode_for_size (total_size
2272 * BITS_PER_UNIT,
2273 MODE_INT, 1),
2274 adjust);
2277 spill_stack_slot[from_reg] = stack_slot;
2278 spill_stack_slot_width[from_reg] = total_size;
2281 /* On a big endian machine, the "address" of the slot
2282 is the address of the low part that fits its inherent mode. */
2283 if (BYTES_BIG_ENDIAN && inherent_size < total_size)
2284 adjust += (total_size - inherent_size);
2286 /* If we have any adjustment to make, or if the stack slot is the
2287 wrong mode, make a new stack slot. */
2288 x = adjust_address_nv (x, GET_MODE (regno_reg_rtx[i]), adjust);
2290 /* Set all of the memory attributes as appropriate for a spill. */
2291 set_mem_attrs_for_spill (x);
2293 /* Save the stack slot for later. */
2294 reg_equiv_memory_loc[i] = x;
2298 /* Mark the slots in regs_ever_live for the hard regs used by
2299 pseudo-reg number REGNO, accessed in MODE. */
2301 static void
2302 mark_home_live_1 (int regno, enum machine_mode mode)
2304 int i, lim;
2306 i = reg_renumber[regno];
2307 if (i < 0)
2308 return;
2309 lim = end_hard_regno (mode, i);
2310 while (i < lim)
2311 df_set_regs_ever_live(i++, true);
2314 /* Mark the slots in regs_ever_live for the hard regs
2315 used by pseudo-reg number REGNO. */
2317 void
2318 mark_home_live (int regno)
2320 if (reg_renumber[regno] >= 0)
2321 mark_home_live_1 (regno, PSEUDO_REGNO_MODE (regno));
2324 /* This function handles the tracking of elimination offsets around branches.
2326 X is a piece of RTL being scanned.
2328 INSN is the insn that it came from, if any.
2330 INITIAL_P is nonzero if we are to set the offset to be the initial
2331 offset and zero if we are setting the offset of the label to be the
2332 current offset. */
2334 static void
2335 set_label_offsets (rtx x, rtx insn, int initial_p)
2337 enum rtx_code code = GET_CODE (x);
2338 rtx tem;
2339 unsigned int i;
2340 struct elim_table *p;
2342 switch (code)
2344 case LABEL_REF:
2345 if (LABEL_REF_NONLOCAL_P (x))
2346 return;
2348 x = XEXP (x, 0);
2350 /* ... fall through ... */
2352 case CODE_LABEL:
2353 /* If we know nothing about this label, set the desired offsets. Note
2354 that this sets the offset at a label to be the offset before a label
2355 if we don't know anything about the label. This is not correct for
2356 the label after a BARRIER, but is the best guess we can make. If
2357 we guessed wrong, we will suppress an elimination that might have
2358 been possible had we been able to guess correctly. */
2360 if (! offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num])
2362 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2363 offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
2364 = (initial_p ? reg_eliminate[i].initial_offset
2365 : reg_eliminate[i].offset);
2366 offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num] = 1;
2369 /* Otherwise, if this is the definition of a label and it is
2370 preceded by a BARRIER, set our offsets to the known offset of
2371 that label. */
2373 else if (x == insn
2374 && (tem = prev_nonnote_insn (insn)) != 0
2375 && BARRIER_P (tem))
2376 set_offsets_for_label (insn);
2377 else
2378 /* If neither of the above cases is true, compare each offset
2379 with those previously recorded and suppress any eliminations
2380 where the offsets disagree. */
2382 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2383 if (offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
2384 != (initial_p ? reg_eliminate[i].initial_offset
2385 : reg_eliminate[i].offset))
2386 reg_eliminate[i].can_eliminate = 0;
2388 return;
2390 case JUMP_INSN:
2391 set_label_offsets (PATTERN (insn), insn, initial_p);
2393 /* ... fall through ... */
2395 case INSN:
2396 case CALL_INSN:
2397 /* Any labels mentioned in REG_LABEL_OPERAND notes can be branched
2398 to indirectly and hence must have all eliminations at their
2399 initial offsets. */
2400 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2401 if (REG_NOTE_KIND (tem) == REG_LABEL_OPERAND)
2402 set_label_offsets (XEXP (tem, 0), insn, 1);
2403 return;
2405 case PARALLEL:
2406 case ADDR_VEC:
2407 case ADDR_DIFF_VEC:
2408 /* Each of the labels in the parallel or address vector must be
2409 at their initial offsets. We want the first field for PARALLEL
2410 and ADDR_VEC and the second field for ADDR_DIFF_VEC. */
2412 for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2413 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2414 insn, initial_p);
2415 return;
2417 case SET:
2418 /* We only care about setting PC. If the source is not RETURN,
2419 IF_THEN_ELSE, or a label, disable any eliminations not at
2420 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2421 isn't one of those possibilities. For branches to a label,
2422 call ourselves recursively.
2424 Note that this can disable elimination unnecessarily when we have
2425 a non-local goto since it will look like a non-constant jump to
2426 someplace in the current function. This isn't a significant
2427 problem since such jumps will normally be when all elimination
2428 pairs are back to their initial offsets. */
2430 if (SET_DEST (x) != pc_rtx)
2431 return;
2433 switch (GET_CODE (SET_SRC (x)))
2435 case PC:
2436 case RETURN:
2437 return;
2439 case LABEL_REF:
2440 set_label_offsets (SET_SRC (x), insn, initial_p);
2441 return;
2443 case IF_THEN_ELSE:
2444 tem = XEXP (SET_SRC (x), 1);
2445 if (GET_CODE (tem) == LABEL_REF)
2446 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2447 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2448 break;
2450 tem = XEXP (SET_SRC (x), 2);
2451 if (GET_CODE (tem) == LABEL_REF)
2452 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2453 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2454 break;
2455 return;
2457 default:
2458 break;
2461 /* If we reach here, all eliminations must be at their initial
2462 offset because we are doing a jump to a variable address. */
2463 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2464 if (p->offset != p->initial_offset)
2465 p->can_eliminate = 0;
2466 break;
2468 default:
2469 break;
2473 /* Called through for_each_rtx, this function examines every reg that occurs
2474 in PX and adjusts the costs for its elimination which are gathered by IRA.
2475 DATA is the insn in which PX occurs. We do not recurse into MEM
2476 expressions. */
2478 static int
2479 note_reg_elim_costly (rtx *px, void *data)
2481 rtx insn = (rtx)data;
2482 rtx x = *px;
2484 if (MEM_P (x))
2485 return -1;
2487 if (REG_P (x)
2488 && REGNO (x) >= FIRST_PSEUDO_REGISTER
2489 && reg_equiv_init[REGNO (x)]
2490 && reg_equiv_invariant[REGNO (x)])
2492 rtx t = reg_equiv_invariant[REGNO (x)];
2493 rtx new_rtx = eliminate_regs_1 (t, Pmode, insn, true, true);
2494 int cost = rtx_cost (new_rtx, SET, optimize_bb_for_speed_p (elim_bb));
2495 int freq = REG_FREQ_FROM_BB (elim_bb);
2497 if (cost != 0)
2498 ira_adjust_equiv_reg_cost (REGNO (x), -cost * freq);
2500 return 0;
2503 /* Scan X and replace any eliminable registers (such as fp) with a
2504 replacement (such as sp), plus an offset.
2506 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2507 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2508 MEM, we are allowed to replace a sum of a register and the constant zero
2509 with the register, which we cannot do outside a MEM. In addition, we need
2510 to record the fact that a register is referenced outside a MEM.
2512 If INSN is an insn, it is the insn containing X. If we replace a REG
2513 in a SET_DEST with an equivalent MEM and INSN is nonzero, write a
2514 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2515 the REG is being modified.
2517 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2518 That's used when we eliminate in expressions stored in notes.
2519 This means, do not set ref_outside_mem even if the reference
2520 is outside of MEMs.
2522 If FOR_COSTS is true, we are being called before reload in order to
2523 estimate the costs of keeping registers with an equivalence unallocated.
2525 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2526 replacements done assuming all offsets are at their initial values. If
2527 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2528 encounter, return the actual location so that find_reloads will do
2529 the proper thing. */
2531 static rtx
2532 eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
2533 bool may_use_invariant, bool for_costs)
2535 enum rtx_code code = GET_CODE (x);
2536 struct elim_table *ep;
2537 int regno;
2538 rtx new_rtx;
2539 int i, j;
2540 const char *fmt;
2541 int copied = 0;
2543 if (! current_function_decl)
2544 return x;
2546 switch (code)
2548 case CONST_INT:
2549 case CONST_DOUBLE:
2550 case CONST_FIXED:
2551 case CONST_VECTOR:
2552 case CONST:
2553 case SYMBOL_REF:
2554 case CODE_LABEL:
2555 case PC:
2556 case CC0:
2557 case ASM_INPUT:
2558 case ADDR_VEC:
2559 case ADDR_DIFF_VEC:
2560 case RETURN:
2561 return x;
2563 case REG:
2564 regno = REGNO (x);
2566 /* First handle the case where we encounter a bare register that
2567 is eliminable. Replace it with a PLUS. */
2568 if (regno < FIRST_PSEUDO_REGISTER)
2570 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2571 ep++)
2572 if (ep->from_rtx == x && ep->can_eliminate)
2573 return plus_constant (ep->to_rtx, ep->previous_offset);
2576 else if (reg_renumber && reg_renumber[regno] < 0
2577 && reg_equiv_invariant && reg_equiv_invariant[regno])
2579 if (may_use_invariant || (insn && DEBUG_INSN_P (insn)))
2580 return eliminate_regs_1 (copy_rtx (reg_equiv_invariant[regno]),
2581 mem_mode, insn, true, for_costs);
2582 /* There exists at least one use of REGNO that cannot be
2583 eliminated. Prevent the defining insn from being deleted. */
2584 reg_equiv_init[regno] = NULL_RTX;
2585 if (!for_costs)
2586 alter_reg (regno, -1, true);
2588 return x;
2590 /* You might think handling MINUS in a manner similar to PLUS is a
2591 good idea. It is not. It has been tried multiple times and every
2592 time the change has had to have been reverted.
2594 Other parts of reload know a PLUS is special (gen_reload for example)
2595 and require special code to handle code a reloaded PLUS operand.
2597 Also consider backends where the flags register is clobbered by a
2598 MINUS, but we can emit a PLUS that does not clobber flags (IA-32,
2599 lea instruction comes to mind). If we try to reload a MINUS, we
2600 may kill the flags register that was holding a useful value.
2602 So, please before trying to handle MINUS, consider reload as a
2603 whole instead of this little section as well as the backend issues. */
2604 case PLUS:
2605 /* If this is the sum of an eliminable register and a constant, rework
2606 the sum. */
2607 if (REG_P (XEXP (x, 0))
2608 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2609 && CONSTANT_P (XEXP (x, 1)))
2611 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2612 ep++)
2613 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2615 /* The only time we want to replace a PLUS with a REG (this
2616 occurs when the constant operand of the PLUS is the negative
2617 of the offset) is when we are inside a MEM. We won't want
2618 to do so at other times because that would change the
2619 structure of the insn in a way that reload can't handle.
2620 We special-case the commonest situation in
2621 eliminate_regs_in_insn, so just replace a PLUS with a
2622 PLUS here, unless inside a MEM. */
2623 if (mem_mode != 0 && CONST_INT_P (XEXP (x, 1))
2624 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2625 return ep->to_rtx;
2626 else
2627 return gen_rtx_PLUS (Pmode, ep->to_rtx,
2628 plus_constant (XEXP (x, 1),
2629 ep->previous_offset));
2632 /* If the register is not eliminable, we are done since the other
2633 operand is a constant. */
2634 return x;
2637 /* If this is part of an address, we want to bring any constant to the
2638 outermost PLUS. We will do this by doing register replacement in
2639 our operands and seeing if a constant shows up in one of them.
2641 Note that there is no risk of modifying the structure of the insn,
2642 since we only get called for its operands, thus we are either
2643 modifying the address inside a MEM, or something like an address
2644 operand of a load-address insn. */
2647 rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true,
2648 for_costs);
2649 rtx new1 = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2650 for_costs);
2652 if (reg_renumber && (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)))
2654 /* If one side is a PLUS and the other side is a pseudo that
2655 didn't get a hard register but has a reg_equiv_constant,
2656 we must replace the constant here since it may no longer
2657 be in the position of any operand. */
2658 if (GET_CODE (new0) == PLUS && REG_P (new1)
2659 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2660 && reg_renumber[REGNO (new1)] < 0
2661 && reg_equiv_constant != 0
2662 && reg_equiv_constant[REGNO (new1)] != 0)
2663 new1 = reg_equiv_constant[REGNO (new1)];
2664 else if (GET_CODE (new1) == PLUS && REG_P (new0)
2665 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2666 && reg_renumber[REGNO (new0)] < 0
2667 && reg_equiv_constant[REGNO (new0)] != 0)
2668 new0 = reg_equiv_constant[REGNO (new0)];
2670 new_rtx = form_sum (GET_MODE (x), new0, new1);
2672 /* As above, if we are not inside a MEM we do not want to
2673 turn a PLUS into something else. We might try to do so here
2674 for an addition of 0 if we aren't optimizing. */
2675 if (! mem_mode && GET_CODE (new_rtx) != PLUS)
2676 return gen_rtx_PLUS (GET_MODE (x), new_rtx, const0_rtx);
2677 else
2678 return new_rtx;
2681 return x;
2683 case MULT:
2684 /* If this is the product of an eliminable register and a
2685 constant, apply the distribute law and move the constant out
2686 so that we have (plus (mult ..) ..). This is needed in order
2687 to keep load-address insns valid. This case is pathological.
2688 We ignore the possibility of overflow here. */
2689 if (REG_P (XEXP (x, 0))
2690 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2691 && CONST_INT_P (XEXP (x, 1)))
2692 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2693 ep++)
2694 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2696 if (! mem_mode
2697 /* Refs inside notes or in DEBUG_INSNs don't count for
2698 this purpose. */
2699 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2700 || GET_CODE (insn) == INSN_LIST
2701 || DEBUG_INSN_P (insn))))
2702 ep->ref_outside_mem = 1;
2704 return
2705 plus_constant (gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
2706 ep->previous_offset * INTVAL (XEXP (x, 1)));
2709 /* ... fall through ... */
2711 case CALL:
2712 case COMPARE:
2713 /* See comments before PLUS about handling MINUS. */
2714 case MINUS:
2715 case DIV: case UDIV:
2716 case MOD: case UMOD:
2717 case AND: case IOR: case XOR:
2718 case ROTATERT: case ROTATE:
2719 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2720 case NE: case EQ:
2721 case GE: case GT: case GEU: case GTU:
2722 case LE: case LT: case LEU: case LTU:
2724 rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false,
2725 for_costs);
2726 rtx new1 = XEXP (x, 1)
2727 ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, false,
2728 for_costs) : 0;
2730 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2731 return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
2733 return x;
2735 case EXPR_LIST:
2736 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2737 if (XEXP (x, 0))
2739 new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true,
2740 for_costs);
2741 if (new_rtx != XEXP (x, 0))
2743 /* If this is a REG_DEAD note, it is not valid anymore.
2744 Using the eliminated version could result in creating a
2745 REG_DEAD note for the stack or frame pointer. */
2746 if (REG_NOTE_KIND (x) == REG_DEAD)
2747 return (XEXP (x, 1)
2748 ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2749 for_costs)
2750 : NULL_RTX);
2752 x = alloc_reg_note (REG_NOTE_KIND (x), new_rtx, XEXP (x, 1));
2756 /* ... fall through ... */
2758 case INSN_LIST:
2759 /* Now do eliminations in the rest of the chain. If this was
2760 an EXPR_LIST, this might result in allocating more memory than is
2761 strictly needed, but it simplifies the code. */
2762 if (XEXP (x, 1))
2764 new_rtx = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2765 for_costs);
2766 if (new_rtx != XEXP (x, 1))
2767 return
2768 gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new_rtx);
2770 return x;
2772 case PRE_INC:
2773 case POST_INC:
2774 case PRE_DEC:
2775 case POST_DEC:
2776 /* We do not support elimination of a register that is modified.
2777 elimination_effects has already make sure that this does not
2778 happen. */
2779 return x;
2781 case PRE_MODIFY:
2782 case POST_MODIFY:
2783 /* We do not support elimination of a register that is modified.
2784 elimination_effects has already make sure that this does not
2785 happen. The only remaining case we need to consider here is
2786 that the increment value may be an eliminable register. */
2787 if (GET_CODE (XEXP (x, 1)) == PLUS
2788 && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
2790 rtx new_rtx = eliminate_regs_1 (XEXP (XEXP (x, 1), 1), mem_mode,
2791 insn, true, for_costs);
2793 if (new_rtx != XEXP (XEXP (x, 1), 1))
2794 return gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (x, 0),
2795 gen_rtx_PLUS (GET_MODE (x),
2796 XEXP (x, 0), new_rtx));
2798 return x;
2800 case STRICT_LOW_PART:
2801 case NEG: case NOT:
2802 case SIGN_EXTEND: case ZERO_EXTEND:
2803 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2804 case FLOAT: case FIX:
2805 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2806 case ABS:
2807 case SQRT:
2808 case FFS:
2809 case CLZ:
2810 case CTZ:
2811 case POPCOUNT:
2812 case PARITY:
2813 case BSWAP:
2814 new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false,
2815 for_costs);
2816 if (new_rtx != XEXP (x, 0))
2817 return gen_rtx_fmt_e (code, GET_MODE (x), new_rtx);
2818 return x;
2820 case SUBREG:
2821 /* Similar to above processing, but preserve SUBREG_BYTE.
2822 Convert (subreg (mem)) to (mem) if not paradoxical.
2823 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2824 pseudo didn't get a hard reg, we must replace this with the
2825 eliminated version of the memory location because push_reload
2826 may do the replacement in certain circumstances. */
2827 if (REG_P (SUBREG_REG (x))
2828 && (GET_MODE_SIZE (GET_MODE (x))
2829 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2830 && reg_equiv_memory_loc != 0
2831 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2833 new_rtx = SUBREG_REG (x);
2835 else
2836 new_rtx = eliminate_regs_1 (SUBREG_REG (x), mem_mode, insn, false,
2837 for_costs);
2839 if (new_rtx != SUBREG_REG (x))
2841 int x_size = GET_MODE_SIZE (GET_MODE (x));
2842 int new_size = GET_MODE_SIZE (GET_MODE (new_rtx));
2844 if (MEM_P (new_rtx)
2845 && ((x_size < new_size
2846 #ifdef WORD_REGISTER_OPERATIONS
2847 /* On these machines, combine can create rtl of the form
2848 (set (subreg:m1 (reg:m2 R) 0) ...)
2849 where m1 < m2, and expects something interesting to
2850 happen to the entire word. Moreover, it will use the
2851 (reg:m2 R) later, expecting all bits to be preserved.
2852 So if the number of words is the same, preserve the
2853 subreg so that push_reload can see it. */
2854 && ! ((x_size - 1) / UNITS_PER_WORD
2855 == (new_size -1 ) / UNITS_PER_WORD)
2856 #endif
2858 || x_size == new_size)
2860 return adjust_address_nv (new_rtx, GET_MODE (x), SUBREG_BYTE (x));
2861 else
2862 return gen_rtx_SUBREG (GET_MODE (x), new_rtx, SUBREG_BYTE (x));
2865 return x;
2867 case MEM:
2868 /* Our only special processing is to pass the mode of the MEM to our
2869 recursive call and copy the flags. While we are here, handle this
2870 case more efficiently. */
2872 new_rtx = eliminate_regs_1 (XEXP (x, 0), GET_MODE (x), insn, true,
2873 for_costs);
2874 if (for_costs
2875 && memory_address_p (GET_MODE (x), XEXP (x, 0))
2876 && !memory_address_p (GET_MODE (x), new_rtx))
2877 for_each_rtx (&XEXP (x, 0), note_reg_elim_costly, insn);
2879 return replace_equiv_address_nv (x, new_rtx);
2881 case USE:
2882 /* Handle insn_list USE that a call to a pure function may generate. */
2883 new_rtx = eliminate_regs_1 (XEXP (x, 0), VOIDmode, insn, false,
2884 for_costs);
2885 if (new_rtx != XEXP (x, 0))
2886 return gen_rtx_USE (GET_MODE (x), new_rtx);
2887 return x;
2889 case CLOBBER:
2890 gcc_assert (insn && DEBUG_INSN_P (insn));
2891 break;
2893 case ASM_OPERANDS:
2894 case SET:
2895 gcc_unreachable ();
2897 default:
2898 break;
2901 /* Process each of our operands recursively. If any have changed, make a
2902 copy of the rtx. */
2903 fmt = GET_RTX_FORMAT (code);
2904 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2906 if (*fmt == 'e')
2908 new_rtx = eliminate_regs_1 (XEXP (x, i), mem_mode, insn, false,
2909 for_costs);
2910 if (new_rtx != XEXP (x, i) && ! copied)
2912 x = shallow_copy_rtx (x);
2913 copied = 1;
2915 XEXP (x, i) = new_rtx;
2917 else if (*fmt == 'E')
2919 int copied_vec = 0;
2920 for (j = 0; j < XVECLEN (x, i); j++)
2922 new_rtx = eliminate_regs_1 (XVECEXP (x, i, j), mem_mode, insn, false,
2923 for_costs);
2924 if (new_rtx != XVECEXP (x, i, j) && ! copied_vec)
2926 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
2927 XVEC (x, i)->elem);
2928 if (! copied)
2930 x = shallow_copy_rtx (x);
2931 copied = 1;
2933 XVEC (x, i) = new_v;
2934 copied_vec = 1;
2936 XVECEXP (x, i, j) = new_rtx;
2941 return x;
/* Public entry point: substitute eliminable registers throughout X.
   MEM_MODE is the mode of an enclosing MEM, or VOIDmode when X is not
   inside a MEM.  Thin wrapper around eliminate_regs_1 with full-insn
   substitution and cost recording both disabled (the two false
   arguments — NOTE(review): presumed to be may_use_invariant and
   for_costs; confirm against eliminate_regs_1's signature).  */
2945 eliminate_regs (rtx x, enum machine_mode mem_mode, rtx insn)
2947 return eliminate_regs_1 (x, mem_mode, insn, false, false);
2950 /* Scan rtx X for modifications of elimination target registers. Update
2951 the table of eliminables to reflect the changed state. MEM_MODE is
2952 the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM. */
2954 static void
2955 elimination_effects (rtx x, enum machine_mode mem_mode)
2957 enum rtx_code code = GET_CODE (x);
2958 struct elim_table *ep;
2959 int regno;
2960 int i, j;
2961 const char *fmt;
2963 switch (code)
2965 case CONST_INT:
2966 case CONST_DOUBLE:
2967 case CONST_FIXED:
2968 case CONST_VECTOR:
2969 case CONST:
2970 case SYMBOL_REF:
2971 case CODE_LABEL:
2972 case PC:
2973 case CC0:
2974 case ASM_INPUT:
2975 case ADDR_VEC:
2976 case ADDR_DIFF_VEC:
2977 case RETURN:
2978 return;
2980 case REG:
2981 regno = REGNO (x);
2983 /* First handle the case where we encounter a bare register that
2984 is eliminable. Replace it with a PLUS. */
2985 if (regno < FIRST_PSEUDO_REGISTER)
2987 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2988 ep++)
2989 if (ep->from_rtx == x && ep->can_eliminate)
2991 if (! mem_mode)
2992 ep->ref_outside_mem = 1;
2993 return;
     /* A pseudo whose equivalence is a non-invariant constant may itself
        mention eliminable registers; process that expression too.  */
2997 else if (reg_renumber[regno] < 0 && reg_equiv_constant
2998 && reg_equiv_constant[regno]
2999 && ! function_invariant_p (reg_equiv_constant[regno]))
3000 elimination_effects (reg_equiv_constant[regno], mem_mode);
3001 return;
3003 case PRE_INC:
3004 case POST_INC:
3005 case PRE_DEC:
3006 case POST_DEC:
3007 case POST_MODIFY:
3008 case PRE_MODIFY:
3009 /* If we modify the source of an elimination rule, disable it. */
3010 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3011 if (ep->from_rtx == XEXP (x, 0))
3012 ep->can_eliminate = 0;
3014 /* If we modify the target of an elimination rule by adding a constant,
3015 update its offset. If we modify the target in any other way, we'll
3016 have to disable the rule as well. */
3017 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3018 if (ep->to_rtx == XEXP (x, 0))
3020 int size = GET_MODE_SIZE (mem_mode);
3022 /* If more bytes than MEM_MODE are pushed, account for them. */
3023 #ifdef PUSH_ROUNDING
3024 if (ep->to_rtx == stack_pointer_rtx)
3025 size = PUSH_ROUNDING (size);
3026 #endif
3027 if (code == PRE_DEC || code == POST_DEC)
3028 ep->offset += size;
3029 else if (code == PRE_INC || code == POST_INC)
3030 ep->offset -= size;
3031 else if (code == PRE_MODIFY || code == POST_MODIFY)
3033 if (GET_CODE (XEXP (x, 1)) == PLUS
3034 && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
3035 && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
3036 ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1));
3037 else
3038 ep->can_eliminate = 0;
3042 /* These two aren't unary operators. */
3043 if (code == POST_MODIFY || code == PRE_MODIFY)
3044 break;
3046 /* Fall through to generic unary operation case. */
3047 case STRICT_LOW_PART:
3048 case NEG: case NOT:
3049 case SIGN_EXTEND: case ZERO_EXTEND:
3050 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3051 case FLOAT: case FIX:
3052 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3053 case ABS:
3054 case SQRT:
3055 case FFS:
3056 case CLZ:
3057 case CTZ:
3058 case POPCOUNT:
3059 case PARITY:
3060 case BSWAP:
3061 elimination_effects (XEXP (x, 0), mem_mode);
3062 return;
3064 case SUBREG:
     /* NOTE(review): a narrow-enough SUBREG of a pseudo that has a memory
        equivalence is apparently left alone here because it will become a
        memory reference — confirm against the SUBREG case in
        eliminate_regs_1.  */
3065 if (REG_P (SUBREG_REG (x))
3066 && (GET_MODE_SIZE (GET_MODE (x))
3067 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3068 && reg_equiv_memory_loc != 0
3069 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
3070 return;
3072 elimination_effects (SUBREG_REG (x), mem_mode);
3073 return;
3075 case USE:
3076 /* If using a register that is the source of an eliminate we still
3077 think can be performed, note it cannot be performed since we don't
3078 know how this register is used. */
3079 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3080 if (ep->from_rtx == XEXP (x, 0))
3081 ep->can_eliminate = 0;
3083 elimination_effects (XEXP (x, 0), mem_mode);
3084 return;
3086 case CLOBBER:
3087 /* If clobbering a register that is the replacement register for an
3088 elimination we still think can be performed, note that it cannot
3089 be performed. Otherwise, we need not be concerned about it. */
3090 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3091 if (ep->to_rtx == XEXP (x, 0))
3092 ep->can_eliminate = 0;
3094 elimination_effects (XEXP (x, 0), mem_mode);
3095 return;
3097 case SET:
3098 /* Check for setting a register that we know about. */
3099 if (REG_P (SET_DEST (x)))
3101 /* See if this is setting the replacement register for an
3102 elimination.
3104 If DEST is the hard frame pointer, we do nothing because we
3105 assume that all assignments to the frame pointer are for
3106 non-local gotos and are being done at a time when they are valid
3107 and do not disturb anything else. Some machines want to
3108 eliminate a fake argument pointer (or even a fake frame pointer)
3109 with either the real frame or the stack pointer. Assignments to
3110 the hard frame pointer must not prevent this elimination. */
3112 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3113 ep++)
3114 if (ep->to_rtx == SET_DEST (x)
3115 && SET_DEST (x) != hard_frame_pointer_rtx)
3117 /* If it is being incremented, adjust the offset. Otherwise,
3118 this elimination can't be done. */
3119 rtx src = SET_SRC (x);
3121 if (GET_CODE (src) == PLUS
3122 && XEXP (src, 0) == SET_DEST (x)
3123 && CONST_INT_P (XEXP (src, 1)))
3124 ep->offset -= INTVAL (XEXP (src, 1));
3125 else
3126 ep->can_eliminate = 0;
3130 elimination_effects (SET_DEST (x), VOIDmode);
3131 elimination_effects (SET_SRC (x), VOIDmode);
3132 return;
3134 case MEM:
3135 /* Our only special processing is to pass the mode of the MEM to our
3136 recursive call. */
3137 elimination_effects (XEXP (x, 0), GET_MODE (x));
3138 return;
3140 default:
3141 break;
     /* For any rtx not handled above, recurse on all sub-expressions.  */
3144 fmt = GET_RTX_FORMAT (code);
3145 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3147 if (*fmt == 'e')
3148 elimination_effects (XEXP (x, i), mem_mode);
3149 else if (*fmt == 'E')
3150 for (j = 0; j < XVECLEN (x, i); j++)
3151 elimination_effects (XVECEXP (x, i, j), mem_mode);
3155 /* Descend through rtx X and verify that no references to eliminable registers
3156 remain. If any do remain, mark the involved register as not
3157 eliminable. */
3159 static void
3160 check_eliminable_occurrences (rtx x)
3162 const char *fmt;
3163 int i;
3164 enum rtx_code code;
3166 if (x == 0)
3167 return;
3169 code = GET_CODE (x);
3171 if (code == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3173 struct elim_table *ep;
3175 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3176 if (ep->from_rtx == x)
3177 ep->can_eliminate = 0;
3178 return;
3181 fmt = GET_RTX_FORMAT (code);
3182 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3184 if (*fmt == 'e')
3185 check_eliminable_occurrences (XEXP (x, i));
3186 else if (*fmt == 'E')
3188 int j;
3189 for (j = 0; j < XVECLEN (x, i); j++)
3190 check_eliminable_occurrences (XVECEXP (x, i, j));
3195 /* Scan INSN and eliminate all eliminable registers in it.
3197 If REPLACE is nonzero, do the replacement destructively. Also
3198 delete the insn as dead if it is setting an eliminable register.
3200 If REPLACE is zero, do all our allocations in reload_obstack.
3202 If no eliminations were done and this insn doesn't require any elimination
3203 processing (these are not identical conditions: it might be updating sp,
3204 but not referencing fp; this needs to be seen during reload_as_needed so
3205 that the offset between fp and sp can be taken into consideration), zero
3206 is returned. Otherwise, 1 is returned. */
3208 static int
3209 eliminate_regs_in_insn (rtx insn, int replace)
3211 int icode = recog_memoized (insn);
3212 rtx old_body = PATTERN (insn);
3213 int insn_is_asm = asm_noperands (old_body) >= 0;
3214 rtx old_set = single_set (insn);
3215 rtx new_body;
3216 int val = 0;
3217 int i;
3218 rtx substed_operand[MAX_RECOG_OPERANDS];
3219 rtx orig_operand[MAX_RECOG_OPERANDS];
3220 struct elim_table *ep;
3221 rtx plus_src, plus_cst_src;
  /* An unrecognizable insn must be one of the pattern kinds asserted
     below; for a debug insn we still substitute within its location
     expression.  */
3223 if (! insn_is_asm && icode < 0)
3225 gcc_assert (GET_CODE (PATTERN (insn)) == USE
3226 || GET_CODE (PATTERN (insn)) == CLOBBER
3227 || GET_CODE (PATTERN (insn)) == ADDR_VEC
3228 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
3229 || GET_CODE (PATTERN (insn)) == ASM_INPUT
3230 || DEBUG_INSN_P (insn));
3231 if (DEBUG_INSN_P (insn))
3232 INSN_VAR_LOCATION_LOC (insn)
3233 = eliminate_regs (INSN_VAR_LOCATION_LOC (insn), VOIDmode, insn);
3234 return 0;
3237 if (old_set != 0 && REG_P (SET_DEST (old_set))
3238 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3240 /* Check for setting an eliminable register. */
3241 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3242 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3244 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3245 /* If this is setting the frame pointer register to the
3246 hardware frame pointer register and this is an elimination
3247 that will be done (tested above), this insn is really
3248 adjusting the frame pointer downward to compensate for
3249 the adjustment done before a nonlocal goto. */
3250 if (ep->from == FRAME_POINTER_REGNUM
3251 && ep->to == HARD_FRAME_POINTER_REGNUM)
3253 rtx base = SET_SRC (old_set);
3254 rtx base_insn = insn;
3255 HOST_WIDE_INT offset = 0;
  /* Walk backwards through PLUS-constant chains and single-set
     feeding insns, accumulating the constant offset from to_rtx.  */
3257 while (base != ep->to_rtx)
3259 rtx prev_insn, prev_set;
3261 if (GET_CODE (base) == PLUS
3262 && CONST_INT_P (XEXP (base, 1)))
3264 offset += INTVAL (XEXP (base, 1));
3265 base = XEXP (base, 0);
3267 else if ((prev_insn = prev_nonnote_insn (base_insn)) != 0
3268 && (prev_set = single_set (prev_insn)) != 0
3269 && rtx_equal_p (SET_DEST (prev_set), base))
3271 base = SET_SRC (prev_set);
3272 base_insn = prev_insn;
3274 else
3275 break;
3278 if (base == ep->to_rtx)
3280 rtx src
3281 = plus_constant (ep->to_rtx, offset - ep->offset);
3283 new_body = old_body;
3284 if (! replace)
3286 new_body = copy_insn (old_body);
3287 if (REG_NOTES (insn))
3288 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3290 PATTERN (insn) = new_body;
3291 old_set = single_set (insn);
3293 /* First see if this insn remains valid when we
3294 make the change. If not, keep the INSN_CODE
3295 the same and let reload fit it up. */
3296 validate_change (insn, &SET_SRC (old_set), src, 1);
3297 validate_change (insn, &SET_DEST (old_set),
3298 ep->to_rtx, 1);
3299 if (! apply_change_group ())
3301 SET_SRC (old_set) = src;
3302 SET_DEST (old_set) = ep->to_rtx;
3305 val = 1;
3306 goto done;
3309 #endif
3311 /* In this case this insn isn't serving a useful purpose. We
3312 will delete it in reload_as_needed once we know that this
3313 elimination is, in fact, being done.
3315 If REPLACE isn't set, we can't delete this insn, but needn't
3316 process it since it won't be used unless something changes. */
3317 if (replace)
3319 delete_dead_insn (insn);
3320 return 1;
3322 val = 1;
3323 goto done;
3327 /* We allow one special case which happens to work on all machines we
3328 currently support: a single set with the source or a REG_EQUAL
3329 note being a PLUS of an eliminable register and a constant. */
3330 plus_src = plus_cst_src = 0;
3331 if (old_set && REG_P (SET_DEST (old_set)))
3333 if (GET_CODE (SET_SRC (old_set)) == PLUS)
3334 plus_src = SET_SRC (old_set);
3335 /* First see if the source is of the form (plus (...) CST). */
3336 if (plus_src
3337 && CONST_INT_P (XEXP (plus_src, 1)))
3338 plus_cst_src = plus_src;
3339 else if (REG_P (SET_SRC (old_set))
3340 || plus_src)
3342 /* Otherwise, see if we have a REG_EQUAL note of the form
3343 (plus (...) CST). */
3344 rtx links;
3345 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
3347 if ((REG_NOTE_KIND (links) == REG_EQUAL
3348 || REG_NOTE_KIND (links) == REG_EQUIV)
3349 && GET_CODE (XEXP (links, 0)) == PLUS
3350 && CONST_INT_P (XEXP (XEXP (links, 0), 1)))
3352 plus_cst_src = XEXP (links, 0);
3353 break;
3358 /* Check that the first operand of the PLUS is a hard reg or
3359 the lowpart subreg of one. */
3360 if (plus_cst_src)
3362 rtx reg = XEXP (plus_cst_src, 0);
3363 if (GET_CODE (reg) == SUBREG && subreg_lowpart_p (reg))
3364 reg = SUBREG_REG (reg);
3366 if (!REG_P (reg) || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
3367 plus_cst_src = 0;
3370 if (plus_cst_src)
3372 rtx reg = XEXP (plus_cst_src, 0);
3373 HOST_WIDE_INT offset = INTVAL (XEXP (plus_cst_src, 1));
3375 if (GET_CODE (reg) == SUBREG)
3376 reg = SUBREG_REG (reg);
3378 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3379 if (ep->from_rtx == reg && ep->can_eliminate)
3381 rtx to_rtx = ep->to_rtx;
3382 offset += ep->offset;
3383 offset = trunc_int_for_mode (offset, GET_MODE (plus_cst_src));
3385 if (GET_CODE (XEXP (plus_cst_src, 0)) == SUBREG)
3386 to_rtx = gen_lowpart (GET_MODE (XEXP (plus_cst_src, 0)),
3387 to_rtx);
3388 /* If we have a nonzero offset, and the source is already
3389 a simple REG, the following transformation would
3390 increase the cost of the insn by replacing a simple REG
3391 with (plus (reg sp) CST). So try only when we already
3392 had a PLUS before. */
3393 if (offset == 0 || plus_src)
3395 rtx new_src = plus_constant (to_rtx, offset);
3397 new_body = old_body;
3398 if (! replace)
3400 new_body = copy_insn (old_body);
3401 if (REG_NOTES (insn))
3402 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3404 PATTERN (insn) = new_body;
3405 old_set = single_set (insn);
3407 /* First see if this insn remains valid when we make the
3408 change. If not, try to replace the whole pattern with
3409 a simple set (this may help if the original insn was a
3410 PARALLEL that was only recognized as single_set due to
3411 REG_UNUSED notes). If this isn't valid either, keep
3412 the INSN_CODE the same and let reload fix it up. */
3413 if (!validate_change (insn, &SET_SRC (old_set), new_src, 0))
3415 rtx new_pat = gen_rtx_SET (VOIDmode,
3416 SET_DEST (old_set), new_src);
3418 if (!validate_change (insn, &PATTERN (insn), new_pat, 0))
3419 SET_SRC (old_set) = new_src;
3422 else
3423 break;
3425 val = 1;
3426 /* This can't have an effect on elimination offsets, so skip right
3427 to the end. */
3428 goto done;
3432 /* Determine the effects of this insn on elimination offsets. */
3433 elimination_effects (old_body, VOIDmode);
3435 /* Eliminate all eliminable registers occurring in operands that
3436 can be handled by reload. */
3437 extract_insn (insn);
3438 for (i = 0; i < recog_data.n_operands; i++)
3440 orig_operand[i] = recog_data.operand[i];
3441 substed_operand[i] = recog_data.operand[i];
3443 /* For an asm statement, every operand is eliminable. */
3444 if (insn_is_asm || insn_data[icode].operand[i].eliminable)
3446 bool is_set_src, in_plus;
3448 /* Check for setting a register that we know about. */
3449 if (recog_data.operand_type[i] != OP_IN
3450 && REG_P (orig_operand[i]))
3452 /* If we are assigning to a register that can be eliminated, it
3453 must be as part of a PARALLEL, since the code above handles
3454 single SETs. We must indicate that we can no longer
3455 eliminate this reg. */
3456 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3457 ep++)
3458 if (ep->from_rtx == orig_operand[i])
3459 ep->can_eliminate = 0;
3462 /* Companion to the above plus substitution, we can allow
3463 invariants as the source of a plain move. */
3464 is_set_src = false;
3465 if (old_set
3466 && recog_data.operand_loc[i] == &SET_SRC (old_set))
3467 is_set_src = true;
3468 in_plus = false;
3469 if (plus_src
3470 && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
3471 || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
3472 in_plus = true;
3474 substed_operand[i]
3475 = eliminate_regs_1 (recog_data.operand[i], VOIDmode,
3476 replace ? insn : NULL_RTX,
3477 is_set_src || in_plus, false);
3478 if (substed_operand[i] != orig_operand[i])
3479 val = 1;
3480 /* Terminate the search in check_eliminable_occurrences at
3481 this point. */
3482 *recog_data.operand_loc[i] = 0;
3484 /* If an output operand changed from a REG to a MEM and INSN is an
3485 insn, write a CLOBBER insn. */
3486 if (recog_data.operand_type[i] != OP_IN
3487 && REG_P (orig_operand[i])
3488 && MEM_P (substed_operand[i])
3489 && replace)
3490 emit_insn_after (gen_clobber (orig_operand[i]), insn);
  /* Copy the (zeroed) operand locations into their duplicate
     locations so the occurrence check below sees both.  */
3494 for (i = 0; i < recog_data.n_dups; i++)
3495 *recog_data.dup_loc[i]
3496 = *recog_data.operand_loc[(int) recog_data.dup_num[i]];
3498 /* If any eliminable remain, they aren't eliminable anymore. */
3499 check_eliminable_occurrences (old_body);
3501 /* Substitute the operands; the new values are in the substed_operand
3502 array. */
3503 for (i = 0; i < recog_data.n_operands; i++)
3504 *recog_data.operand_loc[i] = substed_operand[i];
3505 for (i = 0; i < recog_data.n_dups; i++)
3506 *recog_data.dup_loc[i] = substed_operand[(int) recog_data.dup_num[i]];
3508 /* If we are replacing a body that was a (set X (plus Y Z)), try to
3509 re-recognize the insn. We do this in case we had a simple addition
3510 but now can do this as a load-address. This saves an insn in this
3511 common case.
3512 If re-recognition fails, the old insn code number will still be used,
3513 and some register operands may have changed into PLUS expressions.
3514 These will be handled by find_reloads by loading them into a register
3515 again. */
3517 if (val)
3519 /* If we aren't replacing things permanently and we changed something,
3520 make another copy to ensure that all the RTL is new. Otherwise
3521 things can go wrong if find_reload swaps commutative operands
3522 and one is inside RTL that has been copied while the other is not. */
3523 new_body = old_body;
3524 if (! replace)
3526 new_body = copy_insn (old_body);
3527 if (REG_NOTES (insn))
3528 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3530 PATTERN (insn) = new_body;
3532 /* If we had a move insn but now we don't, rerecognize it. This will
3533 cause spurious re-recognition if the old move had a PARALLEL since
3534 the new one still will, but we can't call single_set without
3535 having put NEW_BODY into the insn and the re-recognition won't
3536 hurt in this rare case. */
3537 /* ??? Why this huge if statement - why don't we just rerecognize the
3538 thing always? */
3539 if (! insn_is_asm
3540 && old_set != 0
3541 && ((REG_P (SET_SRC (old_set))
3542 && (GET_CODE (new_body) != SET
3543 || !REG_P (SET_SRC (new_body))))
3544 /* If this was a load from or store to memory, compare
3545 the MEM in recog_data.operand to the one in the insn.
3546 If they are not equal, then rerecognize the insn. */
3547 || (old_set != 0
3548 && ((MEM_P (SET_SRC (old_set))
3549 && SET_SRC (old_set) != recog_data.operand[1])
3550 || (MEM_P (SET_DEST (old_set))
3551 && SET_DEST (old_set) != recog_data.operand[0])))
3552 /* If this was an add insn before, rerecognize. */
3553 || GET_CODE (SET_SRC (old_set)) == PLUS))
3555 int new_icode = recog (PATTERN (insn), insn, 0);
3556 if (new_icode >= 0)
3557 INSN_CODE (insn) = new_icode;
3561 /* Restore the old body. If there were any changes to it, we made a copy
3562 of it while the changes were still in place, so we'll correctly return
3563 a modified insn below. */
3564 if (! replace)
3566 /* Restore the old body. */
3567 for (i = 0; i < recog_data.n_operands; i++)
3568 /* Restoring a top-level match_parallel would clobber the new_body
3569 we installed in the insn. */
3570 if (recog_data.operand_loc[i] != &PATTERN (insn))
3571 *recog_data.operand_loc[i] = orig_operand[i];
3572 for (i = 0; i < recog_data.n_dups; i++)
3573 *recog_data.dup_loc[i] = orig_operand[(int) recog_data.dup_num[i]];
3576 /* Update all elimination pairs to reflect the status after the current
3577 insn. The changes we make were determined by the earlier call to
3578 elimination_effects.
3580 We also detect cases where register elimination cannot be done,
3581 namely, if a register would be both changed and referenced outside a MEM
3582 in the resulting insn since such an insn is often undefined and, even if
3583 not, we cannot know what meaning will be given to it. Note that it is
3584 valid to have a register used in an address in an insn that changes it
3585 (presumably with a pre- or post-increment or decrement).
3587 If anything changes, return nonzero. */
3589 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3591 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3592 ep->can_eliminate = 0;
3594 ep->ref_outside_mem = 0;
3596 if (ep->previous_offset != ep->offset)
3597 val = 1;
3600 done:
3601 /* If we changed something, perform elimination in REG_NOTES. This is
3602 needed even when REPLACE is zero because a REG_DEAD note might refer
3603 to a register that we eliminate and could cause a different number
3604 of spill registers to be needed in the final reload pass than in
3605 the pre-passes. */
3606 if (val && REG_NOTES (insn) != 0)
3607 REG_NOTES (insn)
3608 = eliminate_regs_1 (REG_NOTES (insn), VOIDmode, REG_NOTES (insn), true,
3609 false);
3611 return val;
3614 /* Like eliminate_regs_in_insn, but only estimate costs for the use of the
3615 register allocator. INSN is the instruction we need to examine, we perform
3616 eliminations in its operands and record cases where eliminating a reg with
3617 an invariant equivalence would add extra cost. */
3619 static void
3620 elimination_costs_in_insn (rtx insn)
3622 int icode = recog_memoized (insn);
3623 rtx old_body = PATTERN (insn);
3624 int insn_is_asm = asm_noperands (old_body) >= 0;
3625 rtx old_set = single_set (insn);
3626 int i;
3627 rtx orig_operand[MAX_RECOG_OPERANDS];
3628 rtx orig_dup[MAX_RECOG_OPERANDS];
3629 struct elim_table *ep;
3630 rtx plus_src, plus_cst_src;
3631 bool sets_reg_p;
3633 if (! insn_is_asm && icode < 0)
3635 gcc_assert (GET_CODE (PATTERN (insn)) == USE
3636 || GET_CODE (PATTERN (insn)) == CLOBBER
3637 || GET_CODE (PATTERN (insn)) == ADDR_VEC
3638 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
3639 || GET_CODE (PATTERN (insn)) == ASM_INPUT
3640 || DEBUG_INSN_P (insn));
3641 return;
  /* NOTE(review): a single set of an eliminable register is handled by
     the elimination machinery itself (cf. eliminate_regs_in_insn), so
     no cost appears to be recorded for it here.  */
3644 if (old_set != 0 && REG_P (SET_DEST (old_set))
3645 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3647 /* Check for setting an eliminable register. */
3648 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3649 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3650 return;
3653 /* We allow one special case which happens to work on all machines we
3654 currently support: a single set with the source or a REG_EQUAL
3655 note being a PLUS of an eliminable register and a constant. */
3656 plus_src = plus_cst_src = 0;
3657 sets_reg_p = false;
3658 if (old_set && REG_P (SET_DEST (old_set)))
3660 sets_reg_p = true;
3661 if (GET_CODE (SET_SRC (old_set)) == PLUS)
3662 plus_src = SET_SRC (old_set);
3663 /* First see if the source is of the form (plus (...) CST). */
3664 if (plus_src
3665 && CONST_INT_P (XEXP (plus_src, 1)))
3666 plus_cst_src = plus_src;
3667 else if (REG_P (SET_SRC (old_set))
3668 || plus_src)
3670 /* Otherwise, see if we have a REG_EQUAL note of the form
3671 (plus (...) CST). */
3672 rtx links;
3673 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
3675 if ((REG_NOTE_KIND (links) == REG_EQUAL
3676 || REG_NOTE_KIND (links) == REG_EQUIV)
3677 && GET_CODE (XEXP (links, 0)) == PLUS
3678 && CONST_INT_P (XEXP (XEXP (links, 0), 1)))
3680 plus_cst_src = XEXP (links, 0);
3681 break;
3687 /* Determine the effects of this insn on elimination offsets. */
3688 elimination_effects (old_body, VOIDmode);
3690 /* Eliminate all eliminable registers occurring in operands that
3691 can be handled by reload. */
3692 extract_insn (insn);
  /* Save the contents of each dup location so the body can be
     restored below.  */
3693 for (i = 0; i < recog_data.n_dups; i++)
3694 orig_dup[i] = *recog_data.dup_loc[i];
3696 for (i = 0; i < recog_data.n_operands; i++)
3698 orig_operand[i] = recog_data.operand[i];
3700 /* For an asm statement, every operand is eliminable. */
3701 if (insn_is_asm || insn_data[icode].operand[i].eliminable)
3703 bool is_set_src, in_plus;
3705 /* Check for setting a register that we know about. */
3706 if (recog_data.operand_type[i] != OP_IN
3707 && REG_P (orig_operand[i]))
3709 /* If we are assigning to a register that can be eliminated, it
3710 must be as part of a PARALLEL, since the code above handles
3711 single SETs. We must indicate that we can no longer
3712 eliminate this reg. */
3713 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3714 ep++)
3715 if (ep->from_rtx == orig_operand[i])
3716 ep->can_eliminate = 0;
3719 /* Companion to the above plus substitution, we can allow
3720 invariants as the source of a plain move. */
3721 is_set_src = false;
3722 if (old_set && recog_data.operand_loc[i] == &SET_SRC (old_set))
3723 is_set_src = true;
3724 if (is_set_src && !sets_reg_p)
3725 note_reg_elim_costly (&SET_SRC (old_set), insn);
3726 in_plus = false;
3727 if (plus_src && sets_reg_p
3728 && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
3729 || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
3730 in_plus = true;
3732 eliminate_regs_1 (recog_data.operand[i], VOIDmode,
3733 NULL_RTX,
3734 is_set_src || in_plus, true);
3735 /* Terminate the search in check_eliminable_occurrences at
3736 this point. */
3737 *recog_data.operand_loc[i] = 0;
3741 for (i = 0; i < recog_data.n_dups; i++)
3742 *recog_data.dup_loc[i]
3743 = *recog_data.operand_loc[(int) recog_data.dup_num[i]];
3745 /* If any eliminable remain, they aren't eliminable anymore. */
3746 check_eliminable_occurrences (old_body);
3748 /* Restore the old body. */
3749 for (i = 0; i < recog_data.n_operands; i++)
3750 *recog_data.operand_loc[i] = orig_operand[i];
3751 for (i = 0; i < recog_data.n_dups; i++)
3752 *recog_data.dup_loc[i] = orig_dup[i];
3754 /* Update all elimination pairs to reflect the status after the current
3755 insn. The changes we make were determined by the earlier call to
3756 elimination_effects. */
3758 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3760 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3761 ep->can_eliminate = 0;
3763 ep->ref_outside_mem = 0;
3766 return;
3769 /* Loop through all elimination pairs.
3770 Recalculate the number not at initial offset.
3772 Compute the maximum offset (minimum offset if the stack does not
3773 grow downward) for each elimination pair. */
3775 static void
3776 update_eliminable_offsets (void)
3778 struct elim_table *ep;
3780 num_not_at_initial_offset = 0;
3781 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3783 ep->previous_offset = ep->offset;
3784 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3785 num_not_at_initial_offset++;
3789 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3790 replacement we currently believe is valid, mark it as not eliminable if X
3791 modifies DEST in any way other than by adding a constant integer to it.
3793 If DEST is the frame pointer, we do nothing because we assume that
3794 all assignments to the hard frame pointer are nonlocal gotos and are being
3795 done at a time when they are valid and do not disturb anything else.
3796 Some machines want to eliminate a fake argument pointer with either the
3797 frame or stack pointer. Assignments to the hard frame pointer must not
3798 prevent this elimination.
3800 Called via note_stores from reload before starting its passes to scan
3801 the insns of the function. */
3803 static void
3804 mark_not_eliminable (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
3806 unsigned int i;
3808 /* A SUBREG of a hard register here is just changing its mode. We should
3809 not see a SUBREG of an eliminable hard register, but check just in
3810 case. */
3811 if (GET_CODE (dest) == SUBREG)
3812 dest = SUBREG_REG (dest);
3814 if (dest == hard_frame_pointer_rtx)
3815 return;
3817 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3818 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3819 && (GET_CODE (x) != SET
3820 || GET_CODE (SET_SRC (x)) != PLUS
3821 || XEXP (SET_SRC (x), 0) != dest
3822 || !CONST_INT_P (XEXP (SET_SRC (x), 1))))
3824 reg_eliminate[i].can_eliminate_previous
3825 = reg_eliminate[i].can_eliminate = 0;
3826 num_eliminable--;
3830 /* Verify that the initial elimination offsets did not change since the
3831 last call to set_initial_elim_offsets. This is used to catch cases
3832 where something illegal happened during reload_as_needed that could
3833 cause incorrect code to be generated if we did not check for it. */
3835 static bool
3836 verify_initial_elim_offsets (void)
3838 HOST_WIDE_INT t;
3840 if (!num_eliminable)
3841 return true;
  /* Re-ask the target for each pair's offset and compare with what was
     recorded by set_initial_elim_offsets; with no ELIMINABLE_REGS only
     the single frame-pointer offset exists.  */
3843 #ifdef ELIMINABLE_REGS
3845 struct elim_table *ep;
3847 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3849 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t);
3850 if (t != ep->initial_offset)
3851 return false;
3854 #else
3855 INITIAL_FRAME_POINTER_OFFSET (t);
3856 if (t != reg_eliminate[0].initial_offset)
3857 return false;
3858 #endif
3860 return true;
3863 /* Reset all offsets on eliminable registers to their initial values. */
3865 static void
3866 set_initial_elim_offsets (void)
3868 struct elim_table *ep = reg_eliminate;
3870 #ifdef ELIMINABLE_REGS
3871 for (; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3873 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
3874 ep->previous_offset = ep->offset = ep->initial_offset;
3876 #else
3877 INITIAL_FRAME_POINTER_OFFSET (ep->initial_offset);
3878 ep->previous_offset = ep->offset = ep->initial_offset;
3879 #endif
  /* Every pair is now at its initial offset.  */
3881 num_not_at_initial_offset = 0;
3884 /* Subroutine of set_initial_label_offsets called via for_each_eh_label. */
3886 static void
3887 set_initial_eh_label_offset (rtx label)
  /* NOTE(review): the final argument of 1 appears to record LABEL as
     being at the initial elimination offsets — confirm against
     set_label_offsets.  */
3889 set_label_offsets (label, NULL_RTX, 1);
3892 /* Initialize the known label offsets.
3893 Set a known offset for each forced label to be at the initial offset
3894 of each elimination. We do this because we assume that all
3895 computed jumps occur from a location where each elimination is
3896 at its initial offset.
3897 For all other labels, show that we don't know the offsets. */
3899 static void
3900 set_initial_label_offsets (void)
3902 rtx x;
3903 memset (offsets_known_at, 0, num_labels);
3905 for (x = forced_labels; x; x = XEXP (x, 1))
3906 if (XEXP (x, 0))
3907 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
3909 for_each_eh_label (set_initial_eh_label_offset);
3912 /* Set all elimination offsets to the known values for the code label given
3913 by INSN. */
3915 static void
3916 set_offsets_for_label (rtx insn)
3918 unsigned int i;
3919 int label_nr = CODE_LABEL_NUMBER (insn);
3920 struct elim_table *ep;
3922 num_not_at_initial_offset = 0;
3923 for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++)
3925 ep->offset = ep->previous_offset
3926 = offsets_at[label_nr - first_label_num][i];
3927 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3928 num_not_at_initial_offset++;
/* See if anything that happened changes which eliminations are valid.
   For example, on the SPARC, whether or not the frame pointer can
   be eliminated can depend on what registers have been used.  We need
   not check some conditions again (such as flag_omit_frame_pointer)
   since they can't have changed.

   PSET is an out-parameter: the bit for each hard register that must now
   be spilled (because an elimination involving it became invalid) is set
   in *PSET.  Also recomputes frame_pointer_needed and num_eliminable.  */

static void
update_eliminables (HARD_REG_SET *pset)
{
  int previous_frame_pointer_needed = frame_pointer_needed;
  struct elim_table *ep;

  /* First pass: re-ask the target whether each elimination is still
     possible, and disable the ones that no longer are.  */
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    if ((ep->from == HARD_FRAME_POINTER_REGNUM
         && targetm.frame_pointer_required ())
#ifdef ELIMINABLE_REGS
        || ! targetm.can_eliminate (ep->from, ep->to)
#endif
        )
      ep->can_eliminate = 0;

  /* Look for the case where we have discovered that we can't replace
     register A with register B and that means that we will now be
     trying to replace register A with register C.  This means we can
     no longer replace register C with register B and we need to disable
     such an elimination, if it exists.  This occurs often with A == ap,
     B == sp, and C == fp.  */

  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      struct elim_table *op;
      int new_to = -1;

      if (! ep->can_eliminate && ep->can_eliminate_previous)
        {
          /* Find the current elimination for ep->from, if there is a
             new one.  */
          for (op = reg_eliminate;
               op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
            if (op->from == ep->from && op->can_eliminate)
              {
                new_to = op->to;
                break;
              }

          /* See if there is an elimination of NEW_TO -> EP->TO.  If so,
             disable it.  */
          for (op = reg_eliminate;
               op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
            if (op->from == new_to && op->to == ep->to)
              op->can_eliminate = 0;
        }
    }

  /* See if any registers that we thought we could eliminate the previous
     time are no longer eliminable.  If so, something has changed and we
     must spill the register.  Also, recompute the number of eliminable
     registers and see if the frame pointer is needed; it is if there is
     no elimination of the frame pointer that we can perform.  */

  frame_pointer_needed = 1;
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      /* An enabled elimination of the (soft) frame pointer into anything
         other than the hard frame pointer means no frame pointer is
         needed -- unless stack realignment forces one.  */
      if (ep->can_eliminate
          && ep->from == FRAME_POINTER_REGNUM
          && ep->to != HARD_FRAME_POINTER_REGNUM
          && (! SUPPORTS_STACK_ALIGNMENT
              || ! crtl->stack_realign_needed))
        frame_pointer_needed = 0;

      if (! ep->can_eliminate && ep->can_eliminate_previous)
        {
          ep->can_eliminate_previous = 0;
          /* Tell the caller this source register must be spilled.  */
          SET_HARD_REG_BIT (*pset, ep->from);
          num_eliminable--;
        }
    }

  /* If we didn't need a frame pointer last time, but we do now, spill
     the hard frame pointer.  */
  if (frame_pointer_needed && ! previous_frame_pointer_needed)
    SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM);
}
4016 /* Return true if X is used as the target register of an elimination. */
4018 bool
4019 elimination_target_reg_p (rtx x)
4021 struct elim_table *ep;
4023 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
4024 if (ep->to_rtx == x && ep->can_eliminate)
4025 return true;
4027 return false;
/* Initialize the table of registers to eliminate.
   Pre-condition: global flag frame_pointer_needed has been set before
   calling this function.

   Fills reg_eliminate from the target's elimination pairs, decides which
   eliminations start out enabled, counts them in num_eliminable, and
   builds the from/to REG rtx's used for substitution.  */

static void
init_elim_table (void)
{
  struct elim_table *ep;
#ifdef ELIMINABLE_REGS
  const struct elim_table_1 *ep1;
#endif

  /* Allocate the table lazily; it persists across functions.  */
  if (!reg_eliminate)
    reg_eliminate = XCNEWVEC (struct elim_table, NUM_ELIMINABLE_REGS);

  num_eliminable = 0;

#ifdef ELIMINABLE_REGS
  for (ep = reg_eliminate, ep1 = reg_eliminate_1;
       ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++, ep1++)
    {
      ep->from = ep1->from;
      ep->to = ep1->to;
      /* An elimination into the stack pointer is impossible when a frame
         pointer is needed, except when the frame is accessed via the
         frame pointer only because of stack realignment.  */
      ep->can_eliminate = ep->can_eliminate_previous
        = (targetm.can_eliminate (ep->from, ep->to)
           && ! (ep->to == STACK_POINTER_REGNUM
                 && frame_pointer_needed
                 && (! SUPPORTS_STACK_ALIGNMENT
                     || ! stack_realign_fp)));
    }
#else
  /* Without ELIMINABLE_REGS there is a single fp -> sp elimination.  */
  reg_eliminate[0].from = reg_eliminate_1[0].from;
  reg_eliminate[0].to = reg_eliminate_1[0].to;
  reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
    = ! frame_pointer_needed;
#endif

  /* Count the number of eliminable registers and build the FROM and TO
     REG rtx's.  Note that code in gen_rtx_REG will cause, e.g.,
     gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
     We depend on this.  */
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      num_eliminable += ep->can_eliminate;
      ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
      ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
    }
}
/* Find all the pseudo registers that didn't get hard regs
   but do have known equivalent constants or memory slots.
   These include parameters (known equivalent to parameter slots)
   and cse'd or loop-moved constant memory addresses.

   Record constant equivalents in reg_equiv_constant
   so they will be substituted by find_reloads.
   Record memory equivalents in reg_mem_equiv so they can
   be substituted eventually by altering the REG-rtx's.

   FIRST is the head of the insn list to scan.  If DO_SUBREGS is true,
   also find all paradoxical subregs and record the widest reference
   per pseudo in reg_max_ref_width.  */

static void
init_eliminable_invariants (rtx first, bool do_subregs)
{
  int i;
  rtx insn;

  reg_equiv_constant = XCNEWVEC (rtx, max_regno);
  reg_equiv_invariant = XCNEWVEC (rtx, max_regno);
  reg_equiv_mem = XCNEWVEC (rtx, max_regno);
  reg_equiv_alt_mem_list = XCNEWVEC (rtx, max_regno);
  reg_equiv_address = XCNEWVEC (rtx, max_regno);
  if (do_subregs)
    reg_max_ref_width = XCNEWVEC (unsigned int, max_regno);
  else
    reg_max_ref_width = NULL;

  num_eliminable_invariants = 0;

  first_label_num = get_first_label_num ();
  num_labels = max_label_num () - first_label_num;

  /* Allocate the tables used to store offset information at labels.  */
  offsets_known_at = XNEWVEC (char, num_labels);
  offsets_at = (HOST_WIDE_INT (*)[NUM_ELIMINABLE_REGS]) xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (HOST_WIDE_INT));

  /* Look for REG_EQUIV notes; record what each pseudo is equivalent
     to.  If DO_SUBREGS is true, also find all paradoxical subregs and
     find largest such for each pseudo.  FIRST is the head of the insn
     list.  */

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      rtx set = single_set (insn);

      /* We may introduce USEs that we want to remove at the end, so
         we'll mark them with QImode.  Make sure there are no
         previously-marked insns left by say regmove.  */
      if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
          && GET_MODE (insn) != VOIDmode)
        PUT_MODE (insn, VOIDmode);

      if (do_subregs && NONDEBUG_INSN_P (insn))
        scan_paradoxical_subregs (PATTERN (insn));

      if (set != 0 && REG_P (SET_DEST (set)))
        {
          rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
          rtx x;

          if (! note)
            continue;

          i = REGNO (SET_DEST (set));
          x = XEXP (note, 0);

          /* Hard and virtual registers are handled elsewhere.  */
          if (i <= LAST_VIRTUAL_REGISTER)
            continue;

          /* If flag_pic and we have constant, verify it's legitimate.  */
          if (!CONSTANT_P (x)
              || !flag_pic || LEGITIMATE_PIC_OPERAND_P (x))
            {
              /* It can happen that a REG_EQUIV note contains a MEM
                 that is not a legitimate memory operand.  As later
                 stages of reload assume that all addresses found
                 in the reg_equiv_* arrays were originally legitimate,
                 we ignore such REG_EQUIV notes.  */
              if (memory_operand (x, VOIDmode))
                {
                  /* Always unshare the equivalence, so we can
                     substitute into this insn without touching the
                     equivalence.  */
                  reg_equiv_memory_loc[i] = copy_rtx (x);
                }
              else if (function_invariant_p (x))
                {
                  if (GET_CODE (x) == PLUS)
                    {
                      /* This is PLUS of frame pointer and a constant,
                         and might be shared.  Unshare it.  */
                      reg_equiv_invariant[i] = copy_rtx (x);
                      num_eliminable_invariants++;
                    }
                  else if (x == frame_pointer_rtx || x == arg_pointer_rtx)
                    {
                      reg_equiv_invariant[i] = x;
                      num_eliminable_invariants++;
                    }
                  else if (LEGITIMATE_CONSTANT_P (x))
                    reg_equiv_constant[i] = x;
                  else
                    {
                      /* A constant the target cannot use directly: force
                         it into the constant pool and use the MEM.  */
                      reg_equiv_memory_loc[i]
                        = force_const_mem (GET_MODE (SET_DEST (set)), x);
                      if (! reg_equiv_memory_loc[i])
                        reg_equiv_init[i] = NULL_RTX;
                    }
                }
              else
                {
                  /* Not invariant and not a usable MEM: drop the
                     equivalence entirely.  */
                  reg_equiv_init[i] = NULL_RTX;
                  continue;
                }
            }
          else
            reg_equiv_init[i] = NULL_RTX;
        }
    }

  if (dump_file)
    for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
      if (reg_equiv_init[i])
        {
          fprintf (dump_file, "init_insns for %u: ", i);
          print_inline_rtx (dump_file, reg_equiv_init[i], 20);
          fprintf (dump_file, "\n");
        }
}
4208 /* Indicate that we no longer have known memory locations or constants.
4209 Free all data involved in tracking these. */
4211 static void
4212 free_reg_equiv (void)
4214 int i;
4216 if (reg_equiv_constant)
4217 free (reg_equiv_constant);
4218 if (reg_equiv_invariant)
4219 free (reg_equiv_invariant);
4220 reg_equiv_constant = 0;
4221 reg_equiv_invariant = 0;
4222 VEC_free (rtx, gc, reg_equiv_memory_loc_vec);
4223 reg_equiv_memory_loc = 0;
4225 if (offsets_known_at)
4226 free (offsets_known_at);
4227 if (offsets_at)
4228 free (offsets_at);
4229 offsets_at = 0;
4230 offsets_known_at = 0;
4232 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4233 if (reg_equiv_alt_mem_list[i])
4234 free_EXPR_LIST_list (&reg_equiv_alt_mem_list[i]);
4235 free (reg_equiv_alt_mem_list);
4237 free (reg_equiv_mem);
4238 free (reg_equiv_address);
/* Kick all pseudos out of hard register REGNO.

   If CANT_ELIMINATE is nonzero, it means that we are doing this spill
   because we found we can't eliminate some register.  In the case, no pseudos
   are allowed to be in the register, even if they are only in a block that
   doesn't require spill registers, unlike the case when we are spilling this
   hard reg to produce another spill register.

   The affected pseudos are recorded in the spilled_pseudos regset; the
   function itself returns nothing.  (An earlier comment claiming a
   nonzero return value was stale.)  */

static void
spill_hard_reg (unsigned int regno, int cant_eliminate)
{
  int i;

  if (cant_eliminate)
    {
      /* Never allow this register to be chosen as a spill reg again,
         and make sure the prologue/epilogue treat it as used.  */
      SET_HARD_REG_BIT (bad_spill_regs_global, regno);
      df_set_regs_ever_live (regno, true);
    }

  /* Spill every pseudo reg that was allocated to this reg
     or to something that overlaps this reg.  */

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    if (reg_renumber[i] >= 0
        /* The pseudo's hard-reg span [renumber, end) overlaps REGNO.  */
        && (unsigned int) reg_renumber[i] <= regno
        && end_hard_regno (PSEUDO_REGNO_MODE (i), reg_renumber[i]) > regno)
      SET_REGNO_REG_SET (&spilled_pseudos, i);
}
/* After find_reload_regs has been run for all insn that need reloads,
   and/or spill_hard_regs was called, this function is used to actually
   spill pseudo registers and try to reallocate them.  It also sets up the
   spill_regs array for use by choose_reload_regs.

   GLOBAL is nonzero when a global allocator (IRA) ran and reassignment
   may be retried.  Returns nonzero if anything changed that requires
   another reload pass.  */

static int
finish_spills (int global)
{
  struct insn_chain *chain;
  int something_changed = 0;
  unsigned i;
  reg_set_iterator rsi;

  /* Build the spill_regs array for the function.  */
  /* If there are some registers still to eliminate and one of the spill regs
     wasn't ever used before, additional stack space may have to be
     allocated to store this register.  Thus, we may have changed the offset
     between the stack and frame pointers, so mark that something has changed.

     One might think that we need only set VAL to 1 if this is a call-used
     register.  However, the set of registers that must be saved by the
     prologue is not identical to the call-used set.  For example, the
     register used by the call insn for the return PC is a call-used register,
     but must be saved by the prologue.  */

  n_spills = 0;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (TEST_HARD_REG_BIT (used_spill_regs, i))
      {
        spill_reg_order[i] = n_spills;
        spill_regs[n_spills++] = i;
        if (num_eliminable && ! df_regs_ever_live_p (i))
          something_changed = 1;
        df_set_regs_ever_live (i, true);
      }
    else
      spill_reg_order[i] = -1;

  EXECUTE_IF_SET_IN_REG_SET (&spilled_pseudos, FIRST_PSEUDO_REGISTER, i, rsi)
    if (! ira_conflicts_p || reg_renumber[i] >= 0)
      {
        /* Record the current hard register the pseudo is allocated to
           in pseudo_previous_regs so we avoid reallocating it to the
           same hard reg in a later pass.  */
        gcc_assert (reg_renumber[i] >= 0);

        SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]);
        /* Mark it as no longer having a hard register home.  */
        reg_renumber[i] = -1;
        if (ira_conflicts_p)
          /* Inform IRA about the change.  */
          ira_mark_allocation_change (i);
        /* We will need to scan everything again.  */
        something_changed = 1;
      }

  /* Retry global register allocation if possible.  */
  if (global && ira_conflicts_p)
    {
      unsigned int n;

      memset (pseudo_forbidden_regs, 0, max_regno * sizeof (HARD_REG_SET));
      /* For every insn that needs reloads, set the registers used as spill
         regs in pseudo_forbidden_regs for every pseudo live across the
         insn.  */
      for (chain = insns_need_reload; chain; chain = chain->next_need_reload)
        {
          EXECUTE_IF_SET_IN_REG_SET
            (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
            {
              IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
                                chain->used_spill_regs);
            }
          EXECUTE_IF_SET_IN_REG_SET
            (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
            {
              IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
                                chain->used_spill_regs);
            }
        }

      /* Retry allocating the pseudos spilled in IRA and the
         reload.  For each reg, merge the various reg sets that
         indicate which hard regs can't be used, and call
         ira_reassign_pseudos.  */
      for (n = 0, i = FIRST_PSEUDO_REGISTER; i < (unsigned) max_regno; i++)
        if (reg_old_renumber[i] != reg_renumber[i])
          {
            if (reg_renumber[i] < 0)
              temp_pseudo_reg_arr[n++] = i;
            else
              CLEAR_REGNO_REG_SET (&spilled_pseudos, i);
          }
      if (ira_reassign_pseudos (temp_pseudo_reg_arr, n,
                                bad_spill_regs_global,
                                pseudo_forbidden_regs, pseudo_previous_regs,
                                &spilled_pseudos))
        something_changed = 1;
    }
  /* Fix up the register information in the insn chain.
     This involves deleting those of the spilled pseudos which did not get
     a new hard register home from the live_{before,after} sets.  */
  for (chain = reload_insn_chain; chain; chain = chain->next)
    {
      HARD_REG_SET used_by_pseudos;
      HARD_REG_SET used_by_pseudos2;

      if (! ira_conflicts_p)
        {
          /* Don't do it for IRA because IRA and the reload still can
             assign hard registers to the spilled pseudos on next
             reload iterations.  */
          AND_COMPL_REG_SET (&chain->live_throughout, &spilled_pseudos);
          AND_COMPL_REG_SET (&chain->dead_or_set, &spilled_pseudos);
        }
      /* Mark any unallocated hard regs as available for spills.  That
         makes inheritance work somewhat better.  */
      if (chain->need_reload)
        {
          REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
          REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
          IOR_HARD_REG_SET (used_by_pseudos, used_by_pseudos2);

          compute_use_by_pseudos (&used_by_pseudos, &chain->live_throughout);
          compute_use_by_pseudos (&used_by_pseudos, &chain->dead_or_set);
          /* Value of chain->used_spill_regs from previous iteration
             may be not included in the value calculated here because
             of possible removing caller-saves insns (see function
             delete_caller_save_insns.  */
          COMPL_HARD_REG_SET (chain->used_spill_regs, used_by_pseudos);
          AND_HARD_REG_SET (chain->used_spill_regs, used_spill_regs);
        }
    }

  CLEAR_REG_SET (&changed_allocation_pseudos);
  /* Let alter_reg modify the reg rtx's for the modified pseudos.  */
  for (i = FIRST_PSEUDO_REGISTER; i < (unsigned) max_regno; i++)
    {
      int regno = reg_renumber[i];
      if (reg_old_renumber[i] == regno)
        continue;

      SET_REGNO_REG_SET (&changed_allocation_pseudos, i);

      alter_reg (i, reg_old_renumber[i], false);
      reg_old_renumber[i] = regno;
      if (dump_file)
        {
          if (regno == -1)
            fprintf (dump_file, " Register %d now on stack.\n\n", i);
          else
            fprintf (dump_file, " Register %d now in %d.\n\n",
                     i, reg_renumber[i]);
        }
    }

  return something_changed;
}
4431 /* Find all paradoxical subregs within X and update reg_max_ref_width. */
4433 static void
4434 scan_paradoxical_subregs (rtx x)
4436 int i;
4437 const char *fmt;
4438 enum rtx_code code = GET_CODE (x);
4440 switch (code)
4442 case REG:
4443 case CONST_INT:
4444 case CONST:
4445 case SYMBOL_REF:
4446 case LABEL_REF:
4447 case CONST_DOUBLE:
4448 case CONST_FIXED:
4449 case CONST_VECTOR: /* shouldn't happen, but just in case. */
4450 case CC0:
4451 case PC:
4452 case USE:
4453 case CLOBBER:
4454 return;
4456 case SUBREG:
4457 if (REG_P (SUBREG_REG (x))
4458 && (GET_MODE_SIZE (GET_MODE (x))
4459 > reg_max_ref_width[REGNO (SUBREG_REG (x))]))
4461 reg_max_ref_width[REGNO (SUBREG_REG (x))]
4462 = GET_MODE_SIZE (GET_MODE (x));
4463 mark_home_live_1 (REGNO (SUBREG_REG (x)), GET_MODE (x));
4465 return;
4467 default:
4468 break;
4471 fmt = GET_RTX_FORMAT (code);
4472 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4474 if (fmt[i] == 'e')
4475 scan_paradoxical_subregs (XEXP (x, i));
4476 else if (fmt[i] == 'E')
4478 int j;
4479 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4480 scan_paradoxical_subregs (XVECEXP (x, i, j));
/* A subroutine of reload_as_needed.  If INSN has a REG_EH_REGION note,
   examine all of the reload insns between PREV and NEXT exclusive, and
   annotate all that may trap.  */

static void
fixup_eh_region_note (rtx insn, rtx prev, rtx next)
{
  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (note == NULL)
    return;
  /* If reloading made INSN itself unable to throw, drop its note -- but
     NOTE has been detached, not freed, so it can still be used below to
     annotate the reload insns, which may trap.  */
  if (!insn_could_throw_p (insn))
    remove_note (insn, note);
  copy_reg_eh_region_note_forward (note, NEXT_INSN (prev), next);
}
/* Reload pseudo-registers into hard regs around each insn as needed.
   Additional register load insns are output before the insn that needs it
   and perhaps store insns after insns that modify the reloaded pseudo reg.

   reg_last_reload_reg and reg_reloaded_contents keep track of
   which registers are already available in reload registers.
   We update these for the reloads that we perform,
   as the insns are scanned.

   LIVE_KNOWN is nonzero when liveness information is trustworthy and is
   passed through to find_reloads.  */

static void
reload_as_needed (int live_known)
{
  struct insn_chain *chain;
#if defined (AUTO_INC_DEC)
  int i;
#endif
  rtx x;

  memset (spill_reg_rtx, 0, sizeof spill_reg_rtx);
  memset (spill_reg_store, 0, sizeof spill_reg_store);
  reg_last_reload_reg = XCNEWVEC (rtx, max_regno);
  INIT_REG_SET (&reg_has_output_reload);
  CLEAR_HARD_REG_SET (reg_reloaded_valid);
  CLEAR_HARD_REG_SET (reg_reloaded_call_part_clobbered);

  set_initial_elim_offsets ();

  for (chain = reload_insn_chain; chain; chain = chain->next)
    {
      rtx prev = 0;
      rtx insn = chain->insn;
      rtx old_next = NEXT_INSN (insn);
#ifdef AUTO_INC_DEC
      rtx old_prev = PREV_INSN (insn);
#endif

      /* If we pass a label, copy the offsets from the label information
         into the current offsets of each elimination.  */
      if (LABEL_P (insn))
        set_offsets_for_label (insn);

      else if (INSN_P (insn))
        {
          regset_head regs_to_forget;
          INIT_REG_SET (&regs_to_forget);
          note_stores (PATTERN (insn), forget_old_reloads_1, &regs_to_forget);

          /* If this is a USE and CLOBBER of a MEM, ensure that any
             references to eliminable registers have been removed.  */

          if ((GET_CODE (PATTERN (insn)) == USE
               || GET_CODE (PATTERN (insn)) == CLOBBER)
              && MEM_P (XEXP (PATTERN (insn), 0)))
            XEXP (XEXP (PATTERN (insn), 0), 0)
              = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
                                GET_MODE (XEXP (PATTERN (insn), 0)),
                                NULL_RTX);

          /* If we need to do register elimination processing, do so.
             This might delete the insn, in which case we are done.  */
          if ((num_eliminable || num_eliminable_invariants) && chain->need_elim)
            {
              eliminate_regs_in_insn (insn, 1);
              /* eliminate_regs_in_insn turns deleted insns into NOTEs.  */
              if (NOTE_P (insn))
                {
                  update_eliminable_offsets ();
                  CLEAR_REG_SET (&regs_to_forget);
                  continue;
                }
            }

          /* If need_elim is nonzero but need_reload is zero, one might think
             that we could simply set n_reloads to 0.  However, find_reloads
             could have done some manipulation of the insn (such as swapping
             commutative operands), and these manipulations are lost during
             the first pass for every insn that needs register elimination.
             So the actions of find_reloads must be redone here.  */

          if (! chain->need_elim && ! chain->need_reload
              && ! chain->need_operand_change)
            n_reloads = 0;
          /* First find the pseudo regs that must be reloaded for this insn.
             This info is returned in the tables reload_... (see reload.h).
             Also modify the body of INSN by substituting RELOAD
             rtx's for those pseudo regs.  */
          else
            {
              CLEAR_REG_SET (&reg_has_output_reload);
              CLEAR_HARD_REG_SET (reg_is_output_reload);

              find_reloads (insn, 1, spill_indirect_levels, live_known,
                            spill_reg_order);
            }

          if (n_reloads > 0)
            {
              rtx next = NEXT_INSN (insn);
              rtx p;

              prev = PREV_INSN (insn);

              /* Now compute which reload regs to reload them into.  Perhaps
                 reusing reload regs from previous insns, or else output
                 load insns to reload them.  Maybe output store insns too.
                 Record the choices of reload reg in reload_reg_rtx.  */
              choose_reload_regs (chain);

              /* Merge any reloads that we didn't combine for fear of
                 increasing the number of spill registers needed but now
                 discover can be safely merged.  */
              if (targetm.small_register_classes_for_mode_p (VOIDmode))
                merge_assigned_reloads (insn);

              /* Generate the insns to reload operands into or out of
                 their reload regs.  */
              emit_reload_insns (chain);

              /* Substitute the chosen reload regs from reload_reg_rtx
                 into the insn's body (or perhaps into the bodies of other
                 load and store insn that we just made for reloading
                 and that we moved the structure into).  */
              subst_reloads (insn);

              /* Adjust the exception region notes for loads and stores.  */
              if (cfun->can_throw_non_call_exceptions && !CALL_P (insn))
                fixup_eh_region_note (insn, prev, next);

              /* If this was an ASM, make sure that all the reload insns
                 we have generated are valid.  If not, give an error
                 and delete them.  */
              if (asm_noperands (PATTERN (insn)) >= 0)
                for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
                  if (p != insn && INSN_P (p)
                      && GET_CODE (PATTERN (p)) != USE
                      && (recog_memoized (p) < 0
                          || (extract_insn (p), ! constrain_operands (1))))
                    {
                      error_for_asm (insn,
                                     "%<asm%> operand requires "
                                     "impossible reload");
                      delete_insn (p);
                    }
            }

          if (num_eliminable && chain->need_elim)
            update_eliminable_offsets ();

          /* Any previously reloaded spilled pseudo reg, stored in this insn,
             is no longer validly lying around to save a future reload.
             Note that this does not detect pseudos that were reloaded
             for this insn in order to be stored in
             (obeying register constraints).  That is correct; such reload
             registers ARE still valid.  */
          forget_marked_reloads (&regs_to_forget);
          CLEAR_REG_SET (&regs_to_forget);

          /* There may have been CLOBBER insns placed after INSN.  So scan
             between INSN and NEXT and use them to forget old reloads.  */
          for (x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
            if (NONJUMP_INSN_P (x) && GET_CODE (PATTERN (x)) == CLOBBER)
              note_stores (PATTERN (x), forget_old_reloads_1, NULL);

#ifdef AUTO_INC_DEC
          /* Likewise for regs altered by auto-increment in this insn.
             REG_INC notes have been changed by reloading:
             find_reloads_address_1 records substitutions for them,
             which have been performed by subst_reloads above.  */
          for (i = n_reloads - 1; i >= 0; i--)
            {
              rtx in_reg = rld[i].in_reg;
              if (in_reg)
                {
                  enum rtx_code code = GET_CODE (in_reg);
                  /* PRE_INC / PRE_DEC will have the reload register ending up
                     with the same value as the stack slot, but that doesn't
                     hold true for POST_INC / POST_DEC.  Either we have to
                     convert the memory access to a true POST_INC / POST_DEC,
                     or we can't use the reload register for inheritance.  */
                  if ((code == POST_INC || code == POST_DEC)
                      && TEST_HARD_REG_BIT (reg_reloaded_valid,
                                            REGNO (rld[i].reg_rtx))
                      /* Make sure it is the inc/dec pseudo, and not
                         some other (e.g. output operand) pseudo.  */
                      && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
                          == REGNO (XEXP (in_reg, 0))))
                    {
                      rtx reload_reg = rld[i].reg_rtx;
                      enum machine_mode mode = GET_MODE (reload_reg);
                      int n = 0;
                      rtx p;

                      /* Scan backwards from the insn after INSN, looking
                         for a use of the reload reg we can convert into a
                         true post-inc/post-dec access.  */
                      for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p))
                        {
                          /* We really want to ignore REG_INC notes here, so
                             use PATTERN (p) as argument to reg_set_p .  */
                          if (reg_set_p (reload_reg, PATTERN (p)))
                            break;
                          n = count_occurrences (PATTERN (p), reload_reg, 0);
                          if (! n)
                            continue;
                          if (n == 1)
                            {
                              rtx replace_reg
                                = gen_rtx_fmt_e (code, mode, reload_reg);

                              validate_replace_rtx_group (reload_reg,
                                                          replace_reg, p);
                              n = verify_changes (0);

                              /* We must also verify that the constraints
                                 are met after the replacement.  Make sure
                                 extract_insn is only called for an insn
                                 where the replacements were found to be
                                 valid so far. */
                              if (n)
                                {
                                  extract_insn (p);
                                  n = constrain_operands (1);
                                }

                              /* If the constraints were not met, then
                                 undo the replacement, else confirm it.  */
                              if (!n)
                                cancel_changes (0);
                              else
                                confirm_change_group ();
                            }
                          break;
                        }
                      if (n == 1)
                        {
                          add_reg_note (p, REG_INC, reload_reg);
                          /* Mark this as having an output reload so that the
                             REG_INC processing code below won't invalidate
                             the reload for inheritance.  */
                          SET_HARD_REG_BIT (reg_is_output_reload,
                                            REGNO (reload_reg));
                          SET_REGNO_REG_SET (&reg_has_output_reload,
                                             REGNO (XEXP (in_reg, 0)));
                        }
                      else
                        forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX,
                                              NULL);
                    }
                  else if ((code == PRE_INC || code == PRE_DEC)
                           && TEST_HARD_REG_BIT (reg_reloaded_valid,
                                                 REGNO (rld[i].reg_rtx))
                           /* Make sure it is the inc/dec pseudo, and not
                              some other (e.g. output operand) pseudo.  */
                           && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
                               == REGNO (XEXP (in_reg, 0))))
                    {
                      SET_HARD_REG_BIT (reg_is_output_reload,
                                        REGNO (rld[i].reg_rtx));
                      SET_REGNO_REG_SET (&reg_has_output_reload,
                                         REGNO (XEXP (in_reg, 0)));
                    }
                  else if (code == PRE_INC || code == PRE_DEC
                           || code == POST_INC || code == POST_DEC)
                    {
                      int in_regno = REGNO (XEXP (in_reg, 0));

                      if (reg_last_reload_reg[in_regno] != NULL_RTX)
                        {
                          int in_hard_regno;
                          bool forget_p = true;

                          in_hard_regno = REGNO (reg_last_reload_reg[in_regno]);
                          if (TEST_HARD_REG_BIT (reg_reloaded_valid,
                                                 in_hard_regno))
                            {
                              for (x = old_prev ? NEXT_INSN (old_prev) : insn;
                                   x != old_next;
                                   x = NEXT_INSN (x))
                                if (x == reg_reloaded_insn[in_hard_regno])
                                  {
                                    forget_p = false;
                                    break;
                                  }
                            }
                          /* If for some reasons, we didn't set up
                             reg_last_reload_reg in this insn,
                             invalidate inheritance from previous
                             insns for the incremented/decremented
                             register.  Such registers will be not in
                             reg_has_output_reload.  Invalidate it
                             also if the corresponding element in
                             reg_reloaded_insn is also
                             invalidated.  */
                          if (forget_p)
                            forget_old_reloads_1 (XEXP (in_reg, 0),
                                                  NULL_RTX, NULL);
                        }
                    }
                }
            }
          /* If a pseudo that got a hard register is auto-incremented,
             we must purge records of copying it into pseudos without
             hard registers.  */
          for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
            if (REG_NOTE_KIND (x) == REG_INC)
              {
                /* See if this pseudo reg was reloaded in this insn.
                   If so, its last-reload info is still valid
                   because it is based on this insn's reload.  */
                for (i = 0; i < n_reloads; i++)
                  if (rld[i].out == XEXP (x, 0))
                    break;

                if (i == n_reloads)
                  forget_old_reloads_1 (XEXP (x, 0), NULL_RTX, NULL);
              }
#endif
        }
      /* A reload reg's contents are unknown after a label.  */
      if (LABEL_P (insn))
        CLEAR_HARD_REG_SET (reg_reloaded_valid);

      /* Don't assume a reload reg is still good after a call insn
         if it is a call-used reg, or if it contains a value that will
         be partially clobbered by the call.  */
      else if (CALL_P (insn))
        {
          AND_COMPL_HARD_REG_SET (reg_reloaded_valid, call_used_reg_set);
          AND_COMPL_HARD_REG_SET (reg_reloaded_valid, reg_reloaded_call_part_clobbered);
        }
    }

  /* Clean up.  */
  free (reg_last_reload_reg);
  CLEAR_REG_SET (&reg_has_output_reload);
}
/* Discard all record of any value reloaded from X,
   or reloaded in X from someplace else;
   unless X is an output reload reg of the current insn.

   X may be a hard reg (the reload reg)
   or it may be a pseudo reg that was reloaded from.

   When DATA is non-NULL just mark the registers in regset
   to be forgotten later.

   This has the signature required by note_stores callbacks; the second
   argument (the SET/CLOBBER rtx) is unused.  */

static void
forget_old_reloads_1 (rtx x, const_rtx ignored ATTRIBUTE_UNUSED,
                      void *data)
{
  unsigned int regno;
  unsigned int nr;
  regset regs = (regset) data;

  /* note_stores does give us subregs of hard regs,
     subreg_regno_offset requires a hard reg.  */
  while (GET_CODE (x) == SUBREG)
    {
      /* We ignore the subreg offset when calculating the regno,
         because we are using the entire underlying hard register
         below.  */
      x = SUBREG_REG (x);
    }

  if (!REG_P (x))
    return;

  regno = REGNO (x);

  if (regno >= FIRST_PSEUDO_REGISTER)
    nr = 1;
  else
    {
      unsigned int i;

      /* A hard reg in a wide mode occupies several consecutive regs;
         invalidate them all.  */
      nr = hard_regno_nregs[regno][GET_MODE (x)];
      /* Storing into a spilled-reg invalidates its contents.
         This can happen if a block-local pseudo is allocated to that reg
         and it wasn't spilled because this block's total need is 0.
         Then some insn might have an optional reload and use this reg.  */
      if (!regs)
        for (i = 0; i < nr; i++)
          /* But don't do this if the reg actually serves as an output
             reload reg in the current instruction.  */
          if (n_reloads == 0
              || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
            {
              CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
              spill_reg_store[regno + i] = 0;
            }
    }

  if (regs)
    /* Deferred mode: just record the registers; forget_marked_reloads
       will process them later.  */
    while (nr-- > 0)
      SET_REGNO_REG_SET (regs, regno + nr);
  else
    {
      /* Since value of X has changed,
         forget any value previously copied from it.  */

      while (nr-- > 0)
        /* But don't forget a copy if this is the output reload
           that establishes the copy's validity.  */
        if (n_reloads == 0
            || !REGNO_REG_SET_P (&reg_has_output_reload, regno + nr))
          reg_last_reload_reg[regno + nr] = 0;
    }
}
4907 /* Forget the reloads marked in regset by previous function. */
4908 static void
4909 forget_marked_reloads (regset regs)
4911 unsigned int reg;
4912 reg_set_iterator rsi;
4913 EXECUTE_IF_SET_IN_REG_SET (regs, 0, reg, rsi)
4915 if (reg < FIRST_PSEUDO_REGISTER
4916 /* But don't do this if the reg actually serves as an output
4917 reload reg in the current instruction. */
4918 && (n_reloads == 0
4919 || ! TEST_HARD_REG_BIT (reg_is_output_reload, reg)))
4921 CLEAR_HARD_REG_BIT (reg_reloaded_valid, reg);
4922 spill_reg_store[reg] = 0;
4924 if (n_reloads == 0
4925 || !REGNO_REG_SET_P (&reg_has_output_reload, reg))
4926 reg_last_reload_reg[reg] = 0;
/* The following HARD_REG_SETs indicate when each hard register is
   used for a reload of various parts of the current insn.  They are
   per-insn scratch state for choose_reload_regs and friends.  */

/* If reg is unavailable for all reloads.  */
static HARD_REG_SET reload_reg_unavailable;
/* If reg is in use as a reload reg for a RELOAD_OTHER reload.  */
static HARD_REG_SET reload_reg_used;
/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr;
/* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr_reload;
/* If reg is in use for a RELOAD_FOR_INSN reload.  */
static HARD_REG_SET reload_reg_used_in_insn;
/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_other_addr;

/* If reg is in use as a reload reg for any sort of reload.  */
static HARD_REG_SET reload_reg_used_at_all;

/* If reg is in use as an inherited reload.  We just mark the first register
   in the group.  */
static HARD_REG_SET reload_reg_used_for_inherit;

/* Records which hard regs are used in any way, either as explicit use or
   by being allocated to a pseudo during any point of the current insn.  */
static HARD_REG_SET reg_used_in_insn;
/* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
   TYPE.  MODE is used to indicate how many consecutive regs are
   actually used.  OPNUM selects the per-operand set for the operand-
   indexed reload types and is ignored for the others.  */

static void
mark_reload_reg_in_use (unsigned int regno, int opnum, enum reload_type type,
			enum machine_mode mode)
{
  unsigned int nregs = hard_regno_nregs[regno][mode];
  unsigned int i;

  for (i = regno; i < nregs + regno; i++)
    {
      switch (type)
	{
	case RELOAD_OTHER:
	  SET_HARD_REG_BIT (reload_reg_used, i);
	  break;

	case RELOAD_FOR_INPUT_ADDRESS:
	  SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
	  break;

	case RELOAD_FOR_INPADDR_ADDRESS:
	  SET_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
	  break;

	case RELOAD_FOR_OUTPUT_ADDRESS:
	  SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
	  break;

	case RELOAD_FOR_OUTADDR_ADDRESS:
	  SET_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
	  break;

	case RELOAD_FOR_OPERAND_ADDRESS:
	  SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
	  break;

	case RELOAD_FOR_OPADDR_ADDR:
	  SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
	  break;

	case RELOAD_FOR_OTHER_ADDRESS:
	  SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
	  break;

	case RELOAD_FOR_INPUT:
	  SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
	  break;

	case RELOAD_FOR_OUTPUT:
	  SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
	  break;

	case RELOAD_FOR_INSN:
	  SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
	  break;
	}

      /* Regardless of the specific class, the register is busy for
	 this insn as a whole.  */
      SET_HARD_REG_BIT (reload_reg_used_at_all, i);
    }
}
/* Similarly, but show REGNO is no longer in use for a reload.  */

static void
clear_reload_reg_in_use (unsigned int regno, int opnum,
			 enum reload_type type, enum machine_mode mode)
{
  unsigned int nregs = hard_regno_nregs[regno][mode];
  unsigned int start_regno, end_regno, r;
  int i;
  /* A complication is that for some reload types, inheritance might
     allow multiple reloads of the same types to share a reload register.
     We set check_opnum if we have to check only reloads with the same
     operand number, and check_any if we have to check all reloads.  */
  int check_opnum = 0;
  int check_any = 0;
  HARD_REG_SET *used_in_set;

  /* Select the usage set that mark_reload_reg_in_use updated for TYPE,
     and decide how much conflict checking the type requires.  */
  switch (type)
    {
    case RELOAD_OTHER:
      used_in_set = &reload_reg_used;
      break;

    case RELOAD_FOR_INPUT_ADDRESS:
      used_in_set = &reload_reg_used_in_input_addr[opnum];
      break;

    case RELOAD_FOR_INPADDR_ADDRESS:
      check_opnum = 1;
      used_in_set = &reload_reg_used_in_inpaddr_addr[opnum];
      break;

    case RELOAD_FOR_OUTPUT_ADDRESS:
      used_in_set = &reload_reg_used_in_output_addr[opnum];
      break;

    case RELOAD_FOR_OUTADDR_ADDRESS:
      check_opnum = 1;
      used_in_set = &reload_reg_used_in_outaddr_addr[opnum];
      break;

    case RELOAD_FOR_OPERAND_ADDRESS:
      used_in_set = &reload_reg_used_in_op_addr;
      break;

    case RELOAD_FOR_OPADDR_ADDR:
      check_any = 1;
      used_in_set = &reload_reg_used_in_op_addr_reload;
      break;

    case RELOAD_FOR_OTHER_ADDRESS:
      used_in_set = &reload_reg_used_in_other_addr;
      check_any = 1;
      break;

    case RELOAD_FOR_INPUT:
      used_in_set = &reload_reg_used_in_input[opnum];
      break;

    case RELOAD_FOR_OUTPUT:
      used_in_set = &reload_reg_used_in_output[opnum];
      break;

    case RELOAD_FOR_INSN:
      used_in_set = &reload_reg_used_in_insn;
      break;
    default:
      gcc_unreachable ();
    }
  /* We resolve conflicts with remaining reloads of the same type by
     excluding the intervals of reload registers by them from the
     interval of freed reload registers.  Since we only keep track of
     one set of interval bounds, we might have to exclude somewhat
     more than what would be necessary if we used a HARD_REG_SET here.
     But this should only happen very infrequently, so there should
     be no reason to worry about it.  */

  start_regno = regno;
  end_regno = regno + nregs;
  if (check_opnum || check_any)
    {
      for (i = n_reloads - 1; i >= 0; i--)
	{
	  if (rld[i].when_needed == type
	      && (check_any || rld[i].opnum == opnum)
	      && rld[i].reg_rtx)
	    {
	      unsigned int conflict_start = true_regnum (rld[i].reg_rtx);
	      unsigned int conflict_end
		= end_hard_regno (rld[i].mode, conflict_start);

	      /* If there is an overlap with the first to-be-freed register,
		 adjust the interval start.  */
	      if (conflict_start <= start_regno && conflict_end > start_regno)
		start_regno = conflict_end;
	      /* Otherwise, if there is a conflict with one of the other
		 to-be-freed registers, adjust the interval end.  */
	      if (conflict_start > start_regno && conflict_start < end_regno)
		end_regno = conflict_start;
	    }
	}
    }

  /* Clear only the part of [REGNO, REGNO+NREGS) that survived the
     interval shrinking above.  */
  for (r = start_regno; r < end_regno; r++)
    CLEAR_HARD_REG_BIT (*used_in_set, r);
}
/* 1 if reg REGNO is free as a reload reg for a reload of the sort
   specified by OPNUM and TYPE.  Each case of the switch encodes one
   row of the reload-type conflict matrix; see also reloads_conflict,
   which must stay in sync with this function.  */

static int
reload_reg_free_p (unsigned int regno, int opnum, enum reload_type type)
{
  int i;

  /* In use for a RELOAD_OTHER means it's not available for anything.  */
  if (TEST_HARD_REG_BIT (reload_reg_used, regno)
      || TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
    return 0;

  switch (type)
    {
    case RELOAD_OTHER:
      /* In use for anything means we can't use it for RELOAD_OTHER.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPUT:
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
	return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
	return 0;

      /* If it is used for some other input, can't use it.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* If it is used in a later operand's address, can't use it.  */
      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Can't use a register if it is used for an input address for this
	 operand or used as an input in an earlier one.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
	return 0;

      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPADDR_ADDRESS:
      /* Can't use a register if it is used for an input address
	 for this operand or used as an input in an earlier
	 one.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
	return 0;

      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* Can't use a register if it is used for an output address for this
	 operand or used as an output in this or a later operand.  Note
	 that multiple output operands are emitted in reverse order, so
	 the conflicting ones are those with lower indices.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
	return 0;

      for (i = 0; i <= opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OUTADDR_ADDRESS:
      /* Can't use a register if it is used for an output address
	 for this operand or used as an output in this or a
	 later operand.  Note that multiple output operands are
	 emitted in reverse order, so the conflicting ones are
	 those with lower indices.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
	return 0;

      for (i = 0; i <= opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OPERAND_ADDRESS:
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OPADDR_ADDR:
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));

    case RELOAD_FOR_OUTPUT:
      /* This cannot share a register with RELOAD_FOR_INSN reloads, other
	 outputs, or an operand address for this or an earlier output.
	 Note that multiple output operands are emitted in reverse order,
	 so the conflicting ones are those with higher indices.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INSN:
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OTHER_ADDRESS:
      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    default:
      gcc_unreachable ();
    }
}
/* Return 1 if the value in reload reg REGNO, as used by a reload
   needed for the part of the insn specified by OPNUM and TYPE,
   is still available in REGNO at the end of the insn.

   We can assume that the reload reg was already tested for availability
   at the time it is needed, and we should not check this again,
   in case the reg has already been marked in use.  */

static int
reload_reg_reaches_end_p (unsigned int regno, int opnum, enum reload_type type)
{
  int i;

  switch (type)
    {
    case RELOAD_OTHER:
      /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
	 its value must reach the end.  */
      return 1;

      /* If this use is for part of the insn,
	 its value reaches if no subsequent part uses the same register.
	 Just like the above function, don't try to do this with lots
	 of fallthroughs.  */

    case RELOAD_FOR_OTHER_ADDRESS:
      /* Here we check for everything else, since these don't conflict
	 with anything else and everything comes later.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INPUT_ADDRESS:
    case RELOAD_FOR_INPADDR_ADDRESS:
      /* Similar, except that we check only for this and subsequent inputs
	 and the address of only subsequent inputs and we do not need
	 to check for RELOAD_OTHER objects since they are known not to
	 conflict.  */

      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
	  return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
	return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INPUT:
      /* Similar to input address, except we start at the next operand for
	 both input and input address and we do not check for
	 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
	 would conflict.  */

      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* ... fall through ...  */

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Check outputs and their addresses.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_OPADDR_ADDR:
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INSN:
      /* These conflict with other outputs with RELOAD_OTHER.  So
	 we need only check for output addresses.  */

      opnum = reload_n_operands;

      /* ... fall through ...  */

    case RELOAD_FOR_OUTPUT:
    case RELOAD_FOR_OUTPUT_ADDRESS:
    case RELOAD_FOR_OUTADDR_ADDRESS:
      /* We already know these can't conflict with a later output.  So the
	 only thing to check are later output addresses.
	 Note that multiple output operands are emitted in reverse order,
	 so the conflicting ones are those with lower indices.  */
      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
	  return 0;

      return 1;

    default:
      gcc_unreachable ();
    }
}
5436 /* Like reload_reg_reaches_end_p, but check that the condition holds for
5437 every register in the range [REGNO, REGNO + NREGS). */
5439 static bool
5440 reload_regs_reach_end_p (unsigned int regno, int nregs,
5441 int opnum, enum reload_type type)
5443 int i;
5445 for (i = 0; i < nregs; i++)
5446 if (!reload_reg_reaches_end_p (regno + i, opnum, type))
5447 return false;
5448 return true;
/* Returns whether R1 and R2 are uniquely chained: the value of one
   is used by the other, and that value is not used by any other
   reload for this insn.  This is used to partially undo the decision
   made in find_reloads when in the case of multiple
   RELOAD_FOR_OPERAND_ADDRESS reloads it converts all
   RELOAD_FOR_OPADDR_ADDR reloads into RELOAD_FOR_OPERAND_ADDRESS
   reloads.  This code tries to avoid the conflict created by that
   change.  It might be cleaner to explicitly keep track of which
   RELOAD_FOR_OPADDR_ADDR reload is associated with which
   RELOAD_FOR_OPERAND_ADDRESS reload, rather than to try to detect
   this after the fact.  */
static bool
reloads_unique_chain_p (int r1, int r2)
{
  int i;

  /* We only check input reloads.  */
  if (! rld[r1].in || ! rld[r2].in)
    return false;

  /* Avoid anything with output reloads.  */
  if (rld[r1].out || rld[r2].out)
    return false;

  /* "chained" means one reload is a component of the other reload,
     not the same as the other reload.  */
  if (rld[r1].opnum != rld[r2].opnum
      || rtx_equal_p (rld[r1].in, rld[r2].in)
      || rld[r1].optional || rld[r2].optional
      || ! (reg_mentioned_p (rld[r1].in, rld[r2].in)
	    || reg_mentioned_p (rld[r2].in, rld[r1].in)))
    return false;

  /* Reject the chain if R1's input also appears in any third input
     reload, since then the value is not used exclusively by R2.  */
  for (i = 0; i < n_reloads; i ++)
    /* Look for input reloads that aren't our two.  */
    if (i != r1 && i != r2 && rld[i].in)
      {
	/* If our reload is mentioned at all, it isn't a simple chain.  */
	if (reg_mentioned_p (rld[r1].in, rld[i].in))
	  return false;
      }
  return true;
}
/* The recursive function change all occurrences of WHAT in *WHERE
   to REPL.  Every changed location is pushed on substitute_stack so
   the caller can undo the substitutions later (see
   gen_reload_chain_without_interm_reg_p).  */
static void
substitute (rtx *where, const_rtx what, rtx repl)
{
  const char *fmt;
  int i;
  enum rtx_code code;

  if (*where == 0)
    return;

  if (*where == what || rtx_equal_p (*where, what))
    {
      /* Record the location of the changed rtx.  */
      VEC_safe_push (rtx_p, heap, substitute_stack, where);
      *where = repl;
      return;
    }

  /* Recurse into every rtx ('e') and rtx-vector ('E') operand.  */
  code = GET_CODE (*where);
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  int j;

	  for (j = XVECLEN (*where, i) - 1; j >= 0; j--)
	    substitute (&XVECEXP (*where, i, j), what, repl);
	}
      else if (fmt[i] == 'e')
	substitute (&XEXP (*where, i), what, repl);
    }
}
/* The function returns TRUE if chain of reload R1 and R2 (in any
   order) can be evaluated without usage of intermediate register for
   the reload containing another reload.  It is important to see
   gen_reload to understand what the function is trying to do.  As an
   example, let us have reload chain

      r2: const
      r1: <something> + const

   and reload R2 got reload reg HR.  The function returns true if
   there is a correct insn HR = HR + <something>.  Otherwise,
   gen_reload will use intermediate register (and this is the reload
   reg for R1) to reload <something>.

   We need this function to find a conflict for chain reloads.  In our
   example, if HR = HR + <something> is incorrect insn, then we cannot
   use HR as a reload register for R2.  If we do use it then we get a
   wrong code:

      HR = const
      HR = <something>
      HR = HR + HR
*/
static bool
gen_reload_chain_without_interm_reg_p (int r1, int r2)
{
  /* Assume other cases in gen_reload are not possible for
     chain reloads or do need an intermediate hard registers.  */
  bool result = true;
  int regno, n, code;
  rtx out, in, tem, insn;
  rtx last = get_last_insn ();

  /* Make r2 a component of r1.  */
  if (reg_mentioned_p (rld[r1].in, rld[r2].in))
    {
      n = r1;
      r1 = r2;
      r2 = n;
    }
  gcc_assert (reg_mentioned_p (rld[r2].in, rld[r1].in));
  regno = rld[r1].regno >= 0 ? rld[r1].regno : rld[r2].regno;
  gcc_assert (regno >= 0);
  out = gen_rtx_REG (rld[r1].mode, regno);
  in = rld[r1].in;
  /* Replace R2's value inside R1's input with the hard reg; the
     changed locations are recorded on substitute_stack and undone
     below.  */
  substitute (&in, rld[r2].in, gen_rtx_REG (rld[r2].mode, regno));

  /* If IN is a paradoxical SUBREG, remove it and try to put the
     opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.  */
  if (GET_CODE (in) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (in))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
      && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
    in = SUBREG_REG (in), out = tem;

  if (GET_CODE (in) == PLUS
      && (REG_P (XEXP (in, 0))
	  || GET_CODE (XEXP (in, 0)) == SUBREG
	  || MEM_P (XEXP (in, 0)))
      && (REG_P (XEXP (in, 1))
	  || GET_CODE (XEXP (in, 1)) == SUBREG
	  || CONSTANT_P (XEXP (in, 1))
	  || MEM_P (XEXP (in, 1))))
    {
      /* Trial-emit the combined insn and ask the recognizer whether it
	 is valid; the trial insns are deleted again below.  */
      insn = emit_insn (gen_rtx_SET (VOIDmode, out, in));
      code = recog_memoized (insn);
      result = false;

      if (code >= 0)
	{
	  extract_insn (insn);
	  /* We want constrain operands to treat this insn strictly in
	     its validity determination, i.e., the way it would after
	     reload has completed.  */
	  result = constrain_operands (1);
	}

      delete_insns_since (last);
    }

  /* Restore the original value at each changed address within R1.  */
  while (!VEC_empty (rtx_p, substitute_stack))
    {
      rtx *where = VEC_pop (rtx_p, substitute_stack);
      *where = rld[r2].in;
    }

  return result;
}
/* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
   Return 0 otherwise.

   This function uses the same algorithm as reload_reg_free_p above.
   Keep the two in sync: each case here mirrors a case of that
   function's conflict matrix.  */

static int
reloads_conflict (int r1, int r2)
{
  enum reload_type r1_type = rld[r1].when_needed;
  enum reload_type r2_type = rld[r2].when_needed;
  int r1_opnum = rld[r1].opnum;
  int r2_opnum = rld[r2].opnum;

  /* RELOAD_OTHER conflicts with everything.  */
  if (r2_type == RELOAD_OTHER)
    return 1;

  /* Otherwise, check conflicts differently for each type.  */

  switch (r1_type)
    {
    case RELOAD_FOR_INPUT:
      return (r2_type == RELOAD_FOR_INSN
	      || r2_type == RELOAD_FOR_OPERAND_ADDRESS
	      || r2_type == RELOAD_FOR_OPADDR_ADDR
	      || r2_type == RELOAD_FOR_INPUT
	      || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
		   || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
		  && r2_opnum > r1_opnum));

    case RELOAD_FOR_INPUT_ADDRESS:
      return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
	      || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));

    case RELOAD_FOR_INPADDR_ADDRESS:
      return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
	      || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));

    case RELOAD_FOR_OUTPUT_ADDRESS:
      return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
	      || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));

    case RELOAD_FOR_OUTADDR_ADDRESS:
      return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
	      || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));

    case RELOAD_FOR_OPERAND_ADDRESS:
      return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
	      || (r2_type == RELOAD_FOR_OPERAND_ADDRESS
		  && (!reloads_unique_chain_p (r1, r2)
		      || !gen_reload_chain_without_interm_reg_p (r1, r2))));

    case RELOAD_FOR_OPADDR_ADDR:
      return (r2_type == RELOAD_FOR_INPUT
	      || r2_type == RELOAD_FOR_OPADDR_ADDR);

    case RELOAD_FOR_OUTPUT:
      return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
	      || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
		   || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
		  && r2_opnum >= r1_opnum));

    case RELOAD_FOR_INSN:
      return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
	      || r2_type == RELOAD_FOR_INSN
	      || r2_type == RELOAD_FOR_OPERAND_ADDRESS);

    case RELOAD_FOR_OTHER_ADDRESS:
      return r2_type == RELOAD_FOR_OTHER_ADDRESS;

    case RELOAD_OTHER:
      return 1;

    default:
      gcc_unreachable ();
    }
}
/* Indexed by reload number, 1 if incoming value
   inherited from previous insns.  */
static char reload_inherited[MAX_RELOADS];

/* For an inherited reload, this is the insn the reload was inherited from,
   if we know it.  Otherwise, this is 0.  */
static rtx reload_inheritance_insn[MAX_RELOADS];

/* If nonzero, this is a place to get the value of the reload,
   rather than using reload_in.  */
static rtx reload_override_in[MAX_RELOADS];

/* For each reload, the hard register number of the register used,
   or -1 if we did not need a register for this reload.  */
static int reload_spill_index[MAX_RELOADS];

/* Index X is the value of rld[X].reg_rtx, adjusted for the input mode.  */
static rtx reload_reg_rtx_for_input[MAX_RELOADS];

/* Index X is the value of rld[X].reg_rtx, adjusted for the output mode.  */
static rtx reload_reg_rtx_for_output[MAX_RELOADS];
/* Subroutine of free_for_value_p, used to check a single register.
   START_REGNO is the starting regno of the full reload register
   (possibly comprising multiple hard registers) that we are considering.
   REGNO is the single hard register to check.  VALUE is the value to be
   loaded; OUT is the reload's output, or const0_rtx to indicate the
   reload is a plain register-to-register copy.  RELOADNUM is the index
   of the reload under consideration (that reload itself is skipped in
   the conflict scan).  IGNORE_ADDRESS_RELOADS nonzero means address
   reloads that would disappear through inheritance may be ignored.  */

static int
reload_reg_free_for_value_p (int start_regno, int regno, int opnum,
			     enum reload_type type, rtx value, rtx out,
			     int reloadnum, int ignore_address_reloads)
{
  int time1;
  /* Set if we see an input reload that must not share its reload register
     with any new earlyclobber, but might otherwise share the reload
     register with an output or input-output reload.  */
  int check_earlyclobber = 0;
  int i;
  int copy = 0;

  if (TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
    return 0;

  if (out == const0_rtx)
    {
      copy = 1;
      out = NULL_RTX;
    }

  /* We use some pseudo 'time' value to check if the lifetimes of the
     new register use would overlap with the one of a previous reload
     that is not read-only or uses a different value.
     The 'time' used doesn't have to be linear in any shape or form, just
     monotonic.
     Some reload types use different 'buckets' for each operand.
     So there are MAX_RECOG_OPERANDS different time values for each
     such reload type.
     We compute TIME1 as the time when the register for the prospective
     new reload ceases to be live, and TIME2 for each existing
     reload as the time when that the reload register of that reload
     becomes live.
     Where there is little to be gained by exact lifetime calculations,
     we just make conservative assumptions, i.e. a longer lifetime;
     this is done in the 'default:' cases.  */
  switch (type)
    {
    case RELOAD_FOR_OTHER_ADDRESS:
      /* RELOAD_FOR_OTHER_ADDRESS conflicts with RELOAD_OTHER reloads.  */
      time1 = copy ? 0 : 1;
      break;
    case RELOAD_OTHER:
      time1 = copy ? 1 : MAX_RECOG_OPERANDS * 5 + 5;
      break;
      /* For each input, we may have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
	 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT.  By adding 0 / 1 / 2 ,
	 respectively, to the time values for these, we get distinct time
	 values.  To get distinct time values for each operand, we have to
	 multiply opnum by at least three.  We round that up to four because
	 multiply by four is often cheaper.  */
    case RELOAD_FOR_INPADDR_ADDRESS:
      time1 = opnum * 4 + 2;
      break;
    case RELOAD_FOR_INPUT_ADDRESS:
      time1 = opnum * 4 + 3;
      break;
    case RELOAD_FOR_INPUT:
      /* All RELOAD_FOR_INPUT reloads remain live till the instruction
	 executes (inclusive).  */
      time1 = copy ? opnum * 4 + 4 : MAX_RECOG_OPERANDS * 4 + 3;
      break;
    case RELOAD_FOR_OPADDR_ADDR:
      /* opnum * 4 + 4
	 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
      time1 = MAX_RECOG_OPERANDS * 4 + 1;
      break;
    case RELOAD_FOR_OPERAND_ADDRESS:
      /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
	 is executed.  */
      time1 = copy ? MAX_RECOG_OPERANDS * 4 + 2 : MAX_RECOG_OPERANDS * 4 + 3;
      break;
    case RELOAD_FOR_OUTADDR_ADDRESS:
      time1 = MAX_RECOG_OPERANDS * 4 + 4 + opnum;
      break;
    case RELOAD_FOR_OUTPUT_ADDRESS:
      time1 = MAX_RECOG_OPERANDS * 4 + 5 + opnum;
      break;
    default:
      time1 = MAX_RECOG_OPERANDS * 5 + 5;
    }

  /* Scan all other reloads whose reload register overlaps REGNO and
     compare lifetimes.  */
  for (i = 0; i < n_reloads; i++)
    {
      rtx reg = rld[i].reg_rtx;
      if (reg && REG_P (reg)
	  && ((unsigned) regno - true_regnum (reg)
	      <= hard_regno_nregs[REGNO (reg)][GET_MODE (reg)] - (unsigned) 1)
	  && i != reloadnum)
	{
	  rtx other_input = rld[i].in;

	  /* If the other reload loads the same input value, that
	     will not cause a conflict only if it's loading it into
	     the same register.  */
	  if (true_regnum (reg) != start_regno)
	    other_input = NULL_RTX;
	  if (! other_input || ! rtx_equal_p (other_input, value)
	      || rld[i].out || out)
	    {
	      int time2;
	      switch (rld[i].when_needed)
		{
		case RELOAD_FOR_OTHER_ADDRESS:
		  time2 = 0;
		  break;
		case RELOAD_FOR_INPADDR_ADDRESS:
		  /* find_reloads makes sure that a
		     RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is only used
		     by at most one - the first -
		     RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS .  If the
		     address reload is inherited, the address address reload
		     goes away, so we can ignore this conflict.  */
		  if (type == RELOAD_FOR_INPUT_ADDRESS && reloadnum == i + 1
		      && ignore_address_reloads
		      /* Unless the RELOAD_FOR_INPUT is an auto_inc expression.
			 Then the address address is still needed to store
			 back the new address.  */
		      && ! rld[reloadnum].out)
		    continue;
		  /* Likewise, if a RELOAD_FOR_INPUT can inherit a value, its
		     RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS
		     reloads go away.  */
		  if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
		      && ignore_address_reloads
		      /* Unless we are reloading an auto_inc expression.  */
		      && ! rld[reloadnum].out)
		    continue;
		  time2 = rld[i].opnum * 4 + 2;
		  break;
		case RELOAD_FOR_INPUT_ADDRESS:
		  if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
		      && ignore_address_reloads
		      && ! rld[reloadnum].out)
		    continue;
		  time2 = rld[i].opnum * 4 + 3;
		  break;
		case RELOAD_FOR_INPUT:
		  time2 = rld[i].opnum * 4 + 4;
		  check_earlyclobber = 1;
		  break;
		  /* rld[i].opnum * 4 + 4 <= (MAX_RECOG_OPERAND - 1) * 4 + 4
		     == MAX_RECOG_OPERAND * 4  */
		case RELOAD_FOR_OPADDR_ADDR:
		  if (type == RELOAD_FOR_OPERAND_ADDRESS && reloadnum == i + 1
		      && ignore_address_reloads
		      && ! rld[reloadnum].out)
		    continue;
		  time2 = MAX_RECOG_OPERANDS * 4 + 1;
		  break;
		case RELOAD_FOR_OPERAND_ADDRESS:
		  time2 = MAX_RECOG_OPERANDS * 4 + 2;
		  check_earlyclobber = 1;
		  break;
		case RELOAD_FOR_INSN:
		  time2 = MAX_RECOG_OPERANDS * 4 + 3;
		  break;
		case RELOAD_FOR_OUTPUT:
		  /* All RELOAD_FOR_OUTPUT reloads become live just after the
		     instruction is executed.  */
		  time2 = MAX_RECOG_OPERANDS * 4 + 4;
		  break;
		  /* The first RELOAD_FOR_OUTADDR_ADDRESS reload conflicts with
		     the RELOAD_FOR_OUTPUT reloads, so assign it the same time
		     value.  */
		case RELOAD_FOR_OUTADDR_ADDRESS:
		  if (type == RELOAD_FOR_OUTPUT_ADDRESS && reloadnum == i + 1
		      && ignore_address_reloads
		      && ! rld[reloadnum].out)
		    continue;
		  time2 = MAX_RECOG_OPERANDS * 4 + 4 + rld[i].opnum;
		  break;
		case RELOAD_FOR_OUTPUT_ADDRESS:
		  time2 = MAX_RECOG_OPERANDS * 4 + 5 + rld[i].opnum;
		  break;
		case RELOAD_OTHER:
		  /* If there is no conflict in the input part, handle this
		     like an output reload.  */
		  if (! rld[i].in || rtx_equal_p (other_input, value))
		    {
		      time2 = MAX_RECOG_OPERANDS * 4 + 4;
		      /* Earlyclobbered outputs must conflict with inputs.  */
		      if (earlyclobber_operand_p (rld[i].out))
			time2 = MAX_RECOG_OPERANDS * 4 + 3;

		      break;
		    }
		  time2 = 1;
		  /* RELOAD_OTHER might be live beyond instruction execution,
		     but this is not obvious when we set time2 = 1.  So check
		     here if there might be a problem with the new reload
		     clobbering the register used by the RELOAD_OTHER.  */
		  if (out)
		    return 0;
		  break;
		default:
		  return 0;
		}
	      if ((time1 >= time2
		   && (! rld[i].in || rld[i].out
		       || ! rtx_equal_p (other_input, value)))
		  || (out && rld[reloadnum].out_reg
		      && time2 >= MAX_RECOG_OPERANDS * 4 + 3))
		return 0;
	    }
	}
    }

  /* Earlyclobbered outputs must conflict with inputs.  */
  if (check_earlyclobber && out && earlyclobber_operand_p (out))
    return 0;

  return 1;
}
5943 /* Return 1 if the value in reload reg REGNO, as used by a reload
5944 needed for the part of the insn specified by OPNUM and TYPE,
5945 may be used to load VALUE into it.
5947 MODE is the mode in which the register is used, this is needed to
5948 determine how many hard regs to test.
5950 Other read-only reloads with the same value do not conflict
5951 unless OUT is nonzero and these other reloads have to live while
5952 output reloads live.
5953 If OUT is CONST0_RTX, this is a special case: it means that the
5954 test should not be for using register REGNO as reload register, but
5955 for copying from register REGNO into the reload register.
5957 RELOADNUM is the number of the reload we want to load this value for;
5958 a reload does not conflict with itself.
5960 When IGNORE_ADDRESS_RELOADS is set, we can not have conflicts with
5961 reloads that load an address for the very reload we are considering.
5963 The caller has to make sure that there is no conflict with the return
5964 register. */
5966 static int
5967 free_for_value_p (int regno, enum machine_mode mode, int opnum,
5968 enum reload_type type, rtx value, rtx out, int reloadnum,
5969 int ignore_address_reloads)
5971 int nregs = hard_regno_nregs[regno][mode];
5972 while (nregs-- > 0)
5973 if (! reload_reg_free_for_value_p (regno, regno + nregs, opnum, type,
5974 value, out, reloadnum,
5975 ignore_address_reloads))
5976 return 0;
5977 return 1;
5980 /* Return nonzero if the rtx X is invariant over the current function. */
5981 /* ??? Actually, the places where we use this expect exactly what is
5982 tested here, and not everything that is function invariant. In
5983 particular, the frame pointer and arg pointer are special cased;
5984 pic_offset_table_rtx is not, and we must not spill these things to
5985 memory. */
5988 function_invariant_p (const_rtx x)
5990 if (CONSTANT_P (x))
5991 return 1;
5992 if (x == frame_pointer_rtx || x == arg_pointer_rtx)
5993 return 1;
5994 if (GET_CODE (x) == PLUS
5995 && (XEXP (x, 0) == frame_pointer_rtx || XEXP (x, 0) == arg_pointer_rtx)
5996 && GET_CODE (XEXP (x, 1)) == CONST_INT)
5997 return 1;
5998 return 0;
6001 /* Determine whether the reload reg X overlaps any rtx'es used for
6002 overriding inheritance. Return nonzero if so. */
6004 static int
6005 conflicts_with_override (rtx x)
6007 int i;
6008 for (i = 0; i < n_reloads; i++)
6009 if (reload_override_in[i]
6010 && reg_overlap_mentioned_p (x, reload_override_in[i]))
6011 return 1;
6012 return 0;
6015 /* Give an error message saying we failed to find a reload for INSN,
6016 and clear out reload R. */
6017 static void
6018 failed_reload (rtx insn, int r)
6020 if (asm_noperands (PATTERN (insn)) < 0)
6021 /* It's the compiler's fault. */
6022 fatal_insn ("could not find a spill register", insn);
6024 /* It's the user's fault; the operand's mode and constraint
6025 don't match. Disable this reload so we don't crash in final. */
6026 error_for_asm (insn,
6027 "%<asm%> operand constraint incompatible with operand size");
6028 rld[r].in = 0;
6029 rld[r].out = 0;
6030 rld[r].reg_rtx = 0;
6031 rld[r].optional = 1;
6032 rld[r].secondary_p = 1;
6035 /* I is the index in SPILL_REG_RTX of the reload register we are to allocate
6036 for reload R. If it's valid, get an rtx for it. Return nonzero if
6037 successful. */
6038 static int
6039 set_reload_reg (int i, int r)
6041 int regno;
6042 rtx reg = spill_reg_rtx[i];
6044 if (reg == 0 || GET_MODE (reg) != rld[r].mode)
6045 spill_reg_rtx[i] = reg
6046 = gen_rtx_REG (rld[r].mode, spill_regs[i]);
6048 regno = true_regnum (reg);
6050 /* Detect when the reload reg can't hold the reload mode.
6051 This used to be one `if', but Sequent compiler can't handle that. */
6052 if (HARD_REGNO_MODE_OK (regno, rld[r].mode))
6054 enum machine_mode test_mode = VOIDmode;
6055 if (rld[r].in)
6056 test_mode = GET_MODE (rld[r].in);
6057 /* If rld[r].in has VOIDmode, it means we will load it
6058 in whatever mode the reload reg has: to wit, rld[r].mode.
6059 We have already tested that for validity. */
6060 /* Aside from that, we need to test that the expressions
6061 to reload from or into have modes which are valid for this
6062 reload register. Otherwise the reload insns would be invalid. */
6063 if (! (rld[r].in != 0 && test_mode != VOIDmode
6064 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
6065 if (! (rld[r].out != 0
6066 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (rld[r].out))))
6068 /* The reg is OK. */
6069 last_spill_reg = i;
6071 /* Mark as in use for this insn the reload regs we use
6072 for this. */
6073 mark_reload_reg_in_use (spill_regs[i], rld[r].opnum,
6074 rld[r].when_needed, rld[r].mode);
6076 rld[r].reg_rtx = reg;
6077 reload_spill_index[r] = spill_regs[i];
6078 return 1;
6081 return 0;
6084 /* Find a spill register to use as a reload register for reload R.
6085 LAST_RELOAD is nonzero if this is the last reload for the insn being
6086 processed.
6088 Set rld[R].reg_rtx to the register allocated.
6090 We return 1 if successful, or 0 if we couldn't find a spill reg and
6091 we didn't change anything. */
6093 static int
6094 allocate_reload_reg (struct insn_chain *chain ATTRIBUTE_UNUSED, int r,
6095 int last_reload)
6097 int i, pass, count;
6099 /* If we put this reload ahead, thinking it is a group,
6100 then insist on finding a group. Otherwise we can grab a
6101 reg that some other reload needs.
6102 (That can happen when we have a 68000 DATA_OR_FP_REG
6103 which is a group of data regs or one fp reg.)
6104 We need not be so restrictive if there are no more reloads
6105 for this insn.
6107 ??? Really it would be nicer to have smarter handling
6108 for that kind of reg class, where a problem like this is normal.
6109 Perhaps those classes should be avoided for reloading
6110 by use of more alternatives. */
6112 int force_group = rld[r].nregs > 1 && ! last_reload;
6114 /* If we want a single register and haven't yet found one,
6115 take any reg in the right class and not in use.
6116 If we want a consecutive group, here is where we look for it.
6118 We use three passes so we can first look for reload regs to
6119 reuse, which are already in use for other reloads in this insn,
6120 and only then use additional registers which are not "bad", then
6121 finally any register.
6123 I think that maximizing reuse is needed to make sure we don't
6124 run out of reload regs. Suppose we have three reloads, and
6125 reloads A and B can share regs. These need two regs.
6126 Suppose A and B are given different regs.
6127 That leaves none for C. */
/* Concretely: pass 0 only accepts registers already used in this insn
   (but not for inheritance); pass 1 additionally skips registers that
   ira_bad_reload_regno flags as bad for this reload; pass 2 accepts
   any otherwise-suitable register.  */
6128 for (pass = 0; pass < 3; pass++)
6130 /* I is the index in spill_regs.
6131 We advance it round-robin between insns to use all spill regs
6132 equally, so that inherited reloads have a chance
6133 of leapfrogging each other. */
6135 i = last_spill_reg;
6137 for (count = 0; count < n_spills; count++)
6139 int rclass = (int) rld[r].rclass;
6140 int regnum;
/* Advance the round-robin index, wrapping back to 0 past n_spills.  */
6142 i++;
6143 if (i >= n_spills)
6144 i -= n_spills;
6145 regnum = spill_regs[i];
/* A candidate must be free for this reload (or hold the same input
   value), belong to the reload's class, and support its mode.  */
6147 if ((reload_reg_free_p (regnum, rld[r].opnum,
6148 rld[r].when_needed)
6149 || (rld[r].in
6150 /* We check reload_reg_used to make sure we
6151 don't clobber the return register. */
6152 && ! TEST_HARD_REG_BIT (reload_reg_used, regnum)
6153 && free_for_value_p (regnum, rld[r].mode, rld[r].opnum,
6154 rld[r].when_needed, rld[r].in,
6155 rld[r].out, r, 1)))
6156 && TEST_HARD_REG_BIT (reg_class_contents[rclass], regnum)
6157 && HARD_REGNO_MODE_OK (regnum, rld[r].mode)
6158 /* Look first for regs to share, then for unshared. But
6159 don't share regs used for inherited reloads; they are
6160 the ones we want to preserve. */
6161 && (pass
6162 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
6163 regnum)
6164 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
6165 regnum))))
6167 int nr = hard_regno_nregs[regnum][rld[r].mode];
6169 /* During the second pass we want to avoid reload registers
6170 which are "bad" for this reload. */
6171 if (pass == 1
6172 && ira_bad_reload_regno (regnum, rld[r].in, rld[r].out))
6173 continue;
6175 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
6176 (on 68000) got us two FP regs. If NR is 1,
6177 we would reject both of them. */
6178 if (force_group)
6179 nr = rld[r].nregs;
6180 /* If we need only one reg, we have already won. */
6181 if (nr == 1)
6183 /* But reject a single reg if we demand a group. */
6184 if (force_group)
6185 continue;
6186 break;
6188 /* Otherwise check that as many consecutive regs as we need
6189 are available here. */
/* Scan downward from the top register of the would-be group;
   NR drops to 1 only if every subsequent register is usable.  */
6190 while (nr > 1)
6192 int regno = regnum + nr - 1;
6193 if (!(TEST_HARD_REG_BIT (reg_class_contents[rclass], regno)
6194 && spill_reg_order[regno] >= 0
6195 && reload_reg_free_p (regno, rld[r].opnum,
6196 rld[r].when_needed)))
6197 break;
6198 nr--;
6200 if (nr == 1)
6201 break;
6205 /* If we found something on the current pass, omit later passes. */
6206 if (count < n_spills)
6207 break;
6210 /* We should have found a spill register by now. */
6211 if (count >= n_spills)
6212 return 0;
6214 /* I is the index in SPILL_REG_RTX of the reload register we are to
6215 allocate. Get an rtx for it and find its register number. */
6217 return set_reload_reg (i, r);
6220 /* Initialize all the tables needed to allocate reload registers.
6221 CHAIN is the insn currently being processed; SAVE_RELOAD_REG_RTX
6222 is the array we use to restore the reg_rtx field for every reload. */
6224 static void
6225 choose_reload_regs_init (struct insn_chain *chain, rtx *save_reload_reg_rtx)
6227 int i;
6229 for (i = 0; i < n_reloads; i++)
6230 rld[i].reg_rtx = save_reload_reg_rtx[i];
6232 memset (reload_inherited, 0, MAX_RELOADS);
6233 memset (reload_inheritance_insn, 0, MAX_RELOADS * sizeof (rtx));
6234 memset (reload_override_in, 0, MAX_RELOADS * sizeof (rtx));
6236 CLEAR_HARD_REG_SET (reload_reg_used);
6237 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
6238 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
6239 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
6240 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
6241 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
6243 CLEAR_HARD_REG_SET (reg_used_in_insn);
6245 HARD_REG_SET tmp;
6246 REG_SET_TO_HARD_REG_SET (tmp, &chain->live_throughout);
6247 IOR_HARD_REG_SET (reg_used_in_insn, tmp);
6248 REG_SET_TO_HARD_REG_SET (tmp, &chain->dead_or_set);
6249 IOR_HARD_REG_SET (reg_used_in_insn, tmp);
6250 compute_use_by_pseudos (&reg_used_in_insn, &chain->live_throughout);
6251 compute_use_by_pseudos (&reg_used_in_insn, &chain->dead_or_set);
6254 for (i = 0; i < reload_n_operands; i++)
6256 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
6257 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
6258 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
6259 CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
6260 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
6261 CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
6264 COMPL_HARD_REG_SET (reload_reg_unavailable, chain->used_spill_regs);
6266 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
6268 for (i = 0; i < n_reloads; i++)
6269 /* If we have already decided to use a certain register,
6270 don't use it in another way. */
6271 if (rld[i].reg_rtx)
6272 mark_reload_reg_in_use (REGNO (rld[i].reg_rtx), rld[i].opnum,
6273 rld[i].when_needed, rld[i].mode);
6276 /* Assign hard reg targets for the pseudo-registers we must reload
6277 into hard regs for this insn.
6278 Also output the instructions to copy them in and out of the hard regs.
6280 For machines with register classes, we are responsible for
6281 finding a reload reg in the proper class. */
6283 static void
6284 choose_reload_regs (struct insn_chain *chain)
6286 rtx insn = chain->insn;
6287 int i, j;
6288 unsigned int max_group_size = 1;
6289 enum reg_class group_class = NO_REGS;
6290 int pass, win, inheritance;
6292 rtx save_reload_reg_rtx[MAX_RELOADS];
6294 /* In order to be certain of getting the registers we need,
6295 we must sort the reloads into order of increasing register class.
6296 Then our grabbing of reload registers will parallel the process
6297 that provided the reload registers.
6299 Also note whether any of the reloads wants a consecutive group of regs.
6300 If so, record the maximum size of the group desired and what
6301 register class contains all the groups needed by this insn. */
6303 for (j = 0; j < n_reloads; j++)
6305 reload_order[j] = j;
6306 if (rld[j].reg_rtx != NULL_RTX)
6308 gcc_assert (REG_P (rld[j].reg_rtx)
6309 && HARD_REGISTER_P (rld[j].reg_rtx));
6310 reload_spill_index[j] = REGNO (rld[j].reg_rtx);
6312 else
6313 reload_spill_index[j] = -1;
6315 if (rld[j].nregs > 1)
6317 max_group_size = MAX (rld[j].nregs, max_group_size);
6318 group_class
6319 = reg_class_superunion[(int) rld[j].rclass][(int) group_class];
6322 save_reload_reg_rtx[j] = rld[j].reg_rtx;
6325 if (n_reloads > 1)
6326 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
6328 /* If -O, try first with inheritance, then turning it off.
6329 If not -O, don't do inheritance.
6330 Using inheritance when not optimizing leads to paradoxes
6331 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
6332 because one side of the comparison might be inherited. */
6333 win = 0;
6334 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
6336 choose_reload_regs_init (chain, save_reload_reg_rtx);
6338 /* Process the reloads in order of preference just found.
6339 Beyond this point, subregs can be found in reload_reg_rtx.
6341 This used to look for an existing reloaded home for all of the
6342 reloads, and only then perform any new reloads. But that could lose
6343 if the reloads were done out of reg-class order because a later
6344 reload with a looser constraint might have an old home in a register
6345 needed by an earlier reload with a tighter constraint.
6347 To solve this, we make two passes over the reloads, in the order
6348 described above. In the first pass we try to inherit a reload
6349 from a previous insn. If there is a later reload that needs a
6350 class that is a proper subset of the class being processed, we must
6351 also allocate a spill register during the first pass.
6353 Then make a second pass over the reloads to allocate any reloads
6354 that haven't been given registers yet. */
6356 for (j = 0; j < n_reloads; j++)
6358 int r = reload_order[j];
6359 rtx search_equiv = NULL_RTX;
6361 /* Ignore reloads that got marked inoperative. */
6362 if (rld[r].out == 0 && rld[r].in == 0
6363 && ! rld[r].secondary_p)
6364 continue;
6366 /* If find_reloads chose to use reload_in or reload_out as a reload
6367 register, we don't need to chose one. Otherwise, try even if it
6368 found one since we might save an insn if we find the value lying
6369 around.
6370 Try also when reload_in is a pseudo without a hard reg. */
6371 if (rld[r].in != 0 && rld[r].reg_rtx != 0
6372 && (rtx_equal_p (rld[r].in, rld[r].reg_rtx)
6373 || (rtx_equal_p (rld[r].out, rld[r].reg_rtx)
6374 && !MEM_P (rld[r].in)
6375 && true_regnum (rld[r].in) < FIRST_PSEUDO_REGISTER)))
6376 continue;
6378 #if 0 /* No longer needed for correct operation.
6379 It might give better code, or might not; worth an experiment? */
6380 /* If this is an optional reload, we can't inherit from earlier insns
6381 until we are sure that any non-optional reloads have been allocated.
6382 The following code takes advantage of the fact that optional reloads
6383 are at the end of reload_order. */
6384 if (rld[r].optional != 0)
6385 for (i = 0; i < j; i++)
6386 if ((rld[reload_order[i]].out != 0
6387 || rld[reload_order[i]].in != 0
6388 || rld[reload_order[i]].secondary_p)
6389 && ! rld[reload_order[i]].optional
6390 && rld[reload_order[i]].reg_rtx == 0)
6391 allocate_reload_reg (chain, reload_order[i], 0);
6392 #endif
6394 /* First see if this pseudo is already available as reloaded
6395 for a previous insn. We cannot try to inherit for reloads
6396 that are smaller than the maximum number of registers needed
6397 for groups unless the register we would allocate cannot be used
6398 for the groups.
6400 We could check here to see if this is a secondary reload for
6401 an object that is already in a register of the desired class.
6402 This would avoid the need for the secondary reload register.
6403 But this is complex because we can't easily determine what
6404 objects might want to be loaded via this reload. So let a
6405 register be allocated here. In `emit_reload_insns' we suppress
6406 one of the loads in the case described above. */
6408 if (inheritance)
6410 int byte = 0;
6411 int regno = -1;
6412 enum machine_mode mode = VOIDmode;
6414 if (rld[r].in == 0)
6416 else if (REG_P (rld[r].in))
6418 regno = REGNO (rld[r].in);
6419 mode = GET_MODE (rld[r].in);
6421 else if (REG_P (rld[r].in_reg))
6423 regno = REGNO (rld[r].in_reg);
6424 mode = GET_MODE (rld[r].in_reg);
6426 else if (GET_CODE (rld[r].in_reg) == SUBREG
6427 && REG_P (SUBREG_REG (rld[r].in_reg)))
6429 regno = REGNO (SUBREG_REG (rld[r].in_reg));
6430 if (regno < FIRST_PSEUDO_REGISTER)
6431 regno = subreg_regno (rld[r].in_reg);
6432 else
6433 byte = SUBREG_BYTE (rld[r].in_reg);
6434 mode = GET_MODE (rld[r].in_reg);
6436 #ifdef AUTO_INC_DEC
6437 else if (GET_RTX_CLASS (GET_CODE (rld[r].in_reg)) == RTX_AUTOINC
6438 && REG_P (XEXP (rld[r].in_reg, 0)))
6440 regno = REGNO (XEXP (rld[r].in_reg, 0));
6441 mode = GET_MODE (XEXP (rld[r].in_reg, 0));
6442 rld[r].out = rld[r].in;
6444 #endif
6445 #if 0
6446 /* This won't work, since REGNO can be a pseudo reg number.
6447 Also, it takes much more hair to keep track of all the things
6448 that can invalidate an inherited reload of part of a pseudoreg. */
6449 else if (GET_CODE (rld[r].in) == SUBREG
6450 && REG_P (SUBREG_REG (rld[r].in)))
6451 regno = subreg_regno (rld[r].in);
6452 #endif
6454 if (regno >= 0
6455 && reg_last_reload_reg[regno] != 0
6456 #ifdef CANNOT_CHANGE_MODE_CLASS
6457 /* Verify that the register it's in can be used in
6458 mode MODE. */
6459 && !REG_CANNOT_CHANGE_MODE_P (REGNO (reg_last_reload_reg[regno]),
6460 GET_MODE (reg_last_reload_reg[regno]),
6461 mode)
6462 #endif
6465 enum reg_class rclass = rld[r].rclass, last_class;
6466 rtx last_reg = reg_last_reload_reg[regno];
6467 enum machine_mode need_mode;
6469 i = REGNO (last_reg);
6470 i += subreg_regno_offset (i, GET_MODE (last_reg), byte, mode);
6471 last_class = REGNO_REG_CLASS (i);
6473 if (byte == 0)
6474 need_mode = mode;
6475 else
6476 need_mode
6477 = smallest_mode_for_size
6478 (GET_MODE_BITSIZE (mode) + byte * BITS_PER_UNIT,
6479 GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
6480 ? MODE_INT : GET_MODE_CLASS (mode));
6482 if ((GET_MODE_SIZE (GET_MODE (last_reg))
6483 >= GET_MODE_SIZE (need_mode))
6484 && reg_reloaded_contents[i] == regno
6485 && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
6486 && HARD_REGNO_MODE_OK (i, rld[r].mode)
6487 && (TEST_HARD_REG_BIT (reg_class_contents[(int) rclass], i)
6488 /* Even if we can't use this register as a reload
6489 register, we might use it for reload_override_in,
6490 if copying it to the desired class is cheap
6491 enough. */
6492 || ((register_move_cost (mode, last_class, rclass)
6493 < memory_move_cost (mode, rclass, true))
6494 && (secondary_reload_class (1, rclass, mode,
6495 last_reg)
6496 == NO_REGS)
6497 #ifdef SECONDARY_MEMORY_NEEDED
6498 && ! SECONDARY_MEMORY_NEEDED (last_class, rclass,
6499 mode)
6500 #endif
6503 && (rld[r].nregs == max_group_size
6504 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
6506 && free_for_value_p (i, rld[r].mode, rld[r].opnum,
6507 rld[r].when_needed, rld[r].in,
6508 const0_rtx, r, 1))
6510 /* If a group is needed, verify that all the subsequent
6511 registers still have their values intact. */
6512 int nr = hard_regno_nregs[i][rld[r].mode];
6513 int k;
6515 for (k = 1; k < nr; k++)
6516 if (reg_reloaded_contents[i + k] != regno
6517 || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))
6518 break;
6520 if (k == nr)
6522 int i1;
6523 int bad_for_class;
6525 last_reg = (GET_MODE (last_reg) == mode
6526 ? last_reg : gen_rtx_REG (mode, i));
6528 bad_for_class = 0;
6529 for (k = 0; k < nr; k++)
6530 bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].rclass],
6531 i+k);
6533 /* We found a register that contains the
6534 value we need. If this register is the
6535 same as an `earlyclobber' operand of the
6536 current insn, just mark it as a place to
6537 reload from since we can't use it as the
6538 reload register itself. */
6540 for (i1 = 0; i1 < n_earlyclobbers; i1++)
6541 if (reg_overlap_mentioned_for_reload_p
6542 (reg_last_reload_reg[regno],
6543 reload_earlyclobbers[i1]))
6544 break;
6546 if (i1 != n_earlyclobbers
6547 || ! (free_for_value_p (i, rld[r].mode,
6548 rld[r].opnum,
6549 rld[r].when_needed, rld[r].in,
6550 rld[r].out, r, 1))
6551 /* Don't use it if we'd clobber a pseudo reg. */
6552 || (TEST_HARD_REG_BIT (reg_used_in_insn, i)
6553 && rld[r].out
6554 && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
6555 /* Don't clobber the frame pointer. */
6556 || (i == HARD_FRAME_POINTER_REGNUM
6557 && frame_pointer_needed
6558 && rld[r].out)
6559 /* Don't really use the inherited spill reg
6560 if we need it wider than we've got it. */
6561 || (GET_MODE_SIZE (rld[r].mode)
6562 > GET_MODE_SIZE (mode))
6563 || bad_for_class
6565 /* If find_reloads chose reload_out as reload
6566 register, stay with it - that leaves the
6567 inherited register for subsequent reloads. */
6568 || (rld[r].out && rld[r].reg_rtx
6569 && rtx_equal_p (rld[r].out, rld[r].reg_rtx)))
6571 if (! rld[r].optional)
6573 reload_override_in[r] = last_reg;
6574 reload_inheritance_insn[r]
6575 = reg_reloaded_insn[i];
6578 else
6580 int k;
6581 /* We can use this as a reload reg. */
6582 /* Mark the register as in use for this part of
6583 the insn. */
6584 mark_reload_reg_in_use (i,
6585 rld[r].opnum,
6586 rld[r].when_needed,
6587 rld[r].mode);
6588 rld[r].reg_rtx = last_reg;
6589 reload_inherited[r] = 1;
6590 reload_inheritance_insn[r]
6591 = reg_reloaded_insn[i];
6592 reload_spill_index[r] = i;
6593 for (k = 0; k < nr; k++)
6594 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6595 i + k);
6602 /* Here's another way to see if the value is already lying around. */
6603 if (inheritance
6604 && rld[r].in != 0
6605 && ! reload_inherited[r]
6606 && rld[r].out == 0
6607 && (CONSTANT_P (rld[r].in)
6608 || GET_CODE (rld[r].in) == PLUS
6609 || REG_P (rld[r].in)
6610 || MEM_P (rld[r].in))
6611 && (rld[r].nregs == max_group_size
6612 || ! reg_classes_intersect_p (rld[r].rclass, group_class)))
6613 search_equiv = rld[r].in;
6614 /* If this is an output reload from a simple move insn, look
6615 if an equivalence for the input is available. */
6616 else if (inheritance && rld[r].in == 0 && rld[r].out != 0)
6618 rtx set = single_set (insn);
6620 if (set
6621 && rtx_equal_p (rld[r].out, SET_DEST (set))
6622 && CONSTANT_P (SET_SRC (set)))
6623 search_equiv = SET_SRC (set);
6626 if (search_equiv)
6628 rtx equiv
6629 = find_equiv_reg (search_equiv, insn, rld[r].rclass,
6630 -1, NULL, 0, rld[r].mode);
6631 int regno = 0;
6633 if (equiv != 0)
6635 if (REG_P (equiv))
6636 regno = REGNO (equiv);
6637 else
6639 /* This must be a SUBREG of a hard register.
6640 Make a new REG since this might be used in an
6641 address and not all machines support SUBREGs
6642 there. */
6643 gcc_assert (GET_CODE (equiv) == SUBREG);
6644 regno = subreg_regno (equiv);
6645 equiv = gen_rtx_REG (rld[r].mode, regno);
6646 /* If we choose EQUIV as the reload register, but the
6647 loop below decides to cancel the inheritance, we'll
6648 end up reloading EQUIV in rld[r].mode, not the mode
6649 it had originally. That isn't safe when EQUIV isn't
6650 available as a spill register since its value might
6651 still be live at this point. */
6652 for (i = regno; i < regno + (int) rld[r].nregs; i++)
6653 if (TEST_HARD_REG_BIT (reload_reg_unavailable, i))
6654 equiv = 0;
6658 /* If we found a spill reg, reject it unless it is free
6659 and of the desired class. */
6660 if (equiv != 0)
6662 int regs_used = 0;
6663 int bad_for_class = 0;
6664 int max_regno = regno + rld[r].nregs;
6666 for (i = regno; i < max_regno; i++)
6668 regs_used |= TEST_HARD_REG_BIT (reload_reg_used_at_all,
6670 bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].rclass],
6674 if ((regs_used
6675 && ! free_for_value_p (regno, rld[r].mode,
6676 rld[r].opnum, rld[r].when_needed,
6677 rld[r].in, rld[r].out, r, 1))
6678 || bad_for_class)
6679 equiv = 0;
6682 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, rld[r].mode))
6683 equiv = 0;
6685 /* We found a register that contains the value we need.
6686 If this register is the same as an `earlyclobber' operand
6687 of the current insn, just mark it as a place to reload from
6688 since we can't use it as the reload register itself. */
6690 if (equiv != 0)
6691 for (i = 0; i < n_earlyclobbers; i++)
6692 if (reg_overlap_mentioned_for_reload_p (equiv,
6693 reload_earlyclobbers[i]))
6695 if (! rld[r].optional)
6696 reload_override_in[r] = equiv;
6697 equiv = 0;
6698 break;
6701 /* If the equiv register we have found is explicitly clobbered
6702 in the current insn, it depends on the reload type if we
6703 can use it, use it for reload_override_in, or not at all.
6704 In particular, we then can't use EQUIV for a
6705 RELOAD_FOR_OUTPUT_ADDRESS reload. */
6707 if (equiv != 0)
6709 if (regno_clobbered_p (regno, insn, rld[r].mode, 2))
6710 switch (rld[r].when_needed)
6712 case RELOAD_FOR_OTHER_ADDRESS:
6713 case RELOAD_FOR_INPADDR_ADDRESS:
6714 case RELOAD_FOR_INPUT_ADDRESS:
6715 case RELOAD_FOR_OPADDR_ADDR:
6716 break;
6717 case RELOAD_OTHER:
6718 case RELOAD_FOR_INPUT:
6719 case RELOAD_FOR_OPERAND_ADDRESS:
6720 if (! rld[r].optional)
6721 reload_override_in[r] = equiv;
6722 /* Fall through. */
6723 default:
6724 equiv = 0;
6725 break;
6727 else if (regno_clobbered_p (regno, insn, rld[r].mode, 1))
6728 switch (rld[r].when_needed)
6730 case RELOAD_FOR_OTHER_ADDRESS:
6731 case RELOAD_FOR_INPADDR_ADDRESS:
6732 case RELOAD_FOR_INPUT_ADDRESS:
6733 case RELOAD_FOR_OPADDR_ADDR:
6734 case RELOAD_FOR_OPERAND_ADDRESS:
6735 case RELOAD_FOR_INPUT:
6736 break;
6737 case RELOAD_OTHER:
6738 if (! rld[r].optional)
6739 reload_override_in[r] = equiv;
6740 /* Fall through. */
6741 default:
6742 equiv = 0;
6743 break;
6747 /* If we found an equivalent reg, say no code need be generated
6748 to load it, and use it as our reload reg. */
6749 if (equiv != 0
6750 && (regno != HARD_FRAME_POINTER_REGNUM
6751 || !frame_pointer_needed))
6753 int nr = hard_regno_nregs[regno][rld[r].mode];
6754 int k;
6755 rld[r].reg_rtx = equiv;
6756 reload_spill_index[r] = regno;
6757 reload_inherited[r] = 1;
6759 /* If reg_reloaded_valid is not set for this register,
6760 there might be a stale spill_reg_store lying around.
6761 We must clear it, since otherwise emit_reload_insns
6762 might delete the store. */
6763 if (! TEST_HARD_REG_BIT (reg_reloaded_valid, regno))
6764 spill_reg_store[regno] = NULL_RTX;
6765 /* If any of the hard registers in EQUIV are spill
6766 registers, mark them as in use for this insn. */
6767 for (k = 0; k < nr; k++)
6769 i = spill_reg_order[regno + k];
6770 if (i >= 0)
6772 mark_reload_reg_in_use (regno, rld[r].opnum,
6773 rld[r].when_needed,
6774 rld[r].mode);
6775 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6776 regno + k);
6782 /* If we found a register to use already, or if this is an optional
6783 reload, we are done. */
6784 if (rld[r].reg_rtx != 0 || rld[r].optional != 0)
6785 continue;
6787 #if 0
6788 /* No longer needed for correct operation. Might or might
6789 not give better code on the average. Want to experiment? */
6791 /* See if there is a later reload that has a class different from our
6792 class that intersects our class or that requires less register
6793 than our reload. If so, we must allocate a register to this
6794 reload now, since that reload might inherit a previous reload
6795 and take the only available register in our class. Don't do this
6796 for optional reloads since they will force all previous reloads
6797 to be allocated. Also don't do this for reloads that have been
6798 turned off. */
6800 for (i = j + 1; i < n_reloads; i++)
6802 int s = reload_order[i];
6804 if ((rld[s].in == 0 && rld[s].out == 0
6805 && ! rld[s].secondary_p)
6806 || rld[s].optional)
6807 continue;
6809 if ((rld[s].rclass != rld[r].rclass
6810 && reg_classes_intersect_p (rld[r].rclass,
6811 rld[s].rclass))
6812 || rld[s].nregs < rld[r].nregs)
6813 break;
6816 if (i == n_reloads)
6817 continue;
6819 allocate_reload_reg (chain, r, j == n_reloads - 1);
6820 #endif
6823 /* Now allocate reload registers for anything non-optional that
6824 didn't get one yet. */
6825 for (j = 0; j < n_reloads; j++)
6827 int r = reload_order[j];
6829 /* Ignore reloads that got marked inoperative. */
6830 if (rld[r].out == 0 && rld[r].in == 0 && ! rld[r].secondary_p)
6831 continue;
6833 /* Skip reloads that already have a register allocated or are
6834 optional. */
6835 if (rld[r].reg_rtx != 0 || rld[r].optional)
6836 continue;
6838 if (! allocate_reload_reg (chain, r, j == n_reloads - 1))
6839 break;
6842 /* If that loop got all the way, we have won. */
6843 if (j == n_reloads)
6845 win = 1;
6846 break;
6849 /* Loop around and try without any inheritance. */
6852 if (! win)
6854 /* First undo everything done by the failed attempt
6855 to allocate with inheritance. */
6856 choose_reload_regs_init (chain, save_reload_reg_rtx);
6858 /* Some sanity tests to verify that the reloads found in the first
6859 pass are identical to the ones we have now. */
6860 gcc_assert (chain->n_reloads == n_reloads);
6862 for (i = 0; i < n_reloads; i++)
6864 if (chain->rld[i].regno < 0 || chain->rld[i].reg_rtx != 0)
6865 continue;
6866 gcc_assert (chain->rld[i].when_needed == rld[i].when_needed);
6867 for (j = 0; j < n_spills; j++)
6868 if (spill_regs[j] == chain->rld[i].regno)
6869 if (! set_reload_reg (j, i))
6870 failed_reload (chain->insn, i);
6874 /* If we thought we could inherit a reload, because it seemed that
6875 nothing else wanted the same reload register earlier in the insn,
6876 verify that assumption, now that all reloads have been assigned.
6877 Likewise for reloads where reload_override_in has been set. */
6879 /* If doing expensive optimizations, do one preliminary pass that doesn't
6880 cancel any inheritance, but removes reloads that have been needed only
6881 for reloads that we know can be inherited. */
6882 for (pass = flag_expensive_optimizations; pass >= 0; pass--)
6884 for (j = 0; j < n_reloads; j++)
6886 int r = reload_order[j];
6887 rtx check_reg;
6888 if (reload_inherited[r] && rld[r].reg_rtx)
6889 check_reg = rld[r].reg_rtx;
6890 else if (reload_override_in[r]
6891 && (REG_P (reload_override_in[r])
6892 || GET_CODE (reload_override_in[r]) == SUBREG))
6893 check_reg = reload_override_in[r];
6894 else
6895 continue;
6896 if (! free_for_value_p (true_regnum (check_reg), rld[r].mode,
6897 rld[r].opnum, rld[r].when_needed, rld[r].in,
6898 (reload_inherited[r]
6899 ? rld[r].out : const0_rtx),
6900 r, 1))
6902 if (pass)
6903 continue;
6904 reload_inherited[r] = 0;
6905 reload_override_in[r] = 0;
6907 /* If we can inherit a RELOAD_FOR_INPUT, or can use a
6908 reload_override_in, then we do not need its related
6909 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
6910 likewise for other reload types.
6911 We handle this by removing a reload when its only replacement
6912 is mentioned in reload_in of the reload we are going to inherit.
6913 A special case are auto_inc expressions; even if the input is
6914 inherited, we still need the address for the output. We can
6915 recognize them because they have RELOAD_OUT set to RELOAD_IN.
6916 If we succeeded removing some reload and we are doing a preliminary
6917 pass just to remove such reloads, make another pass, since the
6918 removal of one reload might allow us to inherit another one. */
6919 else if (rld[r].in
6920 && rld[r].out != rld[r].in
6921 && remove_address_replacements (rld[r].in) && pass)
6922 pass = 2;
6926 /* Now that reload_override_in is known valid,
6927 actually override reload_in. */
6928 for (j = 0; j < n_reloads; j++)
6929 if (reload_override_in[j])
6930 rld[j].in = reload_override_in[j];
6932 /* If this reload won't be done because it has been canceled or is
6933 optional and not inherited, clear reload_reg_rtx so other
6934 routines (such as subst_reloads) don't get confused. */
6935 for (j = 0; j < n_reloads; j++)
6936 if (rld[j].reg_rtx != 0
6937 && ((rld[j].optional && ! reload_inherited[j])
6938 || (rld[j].in == 0 && rld[j].out == 0
6939 && ! rld[j].secondary_p)))
6941 int regno = true_regnum (rld[j].reg_rtx);
6943 if (spill_reg_order[regno] >= 0)
6944 clear_reload_reg_in_use (regno, rld[j].opnum,
6945 rld[j].when_needed, rld[j].mode);
6946 rld[j].reg_rtx = 0;
6947 reload_spill_index[j] = -1;
6950 /* Record which pseudos and which spill regs have output reloads. */
6951 for (j = 0; j < n_reloads; j++)
6953 int r = reload_order[j];
6955 i = reload_spill_index[r];
6957 /* I is nonneg if this reload uses a register.
6958 If rld[r].reg_rtx is 0, this is an optional reload
6959 that we opted to ignore. */
6960 if (rld[r].out_reg != 0 && REG_P (rld[r].out_reg)
6961 && rld[r].reg_rtx != 0)
6963 int nregno = REGNO (rld[r].out_reg);
6964 int nr = 1;
6966 if (nregno < FIRST_PSEUDO_REGISTER)
6967 nr = hard_regno_nregs[nregno][rld[r].mode];
6969 while (--nr >= 0)
6970 SET_REGNO_REG_SET (&reg_has_output_reload,
6971 nregno + nr);
6973 if (i >= 0)
6975 nr = hard_regno_nregs[i][rld[r].mode];
6976 while (--nr >= 0)
6977 SET_HARD_REG_BIT (reg_is_output_reload, i + nr);
6980 gcc_assert (rld[r].when_needed == RELOAD_OTHER
6981 || rld[r].when_needed == RELOAD_FOR_OUTPUT
6982 || rld[r].when_needed == RELOAD_FOR_INSN);
6987 /* Deallocate the reload register for reload R. This is called from
6988 remove_address_replacements. */
6990 void
6991 deallocate_reload_reg (int r)
6993 int regno;
6995 if (! rld[r].reg_rtx)
6996 return;
6997 regno = true_regnum (rld[r].reg_rtx);
6998 rld[r].reg_rtx = 0;
6999 if (spill_reg_order[regno] >= 0)
7000 clear_reload_reg_in_use (regno, rld[r].opnum, rld[r].when_needed,
7001 rld[r].mode);
7002 reload_spill_index[r] = -1;
/* If the small_register_classes_for_mode_p target hook returns true for
   some machine modes, we may not have merged two reloads of the same item
   for fear that we might not have enough reload registers.  However,
   normally they will get the same reload register and hence actually need
   not be loaded twice.

   Here we check for the most common case of this phenomenon: when we have
   a number of reloads for the same object, each of which were allocated
   the same reload_reg_rtx, that reload_reg_rtx is not used for any other
   reload, and is not modified in the insn itself.  If we find such,
   merge all the reloads and set the resulting reload to RELOAD_OTHER.
   This will not increase the number of spill registers needed and will
   prevent redundant code.  */

static void
merge_assigned_reloads (rtx insn)
{
  int i, j;

  /* Scan all the reloads looking for ones that only load values and
     are not already RELOAD_OTHER and ones whose reload_reg_rtx are
     assigned and not modified by INSN.  */

  for (i = 0; i < n_reloads; i++)
    {
      int conflicting_input = 0;
      int max_input_address_opnum = -1;
      int min_conflicting_input_opnum = MAX_RECOG_OPERANDS;

      /* Only pure input reloads with an assigned register that INSN
	 itself does not clobber are candidates for merging.  */
      if (rld[i].in == 0 || rld[i].when_needed == RELOAD_OTHER
	  || rld[i].out != 0 || rld[i].reg_rtx == 0
	  || reg_set_p (rld[i].reg_rtx, insn))
	continue;

      /* Look at all other reloads.  Ensure that the only use of this
	 reload_reg_rtx is in a reload that just loads the same value
	 as we do.  Note that any secondary reloads must be of the identical
	 class since the values, modes, and result registers are the
	 same, so we need not do anything with any secondary reloads.  */

      for (j = 0; j < n_reloads; j++)
	{
	  if (i == j || rld[j].reg_rtx == 0
	      || ! reg_overlap_mentioned_p (rld[j].reg_rtx,
					    rld[i].reg_rtx))
	    continue;

	  if (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
	      && rld[j].opnum > max_input_address_opnum)
	    max_input_address_opnum = rld[j].opnum;

	  /* If the reload regs aren't exactly the same (e.g, different modes)
	     or if the values are different, we can't merge this reload.
	     But if it is an input reload, we might still merge
	     RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_OTHER_ADDRESS reloads.  */

	  if (! rtx_equal_p (rld[i].reg_rtx, rld[j].reg_rtx)
	      || rld[j].out != 0 || rld[j].in == 0
	      || ! rtx_equal_p (rld[i].in, rld[j].in))
	    {
	      if (rld[j].when_needed != RELOAD_FOR_INPUT
		  || ((rld[i].when_needed != RELOAD_FOR_INPUT_ADDRESS
		       || rld[i].opnum > rld[j].opnum)
		      && rld[i].when_needed != RELOAD_FOR_OTHER_ADDRESS))
		break;
	      /* A RELOAD_FOR_INPUT sharing the register is tolerated;
		 remember the lowest operand number it occurs at so we
		 can verify ordering against input-address reloads.  */
	      conflicting_input = 1;
	      if (min_conflicting_input_opnum > rld[j].opnum)
		min_conflicting_input_opnum = rld[j].opnum;
	    }
	}

      /* If all is OK, merge the reloads.  Only set this to RELOAD_OTHER if
	 we, in fact, found any matching reloads.  */

      if (j == n_reloads
	  && max_input_address_opnum <= min_conflicting_input_opnum)
	{
	  gcc_assert (rld[i].when_needed != RELOAD_FOR_OUTPUT);

	  for (j = 0; j < n_reloads; j++)
	    if (i != j && rld[j].reg_rtx != 0
		&& rtx_equal_p (rld[i].reg_rtx, rld[j].reg_rtx)
		&& (! conflicting_input
		    || rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
		    || rld[j].when_needed == RELOAD_FOR_OTHER_ADDRESS))
	      {
		rld[i].when_needed = RELOAD_OTHER;
		rld[j].in = 0;
		reload_spill_index[j] = -1;
		transfer_replacements (i, j);
	      }

	  /* If this is now RELOAD_OTHER, look for any reloads that
	     load parts of this operand and set them to
	     RELOAD_FOR_OTHER_ADDRESS if they were for inputs,
	     RELOAD_OTHER for outputs.  Note that this test is
	     equivalent to looking for reloads for this operand
	     number.

	     We must take special care with RELOAD_FOR_OUTPUT_ADDRESS;
	     it may share registers with a RELOAD_FOR_INPUT, so we can
	     not change it to RELOAD_FOR_OTHER_ADDRESS.  We should
	     never need to, since we do not modify RELOAD_FOR_OUTPUT.

	     It is possible that the RELOAD_FOR_OPERAND_ADDRESS
	     instruction is assigned the same register as the earlier
	     RELOAD_FOR_OTHER_ADDRESS instruction.  Merging these two
	     instructions will cause the RELOAD_FOR_OTHER_ADDRESS
	     instruction to be deleted later on.  */

	  if (rld[i].when_needed == RELOAD_OTHER)
	    for (j = 0; j < n_reloads; j++)
	      if (rld[j].in != 0
		  && rld[j].when_needed != RELOAD_OTHER
		  && rld[j].when_needed != RELOAD_FOR_OTHER_ADDRESS
		  && rld[j].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
		  && rld[j].when_needed != RELOAD_FOR_OPERAND_ADDRESS
		  && (! conflicting_input
		      || rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
		      || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
		  && reg_overlap_mentioned_for_reload_p (rld[j].in,
							 rld[i].in))
		{
		  int k;

		  rld[j].when_needed
		    = ((rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
			|| rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
		       ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER);

		  /* Check to see if we accidentally converted two
		     reloads that use the same reload register with
		     different inputs to the same type.  If so, the
		     resulting code won't work.  */
		  if (rld[j].reg_rtx)
		    for (k = 0; k < j; k++)
		      gcc_assert (rld[k].in == 0 || rld[k].reg_rtx == 0
				  || rld[k].when_needed != rld[j].when_needed
				  || !rtx_equal_p (rld[k].reg_rtx,
						   rld[j].reg_rtx)
				  || rtx_equal_p (rld[k].in,
						  rld[j].in));
		}
	}
    }
}
/* These arrays are filled by emit_reload_insns and its subroutines.
   Each holds the head of an insn sequence for one category of reload
   insn (indexed by operand number where an array); the sequences are
   later emitted around the insn being reloaded in the order required
   by the reload types.  */
static rtx input_reload_insns[MAX_RECOG_OPERANDS];
static rtx other_input_address_reload_insns = 0;
static rtx other_input_reload_insns = 0;
static rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
static rtx inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
static rtx output_reload_insns[MAX_RECOG_OPERANDS];
static rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
static rtx outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
static rtx operand_reload_insns = 0;
static rtx other_operand_reload_insns = 0;
static rtx other_output_reload_insns[MAX_RECOG_OPERANDS];

/* Values to be put in spill_reg_store are put here first.  */
static rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
/* Hard regs for which an output reload's destination died in the insn.  */
static HARD_REG_SET reg_reloaded_died;
7169 /* Check if *RELOAD_REG is suitable as an intermediate or scratch register
7170 of class NEW_CLASS with mode NEW_MODE. Or alternatively, if alt_reload_reg
7171 is nonzero, if that is suitable. On success, change *RELOAD_REG to the
7172 adjusted register, and return true. Otherwise, return false. */
7173 static bool
7174 reload_adjust_reg_for_temp (rtx *reload_reg, rtx alt_reload_reg,
7175 enum reg_class new_class,
7176 enum machine_mode new_mode)
7179 rtx reg;
7181 for (reg = *reload_reg; reg; reg = alt_reload_reg, alt_reload_reg = 0)
7183 unsigned regno = REGNO (reg);
7185 if (!TEST_HARD_REG_BIT (reg_class_contents[(int) new_class], regno))
7186 continue;
7187 if (GET_MODE (reg) != new_mode)
7189 if (!HARD_REGNO_MODE_OK (regno, new_mode))
7190 continue;
7191 if (hard_regno_nregs[regno][new_mode]
7192 > hard_regno_nregs[regno][GET_MODE (reg)])
7193 continue;
7194 reg = reload_adjust_reg_for_mode (reg, new_mode);
7196 *reload_reg = reg;
7197 return true;
7199 return false;
7202 /* Check if *RELOAD_REG is suitable as a scratch register for the reload
7203 pattern with insn_code ICODE, or alternatively, if alt_reload_reg is
7204 nonzero, if that is suitable. On success, change *RELOAD_REG to the
7205 adjusted register, and return true. Otherwise, return false. */
7206 static bool
7207 reload_adjust_reg_for_icode (rtx *reload_reg, rtx alt_reload_reg,
7208 enum insn_code icode)
7211 enum reg_class new_class = scratch_reload_class (icode);
7212 enum machine_mode new_mode = insn_data[(int) icode].operand[2].mode;
7214 return reload_adjust_reg_for_temp (reload_reg, alt_reload_reg,
7215 new_class, new_mode);
/* Generate insns to perform reload RL, which is for the insn in CHAIN and
   has the number J.  OLD contains the value to be used as input.
   The generated insns are collected on the sequence list selected by
   RL->when_needed (see the static sequence heads above); they are not
   emitted into the insn stream here.  */

static void
emit_input_reload_insns (struct insn_chain *chain, struct reload *rl,
			 rtx old, int j)
{
  rtx insn = chain->insn;
  rtx reloadreg;
  rtx oldequiv_reg = 0;
  rtx oldequiv = 0;
  int special = 0;		/* Set when the reload is fully handled early.  */
  enum machine_mode mode;
  rtx *where;			/* Sequence list to append this reload to.  */

  /* delete_output_reload is only invoked properly if old contains
     the original pseudo register.  Since this is replaced with a
     hard reg when RELOAD_OVERRIDE_IN is set, see if we can
     find the pseudo in RELOAD_IN_REG.  */
  if (reload_override_in[j]
      && REG_P (rl->in_reg))
    {
      oldequiv = old;
      old = rl->in_reg;
    }
  if (oldequiv == 0)
    oldequiv = old;
  else if (REG_P (oldequiv))
    oldequiv_reg = oldequiv;
  else if (GET_CODE (oldequiv) == SUBREG)
    oldequiv_reg = SUBREG_REG (oldequiv);

  reloadreg = reload_reg_rtx_for_input[j];
  mode = GET_MODE (reloadreg);

  /* If we are reloading from a register that was recently stored in
     with an output-reload, see if we can prove there was
     actually no need to store the old value in it.  */

  if (optimize && REG_P (oldequiv)
      && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
      && spill_reg_store[REGNO (oldequiv)]
      && REG_P (old)
      && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (oldequiv)])
	  || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
			  rl->out_reg)))
    delete_output_reload (insn, j, REGNO (oldequiv), reloadreg);

  /* Encapsulate OLDEQUIV into the reload mode, then load RELOADREG from
     OLDEQUIV.  */

  while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
    oldequiv = SUBREG_REG (oldequiv);
  if (GET_MODE (oldequiv) != VOIDmode
      && mode != GET_MODE (oldequiv))
    oldequiv = gen_lowpart_SUBREG (mode, oldequiv);

  /* Switch to the right place to emit the reload insns.  */
  switch (rl->when_needed)
    {
    case RELOAD_OTHER:
      where = &other_input_reload_insns;
      break;
    case RELOAD_FOR_INPUT:
      where = &input_reload_insns[rl->opnum];
      break;
    case RELOAD_FOR_INPUT_ADDRESS:
      where = &input_address_reload_insns[rl->opnum];
      break;
    case RELOAD_FOR_INPADDR_ADDRESS:
      where = &inpaddr_address_reload_insns[rl->opnum];
      break;
    case RELOAD_FOR_OUTPUT_ADDRESS:
      where = &output_address_reload_insns[rl->opnum];
      break;
    case RELOAD_FOR_OUTADDR_ADDRESS:
      where = &outaddr_address_reload_insns[rl->opnum];
      break;
    case RELOAD_FOR_OPERAND_ADDRESS:
      where = &operand_reload_insns;
      break;
    case RELOAD_FOR_OPADDR_ADDR:
      where = &other_operand_reload_insns;
      break;
    case RELOAD_FOR_OTHER_ADDRESS:
      where = &other_input_address_reload_insns;
      break;
    default:
      gcc_unreachable ();
    }

  push_to_sequence (*where);

  /* Auto-increment addresses must be reloaded in a special way.  */
  if (rl->out && ! rl->out_reg)
    {
      /* We are not going to bother supporting the case where a
	 incremented register can't be copied directly from
	 OLDEQUIV since this seems highly unlikely.  */
      gcc_assert (rl->secondary_in_reload < 0);

      if (reload_inherited[j])
	oldequiv = reloadreg;

      old = XEXP (rl->in_reg, 0);

      /* Same dead-store elimination as above, now that OLD has been
	 replaced by the auto-incremented address register.  */
      if (optimize && REG_P (oldequiv)
	  && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
	  && spill_reg_store[REGNO (oldequiv)]
	  && REG_P (old)
	  && (dead_or_set_p (insn,
			     spill_reg_stored_to[REGNO (oldequiv)])
	      || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
			      old)))
	delete_output_reload (insn, j, REGNO (oldequiv), reloadreg);

      /* Prevent normal processing of this reload.  */
      special = 1;
      /* Output a special code sequence for this case.  */
      new_spill_reg_store[REGNO (reloadreg)]
	= inc_for_reload (reloadreg, oldequiv, rl->out,
			  rl->inc);
    }

  /* If we are reloading a pseudo-register that was set by the previous
     insn, see if we can get rid of that pseudo-register entirely
     by redirecting the previous insn into our reload register.  */

  else if (optimize && REG_P (old)
	   && REGNO (old) >= FIRST_PSEUDO_REGISTER
	   && dead_or_set_p (insn, old)
	   /* This is unsafe if some other reload
	      uses the same reg first.  */
	   && ! conflicts_with_override (reloadreg)
	   && free_for_value_p (REGNO (reloadreg), rl->mode, rl->opnum,
				rl->when_needed, old, rl->out, j, 0))
    {
      rtx temp = PREV_INSN (insn);
      while (temp && (NOTE_P (temp) || DEBUG_INSN_P (temp)))
	temp = PREV_INSN (temp);
      if (temp
	  && NONJUMP_INSN_P (temp)
	  && GET_CODE (PATTERN (temp)) == SET
	  && SET_DEST (PATTERN (temp)) == old
	  /* Make sure we can access insn_operand_constraint.  */
	  && asm_noperands (PATTERN (temp)) < 0
	  /* This is unsafe if operand occurs more than once in current
	     insn.  Perhaps some occurrences aren't reloaded.  */
	  && count_occurrences (PATTERN (insn), old, 0) == 1)
	{
	  /* NOTE(review): this declaration intentionally shadows the
	     outer OLD for the rest of this scope.  */
	  rtx old = SET_DEST (PATTERN (temp));
	  /* Store into the reload register instead of the pseudo.  */
	  SET_DEST (PATTERN (temp)) = reloadreg;

	  /* Verify that resulting insn is valid.  */
	  extract_insn (temp);
	  if (constrain_operands (1))
	    {
	      /* If the previous insn is an output reload, the source is
		 a reload register, and its spill_reg_store entry will
		 contain the previous destination.  This is now
		 invalid.  */
	      if (REG_P (SET_SRC (PATTERN (temp)))
		  && REGNO (SET_SRC (PATTERN (temp))) < FIRST_PSEUDO_REGISTER)
		{
		  spill_reg_store[REGNO (SET_SRC (PATTERN (temp)))] = 0;
		  spill_reg_stored_to[REGNO (SET_SRC (PATTERN (temp)))] = 0;
		}

	      /* If these are the only uses of the pseudo reg,
		 pretend for GDB it lives in the reload reg we used.  */
	      if (REG_N_DEATHS (REGNO (old)) == 1
		  && REG_N_SETS (REGNO (old)) == 1)
		{
		  reg_renumber[REGNO (old)] = REGNO (reloadreg);
		  if (ira_conflicts_p)
		    /* Inform IRA about the change.  */
		    ira_mark_allocation_change (REGNO (old));
		  alter_reg (REGNO (old), -1, false);
		}
	      special = 1;

	      /* Adjust any debug insns between temp and insn.  */
	      while ((temp = NEXT_INSN (temp)) != insn)
		if (DEBUG_INSN_P (temp))
		  replace_rtx (PATTERN (temp), old, reloadreg);
		else
		  gcc_assert (NOTE_P (temp));
	    }
	  else
	    {
	      /* Redirection failed validation; restore the original
		 destination.  */
	      SET_DEST (PATTERN (temp)) = old;
	    }
	}
    }

  /* We can't do that, so output an insn to load RELOADREG.  */

  /* If we have a secondary reload, pick up the secondary register
     and icode, if any.  If OLDEQUIV and OLD are different or
     if this is an in-out reload, recompute whether or not we
     still need a secondary register and what the icode should
     be.  If we still need a secondary register and the class or
     icode is different, go back to reloading from OLD if using
     OLDEQUIV means that we got the wrong type of register.  We
     cannot have different class or icode due to an in-out reload
     because we don't make such reloads when both the input and
     output need secondary reload registers.  */

  if (! special && rl->secondary_in_reload >= 0)
    {
      rtx second_reload_reg = 0;
      rtx third_reload_reg = 0;
      int secondary_reload = rl->secondary_in_reload;
      rtx real_oldequiv = oldequiv;
      rtx real_old = old;
      rtx tmp;
      enum insn_code icode;
      enum insn_code tertiary_icode = CODE_FOR_nothing;

      /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
	 and similarly for OLD.
	 See comments in get_secondary_reload in reload.c.  */
      /* If it is a pseudo that cannot be replaced with its
	 equivalent MEM, we must fall back to reload_in, which
	 will have all the necessary substitutions registered.
	 Likewise for a pseudo that can't be replaced with its
	 equivalent constant.

	 Take extra care for subregs of such pseudos.  Note that
	 we cannot use reg_equiv_mem in this case because it is
	 not in the right mode.  */

      tmp = oldequiv;
      if (GET_CODE (tmp) == SUBREG)
	tmp = SUBREG_REG (tmp);
      if (REG_P (tmp)
	  && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
	  && (reg_equiv_memory_loc[REGNO (tmp)] != 0
	      || reg_equiv_constant[REGNO (tmp)] != 0))
	{
	  if (! reg_equiv_mem[REGNO (tmp)]
	      || num_not_at_initial_offset
	      || GET_CODE (oldequiv) == SUBREG)
	    real_oldequiv = rl->in;
	  else
	    real_oldequiv = reg_equiv_mem[REGNO (tmp)];
	}

      tmp = old;
      if (GET_CODE (tmp) == SUBREG)
	tmp = SUBREG_REG (tmp);
      if (REG_P (tmp)
	  && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
	  && (reg_equiv_memory_loc[REGNO (tmp)] != 0
	      || reg_equiv_constant[REGNO (tmp)] != 0))
	{
	  if (! reg_equiv_mem[REGNO (tmp)]
	      || num_not_at_initial_offset
	      || GET_CODE (old) == SUBREG)
	    real_old = rl->in;
	  else
	    real_old = reg_equiv_mem[REGNO (tmp)];
	}

      second_reload_reg = rld[secondary_reload].reg_rtx;
      if (rld[secondary_reload].secondary_in_reload >= 0)
	{
	  int tertiary_reload = rld[secondary_reload].secondary_in_reload;

	  third_reload_reg = rld[tertiary_reload].reg_rtx;
	  tertiary_icode = rld[secondary_reload].secondary_in_icode;
	  /* We'd have to add more code for quartary reloads.  */
	  gcc_assert (rld[tertiary_reload].secondary_in_reload < 0);
	}
      icode = rl->secondary_in_icode;

      if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
	  || (rl->in != 0 && rl->out != 0))
	{
	  secondary_reload_info sri, sri2;
	  enum reg_class new_class, new_t_class;

	  sri.icode = CODE_FOR_nothing;
	  sri.prev_sri = NULL;
	  new_class
	    = (enum reg_class) targetm.secondary_reload (1, real_oldequiv,
							 rl->rclass, mode,
							 &sri);

	  if (new_class == NO_REGS && sri.icode == CODE_FOR_nothing)
	    second_reload_reg = 0;
	  else if (new_class == NO_REGS)
	    {
	      if (reload_adjust_reg_for_icode (&second_reload_reg,
					       third_reload_reg,
					       (enum insn_code) sri.icode))
		{
		  icode = (enum insn_code) sri.icode;
		  third_reload_reg = 0;
		}
	      else
		{
		  oldequiv = old;
		  real_oldequiv = real_old;
		}
	    }
	  else if (sri.icode != CODE_FOR_nothing)
	    /* We currently lack a way to express this in reloads.  */
	    gcc_unreachable ();
	  else
	    {
	      sri2.icode = CODE_FOR_nothing;
	      sri2.prev_sri = &sri;
	      /* NOTE(review): the second hook call passes &sri rather
		 than &sri2, yet sri2.icode is consulted below (and so
		 stays CODE_FOR_nothing) — looks suspicious; confirm
		 against upstream before changing.  */
	      new_t_class
		= (enum reg_class) targetm.secondary_reload (1, real_oldequiv,
							     new_class, mode,
							     &sri);
	      if (new_t_class == NO_REGS && sri2.icode == CODE_FOR_nothing)
		{
		  if (reload_adjust_reg_for_temp (&second_reload_reg,
						  third_reload_reg,
						  new_class, mode))
		    {
		      third_reload_reg = 0;
		      tertiary_icode = (enum insn_code) sri2.icode;
		    }
		  else
		    {
		      oldequiv = old;
		      real_oldequiv = real_old;
		    }
		}
	      else if (new_t_class == NO_REGS && sri2.icode != CODE_FOR_nothing)
		{
		  rtx intermediate = second_reload_reg;

		  if (reload_adjust_reg_for_temp (&intermediate, NULL,
						  new_class, mode)
		      && reload_adjust_reg_for_icode (&third_reload_reg, NULL,
						      ((enum insn_code)
						       sri2.icode)))
		    {
		      second_reload_reg = intermediate;
		      tertiary_icode = (enum insn_code) sri2.icode;
		    }
		  else
		    {
		      oldequiv = old;
		      real_oldequiv = real_old;
		    }
		}
	      else if (new_t_class != NO_REGS && sri2.icode == CODE_FOR_nothing)
		{
		  rtx intermediate = second_reload_reg;

		  if (reload_adjust_reg_for_temp (&intermediate, NULL,
						  new_class, mode)
		      && reload_adjust_reg_for_temp (&third_reload_reg, NULL,
						     new_t_class, mode))
		    {
		      second_reload_reg = intermediate;
		      tertiary_icode = (enum insn_code) sri2.icode;
		    }
		  else
		    {
		      oldequiv = old;
		      real_oldequiv = real_old;
		    }
		}
	      else
		{
		  /* This could be handled more intelligently too.  */
		  oldequiv = old;
		  real_oldequiv = real_old;
		}
	    }
	}

      /* If we still need a secondary reload register, check
	 to see if it is being used as a scratch or intermediate
	 register and generate code appropriately.  If we need
	 a scratch register, use REAL_OLDEQUIV since the form of
	 the insn may depend on the actual address if it is
	 a MEM.  */

      if (second_reload_reg)
	{
	  if (icode != CODE_FOR_nothing)
	    {
	      /* We'd have to add extra code to handle this case.  */
	      gcc_assert (!third_reload_reg);

	      emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
					  second_reload_reg));
	      special = 1;
	    }
	  else
	    {
	      /* See if we need a scratch register to load the
		 intermediate register (a tertiary reload).  */
	      if (tertiary_icode != CODE_FOR_nothing)
		{
		  emit_insn ((GEN_FCN (tertiary_icode)
			      (second_reload_reg, real_oldequiv,
			       third_reload_reg)));
		}
	      else if (third_reload_reg)
		{
		  gen_reload (third_reload_reg, real_oldequiv,
			      rl->opnum,
			      rl->when_needed);
		  gen_reload (second_reload_reg, third_reload_reg,
			      rl->opnum,
			      rl->when_needed);
		}
	      else
		gen_reload (second_reload_reg, real_oldequiv,
			    rl->opnum,
			    rl->when_needed);

	      oldequiv = second_reload_reg;
	    }
	}
    }

  /* Finally, load RELOADREG from OLDEQUIV if that is not already a
     no-op and the reload was not handled by a special case above.  */
  if (! special && ! rtx_equal_p (reloadreg, oldequiv))
    {
      rtx real_oldequiv = oldequiv;

      if ((REG_P (oldequiv)
	   && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
	   && (reg_equiv_memory_loc[REGNO (oldequiv)] != 0
	       || reg_equiv_constant[REGNO (oldequiv)] != 0))
	  || (GET_CODE (oldequiv) == SUBREG
	      && REG_P (SUBREG_REG (oldequiv))
	      && (REGNO (SUBREG_REG (oldequiv))
		  >= FIRST_PSEUDO_REGISTER)
	      && ((reg_equiv_memory_loc
		   [REGNO (SUBREG_REG (oldequiv))] != 0)
		  || (reg_equiv_constant
		      [REGNO (SUBREG_REG (oldequiv))] != 0)))
	  || (CONSTANT_P (oldequiv)
	      && (PREFERRED_RELOAD_CLASS (oldequiv,
					  REGNO_REG_CLASS (REGNO (reloadreg)))
		  == NO_REGS)))
	real_oldequiv = rl->in;
      gen_reload (reloadreg, real_oldequiv, rl->opnum,
		  rl->when_needed);
    }

  if (cfun->can_throw_non_call_exceptions)
    copy_reg_eh_region_note_forward (insn, get_insns (), NULL);

  /* End this sequence.  */
  *where = get_insns ();
  end_sequence ();

  /* Update reload_override_in so that delete_address_reloads_1
     can see the actual register usage.  */
  if (oldequiv_reg)
    reload_override_in[j] = oldequiv;
}
/* Generate insns to for the output reload RL, which is for the insn described
   by CHAIN and has the number J.  The generated insns are collected on the
   per-operand output sequence lists (or the RELOAD_OTHER list) for later
   emission after the insn.  */
static void
emit_output_reload_insns (struct insn_chain *chain, struct reload *rl,
			  int j)
{
  rtx reloadreg;
  rtx insn = chain->insn;
  int special = 0;		/* Set when a secondary-reload pattern
				   already performed the store.  */
  rtx old = rl->out;
  enum machine_mode mode;
  rtx p;
  rtx rl_reg_rtx;

  if (rl->when_needed == RELOAD_OTHER)
    start_sequence ();
  else
    push_to_sequence (output_reload_insns[rl->opnum]);

  rl_reg_rtx = reload_reg_rtx_for_output[j];
  mode = GET_MODE (rl_reg_rtx);

  reloadreg = rl_reg_rtx;

  /* If we need two reload regs, set RELOADREG to the intermediate
     one, since it will be stored into OLD.  We might need a secondary
     register only for an input reload, so check again here.  */

  if (rl->secondary_out_reload >= 0)
    {
      rtx real_old = old;
      int secondary_reload = rl->secondary_out_reload;
      int tertiary_reload = rld[secondary_reload].secondary_out_reload;

      if (REG_P (old) && REGNO (old) >= FIRST_PSEUDO_REGISTER
	  && reg_equiv_mem[REGNO (old)] != 0)
	real_old = reg_equiv_mem[REGNO (old)];

      if (secondary_reload_class (0, rl->rclass, mode, real_old) != NO_REGS)
	{
	  rtx second_reloadreg = reloadreg;
	  reloadreg = rld[secondary_reload].reg_rtx;

	  /* See if RELOADREG is to be used as a scratch register
	     or as an intermediate register.  */
	  if (rl->secondary_out_icode != CODE_FOR_nothing)
	    {
	      /* We'd have to add extra code to handle this case.  */
	      gcc_assert (tertiary_reload < 0);

	      emit_insn ((GEN_FCN (rl->secondary_out_icode)
			  (real_old, second_reloadreg, reloadreg)));
	      special = 1;
	    }
	  else
	    {
	      /* See if we need both a scratch and intermediate reload
		 register.  */

	      enum insn_code tertiary_icode
		= rld[secondary_reload].secondary_out_icode;

	      /* We'd have to add more code for quartary reloads.  */
	      gcc_assert (tertiary_reload < 0
			  || rld[tertiary_reload].secondary_out_reload < 0);

	      if (GET_MODE (reloadreg) != mode)
		reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);

	      if (tertiary_icode != CODE_FOR_nothing)
		{
		  rtx third_reloadreg = rld[tertiary_reload].reg_rtx;
		  rtx tem;

		  /* Copy primary reload reg to secondary reload reg.
		     (Note that these have been swapped above, then
		     secondary reload reg to OLD using our insn.)  */

		  /* If REAL_OLD is a paradoxical SUBREG, remove it
		     and try to put the opposite SUBREG on
		     RELOADREG.  */
		  if (GET_CODE (real_old) == SUBREG
		      && (GET_MODE_SIZE (GET_MODE (real_old))
			  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
		      && 0 != (tem = gen_lowpart_common
			       (GET_MODE (SUBREG_REG (real_old)),
				reloadreg)))
		    real_old = SUBREG_REG (real_old), reloadreg = tem;

		  gen_reload (reloadreg, second_reloadreg,
			      rl->opnum, rl->when_needed);
		  emit_insn ((GEN_FCN (tertiary_icode)
			      (real_old, reloadreg, third_reloadreg)));
		  special = 1;
		}

	      else
		{
		  /* Copy between the reload regs here and then to
		     OUT later.  */

		  gen_reload (reloadreg, second_reloadreg,
			      rl->opnum, rl->when_needed);
		  if (tertiary_reload >= 0)
		    {
		      rtx third_reloadreg = rld[tertiary_reload].reg_rtx;

		      gen_reload (third_reloadreg, reloadreg,
				  rl->opnum, rl->when_needed);
		      reloadreg = third_reloadreg;
		    }
		}
	    }
	}
    }

  /* Output the last reload insn.  */
  if (! special)
    {
      rtx set;

      /* Don't output the last reload if OLD is not the dest of
	 INSN and is in the src and is clobbered by INSN.  */
      if (! flag_expensive_optimizations
	  || !REG_P (old)
	  || !(set = single_set (insn))
	  || rtx_equal_p (old, SET_DEST (set))
	  || !reg_mentioned_p (old, SET_SRC (set))
	  || !((REGNO (old) < FIRST_PSEUDO_REGISTER)
	       && regno_clobbered_p (REGNO (old), insn, rl->mode, 0)))
	gen_reload (old, reloadreg, rl->opnum,
		    rl->when_needed);
    }

  /* Look at all insns we emitted, just to be safe.  */
  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
	rtx pat = PATTERN (p);

	/* If this output reload doesn't come from a spill reg,
	   clear any memory of reloaded copies of the pseudo reg.
	   If this output reload comes from a spill reg,
	   reg_has_output_reload will make this do nothing.  */
	note_stores (pat, forget_old_reloads_1, NULL);

	if (reg_mentioned_p (rl_reg_rtx, pat))
	  {
	    rtx set = single_set (insn);
	    if (reload_spill_index[j] < 0
		&& set
		&& SET_SRC (set) == rl_reg_rtx)
	      {
		int src = REGNO (SET_SRC (set));

		reload_spill_index[j] = src;
		SET_HARD_REG_BIT (reg_is_output_reload, src);
		if (find_regno_note (insn, REG_DEAD, src))
		  SET_HARD_REG_BIT (reg_reloaded_died, src);
	      }

	    if (HARD_REGISTER_P (rl_reg_rtx))
	      {
		int s = rl->secondary_out_reload;
		set = single_set (p);
		/* If this reload copies only to the secondary reload
		   register, the secondary reload does the actual
		   store.  */
		if (s >= 0 && set == NULL_RTX)
		  /* We can't tell what function the secondary reload
		     has and where the actual store to the pseudo is
		     made; leave new_spill_reg_store alone.  */
		  ;
		else if (s >= 0
			 && SET_SRC (set) == rl_reg_rtx
			 && SET_DEST (set) == rld[s].reg_rtx)
		  {
		    /* Usually the next instruction will be the
		       secondary reload insn;  if we can confirm
		       that it is, setting new_spill_reg_store to
		       that insn will allow an extra optimization.  */
		    rtx s_reg = rld[s].reg_rtx;
		    rtx next = NEXT_INSN (p);
		    rld[s].out = rl->out;
		    rld[s].out_reg = rl->out_reg;
		    set = single_set (next);
		    if (set && SET_SRC (set) == s_reg
			&& ! new_spill_reg_store[REGNO (s_reg)])
		      {
			SET_HARD_REG_BIT (reg_is_output_reload,
					  REGNO (s_reg));
			new_spill_reg_store[REGNO (s_reg)] = next;
		      }
		  }
		else
		  new_spill_reg_store[REGNO (rl_reg_rtx)] = p;
	      }
	  }
      }

  if (rl->when_needed == RELOAD_OTHER)
    {
      /* RELOAD_OTHER output reloads accumulate on a single shared
	 per-operand list; chain any previous contents first.  */
      emit_insn (other_output_reload_insns[rl->opnum]);
      other_output_reload_insns[rl->opnum] = get_insns ();
    }
  else
    output_reload_insns[rl->opnum] = get_insns ();

  if (cfun->can_throw_non_call_exceptions)
    copy_reg_eh_region_note_forward (insn, get_insns (), NULL);

  end_sequence ();
}
7895 /* Do input reloading for reload RL, which is for the insn described by CHAIN
7896 and has the number J. */
7897 static void
7898 do_input_reload (struct insn_chain *chain, struct reload *rl, int j)
/* NOTE(review): the brace-only lines of this function appear to have been
   dropped when this dump was produced; the statements below are otherwise
   intact and are documented as-is.  */
7900 rtx insn = chain->insn;
/* If the reload value is a MEM, track the original register (in_reg)
   rather than the memory reference itself.  */
7901 rtx old = (rl->in && MEM_P (rl->in)
7902 ? rl->in_reg : rl->in);
7903 rtx reg_rtx = rl->reg_rtx;
7905 if (old && reg_rtx)
7907 enum machine_mode mode;
7909 /* Determine the mode to reload in.
7910 This is very tricky because we have three to choose from.
7911 There is the mode the insn operand wants (rl->inmode).
7912 There is the mode of the reload register RELOADREG.
7913 There is the intrinsic mode of the operand, which we could find
7914 by stripping some SUBREGs.
7915 It turns out that RELOADREG's mode is irrelevant:
7916 we can change that arbitrarily.
7918 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
7919 then the reload reg may not support QImode moves, so use SImode.
7920 If foo is in memory due to spilling a pseudo reg, this is safe,
7921 because the QImode value is in the least significant part of a
7922 slot big enough for a SImode. If foo is some other sort of
7923 memory reference, then it is impossible to reload this case,
7924 so previous passes had better make sure this never happens.
7926 Then consider a one-word union which has SImode and one of its
7927 members is a float, being fetched as (SUBREG:SF union:SI).
7928 We must fetch that as SFmode because we could be loading into
7929 a float-only register. In this case OLD's mode is correct.
7931 Consider an immediate integer: it has VOIDmode. Here we need
7932 to get a mode from something else.
7934 In some cases, there is a fourth mode, the operand's
7935 containing mode. If the insn specifies a containing mode for
7936 this operand, it overrides all others.
7938 I am not sure whether the algorithm here is always right,
7939 but it does the right things in those cases. */
7941 mode = GET_MODE (old);
7942 if (mode == VOIDmode)
7943 mode = rl->inmode;
7945 /* We cannot use gen_lowpart_common since it can do the wrong thing
7946 when REG_RTX has a multi-word mode. Note that REG_RTX must
7947 always be a REG here. */
7948 if (GET_MODE (reg_rtx) != mode)
7949 reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
/* Record the (possibly mode-adjusted) reload register so later
   bookkeeping in emit_reload_insns can find it.  */
7951 reload_reg_rtx_for_input[j] = reg_rtx;
7953 if (old != 0
7954 /* AUTO_INC reloads need to be handled even if inherited. We got an
7955 AUTO_INC reload if reload_out is set but reload_out_reg isn't. */
7956 && (! reload_inherited[j] || (rl->out && ! rl->out_reg))
7957 && ! rtx_equal_p (reg_rtx, old)
7958 && reg_rtx != 0)
7959 emit_input_reload_insns (chain, rld + j, old, j);
7961 /* When inheriting a wider reload, we have a MEM in rl->in,
7962 e.g. inheriting a SImode output reload for
7963 (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10))) */
7964 if (optimize && reload_inherited[j] && rl->in
7965 && MEM_P (rl->in)
7966 && MEM_P (rl->in_reg)
7967 && reload_spill_index[j] >= 0
7968 && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
7969 rl->in = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];
7971 /* If we are reloading a register that was recently stored in with an
7972 output-reload, see if we can prove there was
7973 actually no need to store the old value in it. */
7975 if (optimize
7976 && (reload_inherited[j] || reload_override_in[j])
7977 && reg_rtx
7978 && REG_P (reg_rtx)
7979 && spill_reg_store[REGNO (reg_rtx)] != 0
7980 #if 0
7981 /* There doesn't seem to be any reason to restrict this to pseudos
7982 and doing so loses in the case where we are copying from a
7983 register of the wrong class. */
7984 && !HARD_REGISTER_P (spill_reg_stored_to[REGNO (reg_rtx)])
7985 #endif
7986 /* The insn might have already some references to stackslots
7987 replaced by MEMs, while reload_out_reg still names the
7988 original pseudo. */
7989 && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (reg_rtx)])
7990 || rtx_equal_p (spill_reg_stored_to[REGNO (reg_rtx)], rl->out_reg)))
/* delete_output_reload double-checks before actually deleting the
   earlier store (see its header comment further down the file).  */
7991 delete_output_reload (insn, j, REGNO (reg_rtx), reg_rtx);
7994 /* Do output reloading for reload RL, which is for the insn described by
7995 CHAIN and has the number J.
7996 ??? At some point we need to support handling output reloads of
7997 JUMP_INSNs or insns that set cc0. */
7998 static void
7999 do_output_reload (struct insn_chain *chain, struct reload *rl, int j)
/* NOTE(review): the brace-only lines of this function appear to have been
   dropped when this dump was produced; the statements below are otherwise
   intact and are documented as-is.  */
8001 rtx note, old;
8002 rtx insn = chain->insn;
8003 /* If this is an output reload that stores something that is
8004 not loaded in this same reload, see if we can eliminate a previous
8005 store. */
8006 rtx pseudo = rl->out_reg;
8007 rtx reg_rtx = rl->reg_rtx;
8009 if (rl->out && reg_rtx)
8011 enum machine_mode mode;
8013 /* Determine the mode to reload in.
8014 See comments above (for input reloading). */
8015 mode = GET_MODE (rl->out);
8016 if (mode == VOIDmode)
8018 /* VOIDmode should never happen for an output. */
8019 if (asm_noperands (PATTERN (insn)) < 0)
8020 /* It's the compiler's fault. */
8021 fatal_insn ("VOIDmode on an output", insn);
8022 error_for_asm (insn, "output operand is constant in %<asm%>");
8023 /* Prevent crash--use something we know is valid. */
8024 mode = word_mode;
8025 rl->out = gen_rtx_REG (mode, REGNO (reg_rtx));
8027 if (GET_MODE (reg_rtx) != mode)
8028 reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
/* Record the (possibly mode-adjusted) reload register for the
   bookkeeping in emit_reload_insns.  */
8030 reload_reg_rtx_for_output[j] = reg_rtx;
8032 if (pseudo
8033 && optimize
8034 && REG_P (pseudo)
8035 && ! rtx_equal_p (rl->in_reg, pseudo)
8036 && REGNO (pseudo) >= FIRST_PSEUDO_REGISTER
8037 && reg_last_reload_reg[REGNO (pseudo)])
8039 int pseudo_no = REGNO (pseudo);
8040 int last_regno = REGNO (reg_last_reload_reg[pseudo_no]);
8042 /* We don't need to test full validity of last_regno for
8043 inherit here; we only want to know if the store actually
8044 matches the pseudo. */
8045 if (TEST_HARD_REG_BIT (reg_reloaded_valid, last_regno)
8046 && reg_reloaded_contents[last_regno] == pseudo_no
8047 && spill_reg_store[last_regno]
8048 && rtx_equal_p (pseudo, spill_reg_stored_to[last_regno]))
8049 delete_output_reload (insn, j, last_regno, reg_rtx);
/* From here on, decide whether an actual copy out of the reload
   register is needed at all.  */
8052 old = rl->out_reg;
8053 if (old == 0
8054 || reg_rtx == 0
8055 || rtx_equal_p (old, reg_rtx))
8056 return;
8058 /* An output operand that dies right away does need a reload,
8059 but need not be copied from it. Show the new location in the
8060 REG_UNUSED note. */
8061 if ((REG_P (old) || GET_CODE (old) == SCRATCH)
8062 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
8064 XEXP (note, 0) = reg_rtx;
8065 return;
8067 /* Likewise for a SUBREG of an operand that dies. */
8068 else if (GET_CODE (old) == SUBREG
8069 && REG_P (SUBREG_REG (old))
8070 && 0 != (note = find_reg_note (insn, REG_UNUSED,
8071 SUBREG_REG (old))))
8073 XEXP (note, 0) = gen_lowpart_common (GET_MODE (old), reg_rtx);
8074 return;
8076 else if (GET_CODE (old) == SCRATCH)
8077 /* If we aren't optimizing, there won't be a REG_UNUSED note,
8078 but we don't want to make an output reload. */
8079 return;
8081 /* If this is a JUMP_INSN, we can't support output reloads yet. */
8082 gcc_assert (NONJUMP_INSN_P (insn));
8084 emit_output_reload_insns (chain, rld + j, j);
8087 /* A reload copies values of MODE from register SRC to register DEST.
8088 Return true if it can be treated for inheritance purposes like a
8089 group of reloads, each one reloading a single hard register. The
8090 caller has already checked that (reg:MODE SRC) and (reg:MODE DEST)
8091 occupy the same number of hard registers. */
8093 static bool
8094 inherit_piecemeal_p (int dest ATTRIBUTE_UNUSED,
8095 int src ATTRIBUTE_UNUSED,
8096 enum machine_mode mode ATTRIBUTE_UNUSED)
8098 #ifdef CANNOT_CHANGE_MODE_CLASS
8099 return (!REG_CANNOT_CHANGE_MODE_P (dest, mode, reg_raw_mode[dest])
8100 && !REG_CANNOT_CHANGE_MODE_P (src, mode, reg_raw_mode[src]));
8101 #else
8102 return true;
8103 #endif
8106 /* Output insns to reload values in and out of the chosen reload regs. */
8108 static void
8109 emit_reload_insns (struct insn_chain *chain)
/* NOTE(review): the brace-only lines of this function appear to have been
   dropped when this dump was produced; the statements below are otherwise
   intact and are documented as-is.  */
8111 rtx insn = chain->insn;
8113 int j;
8115 CLEAR_HARD_REG_SET (reg_reloaded_died);
/* Reset all the per-operand insn sequence accumulators before the
   do_input_reload / do_output_reload calls fill them in.  */
8117 for (j = 0; j < reload_n_operands; j++)
8118 input_reload_insns[j] = input_address_reload_insns[j]
8119 = inpaddr_address_reload_insns[j]
8120 = output_reload_insns[j] = output_address_reload_insns[j]
8121 = outaddr_address_reload_insns[j]
8122 = other_output_reload_insns[j] = 0;
8123 other_input_address_reload_insns = 0;
8124 other_input_reload_insns = 0;
8125 operand_reload_insns = 0;
8126 other_operand_reload_insns = 0;
8128 /* Dump reloads into the dump file. */
8129 if (dump_file)
8131 fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
8132 debug_reload_to_stream (dump_file);
8135 /* Now output the instructions to copy the data into and out of the
8136 reload registers. Do these in the order that the reloads were reported,
8137 since reloads of base and index registers precede reloads of operands
8138 and the operands may need the base and index registers reloaded. */
8140 for (j = 0; j < n_reloads; j++)
8142 if (rld[j].reg_rtx && HARD_REGISTER_P (rld[j].reg_rtx))
8144 unsigned int i;
8146 for (i = REGNO (rld[j].reg_rtx); i < END_REGNO (rld[j].reg_rtx); i++)
8147 new_spill_reg_store[i] = 0;
8150 do_input_reload (chain, rld + j, j);
8151 do_output_reload (chain, rld + j, j);
8154 /* Now write all the insns we made for reloads in the order expected by
8155 the allocation functions. Prior to the insn being reloaded, we write
8156 the following reloads:
8158 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
8160 RELOAD_OTHER reloads.
8162 For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
8163 by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
8164 RELOAD_FOR_INPUT reload for the operand.
8166 RELOAD_FOR_OPADDR_ADDRS reloads.
8168 RELOAD_FOR_OPERAND_ADDRESS reloads.
8170 After the insn being reloaded, we write the following:
8172 For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
8173 by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
8174 RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
8175 reloads for the operand. The RELOAD_OTHER output reloads are
8176 output in descending order by reload number. */
8178 emit_insn_before (other_input_address_reload_insns, insn);
8179 emit_insn_before (other_input_reload_insns, insn);
8181 for (j = 0; j < reload_n_operands; j++)
8183 emit_insn_before (inpaddr_address_reload_insns[j], insn);
8184 emit_insn_before (input_address_reload_insns[j], insn);
8185 emit_insn_before (input_reload_insns[j], insn);
8188 emit_insn_before (other_operand_reload_insns, insn);
8189 emit_insn_before (operand_reload_insns, insn);
8191 for (j = 0; j < reload_n_operands; j++)
8193 rtx x = emit_insn_after (outaddr_address_reload_insns[j], insn);
8194 x = emit_insn_after (output_address_reload_insns[j], x);
8195 x = emit_insn_after (output_reload_insns[j], x);
8196 emit_insn_after (other_output_reload_insns[j], x);
8199 /* For all the spill regs newly reloaded in this instruction,
8200 record what they were reloaded from, so subsequent instructions
8201 can inherit the reloads.
8203 Update spill_reg_store for the reloads of this insn.
8204 Copy the elements that were updated in the loop above. */
8206 for (j = 0; j < n_reloads; j++)
8208 int r = reload_order[j];
8209 int i = reload_spill_index[r];
8211 /* If this is a non-inherited input reload from a pseudo, we must
8212 clear any memory of a previous store to the same pseudo. Only do
8213 something if there will not be an output reload for the pseudo
8214 being reloaded. */
8215 if (rld[r].in_reg != 0
8216 && ! (reload_inherited[r] || reload_override_in[r]))
8218 rtx reg = rld[r].in_reg;
8220 if (GET_CODE (reg) == SUBREG)
8221 reg = SUBREG_REG (reg);
8223 if (REG_P (reg)
8224 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
8225 && !REGNO_REG_SET_P (&reg_has_output_reload, REGNO (reg)))
8227 int nregno = REGNO (reg);
8229 if (reg_last_reload_reg[nregno])
8231 int last_regno = REGNO (reg_last_reload_reg[nregno]);
8233 if (reg_reloaded_contents[last_regno] == nregno)
8234 spill_reg_store[last_regno] = 0;
8239 /* I is nonneg if this reload used a register.
8240 If rld[r].reg_rtx is 0, this is an optional reload
8241 that we opted to ignore. */
8243 if (i >= 0 && rld[r].reg_rtx != 0)
8245 int nr = hard_regno_nregs[i][GET_MODE (rld[r].reg_rtx)];
8246 int k;
8248 /* For a multi register reload, we need to check if all or part
8249 of the value lives to the end. */
8250 for (k = 0; k < nr; k++)
8251 if (reload_reg_reaches_end_p (i + k, rld[r].opnum,
8252 rld[r].when_needed))
8253 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k)
8255 /* Maybe the spill reg contains a copy of reload_out. */
8256 if (rld[r].out != 0
8257 && (REG_P (rld[r].out)
8258 #ifdef AUTO_INC_DEC
8259 || ! rld[r].out_reg
8260 #endif
8261 || REG_P (rld[r].out_reg)))
8263 rtx reg;
8264 enum machine_mode mode;
8265 int regno, nregs;
8267 reg = reload_reg_rtx_for_output[r];
8268 mode = GET_MODE (reg);
8269 regno = REGNO (reg);
8270 nregs = hard_regno_nregs[regno][mode];
8271 if (reload_regs_reach_end_p (regno, nregs, rld[r].opnum,
8272 rld[r].when_needed))
8274 rtx out = (REG_P (rld[r].out)
8275 ? rld[r].out
8276 : rld[r].out_reg
8277 ? rld[r].out_reg
8278 /* AUTO_INC */ : XEXP (rld[r].in_reg, 0));
8279 int out_regno = REGNO (out);
8280 int out_nregs = (!HARD_REGISTER_NUM_P (out_regno) ? 1
8281 : hard_regno_nregs[out_regno][mode]);
8282 bool piecemeal;
8284 spill_reg_store[regno] = new_spill_reg_store[regno];
8285 spill_reg_stored_to[regno] = out;
8286 reg_last_reload_reg[out_regno] = reg;
8288 piecemeal = (HARD_REGISTER_NUM_P (out_regno)
8289 && nregs == out_nregs
8290 && inherit_piecemeal_p (out_regno, regno, mode));
8292 /* If OUT_REGNO is a hard register, it may occupy more than
8293 one register. If it does, say what is in the
8294 rest of the registers assuming that both registers
8295 agree on how many words the object takes. If not,
8296 invalidate the subsequent registers. */
8298 if (HARD_REGISTER_NUM_P (out_regno))
8299 for (k = 1; k < out_nregs; k++)
8300 reg_last_reload_reg[out_regno + k]
8301 = (piecemeal ? regno_reg_rtx[regno + k] : 0);
8303 /* Now do the inverse operation. */
8304 for (k = 0; k < nregs; k++)
8306 CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
8307 reg_reloaded_contents[regno + k]
8308 = (!HARD_REGISTER_NUM_P (out_regno) || !piecemeal
8309 ? out_regno
8310 : out_regno + k);
8311 reg_reloaded_insn[regno + k] = insn;
8312 SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
8313 if (HARD_REGNO_CALL_PART_CLOBBERED (regno + k, mode))
8314 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8315 regno + k);
8316 else
8317 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8318 regno + k);
8322 /* Maybe the spill reg contains a copy of reload_in. Only do
8323 something if there will not be an output reload for
8324 the register being reloaded. */
8325 else if (rld[r].out_reg == 0
8326 && rld[r].in != 0
8327 && ((REG_P (rld[r].in)
8328 && !HARD_REGISTER_P (rld[r].in)
8329 && !REGNO_REG_SET_P (&reg_has_output_reload,
8330 REGNO (rld[r].in)))
8331 || (REG_P (rld[r].in_reg)
8332 && !REGNO_REG_SET_P (&reg_has_output_reload,
8333 REGNO (rld[r].in_reg))))
8334 && !reg_set_p (reload_reg_rtx_for_input[r], PATTERN (insn)))
8336 rtx reg;
8337 enum machine_mode mode;
8338 int regno, nregs;
8340 reg = reload_reg_rtx_for_input[r];
8341 mode = GET_MODE (reg);
8342 regno = REGNO (reg);
8343 nregs = hard_regno_nregs[regno][mode];
8344 if (reload_regs_reach_end_p (regno, nregs, rld[r].opnum,
8345 rld[r].when_needed))
8347 int in_regno;
8348 int in_nregs;
8349 rtx in;
8350 bool piecemeal;
8352 if (REG_P (rld[r].in)
8353 && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER)
8354 in = rld[r].in;
8355 else if (REG_P (rld[r].in_reg))
8356 in = rld[r].in_reg;
8357 else
8358 in = XEXP (rld[r].in_reg, 0);
8359 in_regno = REGNO (in);
8361 in_nregs = (!HARD_REGISTER_NUM_P (in_regno) ? 1
8362 : hard_regno_nregs[in_regno][mode]);
8364 reg_last_reload_reg[in_regno] = reg;
8366 piecemeal = (HARD_REGISTER_NUM_P (in_regno)
8367 && nregs == in_nregs
8368 && inherit_piecemeal_p (regno, in_regno, mode));
8370 if (HARD_REGISTER_NUM_P (in_regno))
8371 for (k = 1; k < in_nregs; k++)
8372 reg_last_reload_reg[in_regno + k]
8373 = (piecemeal ? regno_reg_rtx[regno + k] : 0);
8375 /* Unless we inherited this reload, show we haven't
8376 recently done a store.
8377 Previous stores of inherited auto_inc expressions
8378 also have to be discarded. */
8379 if (! reload_inherited[r]
8380 || (rld[r].out && ! rld[r].out_reg))
8381 spill_reg_store[regno] = 0;
8383 for (k = 0; k < nregs; k++)
8385 CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
8386 reg_reloaded_contents[regno + k]
8387 = (!HARD_REGISTER_NUM_P (in_regno) || !piecemeal
8388 ? in_regno
8389 : in_regno + k);
8390 reg_reloaded_insn[regno + k] = insn;
8391 SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
8392 if (HARD_REGNO_CALL_PART_CLOBBERED (regno + k, mode))
8393 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8394 regno + k);
8395 else
8396 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8397 regno + k);
8403 /* The following if-statement was #if 0'd in 1.34 (or before...).
8404 It's reenabled in 1.35 because supposedly nothing else
8405 deals with this problem. */
8407 /* If a register gets output-reloaded from a non-spill register,
8408 that invalidates any previous reloaded copy of it.
8409 But forget_old_reloads_1 won't get to see it, because
8410 it thinks only about the original insn. So invalidate it here.
8411 Also do the same thing for RELOAD_OTHER constraints where the
8412 output is discarded. */
8413 if (i < 0
8414 && ((rld[r].out != 0
8415 && (REG_P (rld[r].out)
8416 || (MEM_P (rld[r].out)
8417 && REG_P (rld[r].out_reg))))
8418 || (rld[r].out == 0 && rld[r].out_reg
8419 && REG_P (rld[r].out_reg))))
8421 rtx out = ((rld[r].out && REG_P (rld[r].out))
8422 ? rld[r].out : rld[r].out_reg);
8423 int out_regno = REGNO (out);
8424 enum machine_mode mode = GET_MODE (out);
8426 /* REG_RTX is now set or clobbered by the main instruction.
8427 As the comment above explains, forget_old_reloads_1 only
8428 sees the original instruction, and there is no guarantee
8429 that the original instruction also clobbered REG_RTX.
8430 For example, if find_reloads sees that the input side of
8431 a matched operand pair dies in this instruction, it may
8432 use the input register as the reload register.
8434 Calling forget_old_reloads_1 is a waste of effort if
8435 REG_RTX is also the output register.
8437 If we know that REG_RTX holds the value of a pseudo
8438 register, the code after the call will record that fact. */
8439 if (rld[r].reg_rtx && rld[r].reg_rtx != out)
8440 forget_old_reloads_1 (rld[r].reg_rtx, NULL_RTX, NULL);
8442 if (!HARD_REGISTER_NUM_P (out_regno))
8444 rtx src_reg, store_insn = NULL_RTX;
8446 reg_last_reload_reg[out_regno] = 0;
8448 /* If we can find a hard register that is stored, record
8449 the storing insn so that we may delete this insn with
8450 delete_output_reload. */
8451 src_reg = reload_reg_rtx_for_output[r];
8453 /* If this is an optional reload, try to find the source reg
8454 from an input reload. */
8455 if (! src_reg)
8457 rtx set = single_set (insn);
8458 if (set && SET_DEST (set) == rld[r].out)
8460 int k;
8462 src_reg = SET_SRC (set);
8463 store_insn = insn;
8464 for (k = 0; k < n_reloads; k++)
8466 if (rld[k].in == src_reg)
8468 src_reg = reload_reg_rtx_for_input[k];
8469 break;
8474 else
8475 store_insn = new_spill_reg_store[REGNO (src_reg)];
8476 if (src_reg && REG_P (src_reg)
8477 && REGNO (src_reg) < FIRST_PSEUDO_REGISTER)
8479 int src_regno, src_nregs, k;
8480 rtx note;
8482 gcc_assert (GET_MODE (src_reg) == mode);
8483 src_regno = REGNO (src_reg);
8484 src_nregs = hard_regno_nregs[src_regno][mode];
8485 /* The place where to find a death note varies with
8486 PRESERVE_DEATH_INFO_REGNO_P . The condition is not
8487 necessarily checked exactly in the code that moves
8488 notes, so just check both locations. */
8489 note = find_regno_note (insn, REG_DEAD, src_regno);
8490 if (! note && store_insn)
8491 note = find_regno_note (store_insn, REG_DEAD, src_regno);
8492 for (k = 0; k < src_nregs; k++)
8494 spill_reg_store[src_regno + k] = store_insn;
8495 spill_reg_stored_to[src_regno + k] = out;
8496 reg_reloaded_contents[src_regno + k] = out_regno;
8497 reg_reloaded_insn[src_regno + k] = store_insn;
8498 CLEAR_HARD_REG_BIT (reg_reloaded_dead, src_regno + k);
8499 SET_HARD_REG_BIT (reg_reloaded_valid, src_regno + k);
8500 if (HARD_REGNO_CALL_PART_CLOBBERED (src_regno + k,
8501 mode))
8502 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8503 src_regno + k);
8504 else
8505 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8506 src_regno + k);
8507 SET_HARD_REG_BIT (reg_is_output_reload, src_regno + k);
/* NOTE(review): the next two statements update bit SRC_REGNO,
   not SRC_REGNO + K, even though they sit inside the per-register
   loop -- verify this asymmetry is intended.  */
8508 if (note)
8509 SET_HARD_REG_BIT (reg_reloaded_died, src_regno);
8510 else
8511 CLEAR_HARD_REG_BIT (reg_reloaded_died, src_regno);
8513 reg_last_reload_reg[out_regno] = src_reg;
8514 /* We have to set reg_has_output_reload here, or else
8515 forget_old_reloads_1 will clear reg_last_reload_reg
8516 right away. */
8517 SET_REGNO_REG_SET (&reg_has_output_reload,
8518 out_regno);
8521 else
8523 int k, out_nregs = hard_regno_nregs[out_regno][mode];
8525 for (k = 0; k < out_nregs; k++)
8526 reg_last_reload_reg[out_regno + k] = 0;
/* Commit the deaths recorded above so inheritance in later insns
   sees them.  */
8530 IOR_HARD_REG_SET (reg_reloaded_dead, reg_reloaded_died);
8533 /* Go through the motions to emit INSN and test if it is strictly valid.
8534 Return the emitted insn if valid, else return NULL. */
8536 static rtx
8537 emit_insn_if_valid_for_reload (rtx insn)
8539 rtx last = get_last_insn ();
8540 int code;
8542 insn = emit_insn (insn);
8543 code = recog_memoized (insn);
8545 if (code >= 0)
8547 extract_insn (insn);
8548 /* We want constrain operands to treat this insn strictly in its
8549 validity determination, i.e., the way it would after reload has
8550 completed. */
8551 if (constrain_operands (1))
8552 return insn;
8555 delete_insns_since (last);
8556 return NULL;
8559 /* Emit code to perform a reload from IN (which may be a reload register) to
8560 OUT (which may also be a reload register). IN or OUT is from operand
8561 OPNUM with reload type TYPE.
8563 Returns first insn emitted. */
8565 static rtx
8566 gen_reload (rtx out, rtx in, int opnum, enum reload_type type)
/* NOTE(review): the brace-only lines of this function appear to have been
   dropped when this dump was produced; the statements below are otherwise
   intact and are documented as-is.  */
8568 rtx last = get_last_insn ();
8569 rtx tem;
8571 /* If IN is a paradoxical SUBREG, remove it and try to put the
8572 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
8573 if (GET_CODE (in) == SUBREG
8574 && (GET_MODE_SIZE (GET_MODE (in))
8575 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
8576 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
8577 in = SUBREG_REG (in), out = tem;
8578 else if (GET_CODE (out) == SUBREG
8579 && (GET_MODE_SIZE (GET_MODE (out))
8580 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
8581 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
8582 out = SUBREG_REG (out), in = tem;
8584 /* How to do this reload can get quite tricky. Normally, we are being
8585 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
8586 register that didn't get a hard register. In that case we can just
8587 call emit_move_insn.
8589 We can also be asked to reload a PLUS that adds a register or a MEM to
8590 another register, constant or MEM. This can occur during frame pointer
8591 elimination and while reloading addresses. This case is handled by
8592 trying to emit a single insn to perform the add. If it is not valid,
8593 we use a two insn sequence.
8595 Or we can be asked to reload a unary operand that was a fragment of
8596 an addressing mode, into a register. If it isn't recognized as-is,
8597 we try making the unop operand and the reload-register the same:
8598 (set reg:X (unop:X expr:Y))
8599 -> (set reg:Y expr:Y) (set reg:X (unop:X reg:Y)).
8601 Finally, we could be called to handle an 'o' constraint by putting
8602 an address into a register. In that case, we first try to do this
8603 with a named pattern of "reload_load_address". If no such pattern
8604 exists, we just emit a SET insn and hope for the best (it will normally
8605 be valid on machines that use 'o').
8607 This entire process is made complex because reload will never
8608 process the insns we generate here and so we must ensure that
8609 they will fit their constraints and also by the fact that parts of
8610 IN might be being reloaded separately and replaced with spill registers.
8611 Because of this, we are, in some sense, just guessing the right approach
8612 here. The one listed above seems to work.
8614 ??? At some point, this whole thing needs to be rethought. */
8616 if (GET_CODE (in) == PLUS
8617 && (REG_P (XEXP (in, 0))
8618 || GET_CODE (XEXP (in, 0)) == SUBREG
8619 || MEM_P (XEXP (in, 0)))
8620 && (REG_P (XEXP (in, 1))
8621 || GET_CODE (XEXP (in, 1)) == SUBREG
8622 || CONSTANT_P (XEXP (in, 1))
8623 || MEM_P (XEXP (in, 1))))
8625 /* We need to compute the sum of a register or a MEM and another
8626 register, constant, or MEM, and put it into the reload
8627 register. The best possible way of doing this is if the machine
8628 has a three-operand ADD insn that accepts the required operands.
8630 The simplest approach is to try to generate such an insn and see if it
8631 is recognized and matches its constraints. If so, it can be used.
8633 It might be better not to actually emit the insn unless it is valid,
8634 but we need to pass the insn as an operand to `recog' and
8635 `extract_insn' and it is simpler to emit and then delete the insn if
8636 not valid than to dummy things up. */
8638 rtx op0, op1, tem, insn;
8639 int code;
8641 op0 = find_replacement (&XEXP (in, 0));
8642 op1 = find_replacement (&XEXP (in, 1));
8644 /* Since constraint checking is strict, commutativity won't be
8645 checked, so we need to do that here to avoid spurious failure
8646 if the add instruction is two-address and the second operand
8647 of the add is the same as the reload reg, which is frequently
8648 the case. If the insn would be A = B + A, rearrange it so
8649 it will be A = A + B as constrain_operands expects. */
8651 if (REG_P (XEXP (in, 1))
8652 && REGNO (out) == REGNO (XEXP (in, 1)))
8653 tem = op0, op0 = op1, op1 = tem;
8655 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
8656 in = gen_rtx_PLUS (GET_MODE (in), op0, op1);
8658 insn = emit_insn_if_valid_for_reload (gen_rtx_SET (VOIDmode, out, in));
8659 if (insn)
8660 return insn;
8662 /* If that failed, we must use a conservative two-insn sequence.
8664 Use a move to copy one operand into the reload register. Prefer
8665 to reload a constant, MEM or pseudo since the move patterns can
8666 handle an arbitrary operand. If OP1 is not a constant, MEM or
8667 pseudo and OP1 is not a valid operand for an add instruction, then
8668 reload OP1.
8670 After reloading one of the operands into the reload register, add
8671 the reload register to the output register.
8673 If there is another way to do this for a specific machine, a
8674 DEFINE_PEEPHOLE should be specified that recognizes the sequence
8675 we emit below. */
8677 code = (int) optab_handler (add_optab, GET_MODE (out));
8679 if (CONSTANT_P (op1) || MEM_P (op1) || GET_CODE (op1) == SUBREG
8680 || (REG_P (op1)
8681 && REGNO (op1) >= FIRST_PSEUDO_REGISTER)
8682 || (code != CODE_FOR_nothing
8683 && ! ((*insn_data[code].operand[2].predicate)
8684 (op1, insn_data[code].operand[2].mode))))
8685 tem = op0, op0 = op1, op1 = tem;
8687 gen_reload (out, op0, opnum, type);
8689 /* If OP0 and OP1 are the same, we can use OUT for OP1.
8690 This fixes a problem on the 32K where the stack pointer cannot
8691 be used as an operand of an add insn. */
8693 if (rtx_equal_p (op0, op1))
8694 op1 = out;
8696 insn = emit_insn_if_valid_for_reload (gen_add2_insn (out, op1));
8697 if (insn)
8699 /* Add a REG_EQUIV note so that find_equiv_reg can find it. */
8700 set_unique_reg_note (insn, REG_EQUIV, in);
8701 return insn;
8704 /* If that failed, copy the address register to the reload register.
8705 Then add the constant to the reload register. */
8707 gcc_assert (!reg_overlap_mentioned_p (out, op0));
8708 gen_reload (out, op1, opnum, type);
/* NOTE(review): this final add is emitted without a validity check,
   unlike the attempts above -- presumably every target can add this
   remaining operand to a register; confirm against target backends.  */
8709 insn = emit_insn (gen_add2_insn (out, op0));
8710 set_unique_reg_note (insn, REG_EQUIV, in);
8713 #ifdef SECONDARY_MEMORY_NEEDED
8714 /* If we need a memory location to do the move, do it that way. */
8715 else if ((REG_P (in)
8716 || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
8717 && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER
8718 && (REG_P (out)
8719 || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
8720 && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
8721 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (reg_or_subregno (in)),
8722 REGNO_REG_CLASS (reg_or_subregno (out)),
8723 GET_MODE (out)))
8725 /* Get the memory to use and rewrite both registers to its mode. */
8726 rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
8728 if (GET_MODE (loc) != GET_MODE (out))
8729 out = gen_rtx_REG (GET_MODE (loc), REGNO (out));
8731 if (GET_MODE (loc) != GET_MODE (in))
8732 in = gen_rtx_REG (GET_MODE (loc), REGNO (in));
8734 gen_reload (loc, in, opnum, type);
8735 gen_reload (out, loc, opnum, type);
8737 #endif
8738 else if (REG_P (out) && UNARY_P (in))
8740 rtx insn;
8741 rtx op1;
8742 rtx out_moded;
8743 rtx set;
8745 op1 = find_replacement (&XEXP (in, 0));
8746 if (op1 != XEXP (in, 0))
8747 in = gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in), op1);
8749 /* First, try a plain SET. */
8750 set = emit_insn_if_valid_for_reload (gen_rtx_SET (VOIDmode, out, in));
8751 if (set)
8752 return set;
8754 /* If that failed, move the inner operand to the reload
8755 register, and try the same unop with the inner expression
8756 replaced with the reload register. */
8758 if (GET_MODE (op1) != GET_MODE (out))
8759 out_moded = gen_rtx_REG (GET_MODE (op1), REGNO (out));
8760 else
8761 out_moded = out;
8763 gen_reload (out_moded, op1, opnum, type);
8765 insn
8766 = gen_rtx_SET (VOIDmode, out,
8767 gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in),
8768 out_moded));
8769 insn = emit_insn_if_valid_for_reload (insn);
8770 if (insn)
8772 set_unique_reg_note (insn, REG_EQUIV, in);
8773 return insn;
8776 fatal_insn ("Failure trying to reload:", set);
8778 /* If IN is a simple operand, use gen_move_insn. */
8779 else if (OBJECT_P (in) || GET_CODE (in) == SUBREG)
8781 tem = emit_insn (gen_move_insn (out, in));
8782 /* IN may contain a LABEL_REF, if so add a REG_LABEL_OPERAND note. */
8783 mark_jump_label (in, tem, 0);
8786 #ifdef HAVE_reload_load_address
8787 else if (HAVE_reload_load_address)
8788 emit_insn (gen_reload_load_address (out, in));
8789 #endif
8791 /* Otherwise, just write (set OUT IN) and hope for the best. */
8792 else
8793 emit_insn (gen_rtx_SET (VOIDmode, out, in));
8795 /* Return the first insn emitted.
8796 We can not just return get_last_insn, because there may have
8797 been multiple instructions emitted. Also note that gen_move_insn may
8798 emit more than one insn itself, so we can not assume that there is one
8799 insn emitted per emit_insn_before call. */
8801 return last ? NEXT_INSN (last) : get_insns ();
8804 /* Delete a previously made output-reload whose result we now believe
8805 is not needed. First we double-check.
8807 INSN is the insn now being processed.
8808 LAST_RELOAD_REG is the hard register number for which we want to delete
8809 the last output reload.
8810 J is the reload-number that originally used REG. The caller has made
8811 certain that reload J doesn't use REG any longer for input.
8812 NEW_RELOAD_REG is reload register that reload J is using for REG. */
8814 static void
8815 delete_output_reload (rtx insn, int j, int last_reload_reg, rtx new_reload_reg)
8817 rtx output_reload_insn = spill_reg_store[last_reload_reg];
8818 rtx reg = spill_reg_stored_to[last_reload_reg];
8819 int k;
8820 int n_occurrences;
8821 int n_inherited = 0;
8822 rtx i1;
/* SUBSTED will be REG's memory equivalent, if any; appearances of that
   equivalent in INSN also count as uses of REG below.  */
8823 rtx substed;
8825 /* It is possible that this reload has been only used to set another reload
8826 we eliminated earlier and thus deleted this instruction too. */
8827 if (INSN_DELETED_P (output_reload_insn))
8828 return;
8830 /* Get the raw pseudo-register referred to. */
8832 while (GET_CODE (reg) == SUBREG)
8833 reg = SUBREG_REG (reg);
8834 substed = reg_equiv_memory_loc[REGNO (reg)];
8836 /* This is unsafe if the operand occurs more often in the current
8837 insn than it is inherited. */
/* Count, in N_INHERITED, the reloads of INSN whose input is REG and that
   are inherited (or are reload J itself).  A non-inherited reload of REG
   means the store is still needed, so bail out.  */
8838 for (k = n_reloads - 1; k >= 0; k--)
8840 rtx reg2 = rld[k].in;
8841 if (! reg2)
8842 continue;
8843 if (MEM_P (reg2) || reload_override_in[k])
8844 reg2 = rld[k].in_reg;
8845 #ifdef AUTO_INC_DEC
8846 if (rld[k].out && ! rld[k].out_reg)
8847 reg2 = XEXP (rld[k].in_reg, 0);
8848 #endif
8849 while (GET_CODE (reg2) == SUBREG)
8850 reg2 = SUBREG_REG (reg2);
8851 if (rtx_equal_p (reg2, reg))
8853 if (reload_inherited[k] || reload_override_in[k] || k == j)
8854 n_inherited++;
8855 else
8856 return;
/* Count every appearance of REG in INSN, including appearances through
   SUBSTED and any alternate memory equivalents.  If REG appears more
   often than the inherited reloads account for, deleting the store
   would lose a live value.  */
8859 n_occurrences = count_occurrences (PATTERN (insn), reg, 0);
8860 if (CALL_P (insn) && CALL_INSN_FUNCTION_USAGE (insn))
8861 n_occurrences += count_occurrences (CALL_INSN_FUNCTION_USAGE (insn),
8862 reg, 0);
8863 if (substed)
8864 n_occurrences += count_occurrences (PATTERN (insn),
8865 eliminate_regs (substed, VOIDmode,
8866 NULL_RTX), 0);
8867 for (i1 = reg_equiv_alt_mem_list[REGNO (reg)]; i1; i1 = XEXP (i1, 1))
8869 gcc_assert (!rtx_equal_p (XEXP (i1, 0), substed));
8870 n_occurrences += count_occurrences (PATTERN (insn), XEXP (i1, 0), 0);
8872 if (n_occurrences > n_inherited)
8873 return;
8875 /* If the pseudo-reg we are reloading is no longer referenced
8876 anywhere between the store into it and here,
8877 and we're within the same basic block, then the value can only
8878 pass through the reload reg and end up here.
8879 Otherwise, give up--return. */
8880 for (i1 = NEXT_INSN (output_reload_insn);
8881 i1 != insn; i1 = NEXT_INSN (i1))
8883 if (NOTE_INSN_BASIC_BLOCK_P (i1))
8884 return;
8885 if ((NONJUMP_INSN_P (i1) || CALL_P (i1))
8886 && reg_mentioned_p (reg, PATTERN (i1)))
8888 /* If this is USE in front of INSN, we only have to check that
8889 there are no more references than accounted for by inheritance. */
8890 while (NONJUMP_INSN_P (i1) && GET_CODE (PATTERN (i1)) == USE)
8892 n_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
8893 i1 = NEXT_INSN (i1);
8895 if (n_occurrences <= n_inherited && i1 == insn)
8896 break;
8897 return;
8901 /* We will be deleting the insn. Remove the spill reg information. */
8902 for (k = hard_regno_nregs[last_reload_reg][GET_MODE (reg)]; k-- > 0; )
8904 spill_reg_store[last_reload_reg + k] = 0;
8905 spill_reg_stored_to[last_reload_reg + k] = 0;
8908 /* The caller has already checked that REG dies or is set in INSN.
8909 It has also checked that we are optimizing, and thus some
8910 inaccuracies in the debugging information are acceptable.
8911 So we could just delete output_reload_insn. But in some cases
8912 we can improve the debugging information without sacrificing
8913 optimization - maybe even improving the code: See if the pseudo
8914 reg has been completely replaced with reload regs. If so, delete
8915 the store insn and forget we had a stack slot for the pseudo. */
8916 if (rld[j].out != rld[j].in
8917 && REG_N_DEATHS (REGNO (reg)) == 1
8918 && REG_N_SETS (REGNO (reg)) == 1
8919 && REG_BASIC_BLOCK (REGNO (reg)) >= NUM_FIXED_BLOCKS
8920 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
8922 rtx i2;
8924 /* We know that it was used only between here and the beginning of
8925 the current basic block. (We also know that the last use before
8926 INSN was the output reload we are thinking of deleting, but never
8927 mind that.) Search that range; see if any ref remains. */
8928 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
8930 rtx set = single_set (i2);
8932 /* Uses which just store in the pseudo don't count,
8933 since if they are the only uses, they are dead. */
8934 if (set != 0 && SET_DEST (set) == reg)
8935 continue;
8936 if (LABEL_P (i2)
8937 || JUMP_P (i2))
8938 break;
8939 if ((NONJUMP_INSN_P (i2) || CALL_P (i2))
8940 && reg_mentioned_p (reg, PATTERN (i2)))
8942 /* Some other ref remains; just delete the output reload we
8943 know to be dead. */
8944 delete_address_reloads (output_reload_insn, insn);
8945 delete_insn (output_reload_insn);
8946 return;
8950 /* Delete the now-dead stores into this pseudo. Note that this
8951 loop also takes care of deleting output_reload_insn. */
8952 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
8954 rtx set = single_set (i2);
8956 if (set != 0 && SET_DEST (set) == reg)
8958 delete_address_reloads (i2, insn);
8959 delete_insn (i2);
8961 if (LABEL_P (i2)
8962 || JUMP_P (i2))
8963 break;
8966 /* For the debugging info, say the pseudo lives in this reload reg. */
8967 reg_renumber[REGNO (reg)] = REGNO (new_reload_reg);
8968 if (ira_conflicts_p)
8969 /* Inform IRA about the change. */
8970 ira_mark_allocation_change (REGNO (reg));
8971 alter_reg (REGNO (reg), -1, false);
8973 else
8975 delete_address_reloads (output_reload_insn, insn);
8976 delete_insn (output_reload_insn);
8980 /* We are going to delete DEAD_INSN. Recursively delete loads of
8981 reload registers used in DEAD_INSN that are not used till CURRENT_INSN.
8982 CURRENT_INSN is being reloaded, so we have to check its reloads too. */
8983 static void
8984 delete_address_reloads (rtx dead_insn, rtx current_insn)
8986 rtx set = single_set (dead_insn);
8987 rtx set2, dst, prev, next;
8988 if (set)
8990 rtx dst = SET_DEST (set);
8991 if (MEM_P (dst))
8992 delete_address_reloads_1 (dead_insn, XEXP (dst, 0), current_insn);
8994 /* If we deleted the store from a reloaded post_{in,de}c expression,
8995 we can delete the matching adds. */
8996 prev = PREV_INSN (dead_insn);
8997 next = NEXT_INSN (dead_insn);
8998 if (! prev || ! next)
8999 return;
9000 set = single_set (next);
9001 set2 = single_set (prev);
9002 if (! set || ! set2
9003 || GET_CODE (SET_SRC (set)) != PLUS || GET_CODE (SET_SRC (set2)) != PLUS
9004 || !CONST_INT_P (XEXP (SET_SRC (set), 1))
9005 || !CONST_INT_P (XEXP (SET_SRC (set2), 1)))
9006 return;
9007 dst = SET_DEST (set);
9008 if (! rtx_equal_p (dst, SET_DEST (set2))
9009 || ! rtx_equal_p (dst, XEXP (SET_SRC (set), 0))
9010 || ! rtx_equal_p (dst, XEXP (SET_SRC (set2), 0))
9011 || (INTVAL (XEXP (SET_SRC (set), 1))
9012 != -INTVAL (XEXP (SET_SRC (set2), 1))))
9013 return;
9014 delete_related_insns (prev);
9015 delete_related_insns (next);
9018 /* Subfunction of delete_address_reloads: process registers found in X. */
/* DEAD_INSN is the insn about to be deleted; CURRENT_INSN is the insn
   being reloaded.  For each spill register mentioned in X, scan backwards
   for the insn that loaded it; if that load turns out to be unused apart
   from DEAD_INSN, delete it too, recursively cleaning up the address
   reloads that fed it.  */
9019 static void
9020 delete_address_reloads_1 (rtx dead_insn, rtx x, rtx current_insn)
9022 rtx prev, set, dst, i2;
9023 int i, j;
9024 enum rtx_code code = GET_CODE (x);
/* Non-REG rtx: recurse into all sub-expressions and vector elements.  */
9026 if (code != REG)
9028 const char *fmt = GET_RTX_FORMAT (code);
9029 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9031 if (fmt[i] == 'e')
9032 delete_address_reloads_1 (dead_insn, XEXP (x, i), current_insn);
9033 else if (fmt[i] == 'E')
9035 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9036 delete_address_reloads_1 (dead_insn, XVECEXP (x, i, j),
9037 current_insn);
9040 return;
/* X is a register; only spill (reload) registers are of interest.  */
9043 if (spill_reg_order[REGNO (x)] < 0)
9044 return;
9046 /* Scan backwards for the insn that sets x. This might be a way back due
9047 to inheritance. */
9048 for (prev = PREV_INSN (dead_insn); prev; prev = PREV_INSN (prev))
9050 code = GET_CODE (prev);
9051 if (code == CODE_LABEL || code == JUMP_INSN)
9052 return;
9053 if (!INSN_P (prev))
9054 continue;
9055 if (reg_set_p (x, PATTERN (prev)))
9056 break;
9057 if (reg_referenced_p (x, PATTERN (prev)))
9058 return;
/* The setter must itself be an insn emitted by this reload pass
   (UID >= reload_first_uid); otherwise leave it alone.  */
9060 if (! prev || INSN_UID (prev) < reload_first_uid)
9061 return;
9062 /* Check that PREV only sets the reload register. */
9063 set = single_set (prev);
9064 if (! set)
9065 return;
9066 dst = SET_DEST (set);
9067 if (!REG_P (dst)
9068 || ! rtx_equal_p (dst, x))
9069 return;
/* Unless DEAD_INSN itself clobbers DST, scan forward to make sure no
   later insn (or reload of CURRENT_INSN) still needs DST's value.  */
9070 if (! reg_set_p (dst, PATTERN (dead_insn)))
9072 /* Check if DST was used in a later insn -
9073 it might have been inherited. */
9074 for (i2 = NEXT_INSN (dead_insn); i2; i2 = NEXT_INSN (i2))
9076 if (LABEL_P (i2))
9077 break;
9078 if (! INSN_P (i2))
9079 continue;
9080 if (reg_referenced_p (dst, PATTERN (i2)))
9082 /* If there is a reference to the register in the current insn,
9083 it might be loaded in a non-inherited reload. If no other
9084 reload uses it, that means the register is set before
9085 referenced. */
9086 if (i2 == current_insn)
9088 for (j = n_reloads - 1; j >= 0; j--)
9089 if ((rld[j].reg_rtx == dst && reload_inherited[j])
9090 || reload_override_in[j] == dst)
9091 return;
9092 for (j = n_reloads - 1; j >= 0; j--)
9093 if (rld[j].in && rld[j].reg_rtx == dst)
9094 break;
9095 if (j >= 0)
9096 break;
9098 return;
9100 if (JUMP_P (i2))
9101 break;
9102 /* If DST is still live at CURRENT_INSN, check if it is used for
9103 any reload. Note that even if CURRENT_INSN sets DST, we still
9104 have to check the reloads. */
9105 if (i2 == current_insn)
9107 for (j = n_reloads - 1; j >= 0; j--)
9108 if ((rld[j].reg_rtx == dst && reload_inherited[j])
9109 || reload_override_in[j] == dst)
9110 return;
9111 /* ??? We can't finish the loop here, because dst might be
9112 allocated to a pseudo in this block if no reload in this
9113 block needs any of the classes containing DST - see
9114 spill_hard_reg. There is no easy way to tell this, so we
9115 have to scan till the end of the basic block. */
9117 if (reg_set_p (dst, PATTERN (i2)))
9118 break;
/* PREV's value is dead: recursively delete the loads feeding its
   source, mark the spill register's contents invalid, then delete
   PREV itself.  */
9121 delete_address_reloads_1 (prev, SET_SRC (set), current_insn);
9122 reg_reloaded_contents[REGNO (dst)] = -1;
9123 delete_insn (prev);
9126 /* Output reload-insns to reload VALUE into RELOADREG.
9127 VALUE is an autoincrement or autodecrement RTX whose operand
9128 is a register or memory location;
9129 so reloading involves incrementing that location.
9130 IN is either identical to VALUE, or some cheaper place to reload from.
9132 INC_AMOUNT is the number to increment or decrement by (always positive).
9133 This cannot be deduced from VALUE.
9135 Return the instruction that stores into RELOADREG. */
9137 static rtx
9138 inc_for_reload (rtx reloadreg, rtx in, rtx value, int inc_amount)
9140 /* REG or MEM to be copied and incremented. */
9141 rtx incloc = find_replacement (&XEXP (value, 0));
9142 /* Nonzero if increment after copying. */
9143 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC
9144 || GET_CODE (value) == POST_MODIFY);
9145 rtx last;
9146 rtx inc;
9147 rtx add_insn;
9148 int code;
9149 rtx store;
/* The actual rtx to copy from: when IN is VALUE itself, copy from the
   underlying location INCLOC.  */
9150 rtx real_in = in == value ? incloc : in;
9152 /* No hard register is equivalent to this register after
9153 inc/dec operation. If REG_LAST_RELOAD_REG were nonzero,
9154 we could inc/dec that register as well (maybe even using it for
9155 the source), but I'm not sure it's worth worrying about. */
9156 if (REG_P (incloc))
9157 reg_last_reload_reg[REGNO (incloc)] = 0;
/* Determine the increment rtx: for {PRE,POST}_MODIFY it is the second
   operand of the embedded PLUS; otherwise build a CONST_INT from
   INC_AMOUNT, negated for decrements.  */
9159 if (GET_CODE (value) == PRE_MODIFY || GET_CODE (value) == POST_MODIFY)
9161 gcc_assert (GET_CODE (XEXP (value, 1)) == PLUS);
9162 inc = find_replacement (&XEXP (XEXP (value, 1), 1));
9164 else
9166 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
9167 inc_amount = -inc_amount;
9169 inc = GEN_INT (inc_amount);
9172 /* If this is post-increment, first copy the location to the reload reg. */
9173 if (post && real_in != reloadreg)
9174 emit_insn (gen_move_insn (reloadreg, real_in));
9176 if (in == value)
9178 /* See if we can directly increment INCLOC. Use a method similar to
9179 that in gen_reload. */
9181 last = get_last_insn ();
9182 add_insn = emit_insn (gen_rtx_SET (VOIDmode, incloc,
9183 gen_rtx_PLUS (GET_MODE (incloc),
9184 incloc, inc)));
/* Ask the backend whether the emitted add matches a valid insn pattern
   with satisfiable constraints; if not, back it out and fall through
   to the RELOADREG-based sequence below.  */
9186 code = recog_memoized (add_insn);
9187 if (code >= 0)
9189 extract_insn (add_insn);
9190 if (constrain_operands (1))
9192 /* If this is a pre-increment and we have incremented the value
9193 where it lives, copy the incremented value to RELOADREG to
9194 be used as an address. */
9196 if (! post)
9197 emit_insn (gen_move_insn (reloadreg, incloc));
9199 return add_insn;
9202 delete_insns_since (last);
9205 /* If couldn't do the increment directly, must increment in RELOADREG.
9206 The way we do this depends on whether this is pre- or post-increment.
9207 For pre-increment, copy INCLOC to the reload register, increment it
9208 there, then save back. */
9210 if (! post)
9212 if (in != reloadreg)
9213 emit_insn (gen_move_insn (reloadreg, real_in));
9214 emit_insn (gen_add2_insn (reloadreg, inc))
9215 store = emit_insn (gen_move_insn (incloc, reloadreg));
9217 else
9219 /* Postincrement.
9220 Because this might be a jump insn or a compare, and because RELOADREG
9221 may not be available after the insn in an input reload, we must do
9222 the incrementation before the insn being reloaded for.
9224 We have already copied IN to RELOADREG. Increment the copy in
9225 RELOADREG, save that back, then decrement RELOADREG so it has
9226 the original value. */
9228 emit_insn (gen_add2_insn (reloadreg, inc));
9229 store = emit_insn (gen_move_insn (incloc, reloadreg));
/* Undo the increment so RELOADREG ends up holding the original
   (pre-increment) value.  */
9230 if (CONST_INT_P (inc))
9231 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-INTVAL (inc))));
9232 else
9233 emit_insn (gen_sub2_insn (reloadreg, inc));
9236 return store;
#ifdef AUTO_INC_DEC
/* Recursively scan X, part of INSN's pattern, and attach a REG_INC note
   to INSN for every auto-increment or auto-decrement address found.  */
static void
add_auto_inc_notes (rtx insn, rtx x)
{
  enum rtx_code rcode = GET_CODE (x);
  const char *format;
  int opno, elt;

  /* A MEM whose address is an autoinc expression gets the note; the
     register being incremented is the operand of that address.  */
  if (rcode == MEM && auto_inc_p (XEXP (x, 0)))
    {
      add_reg_note (insn, REG_INC, XEXP (XEXP (x, 0), 0));
      return;
    }

  /* Otherwise walk every sub-rtx and every vector element.  */
  format = GET_RTX_FORMAT (rcode);
  for (opno = GET_RTX_LENGTH (rcode) - 1; opno >= 0; opno--)
    {
      if (format[opno] == 'e')
	add_auto_inc_notes (insn, XEXP (x, opno));
      else if (format[opno] == 'E')
	for (elt = XVECLEN (x, opno) - 1; elt >= 0; elt--)
	  add_auto_inc_notes (insn, XVECEXP (x, opno, elt));
    }
}
#endif
9266 /* This is used by reload pass, that does emit some instructions after
9267 abnormal calls moving basic block end, but in fact it wants to emit
9268 them on the edge. Looks for abnormal call edges, find backward the
9269 proper call and fix the damage.
9271 Similar handle instructions throwing exceptions internally. */
9272 void
9273 fixup_abnormal_edges (void)
9275 bool inserted = false;
9276 basic_block bb;
9278 FOR_EACH_BB (bb)
9280 edge e;
9281 edge_iterator ei;
9283 /* Look for cases we are interested in - calls or instructions causing
9284 exceptions. */
9285 FOR_EACH_EDGE (e, ei, bb->succs)
9287 if (e->flags & EDGE_ABNORMAL_CALL)
9288 break;
9289 if ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
9290 == (EDGE_ABNORMAL | EDGE_EH))
9291 break;
/* An abnormal edge exists but the block's last insn is neither the call
   nor a trapping insn -- reload appended insns after the real block
   end, so we must find the call/trapping insn and repair things.  */
9293 if (e && !CALL_P (BB_END (bb))
9294 && !can_throw_internal (BB_END (bb)))
9296 rtx insn;
9298 /* Get past the new insns generated. Allow notes, as the insns
9299 may be already deleted. */
9300 insn = BB_END (bb);
9301 while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
9302 && !can_throw_internal (insn)
9303 && insn != BB_HEAD (bb))
9304 insn = PREV_INSN (insn);
9306 if (CALL_P (insn) || can_throw_internal (insn))
9308 rtx stop, next;
9310 stop = NEXT_INSN (BB_END (bb));
9311 BB_END (bb) = insn;
9312 insn = NEXT_INSN (insn);
/* Find the fallthru edge: the trailing insns will be re-inserted
   there rather than left after the (now final) trapping insn.  */
9314 FOR_EACH_EDGE (e, ei, bb->succs)
9315 if (e->flags & EDGE_FALLTHRU)
9316 break;
/* Detach every insn between the new block end and the old one,
   moving real insns onto the fallthru edge.  */
9318 while (insn && insn != stop)
9320 next = NEXT_INSN (insn);
9321 if (INSN_P (insn))
9323 delete_insn (insn);
9325 /* Sometimes there's still the return value USE.
9326 If it's placed after a trapping call (i.e. that
9327 call is the last insn anyway), we have no fallthru
9328 edge. Simply delete this use and don't try to insert
9329 on the non-existent edge. */
9330 if (GET_CODE (PATTERN (insn)) != USE)
9332 /* We're not deleting it, we're moving it. */
9333 INSN_DELETED_P (insn) = 0;
9334 PREV_INSN (insn) = NULL_RTX;
9335 NEXT_INSN (insn) = NULL_RTX;
9337 insert_insn_on_edge (insn, e);
9338 inserted = true;
9341 else if (!BARRIER_P (insn))
9342 set_block_for_insn (insn, NULL);
9343 insn = next;
9347 /* It may be that we don't find any such trapping insn. In this
9348 case we discovered quite late that the insn that had been
9349 marked as can_throw_internal in fact couldn't trap at all.
9350 So we should in fact delete the EH edges out of the block. */
9351 else
9352 purge_dead_edges (bb);
9356 /* We've possibly turned single trapping insn into multiple ones. */
9357 if (cfun->can_throw_non_call_exceptions)
9359 sbitmap blocks;
9360 blocks = sbitmap_alloc (last_basic_block);
9361 sbitmap_ones (blocks);
9362 find_many_sub_basic_blocks (blocks);
9363 sbitmap_free (blocks);
9366 if (inserted)
9367 commit_edge_insertions ();
9369 #ifdef ENABLE_CHECKING
9370 /* Verify that we didn't turn one trapping insn into many, and that
9371 we found and corrected all of the problems wrt fixups on the
9372 fallthru edge. */
9373 verify_flow_info ();
9374 #endif