Clean up some minor white space issues in trans-decl.c and trans-expr.c
[official-gcc.git] / gcc / reload1.c
blob2229fd32a4a521c1db39a3be75ae6cc2b5c9ca03
1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987-2016 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "predict.h"
28 #include "df.h"
29 #include "tm_p.h"
30 #include "optabs.h"
31 #include "regs.h"
32 #include "ira.h"
33 #include "recog.h"
35 #include "rtl-error.h"
36 #include "expr.h"
37 #include "addresses.h"
38 #include "cfgrtl.h"
39 #include "cfgbuild.h"
40 #include "reload.h"
41 #include "except.h"
42 #include "dumpfile.h"
43 #include "rtl-iter.h"
45 /* This file contains the reload pass of the compiler, which is
46 run after register allocation has been done. It checks that
47 each insn is valid (operands required to be in registers really
48 are in registers of the proper class) and fixes up invalid ones
49 by copying values temporarily into registers for the insns
50 that need them.
52 The results of register allocation are described by the vector
53 reg_renumber; the insns still contain pseudo regs, but reg_renumber
54 can be used to find which hard reg, if any, a pseudo reg is in.
56 The technique we always use is to free up a few hard regs that are
57 called ``reload regs'', and for each place where a pseudo reg
58 must be in a hard reg, copy it temporarily into one of the reload regs.
60 Reload regs are allocated locally for every instruction that needs
61 reloads. When there are pseudos which are allocated to a register that
62 has been chosen as a reload reg, such pseudos must be ``spilled''.
63 This means that they go to other hard regs, or to stack slots if no other
64 available hard regs can be found. Spilling can invalidate more
65 insns, requiring additional need for reloads, so we must keep checking
66 until the process stabilizes.
68 For machines with different classes of registers, we must keep track
69 of the register class needed for each reload, and make sure that
70 we allocate enough reload registers of each class.
72 The file reload.c contains the code that checks one insn for
73 validity and reports the reloads that it needs. This file
74 is in charge of scanning the entire rtl code, accumulating the
75 reload needs, spilling, assigning reload registers to use for
76 fixing up each insn, and generating the new insns to copy values
77 into the reload registers. */
79 struct target_reload default_target_reload;
80 #if SWITCHABLE_TARGET
81 struct target_reload *this_target_reload = &default_target_reload;
82 #endif
84 #define spill_indirect_levels \
85 (this_target_reload->x_spill_indirect_levels)
87 /* During reload_as_needed, element N contains a REG rtx for the hard reg
88 into which reg N has been reloaded (perhaps for a previous insn). */
89 static rtx *reg_last_reload_reg;
91 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
92 for an output reload that stores into reg N. */
93 static regset_head reg_has_output_reload;
95 /* Indicates which hard regs are reload-registers for an output reload
96 in the current insn. */
97 static HARD_REG_SET reg_is_output_reload;
99 /* Widest width in which each pseudo reg is referred to (via subreg). */
100 static unsigned int *reg_max_ref_width;
102 /* Vector to remember old contents of reg_renumber before spilling. */
103 static short *reg_old_renumber;
105 /* During reload_as_needed, element N contains the last pseudo regno reloaded
106 into hard register N. If that pseudo reg occupied more than one register,
107 reg_reloaded_contents points to that pseudo for each spill register in
108 use; all of these must remain set for an inheritance to occur. */
109 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
111 /* During reload_as_needed, element N contains the insn for which
112 hard register N was last used. Its contents are significant only
113 when reg_reloaded_valid is set for this register. */
114 static rtx_insn *reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
116 /* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid. */
117 static HARD_REG_SET reg_reloaded_valid;
118 /* Indicate if the register was dead at the end of the reload.
119 This is only valid if reg_reloaded_contents is set and valid. */
120 static HARD_REG_SET reg_reloaded_dead;
122 /* Indicate whether the register's current value is one that is not
123 safe to retain across a call, even for registers that are normally
124 call-saved. This is only meaningful for members of reg_reloaded_valid. */
125 static HARD_REG_SET reg_reloaded_call_part_clobbered;
127 /* Number of spill-regs so far; number of valid elements of spill_regs. */
128 static int n_spills;
130 /* In parallel with spill_regs, contains REG rtx's for those regs.
131 Holds the last rtx used for any given reg, or 0 if it has never
132 been used for spilling yet. This rtx is reused, provided it has
133 the proper mode. */
134 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
136 /* In parallel with spill_regs, contains nonzero for a spill reg
137 that was stored after the last time it was used.
138 The precise value is the insn generated to do the store. */
139 static rtx_insn *spill_reg_store[FIRST_PSEUDO_REGISTER];
141 /* This is the register that was stored with spill_reg_store. This is a
142 copy of reload_out / reload_out_reg when the value was stored; if
143 reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg. */
144 static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER];
146 /* This table is the inverse mapping of spill_regs:
147 indexed by hard reg number,
148 it contains the position of that reg in spill_regs,
149 or -1 for something that is not in spill_regs.
151 ?!? This is no longer accurate. */
152 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
154 /* This reg set indicates registers that can't be used as spill registers for
155 the currently processed insn. These are the hard registers which are live
156 during the insn, but not allocated to pseudos, as well as fixed
157 registers. */
158 static HARD_REG_SET bad_spill_regs;
160 /* These are the hard registers that can't be used as spill register for any
161 insn. This includes registers used for user variables and registers that
162 we can't eliminate. A register that appears in this set also can't be used
163 to retry register allocation. */
164 static HARD_REG_SET bad_spill_regs_global;
166 /* Describes order of use of registers for reloading
167 of spilled pseudo-registers. `n_spills' is the number of
168 elements that are actually valid; new ones are added at the end.
170 Both spill_regs and spill_reg_order are used on two occasions:
171 once during find_reload_regs, where they keep track of the spill registers
172 for a single insn, but also during reload_as_needed where they show all
173 the registers ever used by reload. For the latter case, the information
174 is calculated during finish_spills. */
175 static short spill_regs[FIRST_PSEUDO_REGISTER];
177 /* This vector of reg sets indicates, for each pseudo, which hard registers
178 may not be used for retrying global allocation because the register was
179 formerly spilled from one of them. If we allowed reallocating a pseudo to
180 a register that it was already allocated to, reload might not
181 terminate. */
182 static HARD_REG_SET *pseudo_previous_regs;
184 /* This vector of reg sets indicates, for each pseudo, which hard
185 registers may not be used for retrying global allocation because they
186 are used as spill registers during one of the insns in which the
187 pseudo is live. */
188 static HARD_REG_SET *pseudo_forbidden_regs;
190 /* All hard regs that have been used as spill registers for any insn are
191 marked in this set. */
192 static HARD_REG_SET used_spill_regs;
194 /* Index of last register assigned as a spill register. We allocate in
195 a round-robin fashion. */
196 static int last_spill_reg;
198 /* Record the stack slot for each spilled hard register. */
199 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
201 /* Width allocated so far for that stack slot. */
202 static unsigned int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
204 /* Record which pseudos needed to be spilled. */
205 static regset_head spilled_pseudos;
207 /* Record which pseudos changed their allocation in finish_spills. */
208 static regset_head changed_allocation_pseudos;
210 /* Used for communication between order_regs_for_reload and count_pseudo.
211 Used to avoid counting one pseudo twice. */
212 static regset_head pseudos_counted;
214 /* First uid used by insns created by reload in this function.
215 Used in find_equiv_reg. */
216 int reload_first_uid;
218 /* Flag set by local-alloc or global-alloc if anything is live in
219 a call-clobbered reg across calls. */
220 int caller_save_needed;
222 /* Set to 1 while reload_as_needed is operating.
223 Required by some machines to handle any generated moves differently. */
224 int reload_in_progress = 0;
226 /* This obstack is used for allocation of rtl during register elimination.
227 The allocated storage can be freed once find_reloads has processed the
228 insn. */
229 static struct obstack reload_obstack;
231 /* Points to the beginning of the reload_obstack. All insn_chain structures
232 are allocated first. */
233 static char *reload_startobj;
235 /* The point after all insn_chain structures. Used to quickly deallocate
236 memory allocated in copy_reloads during calculate_needs_all_insns. */
237 static char *reload_firstobj;
239 /* This points before all local rtl generated by register elimination.
240 Used to quickly free all memory after processing one insn. */
241 static char *reload_insn_firstobj;
243 /* List of insn_chain instructions, one for every insn that reload needs to
244 examine. */
245 struct insn_chain *reload_insn_chain;
247 /* TRUE if we potentially left dead insns in the insn stream and want to
248 run DCE immediately after reload, FALSE otherwise. */
249 static bool need_dce;
251 /* List of all insns needing reloads. */
252 static struct insn_chain *insns_need_reload;
254 /* This structure is used to record information about register eliminations.
255 Each array entry describes one possible way of eliminating a register
256 in favor of another. If there is more than one way of eliminating a
257 particular register, the most preferred should be specified first. */
259 struct elim_table
261 int from; /* Register number to be eliminated. */
262 int to; /* Register number used as replacement. */
263 HOST_WIDE_INT initial_offset; /* Initial difference between values. */
264 int can_eliminate; /* Nonzero if this elimination can be done. */
265 int can_eliminate_previous; /* Value returned by TARGET_CAN_ELIMINATE
266 target hook in previous scan over insns
267 made by reload. */
268 HOST_WIDE_INT offset; /* Current offset between the two regs. */
269 HOST_WIDE_INT previous_offset;/* Offset at end of previous insn. */
270 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
271 rtx from_rtx; /* REG rtx for the register to be eliminated.
272 We cannot simply compare the number since
273 we might then spuriously replace a hard
274 register corresponding to a pseudo
275 assigned to the reg to be eliminated. */
276 rtx to_rtx; /* REG rtx for the replacement. */
279 static struct elim_table *reg_eliminate = 0;
281 /* This is an intermediate structure to initialize the table. It has
282 exactly the members provided by ELIMINABLE_REGS. */
283 static const struct elim_table_1
285 const int from;
286 const int to;
287 } reg_eliminate_1[] =
289 /* If a set of eliminable registers was specified, define the table from it.
290 Otherwise, default to the normal case of the frame pointer being
291 replaced by the stack pointer. */
293 #ifdef ELIMINABLE_REGS
294 ELIMINABLE_REGS;
295 #else
296 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
297 #endif
299 #define NUM_ELIMINABLE_REGS ARRAY_SIZE (reg_eliminate_1)
301 /* Record the number of pending eliminations that have an offset not equal
302 to their initial offset. If nonzero, we use a new copy of each
303 replacement result in any insns encountered. */
304 int num_not_at_initial_offset;
306 /* Count the number of registers that we may be able to eliminate. */
307 static int num_eliminable;
308 /* And the number of registers that are equivalent to a constant that
309 can be eliminated to frame_pointer / arg_pointer + constant. */
310 static int num_eliminable_invariants;
312 /* For each label, we record the offset of each elimination. If we reach
313 a label by more than one path and an offset differs, we cannot do the
314 elimination. This information is indexed by the difference of the
315 number of the label and the first label number. We can't offset the
316 pointer itself as this can cause problems on machines with segmented
317 memory. The first table is an array of flags that records whether we
318 have yet encountered a label and the second table is an array of arrays,
319 one entry in the latter array for each elimination. */
321 static int first_label_num;
322 static char *offsets_known_at;
323 static HOST_WIDE_INT (*offsets_at)[NUM_ELIMINABLE_REGS];
325 vec<reg_equivs_t, va_gc> *reg_equivs;
327 /* Stack of addresses where an rtx has been changed. We can undo the
328 changes by popping items off the stack and restoring the original
329 value at each location.
331 We use this simplistic undo capability rather than copy_rtx as copy_rtx
332 will not make a deep copy of a normally sharable rtx, such as
333 (const (plus (symbol_ref) (const_int))). If such an expression appears
334 as R1 in gen_reload_chain_without_interm_reg_p, then a shared
335 rtx expression would be changed. See PR 42431. */
337 typedef rtx *rtx_p;
338 static vec<rtx_p> substitute_stack;
340 /* Number of labels in the current function. */
342 static int num_labels;
344 static void replace_pseudos_in (rtx *, machine_mode, rtx);
345 static void maybe_fix_stack_asms (void);
346 static void copy_reloads (struct insn_chain *);
347 static void calculate_needs_all_insns (int);
348 static int find_reg (struct insn_chain *, int);
349 static void find_reload_regs (struct insn_chain *);
350 static void select_reload_regs (void);
351 static void delete_caller_save_insns (void);
353 static void spill_failure (rtx_insn *, enum reg_class);
354 static void count_spilled_pseudo (int, int, int);
355 static void delete_dead_insn (rtx_insn *);
356 static void alter_reg (int, int, bool);
357 static void set_label_offsets (rtx, rtx_insn *, int);
358 static void check_eliminable_occurrences (rtx);
359 static void elimination_effects (rtx, machine_mode);
360 static rtx eliminate_regs_1 (rtx, machine_mode, rtx, bool, bool);
361 static int eliminate_regs_in_insn (rtx_insn *, int);
362 static void update_eliminable_offsets (void);
363 static void mark_not_eliminable (rtx, const_rtx, void *);
364 static void set_initial_elim_offsets (void);
365 static bool verify_initial_elim_offsets (void);
366 static void set_initial_label_offsets (void);
367 static void set_offsets_for_label (rtx_insn *);
368 static void init_eliminable_invariants (rtx_insn *, bool);
369 static void init_elim_table (void);
370 static void free_reg_equiv (void);
371 static void update_eliminables (HARD_REG_SET *);
372 static bool update_eliminables_and_spill (void);
373 static void elimination_costs_in_insn (rtx_insn *);
374 static void spill_hard_reg (unsigned int, int);
375 static int finish_spills (int);
376 static void scan_paradoxical_subregs (rtx);
377 static void count_pseudo (int);
378 static void order_regs_for_reload (struct insn_chain *);
379 static void reload_as_needed (int);
380 static void forget_old_reloads_1 (rtx, const_rtx, void *);
381 static void forget_marked_reloads (regset);
382 static int reload_reg_class_lower (const void *, const void *);
383 static void mark_reload_reg_in_use (unsigned int, int, enum reload_type,
384 machine_mode);
385 static void clear_reload_reg_in_use (unsigned int, int, enum reload_type,
386 machine_mode);
387 static int reload_reg_free_p (unsigned int, int, enum reload_type);
388 static int reload_reg_free_for_value_p (int, int, int, enum reload_type,
389 rtx, rtx, int, int);
390 static int free_for_value_p (int, machine_mode, int, enum reload_type,
391 rtx, rtx, int, int);
392 static int allocate_reload_reg (struct insn_chain *, int, int);
393 static int conflicts_with_override (rtx);
394 static void failed_reload (rtx_insn *, int);
395 static int set_reload_reg (int, int);
396 static void choose_reload_regs_init (struct insn_chain *, rtx *);
397 static void choose_reload_regs (struct insn_chain *);
398 static void emit_input_reload_insns (struct insn_chain *, struct reload *,
399 rtx, int);
400 static void emit_output_reload_insns (struct insn_chain *, struct reload *,
401 int);
402 static void do_input_reload (struct insn_chain *, struct reload *, int);
403 static void do_output_reload (struct insn_chain *, struct reload *, int);
404 static void emit_reload_insns (struct insn_chain *);
405 static void delete_output_reload (rtx_insn *, int, int, rtx);
406 static void delete_address_reloads (rtx_insn *, rtx_insn *);
407 static void delete_address_reloads_1 (rtx_insn *, rtx, rtx_insn *);
408 static void inc_for_reload (rtx, rtx, rtx, int);
409 static void add_auto_inc_notes (rtx_insn *, rtx);
410 static void substitute (rtx *, const_rtx, rtx);
411 static bool gen_reload_chain_without_interm_reg_p (int, int);
412 static int reloads_conflict (int, int);
413 static rtx_insn *gen_reload (rtx, rtx, int, enum reload_type);
414 static rtx_insn *emit_insn_if_valid_for_reload (rtx);
416 /* Initialize the reload pass. This is called at the beginning of compilation
417 and may be called again if the target is reinitialized. */
419 void
420 init_reload (void)
422 int i;
424 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
425 Set spill_indirect_levels to the number of levels such addressing is
426 permitted, zero if it is not permitted at all. */
428 rtx tem
429 = gen_rtx_MEM (Pmode,
430 gen_rtx_PLUS (Pmode,
431 gen_rtx_REG (Pmode,
432 LAST_VIRTUAL_REGISTER + 1),
433 gen_int_mode (4, Pmode)));
434 spill_indirect_levels = 0;
436 while (memory_address_p (QImode, tem))
438 spill_indirect_levels++;
439 tem = gen_rtx_MEM (Pmode, tem);
442 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
444 tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
445 indirect_symref_ok = memory_address_p (QImode, tem);
447 /* See if reg+reg is a valid (and offsettable) address. */
449 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
451 tem = gen_rtx_PLUS (Pmode,
452 gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
453 gen_rtx_REG (Pmode, i));
455 /* This way, we make sure that reg+reg is an offsettable address. */
456 tem = plus_constant (Pmode, tem, 4);
458 if (memory_address_p (QImode, tem))
460 double_reg_address_ok = 1;
461 break;
465 /* Initialize obstack for our rtl allocation. */
466 if (reload_startobj == NULL)
468 gcc_obstack_init (&reload_obstack);
469 reload_startobj = XOBNEWVAR (&reload_obstack, char, 0);
472 INIT_REG_SET (&spilled_pseudos);
473 INIT_REG_SET (&changed_allocation_pseudos);
474 INIT_REG_SET (&pseudos_counted);
477 /* List of insn chains that are currently unused. */
478 static struct insn_chain *unused_insn_chains = 0;
480 /* Allocate an empty insn_chain structure. */
481 struct insn_chain *
482 new_insn_chain (void)
484 struct insn_chain *c;
486 if (unused_insn_chains == 0)
488 c = XOBNEW (&reload_obstack, struct insn_chain);
489 INIT_REG_SET (&c->live_throughout);
490 INIT_REG_SET (&c->dead_or_set);
492 else
494 c = unused_insn_chains;
495 unused_insn_chains = c->next;
497 c->is_caller_save_insn = 0;
498 c->need_operand_change = 0;
499 c->need_reload = 0;
500 c->need_elim = 0;
501 return c;
504 /* Small utility function to set all regs in hard reg set TO which are
505 allocated to pseudos in regset FROM. */
507 void
508 compute_use_by_pseudos (HARD_REG_SET *to, regset from)
510 unsigned int regno;
511 reg_set_iterator rsi;
513 EXECUTE_IF_SET_IN_REG_SET (from, FIRST_PSEUDO_REGISTER, regno, rsi)
515 int r = reg_renumber[regno];
517 if (r < 0)
519 /* reload_combine uses the information from DF_LIVE_IN,
520 which might still contain registers that have not
521 actually been allocated since they have an
522 equivalence. */
523 gcc_assert (ira_conflicts_p || reload_completed);
525 else
526 add_to_hard_reg_set (to, PSEUDO_REGNO_MODE (regno), r);
530 /* Replace all pseudos found in LOC with their corresponding
531 equivalences. */
533 static void
534 replace_pseudos_in (rtx *loc, machine_mode mem_mode, rtx usage)
536 rtx x = *loc;
537 enum rtx_code code;
538 const char *fmt;
539 int i, j;
541 if (! x)
542 return;
544 code = GET_CODE (x);
545 if (code == REG)
547 unsigned int regno = REGNO (x);
549 if (regno < FIRST_PSEUDO_REGISTER)
550 return;
552 x = eliminate_regs_1 (x, mem_mode, usage, true, false);
553 if (x != *loc)
555 *loc = x;
556 replace_pseudos_in (loc, mem_mode, usage);
557 return;
560 if (reg_equiv_constant (regno))
561 *loc = reg_equiv_constant (regno);
562 else if (reg_equiv_invariant (regno))
563 *loc = reg_equiv_invariant (regno);
564 else if (reg_equiv_mem (regno))
565 *loc = reg_equiv_mem (regno);
566 else if (reg_equiv_address (regno))
567 *loc = gen_rtx_MEM (GET_MODE (x), reg_equiv_address (regno));
568 else
570 gcc_assert (!REG_P (regno_reg_rtx[regno])
571 || REGNO (regno_reg_rtx[regno]) != regno);
572 *loc = regno_reg_rtx[regno];
575 return;
577 else if (code == MEM)
579 replace_pseudos_in (& XEXP (x, 0), GET_MODE (x), usage);
580 return;
583 /* Process each of our operands recursively. */
584 fmt = GET_RTX_FORMAT (code);
585 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
586 if (*fmt == 'e')
587 replace_pseudos_in (&XEXP (x, i), mem_mode, usage);
588 else if (*fmt == 'E')
589 for (j = 0; j < XVECLEN (x, i); j++)
590 replace_pseudos_in (& XVECEXP (x, i, j), mem_mode, usage);
593 /* Determine if the current function has an exception receiver block
594 that reaches the exit block via non-exceptional edges */
596 static bool
597 has_nonexceptional_receiver (void)
599 edge e;
600 edge_iterator ei;
601 basic_block *tos, *worklist, bb;
603 /* If we're not optimizing, then just err on the safe side. */
604 if (!optimize)
605 return true;
607 /* First determine which blocks can reach exit via normal paths. */
608 tos = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun) + 1);
610 FOR_EACH_BB_FN (bb, cfun)
611 bb->flags &= ~BB_REACHABLE;
613 /* Place the exit block on our worklist. */
614 EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_REACHABLE;
615 *tos++ = EXIT_BLOCK_PTR_FOR_FN (cfun);
617 /* Iterate: find everything reachable from what we've already seen. */
618 while (tos != worklist)
620 bb = *--tos;
622 FOR_EACH_EDGE (e, ei, bb->preds)
623 if (!(e->flags & EDGE_ABNORMAL))
625 basic_block src = e->src;
627 if (!(src->flags & BB_REACHABLE))
629 src->flags |= BB_REACHABLE;
630 *tos++ = src;
634 free (worklist);
636 /* Now see if there's a reachable block with an exceptional incoming
637 edge. */
638 FOR_EACH_BB_FN (bb, cfun)
639 if (bb->flags & BB_REACHABLE && bb_has_abnormal_pred (bb))
640 return true;
642 /* No exceptional block reached exit unexceptionally. */
643 return false;
646 /* Grow (or allocate) the REG_EQUIVS array from its current size (which may be
647 zero elements) to MAX_REG_NUM elements.
649 Initialize all new fields to NULL and update REG_EQUIVS_SIZE. */
650 void
651 grow_reg_equivs (void)
653 int old_size = vec_safe_length (reg_equivs);
654 int max_regno = max_reg_num ();
655 int i;
656 reg_equivs_t ze;
658 memset (&ze, 0, sizeof (reg_equivs_t));
659 vec_safe_reserve (reg_equivs, max_regno);
660 for (i = old_size; i < max_regno; i++)
661 reg_equivs->quick_insert (i, ze);
665 /* Global variables used by reload and its subroutines. */
667 /* The current basic block while in calculate_elim_costs_all_insns. */
668 static basic_block elim_bb;
670 /* Set during calculate_needs if an insn needs register elimination. */
671 static int something_needs_elimination;
672 /* Set during calculate_needs if an insn needs an operand changed. */
673 static int something_needs_operands_changed;
674 /* Set by alter_regs if we spilled a register to the stack. */
675 static bool something_was_spilled;
677 /* Nonzero means we couldn't get enough spill regs. */
678 static int failure;
680 /* Temporary array of pseudo-register number. */
681 static int *temp_pseudo_reg_arr;
683 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
684 If that insn didn't set the register (i.e., it copied the register to
685 memory), just delete that insn instead of the equivalencing insn plus
686 anything now dead. If we call delete_dead_insn on that insn, we may
687 delete the insn that actually sets the register if the register dies
688 there and that is incorrect. */
689 static void
690 remove_init_insns ()
692 for (int i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
694 if (reg_renumber[i] < 0 && reg_equiv_init (i) != 0)
696 rtx list;
697 for (list = reg_equiv_init (i); list; list = XEXP (list, 1))
699 rtx_insn *equiv_insn = as_a <rtx_insn *> (XEXP (list, 0));
701 /* If we already deleted the insn or if it may trap, we can't
702 delete it. The latter case shouldn't happen, but can
703 if an insn has a variable address, gets a REG_EH_REGION
704 note added to it, and then gets converted into a load
705 from a constant address. */
706 if (NOTE_P (equiv_insn)
707 || can_throw_internal (equiv_insn))
709 else if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn)))
710 delete_dead_insn (equiv_insn);
711 else
712 SET_INSN_DELETED (equiv_insn);
718 /* Return true if remove_init_insns will delete INSN. */
719 static bool
720 will_delete_init_insn_p (rtx_insn *insn)
722 rtx set = single_set (insn);
723 if (!set || !REG_P (SET_DEST (set)))
724 return false;
725 unsigned regno = REGNO (SET_DEST (set));
727 if (can_throw_internal (insn))
728 return false;
730 if (regno < FIRST_PSEUDO_REGISTER || reg_renumber[regno] >= 0)
731 return false;
733 for (rtx list = reg_equiv_init (regno); list; list = XEXP (list, 1))
735 rtx equiv_insn = XEXP (list, 0);
736 if (equiv_insn == insn)
737 return true;
739 return false;
742 /* Main entry point for the reload pass.
744 FIRST is the first insn of the function being compiled.
746 GLOBAL nonzero means we were called from global_alloc
747 and should attempt to reallocate any pseudoregs that we
748 displace from hard regs we will use for reloads.
749 If GLOBAL is zero, we do not have enough information to do that,
750 so any pseudo reg that is spilled must go to the stack.
752 Return value is TRUE if reload likely left dead insns in the
753 stream and a DCE pass should be run to elimiante them. Else the
754 return value is FALSE. */
756 bool
757 reload (rtx_insn *first, int global)
759 int i, n;
760 rtx_insn *insn;
761 struct elim_table *ep;
762 basic_block bb;
763 bool inserted;
765 /* Make sure even insns with volatile mem refs are recognizable. */
766 init_recog ();
768 failure = 0;
770 reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
772 /* Make sure that the last insn in the chain
773 is not something that needs reloading. */
774 emit_note (NOTE_INSN_DELETED);
776 /* Enable find_equiv_reg to distinguish insns made by reload. */
777 reload_first_uid = get_max_uid ();
779 #ifdef SECONDARY_MEMORY_NEEDED
780 /* Initialize the secondary memory table. */
781 clear_secondary_mem ();
782 #endif
784 /* We don't have a stack slot for any spill reg yet. */
785 memset (spill_stack_slot, 0, sizeof spill_stack_slot);
786 memset (spill_stack_slot_width, 0, sizeof spill_stack_slot_width);
788 /* Initialize the save area information for caller-save, in case some
789 are needed. */
790 init_save_areas ();
792 /* Compute which hard registers are now in use
793 as homes for pseudo registers.
794 This is done here rather than (eg) in global_alloc
795 because this point is reached even if not optimizing. */
796 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
797 mark_home_live (i);
799 /* A function that has a nonlocal label that can reach the exit
800 block via non-exceptional paths must save all call-saved
801 registers. */
802 if (cfun->has_nonlocal_label
803 && has_nonexceptional_receiver ())
804 crtl->saves_all_registers = 1;
806 if (crtl->saves_all_registers)
807 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
808 if (! call_used_regs[i] && ! fixed_regs[i] && ! LOCAL_REGNO (i))
809 df_set_regs_ever_live (i, true);
811 /* Find all the pseudo registers that didn't get hard regs
812 but do have known equivalent constants or memory slots.
813 These include parameters (known equivalent to parameter slots)
814 and cse'd or loop-moved constant memory addresses.
816 Record constant equivalents in reg_equiv_constant
817 so they will be substituted by find_reloads.
818 Record memory equivalents in reg_mem_equiv so they can
819 be substituted eventually by altering the REG-rtx's. */
821 grow_reg_equivs ();
822 reg_old_renumber = XCNEWVEC (short, max_regno);
823 memcpy (reg_old_renumber, reg_renumber, max_regno * sizeof (short));
824 pseudo_forbidden_regs = XNEWVEC (HARD_REG_SET, max_regno);
825 pseudo_previous_regs = XCNEWVEC (HARD_REG_SET, max_regno);
827 CLEAR_HARD_REG_SET (bad_spill_regs_global);
829 init_eliminable_invariants (first, true);
830 init_elim_table ();
832 /* Alter each pseudo-reg rtx to contain its hard reg number. Assign
833 stack slots to the pseudos that lack hard regs or equivalents.
834 Do not touch virtual registers. */
836 temp_pseudo_reg_arr = XNEWVEC (int, max_regno - LAST_VIRTUAL_REGISTER - 1);
837 for (n = 0, i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
838 temp_pseudo_reg_arr[n++] = i;
840 if (ira_conflicts_p)
841 /* Ask IRA to order pseudo-registers for better stack slot
842 sharing. */
843 ira_sort_regnos_for_alter_reg (temp_pseudo_reg_arr, n, reg_max_ref_width);
845 for (i = 0; i < n; i++)
846 alter_reg (temp_pseudo_reg_arr[i], -1, false);
848 /* If we have some registers we think can be eliminated, scan all insns to
849 see if there is an insn that sets one of these registers to something
850 other than itself plus a constant. If so, the register cannot be
851 eliminated. Doing this scan here eliminates an extra pass through the
852 main reload loop in the most common case where register elimination
853 cannot be done. */
854 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
855 if (INSN_P (insn))
856 note_stores (PATTERN (insn), mark_not_eliminable, NULL);
858 maybe_fix_stack_asms ();
860 insns_need_reload = 0;
861 something_needs_elimination = 0;
863 /* Initialize to -1, which means take the first spill register. */
864 last_spill_reg = -1;
866 /* Spill any hard regs that we know we can't eliminate. */
867 CLEAR_HARD_REG_SET (used_spill_regs);
868 /* There can be multiple ways to eliminate a register;
869 they should be listed adjacently.
870 Elimination for any register fails only if all possible ways fail. */
871 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; )
873 int from = ep->from;
874 int can_eliminate = 0;
877 can_eliminate |= ep->can_eliminate;
878 ep++;
880 while (ep < &reg_eliminate[NUM_ELIMINABLE_REGS] && ep->from == from);
881 if (! can_eliminate)
882 spill_hard_reg (from, 1);
885 if (!HARD_FRAME_POINTER_IS_FRAME_POINTER && frame_pointer_needed)
886 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, 1);
888 finish_spills (global);
890 /* From now on, we may need to generate moves differently. We may also
891 allow modifications of insns which cause them to not be recognized.
892 Any such modifications will be cleaned up during reload itself. */
893 reload_in_progress = 1;
895 /* This loop scans the entire function each go-round
896 and repeats until one repetition spills no additional hard regs. */
897 for (;;)
899 int something_changed;
900 int did_spill;
901 HOST_WIDE_INT starting_frame_size;
903 starting_frame_size = get_frame_size ();
904 something_was_spilled = false;
906 set_initial_elim_offsets ();
907 set_initial_label_offsets ();
909 /* For each pseudo register that has an equivalent location defined,
910 try to eliminate any eliminable registers (such as the frame pointer)
911 assuming initial offsets for the replacement register, which
912 is the normal case.
914 If the resulting location is directly addressable, substitute
915 the MEM we just got directly for the old REG.
917 If it is not addressable but is a constant or the sum of a hard reg
918 and constant, it is probably not addressable because the constant is
919 out of range, in that case record the address; we will generate
920 hairy code to compute the address in a register each time it is
921 needed. Similarly if it is a hard register, but one that is not
922 valid as an address register.
924 If the location is not addressable, but does not have one of the
925 above forms, assign a stack slot. We have to do this to avoid the
926 potential of producing lots of reloads if, e.g., a location involves
927 a pseudo that didn't get a hard register and has an equivalent memory
928 location that also involves a pseudo that didn't get a hard register.
930 Perhaps at some point we will improve reload_when_needed handling
931 so this problem goes away. But that's very hairy. */
933 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
934 if (reg_renumber[i] < 0 && reg_equiv_memory_loc (i))
936 rtx x = eliminate_regs (reg_equiv_memory_loc (i), VOIDmode,
937 NULL_RTX);
939 if (strict_memory_address_addr_space_p
940 (GET_MODE (regno_reg_rtx[i]), XEXP (x, 0),
941 MEM_ADDR_SPACE (x)))
942 reg_equiv_mem (i) = x, reg_equiv_address (i) = 0;
943 else if (CONSTANT_P (XEXP (x, 0))
944 || (REG_P (XEXP (x, 0))
945 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
946 || (GET_CODE (XEXP (x, 0)) == PLUS
947 && REG_P (XEXP (XEXP (x, 0), 0))
948 && (REGNO (XEXP (XEXP (x, 0), 0))
949 < FIRST_PSEUDO_REGISTER)
950 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
951 reg_equiv_address (i) = XEXP (x, 0), reg_equiv_mem (i) = 0;
952 else
954 /* Make a new stack slot. Then indicate that something
955 changed so we go back and recompute offsets for
956 eliminable registers because the allocation of memory
957 below might change some offset. reg_equiv_{mem,address}
958 will be set up for this pseudo on the next pass around
959 the loop. */
960 reg_equiv_memory_loc (i) = 0;
961 reg_equiv_init (i) = 0;
962 alter_reg (i, -1, true);
966 if (caller_save_needed)
967 setup_save_areas ();
969 if (starting_frame_size && crtl->stack_alignment_needed)
971 /* If we have a stack frame, we must align it now. The
972 stack size may be a part of the offset computation for
973 register elimination. So if this changes the stack size,
974 then repeat the elimination bookkeeping. We don't
975 realign when there is no stack, as that will cause a
976 stack frame when none is needed should
977 STARTING_FRAME_OFFSET not be already aligned to
978 STACK_BOUNDARY. */
979 assign_stack_local (BLKmode, 0, crtl->stack_alignment_needed);
981 /* If we allocated another stack slot, redo elimination bookkeeping. */
982 if (something_was_spilled || starting_frame_size != get_frame_size ())
984 update_eliminables_and_spill ();
985 continue;
988 if (caller_save_needed)
990 save_call_clobbered_regs ();
991 /* That might have allocated new insn_chain structures. */
992 reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
995 calculate_needs_all_insns (global);
997 if (! ira_conflicts_p)
998 /* Don't do it for IRA. We need this info because we don't
999 change live_throughout and dead_or_set for chains when IRA
1000 is used. */
1001 CLEAR_REG_SET (&spilled_pseudos);
1003 did_spill = 0;
1005 something_changed = 0;
1007 /* If we allocated any new memory locations, make another pass
1008 since it might have changed elimination offsets. */
1009 if (something_was_spilled || starting_frame_size != get_frame_size ())
1010 something_changed = 1;
1012 /* Even if the frame size remained the same, we might still have
1013 changed elimination offsets, e.g. if find_reloads called
1014 force_const_mem requiring the back end to allocate a constant
1015 pool base register that needs to be saved on the stack. */
1016 else if (!verify_initial_elim_offsets ())
1017 something_changed = 1;
1019 if (update_eliminables_and_spill ())
1021 did_spill = 1;
1022 something_changed = 1;
1025 select_reload_regs ();
1026 if (failure)
1027 goto failed;
1029 if (insns_need_reload != 0 || did_spill)
1030 something_changed |= finish_spills (global);
1032 if (! something_changed)
1033 break;
1035 if (caller_save_needed)
1036 delete_caller_save_insns ();
1038 obstack_free (&reload_obstack, reload_firstobj);
1041 /* If global-alloc was run, notify it of any register eliminations we have
1042 done. */
1043 if (global)
1044 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1045 if (ep->can_eliminate)
1046 mark_elimination (ep->from, ep->to);
1048 remove_init_insns ();
1050 /* Use the reload registers where necessary
1051 by generating move instructions to move the must-be-register
1052 values into or out of the reload registers. */
1054 if (insns_need_reload != 0 || something_needs_elimination
1055 || something_needs_operands_changed)
1057 HOST_WIDE_INT old_frame_size = get_frame_size ();
1059 reload_as_needed (global);
1061 gcc_assert (old_frame_size == get_frame_size ());
1063 gcc_assert (verify_initial_elim_offsets ());
1066 /* If we were able to eliminate the frame pointer, show that it is no
1067 longer live at the start of any basic block. If it ls live by
1068 virtue of being in a pseudo, that pseudo will be marked live
1069 and hence the frame pointer will be known to be live via that
1070 pseudo. */
1072 if (! frame_pointer_needed)
1073 FOR_EACH_BB_FN (bb, cfun)
1074 bitmap_clear_bit (df_get_live_in (bb), HARD_FRAME_POINTER_REGNUM);
1076 /* Come here (with failure set nonzero) if we can't get enough spill
1077 regs. */
1078 failed:
1080 CLEAR_REG_SET (&changed_allocation_pseudos);
1081 CLEAR_REG_SET (&spilled_pseudos);
1082 reload_in_progress = 0;
1084 /* Now eliminate all pseudo regs by modifying them into
1085 their equivalent memory references.
1086 The REG-rtx's for the pseudos are modified in place,
1087 so all insns that used to refer to them now refer to memory.
1089 For a reg that has a reg_equiv_address, all those insns
1090 were changed by reloading so that no insns refer to it any longer;
1091 but the DECL_RTL of a variable decl may refer to it,
1092 and if so this causes the debugging info to mention the variable. */
1094 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1096 rtx addr = 0;
1098 if (reg_equiv_mem (i))
1099 addr = XEXP (reg_equiv_mem (i), 0);
1101 if (reg_equiv_address (i))
1102 addr = reg_equiv_address (i);
1104 if (addr)
1106 if (reg_renumber[i] < 0)
1108 rtx reg = regno_reg_rtx[i];
1110 REG_USERVAR_P (reg) = 0;
1111 PUT_CODE (reg, MEM);
1112 XEXP (reg, 0) = addr;
1113 if (reg_equiv_memory_loc (i))
1114 MEM_COPY_ATTRIBUTES (reg, reg_equiv_memory_loc (i));
1115 else
1116 MEM_ATTRS (reg) = 0;
1117 MEM_NOTRAP_P (reg) = 1;
1119 else if (reg_equiv_mem (i))
1120 XEXP (reg_equiv_mem (i), 0) = addr;
1123 /* We don't want complex addressing modes in debug insns
1124 if simpler ones will do, so delegitimize equivalences
1125 in debug insns. */
1126 if (MAY_HAVE_DEBUG_INSNS && reg_renumber[i] < 0)
1128 rtx reg = regno_reg_rtx[i];
1129 rtx equiv = 0;
1130 df_ref use, next;
1132 if (reg_equiv_constant (i))
1133 equiv = reg_equiv_constant (i);
1134 else if (reg_equiv_invariant (i))
1135 equiv = reg_equiv_invariant (i);
1136 else if (reg && MEM_P (reg))
1137 equiv = targetm.delegitimize_address (reg);
1138 else if (reg && REG_P (reg) && (int)REGNO (reg) != i)
1139 equiv = reg;
1141 if (equiv == reg)
1142 continue;
1144 for (use = DF_REG_USE_CHAIN (i); use; use = next)
1146 insn = DF_REF_INSN (use);
1148 /* Make sure the next ref is for a different instruction,
1149 so that we're not affected by the rescan. */
1150 next = DF_REF_NEXT_REG (use);
1151 while (next && DF_REF_INSN (next) == insn)
1152 next = DF_REF_NEXT_REG (next);
1154 if (DEBUG_INSN_P (insn))
1156 if (!equiv)
1158 INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
1159 df_insn_rescan_debug_internal (insn);
1161 else
1162 INSN_VAR_LOCATION_LOC (insn)
1163 = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (insn),
1164 reg, equiv);
1170 /* We must set reload_completed now since the cleanup_subreg_operands call
1171 below will re-recognize each insn and reload may have generated insns
1172 which are only valid during and after reload. */
1173 reload_completed = 1;
1175 /* Make a pass over all the insns and delete all USEs which we inserted
1176 only to tag a REG_EQUAL note on them. Remove all REG_DEAD and REG_UNUSED
1177 notes. Delete all CLOBBER insns, except those that refer to the return
1178 value and the special mem:BLK CLOBBERs added to prevent the scheduler
1179 from misarranging variable-array code, and simplify (subreg (reg))
1180 operands. Strip and regenerate REG_INC notes that may have been moved
1181 around. */
1183 for (insn = first; insn; insn = NEXT_INSN (insn))
1184 if (INSN_P (insn))
1186 rtx *pnote;
1188 if (CALL_P (insn))
1189 replace_pseudos_in (& CALL_INSN_FUNCTION_USAGE (insn),
1190 VOIDmode, CALL_INSN_FUNCTION_USAGE (insn));
1192 if ((GET_CODE (PATTERN (insn)) == USE
1193 /* We mark with QImode USEs introduced by reload itself. */
1194 && (GET_MODE (insn) == QImode
1195 || find_reg_note (insn, REG_EQUAL, NULL_RTX)))
1196 || (GET_CODE (PATTERN (insn)) == CLOBBER
1197 && (!MEM_P (XEXP (PATTERN (insn), 0))
1198 || GET_MODE (XEXP (PATTERN (insn), 0)) != BLKmode
1199 || (GET_CODE (XEXP (XEXP (PATTERN (insn), 0), 0)) != SCRATCH
1200 && XEXP (XEXP (PATTERN (insn), 0), 0)
1201 != stack_pointer_rtx))
1202 && (!REG_P (XEXP (PATTERN (insn), 0))
1203 || ! REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))))
1205 delete_insn (insn);
1206 continue;
1209 /* Some CLOBBERs may survive until here and still reference unassigned
1210 pseudos with const equivalent, which may in turn cause ICE in later
1211 passes if the reference remains in place. */
1212 if (GET_CODE (PATTERN (insn)) == CLOBBER)
1213 replace_pseudos_in (& XEXP (PATTERN (insn), 0),
1214 VOIDmode, PATTERN (insn));
1216 /* Discard obvious no-ops, even without -O. This optimization
1217 is fast and doesn't interfere with debugging. */
1218 if (NONJUMP_INSN_P (insn)
1219 && GET_CODE (PATTERN (insn)) == SET
1220 && REG_P (SET_SRC (PATTERN (insn)))
1221 && REG_P (SET_DEST (PATTERN (insn)))
1222 && (REGNO (SET_SRC (PATTERN (insn)))
1223 == REGNO (SET_DEST (PATTERN (insn)))))
1225 delete_insn (insn);
1226 continue;
1229 pnote = &REG_NOTES (insn);
1230 while (*pnote != 0)
1232 if (REG_NOTE_KIND (*pnote) == REG_DEAD
1233 || REG_NOTE_KIND (*pnote) == REG_UNUSED
1234 || REG_NOTE_KIND (*pnote) == REG_INC)
1235 *pnote = XEXP (*pnote, 1);
1236 else
1237 pnote = &XEXP (*pnote, 1);
1240 if (AUTO_INC_DEC)
1241 add_auto_inc_notes (insn, PATTERN (insn));
1243 /* Simplify (subreg (reg)) if it appears as an operand. */
1244 cleanup_subreg_operands (insn);
1246 /* Clean up invalid ASMs so that they don't confuse later passes.
1247 See PR 21299. */
1248 if (asm_noperands (PATTERN (insn)) >= 0)
1250 extract_insn (insn);
1251 if (!constrain_operands (1, get_enabled_alternatives (insn)))
1253 error_for_asm (insn,
1254 "%<asm%> operand has impossible constraints");
1255 delete_insn (insn);
1256 continue;
1261 /* If we are doing generic stack checking, give a warning if this
1262 function's frame size is larger than we expect. */
1263 if (flag_stack_check == GENERIC_STACK_CHECK)
1265 HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
1266 static int verbose_warned = 0;
1268 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1269 if (df_regs_ever_live_p (i) && ! fixed_regs[i] && call_used_regs[i])
1270 size += UNITS_PER_WORD;
1272 if (size > STACK_CHECK_MAX_FRAME_SIZE)
1274 warning (0, "frame size too large for reliable stack checking");
1275 if (! verbose_warned)
1277 warning (0, "try reducing the number of local variables");
1278 verbose_warned = 1;
1283 free (temp_pseudo_reg_arr);
1285 /* Indicate that we no longer have known memory locations or constants. */
1286 free_reg_equiv ();
1288 free (reg_max_ref_width);
1289 free (reg_old_renumber);
1290 free (pseudo_previous_regs);
1291 free (pseudo_forbidden_regs);
1293 CLEAR_HARD_REG_SET (used_spill_regs);
1294 for (i = 0; i < n_spills; i++)
1295 SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
1297 /* Free all the insn_chain structures at once. */
1298 obstack_free (&reload_obstack, reload_startobj);
1299 unused_insn_chains = 0;
1301 inserted = fixup_abnormal_edges ();
1303 /* We've possibly turned single trapping insn into multiple ones. */
1304 if (cfun->can_throw_non_call_exceptions)
1306 sbitmap blocks;
1307 blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
1308 bitmap_ones (blocks);
1309 find_many_sub_basic_blocks (blocks);
1310 sbitmap_free (blocks);
1313 if (inserted)
1314 commit_edge_insertions ();
1316 /* Replacing pseudos with their memory equivalents might have
1317 created shared rtx. Subsequent passes would get confused
1318 by this, so unshare everything here. */
1319 unshare_all_rtl_again (first);
1321 #ifdef STACK_BOUNDARY
1322 /* init_emit has set the alignment of the hard frame pointer
1323 to STACK_BOUNDARY. It is very likely no longer valid if
1324 the hard frame pointer was used for register allocation. */
1325 if (!frame_pointer_needed)
1326 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = BITS_PER_UNIT;
1327 #endif
1329 substitute_stack.release ();
1331 gcc_assert (bitmap_empty_p (&spilled_pseudos));
1333 reload_completed = !failure;
1335 return need_dce;
1338 /* Yet another special case. Unfortunately, reg-stack forces people to
1339 write incorrect clobbers in asm statements. These clobbers must not
1340 cause the register to appear in bad_spill_regs, otherwise we'll call
1341 fatal_insn later. We clear the corresponding regnos in the live
1342 register sets to avoid this.
1343 The whole thing is rather sick, I'm afraid. */
1345 static void
1346 maybe_fix_stack_asms (void)
1348 #ifdef STACK_REGS
1349 const char *constraints[MAX_RECOG_OPERANDS];
1350 machine_mode operand_mode[MAX_RECOG_OPERANDS];
1351 struct insn_chain *chain;
1353 for (chain = reload_insn_chain; chain != 0; chain = chain->next)
1355 int i, noperands;
1356 HARD_REG_SET clobbered, allowed;
1357 rtx pat;
1359 if (! INSN_P (chain->insn)
1360 || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
1361 continue;
1362 pat = PATTERN (chain->insn);
1363 if (GET_CODE (pat) != PARALLEL)
1364 continue;
1366 CLEAR_HARD_REG_SET (clobbered);
1367 CLEAR_HARD_REG_SET (allowed);
1369 /* First, make a mask of all stack regs that are clobbered. */
1370 for (i = 0; i < XVECLEN (pat, 0); i++)
1372 rtx t = XVECEXP (pat, 0, i);
1373 if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
1374 SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
1377 /* Get the operand values and constraints out of the insn. */
1378 decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc,
1379 constraints, operand_mode, NULL);
1381 /* For every operand, see what registers are allowed. */
1382 for (i = 0; i < noperands; i++)
1384 const char *p = constraints[i];
1385 /* For every alternative, we compute the class of registers allowed
1386 for reloading in CLS, and merge its contents into the reg set
1387 ALLOWED. */
1388 int cls = (int) NO_REGS;
1390 for (;;)
1392 char c = *p;
1394 if (c == '\0' || c == ',' || c == '#')
1396 /* End of one alternative - mark the regs in the current
1397 class, and reset the class. */
1398 IOR_HARD_REG_SET (allowed, reg_class_contents[cls]);
1399 cls = NO_REGS;
1400 p++;
1401 if (c == '#')
1402 do {
1403 c = *p++;
1404 } while (c != '\0' && c != ',');
1405 if (c == '\0')
1406 break;
1407 continue;
1410 switch (c)
1412 case 'g':
1413 cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
1414 break;
1416 default:
1417 enum constraint_num cn = lookup_constraint (p);
1418 if (insn_extra_address_constraint (cn))
1419 cls = (int) reg_class_subunion[cls]
1420 [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
1421 ADDRESS, SCRATCH)];
1422 else
1423 cls = (int) reg_class_subunion[cls]
1424 [reg_class_for_constraint (cn)];
1425 break;
1427 p += CONSTRAINT_LEN (c, p);
1430 /* Those of the registers which are clobbered, but allowed by the
1431 constraints, must be usable as reload registers. So clear them
1432 out of the life information. */
1433 AND_HARD_REG_SET (allowed, clobbered);
1434 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1435 if (TEST_HARD_REG_BIT (allowed, i))
1437 CLEAR_REGNO_REG_SET (&chain->live_throughout, i);
1438 CLEAR_REGNO_REG_SET (&chain->dead_or_set, i);
1442 #endif
1445 /* Copy the global variables n_reloads and rld into the corresponding elts
1446 of CHAIN. */
1447 static void
1448 copy_reloads (struct insn_chain *chain)
1450 chain->n_reloads = n_reloads;
1451 chain->rld = XOBNEWVEC (&reload_obstack, struct reload, n_reloads);
1452 memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1453 reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
1456 /* Walk the chain of insns, and determine for each whether it needs reloads
1457 and/or eliminations. Build the corresponding insns_need_reload list, and
1458 set something_needs_elimination as appropriate. */
1459 static void
1460 calculate_needs_all_insns (int global)
1462 struct insn_chain **pprev_reload = &insns_need_reload;
1463 struct insn_chain *chain, *next = 0;
1465 something_needs_elimination = 0;
1467 reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
1468 for (chain = reload_insn_chain; chain != 0; chain = next)
1470 rtx_insn *insn = chain->insn;
1472 next = chain->next;
1474 /* Clear out the shortcuts. */
1475 chain->n_reloads = 0;
1476 chain->need_elim = 0;
1477 chain->need_reload = 0;
1478 chain->need_operand_change = 0;
1480 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
1481 include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
1482 what effects this has on the known offsets at labels. */
1484 if (LABEL_P (insn) || JUMP_P (insn) || JUMP_TABLE_DATA_P (insn)
1485 || (INSN_P (insn) && REG_NOTES (insn) != 0))
1486 set_label_offsets (insn, insn, 0);
1488 if (INSN_P (insn))
1490 rtx old_body = PATTERN (insn);
1491 int old_code = INSN_CODE (insn);
1492 rtx old_notes = REG_NOTES (insn);
1493 int did_elimination = 0;
1494 int operands_changed = 0;
1496 /* Skip insns that only set an equivalence. */
1497 if (will_delete_init_insn_p (insn))
1498 continue;
1500 /* If needed, eliminate any eliminable registers. */
1501 if (num_eliminable || num_eliminable_invariants)
1502 did_elimination = eliminate_regs_in_insn (insn, 0);
1504 /* Analyze the instruction. */
1505 operands_changed = find_reloads (insn, 0, spill_indirect_levels,
1506 global, spill_reg_order);
1508 /* If a no-op set needs more than one reload, this is likely
1509 to be something that needs input address reloads. We
1510 can't get rid of this cleanly later, and it is of no use
1511 anyway, so discard it now.
1512 We only do this when expensive_optimizations is enabled,
1513 since this complements reload inheritance / output
1514 reload deletion, and it can make debugging harder. */
1515 if (flag_expensive_optimizations && n_reloads > 1)
1517 rtx set = single_set (insn);
1518 if (set
1520 ((SET_SRC (set) == SET_DEST (set)
1521 && REG_P (SET_SRC (set))
1522 && REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER)
1523 || (REG_P (SET_SRC (set)) && REG_P (SET_DEST (set))
1524 && reg_renumber[REGNO (SET_SRC (set))] < 0
1525 && reg_renumber[REGNO (SET_DEST (set))] < 0
1526 && reg_equiv_memory_loc (REGNO (SET_SRC (set))) != NULL
1527 && reg_equiv_memory_loc (REGNO (SET_DEST (set))) != NULL
1528 && rtx_equal_p (reg_equiv_memory_loc (REGNO (SET_SRC (set))),
1529 reg_equiv_memory_loc (REGNO (SET_DEST (set)))))))
1531 if (ira_conflicts_p)
1532 /* Inform IRA about the insn deletion. */
1533 ira_mark_memory_move_deletion (REGNO (SET_DEST (set)),
1534 REGNO (SET_SRC (set)));
1535 delete_insn (insn);
1536 /* Delete it from the reload chain. */
1537 if (chain->prev)
1538 chain->prev->next = next;
1539 else
1540 reload_insn_chain = next;
1541 if (next)
1542 next->prev = chain->prev;
1543 chain->next = unused_insn_chains;
1544 unused_insn_chains = chain;
1545 continue;
1548 if (num_eliminable)
1549 update_eliminable_offsets ();
1551 /* Remember for later shortcuts which insns had any reloads or
1552 register eliminations. */
1553 chain->need_elim = did_elimination;
1554 chain->need_reload = n_reloads > 0;
1555 chain->need_operand_change = operands_changed;
1557 /* Discard any register replacements done. */
1558 if (did_elimination)
1560 obstack_free (&reload_obstack, reload_insn_firstobj);
1561 PATTERN (insn) = old_body;
1562 INSN_CODE (insn) = old_code;
1563 REG_NOTES (insn) = old_notes;
1564 something_needs_elimination = 1;
1567 something_needs_operands_changed |= operands_changed;
1569 if (n_reloads != 0)
1571 copy_reloads (chain);
1572 *pprev_reload = chain;
1573 pprev_reload = &chain->next_need_reload;
1577 *pprev_reload = 0;
1580 /* This function is called from the register allocator to set up estimates
1581 for the cost of eliminating pseudos which have REG_EQUIV equivalences to
1582 an invariant. The structure is similar to calculate_needs_all_insns. */
1584 void
1585 calculate_elim_costs_all_insns (void)
1587 int *reg_equiv_init_cost;
1588 basic_block bb;
1589 int i;
1591 reg_equiv_init_cost = XCNEWVEC (int, max_regno);
1592 init_elim_table ();
1593 init_eliminable_invariants (get_insns (), false);
1595 set_initial_elim_offsets ();
1596 set_initial_label_offsets ();
1598 FOR_EACH_BB_FN (bb, cfun)
1600 rtx_insn *insn;
1601 elim_bb = bb;
1603 FOR_BB_INSNS (bb, insn)
1605 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
1606 include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
1607 what effects this has on the known offsets at labels. */
1609 if (LABEL_P (insn) || JUMP_P (insn) || JUMP_TABLE_DATA_P (insn)
1610 || (INSN_P (insn) && REG_NOTES (insn) != 0))
1611 set_label_offsets (insn, insn, 0);
1613 if (INSN_P (insn))
1615 rtx set = single_set (insn);
1617 /* Skip insns that only set an equivalence. */
1618 if (set && REG_P (SET_DEST (set))
1619 && reg_renumber[REGNO (SET_DEST (set))] < 0
1620 && (reg_equiv_constant (REGNO (SET_DEST (set)))
1621 || reg_equiv_invariant (REGNO (SET_DEST (set)))))
1623 unsigned regno = REGNO (SET_DEST (set));
1624 rtx_insn_list *init = reg_equiv_init (regno);
1625 if (init)
1627 rtx t = eliminate_regs_1 (SET_SRC (set), VOIDmode, insn,
1628 false, true);
1629 machine_mode mode = GET_MODE (SET_DEST (set));
1630 int cost = set_src_cost (t, mode,
1631 optimize_bb_for_speed_p (bb));
1632 int freq = REG_FREQ_FROM_BB (bb);
1634 reg_equiv_init_cost[regno] = cost * freq;
1635 continue;
1638 /* If needed, eliminate any eliminable registers. */
1639 if (num_eliminable || num_eliminable_invariants)
1640 elimination_costs_in_insn (insn);
1642 if (num_eliminable)
1643 update_eliminable_offsets ();
1647 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1649 if (reg_equiv_invariant (i))
1651 if (reg_equiv_init (i))
1653 int cost = reg_equiv_init_cost[i];
1654 if (dump_file)
1655 fprintf (dump_file,
1656 "Reg %d has equivalence, initial gains %d\n", i, cost);
1657 if (cost != 0)
1658 ira_adjust_equiv_reg_cost (i, cost);
1660 else
1662 if (dump_file)
1663 fprintf (dump_file,
1664 "Reg %d had equivalence, but can't be eliminated\n",
1666 ira_adjust_equiv_reg_cost (i, 0);
1671 free (reg_equiv_init_cost);
1672 free (offsets_known_at);
1673 free (offsets_at);
1674 offsets_at = NULL;
1675 offsets_known_at = NULL;
1678 /* Comparison function for qsort to decide which of two reloads
1679 should be handled first. *P1 and *P2 are the reload numbers. */
1681 static int
1682 reload_reg_class_lower (const void *r1p, const void *r2p)
1684 int r1 = *(const short *) r1p, r2 = *(const short *) r2p;
1685 int t;
1687 /* Consider required reloads before optional ones. */
1688 t = rld[r1].optional - rld[r2].optional;
1689 if (t != 0)
1690 return t;
1692 /* Count all solitary classes before non-solitary ones. */
1693 t = ((reg_class_size[(int) rld[r2].rclass] == 1)
1694 - (reg_class_size[(int) rld[r1].rclass] == 1));
1695 if (t != 0)
1696 return t;
1698 /* Aside from solitaires, consider all multi-reg groups first. */
1699 t = rld[r2].nregs - rld[r1].nregs;
1700 if (t != 0)
1701 return t;
1703 /* Consider reloads in order of increasing reg-class number. */
1704 t = (int) rld[r1].rclass - (int) rld[r2].rclass;
1705 if (t != 0)
1706 return t;
1708 /* If reloads are equally urgent, sort by reload number,
1709 so that the results of qsort leave nothing to chance. */
1710 return r1 - r2;
1713 /* The cost of spilling each hard reg. */
1714 static int spill_cost[FIRST_PSEUDO_REGISTER];
1716 /* When spilling multiple hard registers, we use SPILL_COST for the first
1717 spilled hard reg and SPILL_ADD_COST for subsequent regs. SPILL_ADD_COST
1718 only the first hard reg for a multi-reg pseudo. */
1719 static int spill_add_cost[FIRST_PSEUDO_REGISTER];
1721 /* Map of hard regno to pseudo regno currently occupying the hard
1722 reg. */
1723 static int hard_regno_to_pseudo_regno[FIRST_PSEUDO_REGISTER];
1725 /* Update the spill cost arrays, considering that pseudo REG is live. */
1727 static void
1728 count_pseudo (int reg)
1730 int freq = REG_FREQ (reg);
1731 int r = reg_renumber[reg];
1732 int nregs;
1734 /* Ignore spilled pseudo-registers which can be here only if IRA is used. */
1735 if (ira_conflicts_p && r < 0)
1736 return;
1738 if (REGNO_REG_SET_P (&pseudos_counted, reg)
1739 || REGNO_REG_SET_P (&spilled_pseudos, reg))
1740 return;
1742 SET_REGNO_REG_SET (&pseudos_counted, reg);
1744 gcc_assert (r >= 0);
1746 spill_add_cost[r] += freq;
1747 nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
1748 while (nregs-- > 0)
1750 hard_regno_to_pseudo_regno[r + nregs] = reg;
1751 spill_cost[r + nregs] += freq;
1755 /* Calculate the SPILL_COST and SPILL_ADD_COST arrays and determine the
1756 contents of BAD_SPILL_REGS for the insn described by CHAIN. */
1758 static void
1759 order_regs_for_reload (struct insn_chain *chain)
1761 unsigned i;
1762 HARD_REG_SET used_by_pseudos;
1763 HARD_REG_SET used_by_pseudos2;
1764 reg_set_iterator rsi;
1766 COPY_HARD_REG_SET (bad_spill_regs, fixed_reg_set);
1768 memset (spill_cost, 0, sizeof spill_cost);
1769 memset (spill_add_cost, 0, sizeof spill_add_cost);
1770 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1771 hard_regno_to_pseudo_regno[i] = -1;
1773 /* Count number of uses of each hard reg by pseudo regs allocated to it
1774 and then order them by decreasing use. First exclude hard registers
1775 that are live in or across this insn. */
1777 REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
1778 REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
1779 IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos);
1780 IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos2);
1782 /* Now find out which pseudos are allocated to it, and update
1783 hard_reg_n_uses. */
1784 CLEAR_REG_SET (&pseudos_counted);
1786 EXECUTE_IF_SET_IN_REG_SET
1787 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
1789 count_pseudo (i);
1791 EXECUTE_IF_SET_IN_REG_SET
1792 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
1794 count_pseudo (i);
1796 CLEAR_REG_SET (&pseudos_counted);
1799 /* Vector of reload-numbers showing the order in which the reloads should
1800 be processed. */
1801 static short reload_order[MAX_RELOADS];
1803 /* This is used to keep track of the spill regs used in one insn. */
1804 static HARD_REG_SET used_spill_regs_local;
1806 /* We decided to spill hard register SPILLED, which has a size of
1807 SPILLED_NREGS. Determine how pseudo REG, which is live during the insn,
1808 is affected. We will add it to SPILLED_PSEUDOS if necessary, and we will
1809 update SPILL_COST/SPILL_ADD_COST. */
1811 static void
1812 count_spilled_pseudo (int spilled, int spilled_nregs, int reg)
1814 int freq = REG_FREQ (reg);
1815 int r = reg_renumber[reg];
1816 int nregs;
1818 /* Ignore spilled pseudo-registers which can be here only if IRA is used. */
1819 if (ira_conflicts_p && r < 0)
1820 return;
1822 gcc_assert (r >= 0);
1824 nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
1826 if (REGNO_REG_SET_P (&spilled_pseudos, reg)
1827 || spilled + spilled_nregs <= r || r + nregs <= spilled)
1828 return;
1830 SET_REGNO_REG_SET (&spilled_pseudos, reg);
1832 spill_add_cost[r] -= freq;
1833 while (nregs-- > 0)
1835 hard_regno_to_pseudo_regno[r + nregs] = -1;
1836 spill_cost[r + nregs] -= freq;
1840 /* Find reload register to use for reload number ORDER. */
1842 static int
1843 find_reg (struct insn_chain *chain, int order)
1845 int rnum = reload_order[order];
1846 struct reload *rl = rld + rnum;
1847 int best_cost = INT_MAX;
1848 int best_reg = -1;
1849 unsigned int i, j, n;
1850 int k;
1851 HARD_REG_SET not_usable;
1852 HARD_REG_SET used_by_other_reload;
1853 reg_set_iterator rsi;
1854 static int regno_pseudo_regs[FIRST_PSEUDO_REGISTER];
1855 static int best_regno_pseudo_regs[FIRST_PSEUDO_REGISTER];
1857 COPY_HARD_REG_SET (not_usable, bad_spill_regs);
1858 IOR_HARD_REG_SET (not_usable, bad_spill_regs_global);
1859 IOR_COMPL_HARD_REG_SET (not_usable, reg_class_contents[rl->rclass]);
1861 CLEAR_HARD_REG_SET (used_by_other_reload);
1862 for (k = 0; k < order; k++)
1864 int other = reload_order[k];
1866 if (rld[other].regno >= 0 && reloads_conflict (other, rnum))
1867 for (j = 0; j < rld[other].nregs; j++)
1868 SET_HARD_REG_BIT (used_by_other_reload, rld[other].regno + j);
1871 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1873 #ifdef REG_ALLOC_ORDER
1874 unsigned int regno = reg_alloc_order[i];
1875 #else
1876 unsigned int regno = i;
1877 #endif
1879 if (! TEST_HARD_REG_BIT (not_usable, regno)
1880 && ! TEST_HARD_REG_BIT (used_by_other_reload, regno)
1881 && HARD_REGNO_MODE_OK (regno, rl->mode))
1883 int this_cost = spill_cost[regno];
1884 int ok = 1;
1885 unsigned int this_nregs = hard_regno_nregs[regno][rl->mode];
1887 for (j = 1; j < this_nregs; j++)
1889 this_cost += spill_add_cost[regno + j];
1890 if ((TEST_HARD_REG_BIT (not_usable, regno + j))
1891 || TEST_HARD_REG_BIT (used_by_other_reload, regno + j))
1892 ok = 0;
1894 if (! ok)
1895 continue;
1897 if (ira_conflicts_p)
1899 /* Ask IRA to find a better pseudo-register for
1900 spilling. */
1901 for (n = j = 0; j < this_nregs; j++)
1903 int r = hard_regno_to_pseudo_regno[regno + j];
1905 if (r < 0)
1906 continue;
1907 if (n == 0 || regno_pseudo_regs[n - 1] != r)
1908 regno_pseudo_regs[n++] = r;
1910 regno_pseudo_regs[n++] = -1;
1911 if (best_reg < 0
1912 || ira_better_spill_reload_regno_p (regno_pseudo_regs,
1913 best_regno_pseudo_regs,
1914 rl->in, rl->out,
1915 chain->insn))
1917 best_reg = regno;
1918 for (j = 0;; j++)
1920 best_regno_pseudo_regs[j] = regno_pseudo_regs[j];
1921 if (regno_pseudo_regs[j] < 0)
1922 break;
1925 continue;
1928 if (rl->in && REG_P (rl->in) && REGNO (rl->in) == regno)
1929 this_cost--;
1930 if (rl->out && REG_P (rl->out) && REGNO (rl->out) == regno)
1931 this_cost--;
1932 if (this_cost < best_cost
1933 /* Among registers with equal cost, prefer caller-saved ones, or
1934 use REG_ALLOC_ORDER if it is defined. */
1935 || (this_cost == best_cost
1936 #ifdef REG_ALLOC_ORDER
1937 && (inv_reg_alloc_order[regno]
1938 < inv_reg_alloc_order[best_reg])
1939 #else
1940 && call_used_regs[regno]
1941 && ! call_used_regs[best_reg]
1942 #endif
1945 best_reg = regno;
1946 best_cost = this_cost;
1950 if (best_reg == -1)
1951 return 0;
1953 if (dump_file)
1954 fprintf (dump_file, "Using reg %d for reload %d\n", best_reg, rnum);
1956 rl->nregs = hard_regno_nregs[best_reg][rl->mode];
1957 rl->regno = best_reg;
1959 EXECUTE_IF_SET_IN_REG_SET
1960 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, j, rsi)
1962 count_spilled_pseudo (best_reg, rl->nregs, j);
1965 EXECUTE_IF_SET_IN_REG_SET
1966 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, j, rsi)
1968 count_spilled_pseudo (best_reg, rl->nregs, j);
1971 for (i = 0; i < rl->nregs; i++)
1973 gcc_assert (spill_cost[best_reg + i] == 0);
1974 gcc_assert (spill_add_cost[best_reg + i] == 0);
1975 gcc_assert (hard_regno_to_pseudo_regno[best_reg + i] == -1);
1976 SET_HARD_REG_BIT (used_spill_regs_local, best_reg + i);
1978 return 1;
1981 /* Find more reload regs to satisfy the remaining need of an insn, which
1982 is given by CHAIN.
1983 Do it by ascending class number, since otherwise a reg
1984 might be spilled for a big class and might fail to count
1985 for a smaller class even though it belongs to that class. */
1987 static void
1988 find_reload_regs (struct insn_chain *chain)
1990 int i;
1992 /* In order to be certain of getting the registers we need,
1993 we must sort the reloads into order of increasing register class.
1994 Then our grabbing of reload registers will parallel the process
1995 that provided the reload registers. */
1996 for (i = 0; i < chain->n_reloads; i++)
1998 /* Show whether this reload already has a hard reg. */
1999 if (chain->rld[i].reg_rtx)
2001 int regno = REGNO (chain->rld[i].reg_rtx);
2002 chain->rld[i].regno = regno;
2003 chain->rld[i].nregs
2004 = hard_regno_nregs[regno][GET_MODE (chain->rld[i].reg_rtx)];
2006 else
2007 chain->rld[i].regno = -1;
2008 reload_order[i] = i;
2011 n_reloads = chain->n_reloads;
2012 memcpy (rld, chain->rld, n_reloads * sizeof (struct reload));
2014 CLEAR_HARD_REG_SET (used_spill_regs_local);
2016 if (dump_file)
2017 fprintf (dump_file, "Spilling for insn %d.\n", INSN_UID (chain->insn));
2019 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
2021 /* Compute the order of preference for hard registers to spill. */
2023 order_regs_for_reload (chain);
2025 for (i = 0; i < n_reloads; i++)
2027 int r = reload_order[i];
2029 /* Ignore reloads that got marked inoperative. */
2030 if ((rld[r].out != 0 || rld[r].in != 0 || rld[r].secondary_p)
2031 && ! rld[r].optional
2032 && rld[r].regno == -1)
2033 if (! find_reg (chain, i))
2035 if (dump_file)
2036 fprintf (dump_file, "reload failure for reload %d\n", r);
2037 spill_failure (chain->insn, rld[r].rclass);
2038 failure = 1;
2039 return;
2043 COPY_HARD_REG_SET (chain->used_spill_regs, used_spill_regs_local);
2044 IOR_HARD_REG_SET (used_spill_regs, used_spill_regs_local);
2046 memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
2049 static void
2050 select_reload_regs (void)
2052 struct insn_chain *chain;
2054 /* Try to satisfy the needs for each insn. */
2055 for (chain = insns_need_reload; chain != 0;
2056 chain = chain->next_need_reload)
2057 find_reload_regs (chain);
2060 /* Delete all insns that were inserted by emit_caller_save_insns during
2061 this iteration. */
2062 static void
2063 delete_caller_save_insns (void)
2065 struct insn_chain *c = reload_insn_chain;
2067 while (c != 0)
2069 while (c != 0 && c->is_caller_save_insn)
2071 struct insn_chain *next = c->next;
2072 rtx_insn *insn = c->insn;
2074 if (c == reload_insn_chain)
2075 reload_insn_chain = next;
2076 delete_insn (insn);
2078 if (next)
2079 next->prev = c->prev;
2080 if (c->prev)
2081 c->prev->next = next;
2082 c->next = unused_insn_chains;
2083 unused_insn_chains = c;
2084 c = next;
2086 if (c != 0)
2087 c = c->next;
2091 /* Handle the failure to find a register to spill.
2092 INSN should be one of the insns which needed this particular spill reg. */
2094 static void
2095 spill_failure (rtx_insn *insn, enum reg_class rclass)
2097 if (asm_noperands (PATTERN (insn)) >= 0)
2098 error_for_asm (insn, "can%'t find a register in class %qs while "
2099 "reloading %<asm%>",
2100 reg_class_names[rclass]);
2101 else
2103 error ("unable to find a register to spill in class %qs",
2104 reg_class_names[rclass]);
2106 if (dump_file)
2108 fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
2109 debug_reload_to_stream (dump_file);
2111 fatal_insn ("this is the insn:", insn);
2115 /* Delete an unneeded INSN and any previous insns who sole purpose is loading
2116 data that is dead in INSN. */
2118 static void
2119 delete_dead_insn (rtx_insn *insn)
2121 rtx_insn *prev = prev_active_insn (insn);
2122 rtx prev_dest;
2124 /* If the previous insn sets a register that dies in our insn make
2125 a note that we want to run DCE immediately after reload.
2127 We used to delete the previous insn & recurse, but that's wrong for
2128 block local equivalences. Instead of trying to figure out the exact
2129 circumstances where we can delete the potentially dead insns, just
2130 let DCE do the job. */
2131 if (prev && BLOCK_FOR_INSN (prev) == BLOCK_FOR_INSN (insn)
2132 && GET_CODE (PATTERN (prev)) == SET
2133 && (prev_dest = SET_DEST (PATTERN (prev)), REG_P (prev_dest))
2134 && reg_mentioned_p (prev_dest, PATTERN (insn))
2135 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
2136 && ! side_effects_p (SET_SRC (PATTERN (prev))))
2137 need_dce = 1;
2139 SET_INSN_DELETED (insn);
2142 /* Modify the home of pseudo-reg I.
2143 The new home is present in reg_renumber[I].
2145 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2146 or it may be -1, meaning there is none or it is not relevant.
2147 This is used so that all pseudos spilled from a given hard reg
2148 can share one stack slot. */
2150 static void
2151 alter_reg (int i, int from_reg, bool dont_share_p)
2153 /* When outputting an inline function, this can happen
2154 for a reg that isn't actually used. */
2155 if (regno_reg_rtx[i] == 0)
2156 return;
2158 /* If the reg got changed to a MEM at rtl-generation time,
2159 ignore it. */
2160 if (!REG_P (regno_reg_rtx[i]))
2161 return;
2163 /* Modify the reg-rtx to contain the new hard reg
2164 number or else to contain its pseudo reg number. */
2165 SET_REGNO (regno_reg_rtx[i],
2166 reg_renumber[i] >= 0 ? reg_renumber[i] : i);
2168 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2169 allocate a stack slot for it. */
2171 if (reg_renumber[i] < 0
2172 && REG_N_REFS (i) > 0
2173 && reg_equiv_constant (i) == 0
2174 && (reg_equiv_invariant (i) == 0
2175 || reg_equiv_init (i) == 0)
2176 && reg_equiv_memory_loc (i) == 0)
2178 rtx x = NULL_RTX;
2179 machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2180 unsigned int inherent_size = PSEUDO_REGNO_BYTES (i);
2181 unsigned int inherent_align = GET_MODE_ALIGNMENT (mode);
2182 unsigned int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2183 unsigned int min_align = reg_max_ref_width[i] * BITS_PER_UNIT;
2184 int adjust = 0;
2186 something_was_spilled = true;
2188 if (ira_conflicts_p)
2190 /* Mark the spill for IRA. */
2191 SET_REGNO_REG_SET (&spilled_pseudos, i);
2192 if (!dont_share_p)
2193 x = ira_reuse_stack_slot (i, inherent_size, total_size);
2196 if (x)
2199 /* Each pseudo reg has an inherent size which comes from its own mode,
2200 and a total size which provides room for paradoxical subregs
2201 which refer to the pseudo reg in wider modes.
2203 We can use a slot already allocated if it provides both
2204 enough inherent space and enough total space.
2205 Otherwise, we allocate a new slot, making sure that it has no less
2206 inherent space, and no less total space, then the previous slot. */
2207 else if (from_reg == -1 || (!dont_share_p && ira_conflicts_p))
2209 rtx stack_slot;
2211 /* No known place to spill from => no slot to reuse. */
2212 x = assign_stack_local (mode, total_size,
2213 min_align > inherent_align
2214 || total_size > inherent_size ? -1 : 0);
2216 stack_slot = x;
2218 /* Cancel the big-endian correction done in assign_stack_local.
2219 Get the address of the beginning of the slot. This is so we
2220 can do a big-endian correction unconditionally below. */
2221 if (BYTES_BIG_ENDIAN)
2223 adjust = inherent_size - total_size;
2224 if (adjust)
2225 stack_slot
2226 = adjust_address_nv (x, mode_for_size (total_size
2227 * BITS_PER_UNIT,
2228 MODE_INT, 1),
2229 adjust);
2232 if (! dont_share_p && ira_conflicts_p)
2233 /* Inform IRA about allocation a new stack slot. */
2234 ira_mark_new_stack_slot (stack_slot, i, total_size);
2237 /* Reuse a stack slot if possible. */
2238 else if (spill_stack_slot[from_reg] != 0
2239 && spill_stack_slot_width[from_reg] >= total_size
2240 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2241 >= inherent_size)
2242 && MEM_ALIGN (spill_stack_slot[from_reg]) >= min_align)
2243 x = spill_stack_slot[from_reg];
2245 /* Allocate a bigger slot. */
2246 else
2248 /* Compute maximum size needed, both for inherent size
2249 and for total size. */
2250 rtx stack_slot;
2252 if (spill_stack_slot[from_reg])
2254 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2255 > inherent_size)
2256 mode = GET_MODE (spill_stack_slot[from_reg]);
2257 if (spill_stack_slot_width[from_reg] > total_size)
2258 total_size = spill_stack_slot_width[from_reg];
2259 if (MEM_ALIGN (spill_stack_slot[from_reg]) > min_align)
2260 min_align = MEM_ALIGN (spill_stack_slot[from_reg]);
2263 /* Make a slot with that size. */
2264 x = assign_stack_local (mode, total_size,
2265 min_align > inherent_align
2266 || total_size > inherent_size ? -1 : 0);
2267 stack_slot = x;
2269 /* Cancel the big-endian correction done in assign_stack_local.
2270 Get the address of the beginning of the slot. This is so we
2271 can do a big-endian correction unconditionally below. */
2272 if (BYTES_BIG_ENDIAN)
2274 adjust = GET_MODE_SIZE (mode) - total_size;
2275 if (adjust)
2276 stack_slot
2277 = adjust_address_nv (x, mode_for_size (total_size
2278 * BITS_PER_UNIT,
2279 MODE_INT, 1),
2280 adjust);
2283 spill_stack_slot[from_reg] = stack_slot;
2284 spill_stack_slot_width[from_reg] = total_size;
2287 /* On a big endian machine, the "address" of the slot
2288 is the address of the low part that fits its inherent mode. */
2289 if (BYTES_BIG_ENDIAN && inherent_size < total_size)
2290 adjust += (total_size - inherent_size);
2292 /* If we have any adjustment to make, or if the stack slot is the
2293 wrong mode, make a new stack slot. */
2294 x = adjust_address_nv (x, GET_MODE (regno_reg_rtx[i]), adjust);
2296 /* Set all of the memory attributes as appropriate for a spill. */
2297 set_mem_attrs_for_spill (x);
2299 /* Save the stack slot for later. */
2300 reg_equiv_memory_loc (i) = x;
2304 /* Mark the slots in regs_ever_live for the hard regs used by
2305 pseudo-reg number REGNO, accessed in MODE. */
2307 static void
2308 mark_home_live_1 (int regno, machine_mode mode)
2310 int i, lim;
2312 i = reg_renumber[regno];
2313 if (i < 0)
2314 return;
2315 lim = end_hard_regno (mode, i);
2316 while (i < lim)
2317 df_set_regs_ever_live (i++, true);
2320 /* Mark the slots in regs_ever_live for the hard regs
2321 used by pseudo-reg number REGNO. */
2323 void
2324 mark_home_live (int regno)
2326 if (reg_renumber[regno] >= 0)
2327 mark_home_live_1 (regno, PSEUDO_REGNO_MODE (regno));
2330 /* This function handles the tracking of elimination offsets around branches.
2332 X is a piece of RTL being scanned.
2334 INSN is the insn that it came from, if any.
2336 INITIAL_P is nonzero if we are to set the offset to be the initial
2337 offset and zero if we are setting the offset of the label to be the
2338 current offset. */
2340 static void
2341 set_label_offsets (rtx x, rtx_insn *insn, int initial_p)
2343 enum rtx_code code = GET_CODE (x);
2344 rtx tem;
2345 unsigned int i;
2346 struct elim_table *p;
2348 switch (code)
2350 case LABEL_REF:
2351 if (LABEL_REF_NONLOCAL_P (x))
2352 return;
2354 x = LABEL_REF_LABEL (x);
2356 /* ... fall through ... */
2358 case CODE_LABEL:
2359 /* If we know nothing about this label, set the desired offsets. Note
2360 that this sets the offset at a label to be the offset before a label
2361 if we don't know anything about the label. This is not correct for
2362 the label after a BARRIER, but is the best guess we can make. If
2363 we guessed wrong, we will suppress an elimination that might have
2364 been possible had we been able to guess correctly. */
2366 if (! offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num])
2368 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2369 offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
2370 = (initial_p ? reg_eliminate[i].initial_offset
2371 : reg_eliminate[i].offset);
2372 offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num] = 1;
2375 /* Otherwise, if this is the definition of a label and it is
2376 preceded by a BARRIER, set our offsets to the known offset of
2377 that label. */
2379 else if (x == insn
2380 && (tem = prev_nonnote_insn (insn)) != 0
2381 && BARRIER_P (tem))
2382 set_offsets_for_label (insn);
2383 else
2384 /* If neither of the above cases is true, compare each offset
2385 with those previously recorded and suppress any eliminations
2386 where the offsets disagree. */
2388 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2389 if (offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
2390 != (initial_p ? reg_eliminate[i].initial_offset
2391 : reg_eliminate[i].offset))
2392 reg_eliminate[i].can_eliminate = 0;
2394 return;
2396 case JUMP_TABLE_DATA:
2397 set_label_offsets (PATTERN (insn), insn, initial_p);
2398 return;
2400 case JUMP_INSN:
2401 set_label_offsets (PATTERN (insn), insn, initial_p);
2403 /* ... fall through ... */
2405 case INSN:
2406 case CALL_INSN:
2407 /* Any labels mentioned in REG_LABEL_OPERAND notes can be branched
2408 to indirectly and hence must have all eliminations at their
2409 initial offsets. */
2410 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2411 if (REG_NOTE_KIND (tem) == REG_LABEL_OPERAND)
2412 set_label_offsets (XEXP (tem, 0), insn, 1);
2413 return;
2415 case PARALLEL:
2416 case ADDR_VEC:
2417 case ADDR_DIFF_VEC:
2418 /* Each of the labels in the parallel or address vector must be
2419 at their initial offsets. We want the first field for PARALLEL
2420 and ADDR_VEC and the second field for ADDR_DIFF_VEC. */
2422 for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2423 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2424 insn, initial_p);
2425 return;
2427 case SET:
2428 /* We only care about setting PC. If the source is not RETURN,
2429 IF_THEN_ELSE, or a label, disable any eliminations not at
2430 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2431 isn't one of those possibilities. For branches to a label,
2432 call ourselves recursively.
2434 Note that this can disable elimination unnecessarily when we have
2435 a non-local goto since it will look like a non-constant jump to
2436 someplace in the current function. This isn't a significant
2437 problem since such jumps will normally be when all elimination
2438 pairs are back to their initial offsets. */
2440 if (SET_DEST (x) != pc_rtx)
2441 return;
2443 switch (GET_CODE (SET_SRC (x)))
2445 case PC:
2446 case RETURN:
2447 return;
2449 case LABEL_REF:
2450 set_label_offsets (SET_SRC (x), insn, initial_p);
2451 return;
2453 case IF_THEN_ELSE:
2454 tem = XEXP (SET_SRC (x), 1);
2455 if (GET_CODE (tem) == LABEL_REF)
2456 set_label_offsets (LABEL_REF_LABEL (tem), insn, initial_p);
2457 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2458 break;
2460 tem = XEXP (SET_SRC (x), 2);
2461 if (GET_CODE (tem) == LABEL_REF)
2462 set_label_offsets (LABEL_REF_LABEL (tem), insn, initial_p);
2463 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2464 break;
2465 return;
2467 default:
2468 break;
2471 /* If we reach here, all eliminations must be at their initial
2472 offset because we are doing a jump to a variable address. */
2473 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2474 if (p->offset != p->initial_offset)
2475 p->can_eliminate = 0;
2476 break;
2478 default:
2479 break;
2483 /* This function examines every reg that occurs in X and adjusts the
2484 costs for its elimination which are gathered by IRA. INSN is the
2485 insn in which X occurs. We do not recurse into MEM expressions. */
2487 static void
2488 note_reg_elim_costly (const_rtx x, rtx insn)
2490 subrtx_iterator::array_type array;
2491 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
2493 const_rtx x = *iter;
2494 if (MEM_P (x))
2495 iter.skip_subrtxes ();
2496 else if (REG_P (x)
2497 && REGNO (x) >= FIRST_PSEUDO_REGISTER
2498 && reg_equiv_init (REGNO (x))
2499 && reg_equiv_invariant (REGNO (x)))
2501 rtx t = reg_equiv_invariant (REGNO (x));
2502 rtx new_rtx = eliminate_regs_1 (t, Pmode, insn, true, true);
2503 int cost = set_src_cost (new_rtx, Pmode,
2504 optimize_bb_for_speed_p (elim_bb));
2505 int freq = REG_FREQ_FROM_BB (elim_bb);
2507 if (cost != 0)
2508 ira_adjust_equiv_reg_cost (REGNO (x), -cost * freq);
2513 /* Scan X and replace any eliminable registers (such as fp) with a
2514 replacement (such as sp), plus an offset.
2516 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2517 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2518 MEM, we are allowed to replace a sum of a register and the constant zero
2519 with the register, which we cannot do outside a MEM. In addition, we need
2520 to record the fact that a register is referenced outside a MEM.
2522 If INSN is an insn, it is the insn containing X. If we replace a REG
2523 in a SET_DEST with an equivalent MEM and INSN is nonzero, write a
2524 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2525 the REG is being modified.
2527 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2528 That's used when we eliminate in expressions stored in notes.
2529 This means, do not set ref_outside_mem even if the reference
2530 is outside of MEMs.
2532 If FOR_COSTS is true, we are being called before reload in order to
2533 estimate the costs of keeping registers with an equivalence unallocated.
2535 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2536 replacements done assuming all offsets are at their initial values. If
2537 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2538 encounter, return the actual location so that find_reloads will do
2539 the proper thing. */
2541 static rtx
2542 eliminate_regs_1 (rtx x, machine_mode mem_mode, rtx insn,
2543 bool may_use_invariant, bool for_costs)
2545 enum rtx_code code = GET_CODE (x);
2546 struct elim_table *ep;
2547 int regno;
2548 rtx new_rtx;
2549 int i, j;
2550 const char *fmt;
2551 int copied = 0;
2553 if (! current_function_decl)
2554 return x;
2556 switch (code)
2558 CASE_CONST_ANY:
2559 case CONST:
2560 case SYMBOL_REF:
2561 case CODE_LABEL:
2562 case PC:
2563 case CC0:
2564 case ASM_INPUT:
2565 case ADDR_VEC:
2566 case ADDR_DIFF_VEC:
2567 case RETURN:
2568 return x;
2570 case REG:
2571 regno = REGNO (x);
2573 /* First handle the case where we encounter a bare register that
2574 is eliminable. Replace it with a PLUS. */
2575 if (regno < FIRST_PSEUDO_REGISTER)
2577 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2578 ep++)
2579 if (ep->from_rtx == x && ep->can_eliminate)
2580 return plus_constant (Pmode, ep->to_rtx, ep->previous_offset);
2583 else if (reg_renumber && reg_renumber[regno] < 0
2584 && reg_equivs
2585 && reg_equiv_invariant (regno))
2587 if (may_use_invariant || (insn && DEBUG_INSN_P (insn)))
2588 return eliminate_regs_1 (copy_rtx (reg_equiv_invariant (regno)),
2589 mem_mode, insn, true, for_costs);
2590 /* There exists at least one use of REGNO that cannot be
2591 eliminated. Prevent the defining insn from being deleted. */
2592 reg_equiv_init (regno) = NULL;
2593 if (!for_costs)
2594 alter_reg (regno, -1, true);
2596 return x;
2598 /* You might think handling MINUS in a manner similar to PLUS is a
2599 good idea. It is not. It has been tried multiple times and every
2600 time the change has had to have been reverted.
2602 Other parts of reload know a PLUS is special (gen_reload for example)
2603 and require special code to handle code a reloaded PLUS operand.
2605 Also consider backends where the flags register is clobbered by a
2606 MINUS, but we can emit a PLUS that does not clobber flags (IA-32,
2607 lea instruction comes to mind). If we try to reload a MINUS, we
2608 may kill the flags register that was holding a useful value.
2610 So, please before trying to handle MINUS, consider reload as a
2611 whole instead of this little section as well as the backend issues. */
2612 case PLUS:
2613 /* If this is the sum of an eliminable register and a constant, rework
2614 the sum. */
2615 if (REG_P (XEXP (x, 0))
2616 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2617 && CONSTANT_P (XEXP (x, 1)))
2619 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2620 ep++)
2621 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2623 /* The only time we want to replace a PLUS with a REG (this
2624 occurs when the constant operand of the PLUS is the negative
2625 of the offset) is when we are inside a MEM. We won't want
2626 to do so at other times because that would change the
2627 structure of the insn in a way that reload can't handle.
2628 We special-case the commonest situation in
2629 eliminate_regs_in_insn, so just replace a PLUS with a
2630 PLUS here, unless inside a MEM. */
2631 if (mem_mode != 0 && CONST_INT_P (XEXP (x, 1))
2632 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2633 return ep->to_rtx;
2634 else
2635 return gen_rtx_PLUS (Pmode, ep->to_rtx,
2636 plus_constant (Pmode, XEXP (x, 1),
2637 ep->previous_offset));
2640 /* If the register is not eliminable, we are done since the other
2641 operand is a constant. */
2642 return x;
2645 /* If this is part of an address, we want to bring any constant to the
2646 outermost PLUS. We will do this by doing register replacement in
2647 our operands and seeing if a constant shows up in one of them.
2649 Note that there is no risk of modifying the structure of the insn,
2650 since we only get called for its operands, thus we are either
2651 modifying the address inside a MEM, or something like an address
2652 operand of a load-address insn. */
2655 rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true,
2656 for_costs);
2657 rtx new1 = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2658 for_costs);
2660 if (reg_renumber && (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)))
2662 /* If one side is a PLUS and the other side is a pseudo that
2663 didn't get a hard register but has a reg_equiv_constant,
2664 we must replace the constant here since it may no longer
2665 be in the position of any operand. */
2666 if (GET_CODE (new0) == PLUS && REG_P (new1)
2667 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2668 && reg_renumber[REGNO (new1)] < 0
2669 && reg_equivs
2670 && reg_equiv_constant (REGNO (new1)) != 0)
2671 new1 = reg_equiv_constant (REGNO (new1));
2672 else if (GET_CODE (new1) == PLUS && REG_P (new0)
2673 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2674 && reg_renumber[REGNO (new0)] < 0
2675 && reg_equiv_constant (REGNO (new0)) != 0)
2676 new0 = reg_equiv_constant (REGNO (new0));
2678 new_rtx = form_sum (GET_MODE (x), new0, new1);
2680 /* As above, if we are not inside a MEM we do not want to
2681 turn a PLUS into something else. We might try to do so here
2682 for an addition of 0 if we aren't optimizing. */
2683 if (! mem_mode && GET_CODE (new_rtx) != PLUS)
2684 return gen_rtx_PLUS (GET_MODE (x), new_rtx, const0_rtx);
2685 else
2686 return new_rtx;
2689 return x;
2691 case MULT:
2692 /* If this is the product of an eliminable register and a
2693 constant, apply the distribute law and move the constant out
2694 so that we have (plus (mult ..) ..). This is needed in order
2695 to keep load-address insns valid. This case is pathological.
2696 We ignore the possibility of overflow here. */
2697 if (REG_P (XEXP (x, 0))
2698 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2699 && CONST_INT_P (XEXP (x, 1)))
2700 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2701 ep++)
2702 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2704 if (! mem_mode
2705 /* Refs inside notes or in DEBUG_INSNs don't count for
2706 this purpose. */
2707 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2708 || GET_CODE (insn) == INSN_LIST
2709 || DEBUG_INSN_P (insn))))
2710 ep->ref_outside_mem = 1;
2712 return
2713 plus_constant (Pmode,
2714 gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
2715 ep->previous_offset * INTVAL (XEXP (x, 1)));
2718 /* ... fall through ... */
2720 case CALL:
2721 case COMPARE:
2722 /* See comments before PLUS about handling MINUS. */
2723 case MINUS:
2724 case DIV: case UDIV:
2725 case MOD: case UMOD:
2726 case AND: case IOR: case XOR:
2727 case ROTATERT: case ROTATE:
2728 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2729 case NE: case EQ:
2730 case GE: case GT: case GEU: case GTU:
2731 case LE: case LT: case LEU: case LTU:
2733 rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false,
2734 for_costs);
2735 rtx new1 = XEXP (x, 1)
2736 ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, false,
2737 for_costs) : 0;
2739 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2740 return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
2742 return x;
2744 case EXPR_LIST:
2745 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2746 if (XEXP (x, 0))
2748 new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true,
2749 for_costs);
2750 if (new_rtx != XEXP (x, 0))
2752 /* If this is a REG_DEAD note, it is not valid anymore.
2753 Using the eliminated version could result in creating a
2754 REG_DEAD note for the stack or frame pointer. */
2755 if (REG_NOTE_KIND (x) == REG_DEAD)
2756 return (XEXP (x, 1)
2757 ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2758 for_costs)
2759 : NULL_RTX);
2761 x = alloc_reg_note (REG_NOTE_KIND (x), new_rtx, XEXP (x, 1));
2765 /* ... fall through ... */
2767 case INSN_LIST:
2768 case INT_LIST:
2769 /* Now do eliminations in the rest of the chain. If this was
2770 an EXPR_LIST, this might result in allocating more memory than is
2771 strictly needed, but it simplifies the code. */
2772 if (XEXP (x, 1))
2774 new_rtx = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2775 for_costs);
2776 if (new_rtx != XEXP (x, 1))
2777 return
2778 gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new_rtx);
2780 return x;
2782 case PRE_INC:
2783 case POST_INC:
2784 case PRE_DEC:
2785 case POST_DEC:
2786 /* We do not support elimination of a register that is modified.
2787 elimination_effects has already make sure that this does not
2788 happen. */
2789 return x;
2791 case PRE_MODIFY:
2792 case POST_MODIFY:
2793 /* We do not support elimination of a register that is modified.
2794 elimination_effects has already make sure that this does not
2795 happen. The only remaining case we need to consider here is
2796 that the increment value may be an eliminable register. */
2797 if (GET_CODE (XEXP (x, 1)) == PLUS
2798 && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
2800 rtx new_rtx = eliminate_regs_1 (XEXP (XEXP (x, 1), 1), mem_mode,
2801 insn, true, for_costs);
2803 if (new_rtx != XEXP (XEXP (x, 1), 1))
2804 return gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (x, 0),
2805 gen_rtx_PLUS (GET_MODE (x),
2806 XEXP (x, 0), new_rtx));
2808 return x;
2810 case STRICT_LOW_PART:
2811 case NEG: case NOT:
2812 case SIGN_EXTEND: case ZERO_EXTEND:
2813 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2814 case FLOAT: case FIX:
2815 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2816 case ABS:
2817 case SQRT:
2818 case FFS:
2819 case CLZ:
2820 case CTZ:
2821 case POPCOUNT:
2822 case PARITY:
2823 case BSWAP:
2824 new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false,
2825 for_costs);
2826 if (new_rtx != XEXP (x, 0))
2827 return gen_rtx_fmt_e (code, GET_MODE (x), new_rtx);
2828 return x;
2830 case SUBREG:
2831 /* Similar to above processing, but preserve SUBREG_BYTE.
2832 Convert (subreg (mem)) to (mem) if not paradoxical.
2833 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2834 pseudo didn't get a hard reg, we must replace this with the
2835 eliminated version of the memory location because push_reload
2836 may do the replacement in certain circumstances. */
2837 if (REG_P (SUBREG_REG (x))
2838 && !paradoxical_subreg_p (x)
2839 && reg_equivs
2840 && reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
2842 new_rtx = SUBREG_REG (x);
2844 else
2845 new_rtx = eliminate_regs_1 (SUBREG_REG (x), mem_mode, insn, false, for_costs);
2847 if (new_rtx != SUBREG_REG (x))
2849 int x_size = GET_MODE_SIZE (GET_MODE (x));
2850 int new_size = GET_MODE_SIZE (GET_MODE (new_rtx));
2852 if (MEM_P (new_rtx)
2853 && ((x_size < new_size
2854 #if WORD_REGISTER_OPERATIONS
2855 /* On these machines, combine can create rtl of the form
2856 (set (subreg:m1 (reg:m2 R) 0) ...)
2857 where m1 < m2, and expects something interesting to
2858 happen to the entire word. Moreover, it will use the
2859 (reg:m2 R) later, expecting all bits to be preserved.
2860 So if the number of words is the same, preserve the
2861 subreg so that push_reload can see it. */
2862 && ! ((x_size - 1) / UNITS_PER_WORD
2863 == (new_size -1 ) / UNITS_PER_WORD)
2864 #endif
2866 || x_size == new_size)
2868 return adjust_address_nv (new_rtx, GET_MODE (x), SUBREG_BYTE (x));
2869 else
2870 return gen_rtx_SUBREG (GET_MODE (x), new_rtx, SUBREG_BYTE (x));
2873 return x;
2875 case MEM:
2876 /* Our only special processing is to pass the mode of the MEM to our
2877 recursive call and copy the flags. While we are here, handle this
2878 case more efficiently. */
2880 new_rtx = eliminate_regs_1 (XEXP (x, 0), GET_MODE (x), insn, true,
2881 for_costs);
2882 if (for_costs
2883 && memory_address_p (GET_MODE (x), XEXP (x, 0))
2884 && !memory_address_p (GET_MODE (x), new_rtx))
2885 note_reg_elim_costly (XEXP (x, 0), insn);
2887 return replace_equiv_address_nv (x, new_rtx);
2889 case USE:
2890 /* Handle insn_list USE that a call to a pure function may generate. */
2891 new_rtx = eliminate_regs_1 (XEXP (x, 0), VOIDmode, insn, false,
2892 for_costs);
2893 if (new_rtx != XEXP (x, 0))
2894 return gen_rtx_USE (GET_MODE (x), new_rtx);
2895 return x;
2897 case CLOBBER:
2898 case ASM_OPERANDS:
2899 gcc_assert (insn && DEBUG_INSN_P (insn));
2900 break;
2902 case SET:
2903 gcc_unreachable ();
2905 default:
2906 break;
2909 /* Process each of our operands recursively. If any have changed, make a
2910 copy of the rtx. */
2911 fmt = GET_RTX_FORMAT (code);
2912 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2914 if (*fmt == 'e')
2916 new_rtx = eliminate_regs_1 (XEXP (x, i), mem_mode, insn, false,
2917 for_costs);
2918 if (new_rtx != XEXP (x, i) && ! copied)
2920 x = shallow_copy_rtx (x);
2921 copied = 1;
2923 XEXP (x, i) = new_rtx;
2925 else if (*fmt == 'E')
2927 int copied_vec = 0;
2928 for (j = 0; j < XVECLEN (x, i); j++)
2930 new_rtx = eliminate_regs_1 (XVECEXP (x, i, j), mem_mode, insn, false,
2931 for_costs);
2932 if (new_rtx != XVECEXP (x, i, j) && ! copied_vec)
2934 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
2935 XVEC (x, i)->elem);
2936 if (! copied)
2938 x = shallow_copy_rtx (x);
2939 copied = 1;
2941 XVEC (x, i) = new_v;
2942 copied_vec = 1;
2944 XVECEXP (x, i, j) = new_rtx;
2949 return x;
2953 eliminate_regs (rtx x, machine_mode mem_mode, rtx insn)
2955 if (reg_eliminate == NULL)
2957 gcc_assert (targetm.no_register_allocation);
2958 return x;
2960 return eliminate_regs_1 (x, mem_mode, insn, false, false);
2963 /* Scan rtx X for modifications of elimination target registers. Update
2964 the table of eliminables to reflect the changed state. MEM_MODE is
2965 the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM. */
2967 static void
2968 elimination_effects (rtx x, machine_mode mem_mode)
2970 enum rtx_code code = GET_CODE (x);
2971 struct elim_table *ep;
2972 int regno;
2973 int i, j;
2974 const char *fmt;
2976 switch (code)
2978 CASE_CONST_ANY:
2979 case CONST:
2980 case SYMBOL_REF:
2981 case CODE_LABEL:
2982 case PC:
2983 case CC0:
2984 case ASM_INPUT:
2985 case ADDR_VEC:
2986 case ADDR_DIFF_VEC:
2987 case RETURN:
2988 return;
2990 case REG:
2991 regno = REGNO (x);
2993 /* First handle the case where we encounter a bare register that
2994 is eliminable. Replace it with a PLUS. */
2995 if (regno < FIRST_PSEUDO_REGISTER)
2997 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2998 ep++)
2999 if (ep->from_rtx == x && ep->can_eliminate)
3001 if (! mem_mode)
3002 ep->ref_outside_mem = 1;
3003 return;
3007 else if (reg_renumber[regno] < 0
3008 && reg_equivs
3009 && reg_equiv_constant (regno)
3010 && ! function_invariant_p (reg_equiv_constant (regno)))
3011 elimination_effects (reg_equiv_constant (regno), mem_mode);
3012 return;
3014 case PRE_INC:
3015 case POST_INC:
3016 case PRE_DEC:
3017 case POST_DEC:
3018 case POST_MODIFY:
3019 case PRE_MODIFY:
3020 /* If we modify the source of an elimination rule, disable it. */
3021 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3022 if (ep->from_rtx == XEXP (x, 0))
3023 ep->can_eliminate = 0;
3025 /* If we modify the target of an elimination rule by adding a constant,
3026 update its offset. If we modify the target in any other way, we'll
3027 have to disable the rule as well. */
3028 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3029 if (ep->to_rtx == XEXP (x, 0))
3031 int size = GET_MODE_SIZE (mem_mode);
3033 /* If more bytes than MEM_MODE are pushed, account for them. */
3034 #ifdef PUSH_ROUNDING
3035 if (ep->to_rtx == stack_pointer_rtx)
3036 size = PUSH_ROUNDING (size);
3037 #endif
3038 if (code == PRE_DEC || code == POST_DEC)
3039 ep->offset += size;
3040 else if (code == PRE_INC || code == POST_INC)
3041 ep->offset -= size;
3042 else if (code == PRE_MODIFY || code == POST_MODIFY)
3044 if (GET_CODE (XEXP (x, 1)) == PLUS
3045 && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
3046 && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
3047 ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1));
3048 else
3049 ep->can_eliminate = 0;
3053 /* These two aren't unary operators. */
3054 if (code == POST_MODIFY || code == PRE_MODIFY)
3055 break;
3057 /* Fall through to generic unary operation case. */
3058 case STRICT_LOW_PART:
3059 case NEG: case NOT:
3060 case SIGN_EXTEND: case ZERO_EXTEND:
3061 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3062 case FLOAT: case FIX:
3063 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3064 case ABS:
3065 case SQRT:
3066 case FFS:
3067 case CLZ:
3068 case CTZ:
3069 case POPCOUNT:
3070 case PARITY:
3071 case BSWAP:
3072 elimination_effects (XEXP (x, 0), mem_mode);
3073 return;
3075 case SUBREG:
3076 if (REG_P (SUBREG_REG (x))
3077 && (GET_MODE_SIZE (GET_MODE (x))
3078 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3079 && reg_equivs
3080 && reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
3081 return;
3083 elimination_effects (SUBREG_REG (x), mem_mode);
3084 return;
3086 case USE:
3087 /* If using a register that is the source of an eliminate we still
3088 think can be performed, note it cannot be performed since we don't
3089 know how this register is used. */
3090 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3091 if (ep->from_rtx == XEXP (x, 0))
3092 ep->can_eliminate = 0;
3094 elimination_effects (XEXP (x, 0), mem_mode);
3095 return;
3097 case CLOBBER:
3098 /* If clobbering a register that is the replacement register for an
3099 elimination we still think can be performed, note that it cannot
3100 be performed. Otherwise, we need not be concerned about it. */
3101 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3102 if (ep->to_rtx == XEXP (x, 0))
3103 ep->can_eliminate = 0;
3105 elimination_effects (XEXP (x, 0), mem_mode);
3106 return;
3108 case SET:
3109 /* Check for setting a register that we know about. */
3110 if (REG_P (SET_DEST (x)))
3112 /* See if this is setting the replacement register for an
3113 elimination.
3115 If DEST is the hard frame pointer, we do nothing because we
3116 assume that all assignments to the frame pointer are for
3117 non-local gotos and are being done at a time when they are valid
3118 and do not disturb anything else. Some machines want to
3119 eliminate a fake argument pointer (or even a fake frame pointer)
3120 with either the real frame or the stack pointer. Assignments to
3121 the hard frame pointer must not prevent this elimination. */
3123 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3124 ep++)
3125 if (ep->to_rtx == SET_DEST (x)
3126 && SET_DEST (x) != hard_frame_pointer_rtx)
3128 /* If it is being incremented, adjust the offset. Otherwise,
3129 this elimination can't be done. */
3130 rtx src = SET_SRC (x);
3132 if (GET_CODE (src) == PLUS
3133 && XEXP (src, 0) == SET_DEST (x)
3134 && CONST_INT_P (XEXP (src, 1)))
3135 ep->offset -= INTVAL (XEXP (src, 1));
3136 else
3137 ep->can_eliminate = 0;
3141 elimination_effects (SET_DEST (x), VOIDmode);
3142 elimination_effects (SET_SRC (x), VOIDmode);
3143 return;
3145 case MEM:
3146 /* Our only special processing is to pass the mode of the MEM to our
3147 recursive call. */
3148 elimination_effects (XEXP (x, 0), GET_MODE (x));
3149 return;
3151 default:
3152 break;
3155 fmt = GET_RTX_FORMAT (code);
3156 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3158 if (*fmt == 'e')
3159 elimination_effects (XEXP (x, i), mem_mode);
3160 else if (*fmt == 'E')
3161 for (j = 0; j < XVECLEN (x, i); j++)
3162 elimination_effects (XVECEXP (x, i, j), mem_mode);
3166 /* Descend through rtx X and verify that no references to eliminable registers
3167 remain. If any do remain, mark the involved register as not
3168 eliminable. */
3170 static void
3171 check_eliminable_occurrences (rtx x)
3173 const char *fmt;
3174 int i;
3175 enum rtx_code code;
3177 if (x == 0)
3178 return;
3180 code = GET_CODE (x);
3182 if (code == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3184 struct elim_table *ep;
3186 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3187 if (ep->from_rtx == x)
3188 ep->can_eliminate = 0;
3189 return;
3192 fmt = GET_RTX_FORMAT (code);
3193 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3195 if (*fmt == 'e')
3196 check_eliminable_occurrences (XEXP (x, i));
3197 else if (*fmt == 'E')
3199 int j;
3200 for (j = 0; j < XVECLEN (x, i); j++)
3201 check_eliminable_occurrences (XVECEXP (x, i, j));
3206 /* Scan INSN and eliminate all eliminable registers in it.
3208 If REPLACE is nonzero, do the replacement destructively. Also
3209 delete the insn as dead it if it is setting an eliminable register.
3211 If REPLACE is zero, do all our allocations in reload_obstack.
3213 If no eliminations were done and this insn doesn't require any elimination
3214 processing (these are not identical conditions: it might be updating sp,
3215 but not referencing fp; this needs to be seen during reload_as_needed so
3216 that the offset between fp and sp can be taken into consideration), zero
3217 is returned. Otherwise, 1 is returned. */
3219 static int
3220 eliminate_regs_in_insn (rtx_insn *insn, int replace)
3222 int icode = recog_memoized (insn);
3223 rtx old_body = PATTERN (insn);
3224 int insn_is_asm = asm_noperands (old_body) >= 0;
3225 rtx old_set = single_set (insn);
3226 rtx new_body;
3227 int val = 0;
3228 int i;
3229 rtx substed_operand[MAX_RECOG_OPERANDS];
3230 rtx orig_operand[MAX_RECOG_OPERANDS];
3231 struct elim_table *ep;
3232 rtx plus_src, plus_cst_src;
3234 if (! insn_is_asm && icode < 0)
3236 gcc_assert (DEBUG_INSN_P (insn)
3237 || GET_CODE (PATTERN (insn)) == USE
3238 || GET_CODE (PATTERN (insn)) == CLOBBER
3239 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
3240 if (DEBUG_INSN_P (insn))
3241 INSN_VAR_LOCATION_LOC (insn)
3242 = eliminate_regs (INSN_VAR_LOCATION_LOC (insn), VOIDmode, insn);
3243 return 0;
3246 if (old_set != 0 && REG_P (SET_DEST (old_set))
3247 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3249 /* Check for setting an eliminable register. */
3250 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3251 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3253 /* If this is setting the frame pointer register to the
3254 hardware frame pointer register and this is an elimination
3255 that will be done (tested above), this insn is really
3256 adjusting the frame pointer downward to compensate for
3257 the adjustment done before a nonlocal goto. */
3258 if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
3259 && ep->from == FRAME_POINTER_REGNUM
3260 && ep->to == HARD_FRAME_POINTER_REGNUM)
3262 rtx base = SET_SRC (old_set);
3263 rtx_insn *base_insn = insn;
3264 HOST_WIDE_INT offset = 0;
3266 while (base != ep->to_rtx)
3268 rtx_insn *prev_insn;
3269 rtx prev_set;
3271 if (GET_CODE (base) == PLUS
3272 && CONST_INT_P (XEXP (base, 1)))
3274 offset += INTVAL (XEXP (base, 1));
3275 base = XEXP (base, 0);
3277 else if ((prev_insn = prev_nonnote_insn (base_insn)) != 0
3278 && (prev_set = single_set (prev_insn)) != 0
3279 && rtx_equal_p (SET_DEST (prev_set), base))
3281 base = SET_SRC (prev_set);
3282 base_insn = prev_insn;
3284 else
3285 break;
3288 if (base == ep->to_rtx)
3290 rtx src = plus_constant (Pmode, ep->to_rtx,
3291 offset - ep->offset);
3293 new_body = old_body;
3294 if (! replace)
3296 new_body = copy_insn (old_body);
3297 if (REG_NOTES (insn))
3298 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3300 PATTERN (insn) = new_body;
3301 old_set = single_set (insn);
3303 /* First see if this insn remains valid when we
3304 make the change. If not, keep the INSN_CODE
3305 the same and let reload fit it up. */
3306 validate_change (insn, &SET_SRC (old_set), src, 1);
3307 validate_change (insn, &SET_DEST (old_set),
3308 ep->to_rtx, 1);
3309 if (! apply_change_group ())
3311 SET_SRC (old_set) = src;
3312 SET_DEST (old_set) = ep->to_rtx;
3315 val = 1;
3316 goto done;
3320 /* In this case this insn isn't serving a useful purpose. We
3321 will delete it in reload_as_needed once we know that this
3322 elimination is, in fact, being done.
3324 If REPLACE isn't set, we can't delete this insn, but needn't
3325 process it since it won't be used unless something changes. */
3326 if (replace)
3328 delete_dead_insn (insn);
3329 return 1;
3331 val = 1;
3332 goto done;
3336 /* We allow one special case which happens to work on all machines we
3337 currently support: a single set with the source or a REG_EQUAL
3338 note being a PLUS of an eliminable register and a constant. */
3339 plus_src = plus_cst_src = 0;
3340 if (old_set && REG_P (SET_DEST (old_set)))
3342 if (GET_CODE (SET_SRC (old_set)) == PLUS)
3343 plus_src = SET_SRC (old_set);
3344 /* First see if the source is of the form (plus (...) CST). */
3345 if (plus_src
3346 && CONST_INT_P (XEXP (plus_src, 1)))
3347 plus_cst_src = plus_src;
3348 else if (REG_P (SET_SRC (old_set))
3349 || plus_src)
3351 /* Otherwise, see if we have a REG_EQUAL note of the form
3352 (plus (...) CST). */
3353 rtx links;
3354 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
3356 if ((REG_NOTE_KIND (links) == REG_EQUAL
3357 || REG_NOTE_KIND (links) == REG_EQUIV)
3358 && GET_CODE (XEXP (links, 0)) == PLUS
3359 && CONST_INT_P (XEXP (XEXP (links, 0), 1)))
3361 plus_cst_src = XEXP (links, 0);
3362 break;
3367 /* Check that the first operand of the PLUS is a hard reg or
3368 the lowpart subreg of one. */
3369 if (plus_cst_src)
3371 rtx reg = XEXP (plus_cst_src, 0);
3372 if (GET_CODE (reg) == SUBREG && subreg_lowpart_p (reg))
3373 reg = SUBREG_REG (reg);
3375 if (!REG_P (reg) || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
3376 plus_cst_src = 0;
3379 if (plus_cst_src)
3381 rtx reg = XEXP (plus_cst_src, 0);
3382 HOST_WIDE_INT offset = INTVAL (XEXP (plus_cst_src, 1));
3384 if (GET_CODE (reg) == SUBREG)
3385 reg = SUBREG_REG (reg);
3387 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3388 if (ep->from_rtx == reg && ep->can_eliminate)
3390 rtx to_rtx = ep->to_rtx;
3391 offset += ep->offset;
3392 offset = trunc_int_for_mode (offset, GET_MODE (plus_cst_src));
3394 if (GET_CODE (XEXP (plus_cst_src, 0)) == SUBREG)
3395 to_rtx = gen_lowpart (GET_MODE (XEXP (plus_cst_src, 0)),
3396 to_rtx);
3397 /* If we have a nonzero offset, and the source is already
3398 a simple REG, the following transformation would
3399 increase the cost of the insn by replacing a simple REG
3400 with (plus (reg sp) CST). So try only when we already
3401 had a PLUS before. */
3402 if (offset == 0 || plus_src)
3404 rtx new_src = plus_constant (GET_MODE (to_rtx),
3405 to_rtx, offset);
3407 new_body = old_body;
3408 if (! replace)
3410 new_body = copy_insn (old_body);
3411 if (REG_NOTES (insn))
3412 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3414 PATTERN (insn) = new_body;
3415 old_set = single_set (insn);
3417 /* First see if this insn remains valid when we make the
3418 change. If not, try to replace the whole pattern with
3419 a simple set (this may help if the original insn was a
3420 PARALLEL that was only recognized as single_set due to
3421 REG_UNUSED notes). If this isn't valid either, keep
3422 the INSN_CODE the same and let reload fix it up. */
3423 if (!validate_change (insn, &SET_SRC (old_set), new_src, 0))
3425 rtx new_pat = gen_rtx_SET (SET_DEST (old_set), new_src);
3427 if (!validate_change (insn, &PATTERN (insn), new_pat, 0))
3428 SET_SRC (old_set) = new_src;
3431 else
3432 break;
3434 val = 1;
3435 /* This can't have an effect on elimination offsets, so skip right
3436 to the end. */
3437 goto done;
3441 /* Determine the effects of this insn on elimination offsets. */
3442 elimination_effects (old_body, VOIDmode);
3444 /* Eliminate all eliminable registers occurring in operands that
3445 can be handled by reload. */
3446 extract_insn (insn);
3447 for (i = 0; i < recog_data.n_operands; i++)
3449 orig_operand[i] = recog_data.operand[i];
3450 substed_operand[i] = recog_data.operand[i];
3452 /* For an asm statement, every operand is eliminable. */
3453 if (insn_is_asm || insn_data[icode].operand[i].eliminable)
3455 bool is_set_src, in_plus;
3457 /* Check for setting a register that we know about. */
3458 if (recog_data.operand_type[i] != OP_IN
3459 && REG_P (orig_operand[i]))
3461 /* If we are assigning to a register that can be eliminated, it
3462 must be as part of a PARALLEL, since the code above handles
3463 single SETs. We must indicate that we can no longer
3464 eliminate this reg. */
3465 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3466 ep++)
3467 if (ep->from_rtx == orig_operand[i])
3468 ep->can_eliminate = 0;
3471 /* Companion to the above plus substitution, we can allow
3472 invariants as the source of a plain move. */
3473 is_set_src = false;
3474 if (old_set
3475 && recog_data.operand_loc[i] == &SET_SRC (old_set))
3476 is_set_src = true;
3477 in_plus = false;
3478 if (plus_src
3479 && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
3480 || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
3481 in_plus = true;
3483 substed_operand[i]
3484 = eliminate_regs_1 (recog_data.operand[i], VOIDmode,
3485 replace ? insn : NULL_RTX,
3486 is_set_src || in_plus, false);
3487 if (substed_operand[i] != orig_operand[i])
3488 val = 1;
3489 /* Terminate the search in check_eliminable_occurrences at
3490 this point. */
3491 *recog_data.operand_loc[i] = 0;
3493 /* If an output operand changed from a REG to a MEM and INSN is an
3494 insn, write a CLOBBER insn. */
3495 if (recog_data.operand_type[i] != OP_IN
3496 && REG_P (orig_operand[i])
3497 && MEM_P (substed_operand[i])
3498 && replace)
3499 emit_insn_after (gen_clobber (orig_operand[i]), insn);
3503 for (i = 0; i < recog_data.n_dups; i++)
3504 *recog_data.dup_loc[i]
3505 = *recog_data.operand_loc[(int) recog_data.dup_num[i]];
3507 /* If any eliminable remain, they aren't eliminable anymore. */
3508 check_eliminable_occurrences (old_body);
3510 /* Substitute the operands; the new values are in the substed_operand
3511 array. */
3512 for (i = 0; i < recog_data.n_operands; i++)
3513 *recog_data.operand_loc[i] = substed_operand[i];
3514 for (i = 0; i < recog_data.n_dups; i++)
3515 *recog_data.dup_loc[i] = substed_operand[(int) recog_data.dup_num[i]];
3517 /* If we are replacing a body that was a (set X (plus Y Z)), try to
3518 re-recognize the insn. We do this in case we had a simple addition
3519 but now can do this as a load-address. This saves an insn in this
3520 common case.
3521 If re-recognition fails, the old insn code number will still be used,
3522 and some register operands may have changed into PLUS expressions.
3523 These will be handled by find_reloads by loading them into a register
3524 again. */
3526 if (val)
3528 /* If we aren't replacing things permanently and we changed something,
3529 make another copy to ensure that all the RTL is new. Otherwise
3530 things can go wrong if find_reload swaps commutative operands
3531 and one is inside RTL that has been copied while the other is not. */
3532 new_body = old_body;
3533 if (! replace)
3535 new_body = copy_insn (old_body);
3536 if (REG_NOTES (insn))
3537 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3539 PATTERN (insn) = new_body;
3541 /* If we had a move insn but now we don't, rerecognize it. This will
3542 cause spurious re-recognition if the old move had a PARALLEL since
3543 the new one still will, but we can't call single_set without
3544 having put NEW_BODY into the insn and the re-recognition won't
3545 hurt in this rare case. */
3546 /* ??? Why this huge if statement - why don't we just rerecognize the
3547 thing always? */
3548 if (! insn_is_asm
3549 && old_set != 0
3550 && ((REG_P (SET_SRC (old_set))
3551 && (GET_CODE (new_body) != SET
3552 || !REG_P (SET_SRC (new_body))))
3553 /* If this was a load from or store to memory, compare
3554 the MEM in recog_data.operand to the one in the insn.
3555 If they are not equal, then rerecognize the insn. */
3556 || (old_set != 0
3557 && ((MEM_P (SET_SRC (old_set))
3558 && SET_SRC (old_set) != recog_data.operand[1])
3559 || (MEM_P (SET_DEST (old_set))
3560 && SET_DEST (old_set) != recog_data.operand[0])))
3561 /* If this was an add insn before, rerecognize. */
3562 || GET_CODE (SET_SRC (old_set)) == PLUS))
3564 int new_icode = recog (PATTERN (insn), insn, 0);
3565 if (new_icode >= 0)
3566 INSN_CODE (insn) = new_icode;
3570 /* Restore the old body. If there were any changes to it, we made a copy
3571 of it while the changes were still in place, so we'll correctly return
3572 a modified insn below. */
3573 if (! replace)
3575 /* Restore the old body. */
3576 for (i = 0; i < recog_data.n_operands; i++)
3577 /* Restoring a top-level match_parallel would clobber the new_body
3578 we installed in the insn. */
3579 if (recog_data.operand_loc[i] != &PATTERN (insn))
3580 *recog_data.operand_loc[i] = orig_operand[i];
3581 for (i = 0; i < recog_data.n_dups; i++)
3582 *recog_data.dup_loc[i] = orig_operand[(int) recog_data.dup_num[i]];
3585 /* Update all elimination pairs to reflect the status after the current
3586 insn. The changes we make were determined by the earlier call to
3587 elimination_effects.
3589 We also detect cases where register elimination cannot be done,
3590 namely, if a register would be both changed and referenced outside a MEM
3591 in the resulting insn since such an insn is often undefined and, even if
3592 not, we cannot know what meaning will be given to it. Note that it is
3593 valid to have a register used in an address in an insn that changes it
3594 (presumably with a pre- or post-increment or decrement).
3596 If anything changes, return nonzero. */
3598 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3600 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3601 ep->can_eliminate = 0;
3603 ep->ref_outside_mem = 0;
3605 if (ep->previous_offset != ep->offset)
3606 val = 1;
3609 done:
3610 /* If we changed something, perform elimination in REG_NOTES. This is
3611 needed even when REPLACE is zero because a REG_DEAD note might refer
3612 to a register that we eliminate and could cause a different number
3613 of spill registers to be needed in the final reload pass than in
3614 the pre-passes. */
3615 if (val && REG_NOTES (insn) != 0)
3616 REG_NOTES (insn)
3617 = eliminate_regs_1 (REG_NOTES (insn), VOIDmode, REG_NOTES (insn), true,
3618 false);
3620 return val;
3623 /* Like eliminate_regs_in_insn, but only estimate costs for the use of the
3624 register allocator. INSN is the instruction we need to examine, we perform
3625 eliminations in its operands and record cases where eliminating a reg with
3626 an invariant equivalence would add extra cost. */
3628 #pragma GCC diagnostic push
3629 #pragma GCC diagnostic warning "-Wmaybe-uninitialized"
3630 static void
3631 elimination_costs_in_insn (rtx_insn *insn)
3633 int icode = recog_memoized (insn);
3634 rtx old_body = PATTERN (insn);
3635 int insn_is_asm = asm_noperands (old_body) >= 0;
3636 rtx old_set = single_set (insn);
3637 int i;
3638 rtx orig_operand[MAX_RECOG_OPERANDS];
3639 rtx orig_dup[MAX_RECOG_OPERANDS];
3640 struct elim_table *ep;
3641 rtx plus_src, plus_cst_src;
3642 bool sets_reg_p;
3644 if (! insn_is_asm && icode < 0)
3646 gcc_assert (DEBUG_INSN_P (insn)
3647 || GET_CODE (PATTERN (insn)) == USE
3648 || GET_CODE (PATTERN (insn)) == CLOBBER
3649 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
3650 return;
3653 if (old_set != 0 && REG_P (SET_DEST (old_set))
3654 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3656 /* Check for setting an eliminable register. */
3657 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3658 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3659 return;
3662 /* We allow one special case which happens to work on all machines we
3663 currently support: a single set with the source or a REG_EQUAL
3664 note being a PLUS of an eliminable register and a constant. */
3665 plus_src = plus_cst_src = 0;
3666 sets_reg_p = false;
3667 if (old_set && REG_P (SET_DEST (old_set)))
3669 sets_reg_p = true;
3670 if (GET_CODE (SET_SRC (old_set)) == PLUS)
3671 plus_src = SET_SRC (old_set);
3672 /* First see if the source is of the form (plus (...) CST). */
3673 if (plus_src
3674 && CONST_INT_P (XEXP (plus_src, 1)))
3675 plus_cst_src = plus_src;
3676 else if (REG_P (SET_SRC (old_set))
3677 || plus_src)
3679 /* Otherwise, see if we have a REG_EQUAL note of the form
3680 (plus (...) CST). */
3681 rtx links;
3682 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
3684 if ((REG_NOTE_KIND (links) == REG_EQUAL
3685 || REG_NOTE_KIND (links) == REG_EQUIV)
3686 && GET_CODE (XEXP (links, 0)) == PLUS
3687 && CONST_INT_P (XEXP (XEXP (links, 0), 1)))
3689 plus_cst_src = XEXP (links, 0);
3690 break;
3696 /* Determine the effects of this insn on elimination offsets. */
3697 elimination_effects (old_body, VOIDmode);
3699 /* Eliminate all eliminable registers occurring in operands that
3700 can be handled by reload. */
3701 extract_insn (insn);
3702 int n_dups = recog_data.n_dups;
3703 for (i = 0; i < n_dups; i++)
3704 orig_dup[i] = *recog_data.dup_loc[i];
3706 int n_operands = recog_data.n_operands;
3707 for (i = 0; i < n_operands; i++)
3709 orig_operand[i] = recog_data.operand[i];
3711 /* For an asm statement, every operand is eliminable. */
3712 if (insn_is_asm || insn_data[icode].operand[i].eliminable)
3714 bool is_set_src, in_plus;
3716 /* Check for setting a register that we know about. */
3717 if (recog_data.operand_type[i] != OP_IN
3718 && REG_P (orig_operand[i]))
3720 /* If we are assigning to a register that can be eliminated, it
3721 must be as part of a PARALLEL, since the code above handles
3722 single SETs. We must indicate that we can no longer
3723 eliminate this reg. */
3724 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3725 ep++)
3726 if (ep->from_rtx == orig_operand[i])
3727 ep->can_eliminate = 0;
3730 /* Companion to the above plus substitution, we can allow
3731 invariants as the source of a plain move. */
3732 is_set_src = false;
3733 if (old_set && recog_data.operand_loc[i] == &SET_SRC (old_set))
3734 is_set_src = true;
3735 if (is_set_src && !sets_reg_p)
3736 note_reg_elim_costly (SET_SRC (old_set), insn);
3737 in_plus = false;
3738 if (plus_src && sets_reg_p
3739 && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
3740 || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
3741 in_plus = true;
3743 eliminate_regs_1 (recog_data.operand[i], VOIDmode,
3744 NULL_RTX,
3745 is_set_src || in_plus, true);
3746 /* Terminate the search in check_eliminable_occurrences at
3747 this point. */
3748 *recog_data.operand_loc[i] = 0;
3752 for (i = 0; i < n_dups; i++)
3753 *recog_data.dup_loc[i]
3754 = *recog_data.operand_loc[(int) recog_data.dup_num[i]];
3756 /* If any eliminable remain, they aren't eliminable anymore. */
3757 check_eliminable_occurrences (old_body);
3759 /* Restore the old body. */
3760 for (i = 0; i < n_operands; i++)
3761 *recog_data.operand_loc[i] = orig_operand[i];
3762 for (i = 0; i < n_dups; i++)
3763 *recog_data.dup_loc[i] = orig_dup[i];
3765 /* Update all elimination pairs to reflect the status after the current
3766 insn. The changes we make were determined by the earlier call to
3767 elimination_effects. */
3769 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3771 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3772 ep->can_eliminate = 0;
3774 ep->ref_outside_mem = 0;
3777 return;
3779 #pragma GCC diagnostic pop
3781 /* Loop through all elimination pairs.
3782 Recalculate the number not at initial offset.
3784 Compute the maximum offset (minimum offset if the stack does not
3785 grow downward) for each elimination pair. */
3787 static void
3788 update_eliminable_offsets (void)
3790 struct elim_table *ep;
3792 num_not_at_initial_offset = 0;
3793 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3795 ep->previous_offset = ep->offset;
3796 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3797 num_not_at_initial_offset++;
3801 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3802 replacement we currently believe is valid, mark it as not eliminable if X
3803 modifies DEST in any way other than by adding a constant integer to it.
3805 If DEST is the frame pointer, we do nothing because we assume that
3806 all assignments to the hard frame pointer are nonlocal gotos and are being
3807 done at a time when they are valid and do not disturb anything else.
3808 Some machines want to eliminate a fake argument pointer with either the
3809 frame or stack pointer. Assignments to the hard frame pointer must not
3810 prevent this elimination.
3812 Called via note_stores from reload before starting its passes to scan
3813 the insns of the function. */
3815 static void
3816 mark_not_eliminable (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
3818 unsigned int i;
3820 /* A SUBREG of a hard register here is just changing its mode. We should
3821 not see a SUBREG of an eliminable hard register, but check just in
3822 case. */
3823 if (GET_CODE (dest) == SUBREG)
3824 dest = SUBREG_REG (dest);
3826 if (dest == hard_frame_pointer_rtx)
3827 return;
3829 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3830 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3831 && (GET_CODE (x) != SET
3832 || GET_CODE (SET_SRC (x)) != PLUS
3833 || XEXP (SET_SRC (x), 0) != dest
3834 || !CONST_INT_P (XEXP (SET_SRC (x), 1))))
3836 reg_eliminate[i].can_eliminate_previous
3837 = reg_eliminate[i].can_eliminate = 0;
3838 num_eliminable--;
3842 /* Verify that the initial elimination offsets did not change since the
3843 last call to set_initial_elim_offsets. This is used to catch cases
3844 where something illegal happened during reload_as_needed that could
3845 cause incorrect code to be generated if we did not check for it. */
3847 static bool
3848 verify_initial_elim_offsets (void)
3850 HOST_WIDE_INT t;
3852 if (!num_eliminable)
3853 return true;
3855 #ifdef ELIMINABLE_REGS
3857 struct elim_table *ep;
3859 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3861 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t);
3862 if (t != ep->initial_offset)
3863 return false;
3866 #else
3867 INITIAL_FRAME_POINTER_OFFSET (t);
3868 if (t != reg_eliminate[0].initial_offset)
3869 return false;
3870 #endif
3872 return true;
3875 /* Reset all offsets on eliminable registers to their initial values. */
3877 static void
3878 set_initial_elim_offsets (void)
3880 struct elim_table *ep = reg_eliminate;
3882 #ifdef ELIMINABLE_REGS
3883 for (; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3885 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
3886 ep->previous_offset = ep->offset = ep->initial_offset;
3888 #else
3889 INITIAL_FRAME_POINTER_OFFSET (ep->initial_offset);
3890 ep->previous_offset = ep->offset = ep->initial_offset;
3891 #endif
3893 num_not_at_initial_offset = 0;
3896 /* Subroutine of set_initial_label_offsets called via for_each_eh_label. */
3898 static void
3899 set_initial_eh_label_offset (rtx label)
3901 set_label_offsets (label, NULL, 1);
3904 /* Initialize the known label offsets.
3905 Set a known offset for each forced label to be at the initial offset
3906 of each elimination. We do this because we assume that all
3907 computed jumps occur from a location where each elimination is
3908 at its initial offset.
3909 For all other labels, show that we don't know the offsets. */
3911 static void
3912 set_initial_label_offsets (void)
3914 memset (offsets_known_at, 0, num_labels);
3916 for (rtx_insn_list *x = forced_labels; x; x = x->next ())
3917 if (x->insn ())
3918 set_label_offsets (x->insn (), NULL, 1);
3920 for (rtx_insn_list *x = nonlocal_goto_handler_labels; x; x = x->next ())
3921 if (x->insn ())
3922 set_label_offsets (x->insn (), NULL, 1);
3924 for_each_eh_label (set_initial_eh_label_offset);
3927 /* Set all elimination offsets to the known values for the code label given
3928 by INSN. */
3930 static void
3931 set_offsets_for_label (rtx_insn *insn)
3933 unsigned int i;
3934 int label_nr = CODE_LABEL_NUMBER (insn);
3935 struct elim_table *ep;
3937 num_not_at_initial_offset = 0;
3938 for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++)
3940 ep->offset = ep->previous_offset
3941 = offsets_at[label_nr - first_label_num][i];
3942 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3943 num_not_at_initial_offset++;
3947 /* See if anything that happened changes which eliminations are valid.
3948 For example, on the SPARC, whether or not the frame pointer can
3949 be eliminated can depend on what registers have been used. We need
3950 not check some conditions again (such as flag_omit_frame_pointer)
3951 since they can't have changed. */
3953 static void
3954 update_eliminables (HARD_REG_SET *pset)
3956 int previous_frame_pointer_needed = frame_pointer_needed;
3957 struct elim_table *ep;
3959 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3960 if ((ep->from == HARD_FRAME_POINTER_REGNUM
3961 && targetm.frame_pointer_required ())
3962 #ifdef ELIMINABLE_REGS
3963 || ! targetm.can_eliminate (ep->from, ep->to)
3964 #endif
3966 ep->can_eliminate = 0;
3968 /* Look for the case where we have discovered that we can't replace
3969 register A with register B and that means that we will now be
3970 trying to replace register A with register C. This means we can
3971 no longer replace register C with register B and we need to disable
3972 such an elimination, if it exists. This occurs often with A == ap,
3973 B == sp, and C == fp. */
3975 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3977 struct elim_table *op;
3978 int new_to = -1;
3980 if (! ep->can_eliminate && ep->can_eliminate_previous)
3982 /* Find the current elimination for ep->from, if there is a
3983 new one. */
3984 for (op = reg_eliminate;
3985 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3986 if (op->from == ep->from && op->can_eliminate)
3988 new_to = op->to;
3989 break;
3992 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
3993 disable it. */
3994 for (op = reg_eliminate;
3995 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3996 if (op->from == new_to && op->to == ep->to)
3997 op->can_eliminate = 0;
4001 /* See if any registers that we thought we could eliminate the previous
4002 time are no longer eliminable. If so, something has changed and we
4003 must spill the register. Also, recompute the number of eliminable
4004 registers and see if the frame pointer is needed; it is if there is
4005 no elimination of the frame pointer that we can perform. */
4007 frame_pointer_needed = 1;
4008 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
4010 if (ep->can_eliminate
4011 && ep->from == FRAME_POINTER_REGNUM
4012 && ep->to != HARD_FRAME_POINTER_REGNUM
4013 && (! SUPPORTS_STACK_ALIGNMENT
4014 || ! crtl->stack_realign_needed))
4015 frame_pointer_needed = 0;
4017 if (! ep->can_eliminate && ep->can_eliminate_previous)
4019 ep->can_eliminate_previous = 0;
4020 SET_HARD_REG_BIT (*pset, ep->from);
4021 num_eliminable--;
4025 /* If we didn't need a frame pointer last time, but we do now, spill
4026 the hard frame pointer. */
4027 if (frame_pointer_needed && ! previous_frame_pointer_needed)
4028 SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM);
4031 /* Call update_eliminables an spill any registers we can't eliminate anymore.
4032 Return true iff a register was spilled. */
4034 static bool
4035 update_eliminables_and_spill (void)
4037 int i;
4038 bool did_spill = false;
4039 HARD_REG_SET to_spill;
4040 CLEAR_HARD_REG_SET (to_spill);
4041 update_eliminables (&to_spill);
4042 AND_COMPL_HARD_REG_SET (used_spill_regs, to_spill);
4044 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4045 if (TEST_HARD_REG_BIT (to_spill, i))
4047 spill_hard_reg (i, 1);
4048 did_spill = true;
4050 /* Regardless of the state of spills, if we previously had
4051 a register that we thought we could eliminate, but now can
4052 not eliminate, we must run another pass.
4054 Consider pseudos which have an entry in reg_equiv_* which
4055 reference an eliminable register. We must make another pass
4056 to update reg_equiv_* so that we do not substitute in the
4057 old value from when we thought the elimination could be
4058 performed. */
4060 return did_spill;
4063 /* Return true if X is used as the target register of an elimination. */
4065 bool
4066 elimination_target_reg_p (rtx x)
4068 struct elim_table *ep;
4070 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
4071 if (ep->to_rtx == x && ep->can_eliminate)
4072 return true;
4074 return false;
4077 /* Initialize the table of registers to eliminate.
4078 Pre-condition: global flag frame_pointer_needed has been set before
4079 calling this function. */
4081 static void
4082 init_elim_table (void)
4084 struct elim_table *ep;
4085 #ifdef ELIMINABLE_REGS
4086 const struct elim_table_1 *ep1;
4087 #endif
4089 if (!reg_eliminate)
4090 reg_eliminate = XCNEWVEC (struct elim_table, NUM_ELIMINABLE_REGS);
4092 num_eliminable = 0;
4094 #ifdef ELIMINABLE_REGS
4095 for (ep = reg_eliminate, ep1 = reg_eliminate_1;
4096 ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++, ep1++)
4098 ep->from = ep1->from;
4099 ep->to = ep1->to;
4100 ep->can_eliminate = ep->can_eliminate_previous
4101 = (targetm.can_eliminate (ep->from, ep->to)
4102 && ! (ep->to == STACK_POINTER_REGNUM
4103 && frame_pointer_needed
4104 && (! SUPPORTS_STACK_ALIGNMENT
4105 || ! stack_realign_fp)));
4107 #else
4108 reg_eliminate[0].from = reg_eliminate_1[0].from;
4109 reg_eliminate[0].to = reg_eliminate_1[0].to;
4110 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
4111 = ! frame_pointer_needed;
4112 #endif
4114 /* Count the number of eliminable registers and build the FROM and TO
4115 REG rtx's. Note that code in gen_rtx_REG will cause, e.g.,
4116 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
4117 We depend on this. */
4118 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
4120 num_eliminable += ep->can_eliminate;
4121 ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
4122 ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
4126 /* Find all the pseudo registers that didn't get hard regs
4127 but do have known equivalent constants or memory slots.
4128 These include parameters (known equivalent to parameter slots)
4129 and cse'd or loop-moved constant memory addresses.
4131 Record constant equivalents in reg_equiv_constant
4132 so they will be substituted by find_reloads.
4133 Record memory equivalents in reg_mem_equiv so they can
4134 be substituted eventually by altering the REG-rtx's. */
4136 static void
4137 init_eliminable_invariants (rtx_insn *first, bool do_subregs)
4139 int i;
4140 rtx_insn *insn;
4142 grow_reg_equivs ();
4143 if (do_subregs)
4144 reg_max_ref_width = XCNEWVEC (unsigned int, max_regno);
4145 else
4146 reg_max_ref_width = NULL;
4148 num_eliminable_invariants = 0;
4150 first_label_num = get_first_label_num ();
4151 num_labels = max_label_num () - first_label_num;
4153 /* Allocate the tables used to store offset information at labels. */
4154 offsets_known_at = XNEWVEC (char, num_labels);
4155 offsets_at = (HOST_WIDE_INT (*)[NUM_ELIMINABLE_REGS]) xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (HOST_WIDE_INT));
4157 /* Look for REG_EQUIV notes; record what each pseudo is equivalent
4158 to. If DO_SUBREGS is true, also find all paradoxical subregs and
4159 find largest such for each pseudo. FIRST is the head of the insn
4160 list. */
4162 for (insn = first; insn; insn = NEXT_INSN (insn))
4164 rtx set = single_set (insn);
4166 /* We may introduce USEs that we want to remove at the end, so
4167 we'll mark them with QImode. Make sure there are no
4168 previously-marked insns left by say regmove. */
4169 if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
4170 && GET_MODE (insn) != VOIDmode)
4171 PUT_MODE (insn, VOIDmode);
4173 if (do_subregs && NONDEBUG_INSN_P (insn))
4174 scan_paradoxical_subregs (PATTERN (insn));
4176 if (set != 0 && REG_P (SET_DEST (set)))
4178 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
4179 rtx x;
4181 if (! note)
4182 continue;
4184 i = REGNO (SET_DEST (set));
4185 x = XEXP (note, 0);
4187 if (i <= LAST_VIRTUAL_REGISTER)
4188 continue;
4190 /* If flag_pic and we have constant, verify it's legitimate. */
4191 if (!CONSTANT_P (x)
4192 || !flag_pic || LEGITIMATE_PIC_OPERAND_P (x))
4194 /* It can happen that a REG_EQUIV note contains a MEM
4195 that is not a legitimate memory operand. As later
4196 stages of reload assume that all addresses found
4197 in the reg_equiv_* arrays were originally legitimate,
4198 we ignore such REG_EQUIV notes. */
4199 if (memory_operand (x, VOIDmode))
4201 /* Always unshare the equivalence, so we can
4202 substitute into this insn without touching the
4203 equivalence. */
4204 reg_equiv_memory_loc (i) = copy_rtx (x);
4206 else if (function_invariant_p (x))
4208 machine_mode mode;
4210 mode = GET_MODE (SET_DEST (set));
4211 if (GET_CODE (x) == PLUS)
4213 /* This is PLUS of frame pointer and a constant,
4214 and might be shared. Unshare it. */
4215 reg_equiv_invariant (i) = copy_rtx (x);
4216 num_eliminable_invariants++;
4218 else if (x == frame_pointer_rtx || x == arg_pointer_rtx)
4220 reg_equiv_invariant (i) = x;
4221 num_eliminable_invariants++;
4223 else if (targetm.legitimate_constant_p (mode, x))
4224 reg_equiv_constant (i) = x;
4225 else
4227 reg_equiv_memory_loc (i) = force_const_mem (mode, x);
4228 if (! reg_equiv_memory_loc (i))
4229 reg_equiv_init (i) = NULL;
4232 else
4234 reg_equiv_init (i) = NULL;
4235 continue;
4238 else
4239 reg_equiv_init (i) = NULL;
4243 if (dump_file)
4244 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4245 if (reg_equiv_init (i))
4247 fprintf (dump_file, "init_insns for %u: ", i);
4248 print_inline_rtx (dump_file, reg_equiv_init (i), 20);
4249 fprintf (dump_file, "\n");
4253 /* Indicate that we no longer have known memory locations or constants.
4254 Free all data involved in tracking these. */
4256 static void
4257 free_reg_equiv (void)
4259 int i;
4261 free (offsets_known_at);
4262 free (offsets_at);
4263 offsets_at = 0;
4264 offsets_known_at = 0;
4266 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4267 if (reg_equiv_alt_mem_list (i))
4268 free_EXPR_LIST_list (&reg_equiv_alt_mem_list (i));
4269 vec_free (reg_equivs);
4272 /* Kick all pseudos out of hard register REGNO.
4274 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
4275 because we found we can't eliminate some register. In the case, no pseudos
4276 are allowed to be in the register, even if they are only in a block that
4277 doesn't require spill registers, unlike the case when we are spilling this
4278 hard reg to produce another spill register.
4280 Return nonzero if any pseudos needed to be kicked out. */
4282 static void
4283 spill_hard_reg (unsigned int regno, int cant_eliminate)
4285 int i;
4287 if (cant_eliminate)
4289 SET_HARD_REG_BIT (bad_spill_regs_global, regno);
4290 df_set_regs_ever_live (regno, true);
4293 /* Spill every pseudo reg that was allocated to this reg
4294 or to something that overlaps this reg. */
4296 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4297 if (reg_renumber[i] >= 0
4298 && (unsigned int) reg_renumber[i] <= regno
4299 && end_hard_regno (PSEUDO_REGNO_MODE (i), reg_renumber[i]) > regno)
4300 SET_REGNO_REG_SET (&spilled_pseudos, i);
4303 /* After find_reload_regs has been run for all insn that need reloads,
4304 and/or spill_hard_regs was called, this function is used to actually
4305 spill pseudo registers and try to reallocate them. It also sets up the
4306 spill_regs array for use by choose_reload_regs. */
4308 static int
4309 finish_spills (int global)
4311 struct insn_chain *chain;
4312 int something_changed = 0;
4313 unsigned i;
4314 reg_set_iterator rsi;
4316 /* Build the spill_regs array for the function. */
4317 /* If there are some registers still to eliminate and one of the spill regs
4318 wasn't ever used before, additional stack space may have to be
4319 allocated to store this register. Thus, we may have changed the offset
4320 between the stack and frame pointers, so mark that something has changed.
4322 One might think that we need only set VAL to 1 if this is a call-used
4323 register. However, the set of registers that must be saved by the
4324 prologue is not identical to the call-used set. For example, the
4325 register used by the call insn for the return PC is a call-used register,
4326 but must be saved by the prologue. */
4328 n_spills = 0;
4329 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4330 if (TEST_HARD_REG_BIT (used_spill_regs, i))
4332 spill_reg_order[i] = n_spills;
4333 spill_regs[n_spills++] = i;
4334 if (num_eliminable && ! df_regs_ever_live_p (i))
4335 something_changed = 1;
4336 df_set_regs_ever_live (i, true);
4338 else
4339 spill_reg_order[i] = -1;
4341 EXECUTE_IF_SET_IN_REG_SET (&spilled_pseudos, FIRST_PSEUDO_REGISTER, i, rsi)
4342 if (! ira_conflicts_p || reg_renumber[i] >= 0)
4344 /* Record the current hard register the pseudo is allocated to
4345 in pseudo_previous_regs so we avoid reallocating it to the
4346 same hard reg in a later pass. */
4347 gcc_assert (reg_renumber[i] >= 0);
4349 SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]);
4350 /* Mark it as no longer having a hard register home. */
4351 reg_renumber[i] = -1;
4352 if (ira_conflicts_p)
4353 /* Inform IRA about the change. */
4354 ira_mark_allocation_change (i);
4355 /* We will need to scan everything again. */
4356 something_changed = 1;
4359 /* Retry global register allocation if possible. */
4360 if (global && ira_conflicts_p)
4362 unsigned int n;
4364 memset (pseudo_forbidden_regs, 0, max_regno * sizeof (HARD_REG_SET));
4365 /* For every insn that needs reloads, set the registers used as spill
4366 regs in pseudo_forbidden_regs for every pseudo live across the
4367 insn. */
4368 for (chain = insns_need_reload; chain; chain = chain->next_need_reload)
4370 EXECUTE_IF_SET_IN_REG_SET
4371 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
4373 IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
4374 chain->used_spill_regs);
4376 EXECUTE_IF_SET_IN_REG_SET
4377 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
4379 IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
4380 chain->used_spill_regs);
4384 /* Retry allocating the pseudos spilled in IRA and the
4385 reload. For each reg, merge the various reg sets that
4386 indicate which hard regs can't be used, and call
4387 ira_reassign_pseudos. */
4388 for (n = 0, i = FIRST_PSEUDO_REGISTER; i < (unsigned) max_regno; i++)
4389 if (reg_old_renumber[i] != reg_renumber[i])
4391 if (reg_renumber[i] < 0)
4392 temp_pseudo_reg_arr[n++] = i;
4393 else
4394 CLEAR_REGNO_REG_SET (&spilled_pseudos, i);
4396 if (ira_reassign_pseudos (temp_pseudo_reg_arr, n,
4397 bad_spill_regs_global,
4398 pseudo_forbidden_regs, pseudo_previous_regs,
4399 &spilled_pseudos))
4400 something_changed = 1;
4402 /* Fix up the register information in the insn chain.
4403 This involves deleting those of the spilled pseudos which did not get
4404 a new hard register home from the live_{before,after} sets. */
4405 for (chain = reload_insn_chain; chain; chain = chain->next)
4407 HARD_REG_SET used_by_pseudos;
4408 HARD_REG_SET used_by_pseudos2;
4410 if (! ira_conflicts_p)
4412 /* Don't do it for IRA because IRA and the reload still can
4413 assign hard registers to the spilled pseudos on next
4414 reload iterations. */
4415 AND_COMPL_REG_SET (&chain->live_throughout, &spilled_pseudos);
4416 AND_COMPL_REG_SET (&chain->dead_or_set, &spilled_pseudos);
4418 /* Mark any unallocated hard regs as available for spills. That
4419 makes inheritance work somewhat better. */
4420 if (chain->need_reload)
4422 REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
4423 REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
4424 IOR_HARD_REG_SET (used_by_pseudos, used_by_pseudos2);
4426 compute_use_by_pseudos (&used_by_pseudos, &chain->live_throughout);
4427 compute_use_by_pseudos (&used_by_pseudos, &chain->dead_or_set);
4428 /* Value of chain->used_spill_regs from previous iteration
4429 may be not included in the value calculated here because
4430 of possible removing caller-saves insns (see function
4431 delete_caller_save_insns. */
4432 COMPL_HARD_REG_SET (chain->used_spill_regs, used_by_pseudos);
4433 AND_HARD_REG_SET (chain->used_spill_regs, used_spill_regs);
4437 CLEAR_REG_SET (&changed_allocation_pseudos);
4438 /* Let alter_reg modify the reg rtx's for the modified pseudos. */
4439 for (i = FIRST_PSEUDO_REGISTER; i < (unsigned)max_regno; i++)
4441 int regno = reg_renumber[i];
4442 if (reg_old_renumber[i] == regno)
4443 continue;
4445 SET_REGNO_REG_SET (&changed_allocation_pseudos, i);
4447 alter_reg (i, reg_old_renumber[i], false);
4448 reg_old_renumber[i] = regno;
4449 if (dump_file)
4451 if (regno == -1)
4452 fprintf (dump_file, " Register %d now on stack.\n\n", i);
4453 else
4454 fprintf (dump_file, " Register %d now in %d.\n\n",
4455 i, reg_renumber[i]);
4459 return something_changed;
4462 /* Find all paradoxical subregs within X and update reg_max_ref_width. */
4464 static void
4465 scan_paradoxical_subregs (rtx x)
4467 int i;
4468 const char *fmt;
4469 enum rtx_code code = GET_CODE (x);
4471 switch (code)
4473 case REG:
4474 case CONST:
4475 case SYMBOL_REF:
4476 case LABEL_REF:
4477 CASE_CONST_ANY:
4478 case CC0:
4479 case PC:
4480 case USE:
4481 case CLOBBER:
4482 return;
4484 case SUBREG:
4485 if (REG_P (SUBREG_REG (x))
4486 && (GET_MODE_SIZE (GET_MODE (x))
4487 > reg_max_ref_width[REGNO (SUBREG_REG (x))]))
4489 reg_max_ref_width[REGNO (SUBREG_REG (x))]
4490 = GET_MODE_SIZE (GET_MODE (x));
4491 mark_home_live_1 (REGNO (SUBREG_REG (x)), GET_MODE (x));
4493 return;
4495 default:
4496 break;
4499 fmt = GET_RTX_FORMAT (code);
4500 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4502 if (fmt[i] == 'e')
4503 scan_paradoxical_subregs (XEXP (x, i));
4504 else if (fmt[i] == 'E')
4506 int j;
4507 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4508 scan_paradoxical_subregs (XVECEXP (x, i, j));
4513 /* *OP_PTR and *OTHER_PTR are two operands to a conceptual reload.
4514 If *OP_PTR is a paradoxical subreg, try to remove that subreg
4515 and apply the corresponding narrowing subreg to *OTHER_PTR.
4516 Return true if the operands were changed, false otherwise. */
4518 static bool
4519 strip_paradoxical_subreg (rtx *op_ptr, rtx *other_ptr)
4521 rtx op, inner, other, tem;
4523 op = *op_ptr;
4524 if (!paradoxical_subreg_p (op))
4525 return false;
4526 inner = SUBREG_REG (op);
4528 other = *other_ptr;
4529 tem = gen_lowpart_common (GET_MODE (inner), other);
4530 if (!tem)
4531 return false;
4533 /* If the lowpart operation turned a hard register into a subreg,
4534 rather than simplifying it to another hard register, then the
4535 mode change cannot be properly represented. For example, OTHER
4536 might be valid in its current mode, but not in the new one. */
4537 if (GET_CODE (tem) == SUBREG
4538 && REG_P (other)
4539 && HARD_REGISTER_P (other))
4540 return false;
4542 *op_ptr = inner;
4543 *other_ptr = tem;
4544 return true;
4547 /* A subroutine of reload_as_needed. If INSN has a REG_EH_REGION note,
4548 examine all of the reload insns between PREV and NEXT exclusive, and
4549 annotate all that may trap. */
4551 static void
4552 fixup_eh_region_note (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
4554 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
4555 if (note == NULL)
4556 return;
4557 if (!insn_could_throw_p (insn))
4558 remove_note (insn, note);
4559 copy_reg_eh_region_note_forward (note, NEXT_INSN (prev), next);
4562 /* Reload pseudo-registers into hard regs around each insn as needed.
4563 Additional register load insns are output before the insn that needs it
4564 and perhaps store insns after insns that modify the reloaded pseudo reg.
4566 reg_last_reload_reg and reg_reloaded_contents keep track of
4567 which registers are already available in reload registers.
4568 We update these for the reloads that we perform,
4569 as the insns are scanned. */
4571 static void
4572 reload_as_needed (int live_known)
4574 struct insn_chain *chain;
4575 #if AUTO_INC_DEC
4576 int i;
4577 #endif
4578 rtx_note *marker;
4580 memset (spill_reg_rtx, 0, sizeof spill_reg_rtx);
4581 memset (spill_reg_store, 0, sizeof spill_reg_store);
4582 reg_last_reload_reg = XCNEWVEC (rtx, max_regno);
4583 INIT_REG_SET (&reg_has_output_reload);
4584 CLEAR_HARD_REG_SET (reg_reloaded_valid);
4585 CLEAR_HARD_REG_SET (reg_reloaded_call_part_clobbered);
4587 set_initial_elim_offsets ();
4589 /* Generate a marker insn that we will move around. */
4590 marker = emit_note (NOTE_INSN_DELETED);
4591 unlink_insn_chain (marker, marker);
4593 for (chain = reload_insn_chain; chain; chain = chain->next)
4595 rtx_insn *prev = 0;
4596 rtx_insn *insn = chain->insn;
4597 rtx_insn *old_next = NEXT_INSN (insn);
4598 #if AUTO_INC_DEC
4599 rtx_insn *old_prev = PREV_INSN (insn);
4600 #endif
4602 if (will_delete_init_insn_p (insn))
4603 continue;
4605 /* If we pass a label, copy the offsets from the label information
4606 into the current offsets of each elimination. */
4607 if (LABEL_P (insn))
4608 set_offsets_for_label (insn);
4610 else if (INSN_P (insn))
4612 regset_head regs_to_forget;
4613 INIT_REG_SET (&regs_to_forget);
4614 note_stores (PATTERN (insn), forget_old_reloads_1, &regs_to_forget);
4616 /* If this is a USE and CLOBBER of a MEM, ensure that any
4617 references to eliminable registers have been removed. */
4619 if ((GET_CODE (PATTERN (insn)) == USE
4620 || GET_CODE (PATTERN (insn)) == CLOBBER)
4621 && MEM_P (XEXP (PATTERN (insn), 0)))
4622 XEXP (XEXP (PATTERN (insn), 0), 0)
4623 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
4624 GET_MODE (XEXP (PATTERN (insn), 0)),
4625 NULL_RTX);
4627 /* If we need to do register elimination processing, do so.
4628 This might delete the insn, in which case we are done. */
4629 if ((num_eliminable || num_eliminable_invariants) && chain->need_elim)
4631 eliminate_regs_in_insn (insn, 1);
4632 if (NOTE_P (insn))
4634 update_eliminable_offsets ();
4635 CLEAR_REG_SET (&regs_to_forget);
4636 continue;
4640 /* If need_elim is nonzero but need_reload is zero, one might think
4641 that we could simply set n_reloads to 0. However, find_reloads
4642 could have done some manipulation of the insn (such as swapping
4643 commutative operands), and these manipulations are lost during
4644 the first pass for every insn that needs register elimination.
4645 So the actions of find_reloads must be redone here. */
4647 if (! chain->need_elim && ! chain->need_reload
4648 && ! chain->need_operand_change)
4649 n_reloads = 0;
4650 /* First find the pseudo regs that must be reloaded for this insn.
4651 This info is returned in the tables reload_... (see reload.h).
4652 Also modify the body of INSN by substituting RELOAD
4653 rtx's for those pseudo regs. */
4654 else
4656 CLEAR_REG_SET (&reg_has_output_reload);
4657 CLEAR_HARD_REG_SET (reg_is_output_reload);
4659 find_reloads (insn, 1, spill_indirect_levels, live_known,
4660 spill_reg_order);
4663 if (n_reloads > 0)
4665 rtx_insn *next = NEXT_INSN (insn);
4667 /* ??? PREV can get deleted by reload inheritance.
4668 Work around this by emitting a marker note. */
4669 prev = PREV_INSN (insn);
4670 reorder_insns_nobb (marker, marker, prev);
4672 /* Now compute which reload regs to reload them into. Perhaps
4673 reusing reload regs from previous insns, or else output
4674 load insns to reload them. Maybe output store insns too.
4675 Record the choices of reload reg in reload_reg_rtx. */
4676 choose_reload_regs (chain);
4678 /* Generate the insns to reload operands into or out of
4679 their reload regs. */
4680 emit_reload_insns (chain);
4682 /* Substitute the chosen reload regs from reload_reg_rtx
4683 into the insn's body (or perhaps into the bodies of other
4684 load and store insn that we just made for reloading
4685 and that we moved the structure into). */
4686 subst_reloads (insn);
4688 prev = PREV_INSN (marker);
4689 unlink_insn_chain (marker, marker);
4691 /* Adjust the exception region notes for loads and stores. */
4692 if (cfun->can_throw_non_call_exceptions && !CALL_P (insn))
4693 fixup_eh_region_note (insn, prev, next);
4695 /* Adjust the location of REG_ARGS_SIZE. */
4696 rtx p = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
4697 if (p)
4699 remove_note (insn, p);
4700 fixup_args_size_notes (prev, PREV_INSN (next),
4701 INTVAL (XEXP (p, 0)));
4704 /* If this was an ASM, make sure that all the reload insns
4705 we have generated are valid. If not, give an error
4706 and delete them. */
4707 if (asm_noperands (PATTERN (insn)) >= 0)
4708 for (rtx_insn *p = NEXT_INSN (prev);
4709 p != next;
4710 p = NEXT_INSN (p))
4711 if (p != insn && INSN_P (p)
4712 && GET_CODE (PATTERN (p)) != USE
4713 && (recog_memoized (p) < 0
4714 || (extract_insn (p),
4715 !(constrain_operands (1,
4716 get_enabled_alternatives (p))))))
4718 error_for_asm (insn,
4719 "%<asm%> operand requires "
4720 "impossible reload");
4721 delete_insn (p);
4725 if (num_eliminable && chain->need_elim)
4726 update_eliminable_offsets ();
4728 /* Any previously reloaded spilled pseudo reg, stored in this insn,
4729 is no longer validly lying around to save a future reload.
4730 Note that this does not detect pseudos that were reloaded
4731 for this insn in order to be stored in
4732 (obeying register constraints). That is correct; such reload
4733 registers ARE still valid. */
4734 forget_marked_reloads (&regs_to_forget);
4735 CLEAR_REG_SET (&regs_to_forget);
4737 /* There may have been CLOBBER insns placed after INSN. So scan
4738 between INSN and NEXT and use them to forget old reloads. */
4739 for (rtx_insn *x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
4740 if (NONJUMP_INSN_P (x) && GET_CODE (PATTERN (x)) == CLOBBER)
4741 note_stores (PATTERN (x), forget_old_reloads_1, NULL);
4743 #if AUTO_INC_DEC
4744 /* Likewise for regs altered by auto-increment in this insn.
4745 REG_INC notes have been changed by reloading:
4746 find_reloads_address_1 records substitutions for them,
4747 which have been performed by subst_reloads above. */
4748 for (i = n_reloads - 1; i >= 0; i--)
4750 rtx in_reg = rld[i].in_reg;
4751 if (in_reg)
4753 enum rtx_code code = GET_CODE (in_reg);
4754 /* PRE_INC / PRE_DEC will have the reload register ending up
4755 with the same value as the stack slot, but that doesn't
4756 hold true for POST_INC / POST_DEC. Either we have to
4757 convert the memory access to a true POST_INC / POST_DEC,
4758 or we can't use the reload register for inheritance. */
4759 if ((code == POST_INC || code == POST_DEC)
4760 && TEST_HARD_REG_BIT (reg_reloaded_valid,
4761 REGNO (rld[i].reg_rtx))
4762 /* Make sure it is the inc/dec pseudo, and not
4763 some other (e.g. output operand) pseudo. */
4764 && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4765 == REGNO (XEXP (in_reg, 0))))
4768 rtx reload_reg = rld[i].reg_rtx;
4769 machine_mode mode = GET_MODE (reload_reg);
4770 int n = 0;
4771 rtx_insn *p;
4773 for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p))
4775 /* We really want to ignore REG_INC notes here, so
4776 use PATTERN (p) as argument to reg_set_p . */
4777 if (reg_set_p (reload_reg, PATTERN (p)))
4778 break;
4779 n = count_occurrences (PATTERN (p), reload_reg, 0);
4780 if (! n)
4781 continue;
4782 if (n == 1)
4784 rtx replace_reg
4785 = gen_rtx_fmt_e (code, mode, reload_reg);
4787 validate_replace_rtx_group (reload_reg,
4788 replace_reg, p);
4789 n = verify_changes (0);
4791 /* We must also verify that the constraints
4792 are met after the replacement. Make sure
4793 extract_insn is only called for an insn
4794 where the replacements were found to be
4795 valid so far. */
4796 if (n)
4798 extract_insn (p);
4799 n = constrain_operands (1,
4800 get_enabled_alternatives (p));
4803 /* If the constraints were not met, then
4804 undo the replacement, else confirm it. */
4805 if (!n)
4806 cancel_changes (0);
4807 else
4808 confirm_change_group ();
4810 break;
4812 if (n == 1)
4814 add_reg_note (p, REG_INC, reload_reg);
4815 /* Mark this as having an output reload so that the
4816 REG_INC processing code below won't invalidate
4817 the reload for inheritance. */
4818 SET_HARD_REG_BIT (reg_is_output_reload,
4819 REGNO (reload_reg));
4820 SET_REGNO_REG_SET (&reg_has_output_reload,
4821 REGNO (XEXP (in_reg, 0)));
4823 else
4824 forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX,
4825 NULL);
4827 else if ((code == PRE_INC || code == PRE_DEC)
4828 && TEST_HARD_REG_BIT (reg_reloaded_valid,
4829 REGNO (rld[i].reg_rtx))
4830 /* Make sure it is the inc/dec pseudo, and not
4831 some other (e.g. output operand) pseudo. */
4832 && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4833 == REGNO (XEXP (in_reg, 0))))
4835 SET_HARD_REG_BIT (reg_is_output_reload,
4836 REGNO (rld[i].reg_rtx));
4837 SET_REGNO_REG_SET (&reg_has_output_reload,
4838 REGNO (XEXP (in_reg, 0)));
4840 else if (code == PRE_INC || code == PRE_DEC
4841 || code == POST_INC || code == POST_DEC)
4843 int in_regno = REGNO (XEXP (in_reg, 0));
4845 if (reg_last_reload_reg[in_regno] != NULL_RTX)
4847 int in_hard_regno;
4848 bool forget_p = true;
4850 in_hard_regno = REGNO (reg_last_reload_reg[in_regno]);
4851 if (TEST_HARD_REG_BIT (reg_reloaded_valid,
4852 in_hard_regno))
4854 for (rtx_insn *x = (old_prev ?
4855 NEXT_INSN (old_prev) : insn);
4856 x != old_next;
4857 x = NEXT_INSN (x))
4858 if (x == reg_reloaded_insn[in_hard_regno])
4860 forget_p = false;
4861 break;
4864 /* If for some reasons, we didn't set up
4865 reg_last_reload_reg in this insn,
4866 invalidate inheritance from previous
4867 insns for the incremented/decremented
4868 register. Such registers will be not in
4869 reg_has_output_reload. Invalidate it
4870 also if the corresponding element in
4871 reg_reloaded_insn is also
4872 invalidated. */
4873 if (forget_p)
4874 forget_old_reloads_1 (XEXP (in_reg, 0),
4875 NULL_RTX, NULL);
4880 /* If a pseudo that got a hard register is auto-incremented,
4881 we must purge records of copying it into pseudos without
4882 hard registers. */
4883 for (rtx x = REG_NOTES (insn); x; x = XEXP (x, 1))
4884 if (REG_NOTE_KIND (x) == REG_INC)
4886 /* See if this pseudo reg was reloaded in this insn.
4887 If so, its last-reload info is still valid
4888 because it is based on this insn's reload. */
4889 for (i = 0; i < n_reloads; i++)
4890 if (rld[i].out == XEXP (x, 0))
4891 break;
4893 if (i == n_reloads)
4894 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX, NULL);
4896 #endif
4898 /* A reload reg's contents are unknown after a label. */
4899 if (LABEL_P (insn))
4900 CLEAR_HARD_REG_SET (reg_reloaded_valid);
4902 /* Don't assume a reload reg is still good after a call insn
4903 if it is a call-used reg, or if it contains a value that will
4904 be partially clobbered by the call. */
4905 else if (CALL_P (insn))
4907 AND_COMPL_HARD_REG_SET (reg_reloaded_valid, call_used_reg_set);
4908 AND_COMPL_HARD_REG_SET (reg_reloaded_valid, reg_reloaded_call_part_clobbered);
4910 /* If this is a call to a setjmp-type function, we must not
4911 reuse any reload reg contents across the call; that will
4912 just be clobbered by other uses of the register in later
4913 code, before the longjmp. */
4914 if (find_reg_note (insn, REG_SETJMP, NULL_RTX))
4915 CLEAR_HARD_REG_SET (reg_reloaded_valid);
4919 /* Clean up. */
4920 free (reg_last_reload_reg);
4921 CLEAR_REG_SET (&reg_has_output_reload);
4924 /* Discard all record of any value reloaded from X,
4925 or reloaded in X from someplace else;
4926 unless X is an output reload reg of the current insn.
4928 X may be a hard reg (the reload reg)
4929 or it may be a pseudo reg that was reloaded from.
4931 When DATA is non-NULL just mark the registers in regset
4932 to be forgotten later. */
4934 static void
4935 forget_old_reloads_1 (rtx x, const_rtx ignored ATTRIBUTE_UNUSED,
4936 void *data)
4938 unsigned int regno;
4939 unsigned int nr;
4940 regset regs = (regset) data;
4942 /* note_stores does give us subregs of hard regs,
4943 subreg_regno_offset requires a hard reg. */
4944 while (GET_CODE (x) == SUBREG)
4946 /* We ignore the subreg offset when calculating the regno,
4947 because we are using the entire underlying hard register
4948 below. */
4949 x = SUBREG_REG (x);
4952 if (!REG_P (x))
4953 return;
4955 regno = REGNO (x);
4957 if (regno >= FIRST_PSEUDO_REGISTER)
4958 nr = 1;
4959 else
4961 unsigned int i;
4963 nr = hard_regno_nregs[regno][GET_MODE (x)];
4964 /* Storing into a spilled-reg invalidates its contents.
4965 This can happen if a block-local pseudo is allocated to that reg
4966 and it wasn't spilled because this block's total need is 0.
4967 Then some insn might have an optional reload and use this reg. */
4968 if (!regs)
4969 for (i = 0; i < nr; i++)
4970 /* But don't do this if the reg actually serves as an output
4971 reload reg in the current instruction. */
4972 if (n_reloads == 0
4973 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
4975 CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
4976 spill_reg_store[regno + i] = 0;
4980 if (regs)
4981 while (nr-- > 0)
4982 SET_REGNO_REG_SET (regs, regno + nr);
4983 else
4985 /* Since value of X has changed,
4986 forget any value previously copied from it. */
4988 while (nr-- > 0)
4989 /* But don't forget a copy if this is the output reload
4990 that establishes the copy's validity. */
4991 if (n_reloads == 0
4992 || !REGNO_REG_SET_P (&reg_has_output_reload, regno + nr))
4993 reg_last_reload_reg[regno + nr] = 0;
4997 /* Forget the reloads marked in regset by previous function. */
4998 static void
4999 forget_marked_reloads (regset regs)
5001 unsigned int reg;
5002 reg_set_iterator rsi;
5003 EXECUTE_IF_SET_IN_REG_SET (regs, 0, reg, rsi)
5005 if (reg < FIRST_PSEUDO_REGISTER
5006 /* But don't do this if the reg actually serves as an output
5007 reload reg in the current instruction. */
5008 && (n_reloads == 0
5009 || ! TEST_HARD_REG_BIT (reg_is_output_reload, reg)))
5011 CLEAR_HARD_REG_BIT (reg_reloaded_valid, reg);
5012 spill_reg_store[reg] = 0;
5014 if (n_reloads == 0
5015 || !REGNO_REG_SET_P (&reg_has_output_reload, reg))
5016 reg_last_reload_reg[reg] = 0;
5020 /* The following HARD_REG_SETs indicate when each hard register is
5021 used for a reload of various parts of the current insn. */
5023 /* If reg is unavailable for all reloads. */
5024 static HARD_REG_SET reload_reg_unavailable;
5025 /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
5026 static HARD_REG_SET reload_reg_used;
5027 /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
5028 static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
5029 /* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I. */
5030 static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
5031 /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
5032 static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
5033 /* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I. */
5034 static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
5035 /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
5036 static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
5037 /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
5038 static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
5039 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
5040 static HARD_REG_SET reload_reg_used_in_op_addr;
5041 /* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
5042 static HARD_REG_SET reload_reg_used_in_op_addr_reload;
5043 /* If reg is in use for a RELOAD_FOR_INSN reload. */
5044 static HARD_REG_SET reload_reg_used_in_insn;
5045 /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
5046 static HARD_REG_SET reload_reg_used_in_other_addr;
5048 /* If reg is in use as a reload reg for any sort of reload. */
5049 static HARD_REG_SET reload_reg_used_at_all;
5051 /* If reg is use as an inherited reload. We just mark the first register
5052 in the group. */
5053 static HARD_REG_SET reload_reg_used_for_inherit;
5055 /* Records which hard regs are used in any way, either as explicit use or
5056 by being allocated to a pseudo during any point of the current insn. */
5057 static HARD_REG_SET reg_used_in_insn;
5059 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
5060 TYPE. MODE is used to indicate how many consecutive regs are
5061 actually used. */
5063 static void
5064 mark_reload_reg_in_use (unsigned int regno, int opnum, enum reload_type type,
5065 machine_mode mode)
5067 switch (type)
5069 case RELOAD_OTHER:
5070 add_to_hard_reg_set (&reload_reg_used, mode, regno);
5071 break;
5073 case RELOAD_FOR_INPUT_ADDRESS:
5074 add_to_hard_reg_set (&reload_reg_used_in_input_addr[opnum], mode, regno);
5075 break;
5077 case RELOAD_FOR_INPADDR_ADDRESS:
5078 add_to_hard_reg_set (&reload_reg_used_in_inpaddr_addr[opnum], mode, regno);
5079 break;
5081 case RELOAD_FOR_OUTPUT_ADDRESS:
5082 add_to_hard_reg_set (&reload_reg_used_in_output_addr[opnum], mode, regno);
5083 break;
5085 case RELOAD_FOR_OUTADDR_ADDRESS:
5086 add_to_hard_reg_set (&reload_reg_used_in_outaddr_addr[opnum], mode, regno);
5087 break;
5089 case RELOAD_FOR_OPERAND_ADDRESS:
5090 add_to_hard_reg_set (&reload_reg_used_in_op_addr, mode, regno);
5091 break;
5093 case RELOAD_FOR_OPADDR_ADDR:
5094 add_to_hard_reg_set (&reload_reg_used_in_op_addr_reload, mode, regno);
5095 break;
5097 case RELOAD_FOR_OTHER_ADDRESS:
5098 add_to_hard_reg_set (&reload_reg_used_in_other_addr, mode, regno);
5099 break;
5101 case RELOAD_FOR_INPUT:
5102 add_to_hard_reg_set (&reload_reg_used_in_input[opnum], mode, regno);
5103 break;
5105 case RELOAD_FOR_OUTPUT:
5106 add_to_hard_reg_set (&reload_reg_used_in_output[opnum], mode, regno);
5107 break;
5109 case RELOAD_FOR_INSN:
5110 add_to_hard_reg_set (&reload_reg_used_in_insn, mode, regno);
5111 break;
5114 add_to_hard_reg_set (&reload_reg_used_at_all, mode, regno);
5117 /* Similarly, but show REGNO is no longer in use for a reload. */
5119 static void
5120 clear_reload_reg_in_use (unsigned int regno, int opnum,
5121 enum reload_type type, machine_mode mode)
5123 unsigned int nregs = hard_regno_nregs[regno][mode];
5124 unsigned int start_regno, end_regno, r;
5125 int i;
5126 /* A complication is that for some reload types, inheritance might
5127 allow multiple reloads of the same types to share a reload register.
5128 We set check_opnum if we have to check only reloads with the same
5129 operand number, and check_any if we have to check all reloads. */
5130 int check_opnum = 0;
5131 int check_any = 0;
5132 HARD_REG_SET *used_in_set;
5134 switch (type)
5136 case RELOAD_OTHER:
5137 used_in_set = &reload_reg_used;
5138 break;
5140 case RELOAD_FOR_INPUT_ADDRESS:
5141 used_in_set = &reload_reg_used_in_input_addr[opnum];
5142 break;
5144 case RELOAD_FOR_INPADDR_ADDRESS:
5145 check_opnum = 1;
5146 used_in_set = &reload_reg_used_in_inpaddr_addr[opnum];
5147 break;
5149 case RELOAD_FOR_OUTPUT_ADDRESS:
5150 used_in_set = &reload_reg_used_in_output_addr[opnum];
5151 break;
5153 case RELOAD_FOR_OUTADDR_ADDRESS:
5154 check_opnum = 1;
5155 used_in_set = &reload_reg_used_in_outaddr_addr[opnum];
5156 break;
5158 case RELOAD_FOR_OPERAND_ADDRESS:
5159 used_in_set = &reload_reg_used_in_op_addr;
5160 break;
5162 case RELOAD_FOR_OPADDR_ADDR:
5163 check_any = 1;
5164 used_in_set = &reload_reg_used_in_op_addr_reload;
5165 break;
5167 case RELOAD_FOR_OTHER_ADDRESS:
5168 used_in_set = &reload_reg_used_in_other_addr;
5169 check_any = 1;
5170 break;
5172 case RELOAD_FOR_INPUT:
5173 used_in_set = &reload_reg_used_in_input[opnum];
5174 break;
5176 case RELOAD_FOR_OUTPUT:
5177 used_in_set = &reload_reg_used_in_output[opnum];
5178 break;
5180 case RELOAD_FOR_INSN:
5181 used_in_set = &reload_reg_used_in_insn;
5182 break;
5183 default:
5184 gcc_unreachable ();
5186 /* We resolve conflicts with remaining reloads of the same type by
5187 excluding the intervals of reload registers by them from the
5188 interval of freed reload registers. Since we only keep track of
5189 one set of interval bounds, we might have to exclude somewhat
5190 more than what would be necessary if we used a HARD_REG_SET here.
5191 But this should only happen very infrequently, so there should
5192 be no reason to worry about it. */
5194 start_regno = regno;
5195 end_regno = regno + nregs;
5196 if (check_opnum || check_any)
5198 for (i = n_reloads - 1; i >= 0; i--)
5200 if (rld[i].when_needed == type
5201 && (check_any || rld[i].opnum == opnum)
5202 && rld[i].reg_rtx)
5204 unsigned int conflict_start = true_regnum (rld[i].reg_rtx);
5205 unsigned int conflict_end
5206 = end_hard_regno (rld[i].mode, conflict_start);
5208 /* If there is an overlap with the first to-be-freed register,
5209 adjust the interval start. */
5210 if (conflict_start <= start_regno && conflict_end > start_regno)
5211 start_regno = conflict_end;
5212 /* Otherwise, if there is a conflict with one of the other
5213 to-be-freed registers, adjust the interval end. */
5214 if (conflict_start > start_regno && conflict_start < end_regno)
5215 end_regno = conflict_start;
5220 for (r = start_regno; r < end_regno; r++)
5221 CLEAR_HARD_REG_BIT (*used_in_set, r);
5224 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
5225 specified by OPNUM and TYPE. */
5227 static int
5228 reload_reg_free_p (unsigned int regno, int opnum, enum reload_type type)
5230 int i;
5232 /* In use for a RELOAD_OTHER means it's not available for anything. */
5233 if (TEST_HARD_REG_BIT (reload_reg_used, regno)
5234 || TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
5235 return 0;
5237 switch (type)
5239 case RELOAD_OTHER:
5240 /* In use for anything means we can't use it for RELOAD_OTHER. */
5241 if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
5242 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5243 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
5244 || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
5245 return 0;
5247 for (i = 0; i < reload_n_operands; i++)
5248 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5249 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
5250 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5251 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5252 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
5253 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5254 return 0;
5256 return 1;
5258 case RELOAD_FOR_INPUT:
5259 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5260 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
5261 return 0;
5263 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
5264 return 0;
5266 /* If it is used for some other input, can't use it. */
5267 for (i = 0; i < reload_n_operands; i++)
5268 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5269 return 0;
5271 /* If it is used in a later operand's address, can't use it. */
5272 for (i = opnum + 1; i < reload_n_operands; i++)
5273 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5274 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
5275 return 0;
5277 return 1;
5279 case RELOAD_FOR_INPUT_ADDRESS:
5280 /* Can't use a register if it is used for an input address for this
5281 operand or used as an input in an earlier one. */
5282 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
5283 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
5284 return 0;
5286 for (i = 0; i < opnum; i++)
5287 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5288 return 0;
5290 return 1;
5292 case RELOAD_FOR_INPADDR_ADDRESS:
5293 /* Can't use a register if it is used for an input address
5294 for this operand or used as an input in an earlier
5295 one. */
5296 if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
5297 return 0;
5299 for (i = 0; i < opnum; i++)
5300 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5301 return 0;
5303 return 1;
5305 case RELOAD_FOR_OUTPUT_ADDRESS:
5306 /* Can't use a register if it is used for an output address for this
5307 operand or used as an output in this or a later operand. Note
5308 that multiple output operands are emitted in reverse order, so
5309 the conflicting ones are those with lower indices. */
5310 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
5311 return 0;
5313 for (i = 0; i <= opnum; i++)
5314 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5315 return 0;
5317 return 1;
5319 case RELOAD_FOR_OUTADDR_ADDRESS:
5320 /* Can't use a register if it is used for an output address
5321 for this operand or used as an output in this or a
5322 later operand. Note that multiple output operands are
5323 emitted in reverse order, so the conflicting ones are
5324 those with lower indices. */
5325 if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
5326 return 0;
5328 for (i = 0; i <= opnum; i++)
5329 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5330 return 0;
5332 return 1;
5334 case RELOAD_FOR_OPERAND_ADDRESS:
5335 for (i = 0; i < reload_n_operands; i++)
5336 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5337 return 0;
5339 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5340 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
5342 case RELOAD_FOR_OPADDR_ADDR:
5343 for (i = 0; i < reload_n_operands; i++)
5344 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5345 return 0;
5347 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
5349 case RELOAD_FOR_OUTPUT:
5350 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
5351 outputs, or an operand address for this or an earlier output.
5352 Note that multiple output operands are emitted in reverse order,
5353 so the conflicting ones are those with higher indices. */
5354 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
5355 return 0;
5357 for (i = 0; i < reload_n_operands; i++)
5358 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5359 return 0;
5361 for (i = opnum; i < reload_n_operands; i++)
5362 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5363 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
5364 return 0;
5366 return 1;
5368 case RELOAD_FOR_INSN:
5369 for (i = 0; i < reload_n_operands; i++)
5370 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
5371 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5372 return 0;
5374 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5375 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
5377 case RELOAD_FOR_OTHER_ADDRESS:
5378 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
5380 default:
5381 gcc_unreachable ();
5385 /* Return 1 if the value in reload reg REGNO, as used by the reload with
5386 the number RELOADNUM, is still available in REGNO at the end of the insn.
5388 We can assume that the reload reg was already tested for availability
5389 at the time it is needed, and we should not check this again,
5390 in case the reg has already been marked in use. */
5392 static int
5393 reload_reg_reaches_end_p (unsigned int regno, int reloadnum)
5395 int opnum = rld[reloadnum].opnum;
5396 enum reload_type type = rld[reloadnum].when_needed;
5397 int i;
5399 /* See if there is a reload with the same type for this operand, using
5400 the same register. This case is not handled by the code below. */
5401 for (i = reloadnum + 1; i < n_reloads; i++)
5403 rtx reg;
5404 int nregs;
5406 if (rld[i].opnum != opnum || rld[i].when_needed != type)
5407 continue;
5408 reg = rld[i].reg_rtx;
5409 if (reg == NULL_RTX)
5410 continue;
5411 nregs = hard_regno_nregs[REGNO (reg)][GET_MODE (reg)];
5412 if (regno >= REGNO (reg) && regno < REGNO (reg) + nregs)
5413 return 0;
5416 switch (type)
5418 case RELOAD_OTHER:
5419 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
5420 its value must reach the end. */
5421 return 1;
5423 /* If this use is for part of the insn,
5424 its value reaches if no subsequent part uses the same register.
5425 Just like the above function, don't try to do this with lots
5426 of fallthroughs. */
5428 case RELOAD_FOR_OTHER_ADDRESS:
5429 /* Here we check for everything else, since these don't conflict
5430 with anything else and everything comes later. */
5432 for (i = 0; i < reload_n_operands; i++)
5433 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5434 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5435 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
5436 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5437 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
5438 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5439 return 0;
5441 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5442 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
5443 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5444 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
5446 case RELOAD_FOR_INPUT_ADDRESS:
5447 case RELOAD_FOR_INPADDR_ADDRESS:
5448 /* Similar, except that we check only for this and subsequent inputs
5449 and the address of only subsequent inputs and we do not need
5450 to check for RELOAD_OTHER objects since they are known not to
5451 conflict. */
5453 for (i = opnum; i < reload_n_operands; i++)
5454 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5455 return 0;
5457 /* Reload register of reload with type RELOAD_FOR_INPADDR_ADDRESS
5458 could be killed if the register is also used by reload with type
5459 RELOAD_FOR_INPUT_ADDRESS, so check it. */
5460 if (type == RELOAD_FOR_INPADDR_ADDRESS
5461 && TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno))
5462 return 0;
5464 for (i = opnum + 1; i < reload_n_operands; i++)
5465 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5466 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
5467 return 0;
5469 for (i = 0; i < reload_n_operands; i++)
5470 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5471 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5472 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5473 return 0;
5475 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
5476 return 0;
5478 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5479 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5480 && !TEST_HARD_REG_BIT (reload_reg_used, regno));
5482 case RELOAD_FOR_INPUT:
5483 /* Similar to input address, except we start at the next operand for
5484 both input and input address and we do not check for
5485 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
5486 would conflict. */
5488 for (i = opnum + 1; i < reload_n_operands; i++)
5489 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5490 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
5491 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5492 return 0;
5494 /* ... fall through ... */
5496 case RELOAD_FOR_OPERAND_ADDRESS:
5497 /* Check outputs and their addresses. */
5499 for (i = 0; i < reload_n_operands; i++)
5500 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5501 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5502 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5503 return 0;
5505 return (!TEST_HARD_REG_BIT (reload_reg_used, regno));
5507 case RELOAD_FOR_OPADDR_ADDR:
5508 for (i = 0; i < reload_n_operands; i++)
5509 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5510 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5511 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5512 return 0;
5514 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5515 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5516 && !TEST_HARD_REG_BIT (reload_reg_used, regno));
5518 case RELOAD_FOR_INSN:
5519 /* These conflict with other outputs with RELOAD_OTHER. So
5520 we need only check for output addresses. */
5522 opnum = reload_n_operands;
5524 /* ... fall through ... */
5526 case RELOAD_FOR_OUTPUT:
5527 case RELOAD_FOR_OUTPUT_ADDRESS:
5528 case RELOAD_FOR_OUTADDR_ADDRESS:
5529 /* We already know these can't conflict with a later output. So the
5530 only thing to check are later output addresses.
5531 Note that multiple output operands are emitted in reverse order,
5532 so the conflicting ones are those with lower indices. */
5533 for (i = 0; i < opnum; i++)
5534 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5535 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
5536 return 0;
5538 /* Reload register of reload with type RELOAD_FOR_OUTADDR_ADDRESS
5539 could be killed if the register is also used by reload with type
5540 RELOAD_FOR_OUTPUT_ADDRESS, so check it. */
5541 if (type == RELOAD_FOR_OUTADDR_ADDRESS
5542 && TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
5543 return 0;
5545 return 1;
5547 default:
5548 gcc_unreachable ();
5552 /* Like reload_reg_reaches_end_p, but check that the condition holds for
5553 every register in REG. */
5555 static bool
5556 reload_reg_rtx_reaches_end_p (rtx reg, int reloadnum)
5558 unsigned int i;
5560 for (i = REGNO (reg); i < END_REGNO (reg); i++)
5561 if (!reload_reg_reaches_end_p (i, reloadnum))
5562 return false;
5563 return true;
5567 /* Returns whether R1 and R2 are uniquely chained: the value of one
5568 is used by the other, and that value is not used by any other
5569 reload for this insn. This is used to partially undo the decision
5570 made in find_reloads when in the case of multiple
5571 RELOAD_FOR_OPERAND_ADDRESS reloads it converts all
5572 RELOAD_FOR_OPADDR_ADDR reloads into RELOAD_FOR_OPERAND_ADDRESS
5573 reloads. This code tries to avoid the conflict created by that
5574 change. It might be cleaner to explicitly keep track of which
5575 RELOAD_FOR_OPADDR_ADDR reload is associated with which
5576 RELOAD_FOR_OPERAND_ADDRESS reload, rather than to try to detect
5577 this after the fact. */
5578 static bool
5579 reloads_unique_chain_p (int r1, int r2)
5581 int i;
5583 /* We only check input reloads. */
5584 if (! rld[r1].in || ! rld[r2].in)
5585 return false;
5587 /* Avoid anything with output reloads. */
5588 if (rld[r1].out || rld[r2].out)
5589 return false;
5591 /* "chained" means one reload is a component of the other reload,
5592 not the same as the other reload. */
5593 if (rld[r1].opnum != rld[r2].opnum
5594 || rtx_equal_p (rld[r1].in, rld[r2].in)
5595 || rld[r1].optional || rld[r2].optional
5596 || ! (reg_mentioned_p (rld[r1].in, rld[r2].in)
5597 || reg_mentioned_p (rld[r2].in, rld[r1].in)))
5598 return false;
5600 /* The following loop assumes that r1 is the reload that feeds r2. */
5601 if (r1 > r2)
5602 std::swap (r1, r2);
5604 for (i = 0; i < n_reloads; i ++)
5605 /* Look for input reloads that aren't our two */
5606 if (i != r1 && i != r2 && rld[i].in)
5608 /* If our reload is mentioned at all, it isn't a simple chain. */
5609 if (reg_mentioned_p (rld[r1].in, rld[i].in))
5610 return false;
5612 return true;
5615 /* The recursive function change all occurrences of WHAT in *WHERE
5616 to REPL. */
5617 static void
5618 substitute (rtx *where, const_rtx what, rtx repl)
5620 const char *fmt;
5621 int i;
5622 enum rtx_code code;
5624 if (*where == 0)
5625 return;
5627 if (*where == what || rtx_equal_p (*where, what))
5629 /* Record the location of the changed rtx. */
5630 substitute_stack.safe_push (where);
5631 *where = repl;
5632 return;
5635 code = GET_CODE (*where);
5636 fmt = GET_RTX_FORMAT (code);
5637 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5639 if (fmt[i] == 'E')
5641 int j;
5643 for (j = XVECLEN (*where, i) - 1; j >= 0; j--)
5644 substitute (&XVECEXP (*where, i, j), what, repl);
5646 else if (fmt[i] == 'e')
5647 substitute (&XEXP (*where, i), what, repl);
5651 /* The function returns TRUE if chain of reload R1 and R2 (in any
5652 order) can be evaluated without usage of intermediate register for
5653 the reload containing another reload. It is important to see
5654 gen_reload to understand what the function is trying to do. As an
5655 example, let us have reload chain
5657 r2: const
5658 r1: <something> + const
5660 and reload R2 got reload reg HR. The function returns true if
5661 there is a correct insn HR = HR + <something>. Otherwise,
5662 gen_reload will use intermediate register (and this is the reload
5663 reg for R1) to reload <something>.
5665 We need this function to find a conflict for chain reloads. In our
5666 example, if HR = HR + <something> is incorrect insn, then we cannot
5667 use HR as a reload register for R2. If we do use it then we get a
5668 wrong code:
5670 HR = const
5671 HR = <something>
5672 HR = HR + HR
5675 static bool
5676 gen_reload_chain_without_interm_reg_p (int r1, int r2)
5678 /* Assume other cases in gen_reload are not possible for
5679 chain reloads or do need an intermediate hard registers. */
5680 bool result = true;
5681 int regno, code;
5682 rtx out, in;
5683 rtx_insn *insn;
5684 rtx_insn *last = get_last_insn ();
5686 /* Make r2 a component of r1. */
5687 if (reg_mentioned_p (rld[r1].in, rld[r2].in))
5688 std::swap (r1, r2);
5690 gcc_assert (reg_mentioned_p (rld[r2].in, rld[r1].in));
5691 regno = rld[r1].regno >= 0 ? rld[r1].regno : rld[r2].regno;
5692 gcc_assert (regno >= 0);
5693 out = gen_rtx_REG (rld[r1].mode, regno);
5694 in = rld[r1].in;
5695 substitute (&in, rld[r2].in, gen_rtx_REG (rld[r2].mode, regno));
5697 /* If IN is a paradoxical SUBREG, remove it and try to put the
5698 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
5699 strip_paradoxical_subreg (&in, &out);
5701 if (GET_CODE (in) == PLUS
5702 && (REG_P (XEXP (in, 0))
5703 || GET_CODE (XEXP (in, 0)) == SUBREG
5704 || MEM_P (XEXP (in, 0)))
5705 && (REG_P (XEXP (in, 1))
5706 || GET_CODE (XEXP (in, 1)) == SUBREG
5707 || CONSTANT_P (XEXP (in, 1))
5708 || MEM_P (XEXP (in, 1))))
5710 insn = emit_insn (gen_rtx_SET (out, in));
5711 code = recog_memoized (insn);
5712 result = false;
5714 if (code >= 0)
5716 extract_insn (insn);
5717 /* We want constrain operands to treat this insn strictly in
5718 its validity determination, i.e., the way it would after
5719 reload has completed. */
5720 result = constrain_operands (1, get_enabled_alternatives (insn));
5723 delete_insns_since (last);
5726 /* Restore the original value at each changed address within R1. */
5727 while (!substitute_stack.is_empty ())
5729 rtx *where = substitute_stack.pop ();
5730 *where = rld[r2].in;
5733 return result;
5736 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
5737 Return 0 otherwise.
5739 This function uses the same algorithm as reload_reg_free_p above. */
5741 static int
5742 reloads_conflict (int r1, int r2)
5744 enum reload_type r1_type = rld[r1].when_needed;
5745 enum reload_type r2_type = rld[r2].when_needed;
5746 int r1_opnum = rld[r1].opnum;
5747 int r2_opnum = rld[r2].opnum;
5749 /* RELOAD_OTHER conflicts with everything. */
5750 if (r2_type == RELOAD_OTHER)
5751 return 1;
5753 /* Otherwise, check conflicts differently for each type. */
5755 switch (r1_type)
5757 case RELOAD_FOR_INPUT:
5758 return (r2_type == RELOAD_FOR_INSN
5759 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
5760 || r2_type == RELOAD_FOR_OPADDR_ADDR
5761 || r2_type == RELOAD_FOR_INPUT
5762 || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
5763 || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
5764 && r2_opnum > r1_opnum));
5766 case RELOAD_FOR_INPUT_ADDRESS:
5767 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
5768 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5770 case RELOAD_FOR_INPADDR_ADDRESS:
5771 return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
5772 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5774 case RELOAD_FOR_OUTPUT_ADDRESS:
5775 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
5776 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
5778 case RELOAD_FOR_OUTADDR_ADDRESS:
5779 return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
5780 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
5782 case RELOAD_FOR_OPERAND_ADDRESS:
5783 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
5784 || (r2_type == RELOAD_FOR_OPERAND_ADDRESS
5785 && (!reloads_unique_chain_p (r1, r2)
5786 || !gen_reload_chain_without_interm_reg_p (r1, r2))));
5788 case RELOAD_FOR_OPADDR_ADDR:
5789 return (r2_type == RELOAD_FOR_INPUT
5790 || r2_type == RELOAD_FOR_OPADDR_ADDR);
5792 case RELOAD_FOR_OUTPUT:
5793 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
5794 || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
5795 || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
5796 && r2_opnum >= r1_opnum));
5798 case RELOAD_FOR_INSN:
5799 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
5800 || r2_type == RELOAD_FOR_INSN
5801 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
5803 case RELOAD_FOR_OTHER_ADDRESS:
5804 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
5806 case RELOAD_OTHER:
5807 return 1;
5809 default:
5810 gcc_unreachable ();
5814 /* Indexed by reload number, 1 if incoming value
5815 inherited from previous insns. */
5816 static char reload_inherited[MAX_RELOADS];
5818 /* For an inherited reload, this is the insn the reload was inherited from,
5819 if we know it. Otherwise, this is 0. */
5820 static rtx_insn *reload_inheritance_insn[MAX_RELOADS];
5822 /* If nonzero, this is a place to get the value of the reload,
5823 rather than using reload_in. */
5824 static rtx reload_override_in[MAX_RELOADS];
5826 /* For each reload, the hard register number of the register used,
5827 or -1 if we did not need a register for this reload. */
5828 static int reload_spill_index[MAX_RELOADS];
5830 /* Index X is the value of rld[X].reg_rtx, adjusted for the input mode. */
5831 static rtx reload_reg_rtx_for_input[MAX_RELOADS];
5833 /* Index X is the value of rld[X].reg_rtx, adjusted for the output mode. */
5834 static rtx reload_reg_rtx_for_output[MAX_RELOADS];
5836 /* Subroutine of free_for_value_p, used to check a single register.
5837 START_REGNO is the starting regno of the full reload register
5838 (possibly comprising multiple hard registers) that we are considering. */
5840 static int
5841 reload_reg_free_for_value_p (int start_regno, int regno, int opnum,
5842 enum reload_type type, rtx value, rtx out,
5843 int reloadnum, int ignore_address_reloads)
5845 int time1;
5846 /* Set if we see an input reload that must not share its reload register
5847 with any new earlyclobber, but might otherwise share the reload
5848 register with an output or input-output reload. */
5849 int check_earlyclobber = 0;
5850 int i;
5851 int copy = 0;
5853 if (TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
5854 return 0;
5856 if (out == const0_rtx)
5858 copy = 1;
5859 out = NULL_RTX;
5862 /* We use some pseudo 'time' value to check if the lifetimes of the
5863 new register use would overlap with the one of a previous reload
5864 that is not read-only or uses a different value.
5865 The 'time' used doesn't have to be linear in any shape or form, just
5866 monotonic.
5867 Some reload types use different 'buckets' for each operand.
5868 So there are MAX_RECOG_OPERANDS different time values for each
5869 such reload type.
5870 We compute TIME1 as the time when the register for the prospective
5871 new reload ceases to be live, and TIME2 for each existing
5872 reload as the time when that the reload register of that reload
5873 becomes live.
5874 Where there is little to be gained by exact lifetime calculations,
5875 we just make conservative assumptions, i.e. a longer lifetime;
5876 this is done in the 'default:' cases. */
5877 switch (type)
5879 case RELOAD_FOR_OTHER_ADDRESS:
5880 /* RELOAD_FOR_OTHER_ADDRESS conflicts with RELOAD_OTHER reloads. */
5881 time1 = copy ? 0 : 1;
5882 break;
5883 case RELOAD_OTHER:
5884 time1 = copy ? 1 : MAX_RECOG_OPERANDS * 5 + 5;
5885 break;
5886 /* For each input, we may have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
5887 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT. By adding 0 / 1 / 2 ,
5888 respectively, to the time values for these, we get distinct time
5889 values. To get distinct time values for each operand, we have to
5890 multiply opnum by at least three. We round that up to four because
5891 multiply by four is often cheaper. */
5892 case RELOAD_FOR_INPADDR_ADDRESS:
5893 time1 = opnum * 4 + 2;
5894 break;
5895 case RELOAD_FOR_INPUT_ADDRESS:
5896 time1 = opnum * 4 + 3;
5897 break;
5898 case RELOAD_FOR_INPUT:
5899 /* All RELOAD_FOR_INPUT reloads remain live till the instruction
5900 executes (inclusive). */
5901 time1 = copy ? opnum * 4 + 4 : MAX_RECOG_OPERANDS * 4 + 3;
5902 break;
5903 case RELOAD_FOR_OPADDR_ADDR:
5904 /* opnum * 4 + 4
5905 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
5906 time1 = MAX_RECOG_OPERANDS * 4 + 1;
5907 break;
5908 case RELOAD_FOR_OPERAND_ADDRESS:
5909 /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
5910 is executed. */
5911 time1 = copy ? MAX_RECOG_OPERANDS * 4 + 2 : MAX_RECOG_OPERANDS * 4 + 3;
5912 break;
5913 case RELOAD_FOR_OUTADDR_ADDRESS:
5914 time1 = MAX_RECOG_OPERANDS * 4 + 4 + opnum;
5915 break;
5916 case RELOAD_FOR_OUTPUT_ADDRESS:
5917 time1 = MAX_RECOG_OPERANDS * 4 + 5 + opnum;
5918 break;
5919 default:
5920 time1 = MAX_RECOG_OPERANDS * 5 + 5;
5923 for (i = 0; i < n_reloads; i++)
5925 rtx reg = rld[i].reg_rtx;
5926 if (reg && REG_P (reg)
5927 && ((unsigned) regno - true_regnum (reg)
5928 <= hard_regno_nregs[REGNO (reg)][GET_MODE (reg)] - (unsigned) 1)
5929 && i != reloadnum)
5931 rtx other_input = rld[i].in;
5933 /* If the other reload loads the same input value, that
5934 will not cause a conflict only if it's loading it into
5935 the same register. */
5936 if (true_regnum (reg) != start_regno)
5937 other_input = NULL_RTX;
5938 if (! other_input || ! rtx_equal_p (other_input, value)
5939 || rld[i].out || out)
5941 int time2;
5942 switch (rld[i].when_needed)
5944 case RELOAD_FOR_OTHER_ADDRESS:
5945 time2 = 0;
5946 break;
5947 case RELOAD_FOR_INPADDR_ADDRESS:
5948 /* find_reloads makes sure that a
5949 RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is only used
5950 by at most one - the first -
5951 RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS . If the
5952 address reload is inherited, the address address reload
5953 goes away, so we can ignore this conflict. */
5954 if (type == RELOAD_FOR_INPUT_ADDRESS && reloadnum == i + 1
5955 && ignore_address_reloads
5956 /* Unless the RELOAD_FOR_INPUT is an auto_inc expression.
5957 Then the address address is still needed to store
5958 back the new address. */
5959 && ! rld[reloadnum].out)
5960 continue;
5961 /* Likewise, if a RELOAD_FOR_INPUT can inherit a value, its
5962 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS
5963 reloads go away. */
5964 if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
5965 && ignore_address_reloads
5966 /* Unless we are reloading an auto_inc expression. */
5967 && ! rld[reloadnum].out)
5968 continue;
5969 time2 = rld[i].opnum * 4 + 2;
5970 break;
5971 case RELOAD_FOR_INPUT_ADDRESS:
5972 if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
5973 && ignore_address_reloads
5974 && ! rld[reloadnum].out)
5975 continue;
5976 time2 = rld[i].opnum * 4 + 3;
5977 break;
5978 case RELOAD_FOR_INPUT:
5979 time2 = rld[i].opnum * 4 + 4;
5980 check_earlyclobber = 1;
5981 break;
5982 /* rld[i].opnum * 4 + 4 <= (MAX_RECOG_OPERAND - 1) * 4 + 4
5983 == MAX_RECOG_OPERAND * 4 */
5984 case RELOAD_FOR_OPADDR_ADDR:
5985 if (type == RELOAD_FOR_OPERAND_ADDRESS && reloadnum == i + 1
5986 && ignore_address_reloads
5987 && ! rld[reloadnum].out)
5988 continue;
5989 time2 = MAX_RECOG_OPERANDS * 4 + 1;
5990 break;
5991 case RELOAD_FOR_OPERAND_ADDRESS:
5992 time2 = MAX_RECOG_OPERANDS * 4 + 2;
5993 check_earlyclobber = 1;
5994 break;
5995 case RELOAD_FOR_INSN:
5996 time2 = MAX_RECOG_OPERANDS * 4 + 3;
5997 break;
5998 case RELOAD_FOR_OUTPUT:
5999 /* All RELOAD_FOR_OUTPUT reloads become live just after the
6000 instruction is executed. */
6001 time2 = MAX_RECOG_OPERANDS * 4 + 4;
6002 break;
6003 /* The first RELOAD_FOR_OUTADDR_ADDRESS reload conflicts with
6004 the RELOAD_FOR_OUTPUT reloads, so assign it the same time
6005 value. */
6006 case RELOAD_FOR_OUTADDR_ADDRESS:
6007 if (type == RELOAD_FOR_OUTPUT_ADDRESS && reloadnum == i + 1
6008 && ignore_address_reloads
6009 && ! rld[reloadnum].out)
6010 continue;
6011 time2 = MAX_RECOG_OPERANDS * 4 + 4 + rld[i].opnum;
6012 break;
6013 case RELOAD_FOR_OUTPUT_ADDRESS:
6014 time2 = MAX_RECOG_OPERANDS * 4 + 5 + rld[i].opnum;
6015 break;
6016 case RELOAD_OTHER:
6017 /* If there is no conflict in the input part, handle this
6018 like an output reload. */
6019 if (! rld[i].in || rtx_equal_p (other_input, value))
6021 time2 = MAX_RECOG_OPERANDS * 4 + 4;
6022 /* Earlyclobbered outputs must conflict with inputs. */
6023 if (earlyclobber_operand_p (rld[i].out))
6024 time2 = MAX_RECOG_OPERANDS * 4 + 3;
6026 break;
6028 time2 = 1;
6029 /* RELOAD_OTHER might be live beyond instruction execution,
6030 but this is not obvious when we set time2 = 1. So check
6031 here if there might be a problem with the new reload
6032 clobbering the register used by the RELOAD_OTHER. */
6033 if (out)
6034 return 0;
6035 break;
6036 default:
6037 return 0;
6039 if ((time1 >= time2
6040 && (! rld[i].in || rld[i].out
6041 || ! rtx_equal_p (other_input, value)))
6042 || (out && rld[reloadnum].out_reg
6043 && time2 >= MAX_RECOG_OPERANDS * 4 + 3))
6044 return 0;
6049 /* Earlyclobbered outputs must conflict with inputs. */
6050 if (check_earlyclobber && out && earlyclobber_operand_p (out))
6051 return 0;
6053 return 1;
6056 /* Return 1 if the value in reload reg REGNO, as used by a reload
6057 needed for the part of the insn specified by OPNUM and TYPE,
6058 may be used to load VALUE into it.
6060 MODE is the mode in which the register is used, this is needed to
6061 determine how many hard regs to test.
6063 Other read-only reloads with the same value do not conflict
6064 unless OUT is nonzero and these other reloads have to live while
6065 output reloads live.
6066 If OUT is CONST0_RTX, this is a special case: it means that the
6067 test should not be for using register REGNO as reload register, but
6068 for copying from register REGNO into the reload register.
6070 RELOADNUM is the number of the reload we want to load this value for;
6071 a reload does not conflict with itself.
6073 When IGNORE_ADDRESS_RELOADS is set, we can not have conflicts with
6074 reloads that load an address for the very reload we are considering.
6076 The caller has to make sure that there is no conflict with the return
6077 register. */
6079 static int
6080 free_for_value_p (int regno, machine_mode mode, int opnum,
6081 enum reload_type type, rtx value, rtx out, int reloadnum,
6082 int ignore_address_reloads)
6084 int nregs = hard_regno_nregs[regno][mode];
6085 while (nregs-- > 0)
6086 if (! reload_reg_free_for_value_p (regno, regno + nregs, opnum, type,
6087 value, out, reloadnum,
6088 ignore_address_reloads))
6089 return 0;
6090 return 1;
6093 /* Return nonzero if the rtx X is invariant over the current function. */
6094 /* ??? Actually, the places where we use this expect exactly what is
6095 tested here, and not everything that is function invariant. In
6096 particular, the frame pointer and arg pointer are special cased;
6097 pic_offset_table_rtx is not, and we must not spill these things to
6098 memory. */
6101 function_invariant_p (const_rtx x)
6103 if (CONSTANT_P (x))
6104 return 1;
6105 if (x == frame_pointer_rtx || x == arg_pointer_rtx)
6106 return 1;
6107 if (GET_CODE (x) == PLUS
6108 && (XEXP (x, 0) == frame_pointer_rtx || XEXP (x, 0) == arg_pointer_rtx)
6109 && GET_CODE (XEXP (x, 1)) == CONST_INT)
6110 return 1;
6111 return 0;
6114 /* Determine whether the reload reg X overlaps any rtx'es used for
6115 overriding inheritance. Return nonzero if so. */
6117 static int
6118 conflicts_with_override (rtx x)
6120 int i;
6121 for (i = 0; i < n_reloads; i++)
6122 if (reload_override_in[i]
6123 && reg_overlap_mentioned_p (x, reload_override_in[i]))
6124 return 1;
6125 return 0;
6128 /* Give an error message saying we failed to find a reload for INSN,
6129 and clear out reload R. */
6130 static void
6131 failed_reload (rtx_insn *insn, int r)
6133 if (asm_noperands (PATTERN (insn)) < 0)
6134 /* It's the compiler's fault. */
6135 fatal_insn ("could not find a spill register", insn);
6137 /* It's the user's fault; the operand's mode and constraint
6138 don't match. Disable this reload so we don't crash in final. */
6139 error_for_asm (insn,
6140 "%<asm%> operand constraint incompatible with operand size");
6141 rld[r].in = 0;
6142 rld[r].out = 0;
6143 rld[r].reg_rtx = 0;
6144 rld[r].optional = 1;
6145 rld[r].secondary_p = 1;
6148 /* I is the index in SPILL_REG_RTX of the reload register we are to allocate
6149 for reload R. If it's valid, get an rtx for it. Return nonzero if
6150 successful. */
6151 static int
6152 set_reload_reg (int i, int r)
6154 /* regno is 'set but not used' if HARD_REGNO_MODE_OK doesn't use its first
6155 parameter. */
6156 int regno ATTRIBUTE_UNUSED;
6157 rtx reg = spill_reg_rtx[i];
6159 if (reg == 0 || GET_MODE (reg) != rld[r].mode)
6160 spill_reg_rtx[i] = reg
6161 = gen_rtx_REG (rld[r].mode, spill_regs[i]);
6163 regno = true_regnum (reg);
6165 /* Detect when the reload reg can't hold the reload mode.
6166 This used to be one `if', but Sequent compiler can't handle that. */
6167 if (HARD_REGNO_MODE_OK (regno, rld[r].mode))
6169 machine_mode test_mode = VOIDmode;
6170 if (rld[r].in)
6171 test_mode = GET_MODE (rld[r].in);
6172 /* If rld[r].in has VOIDmode, it means we will load it
6173 in whatever mode the reload reg has: to wit, rld[r].mode.
6174 We have already tested that for validity. */
6175 /* Aside from that, we need to test that the expressions
6176 to reload from or into have modes which are valid for this
6177 reload register. Otherwise the reload insns would be invalid. */
6178 if (! (rld[r].in != 0 && test_mode != VOIDmode
6179 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
6180 if (! (rld[r].out != 0
6181 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (rld[r].out))))
6183 /* The reg is OK. */
6184 last_spill_reg = i;
6186 /* Mark as in use for this insn the reload regs we use
6187 for this. */
6188 mark_reload_reg_in_use (spill_regs[i], rld[r].opnum,
6189 rld[r].when_needed, rld[r].mode);
6191 rld[r].reg_rtx = reg;
6192 reload_spill_index[r] = spill_regs[i];
6193 return 1;
6196 return 0;
6199 /* Find a spill register to use as a reload register for reload R.
6200 LAST_RELOAD is nonzero if this is the last reload for the insn being
6201 processed.
6203 Set rld[R].reg_rtx to the register allocated.
6205 We return 1 if successful, or 0 if we couldn't find a spill reg and
6206 we didn't change anything. */
6208 static int
6209 allocate_reload_reg (struct insn_chain *chain ATTRIBUTE_UNUSED, int r,
6210 int last_reload)
6212 int i, pass, count;
6214 /* If we put this reload ahead, thinking it is a group,
6215 then insist on finding a group. Otherwise we can grab a
6216 reg that some other reload needs.
6217 (That can happen when we have a 68000 DATA_OR_FP_REG
6218 which is a group of data regs or one fp reg.)
6219 We need not be so restrictive if there are no more reloads
6220 for this insn.
6222 ??? Really it would be nicer to have smarter handling
6223 for that kind of reg class, where a problem like this is normal.
6224 Perhaps those classes should be avoided for reloading
6225 by use of more alternatives. */
6227 int force_group = rld[r].nregs > 1 && ! last_reload;
6229 /* If we want a single register and haven't yet found one,
6230 take any reg in the right class and not in use.
6231 If we want a consecutive group, here is where we look for it.
6233 We use three passes so we can first look for reload regs to
6234 reuse, which are already in use for other reloads in this insn,
6235 and only then use additional registers which are not "bad", then
6236 finally any register.
6238 I think that maximizing reuse is needed to make sure we don't
6239 run out of reload regs. Suppose we have three reloads, and
6240 reloads A and B can share regs. These need two regs.
6241 Suppose A and B are given different regs.
6242 That leaves none for C. */
6243 for (pass = 0; pass < 3; pass++)
6245 /* I is the index in spill_regs.
6246 We advance it round-robin between insns to use all spill regs
6247 equally, so that inherited reloads have a chance
6248 of leapfrogging each other. */
6250 i = last_spill_reg;
6252 for (count = 0; count < n_spills; count++)
6254 int rclass = (int) rld[r].rclass;
6255 int regnum;
6257 i++;
6258 if (i >= n_spills)
6259 i -= n_spills;
6260 regnum = spill_regs[i];
6262 if ((reload_reg_free_p (regnum, rld[r].opnum,
6263 rld[r].when_needed)
6264 || (rld[r].in
6265 /* We check reload_reg_used to make sure we
6266 don't clobber the return register. */
6267 && ! TEST_HARD_REG_BIT (reload_reg_used, regnum)
6268 && free_for_value_p (regnum, rld[r].mode, rld[r].opnum,
6269 rld[r].when_needed, rld[r].in,
6270 rld[r].out, r, 1)))
6271 && TEST_HARD_REG_BIT (reg_class_contents[rclass], regnum)
6272 && HARD_REGNO_MODE_OK (regnum, rld[r].mode)
6273 /* Look first for regs to share, then for unshared. But
6274 don't share regs used for inherited reloads; they are
6275 the ones we want to preserve. */
6276 && (pass
6277 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
6278 regnum)
6279 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
6280 regnum))))
6282 int nr = hard_regno_nregs[regnum][rld[r].mode];
6284 /* During the second pass we want to avoid reload registers
6285 which are "bad" for this reload. */
6286 if (pass == 1
6287 && ira_bad_reload_regno (regnum, rld[r].in, rld[r].out))
6288 continue;
6290 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
6291 (on 68000) got us two FP regs. If NR is 1,
6292 we would reject both of them. */
6293 if (force_group)
6294 nr = rld[r].nregs;
6295 /* If we need only one reg, we have already won. */
6296 if (nr == 1)
6298 /* But reject a single reg if we demand a group. */
6299 if (force_group)
6300 continue;
6301 break;
6303 /* Otherwise check that as many consecutive regs as we need
6304 are available here. */
6305 while (nr > 1)
6307 int regno = regnum + nr - 1;
6308 if (!(TEST_HARD_REG_BIT (reg_class_contents[rclass], regno)
6309 && spill_reg_order[regno] >= 0
6310 && reload_reg_free_p (regno, rld[r].opnum,
6311 rld[r].when_needed)))
6312 break;
6313 nr--;
6315 if (nr == 1)
6316 break;
6320 /* If we found something on the current pass, omit later passes. */
6321 if (count < n_spills)
6322 break;
6325 /* We should have found a spill register by now. */
6326 if (count >= n_spills)
6327 return 0;
6329 /* I is the index in SPILL_REG_RTX of the reload register we are to
6330 allocate. Get an rtx for it and find its register number. */
6332 return set_reload_reg (i, r);
6335 /* Initialize all the tables needed to allocate reload registers.
6336 CHAIN is the insn currently being processed; SAVE_RELOAD_REG_RTX
6337 is the array we use to restore the reg_rtx field for every reload. */
6339 static void
6340 choose_reload_regs_init (struct insn_chain *chain, rtx *save_reload_reg_rtx)
6342 int i;
6344 for (i = 0; i < n_reloads; i++)
6345 rld[i].reg_rtx = save_reload_reg_rtx[i];
6347 memset (reload_inherited, 0, MAX_RELOADS);
6348 memset (reload_inheritance_insn, 0, MAX_RELOADS * sizeof (rtx));
6349 memset (reload_override_in, 0, MAX_RELOADS * sizeof (rtx));
6351 CLEAR_HARD_REG_SET (reload_reg_used);
6352 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
6353 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
6354 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
6355 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
6356 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
6358 CLEAR_HARD_REG_SET (reg_used_in_insn);
6360 HARD_REG_SET tmp;
6361 REG_SET_TO_HARD_REG_SET (tmp, &chain->live_throughout);
6362 IOR_HARD_REG_SET (reg_used_in_insn, tmp);
6363 REG_SET_TO_HARD_REG_SET (tmp, &chain->dead_or_set);
6364 IOR_HARD_REG_SET (reg_used_in_insn, tmp);
6365 compute_use_by_pseudos (&reg_used_in_insn, &chain->live_throughout);
6366 compute_use_by_pseudos (&reg_used_in_insn, &chain->dead_or_set);
6369 for (i = 0; i < reload_n_operands; i++)
6371 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
6372 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
6373 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
6374 CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
6375 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
6376 CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
6379 COMPL_HARD_REG_SET (reload_reg_unavailable, chain->used_spill_regs);
6381 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
6383 for (i = 0; i < n_reloads; i++)
6384 /* If we have already decided to use a certain register,
6385 don't use it in another way. */
6386 if (rld[i].reg_rtx)
6387 mark_reload_reg_in_use (REGNO (rld[i].reg_rtx), rld[i].opnum,
6388 rld[i].when_needed, rld[i].mode);
6391 #ifdef SECONDARY_MEMORY_NEEDED
6392 /* If X is not a subreg, return it unmodified. If it is a subreg,
6393 look up whether we made a replacement for the SUBREG_REG. Return
6394 either the replacement or the SUBREG_REG. */
6396 static rtx
6397 replaced_subreg (rtx x)
6399 if (GET_CODE (x) == SUBREG)
6400 return find_replacement (&SUBREG_REG (x));
6401 return x;
6403 #endif
6405 /* Compute the offset to pass to subreg_regno_offset, for a pseudo of
6406 mode OUTERMODE that is available in a hard reg of mode INNERMODE.
6407 SUBREG is non-NULL if the pseudo is a subreg whose reg is a pseudo,
6408 otherwise it is NULL. */
6410 static int
6411 compute_reload_subreg_offset (machine_mode outermode,
6412 rtx subreg,
6413 machine_mode innermode)
6415 int outer_offset;
6416 machine_mode middlemode;
6418 if (!subreg)
6419 return subreg_lowpart_offset (outermode, innermode);
6421 outer_offset = SUBREG_BYTE (subreg);
6422 middlemode = GET_MODE (SUBREG_REG (subreg));
6424 /* If SUBREG is paradoxical then return the normal lowpart offset
6425 for OUTERMODE and INNERMODE. Our caller has already checked
6426 that OUTERMODE fits in INNERMODE. */
6427 if (outer_offset == 0
6428 && GET_MODE_SIZE (outermode) > GET_MODE_SIZE (middlemode))
6429 return subreg_lowpart_offset (outermode, innermode);
6431 /* SUBREG is normal, but may not be lowpart; return OUTER_OFFSET
6432 plus the normal lowpart offset for MIDDLEMODE and INNERMODE. */
6433 return outer_offset + subreg_lowpart_offset (middlemode, innermode);
6436 /* Assign hard reg targets for the pseudo-registers we must reload
6437 into hard regs for this insn.
6438 Also output the instructions to copy them in and out of the hard regs.
6440 For machines with register classes, we are responsible for
6441 finding a reload reg in the proper class. */
6443 static void
6444 choose_reload_regs (struct insn_chain *chain)
6446 rtx_insn *insn = chain->insn;
6447 int i, j;
6448 unsigned int max_group_size = 1;
6449 enum reg_class group_class = NO_REGS;
6450 int pass, win, inheritance;
6452 rtx save_reload_reg_rtx[MAX_RELOADS];
6454 /* In order to be certain of getting the registers we need,
6455 we must sort the reloads into order of increasing register class.
6456 Then our grabbing of reload registers will parallel the process
6457 that provided the reload registers.
6459 Also note whether any of the reloads wants a consecutive group of regs.
6460 If so, record the maximum size of the group desired and what
6461 register class contains all the groups needed by this insn. */
6463 for (j = 0; j < n_reloads; j++)
6465 reload_order[j] = j;
6466 if (rld[j].reg_rtx != NULL_RTX)
6468 gcc_assert (REG_P (rld[j].reg_rtx)
6469 && HARD_REGISTER_P (rld[j].reg_rtx));
6470 reload_spill_index[j] = REGNO (rld[j].reg_rtx);
6472 else
6473 reload_spill_index[j] = -1;
6475 if (rld[j].nregs > 1)
6477 max_group_size = MAX (rld[j].nregs, max_group_size);
6478 group_class
6479 = reg_class_superunion[(int) rld[j].rclass][(int) group_class];
6482 save_reload_reg_rtx[j] = rld[j].reg_rtx;
6485 if (n_reloads > 1)
6486 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
6488 /* If -O, try first with inheritance, then turning it off.
6489 If not -O, don't do inheritance.
6490 Using inheritance when not optimizing leads to paradoxes
6491 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
6492 because one side of the comparison might be inherited. */
6493 win = 0;
6494 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
6496 choose_reload_regs_init (chain, save_reload_reg_rtx);
6498 /* Process the reloads in order of preference just found.
6499 Beyond this point, subregs can be found in reload_reg_rtx.
6501 This used to look for an existing reloaded home for all of the
6502 reloads, and only then perform any new reloads. But that could lose
6503 if the reloads were done out of reg-class order because a later
6504 reload with a looser constraint might have an old home in a register
6505 needed by an earlier reload with a tighter constraint.
6507 To solve this, we make two passes over the reloads, in the order
6508 described above. In the first pass we try to inherit a reload
6509 from a previous insn. If there is a later reload that needs a
6510 class that is a proper subset of the class being processed, we must
6511 also allocate a spill register during the first pass.
6513 Then make a second pass over the reloads to allocate any reloads
6514 that haven't been given registers yet. */
6516 for (j = 0; j < n_reloads; j++)
6518 int r = reload_order[j];
6519 rtx search_equiv = NULL_RTX;
6521 /* Ignore reloads that got marked inoperative. */
6522 if (rld[r].out == 0 && rld[r].in == 0
6523 && ! rld[r].secondary_p)
6524 continue;
6526 /* If find_reloads chose to use reload_in or reload_out as a reload
6527 register, we don't need to chose one. Otherwise, try even if it
6528 found one since we might save an insn if we find the value lying
6529 around.
6530 Try also when reload_in is a pseudo without a hard reg. */
6531 if (rld[r].in != 0 && rld[r].reg_rtx != 0
6532 && (rtx_equal_p (rld[r].in, rld[r].reg_rtx)
6533 || (rtx_equal_p (rld[r].out, rld[r].reg_rtx)
6534 && !MEM_P (rld[r].in)
6535 && true_regnum (rld[r].in) < FIRST_PSEUDO_REGISTER)))
6536 continue;
6538 #if 0 /* No longer needed for correct operation.
6539 It might give better code, or might not; worth an experiment? */
6540 /* If this is an optional reload, we can't inherit from earlier insns
6541 until we are sure that any non-optional reloads have been allocated.
6542 The following code takes advantage of the fact that optional reloads
6543 are at the end of reload_order. */
6544 if (rld[r].optional != 0)
6545 for (i = 0; i < j; i++)
6546 if ((rld[reload_order[i]].out != 0
6547 || rld[reload_order[i]].in != 0
6548 || rld[reload_order[i]].secondary_p)
6549 && ! rld[reload_order[i]].optional
6550 && rld[reload_order[i]].reg_rtx == 0)
6551 allocate_reload_reg (chain, reload_order[i], 0);
6552 #endif
6554 /* First see if this pseudo is already available as reloaded
6555 for a previous insn. We cannot try to inherit for reloads
6556 that are smaller than the maximum number of registers needed
6557 for groups unless the register we would allocate cannot be used
6558 for the groups.
6560 We could check here to see if this is a secondary reload for
6561 an object that is already in a register of the desired class.
6562 This would avoid the need for the secondary reload register.
6563 But this is complex because we can't easily determine what
6564 objects might want to be loaded via this reload. So let a
6565 register be allocated here. In `emit_reload_insns' we suppress
6566 one of the loads in the case described above. */
6568 if (inheritance)
6570 int byte = 0;
6571 int regno = -1;
6572 machine_mode mode = VOIDmode;
6573 rtx subreg = NULL_RTX;
6575 if (rld[r].in == 0)
6577 else if (REG_P (rld[r].in))
6579 regno = REGNO (rld[r].in);
6580 mode = GET_MODE (rld[r].in);
6582 else if (REG_P (rld[r].in_reg))
6584 regno = REGNO (rld[r].in_reg);
6585 mode = GET_MODE (rld[r].in_reg);
6587 else if (GET_CODE (rld[r].in_reg) == SUBREG
6588 && REG_P (SUBREG_REG (rld[r].in_reg)))
6590 regno = REGNO (SUBREG_REG (rld[r].in_reg));
6591 if (regno < FIRST_PSEUDO_REGISTER)
6592 regno = subreg_regno (rld[r].in_reg);
6593 else
6595 subreg = rld[r].in_reg;
6596 byte = SUBREG_BYTE (subreg);
6598 mode = GET_MODE (rld[r].in_reg);
6600 #if AUTO_INC_DEC
6601 else if (GET_RTX_CLASS (GET_CODE (rld[r].in_reg)) == RTX_AUTOINC
6602 && REG_P (XEXP (rld[r].in_reg, 0)))
6604 regno = REGNO (XEXP (rld[r].in_reg, 0));
6605 mode = GET_MODE (XEXP (rld[r].in_reg, 0));
6606 rld[r].out = rld[r].in;
6608 #endif
6609 #if 0
6610 /* This won't work, since REGNO can be a pseudo reg number.
6611 Also, it takes much more hair to keep track of all the things
6612 that can invalidate an inherited reload of part of a pseudoreg. */
6613 else if (GET_CODE (rld[r].in) == SUBREG
6614 && REG_P (SUBREG_REG (rld[r].in)))
6615 regno = subreg_regno (rld[r].in);
6616 #endif
6618 if (regno >= 0
6619 && reg_last_reload_reg[regno] != 0
6620 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
6621 >= GET_MODE_SIZE (mode) + byte)
6622 #ifdef CANNOT_CHANGE_MODE_CLASS
6623 /* Verify that the register it's in can be used in
6624 mode MODE. */
6625 && !REG_CANNOT_CHANGE_MODE_P (REGNO (reg_last_reload_reg[regno]),
6626 GET_MODE (reg_last_reload_reg[regno]),
6627 mode)
6628 #endif
6631 enum reg_class rclass = rld[r].rclass, last_class;
6632 rtx last_reg = reg_last_reload_reg[regno];
6634 i = REGNO (last_reg);
6635 byte = compute_reload_subreg_offset (mode,
6636 subreg,
6637 GET_MODE (last_reg));
6638 i += subreg_regno_offset (i, GET_MODE (last_reg), byte, mode);
6639 last_class = REGNO_REG_CLASS (i);
6641 if (reg_reloaded_contents[i] == regno
6642 && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
6643 && HARD_REGNO_MODE_OK (i, rld[r].mode)
6644 && (TEST_HARD_REG_BIT (reg_class_contents[(int) rclass], i)
6645 /* Even if we can't use this register as a reload
6646 register, we might use it for reload_override_in,
6647 if copying it to the desired class is cheap
6648 enough. */
6649 || ((register_move_cost (mode, last_class, rclass)
6650 < memory_move_cost (mode, rclass, true))
6651 && (secondary_reload_class (1, rclass, mode,
6652 last_reg)
6653 == NO_REGS)
6654 #ifdef SECONDARY_MEMORY_NEEDED
6655 && ! SECONDARY_MEMORY_NEEDED (last_class, rclass,
6656 mode)
6657 #endif
6660 && (rld[r].nregs == max_group_size
6661 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
6663 && free_for_value_p (i, rld[r].mode, rld[r].opnum,
6664 rld[r].when_needed, rld[r].in,
6665 const0_rtx, r, 1))
6667 /* If a group is needed, verify that all the subsequent
6668 registers still have their values intact. */
6669 int nr = hard_regno_nregs[i][rld[r].mode];
6670 int k;
6672 for (k = 1; k < nr; k++)
6673 if (reg_reloaded_contents[i + k] != regno
6674 || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))
6675 break;
6677 if (k == nr)
6679 int i1;
6680 int bad_for_class;
6682 last_reg = (GET_MODE (last_reg) == mode
6683 ? last_reg : gen_rtx_REG (mode, i));
6685 bad_for_class = 0;
6686 for (k = 0; k < nr; k++)
6687 bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].rclass],
6688 i+k);
6690 /* We found a register that contains the
6691 value we need. If this register is the
6692 same as an `earlyclobber' operand of the
6693 current insn, just mark it as a place to
6694 reload from since we can't use it as the
6695 reload register itself. */
6697 for (i1 = 0; i1 < n_earlyclobbers; i1++)
6698 if (reg_overlap_mentioned_for_reload_p
6699 (reg_last_reload_reg[regno],
6700 reload_earlyclobbers[i1]))
6701 break;
6703 if (i1 != n_earlyclobbers
6704 || ! (free_for_value_p (i, rld[r].mode,
6705 rld[r].opnum,
6706 rld[r].when_needed, rld[r].in,
6707 rld[r].out, r, 1))
6708 /* Don't use it if we'd clobber a pseudo reg. */
6709 || (TEST_HARD_REG_BIT (reg_used_in_insn, i)
6710 && rld[r].out
6711 && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
6712 /* Don't clobber the frame pointer. */
6713 || (i == HARD_FRAME_POINTER_REGNUM
6714 && frame_pointer_needed
6715 && rld[r].out)
6716 /* Don't really use the inherited spill reg
6717 if we need it wider than we've got it. */
6718 || (GET_MODE_SIZE (rld[r].mode)
6719 > GET_MODE_SIZE (mode))
6720 || bad_for_class
6722 /* If find_reloads chose reload_out as reload
6723 register, stay with it - that leaves the
6724 inherited register for subsequent reloads. */
6725 || (rld[r].out && rld[r].reg_rtx
6726 && rtx_equal_p (rld[r].out, rld[r].reg_rtx)))
6728 if (! rld[r].optional)
6730 reload_override_in[r] = last_reg;
6731 reload_inheritance_insn[r]
6732 = reg_reloaded_insn[i];
6735 else
6737 int k;
6738 /* We can use this as a reload reg. */
6739 /* Mark the register as in use for this part of
6740 the insn. */
6741 mark_reload_reg_in_use (i,
6742 rld[r].opnum,
6743 rld[r].when_needed,
6744 rld[r].mode);
6745 rld[r].reg_rtx = last_reg;
6746 reload_inherited[r] = 1;
6747 reload_inheritance_insn[r]
6748 = reg_reloaded_insn[i];
6749 reload_spill_index[r] = i;
6750 for (k = 0; k < nr; k++)
6751 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6752 i + k);
6759 /* Here's another way to see if the value is already lying around. */
6760 if (inheritance
6761 && rld[r].in != 0
6762 && ! reload_inherited[r]
6763 && rld[r].out == 0
6764 && (CONSTANT_P (rld[r].in)
6765 || GET_CODE (rld[r].in) == PLUS
6766 || REG_P (rld[r].in)
6767 || MEM_P (rld[r].in))
6768 && (rld[r].nregs == max_group_size
6769 || ! reg_classes_intersect_p (rld[r].rclass, group_class)))
6770 search_equiv = rld[r].in;
6772 if (search_equiv)
6774 rtx equiv
6775 = find_equiv_reg (search_equiv, insn, rld[r].rclass,
6776 -1, NULL, 0, rld[r].mode);
6777 int regno = 0;
6779 if (equiv != 0)
6781 if (REG_P (equiv))
6782 regno = REGNO (equiv);
6783 else
6785 /* This must be a SUBREG of a hard register.
6786 Make a new REG since this might be used in an
6787 address and not all machines support SUBREGs
6788 there. */
6789 gcc_assert (GET_CODE (equiv) == SUBREG);
6790 regno = subreg_regno (equiv);
6791 equiv = gen_rtx_REG (rld[r].mode, regno);
6792 /* If we choose EQUIV as the reload register, but the
6793 loop below decides to cancel the inheritance, we'll
6794 end up reloading EQUIV in rld[r].mode, not the mode
6795 it had originally. That isn't safe when EQUIV isn't
6796 available as a spill register since its value might
6797 still be live at this point. */
6798 for (i = regno; i < regno + (int) rld[r].nregs; i++)
6799 if (TEST_HARD_REG_BIT (reload_reg_unavailable, i))
6800 equiv = 0;
6804 /* If we found a spill reg, reject it unless it is free
6805 and of the desired class. */
6806 if (equiv != 0)
6808 int regs_used = 0;
6809 int bad_for_class = 0;
6810 int max_regno = regno + rld[r].nregs;
6812 for (i = regno; i < max_regno; i++)
6814 regs_used |= TEST_HARD_REG_BIT (reload_reg_used_at_all,
6816 bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].rclass],
6820 if ((regs_used
6821 && ! free_for_value_p (regno, rld[r].mode,
6822 rld[r].opnum, rld[r].when_needed,
6823 rld[r].in, rld[r].out, r, 1))
6824 || bad_for_class)
6825 equiv = 0;
6828 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, rld[r].mode))
6829 equiv = 0;
6831 /* We found a register that contains the value we need.
6832 If this register is the same as an `earlyclobber' operand
6833 of the current insn, just mark it as a place to reload from
6834 since we can't use it as the reload register itself. */
6836 if (equiv != 0)
6837 for (i = 0; i < n_earlyclobbers; i++)
6838 if (reg_overlap_mentioned_for_reload_p (equiv,
6839 reload_earlyclobbers[i]))
6841 if (! rld[r].optional)
6842 reload_override_in[r] = equiv;
6843 equiv = 0;
6844 break;
6847 /* If the equiv register we have found is explicitly clobbered
6848 in the current insn, it depends on the reload type if we
6849 can use it, use it for reload_override_in, or not at all.
6850 In particular, we then can't use EQUIV for a
6851 RELOAD_FOR_OUTPUT_ADDRESS reload. */
6853 if (equiv != 0)
6855 if (regno_clobbered_p (regno, insn, rld[r].mode, 2))
6856 switch (rld[r].when_needed)
6858 case RELOAD_FOR_OTHER_ADDRESS:
6859 case RELOAD_FOR_INPADDR_ADDRESS:
6860 case RELOAD_FOR_INPUT_ADDRESS:
6861 case RELOAD_FOR_OPADDR_ADDR:
6862 break;
6863 case RELOAD_OTHER:
6864 case RELOAD_FOR_INPUT:
6865 case RELOAD_FOR_OPERAND_ADDRESS:
6866 if (! rld[r].optional)
6867 reload_override_in[r] = equiv;
6868 /* Fall through. */
6869 default:
6870 equiv = 0;
6871 break;
6873 else if (regno_clobbered_p (regno, insn, rld[r].mode, 1))
6874 switch (rld[r].when_needed)
6876 case RELOAD_FOR_OTHER_ADDRESS:
6877 case RELOAD_FOR_INPADDR_ADDRESS:
6878 case RELOAD_FOR_INPUT_ADDRESS:
6879 case RELOAD_FOR_OPADDR_ADDR:
6880 case RELOAD_FOR_OPERAND_ADDRESS:
6881 case RELOAD_FOR_INPUT:
6882 break;
6883 case RELOAD_OTHER:
6884 if (! rld[r].optional)
6885 reload_override_in[r] = equiv;
6886 /* Fall through. */
6887 default:
6888 equiv = 0;
6889 break;
6893 /* If we found an equivalent reg, say no code need be generated
6894 to load it, and use it as our reload reg. */
6895 if (equiv != 0
6896 && (regno != HARD_FRAME_POINTER_REGNUM
6897 || !frame_pointer_needed))
6899 int nr = hard_regno_nregs[regno][rld[r].mode];
6900 int k;
6901 rld[r].reg_rtx = equiv;
6902 reload_spill_index[r] = regno;
6903 reload_inherited[r] = 1;
6905 /* If reg_reloaded_valid is not set for this register,
6906 there might be a stale spill_reg_store lying around.
6907 We must clear it, since otherwise emit_reload_insns
6908 might delete the store. */
6909 if (! TEST_HARD_REG_BIT (reg_reloaded_valid, regno))
6910 spill_reg_store[regno] = NULL;
6911 /* If any of the hard registers in EQUIV are spill
6912 registers, mark them as in use for this insn. */
6913 for (k = 0; k < nr; k++)
6915 i = spill_reg_order[regno + k];
6916 if (i >= 0)
6918 mark_reload_reg_in_use (regno, rld[r].opnum,
6919 rld[r].when_needed,
6920 rld[r].mode);
6921 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6922 regno + k);
6928 /* If we found a register to use already, or if this is an optional
6929 reload, we are done. */
6930 if (rld[r].reg_rtx != 0 || rld[r].optional != 0)
6931 continue;
6933 #if 0
6934 /* No longer needed for correct operation. Might or might
6935 not give better code on the average. Want to experiment? */
6937 /* See if there is a later reload that has a class different from our
6938 class that intersects our class or that requires less register
6939 than our reload. If so, we must allocate a register to this
6940 reload now, since that reload might inherit a previous reload
6941 and take the only available register in our class. Don't do this
6942 for optional reloads since they will force all previous reloads
6943 to be allocated. Also don't do this for reloads that have been
6944 turned off. */
6946 for (i = j + 1; i < n_reloads; i++)
6948 int s = reload_order[i];
6950 if ((rld[s].in == 0 && rld[s].out == 0
6951 && ! rld[s].secondary_p)
6952 || rld[s].optional)
6953 continue;
6955 if ((rld[s].rclass != rld[r].rclass
6956 && reg_classes_intersect_p (rld[r].rclass,
6957 rld[s].rclass))
6958 || rld[s].nregs < rld[r].nregs)
6959 break;
6962 if (i == n_reloads)
6963 continue;
6965 allocate_reload_reg (chain, r, j == n_reloads - 1);
6966 #endif
6969 /* Now allocate reload registers for anything non-optional that
6970 didn't get one yet. */
6971 for (j = 0; j < n_reloads; j++)
6973 int r = reload_order[j];
6975 /* Ignore reloads that got marked inoperative. */
6976 if (rld[r].out == 0 && rld[r].in == 0 && ! rld[r].secondary_p)
6977 continue;
6979 /* Skip reloads that already have a register allocated or are
6980 optional. */
6981 if (rld[r].reg_rtx != 0 || rld[r].optional)
6982 continue;
6984 if (! allocate_reload_reg (chain, r, j == n_reloads - 1))
6985 break;
6988 /* If that loop got all the way, we have won. */
6989 if (j == n_reloads)
6991 win = 1;
6992 break;
6995 /* Loop around and try without any inheritance. */
6998 if (! win)
7000 /* First undo everything done by the failed attempt
7001 to allocate with inheritance. */
7002 choose_reload_regs_init (chain, save_reload_reg_rtx);
7004 /* Some sanity tests to verify that the reloads found in the first
7005 pass are identical to the ones we have now. */
7006 gcc_assert (chain->n_reloads == n_reloads);
7008 for (i = 0; i < n_reloads; i++)
7010 if (chain->rld[i].regno < 0 || chain->rld[i].reg_rtx != 0)
7011 continue;
7012 gcc_assert (chain->rld[i].when_needed == rld[i].when_needed);
7013 for (j = 0; j < n_spills; j++)
7014 if (spill_regs[j] == chain->rld[i].regno)
7015 if (! set_reload_reg (j, i))
7016 failed_reload (chain->insn, i);
7020 /* If we thought we could inherit a reload, because it seemed that
7021 nothing else wanted the same reload register earlier in the insn,
7022 verify that assumption, now that all reloads have been assigned.
7023 Likewise for reloads where reload_override_in has been set. */
7025 /* If doing expensive optimizations, do one preliminary pass that doesn't
7026 cancel any inheritance, but removes reloads that have been needed only
7027 for reloads that we know can be inherited. */
7028 for (pass = flag_expensive_optimizations; pass >= 0; pass--)
7030 for (j = 0; j < n_reloads; j++)
7032 int r = reload_order[j];
7033 rtx check_reg;
7034 #ifdef SECONDARY_MEMORY_NEEDED
7035 rtx tem;
7036 #endif
7037 if (reload_inherited[r] && rld[r].reg_rtx)
7038 check_reg = rld[r].reg_rtx;
7039 else if (reload_override_in[r]
7040 && (REG_P (reload_override_in[r])
7041 || GET_CODE (reload_override_in[r]) == SUBREG))
7042 check_reg = reload_override_in[r];
7043 else
7044 continue;
7045 if (! free_for_value_p (true_regnum (check_reg), rld[r].mode,
7046 rld[r].opnum, rld[r].when_needed, rld[r].in,
7047 (reload_inherited[r]
7048 ? rld[r].out : const0_rtx),
7049 r, 1))
7051 if (pass)
7052 continue;
7053 reload_inherited[r] = 0;
7054 reload_override_in[r] = 0;
7056 /* If we can inherit a RELOAD_FOR_INPUT, or can use a
7057 reload_override_in, then we do not need its related
7058 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
7059 likewise for other reload types.
7060 We handle this by removing a reload when its only replacement
7061 is mentioned in reload_in of the reload we are going to inherit.
7062 A special case are auto_inc expressions; even if the input is
7063 inherited, we still need the address for the output. We can
7064 recognize them because they have RELOAD_OUT set to RELOAD_IN.
7065 If we succeeded removing some reload and we are doing a preliminary
7066 pass just to remove such reloads, make another pass, since the
7067 removal of one reload might allow us to inherit another one. */
7068 else if (rld[r].in
7069 && rld[r].out != rld[r].in
7070 && remove_address_replacements (rld[r].in))
7072 if (pass)
7073 pass = 2;
7075 #ifdef SECONDARY_MEMORY_NEEDED
7076 /* If we needed a memory location for the reload, we also have to
7077 remove its related reloads. */
7078 else if (rld[r].in
7079 && rld[r].out != rld[r].in
7080 && (tem = replaced_subreg (rld[r].in), REG_P (tem))
7081 && REGNO (tem) < FIRST_PSEUDO_REGISTER
7082 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (tem)),
7083 rld[r].rclass, rld[r].inmode)
7084 && remove_address_replacements
7085 (get_secondary_mem (tem, rld[r].inmode, rld[r].opnum,
7086 rld[r].when_needed)))
7088 if (pass)
7089 pass = 2;
7091 #endif
7095 /* Now that reload_override_in is known valid,
7096 actually override reload_in. */
7097 for (j = 0; j < n_reloads; j++)
7098 if (reload_override_in[j])
7099 rld[j].in = reload_override_in[j];
7101 /* If this reload won't be done because it has been canceled or is
7102 optional and not inherited, clear reload_reg_rtx so other
7103 routines (such as subst_reloads) don't get confused. */
7104 for (j = 0; j < n_reloads; j++)
7105 if (rld[j].reg_rtx != 0
7106 && ((rld[j].optional && ! reload_inherited[j])
7107 || (rld[j].in == 0 && rld[j].out == 0
7108 && ! rld[j].secondary_p)))
7110 int regno = true_regnum (rld[j].reg_rtx);
7112 if (spill_reg_order[regno] >= 0)
7113 clear_reload_reg_in_use (regno, rld[j].opnum,
7114 rld[j].when_needed, rld[j].mode);
7115 rld[j].reg_rtx = 0;
7116 reload_spill_index[j] = -1;
7119 /* Record which pseudos and which spill regs have output reloads. */
7120 for (j = 0; j < n_reloads; j++)
7122 int r = reload_order[j];
7124 i = reload_spill_index[r];
7126 /* I is nonneg if this reload uses a register.
7127 If rld[r].reg_rtx is 0, this is an optional reload
7128 that we opted to ignore. */
7129 if (rld[r].out_reg != 0 && REG_P (rld[r].out_reg)
7130 && rld[r].reg_rtx != 0)
7132 int nregno = REGNO (rld[r].out_reg);
7133 int nr = 1;
7135 if (nregno < FIRST_PSEUDO_REGISTER)
7136 nr = hard_regno_nregs[nregno][rld[r].mode];
7138 while (--nr >= 0)
7139 SET_REGNO_REG_SET (&reg_has_output_reload,
7140 nregno + nr);
7142 if (i >= 0)
7143 add_to_hard_reg_set (&reg_is_output_reload, rld[r].mode, i);
7145 gcc_assert (rld[r].when_needed == RELOAD_OTHER
7146 || rld[r].when_needed == RELOAD_FOR_OUTPUT
7147 || rld[r].when_needed == RELOAD_FOR_INSN);
7152 /* Deallocate the reload register for reload R. This is called from
7153 remove_address_replacements. */
7155 void
7156 deallocate_reload_reg (int r)
7158 int regno;
7160 if (! rld[r].reg_rtx)
7161 return;
7162 regno = true_regnum (rld[r].reg_rtx);
7163 rld[r].reg_rtx = 0;
7164 if (spill_reg_order[regno] >= 0)
7165 clear_reload_reg_in_use (regno, rld[r].opnum, rld[r].when_needed,
7166 rld[r].mode);
7167 reload_spill_index[r] = -1;
7170 /* These arrays are filled by emit_reload_insns and its subroutines. */
7171 static rtx_insn *input_reload_insns[MAX_RECOG_OPERANDS];
7172 static rtx_insn *other_input_address_reload_insns = 0;
7173 static rtx_insn *other_input_reload_insns = 0;
7174 static rtx_insn *input_address_reload_insns[MAX_RECOG_OPERANDS];
7175 static rtx_insn *inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
7176 static rtx_insn *output_reload_insns[MAX_RECOG_OPERANDS];
7177 static rtx_insn *output_address_reload_insns[MAX_RECOG_OPERANDS];
7178 static rtx_insn *outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
7179 static rtx_insn *operand_reload_insns = 0;
7180 static rtx_insn *other_operand_reload_insns = 0;
7181 static rtx_insn *other_output_reload_insns[MAX_RECOG_OPERANDS];
7183 /* Values to be put in spill_reg_store are put here first. Instructions
7184 must only be placed here if the associated reload register reaches
7185 the end of the instruction's reload sequence. */
7186 static rtx_insn *new_spill_reg_store[FIRST_PSEUDO_REGISTER];
7187 static HARD_REG_SET reg_reloaded_died;
7189 /* Check if *RELOAD_REG is suitable as an intermediate or scratch register
7190 of class NEW_CLASS with mode NEW_MODE. Or alternatively, if alt_reload_reg
7191 is nonzero, if that is suitable. On success, change *RELOAD_REG to the
7192 adjusted register, and return true. Otherwise, return false. */
7193 static bool
7194 reload_adjust_reg_for_temp (rtx *reload_reg, rtx alt_reload_reg,
7195 enum reg_class new_class,
7196 machine_mode new_mode)
7199 rtx reg;
7201 for (reg = *reload_reg; reg; reg = alt_reload_reg, alt_reload_reg = 0)
7203 unsigned regno = REGNO (reg);
7205 if (!TEST_HARD_REG_BIT (reg_class_contents[(int) new_class], regno))
7206 continue;
7207 if (GET_MODE (reg) != new_mode)
7209 if (!HARD_REGNO_MODE_OK (regno, new_mode))
7210 continue;
7211 if (hard_regno_nregs[regno][new_mode]
7212 > hard_regno_nregs[regno][GET_MODE (reg)])
7213 continue;
7214 reg = reload_adjust_reg_for_mode (reg, new_mode);
7216 *reload_reg = reg;
7217 return true;
7219 return false;
7222 /* Check if *RELOAD_REG is suitable as a scratch register for the reload
7223 pattern with insn_code ICODE, or alternatively, if alt_reload_reg is
7224 nonzero, if that is suitable. On success, change *RELOAD_REG to the
7225 adjusted register, and return true. Otherwise, return false. */
7226 static bool
7227 reload_adjust_reg_for_icode (rtx *reload_reg, rtx alt_reload_reg,
7228 enum insn_code icode)
7231 enum reg_class new_class = scratch_reload_class (icode);
7232 machine_mode new_mode = insn_data[(int) icode].operand[2].mode;
7234 return reload_adjust_reg_for_temp (reload_reg, alt_reload_reg,
7235 new_class, new_mode);
7238 /* Generate insns to perform reload RL, which is for the insn in CHAIN and
7239 has the number J. OLD contains the value to be used as input. */
7241 static void
7242 emit_input_reload_insns (struct insn_chain *chain, struct reload *rl,
7243 rtx old, int j)
7245 rtx_insn *insn = chain->insn;
7246 rtx reloadreg;
7247 rtx oldequiv_reg = 0;
7248 rtx oldequiv = 0;
7249 int special = 0;
7250 machine_mode mode;
7251 rtx_insn **where;
7253 /* delete_output_reload is only invoked properly if old contains
7254 the original pseudo register. Since this is replaced with a
7255 hard reg when RELOAD_OVERRIDE_IN is set, see if we can
7256 find the pseudo in RELOAD_IN_REG. This is also used to
7257 determine whether a secondary reload is needed. */
7258 if (reload_override_in[j]
7259 && (REG_P (rl->in_reg)
7260 || (GET_CODE (rl->in_reg) == SUBREG
7261 && REG_P (SUBREG_REG (rl->in_reg)))))
7263 oldequiv = old;
7264 old = rl->in_reg;
7266 if (oldequiv == 0)
7267 oldequiv = old;
7268 else if (REG_P (oldequiv))
7269 oldequiv_reg = oldequiv;
7270 else if (GET_CODE (oldequiv) == SUBREG)
7271 oldequiv_reg = SUBREG_REG (oldequiv);
7273 reloadreg = reload_reg_rtx_for_input[j];
7274 mode = GET_MODE (reloadreg);
7276 /* If we are reloading from a register that was recently stored in
7277 with an output-reload, see if we can prove there was
7278 actually no need to store the old value in it. */
7280 if (optimize && REG_P (oldequiv)
7281 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
7282 && spill_reg_store[REGNO (oldequiv)]
7283 && REG_P (old)
7284 && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (oldequiv)])
7285 || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
7286 rl->out_reg)))
7287 delete_output_reload (insn, j, REGNO (oldequiv), reloadreg);
7289 /* Encapsulate OLDEQUIV into the reload mode, then load RELOADREG from
7290 OLDEQUIV. */
7292 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
7293 oldequiv = SUBREG_REG (oldequiv);
7294 if (GET_MODE (oldequiv) != VOIDmode
7295 && mode != GET_MODE (oldequiv))
7296 oldequiv = gen_lowpart_SUBREG (mode, oldequiv);
7298 /* Switch to the right place to emit the reload insns. */
7299 switch (rl->when_needed)
7301 case RELOAD_OTHER:
7302 where = &other_input_reload_insns;
7303 break;
7304 case RELOAD_FOR_INPUT:
7305 where = &input_reload_insns[rl->opnum];
7306 break;
7307 case RELOAD_FOR_INPUT_ADDRESS:
7308 where = &input_address_reload_insns[rl->opnum];
7309 break;
7310 case RELOAD_FOR_INPADDR_ADDRESS:
7311 where = &inpaddr_address_reload_insns[rl->opnum];
7312 break;
7313 case RELOAD_FOR_OUTPUT_ADDRESS:
7314 where = &output_address_reload_insns[rl->opnum];
7315 break;
7316 case RELOAD_FOR_OUTADDR_ADDRESS:
7317 where = &outaddr_address_reload_insns[rl->opnum];
7318 break;
7319 case RELOAD_FOR_OPERAND_ADDRESS:
7320 where = &operand_reload_insns;
7321 break;
7322 case RELOAD_FOR_OPADDR_ADDR:
7323 where = &other_operand_reload_insns;
7324 break;
7325 case RELOAD_FOR_OTHER_ADDRESS:
7326 where = &other_input_address_reload_insns;
7327 break;
7328 default:
7329 gcc_unreachable ();
7332 push_to_sequence (*where);
7334 /* Auto-increment addresses must be reloaded in a special way. */
7335 if (rl->out && ! rl->out_reg)
7337 /* We are not going to bother supporting the case where a
7338 incremented register can't be copied directly from
7339 OLDEQUIV since this seems highly unlikely. */
7340 gcc_assert (rl->secondary_in_reload < 0);
7342 if (reload_inherited[j])
7343 oldequiv = reloadreg;
7345 old = XEXP (rl->in_reg, 0);
7347 /* Prevent normal processing of this reload. */
7348 special = 1;
7349 /* Output a special code sequence for this case. */
7350 inc_for_reload (reloadreg, oldequiv, rl->out, rl->inc);
7353 /* If we are reloading a pseudo-register that was set by the previous
7354 insn, see if we can get rid of that pseudo-register entirely
7355 by redirecting the previous insn into our reload register. */
7357 else if (optimize && REG_P (old)
7358 && REGNO (old) >= FIRST_PSEUDO_REGISTER
7359 && dead_or_set_p (insn, old)
7360 /* This is unsafe if some other reload
7361 uses the same reg first. */
7362 && ! conflicts_with_override (reloadreg)
7363 && free_for_value_p (REGNO (reloadreg), rl->mode, rl->opnum,
7364 rl->when_needed, old, rl->out, j, 0))
7366 rtx_insn *temp = PREV_INSN (insn);
7367 while (temp && (NOTE_P (temp) || DEBUG_INSN_P (temp)))
7368 temp = PREV_INSN (temp);
7369 if (temp
7370 && NONJUMP_INSN_P (temp)
7371 && GET_CODE (PATTERN (temp)) == SET
7372 && SET_DEST (PATTERN (temp)) == old
7373 /* Make sure we can access insn_operand_constraint. */
7374 && asm_noperands (PATTERN (temp)) < 0
7375 /* This is unsafe if operand occurs more than once in current
7376 insn. Perhaps some occurrences aren't reloaded. */
7377 && count_occurrences (PATTERN (insn), old, 0) == 1)
7379 rtx old = SET_DEST (PATTERN (temp));
7380 /* Store into the reload register instead of the pseudo. */
7381 SET_DEST (PATTERN (temp)) = reloadreg;
7383 /* Verify that resulting insn is valid.
7385 Note that we have replaced the destination of TEMP with
7386 RELOADREG. If TEMP references RELOADREG within an
7387 autoincrement addressing mode, then the resulting insn
7388 is ill-formed and we must reject this optimization. */
7389 extract_insn (temp);
7390 if (constrain_operands (1, get_enabled_alternatives (temp))
7391 && (!AUTO_INC_DEC || ! find_reg_note (temp, REG_INC, reloadreg)))
7393 /* If the previous insn is an output reload, the source is
7394 a reload register, and its spill_reg_store entry will
7395 contain the previous destination. This is now
7396 invalid. */
7397 if (REG_P (SET_SRC (PATTERN (temp)))
7398 && REGNO (SET_SRC (PATTERN (temp))) < FIRST_PSEUDO_REGISTER)
7400 spill_reg_store[REGNO (SET_SRC (PATTERN (temp)))] = 0;
7401 spill_reg_stored_to[REGNO (SET_SRC (PATTERN (temp)))] = 0;
7404 /* If these are the only uses of the pseudo reg,
7405 pretend for GDB it lives in the reload reg we used. */
7406 if (REG_N_DEATHS (REGNO (old)) == 1
7407 && REG_N_SETS (REGNO (old)) == 1)
7409 reg_renumber[REGNO (old)] = REGNO (reloadreg);
7410 if (ira_conflicts_p)
7411 /* Inform IRA about the change. */
7412 ira_mark_allocation_change (REGNO (old));
7413 alter_reg (REGNO (old), -1, false);
7415 special = 1;
7417 /* Adjust any debug insns between temp and insn. */
7418 while ((temp = NEXT_INSN (temp)) != insn)
7419 if (DEBUG_INSN_P (temp))
7420 replace_rtx (PATTERN (temp), old, reloadreg);
7421 else
7422 gcc_assert (NOTE_P (temp));
7424 else
7426 SET_DEST (PATTERN (temp)) = old;
7431 /* We can't do that, so output an insn to load RELOADREG. */
7433 /* If we have a secondary reload, pick up the secondary register
7434 and icode, if any. If OLDEQUIV and OLD are different or
7435 if this is an in-out reload, recompute whether or not we
7436 still need a secondary register and what the icode should
7437 be. If we still need a secondary register and the class or
7438 icode is different, go back to reloading from OLD if using
7439 OLDEQUIV means that we got the wrong type of register. We
7440 cannot have different class or icode due to an in-out reload
7441 because we don't make such reloads when both the input and
7442 output need secondary reload registers. */
7444 if (! special && rl->secondary_in_reload >= 0)
7446 rtx second_reload_reg = 0;
7447 rtx third_reload_reg = 0;
7448 int secondary_reload = rl->secondary_in_reload;
7449 rtx real_oldequiv = oldequiv;
7450 rtx real_old = old;
7451 rtx tmp;
7452 enum insn_code icode;
7453 enum insn_code tertiary_icode = CODE_FOR_nothing;
7455 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
7456 and similarly for OLD.
7457 See comments in get_secondary_reload in reload.c. */
7458 /* If it is a pseudo that cannot be replaced with its
7459 equivalent MEM, we must fall back to reload_in, which
7460 will have all the necessary substitutions registered.
7461 Likewise for a pseudo that can't be replaced with its
7462 equivalent constant.
7464 Take extra care for subregs of such pseudos. Note that
7465 we cannot use reg_equiv_mem in this case because it is
7466 not in the right mode. */
7468 tmp = oldequiv;
7469 if (GET_CODE (tmp) == SUBREG)
7470 tmp = SUBREG_REG (tmp);
7471 if (REG_P (tmp)
7472 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
7473 && (reg_equiv_memory_loc (REGNO (tmp)) != 0
7474 || reg_equiv_constant (REGNO (tmp)) != 0))
7476 if (! reg_equiv_mem (REGNO (tmp))
7477 || num_not_at_initial_offset
7478 || GET_CODE (oldequiv) == SUBREG)
7479 real_oldequiv = rl->in;
7480 else
7481 real_oldequiv = reg_equiv_mem (REGNO (tmp));
7484 tmp = old;
7485 if (GET_CODE (tmp) == SUBREG)
7486 tmp = SUBREG_REG (tmp);
7487 if (REG_P (tmp)
7488 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
7489 && (reg_equiv_memory_loc (REGNO (tmp)) != 0
7490 || reg_equiv_constant (REGNO (tmp)) != 0))
7492 if (! reg_equiv_mem (REGNO (tmp))
7493 || num_not_at_initial_offset
7494 || GET_CODE (old) == SUBREG)
7495 real_old = rl->in;
7496 else
7497 real_old = reg_equiv_mem (REGNO (tmp));
7500 second_reload_reg = rld[secondary_reload].reg_rtx;
7501 if (rld[secondary_reload].secondary_in_reload >= 0)
7503 int tertiary_reload = rld[secondary_reload].secondary_in_reload;
7505 third_reload_reg = rld[tertiary_reload].reg_rtx;
7506 tertiary_icode = rld[secondary_reload].secondary_in_icode;
7507 /* We'd have to add more code for quartary reloads. */
7508 gcc_assert (rld[tertiary_reload].secondary_in_reload < 0);
7510 icode = rl->secondary_in_icode;
7512 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
7513 || (rl->in != 0 && rl->out != 0))
7515 secondary_reload_info sri, sri2;
7516 enum reg_class new_class, new_t_class;
7518 sri.icode = CODE_FOR_nothing;
7519 sri.prev_sri = NULL;
7520 new_class
7521 = (enum reg_class) targetm.secondary_reload (1, real_oldequiv,
7522 rl->rclass, mode,
7523 &sri);
7525 if (new_class == NO_REGS && sri.icode == CODE_FOR_nothing)
7526 second_reload_reg = 0;
7527 else if (new_class == NO_REGS)
7529 if (reload_adjust_reg_for_icode (&second_reload_reg,
7530 third_reload_reg,
7531 (enum insn_code) sri.icode))
7533 icode = (enum insn_code) sri.icode;
7534 third_reload_reg = 0;
7536 else
7538 oldequiv = old;
7539 real_oldequiv = real_old;
7542 else if (sri.icode != CODE_FOR_nothing)
7543 /* We currently lack a way to express this in reloads. */
7544 gcc_unreachable ();
7545 else
7547 sri2.icode = CODE_FOR_nothing;
7548 sri2.prev_sri = &sri;
7549 new_t_class
7550 = (enum reg_class) targetm.secondary_reload (1, real_oldequiv,
7551 new_class, mode,
7552 &sri);
7553 if (new_t_class == NO_REGS && sri2.icode == CODE_FOR_nothing)
7555 if (reload_adjust_reg_for_temp (&second_reload_reg,
7556 third_reload_reg,
7557 new_class, mode))
7559 third_reload_reg = 0;
7560 tertiary_icode = (enum insn_code) sri2.icode;
7562 else
7564 oldequiv = old;
7565 real_oldequiv = real_old;
7568 else if (new_t_class == NO_REGS && sri2.icode != CODE_FOR_nothing)
7570 rtx intermediate = second_reload_reg;
7572 if (reload_adjust_reg_for_temp (&intermediate, NULL,
7573 new_class, mode)
7574 && reload_adjust_reg_for_icode (&third_reload_reg, NULL,
7575 ((enum insn_code)
7576 sri2.icode)))
7578 second_reload_reg = intermediate;
7579 tertiary_icode = (enum insn_code) sri2.icode;
7581 else
7583 oldequiv = old;
7584 real_oldequiv = real_old;
7587 else if (new_t_class != NO_REGS && sri2.icode == CODE_FOR_nothing)
7589 rtx intermediate = second_reload_reg;
7591 if (reload_adjust_reg_for_temp (&intermediate, NULL,
7592 new_class, mode)
7593 && reload_adjust_reg_for_temp (&third_reload_reg, NULL,
7594 new_t_class, mode))
7596 second_reload_reg = intermediate;
7597 tertiary_icode = (enum insn_code) sri2.icode;
7599 else
7601 oldequiv = old;
7602 real_oldequiv = real_old;
7605 else
7607 /* This could be handled more intelligently too. */
7608 oldequiv = old;
7609 real_oldequiv = real_old;
7614 /* If we still need a secondary reload register, check
7615 to see if it is being used as a scratch or intermediate
7616 register and generate code appropriately. If we need
7617 a scratch register, use REAL_OLDEQUIV since the form of
7618 the insn may depend on the actual address if it is
7619 a MEM. */
7621 if (second_reload_reg)
7623 if (icode != CODE_FOR_nothing)
7625 /* We'd have to add extra code to handle this case. */
7626 gcc_assert (!third_reload_reg);
7628 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
7629 second_reload_reg));
7630 special = 1;
7632 else
7634 /* See if we need a scratch register to load the
7635 intermediate register (a tertiary reload). */
7636 if (tertiary_icode != CODE_FOR_nothing)
7638 emit_insn ((GEN_FCN (tertiary_icode)
7639 (second_reload_reg, real_oldequiv,
7640 third_reload_reg)));
7642 else if (third_reload_reg)
7644 gen_reload (third_reload_reg, real_oldequiv,
7645 rl->opnum,
7646 rl->when_needed);
7647 gen_reload (second_reload_reg, third_reload_reg,
7648 rl->opnum,
7649 rl->when_needed);
7651 else
7652 gen_reload (second_reload_reg, real_oldequiv,
7653 rl->opnum,
7654 rl->when_needed);
7656 oldequiv = second_reload_reg;
7661 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
7663 rtx real_oldequiv = oldequiv;
7665 if ((REG_P (oldequiv)
7666 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
7667 && (reg_equiv_memory_loc (REGNO (oldequiv)) != 0
7668 || reg_equiv_constant (REGNO (oldequiv)) != 0))
7669 || (GET_CODE (oldequiv) == SUBREG
7670 && REG_P (SUBREG_REG (oldequiv))
7671 && (REGNO (SUBREG_REG (oldequiv))
7672 >= FIRST_PSEUDO_REGISTER)
7673 && ((reg_equiv_memory_loc (REGNO (SUBREG_REG (oldequiv))) != 0)
7674 || (reg_equiv_constant (REGNO (SUBREG_REG (oldequiv))) != 0)))
7675 || (CONSTANT_P (oldequiv)
7676 && (targetm.preferred_reload_class (oldequiv,
7677 REGNO_REG_CLASS (REGNO (reloadreg)))
7678 == NO_REGS)))
7679 real_oldequiv = rl->in;
7680 gen_reload (reloadreg, real_oldequiv, rl->opnum,
7681 rl->when_needed);
7684 if (cfun->can_throw_non_call_exceptions)
7685 copy_reg_eh_region_note_forward (insn, get_insns (), NULL);
7687 /* End this sequence. */
7688 *where = get_insns ();
7689 end_sequence ();
7691 /* Update reload_override_in so that delete_address_reloads_1
7692 can see the actual register usage. */
7693 if (oldequiv_reg)
7694 reload_override_in[j] = oldequiv;
7697 /* Generate insns to for the output reload RL, which is for the insn described
7698 by CHAIN and has the number J. */
7699 static void
7700 emit_output_reload_insns (struct insn_chain *chain, struct reload *rl,
7701 int j)
7703 rtx reloadreg;
7704 rtx_insn *insn = chain->insn;
7705 int special = 0;
7706 rtx old = rl->out;
7707 machine_mode mode;
7708 rtx_insn *p;
7709 rtx rl_reg_rtx;
7711 if (rl->when_needed == RELOAD_OTHER)
7712 start_sequence ();
7713 else
7714 push_to_sequence (output_reload_insns[rl->opnum]);
7716 rl_reg_rtx = reload_reg_rtx_for_output[j];
7717 mode = GET_MODE (rl_reg_rtx);
7719 reloadreg = rl_reg_rtx;
7721 /* If we need two reload regs, set RELOADREG to the intermediate
7722 one, since it will be stored into OLD. We might need a secondary
7723 register only for an input reload, so check again here. */
7725 if (rl->secondary_out_reload >= 0)
7727 rtx real_old = old;
7728 int secondary_reload = rl->secondary_out_reload;
7729 int tertiary_reload = rld[secondary_reload].secondary_out_reload;
7731 if (REG_P (old) && REGNO (old) >= FIRST_PSEUDO_REGISTER
7732 && reg_equiv_mem (REGNO (old)) != 0)
7733 real_old = reg_equiv_mem (REGNO (old));
7735 if (secondary_reload_class (0, rl->rclass, mode, real_old) != NO_REGS)
7737 rtx second_reloadreg = reloadreg;
7738 reloadreg = rld[secondary_reload].reg_rtx;
7740 /* See if RELOADREG is to be used as a scratch register
7741 or as an intermediate register. */
7742 if (rl->secondary_out_icode != CODE_FOR_nothing)
7744 /* We'd have to add extra code to handle this case. */
7745 gcc_assert (tertiary_reload < 0);
7747 emit_insn ((GEN_FCN (rl->secondary_out_icode)
7748 (real_old, second_reloadreg, reloadreg)));
7749 special = 1;
7751 else
7753 /* See if we need both a scratch and intermediate reload
7754 register. */
7756 enum insn_code tertiary_icode
7757 = rld[secondary_reload].secondary_out_icode;
7759 /* We'd have to add more code for quartary reloads. */
7760 gcc_assert (tertiary_reload < 0
7761 || rld[tertiary_reload].secondary_out_reload < 0);
7763 if (GET_MODE (reloadreg) != mode)
7764 reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);
7766 if (tertiary_icode != CODE_FOR_nothing)
7768 rtx third_reloadreg = rld[tertiary_reload].reg_rtx;
7770 /* Copy primary reload reg to secondary reload reg.
7771 (Note that these have been swapped above, then
7772 secondary reload reg to OLD using our insn.) */
7774 /* If REAL_OLD is a paradoxical SUBREG, remove it
7775 and try to put the opposite SUBREG on
7776 RELOADREG. */
7777 strip_paradoxical_subreg (&real_old, &reloadreg);
7779 gen_reload (reloadreg, second_reloadreg,
7780 rl->opnum, rl->when_needed);
7781 emit_insn ((GEN_FCN (tertiary_icode)
7782 (real_old, reloadreg, third_reloadreg)));
7783 special = 1;
7786 else
7788 /* Copy between the reload regs here and then to
7789 OUT later. */
7791 gen_reload (reloadreg, second_reloadreg,
7792 rl->opnum, rl->when_needed);
7793 if (tertiary_reload >= 0)
7795 rtx third_reloadreg = rld[tertiary_reload].reg_rtx;
7797 gen_reload (third_reloadreg, reloadreg,
7798 rl->opnum, rl->when_needed);
7799 reloadreg = third_reloadreg;
7806 /* Output the last reload insn. */
7807 if (! special)
7809 rtx set;
7811 /* Don't output the last reload if OLD is not the dest of
7812 INSN and is in the src and is clobbered by INSN. */
7813 if (! flag_expensive_optimizations
7814 || !REG_P (old)
7815 || !(set = single_set (insn))
7816 || rtx_equal_p (old, SET_DEST (set))
7817 || !reg_mentioned_p (old, SET_SRC (set))
7818 || !((REGNO (old) < FIRST_PSEUDO_REGISTER)
7819 && regno_clobbered_p (REGNO (old), insn, rl->mode, 0)))
7820 gen_reload (old, reloadreg, rl->opnum,
7821 rl->when_needed);
7824 /* Look at all insns we emitted, just to be safe. */
7825 for (p = get_insns (); p; p = NEXT_INSN (p))
7826 if (INSN_P (p))
7828 rtx pat = PATTERN (p);
7830 /* If this output reload doesn't come from a spill reg,
7831 clear any memory of reloaded copies of the pseudo reg.
7832 If this output reload comes from a spill reg,
7833 reg_has_output_reload will make this do nothing. */
7834 note_stores (pat, forget_old_reloads_1, NULL);
7836 if (reg_mentioned_p (rl_reg_rtx, pat))
7838 rtx set = single_set (insn);
7839 if (reload_spill_index[j] < 0
7840 && set
7841 && SET_SRC (set) == rl_reg_rtx)
7843 int src = REGNO (SET_SRC (set));
7845 reload_spill_index[j] = src;
7846 SET_HARD_REG_BIT (reg_is_output_reload, src);
7847 if (find_regno_note (insn, REG_DEAD, src))
7848 SET_HARD_REG_BIT (reg_reloaded_died, src);
7850 if (HARD_REGISTER_P (rl_reg_rtx))
7852 int s = rl->secondary_out_reload;
7853 set = single_set (p);
7854 /* If this reload copies only to the secondary reload
7855 register, the secondary reload does the actual
7856 store. */
7857 if (s >= 0 && set == NULL_RTX)
7858 /* We can't tell what function the secondary reload
7859 has and where the actual store to the pseudo is
7860 made; leave new_spill_reg_store alone. */
7862 else if (s >= 0
7863 && SET_SRC (set) == rl_reg_rtx
7864 && SET_DEST (set) == rld[s].reg_rtx)
7866 /* Usually the next instruction will be the
7867 secondary reload insn; if we can confirm
7868 that it is, setting new_spill_reg_store to
7869 that insn will allow an extra optimization. */
7870 rtx s_reg = rld[s].reg_rtx;
7871 rtx_insn *next = NEXT_INSN (p);
7872 rld[s].out = rl->out;
7873 rld[s].out_reg = rl->out_reg;
7874 set = single_set (next);
7875 if (set && SET_SRC (set) == s_reg
7876 && reload_reg_rtx_reaches_end_p (s_reg, s))
7878 SET_HARD_REG_BIT (reg_is_output_reload,
7879 REGNO (s_reg));
7880 new_spill_reg_store[REGNO (s_reg)] = next;
7883 else if (reload_reg_rtx_reaches_end_p (rl_reg_rtx, j))
7884 new_spill_reg_store[REGNO (rl_reg_rtx)] = p;
7889 if (rl->when_needed == RELOAD_OTHER)
7891 emit_insn (other_output_reload_insns[rl->opnum]);
7892 other_output_reload_insns[rl->opnum] = get_insns ();
7894 else
7895 output_reload_insns[rl->opnum] = get_insns ();
7897 if (cfun->can_throw_non_call_exceptions)
7898 copy_reg_eh_region_note_forward (insn, get_insns (), NULL);
7900 end_sequence ();
7903 /* Do input reloading for reload RL, which is for the insn described by CHAIN
7904 and has the number J. */
7905 static void
7906 do_input_reload (struct insn_chain *chain, struct reload *rl, int j)
7908 rtx_insn *insn = chain->insn;
7909 rtx old = (rl->in && MEM_P (rl->in)
7910 ? rl->in_reg : rl->in);
7911 rtx reg_rtx = rl->reg_rtx;
7913 if (old && reg_rtx)
7915 machine_mode mode;
7917 /* Determine the mode to reload in.
7918 This is very tricky because we have three to choose from.
7919 There is the mode the insn operand wants (rl->inmode).
7920 There is the mode of the reload register RELOADREG.
7921 There is the intrinsic mode of the operand, which we could find
7922 by stripping some SUBREGs.
7923 It turns out that RELOADREG's mode is irrelevant:
7924 we can change that arbitrarily.
7926 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
7927 then the reload reg may not support QImode moves, so use SImode.
7928 If foo is in memory due to spilling a pseudo reg, this is safe,
7929 because the QImode value is in the least significant part of a
7930 slot big enough for a SImode. If foo is some other sort of
7931 memory reference, then it is impossible to reload this case,
7932 so previous passes had better make sure this never happens.
7934 Then consider a one-word union which has SImode and one of its
7935 members is a float, being fetched as (SUBREG:SF union:SI).
7936 We must fetch that as SFmode because we could be loading into
7937 a float-only register. In this case OLD's mode is correct.
7939 Consider an immediate integer: it has VOIDmode. Here we need
7940 to get a mode from something else.
7942 In some cases, there is a fourth mode, the operand's
7943 containing mode. If the insn specifies a containing mode for
7944 this operand, it overrides all others.
7946 I am not sure whether the algorithm here is always right,
7947 but it does the right things in those cases. */
7949 mode = GET_MODE (old);
7950 if (mode == VOIDmode)
7951 mode = rl->inmode;
7953 /* We cannot use gen_lowpart_common since it can do the wrong thing
7954 when REG_RTX has a multi-word mode. Note that REG_RTX must
7955 always be a REG here. */
7956 if (GET_MODE (reg_rtx) != mode)
7957 reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
7959 reload_reg_rtx_for_input[j] = reg_rtx;
7961 if (old != 0
7962 /* AUTO_INC reloads need to be handled even if inherited. We got an
7963 AUTO_INC reload if reload_out is set but reload_out_reg isn't. */
7964 && (! reload_inherited[j] || (rl->out && ! rl->out_reg))
7965 && ! rtx_equal_p (reg_rtx, old)
7966 && reg_rtx != 0)
7967 emit_input_reload_insns (chain, rld + j, old, j);
7969 /* When inheriting a wider reload, we have a MEM in rl->in,
7970 e.g. inheriting a SImode output reload for
7971 (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10))) */
7972 if (optimize && reload_inherited[j] && rl->in
7973 && MEM_P (rl->in)
7974 && MEM_P (rl->in_reg)
7975 && reload_spill_index[j] >= 0
7976 && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
7977 rl->in = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];
7979 /* If we are reloading a register that was recently stored in with an
7980 output-reload, see if we can prove there was
7981 actually no need to store the old value in it. */
7983 if (optimize
7984 && (reload_inherited[j] || reload_override_in[j])
7985 && reg_rtx
7986 && REG_P (reg_rtx)
7987 && spill_reg_store[REGNO (reg_rtx)] != 0
7988 #if 0
7989 /* There doesn't seem to be any reason to restrict this to pseudos
7990 and doing so loses in the case where we are copying from a
7991 register of the wrong class. */
7992 && !HARD_REGISTER_P (spill_reg_stored_to[REGNO (reg_rtx)])
7993 #endif
7994 /* The insn might have already some references to stackslots
7995 replaced by MEMs, while reload_out_reg still names the
7996 original pseudo. */
7997 && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (reg_rtx)])
7998 || rtx_equal_p (spill_reg_stored_to[REGNO (reg_rtx)], rl->out_reg)))
7999 delete_output_reload (insn, j, REGNO (reg_rtx), reg_rtx);
8002 /* Do output reloading for reload RL, which is for the insn described by
8003 CHAIN and has the number J.
8004 ??? At some point we need to support handling output reloads of
8005 JUMP_INSNs or insns that set cc0. */
8006 static void
8007 do_output_reload (struct insn_chain *chain, struct reload *rl, int j)
8009 rtx note, old;
8010 rtx_insn *insn = chain->insn;
8011 /* If this is an output reload that stores something that is
8012 not loaded in this same reload, see if we can eliminate a previous
8013 store. */
8014 rtx pseudo = rl->out_reg;
8015 rtx reg_rtx = rl->reg_rtx;
8017 if (rl->out && reg_rtx)
8019 machine_mode mode;
8021 /* Determine the mode to reload in.
8022 See comments above (for input reloading). */
8023 mode = GET_MODE (rl->out);
8024 if (mode == VOIDmode)
8026 /* VOIDmode should never happen for an output. */
8027 if (asm_noperands (PATTERN (insn)) < 0)
8028 /* It's the compiler's fault. */
8029 fatal_insn ("VOIDmode on an output", insn);
8030 error_for_asm (insn, "output operand is constant in %<asm%>");
8031 /* Prevent crash--use something we know is valid. */
8032 mode = word_mode;
8033 rl->out = gen_rtx_REG (mode, REGNO (reg_rtx));
8035 if (GET_MODE (reg_rtx) != mode)
8036 reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
8038 reload_reg_rtx_for_output[j] = reg_rtx;
8040 if (pseudo
8041 && optimize
8042 && REG_P (pseudo)
8043 && ! rtx_equal_p (rl->in_reg, pseudo)
8044 && REGNO (pseudo) >= FIRST_PSEUDO_REGISTER
8045 && reg_last_reload_reg[REGNO (pseudo)])
8047 int pseudo_no = REGNO (pseudo);
8048 int last_regno = REGNO (reg_last_reload_reg[pseudo_no]);
8050 /* We don't need to test full validity of last_regno for
8051 inherit here; we only want to know if the store actually
8052 matches the pseudo. */
8053 if (TEST_HARD_REG_BIT (reg_reloaded_valid, last_regno)
8054 && reg_reloaded_contents[last_regno] == pseudo_no
8055 && spill_reg_store[last_regno]
8056 && rtx_equal_p (pseudo, spill_reg_stored_to[last_regno]))
8057 delete_output_reload (insn, j, last_regno, reg_rtx);
8060 old = rl->out_reg;
8061 if (old == 0
8062 || reg_rtx == 0
8063 || rtx_equal_p (old, reg_rtx))
8064 return;
8066 /* An output operand that dies right away does need a reload,
8067 but need not be copied from it. Show the new location in the
8068 REG_UNUSED note. */
8069 if ((REG_P (old) || GET_CODE (old) == SCRATCH)
8070 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
8072 XEXP (note, 0) = reg_rtx;
8073 return;
8075 /* Likewise for a SUBREG of an operand that dies. */
8076 else if (GET_CODE (old) == SUBREG
8077 && REG_P (SUBREG_REG (old))
8078 && 0 != (note = find_reg_note (insn, REG_UNUSED,
8079 SUBREG_REG (old))))
8081 XEXP (note, 0) = gen_lowpart_common (GET_MODE (old), reg_rtx);
8082 return;
8084 else if (GET_CODE (old) == SCRATCH)
8085 /* If we aren't optimizing, there won't be a REG_UNUSED note,
8086 but we don't want to make an output reload. */
8087 return;
8089 /* If is a JUMP_INSN, we can't support output reloads yet. */
8090 gcc_assert (NONJUMP_INSN_P (insn));
8092 emit_output_reload_insns (chain, rld + j, j);
8095 /* A reload copies values of MODE from register SRC to register DEST.
8096 Return true if it can be treated for inheritance purposes like a
8097 group of reloads, each one reloading a single hard register. The
8098 caller has already checked that (reg:MODE SRC) and (reg:MODE DEST)
8099 occupy the same number of hard registers. */
8101 static bool
8102 inherit_piecemeal_p (int dest ATTRIBUTE_UNUSED,
8103 int src ATTRIBUTE_UNUSED,
8104 machine_mode mode ATTRIBUTE_UNUSED)
8106 #ifdef CANNOT_CHANGE_MODE_CLASS
8107 return (!REG_CANNOT_CHANGE_MODE_P (dest, mode, reg_raw_mode[dest])
8108 && !REG_CANNOT_CHANGE_MODE_P (src, mode, reg_raw_mode[src]));
8109 #else
8110 return true;
8111 #endif
8114 /* Output insns to reload values in and out of the chosen reload regs. */
8116 static void
8117 emit_reload_insns (struct insn_chain *chain)
8119 rtx_insn *insn = chain->insn;
8121 int j;
8123 CLEAR_HARD_REG_SET (reg_reloaded_died);
8125 for (j = 0; j < reload_n_operands; j++)
8126 input_reload_insns[j] = input_address_reload_insns[j]
8127 = inpaddr_address_reload_insns[j]
8128 = output_reload_insns[j] = output_address_reload_insns[j]
8129 = outaddr_address_reload_insns[j]
8130 = other_output_reload_insns[j] = 0;
8131 other_input_address_reload_insns = 0;
8132 other_input_reload_insns = 0;
8133 operand_reload_insns = 0;
8134 other_operand_reload_insns = 0;
8136 /* Dump reloads into the dump file. */
8137 if (dump_file)
8139 fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
8140 debug_reload_to_stream (dump_file);
8143 for (j = 0; j < n_reloads; j++)
8144 if (rld[j].reg_rtx && HARD_REGISTER_P (rld[j].reg_rtx))
8146 unsigned int i;
8148 for (i = REGNO (rld[j].reg_rtx); i < END_REGNO (rld[j].reg_rtx); i++)
8149 new_spill_reg_store[i] = 0;
8152 /* Now output the instructions to copy the data into and out of the
8153 reload registers. Do these in the order that the reloads were reported,
8154 since reloads of base and index registers precede reloads of operands
8155 and the operands may need the base and index registers reloaded. */
8157 for (j = 0; j < n_reloads; j++)
8159 do_input_reload (chain, rld + j, j);
8160 do_output_reload (chain, rld + j, j);
8163 /* Now write all the insns we made for reloads in the order expected by
8164 the allocation functions. Prior to the insn being reloaded, we write
8165 the following reloads:
8167 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
8169 RELOAD_OTHER reloads.
8171 For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
8172 by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
8173 RELOAD_FOR_INPUT reload for the operand.
8175 RELOAD_FOR_OPADDR_ADDRS reloads.
8177 RELOAD_FOR_OPERAND_ADDRESS reloads.
8179 After the insn being reloaded, we write the following:
8181 For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
8182 by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
8183 RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
8184 reloads for the operand. The RELOAD_OTHER output reloads are
8185 output in descending order by reload number. */
8187 emit_insn_before (other_input_address_reload_insns, insn);
8188 emit_insn_before (other_input_reload_insns, insn);
8190 for (j = 0; j < reload_n_operands; j++)
8192 emit_insn_before (inpaddr_address_reload_insns[j], insn);
8193 emit_insn_before (input_address_reload_insns[j], insn);
8194 emit_insn_before (input_reload_insns[j], insn);
8197 emit_insn_before (other_operand_reload_insns, insn);
8198 emit_insn_before (operand_reload_insns, insn);
8200 for (j = 0; j < reload_n_operands; j++)
8202 rtx_insn *x = emit_insn_after (outaddr_address_reload_insns[j], insn);
8203 x = emit_insn_after (output_address_reload_insns[j], x);
8204 x = emit_insn_after (output_reload_insns[j], x);
8205 emit_insn_after (other_output_reload_insns[j], x);
8208 /* For all the spill regs newly reloaded in this instruction,
8209 record what they were reloaded from, so subsequent instructions
8210 can inherit the reloads.
8212 Update spill_reg_store for the reloads of this insn.
8213 Copy the elements that were updated in the loop above. */
8215 for (j = 0; j < n_reloads; j++)
8217 int r = reload_order[j];
8218 int i = reload_spill_index[r];
8220 /* If this is a non-inherited input reload from a pseudo, we must
8221 clear any memory of a previous store to the same pseudo. Only do
8222 something if there will not be an output reload for the pseudo
8223 being reloaded. */
8224 if (rld[r].in_reg != 0
8225 && ! (reload_inherited[r] || reload_override_in[r]))
8227 rtx reg = rld[r].in_reg;
8229 if (GET_CODE (reg) == SUBREG)
8230 reg = SUBREG_REG (reg);
8232 if (REG_P (reg)
8233 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
8234 && !REGNO_REG_SET_P (&reg_has_output_reload, REGNO (reg)))
8236 int nregno = REGNO (reg);
8238 if (reg_last_reload_reg[nregno])
8240 int last_regno = REGNO (reg_last_reload_reg[nregno]);
8242 if (reg_reloaded_contents[last_regno] == nregno)
8243 spill_reg_store[last_regno] = 0;
8248 /* I is nonneg if this reload used a register.
8249 If rld[r].reg_rtx is 0, this is an optional reload
8250 that we opted to ignore. */
8252 if (i >= 0 && rld[r].reg_rtx != 0)
8254 int nr = hard_regno_nregs[i][GET_MODE (rld[r].reg_rtx)];
8255 int k;
8257 /* For a multi register reload, we need to check if all or part
8258 of the value lives to the end. */
8259 for (k = 0; k < nr; k++)
8260 if (reload_reg_reaches_end_p (i + k, r))
8261 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
8263 /* Maybe the spill reg contains a copy of reload_out. */
8264 if (rld[r].out != 0
8265 && (REG_P (rld[r].out)
8266 || (rld[r].out_reg
8267 ? REG_P (rld[r].out_reg)
8268 /* The reload value is an auto-modification of
8269 some kind. For PRE_INC, POST_INC, PRE_DEC
8270 and POST_DEC, we record an equivalence
8271 between the reload register and the operand
8272 on the optimistic assumption that we can make
8273 the equivalence hold. reload_as_needed must
8274 then either make it hold or invalidate the
8275 equivalence.
8277 PRE_MODIFY and POST_MODIFY addresses are reloaded
8278 somewhat differently, and allowing them here leads
8279 to problems. */
8280 : (GET_CODE (rld[r].out) != POST_MODIFY
8281 && GET_CODE (rld[r].out) != PRE_MODIFY))))
8283 rtx reg;
8285 reg = reload_reg_rtx_for_output[r];
8286 if (reload_reg_rtx_reaches_end_p (reg, r))
8288 machine_mode mode = GET_MODE (reg);
8289 int regno = REGNO (reg);
8290 int nregs = hard_regno_nregs[regno][mode];
8291 rtx out = (REG_P (rld[r].out)
8292 ? rld[r].out
8293 : rld[r].out_reg
8294 ? rld[r].out_reg
8295 /* AUTO_INC */ : XEXP (rld[r].in_reg, 0));
8296 int out_regno = REGNO (out);
8297 int out_nregs = (!HARD_REGISTER_NUM_P (out_regno) ? 1
8298 : hard_regno_nregs[out_regno][mode]);
8299 bool piecemeal;
8301 spill_reg_store[regno] = new_spill_reg_store[regno];
8302 spill_reg_stored_to[regno] = out;
8303 reg_last_reload_reg[out_regno] = reg;
8305 piecemeal = (HARD_REGISTER_NUM_P (out_regno)
8306 && nregs == out_nregs
8307 && inherit_piecemeal_p (out_regno, regno, mode));
8309 /* If OUT_REGNO is a hard register, it may occupy more than
8310 one register. If it does, say what is in the
8311 rest of the registers assuming that both registers
8312 agree on how many words the object takes. If not,
8313 invalidate the subsequent registers. */
8315 if (HARD_REGISTER_NUM_P (out_regno))
8316 for (k = 1; k < out_nregs; k++)
8317 reg_last_reload_reg[out_regno + k]
8318 = (piecemeal ? regno_reg_rtx[regno + k] : 0);
8320 /* Now do the inverse operation. */
8321 for (k = 0; k < nregs; k++)
8323 CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
8324 reg_reloaded_contents[regno + k]
8325 = (!HARD_REGISTER_NUM_P (out_regno) || !piecemeal
8326 ? out_regno
8327 : out_regno + k);
8328 reg_reloaded_insn[regno + k] = insn;
8329 SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
8330 if (HARD_REGNO_CALL_PART_CLOBBERED (regno + k, mode))
8331 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8332 regno + k);
8333 else
8334 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8335 regno + k);
8339 /* Maybe the spill reg contains a copy of reload_in. Only do
8340 something if there will not be an output reload for
8341 the register being reloaded. */
8342 else if (rld[r].out_reg == 0
8343 && rld[r].in != 0
8344 && ((REG_P (rld[r].in)
8345 && !HARD_REGISTER_P (rld[r].in)
8346 && !REGNO_REG_SET_P (&reg_has_output_reload,
8347 REGNO (rld[r].in)))
8348 || (REG_P (rld[r].in_reg)
8349 && !REGNO_REG_SET_P (&reg_has_output_reload,
8350 REGNO (rld[r].in_reg))))
8351 && !reg_set_p (reload_reg_rtx_for_input[r], PATTERN (insn)))
8353 rtx reg;
8355 reg = reload_reg_rtx_for_input[r];
8356 if (reload_reg_rtx_reaches_end_p (reg, r))
8358 machine_mode mode;
8359 int regno;
8360 int nregs;
8361 int in_regno;
8362 int in_nregs;
8363 rtx in;
8364 bool piecemeal;
8366 mode = GET_MODE (reg);
8367 regno = REGNO (reg);
8368 nregs = hard_regno_nregs[regno][mode];
8369 if (REG_P (rld[r].in)
8370 && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER)
8371 in = rld[r].in;
8372 else if (REG_P (rld[r].in_reg))
8373 in = rld[r].in_reg;
8374 else
8375 in = XEXP (rld[r].in_reg, 0);
8376 in_regno = REGNO (in);
8378 in_nregs = (!HARD_REGISTER_NUM_P (in_regno) ? 1
8379 : hard_regno_nregs[in_regno][mode]);
8381 reg_last_reload_reg[in_regno] = reg;
8383 piecemeal = (HARD_REGISTER_NUM_P (in_regno)
8384 && nregs == in_nregs
8385 && inherit_piecemeal_p (regno, in_regno, mode));
8387 if (HARD_REGISTER_NUM_P (in_regno))
8388 for (k = 1; k < in_nregs; k++)
8389 reg_last_reload_reg[in_regno + k]
8390 = (piecemeal ? regno_reg_rtx[regno + k] : 0);
8392 /* Unless we inherited this reload, show we haven't
8393 recently done a store.
8394 Previous stores of inherited auto_inc expressions
8395 also have to be discarded. */
8396 if (! reload_inherited[r]
8397 || (rld[r].out && ! rld[r].out_reg))
8398 spill_reg_store[regno] = 0;
8400 for (k = 0; k < nregs; k++)
8402 CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
8403 reg_reloaded_contents[regno + k]
8404 = (!HARD_REGISTER_NUM_P (in_regno) || !piecemeal
8405 ? in_regno
8406 : in_regno + k);
8407 reg_reloaded_insn[regno + k] = insn;
8408 SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
8409 if (HARD_REGNO_CALL_PART_CLOBBERED (regno + k, mode))
8410 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8411 regno + k);
8412 else
8413 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8414 regno + k);
8420 /* The following if-statement was #if 0'd in 1.34 (or before...).
8421 It's reenabled in 1.35 because supposedly nothing else
8422 deals with this problem. */
8424 /* If a register gets output-reloaded from a non-spill register,
8425 that invalidates any previous reloaded copy of it.
8426 But forget_old_reloads_1 won't get to see it, because
8427 it thinks only about the original insn. So invalidate it here.
8428 Also do the same thing for RELOAD_OTHER constraints where the
8429 output is discarded. */
8430 if (i < 0
8431 && ((rld[r].out != 0
8432 && (REG_P (rld[r].out)
8433 || (MEM_P (rld[r].out)
8434 && REG_P (rld[r].out_reg))))
8435 || (rld[r].out == 0 && rld[r].out_reg
8436 && REG_P (rld[r].out_reg))))
8438 rtx out = ((rld[r].out && REG_P (rld[r].out))
8439 ? rld[r].out : rld[r].out_reg);
8440 int out_regno = REGNO (out);
8441 machine_mode mode = GET_MODE (out);
8443 /* REG_RTX is now set or clobbered by the main instruction.
8444 As the comment above explains, forget_old_reloads_1 only
8445 sees the original instruction, and there is no guarantee
8446 that the original instruction also clobbered REG_RTX.
8447 For example, if find_reloads sees that the input side of
8448 a matched operand pair dies in this instruction, it may
8449 use the input register as the reload register.
8451 Calling forget_old_reloads_1 is a waste of effort if
8452 REG_RTX is also the output register.
8454 If we know that REG_RTX holds the value of a pseudo
8455 register, the code after the call will record that fact. */
8456 if (rld[r].reg_rtx && rld[r].reg_rtx != out)
8457 forget_old_reloads_1 (rld[r].reg_rtx, NULL_RTX, NULL);
8459 if (!HARD_REGISTER_NUM_P (out_regno))
8461 rtx src_reg;
8462 rtx_insn *store_insn = NULL;
8464 reg_last_reload_reg[out_regno] = 0;
8466 /* If we can find a hard register that is stored, record
8467 the storing insn so that we may delete this insn with
8468 delete_output_reload. */
8469 src_reg = reload_reg_rtx_for_output[r];
8471 if (src_reg)
8473 if (reload_reg_rtx_reaches_end_p (src_reg, r))
8474 store_insn = new_spill_reg_store[REGNO (src_reg)];
8475 else
8476 src_reg = NULL_RTX;
8478 else
8480 /* If this is an optional reload, try to find the
8481 source reg from an input reload. */
8482 rtx set = single_set (insn);
8483 if (set && SET_DEST (set) == rld[r].out)
8485 int k;
8487 src_reg = SET_SRC (set);
8488 store_insn = insn;
8489 for (k = 0; k < n_reloads; k++)
8491 if (rld[k].in == src_reg)
8493 src_reg = reload_reg_rtx_for_input[k];
8494 break;
8499 if (src_reg && REG_P (src_reg)
8500 && REGNO (src_reg) < FIRST_PSEUDO_REGISTER)
8502 int src_regno, src_nregs, k;
8503 rtx note;
8505 gcc_assert (GET_MODE (src_reg) == mode);
8506 src_regno = REGNO (src_reg);
8507 src_nregs = hard_regno_nregs[src_regno][mode];
8508 /* The place where to find a death note varies with
8509 PRESERVE_DEATH_INFO_REGNO_P . The condition is not
8510 necessarily checked exactly in the code that moves
8511 notes, so just check both locations. */
8512 note = find_regno_note (insn, REG_DEAD, src_regno);
8513 if (! note && store_insn)
8514 note = find_regno_note (store_insn, REG_DEAD, src_regno);
8515 for (k = 0; k < src_nregs; k++)
8517 spill_reg_store[src_regno + k] = store_insn;
8518 spill_reg_stored_to[src_regno + k] = out;
8519 reg_reloaded_contents[src_regno + k] = out_regno;
8520 reg_reloaded_insn[src_regno + k] = store_insn;
8521 CLEAR_HARD_REG_BIT (reg_reloaded_dead, src_regno + k);
8522 SET_HARD_REG_BIT (reg_reloaded_valid, src_regno + k);
8523 if (HARD_REGNO_CALL_PART_CLOBBERED (src_regno + k,
8524 mode))
8525 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8526 src_regno + k);
8527 else
8528 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8529 src_regno + k);
8530 SET_HARD_REG_BIT (reg_is_output_reload, src_regno + k);
8531 if (note)
8532 SET_HARD_REG_BIT (reg_reloaded_died, src_regno);
8533 else
8534 CLEAR_HARD_REG_BIT (reg_reloaded_died, src_regno);
8536 reg_last_reload_reg[out_regno] = src_reg;
8537 /* We have to set reg_has_output_reload here, or else
8538 forget_old_reloads_1 will clear reg_last_reload_reg
8539 right away. */
8540 SET_REGNO_REG_SET (&reg_has_output_reload,
8541 out_regno);
8544 else
8546 int k, out_nregs = hard_regno_nregs[out_regno][mode];
8548 for (k = 0; k < out_nregs; k++)
8549 reg_last_reload_reg[out_regno + k] = 0;
8553 IOR_HARD_REG_SET (reg_reloaded_dead, reg_reloaded_died);
8556 /* Go through the motions to emit INSN and test if it is strictly valid.
8557 Return the emitted insn if valid, else return NULL. */
8559 static rtx_insn *
8560 emit_insn_if_valid_for_reload (rtx pat)
8562 rtx_insn *last = get_last_insn ();
8563 int code;
8565 rtx_insn *insn = emit_insn (pat);
8566 code = recog_memoized (insn);
8568 if (code >= 0)
8570 extract_insn (insn);
8571 /* We want constrain operands to treat this insn strictly in its
8572 validity determination, i.e., the way it would after reload has
8573 completed. */
8574 if (constrain_operands (1, get_enabled_alternatives (insn)))
8575 return insn;
8578 delete_insns_since (last);
8579 return NULL;
8582 /* Emit code to perform a reload from IN (which may be a reload register) to
8583 OUT (which may also be a reload register). IN or OUT is from operand
8584 OPNUM with reload type TYPE.
8586 Returns first insn emitted. */
8588 static rtx_insn *
8589 gen_reload (rtx out, rtx in, int opnum, enum reload_type type)
8591 rtx_insn *last = get_last_insn ();
8592 rtx_insn *tem;
8593 #ifdef SECONDARY_MEMORY_NEEDED
8594 rtx tem1, tem2;
8595 #endif
8597 /* If IN is a paradoxical SUBREG, remove it and try to put the
8598 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
8599 if (!strip_paradoxical_subreg (&in, &out))
8600 strip_paradoxical_subreg (&out, &in);
8602 /* How to do this reload can get quite tricky. Normally, we are being
8603 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
8604 register that didn't get a hard register. In that case we can just
8605 call emit_move_insn.
8607 We can also be asked to reload a PLUS that adds a register or a MEM to
8608 another register, constant or MEM. This can occur during frame pointer
8609 elimination and while reloading addresses. This case is handled by
8610 trying to emit a single insn to perform the add. If it is not valid,
8611 we use a two insn sequence.
8613 Or we can be asked to reload an unary operand that was a fragment of
8614 an addressing mode, into a register. If it isn't recognized as-is,
8615 we try making the unop operand and the reload-register the same:
8616 (set reg:X (unop:X expr:Y))
8617 -> (set reg:Y expr:Y) (set reg:X (unop:X reg:Y)).
8619 Finally, we could be called to handle an 'o' constraint by putting
8620 an address into a register. In that case, we first try to do this
8621 with a named pattern of "reload_load_address". If no such pattern
8622 exists, we just emit a SET insn and hope for the best (it will normally
8623 be valid on machines that use 'o').
8625 This entire process is made complex because reload will never
8626 process the insns we generate here and so we must ensure that
8627 they will fit their constraints and also by the fact that parts of
8628 IN might be being reloaded separately and replaced with spill registers.
8629 Because of this, we are, in some sense, just guessing the right approach
8630 here. The one listed above seems to work.
8632 ??? At some point, this whole thing needs to be rethought. */
8634 if (GET_CODE (in) == PLUS
8635 && (REG_P (XEXP (in, 0))
8636 || GET_CODE (XEXP (in, 0)) == SUBREG
8637 || MEM_P (XEXP (in, 0)))
8638 && (REG_P (XEXP (in, 1))
8639 || GET_CODE (XEXP (in, 1)) == SUBREG
8640 || CONSTANT_P (XEXP (in, 1))
8641 || MEM_P (XEXP (in, 1))))
8643 /* We need to compute the sum of a register or a MEM and another
8644 register, constant, or MEM, and put it into the reload
8645 register. The best possible way of doing this is if the machine
8646 has a three-operand ADD insn that accepts the required operands.
8648 The simplest approach is to try to generate such an insn and see if it
8649 is recognized and matches its constraints. If so, it can be used.
8651 It might be better not to actually emit the insn unless it is valid,
8652 but we need to pass the insn as an operand to `recog' and
8653 `extract_insn' and it is simpler to emit and then delete the insn if
8654 not valid than to dummy things up. */
8656 rtx op0, op1, tem;
8657 rtx_insn *insn;
8658 enum insn_code code;
8660 op0 = find_replacement (&XEXP (in, 0));
8661 op1 = find_replacement (&XEXP (in, 1));
8663 /* Since constraint checking is strict, commutativity won't be
8664 checked, so we need to do that here to avoid spurious failure
8665 if the add instruction is two-address and the second operand
8666 of the add is the same as the reload reg, which is frequently
8667 the case. If the insn would be A = B + A, rearrange it so
8668 it will be A = A + B as constrain_operands expects. */
8670 if (REG_P (XEXP (in, 1))
8671 && REGNO (out) == REGNO (XEXP (in, 1)))
8672 tem = op0, op0 = op1, op1 = tem;
8674 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
8675 in = gen_rtx_PLUS (GET_MODE (in), op0, op1);
8677 insn = emit_insn_if_valid_for_reload (gen_rtx_SET (out, in));
8678 if (insn)
8679 return insn;
8681 /* If that failed, we must use a conservative two-insn sequence.
8683 Use a move to copy one operand into the reload register. Prefer
8684 to reload a constant, MEM or pseudo since the move patterns can
8685 handle an arbitrary operand. If OP1 is not a constant, MEM or
8686 pseudo and OP1 is not a valid operand for an add instruction, then
8687 reload OP1.
8689 After reloading one of the operands into the reload register, add
8690 the reload register to the output register.
8692 If there is another way to do this for a specific machine, a
8693 DEFINE_PEEPHOLE should be specified that recognizes the sequence
8694 we emit below. */
8696 code = optab_handler (add_optab, GET_MODE (out));
8698 if (CONSTANT_P (op1) || MEM_P (op1) || GET_CODE (op1) == SUBREG
8699 || (REG_P (op1)
8700 && REGNO (op1) >= FIRST_PSEUDO_REGISTER)
8701 || (code != CODE_FOR_nothing
8702 && !insn_operand_matches (code, 2, op1)))
8703 tem = op0, op0 = op1, op1 = tem;
8705 gen_reload (out, op0, opnum, type);
8707 /* If OP0 and OP1 are the same, we can use OUT for OP1.
8708 This fixes a problem on the 32K where the stack pointer cannot
8709 be used as an operand of an add insn. */
8711 if (rtx_equal_p (op0, op1))
8712 op1 = out;
8714 insn = emit_insn_if_valid_for_reload (gen_add2_insn (out, op1));
8715 if (insn)
8717 /* Add a REG_EQUIV note so that find_equiv_reg can find it. */
8718 set_dst_reg_note (insn, REG_EQUIV, in, out);
8719 return insn;
8722 /* If that failed, copy the address register to the reload register.
8723 Then add the constant to the reload register. */
8725 gcc_assert (!reg_overlap_mentioned_p (out, op0));
8726 gen_reload (out, op1, opnum, type);
8727 insn = emit_insn (gen_add2_insn (out, op0));
8728 set_dst_reg_note (insn, REG_EQUIV, in, out);
8731 #ifdef SECONDARY_MEMORY_NEEDED
8732 /* If we need a memory location to do the move, do it that way. */
8733 else if ((tem1 = replaced_subreg (in), tem2 = replaced_subreg (out),
8734 (REG_P (tem1) && REG_P (tem2)))
8735 && REGNO (tem1) < FIRST_PSEUDO_REGISTER
8736 && REGNO (tem2) < FIRST_PSEUDO_REGISTER
8737 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (tem1)),
8738 REGNO_REG_CLASS (REGNO (tem2)),
8739 GET_MODE (out)))
8741 /* Get the memory to use and rewrite both registers to its mode. */
8742 rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
8744 if (GET_MODE (loc) != GET_MODE (out))
8745 out = gen_rtx_REG (GET_MODE (loc), reg_or_subregno (out));
8747 if (GET_MODE (loc) != GET_MODE (in))
8748 in = gen_rtx_REG (GET_MODE (loc), reg_or_subregno (in));
8750 gen_reload (loc, in, opnum, type);
8751 gen_reload (out, loc, opnum, type);
8753 #endif
8754 else if (REG_P (out) && UNARY_P (in))
8756 rtx insn;
8757 rtx op1;
8758 rtx out_moded;
8759 rtx_insn *set;
8761 op1 = find_replacement (&XEXP (in, 0));
8762 if (op1 != XEXP (in, 0))
8763 in = gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in), op1);
8765 /* First, try a plain SET. */
8766 set = emit_insn_if_valid_for_reload (gen_rtx_SET (out, in));
8767 if (set)
8768 return set;
8770 /* If that failed, move the inner operand to the reload
8771 register, and try the same unop with the inner expression
8772 replaced with the reload register. */
8774 if (GET_MODE (op1) != GET_MODE (out))
8775 out_moded = gen_rtx_REG (GET_MODE (op1), REGNO (out));
8776 else
8777 out_moded = out;
8779 gen_reload (out_moded, op1, opnum, type);
8781 insn = gen_rtx_SET (out, gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in),
8782 out_moded));
8783 insn = emit_insn_if_valid_for_reload (insn);
8784 if (insn)
8786 set_unique_reg_note (insn, REG_EQUIV, in);
8787 return as_a <rtx_insn *> (insn);
8790 fatal_insn ("failure trying to reload:", set);
8792 /* If IN is a simple operand, use gen_move_insn. */
8793 else if (OBJECT_P (in) || GET_CODE (in) == SUBREG)
8795 tem = emit_insn (gen_move_insn (out, in));
8796 /* IN may contain a LABEL_REF, if so add a REG_LABEL_OPERAND note. */
8797 mark_jump_label (in, tem, 0);
8800 else if (targetm.have_reload_load_address ())
8801 emit_insn (targetm.gen_reload_load_address (out, in));
8803 /* Otherwise, just write (set OUT IN) and hope for the best. */
8804 else
8805 emit_insn (gen_rtx_SET (out, in));
8807 /* Return the first insn emitted.
8808 We can not just return get_last_insn, because there may have
8809 been multiple instructions emitted. Also note that gen_move_insn may
8810 emit more than one insn itself, so we can not assume that there is one
8811 insn emitted per emit_insn_before call. */
8813 return last ? NEXT_INSN (last) : get_insns ();
8816 /* Delete a previously made output-reload whose result we now believe
8817 is not needed. First we double-check.
8819 INSN is the insn now being processed.
8820 LAST_RELOAD_REG is the hard register number for which we want to delete
8821 the last output reload.
8822 J is the reload-number that originally used REG. The caller has made
8823 certain that reload J doesn't use REG any longer for input.
8824 NEW_RELOAD_REG is reload register that reload J is using for REG. */
8826 static void
8827 delete_output_reload (rtx_insn *insn, int j, int last_reload_reg,
8828 rtx new_reload_reg)
8830 rtx_insn *output_reload_insn = spill_reg_store[last_reload_reg];
8831 rtx reg = spill_reg_stored_to[last_reload_reg];
8832 int k;
8833 int n_occurrences;
8834 int n_inherited = 0;
8835 rtx substed;
8836 unsigned regno;
8837 int nregs;
8839 /* It is possible that this reload has been only used to set another reload
8840 we eliminated earlier and thus deleted this instruction too. */
8841 if (output_reload_insn->deleted ())
8842 return;
8844 /* Get the raw pseudo-register referred to. */
8846 while (GET_CODE (reg) == SUBREG)
8847 reg = SUBREG_REG (reg);
8848 substed = reg_equiv_memory_loc (REGNO (reg));
8850 /* This is unsafe if the operand occurs more often in the current
8851 insn than it is inherited. */
8852 for (k = n_reloads - 1; k >= 0; k--)
8854 rtx reg2 = rld[k].in;
8855 if (! reg2)
8856 continue;
8857 if (MEM_P (reg2) || reload_override_in[k])
8858 reg2 = rld[k].in_reg;
8860 if (AUTO_INC_DEC && rld[k].out && ! rld[k].out_reg)
8861 reg2 = XEXP (rld[k].in_reg, 0);
8863 while (GET_CODE (reg2) == SUBREG)
8864 reg2 = SUBREG_REG (reg2);
8865 if (rtx_equal_p (reg2, reg))
8867 if (reload_inherited[k] || reload_override_in[k] || k == j)
8868 n_inherited++;
8869 else
8870 return;
8873 n_occurrences = count_occurrences (PATTERN (insn), reg, 0);
8874 if (CALL_P (insn) && CALL_INSN_FUNCTION_USAGE (insn))
8875 n_occurrences += count_occurrences (CALL_INSN_FUNCTION_USAGE (insn),
8876 reg, 0);
8877 if (substed)
8878 n_occurrences += count_occurrences (PATTERN (insn),
8879 eliminate_regs (substed, VOIDmode,
8880 NULL_RTX), 0);
8881 for (rtx i1 = reg_equiv_alt_mem_list (REGNO (reg)); i1; i1 = XEXP (i1, 1))
8883 gcc_assert (!rtx_equal_p (XEXP (i1, 0), substed));
8884 n_occurrences += count_occurrences (PATTERN (insn), XEXP (i1, 0), 0);
8886 if (n_occurrences > n_inherited)
8887 return;
8889 regno = REGNO (reg);
8890 if (regno >= FIRST_PSEUDO_REGISTER)
8891 nregs = 1;
8892 else
8893 nregs = hard_regno_nregs[regno][GET_MODE (reg)];
8895 /* If the pseudo-reg we are reloading is no longer referenced
8896 anywhere between the store into it and here,
8897 and we're within the same basic block, then the value can only
8898 pass through the reload reg and end up here.
8899 Otherwise, give up--return. */
8900 for (rtx_insn *i1 = NEXT_INSN (output_reload_insn);
8901 i1 != insn; i1 = NEXT_INSN (i1))
8903 if (NOTE_INSN_BASIC_BLOCK_P (i1))
8904 return;
8905 if ((NONJUMP_INSN_P (i1) || CALL_P (i1))
8906 && refers_to_regno_p (regno, regno + nregs, PATTERN (i1), NULL))
8908 /* If this is USE in front of INSN, we only have to check that
8909 there are no more references than accounted for by inheritance. */
8910 while (NONJUMP_INSN_P (i1) && GET_CODE (PATTERN (i1)) == USE)
8912 n_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
8913 i1 = NEXT_INSN (i1);
8915 if (n_occurrences <= n_inherited && i1 == insn)
8916 break;
8917 return;
8921 /* We will be deleting the insn. Remove the spill reg information. */
8922 for (k = hard_regno_nregs[last_reload_reg][GET_MODE (reg)]; k-- > 0; )
8924 spill_reg_store[last_reload_reg + k] = 0;
8925 spill_reg_stored_to[last_reload_reg + k] = 0;
8928 /* The caller has already checked that REG dies or is set in INSN.
8929 It has also checked that we are optimizing, and thus some
8930 inaccuracies in the debugging information are acceptable.
8931 So we could just delete output_reload_insn. But in some cases
8932 we can improve the debugging information without sacrificing
8933 optimization - maybe even improving the code: See if the pseudo
8934 reg has been completely replaced with reload regs. If so, delete
8935 the store insn and forget we had a stack slot for the pseudo. */
8936 if (rld[j].out != rld[j].in
8937 && REG_N_DEATHS (REGNO (reg)) == 1
8938 && REG_N_SETS (REGNO (reg)) == 1
8939 && REG_BASIC_BLOCK (REGNO (reg)) >= NUM_FIXED_BLOCKS
8940 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
8942 rtx_insn *i2;
8944 /* We know that it was used only between here and the beginning of
8945 the current basic block. (We also know that the last use before
8946 INSN was the output reload we are thinking of deleting, but never
8947 mind that.) Search that range; see if any ref remains. */
8948 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
8950 rtx set = single_set (i2);
8952 /* Uses which just store in the pseudo don't count,
8953 since if they are the only uses, they are dead. */
8954 if (set != 0 && SET_DEST (set) == reg)
8955 continue;
8956 if (LABEL_P (i2) || JUMP_P (i2))
8957 break;
8958 if ((NONJUMP_INSN_P (i2) || CALL_P (i2))
8959 && reg_mentioned_p (reg, PATTERN (i2)))
8961 /* Some other ref remains; just delete the output reload we
8962 know to be dead. */
8963 delete_address_reloads (output_reload_insn, insn);
8964 delete_insn (output_reload_insn);
8965 return;
8969 /* Delete the now-dead stores into this pseudo. Note that this
8970 loop also takes care of deleting output_reload_insn. */
8971 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
8973 rtx set = single_set (i2);
8975 if (set != 0 && SET_DEST (set) == reg)
8977 delete_address_reloads (i2, insn);
8978 delete_insn (i2);
8980 if (LABEL_P (i2) || JUMP_P (i2))
8981 break;
8984 /* For the debugging info, say the pseudo lives in this reload reg. */
8985 reg_renumber[REGNO (reg)] = REGNO (new_reload_reg);
8986 if (ira_conflicts_p)
8987 /* Inform IRA about the change. */
8988 ira_mark_allocation_change (REGNO (reg));
8989 alter_reg (REGNO (reg), -1, false);
8991 else
8993 delete_address_reloads (output_reload_insn, insn);
8994 delete_insn (output_reload_insn);
8998 /* We are going to delete DEAD_INSN. Recursively delete loads of
8999 reload registers used in DEAD_INSN that are not used till CURRENT_INSN.
9000 CURRENT_INSN is being reloaded, so we have to check its reloads too. */
9001 static void
9002 delete_address_reloads (rtx_insn *dead_insn, rtx_insn *current_insn)
9004 rtx set = single_set (dead_insn);
9005 rtx set2, dst;
9006 rtx_insn *prev, *next;
9007 if (set)
9009 rtx dst = SET_DEST (set);
9010 if (MEM_P (dst))
9011 delete_address_reloads_1 (dead_insn, XEXP (dst, 0), current_insn);
9013 /* If we deleted the store from a reloaded post_{in,de}c expression,
9014 we can delete the matching adds. */
9015 prev = PREV_INSN (dead_insn);
9016 next = NEXT_INSN (dead_insn);
9017 if (! prev || ! next)
9018 return;
9019 set = single_set (next);
9020 set2 = single_set (prev);
9021 if (! set || ! set2
9022 || GET_CODE (SET_SRC (set)) != PLUS || GET_CODE (SET_SRC (set2)) != PLUS
9023 || !CONST_INT_P (XEXP (SET_SRC (set), 1))
9024 || !CONST_INT_P (XEXP (SET_SRC (set2), 1)))
9025 return;
9026 dst = SET_DEST (set);
9027 if (! rtx_equal_p (dst, SET_DEST (set2))
9028 || ! rtx_equal_p (dst, XEXP (SET_SRC (set), 0))
9029 || ! rtx_equal_p (dst, XEXP (SET_SRC (set2), 0))
9030 || (INTVAL (XEXP (SET_SRC (set), 1))
9031 != -INTVAL (XEXP (SET_SRC (set2), 1))))
9032 return;
9033 delete_related_insns (prev);
9034 delete_related_insns (next);
9037 /* Subfunction of delete_address_reloads: process registers found in X. */
9038 static void
9039 delete_address_reloads_1 (rtx_insn *dead_insn, rtx x, rtx_insn *current_insn)
9041 rtx_insn *prev, *i2;
9042 rtx set, dst;
9043 int i, j;
9044 enum rtx_code code = GET_CODE (x);
9046 if (code != REG)
9048 const char *fmt = GET_RTX_FORMAT (code);
9049 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9051 if (fmt[i] == 'e')
9052 delete_address_reloads_1 (dead_insn, XEXP (x, i), current_insn);
9053 else if (fmt[i] == 'E')
9055 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9056 delete_address_reloads_1 (dead_insn, XVECEXP (x, i, j),
9057 current_insn);
9060 return;
9063 if (spill_reg_order[REGNO (x)] < 0)
9064 return;
9066 /* Scan backwards for the insn that sets x. This might be a way back due
9067 to inheritance. */
9068 for (prev = PREV_INSN (dead_insn); prev; prev = PREV_INSN (prev))
9070 code = GET_CODE (prev);
9071 if (code == CODE_LABEL || code == JUMP_INSN)
9072 return;
9073 if (!INSN_P (prev))
9074 continue;
9075 if (reg_set_p (x, PATTERN (prev)))
9076 break;
9077 if (reg_referenced_p (x, PATTERN (prev)))
9078 return;
9080 if (! prev || INSN_UID (prev) < reload_first_uid)
9081 return;
9082 /* Check that PREV only sets the reload register. */
9083 set = single_set (prev);
9084 if (! set)
9085 return;
9086 dst = SET_DEST (set);
9087 if (!REG_P (dst)
9088 || ! rtx_equal_p (dst, x))
9089 return;
9090 if (! reg_set_p (dst, PATTERN (dead_insn)))
9092 /* Check if DST was used in a later insn -
9093 it might have been inherited. */
9094 for (i2 = NEXT_INSN (dead_insn); i2; i2 = NEXT_INSN (i2))
9096 if (LABEL_P (i2))
9097 break;
9098 if (! INSN_P (i2))
9099 continue;
9100 if (reg_referenced_p (dst, PATTERN (i2)))
9102 /* If there is a reference to the register in the current insn,
9103 it might be loaded in a non-inherited reload. If no other
9104 reload uses it, that means the register is set before
9105 referenced. */
9106 if (i2 == current_insn)
9108 for (j = n_reloads - 1; j >= 0; j--)
9109 if ((rld[j].reg_rtx == dst && reload_inherited[j])
9110 || reload_override_in[j] == dst)
9111 return;
9112 for (j = n_reloads - 1; j >= 0; j--)
9113 if (rld[j].in && rld[j].reg_rtx == dst)
9114 break;
9115 if (j >= 0)
9116 break;
9118 return;
9120 if (JUMP_P (i2))
9121 break;
9122 /* If DST is still live at CURRENT_INSN, check if it is used for
9123 any reload. Note that even if CURRENT_INSN sets DST, we still
9124 have to check the reloads. */
9125 if (i2 == current_insn)
9127 for (j = n_reloads - 1; j >= 0; j--)
9128 if ((rld[j].reg_rtx == dst && reload_inherited[j])
9129 || reload_override_in[j] == dst)
9130 return;
9131 /* ??? We can't finish the loop here, because dst might be
9132 allocated to a pseudo in this block if no reload in this
9133 block needs any of the classes containing DST - see
9134 spill_hard_reg. There is no easy way to tell this, so we
9135 have to scan till the end of the basic block. */
9137 if (reg_set_p (dst, PATTERN (i2)))
9138 break;
9141 delete_address_reloads_1 (prev, SET_SRC (set), current_insn);
9142 reg_reloaded_contents[REGNO (dst)] = -1;
9143 delete_insn (prev);
9146 /* Output reload-insns to reload VALUE into RELOADREG.
9147 VALUE is an autoincrement or autodecrement RTX whose operand
9148 is a register or memory location;
9149 so reloading involves incrementing that location.
9150 IN is either identical to VALUE, or some cheaper place to reload from.
9152 INC_AMOUNT is the number to increment or decrement by (always positive).
9153 This cannot be deduced from VALUE. */
9155 static void
9156 inc_for_reload (rtx reloadreg, rtx in, rtx value, int inc_amount)
9158 /* REG or MEM to be copied and incremented. */
9159 rtx incloc = find_replacement (&XEXP (value, 0));
9160 /* Nonzero if increment after copying. */
9161 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC
9162 || GET_CODE (value) == POST_MODIFY);
9163 rtx_insn *last;
9164 rtx inc;
9165 rtx_insn *add_insn;
9166 int code;
9167 rtx real_in = in == value ? incloc : in;
9169 /* No hard register is equivalent to this register after
9170 inc/dec operation. If REG_LAST_RELOAD_REG were nonzero,
9171 we could inc/dec that register as well (maybe even using it for
9172 the source), but I'm not sure it's worth worrying about. */
9173 if (REG_P (incloc))
9174 reg_last_reload_reg[REGNO (incloc)] = 0;
9176 if (GET_CODE (value) == PRE_MODIFY || GET_CODE (value) == POST_MODIFY)
9178 gcc_assert (GET_CODE (XEXP (value, 1)) == PLUS);
9179 inc = find_replacement (&XEXP (XEXP (value, 1), 1));
9181 else
9183 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
9184 inc_amount = -inc_amount;
9186 inc = GEN_INT (inc_amount);
9189 /* If this is post-increment, first copy the location to the reload reg. */
9190 if (post && real_in != reloadreg)
9191 emit_insn (gen_move_insn (reloadreg, real_in));
9193 if (in == value)
9195 /* See if we can directly increment INCLOC. Use a method similar to
9196 that in gen_reload. */
9198 last = get_last_insn ();
9199 add_insn = emit_insn (gen_rtx_SET (incloc,
9200 gen_rtx_PLUS (GET_MODE (incloc),
9201 incloc, inc)));
9203 code = recog_memoized (add_insn);
9204 if (code >= 0)
9206 extract_insn (add_insn);
9207 if (constrain_operands (1, get_enabled_alternatives (add_insn)))
9209 /* If this is a pre-increment and we have incremented the value
9210 where it lives, copy the incremented value to RELOADREG to
9211 be used as an address. */
9213 if (! post)
9214 emit_insn (gen_move_insn (reloadreg, incloc));
9215 return;
9218 delete_insns_since (last);
9221 /* If couldn't do the increment directly, must increment in RELOADREG.
9222 The way we do this depends on whether this is pre- or post-increment.
9223 For pre-increment, copy INCLOC to the reload register, increment it
9224 there, then save back. */
9226 if (! post)
9228 if (in != reloadreg)
9229 emit_insn (gen_move_insn (reloadreg, real_in));
9230 emit_insn (gen_add2_insn (reloadreg, inc));
9231 emit_insn (gen_move_insn (incloc, reloadreg));
9233 else
9235 /* Postincrement.
9236 Because this might be a jump insn or a compare, and because RELOADREG
9237 may not be available after the insn in an input reload, we must do
9238 the incrementation before the insn being reloaded for.
9240 We have already copied IN to RELOADREG. Increment the copy in
9241 RELOADREG, save that back, then decrement RELOADREG so it has
9242 the original value. */
9244 emit_insn (gen_add2_insn (reloadreg, inc));
9245 emit_insn (gen_move_insn (incloc, reloadreg));
9246 if (CONST_INT_P (inc))
9247 emit_insn (gen_add2_insn (reloadreg,
9248 gen_int_mode (-INTVAL (inc),
9249 GET_MODE (reloadreg))));
9250 else
9251 emit_insn (gen_sub2_insn (reloadreg, inc));
9255 static void
9256 add_auto_inc_notes (rtx_insn *insn, rtx x)
9258 enum rtx_code code = GET_CODE (x);
9259 const char *fmt;
9260 int i, j;
9262 if (code == MEM && auto_inc_p (XEXP (x, 0)))
9264 add_reg_note (insn, REG_INC, XEXP (XEXP (x, 0), 0));
9265 return;
9268 /* Scan all the operand sub-expressions. */
9269 fmt = GET_RTX_FORMAT (code);
9270 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9272 if (fmt[i] == 'e')
9273 add_auto_inc_notes (insn, XEXP (x, i));
9274 else if (fmt[i] == 'E')
9275 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9276 add_auto_inc_notes (insn, XVECEXP (x, i, j));