gcc/
[official-gcc.git] / gcc / reload1.c
blob57837a1fc90871bb3c3cf042b0ac8f71decf7f23
1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
25 #include "hard-reg-set.h"
26 #include "rtl-error.h"
27 #include "tm_p.h"
28 #include "obstack.h"
29 #include "insn-config.h"
30 #include "flags.h"
31 #include "function.h"
32 #include "symtab.h"
33 #include "rtl.h"
34 #include "alias.h"
35 #include "tree.h"
36 #include "expmed.h"
37 #include "dojump.h"
38 #include "explow.h"
39 #include "calls.h"
40 #include "emit-rtl.h"
41 #include "varasm.h"
42 #include "stmt.h"
43 #include "expr.h"
44 #include "insn-codes.h"
45 #include "optabs.h"
46 #include "regs.h"
47 #include "addresses.h"
48 #include "predict.h"
49 #include "dominance.h"
50 #include "cfg.h"
51 #include "cfgrtl.h"
52 #include "cfgbuild.h"
53 #include "basic-block.h"
54 #include "df.h"
55 #include "reload.h"
56 #include "recog.h"
57 #include "except.h"
58 #include "ira.h"
59 #include "target.h"
60 #include "dumpfile.h"
61 #include "rtl-iter.h"
63 /* This file contains the reload pass of the compiler, which is
64 run after register allocation has been done. It checks that
65 each insn is valid (operands required to be in registers really
66 are in registers of the proper class) and fixes up invalid ones
67 by copying values temporarily into registers for the insns
68 that need them.
70 The results of register allocation are described by the vector
71 reg_renumber; the insns still contain pseudo regs, but reg_renumber
72 can be used to find which hard reg, if any, a pseudo reg is in.
74 The technique we always use is to free up a few hard regs that are
75 called ``reload regs'', and for each place where a pseudo reg
76 must be in a hard reg, copy it temporarily into one of the reload regs.
78 Reload regs are allocated locally for every instruction that needs
79 reloads. When there are pseudos which are allocated to a register that
80 has been chosen as a reload reg, such pseudos must be ``spilled''.
81 This means that they go to other hard regs, or to stack slots if no other
82 available hard regs can be found. Spilling can invalidate more
83 insns, requiring additional need for reloads, so we must keep checking
84 until the process stabilizes.
86 For machines with different classes of registers, we must keep track
87 of the register class needed for each reload, and make sure that
88 we allocate enough reload registers of each class.
90 The file reload.c contains the code that checks one insn for
91 validity and reports the reloads that it needs. This file
92 is in charge of scanning the entire rtl code, accumulating the
93 reload needs, spilling, assigning reload registers to use for
94 fixing up each insn, and generating the new insns to copy values
95 into the reload registers. */
97 struct target_reload default_target_reload;
98 #if SWITCHABLE_TARGET
99 struct target_reload *this_target_reload = &default_target_reload;
100 #endif
102 #define spill_indirect_levels \
103 (this_target_reload->x_spill_indirect_levels)
105 /* During reload_as_needed, element N contains a REG rtx for the hard reg
106 into which reg N has been reloaded (perhaps for a previous insn). */
107 static rtx *reg_last_reload_reg;
109 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
110 for an output reload that stores into reg N. */
111 static regset_head reg_has_output_reload;
113 /* Indicates which hard regs are reload-registers for an output reload
114 in the current insn. */
115 static HARD_REG_SET reg_is_output_reload;
117 /* Widest width in which each pseudo reg is referred to (via subreg). */
118 static unsigned int *reg_max_ref_width;
120 /* Vector to remember old contents of reg_renumber before spilling. */
121 static short *reg_old_renumber;
123 /* During reload_as_needed, element N contains the last pseudo regno reloaded
124 into hard register N. If that pseudo reg occupied more than one register,
125 reg_reloaded_contents points to that pseudo for each spill register in
126 use; all of these must remain set for an inheritance to occur. */
127 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
129 /* During reload_as_needed, element N contains the insn for which
130 hard register N was last used. Its contents are significant only
131 when reg_reloaded_valid is set for this register. */
132 static rtx_insn *reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
134 /* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid. */
135 static HARD_REG_SET reg_reloaded_valid;
136 /* Indicate if the register was dead at the end of the reload.
137 This is only valid if reg_reloaded_contents is set and valid. */
138 static HARD_REG_SET reg_reloaded_dead;
140 /* Indicate whether the register's current value is one that is not
141 safe to retain across a call, even for registers that are normally
142 call-saved. This is only meaningful for members of reg_reloaded_valid. */
143 static HARD_REG_SET reg_reloaded_call_part_clobbered;
145 /* Number of spill-regs so far; number of valid elements of spill_regs. */
146 static int n_spills;
148 /* In parallel with spill_regs, contains REG rtx's for those regs.
149 Holds the last rtx used for any given reg, or 0 if it has never
150 been used for spilling yet. This rtx is reused, provided it has
151 the proper mode. */
152 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
154 /* In parallel with spill_regs, contains nonzero for a spill reg
155 that was stored after the last time it was used.
156 The precise value is the insn generated to do the store. */
157 static rtx_insn *spill_reg_store[FIRST_PSEUDO_REGISTER];
159 /* This is the register that was stored with spill_reg_store. This is a
160 copy of reload_out / reload_out_reg when the value was stored; if
161 reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg. */
162 static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER];
164 /* This table is the inverse mapping of spill_regs:
165 indexed by hard reg number,
166 it contains the position of that reg in spill_regs,
167 or -1 for something that is not in spill_regs.
169 ?!? This is no longer accurate. */
170 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
172 /* This reg set indicates registers that can't be used as spill registers for
173 the currently processed insn. These are the hard registers which are live
174 during the insn, but not allocated to pseudos, as well as fixed
175 registers. */
176 static HARD_REG_SET bad_spill_regs;
178 /* These are the hard registers that can't be used as spill register for any
179 insn. This includes registers used for user variables and registers that
180 we can't eliminate. A register that appears in this set also can't be used
181 to retry register allocation. */
182 static HARD_REG_SET bad_spill_regs_global;
184 /* Describes order of use of registers for reloading
185 of spilled pseudo-registers. `n_spills' is the number of
186 elements that are actually valid; new ones are added at the end.
188 Both spill_regs and spill_reg_order are used on two occasions:
189 once during find_reload_regs, where they keep track of the spill registers
190 for a single insn, but also during reload_as_needed where they show all
191 the registers ever used by reload. For the latter case, the information
192 is calculated during finish_spills. */
193 static short spill_regs[FIRST_PSEUDO_REGISTER];
195 /* This vector of reg sets indicates, for each pseudo, which hard registers
196 may not be used for retrying global allocation because the register was
197 formerly spilled from one of them. If we allowed reallocating a pseudo to
198 a register that it was already allocated to, reload might not
199 terminate. */
200 static HARD_REG_SET *pseudo_previous_regs;
202 /* This vector of reg sets indicates, for each pseudo, which hard
203 registers may not be used for retrying global allocation because they
204 are used as spill registers during one of the insns in which the
205 pseudo is live. */
206 static HARD_REG_SET *pseudo_forbidden_regs;
208 /* All hard regs that have been used as spill registers for any insn are
209 marked in this set. */
210 static HARD_REG_SET used_spill_regs;
212 /* Index of last register assigned as a spill register. We allocate in
213 a round-robin fashion. */
214 static int last_spill_reg;
216 /* Record the stack slot for each spilled hard register. */
217 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
219 /* Width allocated so far for that stack slot. */
220 static unsigned int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
222 /* Record which pseudos needed to be spilled. */
223 static regset_head spilled_pseudos;
225 /* Record which pseudos changed their allocation in finish_spills. */
226 static regset_head changed_allocation_pseudos;
228 /* Used for communication between order_regs_for_reload and count_pseudo.
229 Used to avoid counting one pseudo twice. */
230 static regset_head pseudos_counted;
232 /* First uid used by insns created by reload in this function.
233 Used in find_equiv_reg. */
234 int reload_first_uid;
236 /* Flag set by local-alloc or global-alloc if anything is live in
237 a call-clobbered reg across calls. */
238 int caller_save_needed;
240 /* Set to 1 while reload_as_needed is operating.
241 Required by some machines to handle any generated moves differently. */
242 int reload_in_progress = 0;
244 /* This obstack is used for allocation of rtl during register elimination.
245 The allocated storage can be freed once find_reloads has processed the
246 insn. */
247 static struct obstack reload_obstack;
249 /* Points to the beginning of the reload_obstack. All insn_chain structures
250 are allocated first. */
251 static char *reload_startobj;
253 /* The point after all insn_chain structures. Used to quickly deallocate
254 memory allocated in copy_reloads during calculate_needs_all_insns. */
255 static char *reload_firstobj;
257 /* This points before all local rtl generated by register elimination.
258 Used to quickly free all memory after processing one insn. */
259 static char *reload_insn_firstobj;
261 /* List of insn_chain instructions, one for every insn that reload needs to
262 examine. */
263 struct insn_chain *reload_insn_chain;
265 /* TRUE if we potentially left dead insns in the insn stream and want to
266 run DCE immediately after reload, FALSE otherwise. */
267 static bool need_dce;
269 /* List of all insns needing reloads. */
270 static struct insn_chain *insns_need_reload;
272 /* This structure is used to record information about register eliminations.
273 Each array entry describes one possible way of eliminating a register
274 in favor of another. If there is more than one way of eliminating a
275 particular register, the most preferred should be specified first. */
277 struct elim_table
279 int from; /* Register number to be eliminated. */
280 int to; /* Register number used as replacement. */
281 HOST_WIDE_INT initial_offset; /* Initial difference between values. */
282 int can_eliminate; /* Nonzero if this elimination can be done. */
283 int can_eliminate_previous; /* Value returned by TARGET_CAN_ELIMINATE
284 target hook in previous scan over insns
285 made by reload. */
286 HOST_WIDE_INT offset; /* Current offset between the two regs. */
287 HOST_WIDE_INT previous_offset;/* Offset at end of previous insn. */
288 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
289 rtx from_rtx; /* REG rtx for the register to be eliminated.
290 We cannot simply compare the number since
291 we might then spuriously replace a hard
292 register corresponding to a pseudo
293 assigned to the reg to be eliminated. */
294 rtx to_rtx; /* REG rtx for the replacement. */
297 static struct elim_table *reg_eliminate = 0;
299 /* This is an intermediate structure to initialize the table. It has
300 exactly the members provided by ELIMINABLE_REGS. */
301 static const struct elim_table_1
303 const int from;
304 const int to;
305 } reg_eliminate_1[] =
307 /* If a set of eliminable registers was specified, define the table from it.
308 Otherwise, default to the normal case of the frame pointer being
309 replaced by the stack pointer. */
311 #ifdef ELIMINABLE_REGS
312 ELIMINABLE_REGS;
313 #else
314 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
315 #endif
317 #define NUM_ELIMINABLE_REGS ARRAY_SIZE (reg_eliminate_1)
319 /* Record the number of pending eliminations that have an offset not equal
320 to their initial offset. If nonzero, we use a new copy of each
321 replacement result in any insns encountered. */
322 int num_not_at_initial_offset;
324 /* Count the number of registers that we may be able to eliminate. */
325 static int num_eliminable;
326 /* And the number of registers that are equivalent to a constant that
327 can be eliminated to frame_pointer / arg_pointer + constant. */
328 static int num_eliminable_invariants;
330 /* For each label, we record the offset of each elimination. If we reach
331 a label by more than one path and an offset differs, we cannot do the
332 elimination. This information is indexed by the difference of the
333 number of the label and the first label number. We can't offset the
334 pointer itself as this can cause problems on machines with segmented
335 memory. The first table is an array of flags that records whether we
336 have yet encountered a label and the second table is an array of arrays,
337 one entry in the latter array for each elimination. */
339 static int first_label_num;
340 static char *offsets_known_at;
341 static HOST_WIDE_INT (*offsets_at)[NUM_ELIMINABLE_REGS];
343 vec<reg_equivs_t, va_gc> *reg_equivs;
345 /* Stack of addresses where an rtx has been changed. We can undo the
346 changes by popping items off the stack and restoring the original
347 value at each location.
349 We use this simplistic undo capability rather than copy_rtx as copy_rtx
350 will not make a deep copy of a normally sharable rtx, such as
351 (const (plus (symbol_ref) (const_int))). If such an expression appears
352 as R1 in gen_reload_chain_without_interm_reg_p, then a shared
353 rtx expression would be changed. See PR 42431. */
355 typedef rtx *rtx_p;
356 static vec<rtx_p> substitute_stack;
358 /* Number of labels in the current function. */
360 static int num_labels;
362 static void replace_pseudos_in (rtx *, machine_mode, rtx);
363 static void maybe_fix_stack_asms (void);
364 static void copy_reloads (struct insn_chain *);
365 static void calculate_needs_all_insns (int);
366 static int find_reg (struct insn_chain *, int);
367 static void find_reload_regs (struct insn_chain *);
368 static void select_reload_regs (void);
369 static void delete_caller_save_insns (void);
371 static void spill_failure (rtx_insn *, enum reg_class);
372 static void count_spilled_pseudo (int, int, int);
373 static void delete_dead_insn (rtx_insn *);
374 static void alter_reg (int, int, bool);
375 static void set_label_offsets (rtx, rtx_insn *, int);
376 static void check_eliminable_occurrences (rtx);
377 static void elimination_effects (rtx, machine_mode);
378 static rtx eliminate_regs_1 (rtx, machine_mode, rtx, bool, bool);
379 static int eliminate_regs_in_insn (rtx_insn *, int);
380 static void update_eliminable_offsets (void);
381 static void mark_not_eliminable (rtx, const_rtx, void *);
382 static void set_initial_elim_offsets (void);
383 static bool verify_initial_elim_offsets (void);
384 static void set_initial_label_offsets (void);
385 static void set_offsets_for_label (rtx_insn *);
386 static void init_eliminable_invariants (rtx_insn *, bool);
387 static void init_elim_table (void);
388 static void free_reg_equiv (void);
389 static void update_eliminables (HARD_REG_SET *);
390 static bool update_eliminables_and_spill (void);
391 static void elimination_costs_in_insn (rtx_insn *);
392 static void spill_hard_reg (unsigned int, int);
393 static int finish_spills (int);
394 static void scan_paradoxical_subregs (rtx);
395 static void count_pseudo (int);
396 static void order_regs_for_reload (struct insn_chain *);
397 static void reload_as_needed (int);
398 static void forget_old_reloads_1 (rtx, const_rtx, void *);
399 static void forget_marked_reloads (regset);
400 static int reload_reg_class_lower (const void *, const void *);
401 static void mark_reload_reg_in_use (unsigned int, int, enum reload_type,
402 machine_mode);
403 static void clear_reload_reg_in_use (unsigned int, int, enum reload_type,
404 machine_mode);
405 static int reload_reg_free_p (unsigned int, int, enum reload_type);
406 static int reload_reg_free_for_value_p (int, int, int, enum reload_type,
407 rtx, rtx, int, int);
408 static int free_for_value_p (int, machine_mode, int, enum reload_type,
409 rtx, rtx, int, int);
410 static int allocate_reload_reg (struct insn_chain *, int, int);
411 static int conflicts_with_override (rtx);
412 static void failed_reload (rtx_insn *, int);
413 static int set_reload_reg (int, int);
414 static void choose_reload_regs_init (struct insn_chain *, rtx *);
415 static void choose_reload_regs (struct insn_chain *);
416 static void emit_input_reload_insns (struct insn_chain *, struct reload *,
417 rtx, int);
418 static void emit_output_reload_insns (struct insn_chain *, struct reload *,
419 int);
420 static void do_input_reload (struct insn_chain *, struct reload *, int);
421 static void do_output_reload (struct insn_chain *, struct reload *, int);
422 static void emit_reload_insns (struct insn_chain *);
423 static void delete_output_reload (rtx_insn *, int, int, rtx);
424 static void delete_address_reloads (rtx_insn *, rtx_insn *);
425 static void delete_address_reloads_1 (rtx_insn *, rtx, rtx_insn *);
426 static void inc_for_reload (rtx, rtx, rtx, int);
427 #ifdef AUTO_INC_DEC
428 static void add_auto_inc_notes (rtx_insn *, rtx);
429 #endif
430 static void substitute (rtx *, const_rtx, rtx);
431 static bool gen_reload_chain_without_interm_reg_p (int, int);
432 static int reloads_conflict (int, int);
433 static rtx_insn *gen_reload (rtx, rtx, int, enum reload_type);
434 static rtx_insn *emit_insn_if_valid_for_reload (rtx);
436 /* Initialize the reload pass. This is called at the beginning of compilation
437 and may be called again if the target is reinitialized. */
439 void
440 init_reload (void)
442 int i;
444 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
445 Set spill_indirect_levels to the number of levels such addressing is
446 permitted, zero if it is not permitted at all. */
448 rtx tem
449 = gen_rtx_MEM (Pmode,
450 gen_rtx_PLUS (Pmode,
451 gen_rtx_REG (Pmode,
452 LAST_VIRTUAL_REGISTER + 1),
453 gen_int_mode (4, Pmode)));
454 spill_indirect_levels = 0;
456 while (memory_address_p (QImode, tem))
458 spill_indirect_levels++;
459 tem = gen_rtx_MEM (Pmode, tem);
462 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
464 tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
465 indirect_symref_ok = memory_address_p (QImode, tem);
467 /* See if reg+reg is a valid (and offsettable) address. */
469 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
471 tem = gen_rtx_PLUS (Pmode,
472 gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
473 gen_rtx_REG (Pmode, i));
475 /* This way, we make sure that reg+reg is an offsettable address. */
476 tem = plus_constant (Pmode, tem, 4);
478 if (memory_address_p (QImode, tem))
480 double_reg_address_ok = 1;
481 break;
485 /* Initialize obstack for our rtl allocation. */
486 if (reload_startobj == NULL)
488 gcc_obstack_init (&reload_obstack);
489 reload_startobj = XOBNEWVAR (&reload_obstack, char, 0);
492 INIT_REG_SET (&spilled_pseudos);
493 INIT_REG_SET (&changed_allocation_pseudos);
494 INIT_REG_SET (&pseudos_counted);
497 /* List of insn chains that are currently unused. */
498 static struct insn_chain *unused_insn_chains = 0;
500 /* Allocate an empty insn_chain structure. */
501 struct insn_chain *
502 new_insn_chain (void)
504 struct insn_chain *c;
506 if (unused_insn_chains == 0)
508 c = XOBNEW (&reload_obstack, struct insn_chain);
509 INIT_REG_SET (&c->live_throughout);
510 INIT_REG_SET (&c->dead_or_set);
512 else
514 c = unused_insn_chains;
515 unused_insn_chains = c->next;
517 c->is_caller_save_insn = 0;
518 c->need_operand_change = 0;
519 c->need_reload = 0;
520 c->need_elim = 0;
521 return c;
524 /* Small utility function to set all regs in hard reg set TO which are
525 allocated to pseudos in regset FROM. */
527 void
528 compute_use_by_pseudos (HARD_REG_SET *to, regset from)
530 unsigned int regno;
531 reg_set_iterator rsi;
533 EXECUTE_IF_SET_IN_REG_SET (from, FIRST_PSEUDO_REGISTER, regno, rsi)
535 int r = reg_renumber[regno];
537 if (r < 0)
539 /* reload_combine uses the information from DF_LIVE_IN,
540 which might still contain registers that have not
541 actually been allocated since they have an
542 equivalence. */
543 gcc_assert (ira_conflicts_p || reload_completed);
545 else
546 add_to_hard_reg_set (to, PSEUDO_REGNO_MODE (regno), r);
550 /* Replace all pseudos found in LOC with their corresponding
551 equivalences. */
553 static void
554 replace_pseudos_in (rtx *loc, machine_mode mem_mode, rtx usage)
556 rtx x = *loc;
557 enum rtx_code code;
558 const char *fmt;
559 int i, j;
561 if (! x)
562 return;
564 code = GET_CODE (x);
565 if (code == REG)
567 unsigned int regno = REGNO (x);
569 if (regno < FIRST_PSEUDO_REGISTER)
570 return;
572 x = eliminate_regs_1 (x, mem_mode, usage, true, false);
573 if (x != *loc)
575 *loc = x;
576 replace_pseudos_in (loc, mem_mode, usage);
577 return;
580 if (reg_equiv_constant (regno))
581 *loc = reg_equiv_constant (regno);
582 else if (reg_equiv_invariant (regno))
583 *loc = reg_equiv_invariant (regno);
584 else if (reg_equiv_mem (regno))
585 *loc = reg_equiv_mem (regno);
586 else if (reg_equiv_address (regno))
587 *loc = gen_rtx_MEM (GET_MODE (x), reg_equiv_address (regno));
588 else
590 gcc_assert (!REG_P (regno_reg_rtx[regno])
591 || REGNO (regno_reg_rtx[regno]) != regno);
592 *loc = regno_reg_rtx[regno];
595 return;
597 else if (code == MEM)
599 replace_pseudos_in (& XEXP (x, 0), GET_MODE (x), usage);
600 return;
603 /* Process each of our operands recursively. */
604 fmt = GET_RTX_FORMAT (code);
605 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
606 if (*fmt == 'e')
607 replace_pseudos_in (&XEXP (x, i), mem_mode, usage);
608 else if (*fmt == 'E')
609 for (j = 0; j < XVECLEN (x, i); j++)
610 replace_pseudos_in (& XVECEXP (x, i, j), mem_mode, usage);
613 /* Determine if the current function has an exception receiver block
614 that reaches the exit block via non-exceptional edges */
616 static bool
617 has_nonexceptional_receiver (void)
619 edge e;
620 edge_iterator ei;
621 basic_block *tos, *worklist, bb;
623 /* If we're not optimizing, then just err on the safe side. */
624 if (!optimize)
625 return true;
627 /* First determine which blocks can reach exit via normal paths. */
628 tos = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun) + 1);
630 FOR_EACH_BB_FN (bb, cfun)
631 bb->flags &= ~BB_REACHABLE;
633 /* Place the exit block on our worklist. */
634 EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_REACHABLE;
635 *tos++ = EXIT_BLOCK_PTR_FOR_FN (cfun);
637 /* Iterate: find everything reachable from what we've already seen. */
638 while (tos != worklist)
640 bb = *--tos;
642 FOR_EACH_EDGE (e, ei, bb->preds)
643 if (!(e->flags & EDGE_ABNORMAL))
645 basic_block src = e->src;
647 if (!(src->flags & BB_REACHABLE))
649 src->flags |= BB_REACHABLE;
650 *tos++ = src;
654 free (worklist);
656 /* Now see if there's a reachable block with an exceptional incoming
657 edge. */
658 FOR_EACH_BB_FN (bb, cfun)
659 if (bb->flags & BB_REACHABLE && bb_has_abnormal_pred (bb))
660 return true;
662 /* No exceptional block reached exit unexceptionally. */
663 return false;
666 /* Grow (or allocate) the REG_EQUIVS array from its current size (which may be
667 zero elements) to MAX_REG_NUM elements.
669 Initialize all new fields to NULL and update REG_EQUIVS_SIZE. */
670 void
671 grow_reg_equivs (void)
673 int old_size = vec_safe_length (reg_equivs);
674 int max_regno = max_reg_num ();
675 int i;
676 reg_equivs_t ze;
678 memset (&ze, 0, sizeof (reg_equivs_t));
679 vec_safe_reserve (reg_equivs, max_regno);
680 for (i = old_size; i < max_regno; i++)
681 reg_equivs->quick_insert (i, ze);
685 /* Global variables used by reload and its subroutines. */
687 /* The current basic block while in calculate_elim_costs_all_insns. */
688 static basic_block elim_bb;
690 /* Set during calculate_needs if an insn needs register elimination. */
691 static int something_needs_elimination;
692 /* Set during calculate_needs if an insn needs an operand changed. */
693 static int something_needs_operands_changed;
694 /* Set by alter_regs if we spilled a register to the stack. */
695 static bool something_was_spilled;
697 /* Nonzero means we couldn't get enough spill regs. */
698 static int failure;
700 /* Temporary array of pseudo-register number. */
701 static int *temp_pseudo_reg_arr;
703 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
704 If that insn didn't set the register (i.e., it copied the register to
705 memory), just delete that insn instead of the equivalencing insn plus
706 anything now dead. If we call delete_dead_insn on that insn, we may
707 delete the insn that actually sets the register if the register dies
708 there and that is incorrect. */
709 static void
710 remove_init_insns ()
712 for (int i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
714 if (reg_renumber[i] < 0 && reg_equiv_init (i) != 0)
716 rtx list;
717 for (list = reg_equiv_init (i); list; list = XEXP (list, 1))
719 rtx_insn *equiv_insn = as_a <rtx_insn *> (XEXP (list, 0));
721 /* If we already deleted the insn or if it may trap, we can't
722 delete it. The latter case shouldn't happen, but can
723 if an insn has a variable address, gets a REG_EH_REGION
724 note added to it, and then gets converted into a load
725 from a constant address. */
726 if (NOTE_P (equiv_insn)
727 || can_throw_internal (equiv_insn))
729 else if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn)))
730 delete_dead_insn (equiv_insn);
731 else
732 SET_INSN_DELETED (equiv_insn);
738 /* Return true if remove_init_insns will delete INSN. */
739 static bool
740 will_delete_init_insn_p (rtx_insn *insn)
742 rtx set = single_set (insn);
743 if (!set || !REG_P (SET_DEST (set)))
744 return false;
745 unsigned regno = REGNO (SET_DEST (set));
747 if (can_throw_internal (insn))
748 return false;
750 if (regno < FIRST_PSEUDO_REGISTER || reg_renumber[regno] >= 0)
751 return false;
753 for (rtx list = reg_equiv_init (regno); list; list = XEXP (list, 1))
755 rtx equiv_insn = XEXP (list, 0);
756 if (equiv_insn == insn)
757 return true;
759 return false;
762 /* Main entry point for the reload pass.
764 FIRST is the first insn of the function being compiled.
766 GLOBAL nonzero means we were called from global_alloc
767 and should attempt to reallocate any pseudoregs that we
768 displace from hard regs we will use for reloads.
769 If GLOBAL is zero, we do not have enough information to do that,
770 so any pseudo reg that is spilled must go to the stack.
772 Return value is TRUE if reload likely left dead insns in the
773 stream and a DCE pass should be run to elimiante them. Else the
774 return value is FALSE. */
776 bool
777 reload (rtx_insn *first, int global)
779 int i, n;
780 rtx_insn *insn;
781 struct elim_table *ep;
782 basic_block bb;
783 bool inserted;
785 /* Make sure even insns with volatile mem refs are recognizable. */
786 init_recog ();
788 failure = 0;
790 reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
792 /* Make sure that the last insn in the chain
793 is not something that needs reloading. */
794 emit_note (NOTE_INSN_DELETED);
796 /* Enable find_equiv_reg to distinguish insns made by reload. */
797 reload_first_uid = get_max_uid ();
799 #ifdef SECONDARY_MEMORY_NEEDED
800 /* Initialize the secondary memory table. */
801 clear_secondary_mem ();
802 #endif
804 /* We don't have a stack slot for any spill reg yet. */
805 memset (spill_stack_slot, 0, sizeof spill_stack_slot);
806 memset (spill_stack_slot_width, 0, sizeof spill_stack_slot_width);
808 /* Initialize the save area information for caller-save, in case some
809 are needed. */
810 init_save_areas ();
812 /* Compute which hard registers are now in use
813 as homes for pseudo registers.
814 This is done here rather than (eg) in global_alloc
815 because this point is reached even if not optimizing. */
816 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
817 mark_home_live (i);
819 /* A function that has a nonlocal label that can reach the exit
820 block via non-exceptional paths must save all call-saved
821 registers. */
822 if (cfun->has_nonlocal_label
823 && has_nonexceptional_receiver ())
824 crtl->saves_all_registers = 1;
826 if (crtl->saves_all_registers)
827 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
828 if (! call_used_regs[i] && ! fixed_regs[i] && ! LOCAL_REGNO (i))
829 df_set_regs_ever_live (i, true);
831 /* Find all the pseudo registers that didn't get hard regs
832 but do have known equivalent constants or memory slots.
833 These include parameters (known equivalent to parameter slots)
834 and cse'd or loop-moved constant memory addresses.
836 Record constant equivalents in reg_equiv_constant
837 so they will be substituted by find_reloads.
838 Record memory equivalents in reg_mem_equiv so they can
839 be substituted eventually by altering the REG-rtx's. */
841 grow_reg_equivs ();
842 reg_old_renumber = XCNEWVEC (short, max_regno);
843 memcpy (reg_old_renumber, reg_renumber, max_regno * sizeof (short));
844 pseudo_forbidden_regs = XNEWVEC (HARD_REG_SET, max_regno);
845 pseudo_previous_regs = XCNEWVEC (HARD_REG_SET, max_regno);
847 CLEAR_HARD_REG_SET (bad_spill_regs_global);
849 init_eliminable_invariants (first, true);
850 init_elim_table ();
852 /* Alter each pseudo-reg rtx to contain its hard reg number. Assign
853 stack slots to the pseudos that lack hard regs or equivalents.
854 Do not touch virtual registers. */
856 temp_pseudo_reg_arr = XNEWVEC (int, max_regno - LAST_VIRTUAL_REGISTER - 1);
857 for (n = 0, i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
858 temp_pseudo_reg_arr[n++] = i;
860 if (ira_conflicts_p)
861 /* Ask IRA to order pseudo-registers for better stack slot
862 sharing. */
863 ira_sort_regnos_for_alter_reg (temp_pseudo_reg_arr, n, reg_max_ref_width);
865 for (i = 0; i < n; i++)
866 alter_reg (temp_pseudo_reg_arr[i], -1, false);
868 /* If we have some registers we think can be eliminated, scan all insns to
869 see if there is an insn that sets one of these registers to something
870 other than itself plus a constant. If so, the register cannot be
871 eliminated. Doing this scan here eliminates an extra pass through the
872 main reload loop in the most common case where register elimination
873 cannot be done. */
874 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
875 if (INSN_P (insn))
876 note_stores (PATTERN (insn), mark_not_eliminable, NULL);
878 maybe_fix_stack_asms ();
880 insns_need_reload = 0;
881 something_needs_elimination = 0;
883 /* Initialize to -1, which means take the first spill register. */
884 last_spill_reg = -1;
886 /* Spill any hard regs that we know we can't eliminate. */
887 CLEAR_HARD_REG_SET (used_spill_regs);
888 /* There can be multiple ways to eliminate a register;
889 they should be listed adjacently.
890 Elimination for any register fails only if all possible ways fail. */
891 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; )
893 int from = ep->from;
894 int can_eliminate = 0;
897 can_eliminate |= ep->can_eliminate;
898 ep++;
900 while (ep < &reg_eliminate[NUM_ELIMINABLE_REGS] && ep->from == from);
901 if (! can_eliminate)
902 spill_hard_reg (from, 1);
905 if (!HARD_FRAME_POINTER_IS_FRAME_POINTER && frame_pointer_needed)
906 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, 1);
908 finish_spills (global);
910 /* From now on, we may need to generate moves differently. We may also
911 allow modifications of insns which cause them to not be recognized.
912 Any such modifications will be cleaned up during reload itself. */
913 reload_in_progress = 1;
915 /* This loop scans the entire function each go-round
916 and repeats until one repetition spills no additional hard regs. */
917 for (;;)
919 int something_changed;
920 int did_spill;
921 HOST_WIDE_INT starting_frame_size;
923 starting_frame_size = get_frame_size ();
924 something_was_spilled = false;
926 set_initial_elim_offsets ();
927 set_initial_label_offsets ();
929 /* For each pseudo register that has an equivalent location defined,
930 try to eliminate any eliminable registers (such as the frame pointer)
931 assuming initial offsets for the replacement register, which
932 is the normal case.
934 If the resulting location is directly addressable, substitute
935 the MEM we just got directly for the old REG.
937 If it is not addressable but is a constant or the sum of a hard reg
938 and constant, it is probably not addressable because the constant is
939 out of range, in that case record the address; we will generate
940 hairy code to compute the address in a register each time it is
941 needed. Similarly if it is a hard register, but one that is not
942 valid as an address register.
944 If the location is not addressable, but does not have one of the
945 above forms, assign a stack slot. We have to do this to avoid the
946 potential of producing lots of reloads if, e.g., a location involves
947 a pseudo that didn't get a hard register and has an equivalent memory
948 location that also involves a pseudo that didn't get a hard register.
950 Perhaps at some point we will improve reload_when_needed handling
951 so this problem goes away. But that's very hairy. */
953 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
954 if (reg_renumber[i] < 0 && reg_equiv_memory_loc (i))
956 rtx x = eliminate_regs (reg_equiv_memory_loc (i), VOIDmode,
957 NULL_RTX);
959 if (strict_memory_address_addr_space_p
960 (GET_MODE (regno_reg_rtx[i]), XEXP (x, 0),
961 MEM_ADDR_SPACE (x)))
962 reg_equiv_mem (i) = x, reg_equiv_address (i) = 0;
963 else if (CONSTANT_P (XEXP (x, 0))
964 || (REG_P (XEXP (x, 0))
965 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
966 || (GET_CODE (XEXP (x, 0)) == PLUS
967 && REG_P (XEXP (XEXP (x, 0), 0))
968 && (REGNO (XEXP (XEXP (x, 0), 0))
969 < FIRST_PSEUDO_REGISTER)
970 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
971 reg_equiv_address (i) = XEXP (x, 0), reg_equiv_mem (i) = 0;
972 else
974 /* Make a new stack slot. Then indicate that something
975 changed so we go back and recompute offsets for
976 eliminable registers because the allocation of memory
977 below might change some offset. reg_equiv_{mem,address}
978 will be set up for this pseudo on the next pass around
979 the loop. */
980 reg_equiv_memory_loc (i) = 0;
981 reg_equiv_init (i) = 0;
982 alter_reg (i, -1, true);
986 if (caller_save_needed)
987 setup_save_areas ();
989 if (starting_frame_size && crtl->stack_alignment_needed)
991 /* If we have a stack frame, we must align it now. The
992 stack size may be a part of the offset computation for
993 register elimination. So if this changes the stack size,
994 then repeat the elimination bookkeeping. We don't
995 realign when there is no stack, as that will cause a
996 stack frame when none is needed should
997 STARTING_FRAME_OFFSET not be already aligned to
998 STACK_BOUNDARY. */
999 assign_stack_local (BLKmode, 0, crtl->stack_alignment_needed);
1001 /* If we allocated another stack slot, redo elimination bookkeeping. */
1002 if (something_was_spilled || starting_frame_size != get_frame_size ())
1004 update_eliminables_and_spill ();
1005 continue;
1008 if (caller_save_needed)
1010 save_call_clobbered_regs ();
1011 /* That might have allocated new insn_chain structures. */
1012 reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
1015 calculate_needs_all_insns (global);
1017 if (! ira_conflicts_p)
1018 /* Don't do it for IRA. We need this info because we don't
1019 change live_throughout and dead_or_set for chains when IRA
1020 is used. */
1021 CLEAR_REG_SET (&spilled_pseudos);
1023 did_spill = 0;
1025 something_changed = 0;
1027 /* If we allocated any new memory locations, make another pass
1028 since it might have changed elimination offsets. */
1029 if (something_was_spilled || starting_frame_size != get_frame_size ())
1030 something_changed = 1;
1032 /* Even if the frame size remained the same, we might still have
1033 changed elimination offsets, e.g. if find_reloads called
1034 force_const_mem requiring the back end to allocate a constant
1035 pool base register that needs to be saved on the stack. */
1036 else if (!verify_initial_elim_offsets ())
1037 something_changed = 1;
1039 if (update_eliminables_and_spill ())
1041 did_spill = 1;
1042 something_changed = 1;
1045 select_reload_regs ();
1046 if (failure)
1047 goto failed;
1049 if (insns_need_reload != 0 || did_spill)
1050 something_changed |= finish_spills (global);
1052 if (! something_changed)
1053 break;
1055 if (caller_save_needed)
1056 delete_caller_save_insns ();
1058 obstack_free (&reload_obstack, reload_firstobj);
1061 /* If global-alloc was run, notify it of any register eliminations we have
1062 done. */
1063 if (global)
1064 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1065 if (ep->can_eliminate)
1066 mark_elimination (ep->from, ep->to);
1068 remove_init_insns ();
1070 /* Use the reload registers where necessary
1071 by generating move instructions to move the must-be-register
1072 values into or out of the reload registers. */
1074 if (insns_need_reload != 0 || something_needs_elimination
1075 || something_needs_operands_changed)
1077 HOST_WIDE_INT old_frame_size = get_frame_size ();
1079 reload_as_needed (global);
1081 gcc_assert (old_frame_size == get_frame_size ());
1083 gcc_assert (verify_initial_elim_offsets ());
1086 /* If we were able to eliminate the frame pointer, show that it is no
1087 longer live at the start of any basic block. If it ls live by
1088 virtue of being in a pseudo, that pseudo will be marked live
1089 and hence the frame pointer will be known to be live via that
1090 pseudo. */
1092 if (! frame_pointer_needed)
1093 FOR_EACH_BB_FN (bb, cfun)
1094 bitmap_clear_bit (df_get_live_in (bb), HARD_FRAME_POINTER_REGNUM);
1096 /* Come here (with failure set nonzero) if we can't get enough spill
1097 regs. */
1098 failed:
1100 CLEAR_REG_SET (&changed_allocation_pseudos);
1101 CLEAR_REG_SET (&spilled_pseudos);
1102 reload_in_progress = 0;
1104 /* Now eliminate all pseudo regs by modifying them into
1105 their equivalent memory references.
1106 The REG-rtx's for the pseudos are modified in place,
1107 so all insns that used to refer to them now refer to memory.
1109 For a reg that has a reg_equiv_address, all those insns
1110 were changed by reloading so that no insns refer to it any longer;
1111 but the DECL_RTL of a variable decl may refer to it,
1112 and if so this causes the debugging info to mention the variable. */
1114 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1116 rtx addr = 0;
1118 if (reg_equiv_mem (i))
1119 addr = XEXP (reg_equiv_mem (i), 0);
1121 if (reg_equiv_address (i))
1122 addr = reg_equiv_address (i);
1124 if (addr)
1126 if (reg_renumber[i] < 0)
1128 rtx reg = regno_reg_rtx[i];
1130 REG_USERVAR_P (reg) = 0;
1131 PUT_CODE (reg, MEM);
1132 XEXP (reg, 0) = addr;
1133 if (reg_equiv_memory_loc (i))
1134 MEM_COPY_ATTRIBUTES (reg, reg_equiv_memory_loc (i));
1135 else
1136 MEM_ATTRS (reg) = 0;
1137 MEM_NOTRAP_P (reg) = 1;
1139 else if (reg_equiv_mem (i))
1140 XEXP (reg_equiv_mem (i), 0) = addr;
1143 /* We don't want complex addressing modes in debug insns
1144 if simpler ones will do, so delegitimize equivalences
1145 in debug insns. */
1146 if (MAY_HAVE_DEBUG_INSNS && reg_renumber[i] < 0)
1148 rtx reg = regno_reg_rtx[i];
1149 rtx equiv = 0;
1150 df_ref use, next;
1152 if (reg_equiv_constant (i))
1153 equiv = reg_equiv_constant (i);
1154 else if (reg_equiv_invariant (i))
1155 equiv = reg_equiv_invariant (i);
1156 else if (reg && MEM_P (reg))
1157 equiv = targetm.delegitimize_address (reg);
1158 else if (reg && REG_P (reg) && (int)REGNO (reg) != i)
1159 equiv = reg;
1161 if (equiv == reg)
1162 continue;
1164 for (use = DF_REG_USE_CHAIN (i); use; use = next)
1166 insn = DF_REF_INSN (use);
1168 /* Make sure the next ref is for a different instruction,
1169 so that we're not affected by the rescan. */
1170 next = DF_REF_NEXT_REG (use);
1171 while (next && DF_REF_INSN (next) == insn)
1172 next = DF_REF_NEXT_REG (next);
1174 if (DEBUG_INSN_P (insn))
1176 if (!equiv)
1178 INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
1179 df_insn_rescan_debug_internal (insn);
1181 else
1182 INSN_VAR_LOCATION_LOC (insn)
1183 = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (insn),
1184 reg, equiv);
1190 /* We must set reload_completed now since the cleanup_subreg_operands call
1191 below will re-recognize each insn and reload may have generated insns
1192 which are only valid during and after reload. */
1193 reload_completed = 1;
1195 /* Make a pass over all the insns and delete all USEs which we inserted
1196 only to tag a REG_EQUAL note on them. Remove all REG_DEAD and REG_UNUSED
1197 notes. Delete all CLOBBER insns, except those that refer to the return
1198 value and the special mem:BLK CLOBBERs added to prevent the scheduler
1199 from misarranging variable-array code, and simplify (subreg (reg))
1200 operands. Strip and regenerate REG_INC notes that may have been moved
1201 around. */
1203 for (insn = first; insn; insn = NEXT_INSN (insn))
1204 if (INSN_P (insn))
1206 rtx *pnote;
1208 if (CALL_P (insn))
1209 replace_pseudos_in (& CALL_INSN_FUNCTION_USAGE (insn),
1210 VOIDmode, CALL_INSN_FUNCTION_USAGE (insn));
1212 if ((GET_CODE (PATTERN (insn)) == USE
1213 /* We mark with QImode USEs introduced by reload itself. */
1214 && (GET_MODE (insn) == QImode
1215 || find_reg_note (insn, REG_EQUAL, NULL_RTX)))
1216 || (GET_CODE (PATTERN (insn)) == CLOBBER
1217 && (!MEM_P (XEXP (PATTERN (insn), 0))
1218 || GET_MODE (XEXP (PATTERN (insn), 0)) != BLKmode
1219 || (GET_CODE (XEXP (XEXP (PATTERN (insn), 0), 0)) != SCRATCH
1220 && XEXP (XEXP (PATTERN (insn), 0), 0)
1221 != stack_pointer_rtx))
1222 && (!REG_P (XEXP (PATTERN (insn), 0))
1223 || ! REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))))
1225 delete_insn (insn);
1226 continue;
1229 /* Some CLOBBERs may survive until here and still reference unassigned
1230 pseudos with const equivalent, which may in turn cause ICE in later
1231 passes if the reference remains in place. */
1232 if (GET_CODE (PATTERN (insn)) == CLOBBER)
1233 replace_pseudos_in (& XEXP (PATTERN (insn), 0),
1234 VOIDmode, PATTERN (insn));
1236 /* Discard obvious no-ops, even without -O. This optimization
1237 is fast and doesn't interfere with debugging. */
1238 if (NONJUMP_INSN_P (insn)
1239 && GET_CODE (PATTERN (insn)) == SET
1240 && REG_P (SET_SRC (PATTERN (insn)))
1241 && REG_P (SET_DEST (PATTERN (insn)))
1242 && (REGNO (SET_SRC (PATTERN (insn)))
1243 == REGNO (SET_DEST (PATTERN (insn)))))
1245 delete_insn (insn);
1246 continue;
1249 pnote = &REG_NOTES (insn);
1250 while (*pnote != 0)
1252 if (REG_NOTE_KIND (*pnote) == REG_DEAD
1253 || REG_NOTE_KIND (*pnote) == REG_UNUSED
1254 || REG_NOTE_KIND (*pnote) == REG_INC)
1255 *pnote = XEXP (*pnote, 1);
1256 else
1257 pnote = &XEXP (*pnote, 1);
1260 #ifdef AUTO_INC_DEC
1261 add_auto_inc_notes (insn, PATTERN (insn));
1262 #endif
1264 /* Simplify (subreg (reg)) if it appears as an operand. */
1265 cleanup_subreg_operands (insn);
1267 /* Clean up invalid ASMs so that they don't confuse later passes.
1268 See PR 21299. */
1269 if (asm_noperands (PATTERN (insn)) >= 0)
1271 extract_insn (insn);
1272 if (!constrain_operands (1, get_enabled_alternatives (insn)))
1274 error_for_asm (insn,
1275 "%<asm%> operand has impossible constraints");
1276 delete_insn (insn);
1277 continue;
1282 /* If we are doing generic stack checking, give a warning if this
1283 function's frame size is larger than we expect. */
1284 if (flag_stack_check == GENERIC_STACK_CHECK)
1286 HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
1287 static int verbose_warned = 0;
1289 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1290 if (df_regs_ever_live_p (i) && ! fixed_regs[i] && call_used_regs[i])
1291 size += UNITS_PER_WORD;
1293 if (size > STACK_CHECK_MAX_FRAME_SIZE)
1295 warning (0, "frame size too large for reliable stack checking");
1296 if (! verbose_warned)
1298 warning (0, "try reducing the number of local variables");
1299 verbose_warned = 1;
1304 free (temp_pseudo_reg_arr);
1306 /* Indicate that we no longer have known memory locations or constants. */
1307 free_reg_equiv ();
1309 free (reg_max_ref_width);
1310 free (reg_old_renumber);
1311 free (pseudo_previous_regs);
1312 free (pseudo_forbidden_regs);
1314 CLEAR_HARD_REG_SET (used_spill_regs);
1315 for (i = 0; i < n_spills; i++)
1316 SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
1318 /* Free all the insn_chain structures at once. */
1319 obstack_free (&reload_obstack, reload_startobj);
1320 unused_insn_chains = 0;
1322 inserted = fixup_abnormal_edges ();
1324 /* We've possibly turned single trapping insn into multiple ones. */
1325 if (cfun->can_throw_non_call_exceptions)
1327 sbitmap blocks;
1328 blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
1329 bitmap_ones (blocks);
1330 find_many_sub_basic_blocks (blocks);
1331 sbitmap_free (blocks);
1334 if (inserted)
1335 commit_edge_insertions ();
1337 /* Replacing pseudos with their memory equivalents might have
1338 created shared rtx. Subsequent passes would get confused
1339 by this, so unshare everything here. */
1340 unshare_all_rtl_again (first);
1342 #ifdef STACK_BOUNDARY
1343 /* init_emit has set the alignment of the hard frame pointer
1344 to STACK_BOUNDARY. It is very likely no longer valid if
1345 the hard frame pointer was used for register allocation. */
1346 if (!frame_pointer_needed)
1347 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = BITS_PER_UNIT;
1348 #endif
1350 substitute_stack.release ();
1352 gcc_assert (bitmap_empty_p (&spilled_pseudos));
1354 reload_completed = !failure;
1356 return need_dce;
1359 /* Yet another special case. Unfortunately, reg-stack forces people to
1360 write incorrect clobbers in asm statements. These clobbers must not
1361 cause the register to appear in bad_spill_regs, otherwise we'll call
1362 fatal_insn later. We clear the corresponding regnos in the live
1363 register sets to avoid this.
1364 The whole thing is rather sick, I'm afraid. */
1366 static void
1367 maybe_fix_stack_asms (void)
1369 #ifdef STACK_REGS
1370 const char *constraints[MAX_RECOG_OPERANDS];
1371 machine_mode operand_mode[MAX_RECOG_OPERANDS];
1372 struct insn_chain *chain;
1374 for (chain = reload_insn_chain; chain != 0; chain = chain->next)
1376 int i, noperands;
1377 HARD_REG_SET clobbered, allowed;
1378 rtx pat;
1380 if (! INSN_P (chain->insn)
1381 || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
1382 continue;
1383 pat = PATTERN (chain->insn);
1384 if (GET_CODE (pat) != PARALLEL)
1385 continue;
1387 CLEAR_HARD_REG_SET (clobbered);
1388 CLEAR_HARD_REG_SET (allowed);
1390 /* First, make a mask of all stack regs that are clobbered. */
1391 for (i = 0; i < XVECLEN (pat, 0); i++)
1393 rtx t = XVECEXP (pat, 0, i);
1394 if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
1395 SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
1398 /* Get the operand values and constraints out of the insn. */
1399 decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc,
1400 constraints, operand_mode, NULL);
1402 /* For every operand, see what registers are allowed. */
1403 for (i = 0; i < noperands; i++)
1405 const char *p = constraints[i];
1406 /* For every alternative, we compute the class of registers allowed
1407 for reloading in CLS, and merge its contents into the reg set
1408 ALLOWED. */
1409 int cls = (int) NO_REGS;
1411 for (;;)
1413 char c = *p;
1415 if (c == '\0' || c == ',' || c == '#')
1417 /* End of one alternative - mark the regs in the current
1418 class, and reset the class. */
1419 IOR_HARD_REG_SET (allowed, reg_class_contents[cls]);
1420 cls = NO_REGS;
1421 p++;
1422 if (c == '#')
1423 do {
1424 c = *p++;
1425 } while (c != '\0' && c != ',');
1426 if (c == '\0')
1427 break;
1428 continue;
1431 switch (c)
1433 case 'g':
1434 cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
1435 break;
1437 default:
1438 enum constraint_num cn = lookup_constraint (p);
1439 if (insn_extra_address_constraint (cn))
1440 cls = (int) reg_class_subunion[cls]
1441 [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
1442 ADDRESS, SCRATCH)];
1443 else
1444 cls = (int) reg_class_subunion[cls]
1445 [reg_class_for_constraint (cn)];
1446 break;
1448 p += CONSTRAINT_LEN (c, p);
1451 /* Those of the registers which are clobbered, but allowed by the
1452 constraints, must be usable as reload registers. So clear them
1453 out of the life information. */
1454 AND_HARD_REG_SET (allowed, clobbered);
1455 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1456 if (TEST_HARD_REG_BIT (allowed, i))
1458 CLEAR_REGNO_REG_SET (&chain->live_throughout, i);
1459 CLEAR_REGNO_REG_SET (&chain->dead_or_set, i);
1463 #endif
1466 /* Copy the global variables n_reloads and rld into the corresponding elts
1467 of CHAIN. */
1468 static void
1469 copy_reloads (struct insn_chain *chain)
1471 chain->n_reloads = n_reloads;
1472 chain->rld = XOBNEWVEC (&reload_obstack, struct reload, n_reloads);
1473 memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1474 reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
1477 /* Walk the chain of insns, and determine for each whether it needs reloads
1478 and/or eliminations. Build the corresponding insns_need_reload list, and
1479 set something_needs_elimination as appropriate. */
1480 static void
1481 calculate_needs_all_insns (int global)
1483 struct insn_chain **pprev_reload = &insns_need_reload;
1484 struct insn_chain *chain, *next = 0;
1486 something_needs_elimination = 0;
1488 reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
1489 for (chain = reload_insn_chain; chain != 0; chain = next)
1491 rtx_insn *insn = chain->insn;
1493 next = chain->next;
1495 /* Clear out the shortcuts. */
1496 chain->n_reloads = 0;
1497 chain->need_elim = 0;
1498 chain->need_reload = 0;
1499 chain->need_operand_change = 0;
1501 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
1502 include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
1503 what effects this has on the known offsets at labels. */
1505 if (LABEL_P (insn) || JUMP_P (insn) || JUMP_TABLE_DATA_P (insn)
1506 || (INSN_P (insn) && REG_NOTES (insn) != 0))
1507 set_label_offsets (insn, insn, 0);
1509 if (INSN_P (insn))
1511 rtx old_body = PATTERN (insn);
1512 int old_code = INSN_CODE (insn);
1513 rtx old_notes = REG_NOTES (insn);
1514 int did_elimination = 0;
1515 int operands_changed = 0;
1517 /* Skip insns that only set an equivalence. */
1518 if (will_delete_init_insn_p (insn))
1519 continue;
1521 /* If needed, eliminate any eliminable registers. */
1522 if (num_eliminable || num_eliminable_invariants)
1523 did_elimination = eliminate_regs_in_insn (insn, 0);
1525 /* Analyze the instruction. */
1526 operands_changed = find_reloads (insn, 0, spill_indirect_levels,
1527 global, spill_reg_order);
1529 /* If a no-op set needs more than one reload, this is likely
1530 to be something that needs input address reloads. We
1531 can't get rid of this cleanly later, and it is of no use
1532 anyway, so discard it now.
1533 We only do this when expensive_optimizations is enabled,
1534 since this complements reload inheritance / output
1535 reload deletion, and it can make debugging harder. */
1536 if (flag_expensive_optimizations && n_reloads > 1)
1538 rtx set = single_set (insn);
1539 if (set
1541 ((SET_SRC (set) == SET_DEST (set)
1542 && REG_P (SET_SRC (set))
1543 && REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER)
1544 || (REG_P (SET_SRC (set)) && REG_P (SET_DEST (set))
1545 && reg_renumber[REGNO (SET_SRC (set))] < 0
1546 && reg_renumber[REGNO (SET_DEST (set))] < 0
1547 && reg_equiv_memory_loc (REGNO (SET_SRC (set))) != NULL
1548 && reg_equiv_memory_loc (REGNO (SET_DEST (set))) != NULL
1549 && rtx_equal_p (reg_equiv_memory_loc (REGNO (SET_SRC (set))),
1550 reg_equiv_memory_loc (REGNO (SET_DEST (set)))))))
1552 if (ira_conflicts_p)
1553 /* Inform IRA about the insn deletion. */
1554 ira_mark_memory_move_deletion (REGNO (SET_DEST (set)),
1555 REGNO (SET_SRC (set)));
1556 delete_insn (insn);
1557 /* Delete it from the reload chain. */
1558 if (chain->prev)
1559 chain->prev->next = next;
1560 else
1561 reload_insn_chain = next;
1562 if (next)
1563 next->prev = chain->prev;
1564 chain->next = unused_insn_chains;
1565 unused_insn_chains = chain;
1566 continue;
1569 if (num_eliminable)
1570 update_eliminable_offsets ();
1572 /* Remember for later shortcuts which insns had any reloads or
1573 register eliminations. */
1574 chain->need_elim = did_elimination;
1575 chain->need_reload = n_reloads > 0;
1576 chain->need_operand_change = operands_changed;
1578 /* Discard any register replacements done. */
1579 if (did_elimination)
1581 obstack_free (&reload_obstack, reload_insn_firstobj);
1582 PATTERN (insn) = old_body;
1583 INSN_CODE (insn) = old_code;
1584 REG_NOTES (insn) = old_notes;
1585 something_needs_elimination = 1;
1588 something_needs_operands_changed |= operands_changed;
1590 if (n_reloads != 0)
1592 copy_reloads (chain);
1593 *pprev_reload = chain;
1594 pprev_reload = &chain->next_need_reload;
1598 *pprev_reload = 0;
1601 /* This function is called from the register allocator to set up estimates
1602 for the cost of eliminating pseudos which have REG_EQUIV equivalences to
1603 an invariant. The structure is similar to calculate_needs_all_insns. */
1605 void
1606 calculate_elim_costs_all_insns (void)
1608 int *reg_equiv_init_cost;
1609 basic_block bb;
1610 int i;
1612 reg_equiv_init_cost = XCNEWVEC (int, max_regno);
1613 init_elim_table ();
1614 init_eliminable_invariants (get_insns (), false);
1616 set_initial_elim_offsets ();
1617 set_initial_label_offsets ();
1619 FOR_EACH_BB_FN (bb, cfun)
1621 rtx_insn *insn;
1622 elim_bb = bb;
1624 FOR_BB_INSNS (bb, insn)
1626 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
1627 include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
1628 what effects this has on the known offsets at labels. */
1630 if (LABEL_P (insn) || JUMP_P (insn) || JUMP_TABLE_DATA_P (insn)
1631 || (INSN_P (insn) && REG_NOTES (insn) != 0))
1632 set_label_offsets (insn, insn, 0);
1634 if (INSN_P (insn))
1636 rtx set = single_set (insn);
1638 /* Skip insns that only set an equivalence. */
1639 if (set && REG_P (SET_DEST (set))
1640 && reg_renumber[REGNO (SET_DEST (set))] < 0
1641 && (reg_equiv_constant (REGNO (SET_DEST (set)))
1642 || reg_equiv_invariant (REGNO (SET_DEST (set)))))
1644 unsigned regno = REGNO (SET_DEST (set));
1645 rtx_insn_list *init = reg_equiv_init (regno);
1646 if (init)
1648 rtx t = eliminate_regs_1 (SET_SRC (set), VOIDmode, insn,
1649 false, true);
1650 int cost = set_src_cost (t, optimize_bb_for_speed_p (bb));
1651 int freq = REG_FREQ_FROM_BB (bb);
1653 reg_equiv_init_cost[regno] = cost * freq;
1654 continue;
1657 /* If needed, eliminate any eliminable registers. */
1658 if (num_eliminable || num_eliminable_invariants)
1659 elimination_costs_in_insn (insn);
1661 if (num_eliminable)
1662 update_eliminable_offsets ();
1666 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1668 if (reg_equiv_invariant (i))
1670 if (reg_equiv_init (i))
1672 int cost = reg_equiv_init_cost[i];
1673 if (dump_file)
1674 fprintf (dump_file,
1675 "Reg %d has equivalence, initial gains %d\n", i, cost);
1676 if (cost != 0)
1677 ira_adjust_equiv_reg_cost (i, cost);
1679 else
1681 if (dump_file)
1682 fprintf (dump_file,
1683 "Reg %d had equivalence, but can't be eliminated\n",
1685 ira_adjust_equiv_reg_cost (i, 0);
1690 free (reg_equiv_init_cost);
1691 free (offsets_known_at);
1692 free (offsets_at);
1693 offsets_at = NULL;
1694 offsets_known_at = NULL;
1697 /* Comparison function for qsort to decide which of two reloads
1698 should be handled first. *P1 and *P2 are the reload numbers. */
1700 static int
1701 reload_reg_class_lower (const void *r1p, const void *r2p)
1703 int r1 = *(const short *) r1p, r2 = *(const short *) r2p;
1704 int t;
1706 /* Consider required reloads before optional ones. */
1707 t = rld[r1].optional - rld[r2].optional;
1708 if (t != 0)
1709 return t;
1711 /* Count all solitary classes before non-solitary ones. */
1712 t = ((reg_class_size[(int) rld[r2].rclass] == 1)
1713 - (reg_class_size[(int) rld[r1].rclass] == 1));
1714 if (t != 0)
1715 return t;
1717 /* Aside from solitaires, consider all multi-reg groups first. */
1718 t = rld[r2].nregs - rld[r1].nregs;
1719 if (t != 0)
1720 return t;
1722 /* Consider reloads in order of increasing reg-class number. */
1723 t = (int) rld[r1].rclass - (int) rld[r2].rclass;
1724 if (t != 0)
1725 return t;
1727 /* If reloads are equally urgent, sort by reload number,
1728 so that the results of qsort leave nothing to chance. */
1729 return r1 - r2;
1732 /* The cost of spilling each hard reg. */
1733 static int spill_cost[FIRST_PSEUDO_REGISTER];
1735 /* When spilling multiple hard registers, we use SPILL_COST for the first
1736 spilled hard reg and SPILL_ADD_COST for subsequent regs. SPILL_ADD_COST
1737 only the first hard reg for a multi-reg pseudo. */
1738 static int spill_add_cost[FIRST_PSEUDO_REGISTER];
1740 /* Map of hard regno to pseudo regno currently occupying the hard
1741 reg. */
1742 static int hard_regno_to_pseudo_regno[FIRST_PSEUDO_REGISTER];
1744 /* Update the spill cost arrays, considering that pseudo REG is live. */
1746 static void
1747 count_pseudo (int reg)
1749 int freq = REG_FREQ (reg);
1750 int r = reg_renumber[reg];
1751 int nregs;
1753 /* Ignore spilled pseudo-registers which can be here only if IRA is used. */
1754 if (ira_conflicts_p && r < 0)
1755 return;
1757 if (REGNO_REG_SET_P (&pseudos_counted, reg)
1758 || REGNO_REG_SET_P (&spilled_pseudos, reg))
1759 return;
1761 SET_REGNO_REG_SET (&pseudos_counted, reg);
1763 gcc_assert (r >= 0);
1765 spill_add_cost[r] += freq;
1766 nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
1767 while (nregs-- > 0)
1769 hard_regno_to_pseudo_regno[r + nregs] = reg;
1770 spill_cost[r + nregs] += freq;
1774 /* Calculate the SPILL_COST and SPILL_ADD_COST arrays and determine the
1775 contents of BAD_SPILL_REGS for the insn described by CHAIN. */
1777 static void
1778 order_regs_for_reload (struct insn_chain *chain)
1780 unsigned i;
1781 HARD_REG_SET used_by_pseudos;
1782 HARD_REG_SET used_by_pseudos2;
1783 reg_set_iterator rsi;
1785 COPY_HARD_REG_SET (bad_spill_regs, fixed_reg_set);
1787 memset (spill_cost, 0, sizeof spill_cost);
1788 memset (spill_add_cost, 0, sizeof spill_add_cost);
1789 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1790 hard_regno_to_pseudo_regno[i] = -1;
1792 /* Count number of uses of each hard reg by pseudo regs allocated to it
1793 and then order them by decreasing use. First exclude hard registers
1794 that are live in or across this insn. */
1796 REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
1797 REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
1798 IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos);
1799 IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos2);
1801 /* Now find out which pseudos are allocated to it, and update
1802 hard_reg_n_uses. */
1803 CLEAR_REG_SET (&pseudos_counted);
1805 EXECUTE_IF_SET_IN_REG_SET
1806 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
1808 count_pseudo (i);
1810 EXECUTE_IF_SET_IN_REG_SET
1811 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
1813 count_pseudo (i);
1815 CLEAR_REG_SET (&pseudos_counted);
1818 /* Vector of reload-numbers showing the order in which the reloads should
1819 be processed. */
1820 static short reload_order[MAX_RELOADS];
1822 /* This is used to keep track of the spill regs used in one insn. */
1823 static HARD_REG_SET used_spill_regs_local;
1825 /* We decided to spill hard register SPILLED, which has a size of
1826 SPILLED_NREGS. Determine how pseudo REG, which is live during the insn,
1827 is affected. We will add it to SPILLED_PSEUDOS if necessary, and we will
1828 update SPILL_COST/SPILL_ADD_COST. */
1830 static void
1831 count_spilled_pseudo (int spilled, int spilled_nregs, int reg)
1833 int freq = REG_FREQ (reg);
1834 int r = reg_renumber[reg];
1835 int nregs;
1837 /* Ignore spilled pseudo-registers which can be here only if IRA is used. */
1838 if (ira_conflicts_p && r < 0)
1839 return;
1841 gcc_assert (r >= 0);
1843 nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
1845 if (REGNO_REG_SET_P (&spilled_pseudos, reg)
1846 || spilled + spilled_nregs <= r || r + nregs <= spilled)
1847 return;
1849 SET_REGNO_REG_SET (&spilled_pseudos, reg);
1851 spill_add_cost[r] -= freq;
1852 while (nregs-- > 0)
1854 hard_regno_to_pseudo_regno[r + nregs] = -1;
1855 spill_cost[r + nregs] -= freq;
1859 /* Find reload register to use for reload number ORDER. */
1861 static int
1862 find_reg (struct insn_chain *chain, int order)
1864 int rnum = reload_order[order];
1865 struct reload *rl = rld + rnum;
1866 int best_cost = INT_MAX;
1867 int best_reg = -1;
1868 unsigned int i, j, n;
1869 int k;
1870 HARD_REG_SET not_usable;
1871 HARD_REG_SET used_by_other_reload;
1872 reg_set_iterator rsi;
1873 static int regno_pseudo_regs[FIRST_PSEUDO_REGISTER];
1874 static int best_regno_pseudo_regs[FIRST_PSEUDO_REGISTER];
1876 COPY_HARD_REG_SET (not_usable, bad_spill_regs);
1877 IOR_HARD_REG_SET (not_usable, bad_spill_regs_global);
1878 IOR_COMPL_HARD_REG_SET (not_usable, reg_class_contents[rl->rclass]);
1880 CLEAR_HARD_REG_SET (used_by_other_reload);
1881 for (k = 0; k < order; k++)
1883 int other = reload_order[k];
1885 if (rld[other].regno >= 0 && reloads_conflict (other, rnum))
1886 for (j = 0; j < rld[other].nregs; j++)
1887 SET_HARD_REG_BIT (used_by_other_reload, rld[other].regno + j);
1890 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1892 #ifdef REG_ALLOC_ORDER
1893 unsigned int regno = reg_alloc_order[i];
1894 #else
1895 unsigned int regno = i;
1896 #endif
1898 if (! TEST_HARD_REG_BIT (not_usable, regno)
1899 && ! TEST_HARD_REG_BIT (used_by_other_reload, regno)
1900 && HARD_REGNO_MODE_OK (regno, rl->mode))
1902 int this_cost = spill_cost[regno];
1903 int ok = 1;
1904 unsigned int this_nregs = hard_regno_nregs[regno][rl->mode];
1906 for (j = 1; j < this_nregs; j++)
1908 this_cost += spill_add_cost[regno + j];
1909 if ((TEST_HARD_REG_BIT (not_usable, regno + j))
1910 || TEST_HARD_REG_BIT (used_by_other_reload, regno + j))
1911 ok = 0;
1913 if (! ok)
1914 continue;
1916 if (ira_conflicts_p)
1918 /* Ask IRA to find a better pseudo-register for
1919 spilling. */
1920 for (n = j = 0; j < this_nregs; j++)
1922 int r = hard_regno_to_pseudo_regno[regno + j];
1924 if (r < 0)
1925 continue;
1926 if (n == 0 || regno_pseudo_regs[n - 1] != r)
1927 regno_pseudo_regs[n++] = r;
1929 regno_pseudo_regs[n++] = -1;
1930 if (best_reg < 0
1931 || ira_better_spill_reload_regno_p (regno_pseudo_regs,
1932 best_regno_pseudo_regs,
1933 rl->in, rl->out,
1934 chain->insn))
1936 best_reg = regno;
1937 for (j = 0;; j++)
1939 best_regno_pseudo_regs[j] = regno_pseudo_regs[j];
1940 if (regno_pseudo_regs[j] < 0)
1941 break;
1944 continue;
1947 if (rl->in && REG_P (rl->in) && REGNO (rl->in) == regno)
1948 this_cost--;
1949 if (rl->out && REG_P (rl->out) && REGNO (rl->out) == regno)
1950 this_cost--;
1951 if (this_cost < best_cost
1952 /* Among registers with equal cost, prefer caller-saved ones, or
1953 use REG_ALLOC_ORDER if it is defined. */
1954 || (this_cost == best_cost
1955 #ifdef REG_ALLOC_ORDER
1956 && (inv_reg_alloc_order[regno]
1957 < inv_reg_alloc_order[best_reg])
1958 #else
1959 && call_used_regs[regno]
1960 && ! call_used_regs[best_reg]
1961 #endif
1964 best_reg = regno;
1965 best_cost = this_cost;
1969 if (best_reg == -1)
1970 return 0;
1972 if (dump_file)
1973 fprintf (dump_file, "Using reg %d for reload %d\n", best_reg, rnum);
1975 rl->nregs = hard_regno_nregs[best_reg][rl->mode];
1976 rl->regno = best_reg;
1978 EXECUTE_IF_SET_IN_REG_SET
1979 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, j, rsi)
1981 count_spilled_pseudo (best_reg, rl->nregs, j);
1984 EXECUTE_IF_SET_IN_REG_SET
1985 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, j, rsi)
1987 count_spilled_pseudo (best_reg, rl->nregs, j);
1990 for (i = 0; i < rl->nregs; i++)
1992 gcc_assert (spill_cost[best_reg + i] == 0);
1993 gcc_assert (spill_add_cost[best_reg + i] == 0);
1994 gcc_assert (hard_regno_to_pseudo_regno[best_reg + i] == -1);
1995 SET_HARD_REG_BIT (used_spill_regs_local, best_reg + i);
1997 return 1;
2000 /* Find more reload regs to satisfy the remaining need of an insn, which
2001 is given by CHAIN.
2002 Do it by ascending class number, since otherwise a reg
2003 might be spilled for a big class and might fail to count
2004 for a smaller class even though it belongs to that class. */
2006 static void
2007 find_reload_regs (struct insn_chain *chain)
2009 int i;
2011 /* In order to be certain of getting the registers we need,
2012 we must sort the reloads into order of increasing register class.
2013 Then our grabbing of reload registers will parallel the process
2014 that provided the reload registers. */
2015 for (i = 0; i < chain->n_reloads; i++)
2017 /* Show whether this reload already has a hard reg. */
2018 if (chain->rld[i].reg_rtx)
2020 int regno = REGNO (chain->rld[i].reg_rtx);
2021 chain->rld[i].regno = regno;
2022 chain->rld[i].nregs
2023 = hard_regno_nregs[regno][GET_MODE (chain->rld[i].reg_rtx)];
2025 else
2026 chain->rld[i].regno = -1;
2027 reload_order[i] = i;
2030 n_reloads = chain->n_reloads;
2031 memcpy (rld, chain->rld, n_reloads * sizeof (struct reload));
2033 CLEAR_HARD_REG_SET (used_spill_regs_local);
2035 if (dump_file)
2036 fprintf (dump_file, "Spilling for insn %d.\n", INSN_UID (chain->insn));
2038 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
2040 /* Compute the order of preference for hard registers to spill. */
2042 order_regs_for_reload (chain);
2044 for (i = 0; i < n_reloads; i++)
2046 int r = reload_order[i];
2048 /* Ignore reloads that got marked inoperative. */
2049 if ((rld[r].out != 0 || rld[r].in != 0 || rld[r].secondary_p)
2050 && ! rld[r].optional
2051 && rld[r].regno == -1)
2052 if (! find_reg (chain, i))
2054 if (dump_file)
2055 fprintf (dump_file, "reload failure for reload %d\n", r);
2056 spill_failure (chain->insn, rld[r].rclass);
2057 failure = 1;
2058 return;
2062 COPY_HARD_REG_SET (chain->used_spill_regs, used_spill_regs_local);
2063 IOR_HARD_REG_SET (used_spill_regs, used_spill_regs_local);
2065 memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
2068 static void
2069 select_reload_regs (void)
2071 struct insn_chain *chain;
2073 /* Try to satisfy the needs for each insn. */
2074 for (chain = insns_need_reload; chain != 0;
2075 chain = chain->next_need_reload)
2076 find_reload_regs (chain);
2079 /* Delete all insns that were inserted by emit_caller_save_insns during
2080 this iteration. */
2081 static void
2082 delete_caller_save_insns (void)
2084 struct insn_chain *c = reload_insn_chain;
2086 while (c != 0)
2088 while (c != 0 && c->is_caller_save_insn)
2090 struct insn_chain *next = c->next;
2091 rtx_insn *insn = c->insn;
2093 if (c == reload_insn_chain)
2094 reload_insn_chain = next;
2095 delete_insn (insn);
2097 if (next)
2098 next->prev = c->prev;
2099 if (c->prev)
2100 c->prev->next = next;
2101 c->next = unused_insn_chains;
2102 unused_insn_chains = c;
2103 c = next;
2105 if (c != 0)
2106 c = c->next;
2110 /* Handle the failure to find a register to spill.
2111 INSN should be one of the insns which needed this particular spill reg. */
2113 static void
2114 spill_failure (rtx_insn *insn, enum reg_class rclass)
2116 if (asm_noperands (PATTERN (insn)) >= 0)
2117 error_for_asm (insn, "can%'t find a register in class %qs while "
2118 "reloading %<asm%>",
2119 reg_class_names[rclass]);
2120 else
2122 error ("unable to find a register to spill in class %qs",
2123 reg_class_names[rclass]);
2125 if (dump_file)
2127 fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
2128 debug_reload_to_stream (dump_file);
2130 fatal_insn ("this is the insn:", insn);
2134 /* Delete an unneeded INSN and any previous insns who sole purpose is loading
2135 data that is dead in INSN. */
2137 static void
2138 delete_dead_insn (rtx_insn *insn)
2140 rtx_insn *prev = prev_active_insn (insn);
2141 rtx prev_dest;
2143 /* If the previous insn sets a register that dies in our insn make
2144 a note that we want to run DCE immediately after reload.
2146 We used to delete the previous insn & recurse, but that's wrong for
2147 block local equivalences. Instead of trying to figure out the exact
2148 circumstances where we can delete the potentially dead insns, just
2149 let DCE do the job. */
2150 if (prev && BLOCK_FOR_INSN (prev) == BLOCK_FOR_INSN (insn)
2151 && GET_CODE (PATTERN (prev)) == SET
2152 && (prev_dest = SET_DEST (PATTERN (prev)), REG_P (prev_dest))
2153 && reg_mentioned_p (prev_dest, PATTERN (insn))
2154 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
2155 && ! side_effects_p (SET_SRC (PATTERN (prev))))
2156 need_dce = 1;
2158 SET_INSN_DELETED (insn);
2161 /* Modify the home of pseudo-reg I.
2162 The new home is present in reg_renumber[I].
2164 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2165 or it may be -1, meaning there is none or it is not relevant.
2166 This is used so that all pseudos spilled from a given hard reg
2167 can share one stack slot. */
2169 static void
2170 alter_reg (int i, int from_reg, bool dont_share_p)
2172 /* When outputting an inline function, this can happen
2173 for a reg that isn't actually used. */
2174 if (regno_reg_rtx[i] == 0)
2175 return;
2177 /* If the reg got changed to a MEM at rtl-generation time,
2178 ignore it. */
2179 if (!REG_P (regno_reg_rtx[i]))
2180 return;
2182 /* Modify the reg-rtx to contain the new hard reg
2183 number or else to contain its pseudo reg number. */
2184 SET_REGNO (regno_reg_rtx[i],
2185 reg_renumber[i] >= 0 ? reg_renumber[i] : i);
2187 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2188 allocate a stack slot for it. */
2190 if (reg_renumber[i] < 0
2191 && REG_N_REFS (i) > 0
2192 && reg_equiv_constant (i) == 0
2193 && (reg_equiv_invariant (i) == 0
2194 || reg_equiv_init (i) == 0)
2195 && reg_equiv_memory_loc (i) == 0)
2197 rtx x = NULL_RTX;
2198 machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2199 unsigned int inherent_size = PSEUDO_REGNO_BYTES (i);
2200 unsigned int inherent_align = GET_MODE_ALIGNMENT (mode);
2201 unsigned int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2202 unsigned int min_align = reg_max_ref_width[i] * BITS_PER_UNIT;
2203 int adjust = 0;
2205 something_was_spilled = true;
2207 if (ira_conflicts_p)
2209 /* Mark the spill for IRA. */
2210 SET_REGNO_REG_SET (&spilled_pseudos, i);
2211 if (!dont_share_p)
2212 x = ira_reuse_stack_slot (i, inherent_size, total_size);
2215 if (x)
2218 /* Each pseudo reg has an inherent size which comes from its own mode,
2219 and a total size which provides room for paradoxical subregs
2220 which refer to the pseudo reg in wider modes.
2222 We can use a slot already allocated if it provides both
2223 enough inherent space and enough total space.
2224 Otherwise, we allocate a new slot, making sure that it has no less
2225 inherent space, and no less total space, then the previous slot. */
2226 else if (from_reg == -1 || (!dont_share_p && ira_conflicts_p))
2228 rtx stack_slot;
2230 /* No known place to spill from => no slot to reuse. */
2231 x = assign_stack_local (mode, total_size,
2232 min_align > inherent_align
2233 || total_size > inherent_size ? -1 : 0);
2235 stack_slot = x;
2237 /* Cancel the big-endian correction done in assign_stack_local.
2238 Get the address of the beginning of the slot. This is so we
2239 can do a big-endian correction unconditionally below. */
2240 if (BYTES_BIG_ENDIAN)
2242 adjust = inherent_size - total_size;
2243 if (adjust)
2244 stack_slot
2245 = adjust_address_nv (x, mode_for_size (total_size
2246 * BITS_PER_UNIT,
2247 MODE_INT, 1),
2248 adjust);
2251 if (! dont_share_p && ira_conflicts_p)
2252 /* Inform IRA about allocation a new stack slot. */
2253 ira_mark_new_stack_slot (stack_slot, i, total_size);
2256 /* Reuse a stack slot if possible. */
2257 else if (spill_stack_slot[from_reg] != 0
2258 && spill_stack_slot_width[from_reg] >= total_size
2259 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2260 >= inherent_size)
2261 && MEM_ALIGN (spill_stack_slot[from_reg]) >= min_align)
2262 x = spill_stack_slot[from_reg];
2264 /* Allocate a bigger slot. */
2265 else
2267 /* Compute maximum size needed, both for inherent size
2268 and for total size. */
2269 rtx stack_slot;
2271 if (spill_stack_slot[from_reg])
2273 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2274 > inherent_size)
2275 mode = GET_MODE (spill_stack_slot[from_reg]);
2276 if (spill_stack_slot_width[from_reg] > total_size)
2277 total_size = spill_stack_slot_width[from_reg];
2278 if (MEM_ALIGN (spill_stack_slot[from_reg]) > min_align)
2279 min_align = MEM_ALIGN (spill_stack_slot[from_reg]);
2282 /* Make a slot with that size. */
2283 x = assign_stack_local (mode, total_size,
2284 min_align > inherent_align
2285 || total_size > inherent_size ? -1 : 0);
2286 stack_slot = x;
2288 /* Cancel the big-endian correction done in assign_stack_local.
2289 Get the address of the beginning of the slot. This is so we
2290 can do a big-endian correction unconditionally below. */
2291 if (BYTES_BIG_ENDIAN)
2293 adjust = GET_MODE_SIZE (mode) - total_size;
2294 if (adjust)
2295 stack_slot
2296 = adjust_address_nv (x, mode_for_size (total_size
2297 * BITS_PER_UNIT,
2298 MODE_INT, 1),
2299 adjust);
2302 spill_stack_slot[from_reg] = stack_slot;
2303 spill_stack_slot_width[from_reg] = total_size;
2306 /* On a big endian machine, the "address" of the slot
2307 is the address of the low part that fits its inherent mode. */
2308 if (BYTES_BIG_ENDIAN && inherent_size < total_size)
2309 adjust += (total_size - inherent_size);
2311 /* If we have any adjustment to make, or if the stack slot is the
2312 wrong mode, make a new stack slot. */
2313 x = adjust_address_nv (x, GET_MODE (regno_reg_rtx[i]), adjust);
2315 /* Set all of the memory attributes as appropriate for a spill. */
2316 set_mem_attrs_for_spill (x);
2318 /* Save the stack slot for later. */
2319 reg_equiv_memory_loc (i) = x;
2323 /* Mark the slots in regs_ever_live for the hard regs used by
2324 pseudo-reg number REGNO, accessed in MODE. */
2326 static void
2327 mark_home_live_1 (int regno, machine_mode mode)
2329 int i, lim;
2331 i = reg_renumber[regno];
2332 if (i < 0)
2333 return;
2334 lim = end_hard_regno (mode, i);
2335 while (i < lim)
2336 df_set_regs_ever_live (i++, true);
2339 /* Mark the slots in regs_ever_live for the hard regs
2340 used by pseudo-reg number REGNO. */
2342 void
2343 mark_home_live (int regno)
2345 if (reg_renumber[regno] >= 0)
2346 mark_home_live_1 (regno, PSEUDO_REGNO_MODE (regno));
2349 /* This function handles the tracking of elimination offsets around branches.
2351 X is a piece of RTL being scanned.
2353 INSN is the insn that it came from, if any.
2355 INITIAL_P is nonzero if we are to set the offset to be the initial
2356 offset and zero if we are setting the offset of the label to be the
2357 current offset. */
2359 static void
2360 set_label_offsets (rtx x, rtx_insn *insn, int initial_p)
2362 enum rtx_code code = GET_CODE (x);
2363 rtx tem;
2364 unsigned int i;
2365 struct elim_table *p;
2367 switch (code)
2369 case LABEL_REF:
2370 if (LABEL_REF_NONLOCAL_P (x))
2371 return;
2373 x = LABEL_REF_LABEL (x);
2375 /* ... fall through ... */
2377 case CODE_LABEL:
2378 /* If we know nothing about this label, set the desired offsets. Note
2379 that this sets the offset at a label to be the offset before a label
2380 if we don't know anything about the label. This is not correct for
2381 the label after a BARRIER, but is the best guess we can make. If
2382 we guessed wrong, we will suppress an elimination that might have
2383 been possible had we been able to guess correctly. */
2385 if (! offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num])
2387 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2388 offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
2389 = (initial_p ? reg_eliminate[i].initial_offset
2390 : reg_eliminate[i].offset);
2391 offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num] = 1;
2394 /* Otherwise, if this is the definition of a label and it is
2395 preceded by a BARRIER, set our offsets to the known offset of
2396 that label. */
2398 else if (x == insn
2399 && (tem = prev_nonnote_insn (insn)) != 0
2400 && BARRIER_P (tem))
2401 set_offsets_for_label (insn);
2402 else
2403 /* If neither of the above cases is true, compare each offset
2404 with those previously recorded and suppress any eliminations
2405 where the offsets disagree. */
2407 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2408 if (offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
2409 != (initial_p ? reg_eliminate[i].initial_offset
2410 : reg_eliminate[i].offset))
2411 reg_eliminate[i].can_eliminate = 0;
2413 return;
2415 case JUMP_TABLE_DATA:
2416 set_label_offsets (PATTERN (insn), insn, initial_p);
2417 return;
2419 case JUMP_INSN:
2420 set_label_offsets (PATTERN (insn), insn, initial_p);
2422 /* ... fall through ... */
2424 case INSN:
2425 case CALL_INSN:
2426 /* Any labels mentioned in REG_LABEL_OPERAND notes can be branched
2427 to indirectly and hence must have all eliminations at their
2428 initial offsets. */
2429 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2430 if (REG_NOTE_KIND (tem) == REG_LABEL_OPERAND)
2431 set_label_offsets (XEXP (tem, 0), insn, 1);
2432 return;
2434 case PARALLEL:
2435 case ADDR_VEC:
2436 case ADDR_DIFF_VEC:
2437 /* Each of the labels in the parallel or address vector must be
2438 at their initial offsets. We want the first field for PARALLEL
2439 and ADDR_VEC and the second field for ADDR_DIFF_VEC. */
2441 for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2442 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2443 insn, initial_p);
2444 return;
2446 case SET:
2447 /* We only care about setting PC. If the source is not RETURN,
2448 IF_THEN_ELSE, or a label, disable any eliminations not at
2449 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2450 isn't one of those possibilities. For branches to a label,
2451 call ourselves recursively.
2453 Note that this can disable elimination unnecessarily when we have
2454 a non-local goto since it will look like a non-constant jump to
2455 someplace in the current function. This isn't a significant
2456 problem since such jumps will normally be when all elimination
2457 pairs are back to their initial offsets. */
2459 if (SET_DEST (x) != pc_rtx)
2460 return;
2462 switch (GET_CODE (SET_SRC (x)))
2464 case PC:
2465 case RETURN:
2466 return;
2468 case LABEL_REF:
2469 set_label_offsets (SET_SRC (x), insn, initial_p);
2470 return;
2472 case IF_THEN_ELSE:
2473 tem = XEXP (SET_SRC (x), 1);
2474 if (GET_CODE (tem) == LABEL_REF)
2475 set_label_offsets (LABEL_REF_LABEL (tem), insn, initial_p);
2476 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2477 break;
2479 tem = XEXP (SET_SRC (x), 2);
2480 if (GET_CODE (tem) == LABEL_REF)
2481 set_label_offsets (LABEL_REF_LABEL (tem), insn, initial_p);
2482 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2483 break;
2484 return;
2486 default:
2487 break;
2490 /* If we reach here, all eliminations must be at their initial
2491 offset because we are doing a jump to a variable address. */
2492 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2493 if (p->offset != p->initial_offset)
2494 p->can_eliminate = 0;
2495 break;
2497 default:
2498 break;
2502 /* This function examines every reg that occurs in X and adjusts the
2503 costs for its elimination which are gathered by IRA. INSN is the
2504 insn in which X occurs. We do not recurse into MEM expressions. */
2506 static void
2507 note_reg_elim_costly (const_rtx x, rtx insn)
2509 subrtx_iterator::array_type array;
2510 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
2512 const_rtx x = *iter;
2513 if (MEM_P (x))
2514 iter.skip_subrtxes ();
2515 else if (REG_P (x)
2516 && REGNO (x) >= FIRST_PSEUDO_REGISTER
2517 && reg_equiv_init (REGNO (x))
2518 && reg_equiv_invariant (REGNO (x)))
2520 rtx t = reg_equiv_invariant (REGNO (x));
2521 rtx new_rtx = eliminate_regs_1 (t, Pmode, insn, true, true);
2522 int cost = set_src_cost (new_rtx, optimize_bb_for_speed_p (elim_bb));
2523 int freq = REG_FREQ_FROM_BB (elim_bb);
2525 if (cost != 0)
2526 ira_adjust_equiv_reg_cost (REGNO (x), -cost * freq);
2531 /* Scan X and replace any eliminable registers (such as fp) with a
2532 replacement (such as sp), plus an offset.
2534 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2535 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2536 MEM, we are allowed to replace a sum of a register and the constant zero
2537 with the register, which we cannot do outside a MEM. In addition, we need
2538 to record the fact that a register is referenced outside a MEM.
2540 If INSN is an insn, it is the insn containing X. If we replace a REG
2541 in a SET_DEST with an equivalent MEM and INSN is nonzero, write a
2542 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2543 the REG is being modified.
2545 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2546 That's used when we eliminate in expressions stored in notes.
2547 This means, do not set ref_outside_mem even if the reference
2548 is outside of MEMs.
2550 If FOR_COSTS is true, we are being called before reload in order to
2551 estimate the costs of keeping registers with an equivalence unallocated.
2553 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2554 replacements done assuming all offsets are at their initial values. If
2555 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2556 encounter, return the actual location so that find_reloads will do
2557 the proper thing. */
2559 static rtx
2560 eliminate_regs_1 (rtx x, machine_mode mem_mode, rtx insn,
2561 bool may_use_invariant, bool for_costs)
2563 enum rtx_code code = GET_CODE (x);
2564 struct elim_table *ep;
2565 int regno;
2566 rtx new_rtx;
2567 int i, j;
2568 const char *fmt;
2569 int copied = 0;
2571 if (! current_function_decl)
2572 return x;
2574 switch (code)
2576 CASE_CONST_ANY:
2577 case CONST:
2578 case SYMBOL_REF:
2579 case CODE_LABEL:
2580 case PC:
2581 case CC0:
2582 case ASM_INPUT:
2583 case ADDR_VEC:
2584 case ADDR_DIFF_VEC:
2585 case RETURN:
2586 return x;
2588 case REG:
2589 regno = REGNO (x);
2591 /* First handle the case where we encounter a bare register that
2592 is eliminable. Replace it with a PLUS. */
2593 if (regno < FIRST_PSEUDO_REGISTER)
2595 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2596 ep++)
2597 if (ep->from_rtx == x && ep->can_eliminate)
2598 return plus_constant (Pmode, ep->to_rtx, ep->previous_offset);
2601 else if (reg_renumber && reg_renumber[regno] < 0
2602 && reg_equivs
2603 && reg_equiv_invariant (regno))
2605 if (may_use_invariant || (insn && DEBUG_INSN_P (insn)))
2606 return eliminate_regs_1 (copy_rtx (reg_equiv_invariant (regno)),
2607 mem_mode, insn, true, for_costs);
2608 /* There exists at least one use of REGNO that cannot be
2609 eliminated. Prevent the defining insn from being deleted. */
2610 reg_equiv_init (regno) = NULL;
2611 if (!for_costs)
2612 alter_reg (regno, -1, true);
2614 return x;
2616 /* You might think handling MINUS in a manner similar to PLUS is a
2617 good idea. It is not. It has been tried multiple times and every
2618 time the change has had to have been reverted.
2620 Other parts of reload know a PLUS is special (gen_reload for example)
2621 and require special code to handle code a reloaded PLUS operand.
2623 Also consider backends where the flags register is clobbered by a
2624 MINUS, but we can emit a PLUS that does not clobber flags (IA-32,
2625 lea instruction comes to mind). If we try to reload a MINUS, we
2626 may kill the flags register that was holding a useful value.
2628 So, please before trying to handle MINUS, consider reload as a
2629 whole instead of this little section as well as the backend issues. */
2630 case PLUS:
2631 /* If this is the sum of an eliminable register and a constant, rework
2632 the sum. */
2633 if (REG_P (XEXP (x, 0))
2634 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2635 && CONSTANT_P (XEXP (x, 1)))
2637 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2638 ep++)
2639 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2641 /* The only time we want to replace a PLUS with a REG (this
2642 occurs when the constant operand of the PLUS is the negative
2643 of the offset) is when we are inside a MEM. We won't want
2644 to do so at other times because that would change the
2645 structure of the insn in a way that reload can't handle.
2646 We special-case the commonest situation in
2647 eliminate_regs_in_insn, so just replace a PLUS with a
2648 PLUS here, unless inside a MEM. */
2649 if (mem_mode != 0 && CONST_INT_P (XEXP (x, 1))
2650 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2651 return ep->to_rtx;
2652 else
2653 return gen_rtx_PLUS (Pmode, ep->to_rtx,
2654 plus_constant (Pmode, XEXP (x, 1),
2655 ep->previous_offset));
2658 /* If the register is not eliminable, we are done since the other
2659 operand is a constant. */
2660 return x;
2663 /* If this is part of an address, we want to bring any constant to the
2664 outermost PLUS. We will do this by doing register replacement in
2665 our operands and seeing if a constant shows up in one of them.
2667 Note that there is no risk of modifying the structure of the insn,
2668 since we only get called for its operands, thus we are either
2669 modifying the address inside a MEM, or something like an address
2670 operand of a load-address insn. */
2673 rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true,
2674 for_costs);
2675 rtx new1 = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2676 for_costs);
2678 if (reg_renumber && (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)))
2680 /* If one side is a PLUS and the other side is a pseudo that
2681 didn't get a hard register but has a reg_equiv_constant,
2682 we must replace the constant here since it may no longer
2683 be in the position of any operand. */
2684 if (GET_CODE (new0) == PLUS && REG_P (new1)
2685 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2686 && reg_renumber[REGNO (new1)] < 0
2687 && reg_equivs
2688 && reg_equiv_constant (REGNO (new1)) != 0)
2689 new1 = reg_equiv_constant (REGNO (new1));
2690 else if (GET_CODE (new1) == PLUS && REG_P (new0)
2691 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2692 && reg_renumber[REGNO (new0)] < 0
2693 && reg_equiv_constant (REGNO (new0)) != 0)
2694 new0 = reg_equiv_constant (REGNO (new0));
2696 new_rtx = form_sum (GET_MODE (x), new0, new1);
2698 /* As above, if we are not inside a MEM we do not want to
2699 turn a PLUS into something else. We might try to do so here
2700 for an addition of 0 if we aren't optimizing. */
2701 if (! mem_mode && GET_CODE (new_rtx) != PLUS)
2702 return gen_rtx_PLUS (GET_MODE (x), new_rtx, const0_rtx);
2703 else
2704 return new_rtx;
2707 return x;
2709 case MULT:
2710 /* If this is the product of an eliminable register and a
2711 constant, apply the distribute law and move the constant out
2712 so that we have (plus (mult ..) ..). This is needed in order
2713 to keep load-address insns valid. This case is pathological.
2714 We ignore the possibility of overflow here. */
2715 if (REG_P (XEXP (x, 0))
2716 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2717 && CONST_INT_P (XEXP (x, 1)))
2718 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2719 ep++)
2720 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2722 if (! mem_mode
2723 /* Refs inside notes or in DEBUG_INSNs don't count for
2724 this purpose. */
2725 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2726 || GET_CODE (insn) == INSN_LIST
2727 || DEBUG_INSN_P (insn))))
2728 ep->ref_outside_mem = 1;
2730 return
2731 plus_constant (Pmode,
2732 gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
2733 ep->previous_offset * INTVAL (XEXP (x, 1)));
2736 /* ... fall through ... */
2738 case CALL:
2739 case COMPARE:
2740 /* See comments before PLUS about handling MINUS. */
2741 case MINUS:
2742 case DIV: case UDIV:
2743 case MOD: case UMOD:
2744 case AND: case IOR: case XOR:
2745 case ROTATERT: case ROTATE:
2746 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2747 case NE: case EQ:
2748 case GE: case GT: case GEU: case GTU:
2749 case LE: case LT: case LEU: case LTU:
2751 rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false,
2752 for_costs);
2753 rtx new1 = XEXP (x, 1)
2754 ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, false,
2755 for_costs) : 0;
2757 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2758 return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
2760 return x;
2762 case EXPR_LIST:
2763 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2764 if (XEXP (x, 0))
2766 new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true,
2767 for_costs);
2768 if (new_rtx != XEXP (x, 0))
2770 /* If this is a REG_DEAD note, it is not valid anymore.
2771 Using the eliminated version could result in creating a
2772 REG_DEAD note for the stack or frame pointer. */
2773 if (REG_NOTE_KIND (x) == REG_DEAD)
2774 return (XEXP (x, 1)
2775 ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2776 for_costs)
2777 : NULL_RTX);
2779 x = alloc_reg_note (REG_NOTE_KIND (x), new_rtx, XEXP (x, 1));
2783 /* ... fall through ... */
2785 case INSN_LIST:
2786 case INT_LIST:
2787 /* Now do eliminations in the rest of the chain. If this was
2788 an EXPR_LIST, this might result in allocating more memory than is
2789 strictly needed, but it simplifies the code. */
2790 if (XEXP (x, 1))
2792 new_rtx = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2793 for_costs);
2794 if (new_rtx != XEXP (x, 1))
2795 return
2796 gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new_rtx);
2798 return x;
2800 case PRE_INC:
2801 case POST_INC:
2802 case PRE_DEC:
2803 case POST_DEC:
2804 /* We do not support elimination of a register that is modified.
2805 elimination_effects has already make sure that this does not
2806 happen. */
2807 return x;
2809 case PRE_MODIFY:
2810 case POST_MODIFY:
2811 /* We do not support elimination of a register that is modified.
2812 elimination_effects has already make sure that this does not
2813 happen. The only remaining case we need to consider here is
2814 that the increment value may be an eliminable register. */
2815 if (GET_CODE (XEXP (x, 1)) == PLUS
2816 && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
2818 rtx new_rtx = eliminate_regs_1 (XEXP (XEXP (x, 1), 1), mem_mode,
2819 insn, true, for_costs);
2821 if (new_rtx != XEXP (XEXP (x, 1), 1))
2822 return gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (x, 0),
2823 gen_rtx_PLUS (GET_MODE (x),
2824 XEXP (x, 0), new_rtx));
2826 return x;
2828 case STRICT_LOW_PART:
2829 case NEG: case NOT:
2830 case SIGN_EXTEND: case ZERO_EXTEND:
2831 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2832 case FLOAT: case FIX:
2833 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2834 case ABS:
2835 case SQRT:
2836 case FFS:
2837 case CLZ:
2838 case CTZ:
2839 case POPCOUNT:
2840 case PARITY:
2841 case BSWAP:
2842 new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false,
2843 for_costs);
2844 if (new_rtx != XEXP (x, 0))
2845 return gen_rtx_fmt_e (code, GET_MODE (x), new_rtx);
2846 return x;
2848 case SUBREG:
2849 /* Similar to above processing, but preserve SUBREG_BYTE.
2850 Convert (subreg (mem)) to (mem) if not paradoxical.
2851 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2852 pseudo didn't get a hard reg, we must replace this with the
2853 eliminated version of the memory location because push_reload
2854 may do the replacement in certain circumstances. */
2855 if (REG_P (SUBREG_REG (x))
2856 && !paradoxical_subreg_p (x)
2857 && reg_equivs
2858 && reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
2860 new_rtx = SUBREG_REG (x);
2862 else
2863 new_rtx = eliminate_regs_1 (SUBREG_REG (x), mem_mode, insn, false, for_costs);
2865 if (new_rtx != SUBREG_REG (x))
2867 int x_size = GET_MODE_SIZE (GET_MODE (x));
2868 int new_size = GET_MODE_SIZE (GET_MODE (new_rtx));
2870 if (MEM_P (new_rtx)
2871 && ((x_size < new_size
2872 #ifdef WORD_REGISTER_OPERATIONS
2873 /* On these machines, combine can create rtl of the form
2874 (set (subreg:m1 (reg:m2 R) 0) ...)
2875 where m1 < m2, and expects something interesting to
2876 happen to the entire word. Moreover, it will use the
2877 (reg:m2 R) later, expecting all bits to be preserved.
2878 So if the number of words is the same, preserve the
2879 subreg so that push_reload can see it. */
2880 && ! ((x_size - 1) / UNITS_PER_WORD
2881 == (new_size -1 ) / UNITS_PER_WORD)
2882 #endif
2884 || x_size == new_size)
2886 return adjust_address_nv (new_rtx, GET_MODE (x), SUBREG_BYTE (x));
2887 else
2888 return gen_rtx_SUBREG (GET_MODE (x), new_rtx, SUBREG_BYTE (x));
2891 return x;
2893 case MEM:
2894 /* Our only special processing is to pass the mode of the MEM to our
2895 recursive call and copy the flags. While we are here, handle this
2896 case more efficiently. */
2898 new_rtx = eliminate_regs_1 (XEXP (x, 0), GET_MODE (x), insn, true,
2899 for_costs);
2900 if (for_costs
2901 && memory_address_p (GET_MODE (x), XEXP (x, 0))
2902 && !memory_address_p (GET_MODE (x), new_rtx))
2903 note_reg_elim_costly (XEXP (x, 0), insn);
2905 return replace_equiv_address_nv (x, new_rtx);
2907 case USE:
2908 /* Handle insn_list USE that a call to a pure function may generate. */
2909 new_rtx = eliminate_regs_1 (XEXP (x, 0), VOIDmode, insn, false,
2910 for_costs);
2911 if (new_rtx != XEXP (x, 0))
2912 return gen_rtx_USE (GET_MODE (x), new_rtx);
2913 return x;
2915 case CLOBBER:
2916 case ASM_OPERANDS:
2917 gcc_assert (insn && DEBUG_INSN_P (insn));
2918 break;
2920 case SET:
2921 gcc_unreachable ();
2923 default:
2924 break;
2927 /* Process each of our operands recursively. If any have changed, make a
2928 copy of the rtx. */
2929 fmt = GET_RTX_FORMAT (code);
2930 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2932 if (*fmt == 'e')
2934 new_rtx = eliminate_regs_1 (XEXP (x, i), mem_mode, insn, false,
2935 for_costs);
2936 if (new_rtx != XEXP (x, i) && ! copied)
2938 x = shallow_copy_rtx (x);
2939 copied = 1;
2941 XEXP (x, i) = new_rtx;
2943 else if (*fmt == 'E')
2945 int copied_vec = 0;
2946 for (j = 0; j < XVECLEN (x, i); j++)
2948 new_rtx = eliminate_regs_1 (XVECEXP (x, i, j), mem_mode, insn, false,
2949 for_costs);
2950 if (new_rtx != XVECEXP (x, i, j) && ! copied_vec)
2952 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
2953 XVEC (x, i)->elem);
2954 if (! copied)
2956 x = shallow_copy_rtx (x);
2957 copied = 1;
2959 XVEC (x, i) = new_v;
2960 copied_vec = 1;
2962 XVECEXP (x, i, j) = new_rtx;
2967 return x;
2971 eliminate_regs (rtx x, machine_mode mem_mode, rtx insn)
2973 if (reg_eliminate == NULL)
2975 gcc_assert (targetm.no_register_allocation);
2976 return x;
2978 return eliminate_regs_1 (x, mem_mode, insn, false, false);
2981 /* Scan rtx X for modifications of elimination target registers. Update
2982 the table of eliminables to reflect the changed state. MEM_MODE is
2983 the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM. */
2985 static void
2986 elimination_effects (rtx x, machine_mode mem_mode)
2988 enum rtx_code code = GET_CODE (x);
2989 struct elim_table *ep;
2990 int regno;
2991 int i, j;
2992 const char *fmt;
2994 switch (code)
2996 CASE_CONST_ANY:
2997 case CONST:
2998 case SYMBOL_REF:
2999 case CODE_LABEL:
3000 case PC:
3001 case CC0:
3002 case ASM_INPUT:
3003 case ADDR_VEC:
3004 case ADDR_DIFF_VEC:
3005 case RETURN:
3006 return;
3008 case REG:
3009 regno = REGNO (x);
3011 /* First handle the case where we encounter a bare register that
3012 is eliminable. Replace it with a PLUS. */
3013 if (regno < FIRST_PSEUDO_REGISTER)
3015 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3016 ep++)
3017 if (ep->from_rtx == x && ep->can_eliminate)
3019 if (! mem_mode)
3020 ep->ref_outside_mem = 1;
3021 return;
3025 else if (reg_renumber[regno] < 0
3026 && reg_equivs
3027 && reg_equiv_constant (regno)
3028 && ! function_invariant_p (reg_equiv_constant (regno)))
3029 elimination_effects (reg_equiv_constant (regno), mem_mode);
3030 return;
3032 case PRE_INC:
3033 case POST_INC:
3034 case PRE_DEC:
3035 case POST_DEC:
3036 case POST_MODIFY:
3037 case PRE_MODIFY:
3038 /* If we modify the source of an elimination rule, disable it. */
3039 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3040 if (ep->from_rtx == XEXP (x, 0))
3041 ep->can_eliminate = 0;
3043 /* If we modify the target of an elimination rule by adding a constant,
3044 update its offset. If we modify the target in any other way, we'll
3045 have to disable the rule as well. */
3046 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3047 if (ep->to_rtx == XEXP (x, 0))
3049 int size = GET_MODE_SIZE (mem_mode);
3051 /* If more bytes than MEM_MODE are pushed, account for them. */
3052 #ifdef PUSH_ROUNDING
3053 if (ep->to_rtx == stack_pointer_rtx)
3054 size = PUSH_ROUNDING (size);
3055 #endif
3056 if (code == PRE_DEC || code == POST_DEC)
3057 ep->offset += size;
3058 else if (code == PRE_INC || code == POST_INC)
3059 ep->offset -= size;
3060 else if (code == PRE_MODIFY || code == POST_MODIFY)
3062 if (GET_CODE (XEXP (x, 1)) == PLUS
3063 && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
3064 && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
3065 ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1));
3066 else
3067 ep->can_eliminate = 0;
3071 /* These two aren't unary operators. */
3072 if (code == POST_MODIFY || code == PRE_MODIFY)
3073 break;
3075 /* Fall through to generic unary operation case. */
3076 case STRICT_LOW_PART:
3077 case NEG: case NOT:
3078 case SIGN_EXTEND: case ZERO_EXTEND:
3079 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3080 case FLOAT: case FIX:
3081 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3082 case ABS:
3083 case SQRT:
3084 case FFS:
3085 case CLZ:
3086 case CTZ:
3087 case POPCOUNT:
3088 case PARITY:
3089 case BSWAP:
3090 elimination_effects (XEXP (x, 0), mem_mode);
3091 return;
3093 case SUBREG:
3094 if (REG_P (SUBREG_REG (x))
3095 && (GET_MODE_SIZE (GET_MODE (x))
3096 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3097 && reg_equivs
3098 && reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
3099 return;
3101 elimination_effects (SUBREG_REG (x), mem_mode);
3102 return;
3104 case USE:
3105 /* If using a register that is the source of an eliminate we still
3106 think can be performed, note it cannot be performed since we don't
3107 know how this register is used. */
3108 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3109 if (ep->from_rtx == XEXP (x, 0))
3110 ep->can_eliminate = 0;
3112 elimination_effects (XEXP (x, 0), mem_mode);
3113 return;
3115 case CLOBBER:
3116 /* If clobbering a register that is the replacement register for an
3117 elimination we still think can be performed, note that it cannot
3118 be performed. Otherwise, we need not be concerned about it. */
3119 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3120 if (ep->to_rtx == XEXP (x, 0))
3121 ep->can_eliminate = 0;
3123 elimination_effects (XEXP (x, 0), mem_mode);
3124 return;
3126 case SET:
3127 /* Check for setting a register that we know about. */
3128 if (REG_P (SET_DEST (x)))
3130 /* See if this is setting the replacement register for an
3131 elimination.
3133 If DEST is the hard frame pointer, we do nothing because we
3134 assume that all assignments to the frame pointer are for
3135 non-local gotos and are being done at a time when they are valid
3136 and do not disturb anything else. Some machines want to
3137 eliminate a fake argument pointer (or even a fake frame pointer)
3138 with either the real frame or the stack pointer. Assignments to
3139 the hard frame pointer must not prevent this elimination. */
3141 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3142 ep++)
3143 if (ep->to_rtx == SET_DEST (x)
3144 && SET_DEST (x) != hard_frame_pointer_rtx)
3146 /* If it is being incremented, adjust the offset. Otherwise,
3147 this elimination can't be done. */
3148 rtx src = SET_SRC (x);
3150 if (GET_CODE (src) == PLUS
3151 && XEXP (src, 0) == SET_DEST (x)
3152 && CONST_INT_P (XEXP (src, 1)))
3153 ep->offset -= INTVAL (XEXP (src, 1));
3154 else
3155 ep->can_eliminate = 0;
3159 elimination_effects (SET_DEST (x), VOIDmode);
3160 elimination_effects (SET_SRC (x), VOIDmode);
3161 return;
3163 case MEM:
3164 /* Our only special processing is to pass the mode of the MEM to our
3165 recursive call. */
3166 elimination_effects (XEXP (x, 0), GET_MODE (x));
3167 return;
3169 default:
3170 break;
3173 fmt = GET_RTX_FORMAT (code);
3174 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3176 if (*fmt == 'e')
3177 elimination_effects (XEXP (x, i), mem_mode);
3178 else if (*fmt == 'E')
3179 for (j = 0; j < XVECLEN (x, i); j++)
3180 elimination_effects (XVECEXP (x, i, j), mem_mode);
3184 /* Descend through rtx X and verify that no references to eliminable registers
3185 remain. If any do remain, mark the involved register as not
3186 eliminable. */
3188 static void
3189 check_eliminable_occurrences (rtx x)
3191 const char *fmt;
3192 int i;
3193 enum rtx_code code;
3195 if (x == 0)
3196 return;
3198 code = GET_CODE (x);
3200 if (code == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3202 struct elim_table *ep;
3204 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3205 if (ep->from_rtx == x)
3206 ep->can_eliminate = 0;
3207 return;
3210 fmt = GET_RTX_FORMAT (code);
3211 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3213 if (*fmt == 'e')
3214 check_eliminable_occurrences (XEXP (x, i));
3215 else if (*fmt == 'E')
3217 int j;
3218 for (j = 0; j < XVECLEN (x, i); j++)
3219 check_eliminable_occurrences (XVECEXP (x, i, j));
3224 /* Scan INSN and eliminate all eliminable registers in it.
3226 If REPLACE is nonzero, do the replacement destructively. Also
3227 delete the insn as dead it if it is setting an eliminable register.
3229 If REPLACE is zero, do all our allocations in reload_obstack.
3231 If no eliminations were done and this insn doesn't require any elimination
3232 processing (these are not identical conditions: it might be updating sp,
3233 but not referencing fp; this needs to be seen during reload_as_needed so
3234 that the offset between fp and sp can be taken into consideration), zero
3235 is returned. Otherwise, 1 is returned. */
3237 static int
3238 eliminate_regs_in_insn (rtx_insn *insn, int replace)
3240 int icode = recog_memoized (insn);
3241 rtx old_body = PATTERN (insn);
3242 int insn_is_asm = asm_noperands (old_body) >= 0;
3243 rtx old_set = single_set (insn);
3244 rtx new_body;
3245 int val = 0;
3246 int i;
3247 rtx substed_operand[MAX_RECOG_OPERANDS];
3248 rtx orig_operand[MAX_RECOG_OPERANDS];
3249 struct elim_table *ep;
3250 rtx plus_src, plus_cst_src;
3252 if (! insn_is_asm && icode < 0)
3254 gcc_assert (DEBUG_INSN_P (insn)
3255 || GET_CODE (PATTERN (insn)) == USE
3256 || GET_CODE (PATTERN (insn)) == CLOBBER
3257 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
3258 if (DEBUG_INSN_P (insn))
3259 INSN_VAR_LOCATION_LOC (insn)
3260 = eliminate_regs (INSN_VAR_LOCATION_LOC (insn), VOIDmode, insn);
3261 return 0;
3264 if (old_set != 0 && REG_P (SET_DEST (old_set))
3265 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3267 /* Check for setting an eliminable register. */
3268 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3269 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3271 /* If this is setting the frame pointer register to the
3272 hardware frame pointer register and this is an elimination
3273 that will be done (tested above), this insn is really
3274 adjusting the frame pointer downward to compensate for
3275 the adjustment done before a nonlocal goto. */
3276 if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
3277 && ep->from == FRAME_POINTER_REGNUM
3278 && ep->to == HARD_FRAME_POINTER_REGNUM)
3280 rtx base = SET_SRC (old_set);
3281 rtx_insn *base_insn = insn;
3282 HOST_WIDE_INT offset = 0;
3284 while (base != ep->to_rtx)
3286 rtx_insn *prev_insn;
3287 rtx prev_set;
3289 if (GET_CODE (base) == PLUS
3290 && CONST_INT_P (XEXP (base, 1)))
3292 offset += INTVAL (XEXP (base, 1));
3293 base = XEXP (base, 0);
3295 else if ((prev_insn = prev_nonnote_insn (base_insn)) != 0
3296 && (prev_set = single_set (prev_insn)) != 0
3297 && rtx_equal_p (SET_DEST (prev_set), base))
3299 base = SET_SRC (prev_set);
3300 base_insn = prev_insn;
3302 else
3303 break;
3306 if (base == ep->to_rtx)
3308 rtx src = plus_constant (Pmode, ep->to_rtx,
3309 offset - ep->offset);
3311 new_body = old_body;
3312 if (! replace)
3314 new_body = copy_insn (old_body);
3315 if (REG_NOTES (insn))
3316 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3318 PATTERN (insn) = new_body;
3319 old_set = single_set (insn);
3321 /* First see if this insn remains valid when we
3322 make the change. If not, keep the INSN_CODE
3323 the same and let reload fit it up. */
3324 validate_change (insn, &SET_SRC (old_set), src, 1);
3325 validate_change (insn, &SET_DEST (old_set),
3326 ep->to_rtx, 1);
3327 if (! apply_change_group ())
3329 SET_SRC (old_set) = src;
3330 SET_DEST (old_set) = ep->to_rtx;
3333 val = 1;
3334 goto done;
3338 /* In this case this insn isn't serving a useful purpose. We
3339 will delete it in reload_as_needed once we know that this
3340 elimination is, in fact, being done.
3342 If REPLACE isn't set, we can't delete this insn, but needn't
3343 process it since it won't be used unless something changes. */
3344 if (replace)
3346 delete_dead_insn (insn);
3347 return 1;
3349 val = 1;
3350 goto done;
3354 /* We allow one special case which happens to work on all machines we
3355 currently support: a single set with the source or a REG_EQUAL
3356 note being a PLUS of an eliminable register and a constant. */
3357 plus_src = plus_cst_src = 0;
3358 if (old_set && REG_P (SET_DEST (old_set)))
3360 if (GET_CODE (SET_SRC (old_set)) == PLUS)
3361 plus_src = SET_SRC (old_set);
3362 /* First see if the source is of the form (plus (...) CST). */
3363 if (plus_src
3364 && CONST_INT_P (XEXP (plus_src, 1)))
3365 plus_cst_src = plus_src;
3366 else if (REG_P (SET_SRC (old_set))
3367 || plus_src)
3369 /* Otherwise, see if we have a REG_EQUAL note of the form
3370 (plus (...) CST). */
3371 rtx links;
3372 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
3374 if ((REG_NOTE_KIND (links) == REG_EQUAL
3375 || REG_NOTE_KIND (links) == REG_EQUIV)
3376 && GET_CODE (XEXP (links, 0)) == PLUS
3377 && CONST_INT_P (XEXP (XEXP (links, 0), 1)))
3379 plus_cst_src = XEXP (links, 0);
3380 break;
3385 /* Check that the first operand of the PLUS is a hard reg or
3386 the lowpart subreg of one. */
3387 if (plus_cst_src)
3389 rtx reg = XEXP (plus_cst_src, 0);
3390 if (GET_CODE (reg) == SUBREG && subreg_lowpart_p (reg))
3391 reg = SUBREG_REG (reg);
3393 if (!REG_P (reg) || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
3394 plus_cst_src = 0;
3397 if (plus_cst_src)
3399 rtx reg = XEXP (plus_cst_src, 0);
3400 HOST_WIDE_INT offset = INTVAL (XEXP (plus_cst_src, 1));
3402 if (GET_CODE (reg) == SUBREG)
3403 reg = SUBREG_REG (reg);
3405 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3406 if (ep->from_rtx == reg && ep->can_eliminate)
3408 rtx to_rtx = ep->to_rtx;
3409 offset += ep->offset;
3410 offset = trunc_int_for_mode (offset, GET_MODE (plus_cst_src));
3412 if (GET_CODE (XEXP (plus_cst_src, 0)) == SUBREG)
3413 to_rtx = gen_lowpart (GET_MODE (XEXP (plus_cst_src, 0)),
3414 to_rtx);
3415 /* If we have a nonzero offset, and the source is already
3416 a simple REG, the following transformation would
3417 increase the cost of the insn by replacing a simple REG
3418 with (plus (reg sp) CST). So try only when we already
3419 had a PLUS before. */
3420 if (offset == 0 || plus_src)
3422 rtx new_src = plus_constant (GET_MODE (to_rtx),
3423 to_rtx, offset);
3425 new_body = old_body;
3426 if (! replace)
3428 new_body = copy_insn (old_body);
3429 if (REG_NOTES (insn))
3430 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3432 PATTERN (insn) = new_body;
3433 old_set = single_set (insn);
3435 /* First see if this insn remains valid when we make the
3436 change. If not, try to replace the whole pattern with
3437 a simple set (this may help if the original insn was a
3438 PARALLEL that was only recognized as single_set due to
3439 REG_UNUSED notes). If this isn't valid either, keep
3440 the INSN_CODE the same and let reload fix it up. */
3441 if (!validate_change (insn, &SET_SRC (old_set), new_src, 0))
3443 rtx new_pat = gen_rtx_SET (SET_DEST (old_set), new_src);
3445 if (!validate_change (insn, &PATTERN (insn), new_pat, 0))
3446 SET_SRC (old_set) = new_src;
3449 else
3450 break;
3452 val = 1;
3453 /* This can't have an effect on elimination offsets, so skip right
3454 to the end. */
3455 goto done;
3459 /* Determine the effects of this insn on elimination offsets. */
3460 elimination_effects (old_body, VOIDmode);
3462 /* Eliminate all eliminable registers occurring in operands that
3463 can be handled by reload. */
3464 extract_insn (insn);
3465 for (i = 0; i < recog_data.n_operands; i++)
3467 orig_operand[i] = recog_data.operand[i];
3468 substed_operand[i] = recog_data.operand[i];
3470 /* For an asm statement, every operand is eliminable. */
3471 if (insn_is_asm || insn_data[icode].operand[i].eliminable)
3473 bool is_set_src, in_plus;
3475 /* Check for setting a register that we know about. */
3476 if (recog_data.operand_type[i] != OP_IN
3477 && REG_P (orig_operand[i]))
3479 /* If we are assigning to a register that can be eliminated, it
3480 must be as part of a PARALLEL, since the code above handles
3481 single SETs. We must indicate that we can no longer
3482 eliminate this reg. */
3483 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3484 ep++)
3485 if (ep->from_rtx == orig_operand[i])
3486 ep->can_eliminate = 0;
3489 /* Companion to the above plus substitution, we can allow
3490 invariants as the source of a plain move. */
3491 is_set_src = false;
3492 if (old_set
3493 && recog_data.operand_loc[i] == &SET_SRC (old_set))
3494 is_set_src = true;
3495 in_plus = false;
3496 if (plus_src
3497 && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
3498 || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
3499 in_plus = true;
3501 substed_operand[i]
3502 = eliminate_regs_1 (recog_data.operand[i], VOIDmode,
3503 replace ? insn : NULL_RTX,
3504 is_set_src || in_plus, false);
3505 if (substed_operand[i] != orig_operand[i])
3506 val = 1;
3507 /* Terminate the search in check_eliminable_occurrences at
3508 this point. */
3509 *recog_data.operand_loc[i] = 0;
3511 /* If an output operand changed from a REG to a MEM and INSN is an
3512 insn, write a CLOBBER insn. */
3513 if (recog_data.operand_type[i] != OP_IN
3514 && REG_P (orig_operand[i])
3515 && MEM_P (substed_operand[i])
3516 && replace)
3517 emit_insn_after (gen_clobber (orig_operand[i]), insn);
3521 for (i = 0; i < recog_data.n_dups; i++)
3522 *recog_data.dup_loc[i]
3523 = *recog_data.operand_loc[(int) recog_data.dup_num[i]];
3525 /* If any eliminable remain, they aren't eliminable anymore. */
3526 check_eliminable_occurrences (old_body);
3528 /* Substitute the operands; the new values are in the substed_operand
3529 array. */
3530 for (i = 0; i < recog_data.n_operands; i++)
3531 *recog_data.operand_loc[i] = substed_operand[i];
3532 for (i = 0; i < recog_data.n_dups; i++)
3533 *recog_data.dup_loc[i] = substed_operand[(int) recog_data.dup_num[i]];
3535 /* If we are replacing a body that was a (set X (plus Y Z)), try to
3536 re-recognize the insn. We do this in case we had a simple addition
3537 but now can do this as a load-address. This saves an insn in this
3538 common case.
3539 If re-recognition fails, the old insn code number will still be used,
3540 and some register operands may have changed into PLUS expressions.
3541 These will be handled by find_reloads by loading them into a register
3542 again. */
3544 if (val)
3546 /* If we aren't replacing things permanently and we changed something,
3547 make another copy to ensure that all the RTL is new. Otherwise
3548 things can go wrong if find_reload swaps commutative operands
3549 and one is inside RTL that has been copied while the other is not. */
3550 new_body = old_body;
3551 if (! replace)
3553 new_body = copy_insn (old_body);
3554 if (REG_NOTES (insn))
3555 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3557 PATTERN (insn) = new_body;
3559 /* If we had a move insn but now we don't, rerecognize it. This will
3560 cause spurious re-recognition if the old move had a PARALLEL since
3561 the new one still will, but we can't call single_set without
3562 having put NEW_BODY into the insn and the re-recognition won't
3563 hurt in this rare case. */
3564 /* ??? Why this huge if statement - why don't we just rerecognize the
3565 thing always? */
3566 if (! insn_is_asm
3567 && old_set != 0
3568 && ((REG_P (SET_SRC (old_set))
3569 && (GET_CODE (new_body) != SET
3570 || !REG_P (SET_SRC (new_body))))
3571 /* If this was a load from or store to memory, compare
3572 the MEM in recog_data.operand to the one in the insn.
3573 If they are not equal, then rerecognize the insn. */
3574 || (old_set != 0
3575 && ((MEM_P (SET_SRC (old_set))
3576 && SET_SRC (old_set) != recog_data.operand[1])
3577 || (MEM_P (SET_DEST (old_set))
3578 && SET_DEST (old_set) != recog_data.operand[0])))
3579 /* If this was an add insn before, rerecognize. */
3580 || GET_CODE (SET_SRC (old_set)) == PLUS))
3582 int new_icode = recog (PATTERN (insn), insn, 0);
3583 if (new_icode >= 0)
3584 INSN_CODE (insn) = new_icode;
3588 /* Restore the old body. If there were any changes to it, we made a copy
3589 of it while the changes were still in place, so we'll correctly return
3590 a modified insn below. */
3591 if (! replace)
3593 /* Restore the old body. */
3594 for (i = 0; i < recog_data.n_operands; i++)
3595 /* Restoring a top-level match_parallel would clobber the new_body
3596 we installed in the insn. */
3597 if (recog_data.operand_loc[i] != &PATTERN (insn))
3598 *recog_data.operand_loc[i] = orig_operand[i];
3599 for (i = 0; i < recog_data.n_dups; i++)
3600 *recog_data.dup_loc[i] = orig_operand[(int) recog_data.dup_num[i]];
3603 /* Update all elimination pairs to reflect the status after the current
3604 insn. The changes we make were determined by the earlier call to
3605 elimination_effects.
3607 We also detect cases where register elimination cannot be done,
3608 namely, if a register would be both changed and referenced outside a MEM
3609 in the resulting insn since such an insn is often undefined and, even if
3610 not, we cannot know what meaning will be given to it. Note that it is
3611 valid to have a register used in an address in an insn that changes it
3612 (presumably with a pre- or post-increment or decrement).
3614 If anything changes, return nonzero. */
3616 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3618 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3619 ep->can_eliminate = 0;
3621 ep->ref_outside_mem = 0;
3623 if (ep->previous_offset != ep->offset)
3624 val = 1;
3627 done:
3628 /* If we changed something, perform elimination in REG_NOTES. This is
3629 needed even when REPLACE is zero because a REG_DEAD note might refer
3630 to a register that we eliminate and could cause a different number
3631 of spill registers to be needed in the final reload pass than in
3632 the pre-passes. */
3633 if (val && REG_NOTES (insn) != 0)
3634 REG_NOTES (insn)
3635 = eliminate_regs_1 (REG_NOTES (insn), VOIDmode, REG_NOTES (insn), true,
3636 false);
3638 return val;
3641 /* Like eliminate_regs_in_insn, but only estimate costs for the use of the
3642 register allocator. INSN is the instruction we need to examine, we perform
3643 eliminations in its operands and record cases where eliminating a reg with
3644 an invariant equivalence would add extra cost. */
3646 static void
3647 elimination_costs_in_insn (rtx_insn *insn)
3649 int icode = recog_memoized (insn);
3650 rtx old_body = PATTERN (insn);
3651 int insn_is_asm = asm_noperands (old_body) >= 0;
3652 rtx old_set = single_set (insn);
3653 int i;
3654 rtx orig_operand[MAX_RECOG_OPERANDS];
3655 rtx orig_dup[MAX_RECOG_OPERANDS];
3656 struct elim_table *ep;
3657 rtx plus_src, plus_cst_src;
3658 bool sets_reg_p;
3660 if (! insn_is_asm && icode < 0)
3662 gcc_assert (DEBUG_INSN_P (insn)
3663 || GET_CODE (PATTERN (insn)) == USE
3664 || GET_CODE (PATTERN (insn)) == CLOBBER
3665 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
3666 return;
3669 if (old_set != 0 && REG_P (SET_DEST (old_set))
3670 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3672 /* Check for setting an eliminable register. */
3673 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3674 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3675 return;
3678 /* We allow one special case which happens to work on all machines we
3679 currently support: a single set with the source or a REG_EQUAL
3680 note being a PLUS of an eliminable register and a constant. */
3681 plus_src = plus_cst_src = 0;
3682 sets_reg_p = false;
3683 if (old_set && REG_P (SET_DEST (old_set)))
3685 sets_reg_p = true;
3686 if (GET_CODE (SET_SRC (old_set)) == PLUS)
3687 plus_src = SET_SRC (old_set);
3688 /* First see if the source is of the form (plus (...) CST). */
3689 if (plus_src
3690 && CONST_INT_P (XEXP (plus_src, 1)))
3691 plus_cst_src = plus_src;
3692 else if (REG_P (SET_SRC (old_set))
3693 || plus_src)
3695 /* Otherwise, see if we have a REG_EQUAL note of the form
3696 (plus (...) CST). */
3697 rtx links;
3698 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
3700 if ((REG_NOTE_KIND (links) == REG_EQUAL
3701 || REG_NOTE_KIND (links) == REG_EQUIV)
3702 && GET_CODE (XEXP (links, 0)) == PLUS
3703 && CONST_INT_P (XEXP (XEXP (links, 0), 1)))
3705 plus_cst_src = XEXP (links, 0);
3706 break;
3712 /* Determine the effects of this insn on elimination offsets. */
3713 elimination_effects (old_body, VOIDmode);
3715 /* Eliminate all eliminable registers occurring in operands that
3716 can be handled by reload. */
3717 extract_insn (insn);
3718 for (i = 0; i < recog_data.n_dups; i++)
3719 orig_dup[i] = *recog_data.dup_loc[i];
3721 for (i = 0; i < recog_data.n_operands; i++)
3723 orig_operand[i] = recog_data.operand[i];
3725 /* For an asm statement, every operand is eliminable. */
3726 if (insn_is_asm || insn_data[icode].operand[i].eliminable)
3728 bool is_set_src, in_plus;
3730 /* Check for setting a register that we know about. */
3731 if (recog_data.operand_type[i] != OP_IN
3732 && REG_P (orig_operand[i]))
3734 /* If we are assigning to a register that can be eliminated, it
3735 must be as part of a PARALLEL, since the code above handles
3736 single SETs. We must indicate that we can no longer
3737 eliminate this reg. */
3738 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3739 ep++)
3740 if (ep->from_rtx == orig_operand[i])
3741 ep->can_eliminate = 0;
3744 /* Companion to the above plus substitution, we can allow
3745 invariants as the source of a plain move. */
3746 is_set_src = false;
3747 if (old_set && recog_data.operand_loc[i] == &SET_SRC (old_set))
3748 is_set_src = true;
3749 if (is_set_src && !sets_reg_p)
3750 note_reg_elim_costly (SET_SRC (old_set), insn);
3751 in_plus = false;
3752 if (plus_src && sets_reg_p
3753 && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
3754 || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
3755 in_plus = true;
3757 eliminate_regs_1 (recog_data.operand[i], VOIDmode,
3758 NULL_RTX,
3759 is_set_src || in_plus, true);
3760 /* Terminate the search in check_eliminable_occurrences at
3761 this point. */
3762 *recog_data.operand_loc[i] = 0;
3766 for (i = 0; i < recog_data.n_dups; i++)
3767 *recog_data.dup_loc[i]
3768 = *recog_data.operand_loc[(int) recog_data.dup_num[i]];
3770 /* If any eliminable remain, they aren't eliminable anymore. */
3771 check_eliminable_occurrences (old_body);
3773 /* Restore the old body. */
3774 for (i = 0; i < recog_data.n_operands; i++)
3775 *recog_data.operand_loc[i] = orig_operand[i];
3776 for (i = 0; i < recog_data.n_dups; i++)
3777 *recog_data.dup_loc[i] = orig_dup[i];
3779 /* Update all elimination pairs to reflect the status after the current
3780 insn. The changes we make were determined by the earlier call to
3781 elimination_effects. */
3783 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3785 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3786 ep->can_eliminate = 0;
3788 ep->ref_outside_mem = 0;
3791 return;
3794 /* Loop through all elimination pairs.
3795 Recalculate the number not at initial offset.
3797 Compute the maximum offset (minimum offset if the stack does not
3798 grow downward) for each elimination pair. */
3800 static void
3801 update_eliminable_offsets (void)
3803 struct elim_table *ep;
3805 num_not_at_initial_offset = 0;
3806 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3808 ep->previous_offset = ep->offset;
3809 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3810 num_not_at_initial_offset++;
3814 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3815 replacement we currently believe is valid, mark it as not eliminable if X
3816 modifies DEST in any way other than by adding a constant integer to it.
3818 If DEST is the frame pointer, we do nothing because we assume that
3819 all assignments to the hard frame pointer are nonlocal gotos and are being
3820 done at a time when they are valid and do not disturb anything else.
3821 Some machines want to eliminate a fake argument pointer with either the
3822 frame or stack pointer. Assignments to the hard frame pointer must not
3823 prevent this elimination.
3825 Called via note_stores from reload before starting its passes to scan
3826 the insns of the function. */
3828 static void
3829 mark_not_eliminable (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
3831 unsigned int i;
3833 /* A SUBREG of a hard register here is just changing its mode. We should
3834 not see a SUBREG of an eliminable hard register, but check just in
3835 case. */
3836 if (GET_CODE (dest) == SUBREG)
3837 dest = SUBREG_REG (dest);
3839 if (dest == hard_frame_pointer_rtx)
3840 return;
3842 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3843 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3844 && (GET_CODE (x) != SET
3845 || GET_CODE (SET_SRC (x)) != PLUS
3846 || XEXP (SET_SRC (x), 0) != dest
3847 || !CONST_INT_P (XEXP (SET_SRC (x), 1))))
3849 reg_eliminate[i].can_eliminate_previous
3850 = reg_eliminate[i].can_eliminate = 0;
3851 num_eliminable--;
3855 /* Verify that the initial elimination offsets did not change since the
3856 last call to set_initial_elim_offsets. This is used to catch cases
3857 where something illegal happened during reload_as_needed that could
3858 cause incorrect code to be generated if we did not check for it. */
3860 static bool
3861 verify_initial_elim_offsets (void)
3863 HOST_WIDE_INT t;
3865 if (!num_eliminable)
3866 return true;
3868 #ifdef ELIMINABLE_REGS
3870 struct elim_table *ep;
3872 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3874 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t);
3875 if (t != ep->initial_offset)
3876 return false;
3879 #else
3880 INITIAL_FRAME_POINTER_OFFSET (t);
3881 if (t != reg_eliminate[0].initial_offset)
3882 return false;
3883 #endif
3885 return true;
3888 /* Reset all offsets on eliminable registers to their initial values. */
3890 static void
3891 set_initial_elim_offsets (void)
3893 struct elim_table *ep = reg_eliminate;
3895 #ifdef ELIMINABLE_REGS
3896 for (; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3898 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
3899 ep->previous_offset = ep->offset = ep->initial_offset;
3901 #else
3902 INITIAL_FRAME_POINTER_OFFSET (ep->initial_offset);
3903 ep->previous_offset = ep->offset = ep->initial_offset;
3904 #endif
3906 num_not_at_initial_offset = 0;
3909 /* Subroutine of set_initial_label_offsets called via for_each_eh_label. */
3911 static void
3912 set_initial_eh_label_offset (rtx label)
3914 set_label_offsets (label, NULL, 1);
3917 /* Initialize the known label offsets.
3918 Set a known offset for each forced label to be at the initial offset
3919 of each elimination. We do this because we assume that all
3920 computed jumps occur from a location where each elimination is
3921 at its initial offset.
3922 For all other labels, show that we don't know the offsets. */
3924 static void
3925 set_initial_label_offsets (void)
3927 memset (offsets_known_at, 0, num_labels);
3929 for (rtx_insn_list *x = forced_labels; x; x = x->next ())
3930 if (x->insn ())
3931 set_label_offsets (x->insn (), NULL, 1);
3933 for (rtx_insn_list *x = nonlocal_goto_handler_labels; x; x = x->next ())
3934 if (x->insn ())
3935 set_label_offsets (x->insn (), NULL, 1);
3937 for_each_eh_label (set_initial_eh_label_offset);
3940 /* Set all elimination offsets to the known values for the code label given
3941 by INSN. */
3943 static void
3944 set_offsets_for_label (rtx_insn *insn)
3946 unsigned int i;
3947 int label_nr = CODE_LABEL_NUMBER (insn);
3948 struct elim_table *ep;
3950 num_not_at_initial_offset = 0;
3951 for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++)
3953 ep->offset = ep->previous_offset
3954 = offsets_at[label_nr - first_label_num][i];
3955 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3956 num_not_at_initial_offset++;
3960 /* See if anything that happened changes which eliminations are valid.
3961 For example, on the SPARC, whether or not the frame pointer can
3962 be eliminated can depend on what registers have been used. We need
3963 not check some conditions again (such as flag_omit_frame_pointer)
3964 since they can't have changed. */
3966 static void
3967 update_eliminables (HARD_REG_SET *pset)
3969 int previous_frame_pointer_needed = frame_pointer_needed;
3970 struct elim_table *ep;
3972 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3973 if ((ep->from == HARD_FRAME_POINTER_REGNUM
3974 && targetm.frame_pointer_required ())
3975 #ifdef ELIMINABLE_REGS
3976 || ! targetm.can_eliminate (ep->from, ep->to)
3977 #endif
3979 ep->can_eliminate = 0;
3981 /* Look for the case where we have discovered that we can't replace
3982 register A with register B and that means that we will now be
3983 trying to replace register A with register C. This means we can
3984 no longer replace register C with register B and we need to disable
3985 such an elimination, if it exists. This occurs often with A == ap,
3986 B == sp, and C == fp. */
3988 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3990 struct elim_table *op;
3991 int new_to = -1;
3993 if (! ep->can_eliminate && ep->can_eliminate_previous)
3995 /* Find the current elimination for ep->from, if there is a
3996 new one. */
3997 for (op = reg_eliminate;
3998 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3999 if (op->from == ep->from && op->can_eliminate)
4001 new_to = op->to;
4002 break;
4005 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
4006 disable it. */
4007 for (op = reg_eliminate;
4008 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
4009 if (op->from == new_to && op->to == ep->to)
4010 op->can_eliminate = 0;
4014 /* See if any registers that we thought we could eliminate the previous
4015 time are no longer eliminable. If so, something has changed and we
4016 must spill the register. Also, recompute the number of eliminable
4017 registers and see if the frame pointer is needed; it is if there is
4018 no elimination of the frame pointer that we can perform. */
4020 frame_pointer_needed = 1;
4021 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
4023 if (ep->can_eliminate
4024 && ep->from == FRAME_POINTER_REGNUM
4025 && ep->to != HARD_FRAME_POINTER_REGNUM
4026 && (! SUPPORTS_STACK_ALIGNMENT
4027 || ! crtl->stack_realign_needed))
4028 frame_pointer_needed = 0;
4030 if (! ep->can_eliminate && ep->can_eliminate_previous)
4032 ep->can_eliminate_previous = 0;
4033 SET_HARD_REG_BIT (*pset, ep->from);
4034 num_eliminable--;
4038 /* If we didn't need a frame pointer last time, but we do now, spill
4039 the hard frame pointer. */
4040 if (frame_pointer_needed && ! previous_frame_pointer_needed)
4041 SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM);
4044 /* Call update_eliminables an spill any registers we can't eliminate anymore.
4045 Return true iff a register was spilled. */
4047 static bool
4048 update_eliminables_and_spill (void)
4050 int i;
4051 bool did_spill = false;
4052 HARD_REG_SET to_spill;
4053 CLEAR_HARD_REG_SET (to_spill);
4054 update_eliminables (&to_spill);
4055 AND_COMPL_HARD_REG_SET (used_spill_regs, to_spill);
4057 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4058 if (TEST_HARD_REG_BIT (to_spill, i))
4060 spill_hard_reg (i, 1);
4061 did_spill = true;
4063 /* Regardless of the state of spills, if we previously had
4064 a register that we thought we could eliminate, but now can
4065 not eliminate, we must run another pass.
4067 Consider pseudos which have an entry in reg_equiv_* which
4068 reference an eliminable register. We must make another pass
4069 to update reg_equiv_* so that we do not substitute in the
4070 old value from when we thought the elimination could be
4071 performed. */
4073 return did_spill;
4076 /* Return true if X is used as the target register of an elimination. */
4078 bool
4079 elimination_target_reg_p (rtx x)
4081 struct elim_table *ep;
4083 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
4084 if (ep->to_rtx == x && ep->can_eliminate)
4085 return true;
4087 return false;
4090 /* Initialize the table of registers to eliminate.
4091 Pre-condition: global flag frame_pointer_needed has been set before
4092 calling this function. */
4094 static void
4095 init_elim_table (void)
4097 struct elim_table *ep;
4098 #ifdef ELIMINABLE_REGS
4099 const struct elim_table_1 *ep1;
4100 #endif
4102 if (!reg_eliminate)
4103 reg_eliminate = XCNEWVEC (struct elim_table, NUM_ELIMINABLE_REGS);
4105 num_eliminable = 0;
4107 #ifdef ELIMINABLE_REGS
4108 for (ep = reg_eliminate, ep1 = reg_eliminate_1;
4109 ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++, ep1++)
4111 ep->from = ep1->from;
4112 ep->to = ep1->to;
4113 ep->can_eliminate = ep->can_eliminate_previous
4114 = (targetm.can_eliminate (ep->from, ep->to)
4115 && ! (ep->to == STACK_POINTER_REGNUM
4116 && frame_pointer_needed
4117 && (! SUPPORTS_STACK_ALIGNMENT
4118 || ! stack_realign_fp)));
4120 #else
4121 reg_eliminate[0].from = reg_eliminate_1[0].from;
4122 reg_eliminate[0].to = reg_eliminate_1[0].to;
4123 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
4124 = ! frame_pointer_needed;
4125 #endif
4127 /* Count the number of eliminable registers and build the FROM and TO
4128 REG rtx's. Note that code in gen_rtx_REG will cause, e.g.,
4129 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
4130 We depend on this. */
4131 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
4133 num_eliminable += ep->can_eliminate;
4134 ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
4135 ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
4139 /* Find all the pseudo registers that didn't get hard regs
4140 but do have known equivalent constants or memory slots.
4141 These include parameters (known equivalent to parameter slots)
4142 and cse'd or loop-moved constant memory addresses.
4144 Record constant equivalents in reg_equiv_constant
4145 so they will be substituted by find_reloads.
4146 Record memory equivalents in reg_mem_equiv so they can
4147 be substituted eventually by altering the REG-rtx's. */
4149 static void
4150 init_eliminable_invariants (rtx_insn *first, bool do_subregs)
4152 int i;
4153 rtx_insn *insn;
4155 grow_reg_equivs ();
4156 if (do_subregs)
4157 reg_max_ref_width = XCNEWVEC (unsigned int, max_regno);
4158 else
4159 reg_max_ref_width = NULL;
4161 num_eliminable_invariants = 0;
4163 first_label_num = get_first_label_num ();
4164 num_labels = max_label_num () - first_label_num;
4166 /* Allocate the tables used to store offset information at labels. */
4167 offsets_known_at = XNEWVEC (char, num_labels);
4168 offsets_at = (HOST_WIDE_INT (*)[NUM_ELIMINABLE_REGS]) xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (HOST_WIDE_INT));
4170 /* Look for REG_EQUIV notes; record what each pseudo is equivalent
4171 to. If DO_SUBREGS is true, also find all paradoxical subregs and
4172 find largest such for each pseudo. FIRST is the head of the insn
4173 list. */
4175 for (insn = first; insn; insn = NEXT_INSN (insn))
4177 rtx set = single_set (insn);
4179 /* We may introduce USEs that we want to remove at the end, so
4180 we'll mark them with QImode. Make sure there are no
4181 previously-marked insns left by say regmove. */
4182 if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
4183 && GET_MODE (insn) != VOIDmode)
4184 PUT_MODE (insn, VOIDmode);
4186 if (do_subregs && NONDEBUG_INSN_P (insn))
4187 scan_paradoxical_subregs (PATTERN (insn));
4189 if (set != 0 && REG_P (SET_DEST (set)))
4191 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
4192 rtx x;
4194 if (! note)
4195 continue;
4197 i = REGNO (SET_DEST (set));
4198 x = XEXP (note, 0);
4200 if (i <= LAST_VIRTUAL_REGISTER)
4201 continue;
4203 /* If flag_pic and we have constant, verify it's legitimate. */
4204 if (!CONSTANT_P (x)
4205 || !flag_pic || LEGITIMATE_PIC_OPERAND_P (x))
4207 /* It can happen that a REG_EQUIV note contains a MEM
4208 that is not a legitimate memory operand. As later
4209 stages of reload assume that all addresses found
4210 in the reg_equiv_* arrays were originally legitimate,
4211 we ignore such REG_EQUIV notes. */
4212 if (memory_operand (x, VOIDmode))
4214 /* Always unshare the equivalence, so we can
4215 substitute into this insn without touching the
4216 equivalence. */
4217 reg_equiv_memory_loc (i) = copy_rtx (x);
4219 else if (function_invariant_p (x))
4221 machine_mode mode;
4223 mode = GET_MODE (SET_DEST (set));
4224 if (GET_CODE (x) == PLUS)
4226 /* This is PLUS of frame pointer and a constant,
4227 and might be shared. Unshare it. */
4228 reg_equiv_invariant (i) = copy_rtx (x);
4229 num_eliminable_invariants++;
4231 else if (x == frame_pointer_rtx || x == arg_pointer_rtx)
4233 reg_equiv_invariant (i) = x;
4234 num_eliminable_invariants++;
4236 else if (targetm.legitimate_constant_p (mode, x))
4237 reg_equiv_constant (i) = x;
4238 else
4240 reg_equiv_memory_loc (i) = force_const_mem (mode, x);
4241 if (! reg_equiv_memory_loc (i))
4242 reg_equiv_init (i) = NULL;
4245 else
4247 reg_equiv_init (i) = NULL;
4248 continue;
4251 else
4252 reg_equiv_init (i) = NULL;
4256 if (dump_file)
4257 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4258 if (reg_equiv_init (i))
4260 fprintf (dump_file, "init_insns for %u: ", i);
4261 print_inline_rtx (dump_file, reg_equiv_init (i), 20);
4262 fprintf (dump_file, "\n");
4266 /* Indicate that we no longer have known memory locations or constants.
4267 Free all data involved in tracking these. */
4269 static void
4270 free_reg_equiv (void)
4272 int i;
4274 free (offsets_known_at);
4275 free (offsets_at);
4276 offsets_at = 0;
4277 offsets_known_at = 0;
4279 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4280 if (reg_equiv_alt_mem_list (i))
4281 free_EXPR_LIST_list (&reg_equiv_alt_mem_list (i));
4282 vec_free (reg_equivs);
4285 /* Kick all pseudos out of hard register REGNO.
4287 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
4288 because we found we can't eliminate some register. In the case, no pseudos
4289 are allowed to be in the register, even if they are only in a block that
4290 doesn't require spill registers, unlike the case when we are spilling this
4291 hard reg to produce another spill register.
4293 Return nonzero if any pseudos needed to be kicked out. */
4295 static void
4296 spill_hard_reg (unsigned int regno, int cant_eliminate)
4298 int i;
4300 if (cant_eliminate)
4302 SET_HARD_REG_BIT (bad_spill_regs_global, regno);
4303 df_set_regs_ever_live (regno, true);
4306 /* Spill every pseudo reg that was allocated to this reg
4307 or to something that overlaps this reg. */
4309 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4310 if (reg_renumber[i] >= 0
4311 && (unsigned int) reg_renumber[i] <= regno
4312 && end_hard_regno (PSEUDO_REGNO_MODE (i), reg_renumber[i]) > regno)
4313 SET_REGNO_REG_SET (&spilled_pseudos, i);
4316 /* After find_reload_regs has been run for all insn that need reloads,
4317 and/or spill_hard_regs was called, this function is used to actually
4318 spill pseudo registers and try to reallocate them. It also sets up the
4319 spill_regs array for use by choose_reload_regs. */
4321 static int
4322 finish_spills (int global)
4324 struct insn_chain *chain;
4325 int something_changed = 0;
4326 unsigned i;
4327 reg_set_iterator rsi;
4329 /* Build the spill_regs array for the function. */
4330 /* If there are some registers still to eliminate and one of the spill regs
4331 wasn't ever used before, additional stack space may have to be
4332 allocated to store this register. Thus, we may have changed the offset
4333 between the stack and frame pointers, so mark that something has changed.
4335 One might think that we need only set VAL to 1 if this is a call-used
4336 register. However, the set of registers that must be saved by the
4337 prologue is not identical to the call-used set. For example, the
4338 register used by the call insn for the return PC is a call-used register,
4339 but must be saved by the prologue. */
4341 n_spills = 0;
4342 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4343 if (TEST_HARD_REG_BIT (used_spill_regs, i))
4345 spill_reg_order[i] = n_spills;
4346 spill_regs[n_spills++] = i;
4347 if (num_eliminable && ! df_regs_ever_live_p (i))
4348 something_changed = 1;
4349 df_set_regs_ever_live (i, true);
4351 else
4352 spill_reg_order[i] = -1;
4354 EXECUTE_IF_SET_IN_REG_SET (&spilled_pseudos, FIRST_PSEUDO_REGISTER, i, rsi)
4355 if (! ira_conflicts_p || reg_renumber[i] >= 0)
4357 /* Record the current hard register the pseudo is allocated to
4358 in pseudo_previous_regs so we avoid reallocating it to the
4359 same hard reg in a later pass. */
4360 gcc_assert (reg_renumber[i] >= 0);
4362 SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]);
4363 /* Mark it as no longer having a hard register home. */
4364 reg_renumber[i] = -1;
4365 if (ira_conflicts_p)
4366 /* Inform IRA about the change. */
4367 ira_mark_allocation_change (i);
4368 /* We will need to scan everything again. */
4369 something_changed = 1;
4372 /* Retry global register allocation if possible. */
4373 if (global && ira_conflicts_p)
4375 unsigned int n;
4377 memset (pseudo_forbidden_regs, 0, max_regno * sizeof (HARD_REG_SET));
4378 /* For every insn that needs reloads, set the registers used as spill
4379 regs in pseudo_forbidden_regs for every pseudo live across the
4380 insn. */
4381 for (chain = insns_need_reload; chain; chain = chain->next_need_reload)
4383 EXECUTE_IF_SET_IN_REG_SET
4384 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
4386 IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
4387 chain->used_spill_regs);
4389 EXECUTE_IF_SET_IN_REG_SET
4390 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
4392 IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
4393 chain->used_spill_regs);
4397 /* Retry allocating the pseudos spilled in IRA and the
4398 reload. For each reg, merge the various reg sets that
4399 indicate which hard regs can't be used, and call
4400 ira_reassign_pseudos. */
4401 for (n = 0, i = FIRST_PSEUDO_REGISTER; i < (unsigned) max_regno; i++)
4402 if (reg_old_renumber[i] != reg_renumber[i])
4404 if (reg_renumber[i] < 0)
4405 temp_pseudo_reg_arr[n++] = i;
4406 else
4407 CLEAR_REGNO_REG_SET (&spilled_pseudos, i);
4409 if (ira_reassign_pseudos (temp_pseudo_reg_arr, n,
4410 bad_spill_regs_global,
4411 pseudo_forbidden_regs, pseudo_previous_regs,
4412 &spilled_pseudos))
4413 something_changed = 1;
4415 /* Fix up the register information in the insn chain.
4416 This involves deleting those of the spilled pseudos which did not get
4417 a new hard register home from the live_{before,after} sets. */
4418 for (chain = reload_insn_chain; chain; chain = chain->next)
4420 HARD_REG_SET used_by_pseudos;
4421 HARD_REG_SET used_by_pseudos2;
4423 if (! ira_conflicts_p)
4425 /* Don't do it for IRA because IRA and the reload still can
4426 assign hard registers to the spilled pseudos on next
4427 reload iterations. */
4428 AND_COMPL_REG_SET (&chain->live_throughout, &spilled_pseudos);
4429 AND_COMPL_REG_SET (&chain->dead_or_set, &spilled_pseudos);
4431 /* Mark any unallocated hard regs as available for spills. That
4432 makes inheritance work somewhat better. */
4433 if (chain->need_reload)
4435 REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
4436 REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
4437 IOR_HARD_REG_SET (used_by_pseudos, used_by_pseudos2);
4439 compute_use_by_pseudos (&used_by_pseudos, &chain->live_throughout);
4440 compute_use_by_pseudos (&used_by_pseudos, &chain->dead_or_set);
4441 /* Value of chain->used_spill_regs from previous iteration
4442 may be not included in the value calculated here because
4443 of possible removing caller-saves insns (see function
4444 delete_caller_save_insns. */
4445 COMPL_HARD_REG_SET (chain->used_spill_regs, used_by_pseudos);
4446 AND_HARD_REG_SET (chain->used_spill_regs, used_spill_regs);
4450 CLEAR_REG_SET (&changed_allocation_pseudos);
4451 /* Let alter_reg modify the reg rtx's for the modified pseudos. */
4452 for (i = FIRST_PSEUDO_REGISTER; i < (unsigned)max_regno; i++)
4454 int regno = reg_renumber[i];
4455 if (reg_old_renumber[i] == regno)
4456 continue;
4458 SET_REGNO_REG_SET (&changed_allocation_pseudos, i);
4460 alter_reg (i, reg_old_renumber[i], false);
4461 reg_old_renumber[i] = regno;
4462 if (dump_file)
4464 if (regno == -1)
4465 fprintf (dump_file, " Register %d now on stack.\n\n", i);
4466 else
4467 fprintf (dump_file, " Register %d now in %d.\n\n",
4468 i, reg_renumber[i]);
4472 return something_changed;
4475 /* Find all paradoxical subregs within X and update reg_max_ref_width. */
4477 static void
4478 scan_paradoxical_subregs (rtx x)
4480 int i;
4481 const char *fmt;
4482 enum rtx_code code = GET_CODE (x);
4484 switch (code)
4486 case REG:
4487 case CONST:
4488 case SYMBOL_REF:
4489 case LABEL_REF:
4490 CASE_CONST_ANY:
4491 case CC0:
4492 case PC:
4493 case USE:
4494 case CLOBBER:
4495 return;
4497 case SUBREG:
4498 if (REG_P (SUBREG_REG (x))
4499 && (GET_MODE_SIZE (GET_MODE (x))
4500 > reg_max_ref_width[REGNO (SUBREG_REG (x))]))
4502 reg_max_ref_width[REGNO (SUBREG_REG (x))]
4503 = GET_MODE_SIZE (GET_MODE (x));
4504 mark_home_live_1 (REGNO (SUBREG_REG (x)), GET_MODE (x));
4506 return;
4508 default:
4509 break;
4512 fmt = GET_RTX_FORMAT (code);
4513 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4515 if (fmt[i] == 'e')
4516 scan_paradoxical_subregs (XEXP (x, i));
4517 else if (fmt[i] == 'E')
4519 int j;
4520 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4521 scan_paradoxical_subregs (XVECEXP (x, i, j));
4526 /* *OP_PTR and *OTHER_PTR are two operands to a conceptual reload.
4527 If *OP_PTR is a paradoxical subreg, try to remove that subreg
4528 and apply the corresponding narrowing subreg to *OTHER_PTR.
4529 Return true if the operands were changed, false otherwise. */
4531 static bool
4532 strip_paradoxical_subreg (rtx *op_ptr, rtx *other_ptr)
4534 rtx op, inner, other, tem;
4536 op = *op_ptr;
4537 if (!paradoxical_subreg_p (op))
4538 return false;
4539 inner = SUBREG_REG (op);
4541 other = *other_ptr;
4542 tem = gen_lowpart_common (GET_MODE (inner), other);
4543 if (!tem)
4544 return false;
4546 /* If the lowpart operation turned a hard register into a subreg,
4547 rather than simplifying it to another hard register, then the
4548 mode change cannot be properly represented. For example, OTHER
4549 might be valid in its current mode, but not in the new one. */
4550 if (GET_CODE (tem) == SUBREG
4551 && REG_P (other)
4552 && HARD_REGISTER_P (other))
4553 return false;
4555 *op_ptr = inner;
4556 *other_ptr = tem;
4557 return true;
4560 /* A subroutine of reload_as_needed. If INSN has a REG_EH_REGION note,
4561 examine all of the reload insns between PREV and NEXT exclusive, and
4562 annotate all that may trap. */
4564 static void
4565 fixup_eh_region_note (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
4567 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
4568 if (note == NULL)
4569 return;
4570 if (!insn_could_throw_p (insn))
4571 remove_note (insn, note);
4572 copy_reg_eh_region_note_forward (note, NEXT_INSN (prev), next);
4575 /* Reload pseudo-registers into hard regs around each insn as needed.
4576 Additional register load insns are output before the insn that needs it
4577 and perhaps store insns after insns that modify the reloaded pseudo reg.
4579 reg_last_reload_reg and reg_reloaded_contents keep track of
4580 which registers are already available in reload registers.
4581 We update these for the reloads that we perform,
4582 as the insns are scanned. */
4584 static void
4585 reload_as_needed (int live_known)
4587 struct insn_chain *chain;
4588 #if defined (AUTO_INC_DEC)
4589 int i;
4590 #endif
4591 rtx_note *marker;
4593 memset (spill_reg_rtx, 0, sizeof spill_reg_rtx);
4594 memset (spill_reg_store, 0, sizeof spill_reg_store);
4595 reg_last_reload_reg = XCNEWVEC (rtx, max_regno);
4596 INIT_REG_SET (&reg_has_output_reload);
4597 CLEAR_HARD_REG_SET (reg_reloaded_valid);
4598 CLEAR_HARD_REG_SET (reg_reloaded_call_part_clobbered);
4600 set_initial_elim_offsets ();
4602 /* Generate a marker insn that we will move around. */
4603 marker = emit_note (NOTE_INSN_DELETED);
4604 unlink_insn_chain (marker, marker);
4606 for (chain = reload_insn_chain; chain; chain = chain->next)
4608 rtx_insn *prev = 0;
4609 rtx_insn *insn = chain->insn;
4610 rtx_insn *old_next = NEXT_INSN (insn);
4611 #ifdef AUTO_INC_DEC
4612 rtx_insn *old_prev = PREV_INSN (insn);
4613 #endif
4615 if (will_delete_init_insn_p (insn))
4616 continue;
4618 /* If we pass a label, copy the offsets from the label information
4619 into the current offsets of each elimination. */
4620 if (LABEL_P (insn))
4621 set_offsets_for_label (insn);
4623 else if (INSN_P (insn))
4625 regset_head regs_to_forget;
4626 INIT_REG_SET (&regs_to_forget);
4627 note_stores (PATTERN (insn), forget_old_reloads_1, &regs_to_forget);
4629 /* If this is a USE and CLOBBER of a MEM, ensure that any
4630 references to eliminable registers have been removed. */
4632 if ((GET_CODE (PATTERN (insn)) == USE
4633 || GET_CODE (PATTERN (insn)) == CLOBBER)
4634 && MEM_P (XEXP (PATTERN (insn), 0)))
4635 XEXP (XEXP (PATTERN (insn), 0), 0)
4636 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
4637 GET_MODE (XEXP (PATTERN (insn), 0)),
4638 NULL_RTX);
4640 /* If we need to do register elimination processing, do so.
4641 This might delete the insn, in which case we are done. */
4642 if ((num_eliminable || num_eliminable_invariants) && chain->need_elim)
4644 eliminate_regs_in_insn (insn, 1);
4645 if (NOTE_P (insn))
4647 update_eliminable_offsets ();
4648 CLEAR_REG_SET (&regs_to_forget);
4649 continue;
4653 /* If need_elim is nonzero but need_reload is zero, one might think
4654 that we could simply set n_reloads to 0. However, find_reloads
4655 could have done some manipulation of the insn (such as swapping
4656 commutative operands), and these manipulations are lost during
4657 the first pass for every insn that needs register elimination.
4658 So the actions of find_reloads must be redone here. */
4660 if (! chain->need_elim && ! chain->need_reload
4661 && ! chain->need_operand_change)
4662 n_reloads = 0;
4663 /* First find the pseudo regs that must be reloaded for this insn.
4664 This info is returned in the tables reload_... (see reload.h).
4665 Also modify the body of INSN by substituting RELOAD
4666 rtx's for those pseudo regs. */
4667 else
4669 CLEAR_REG_SET (&reg_has_output_reload);
4670 CLEAR_HARD_REG_SET (reg_is_output_reload);
4672 find_reloads (insn, 1, spill_indirect_levels, live_known,
4673 spill_reg_order);
4676 if (n_reloads > 0)
4678 rtx_insn *next = NEXT_INSN (insn);
4680 /* ??? PREV can get deleted by reload inheritance.
4681 Work around this by emitting a marker note. */
4682 prev = PREV_INSN (insn);
4683 reorder_insns_nobb (marker, marker, prev);
4685 /* Now compute which reload regs to reload them into. Perhaps
4686 reusing reload regs from previous insns, or else output
4687 load insns to reload them. Maybe output store insns too.
4688 Record the choices of reload reg in reload_reg_rtx. */
4689 choose_reload_regs (chain);
4691 /* Generate the insns to reload operands into or out of
4692 their reload regs. */
4693 emit_reload_insns (chain);
4695 /* Substitute the chosen reload regs from reload_reg_rtx
4696 into the insn's body (or perhaps into the bodies of other
4697 load and store insn that we just made for reloading
4698 and that we moved the structure into). */
4699 subst_reloads (insn);
4701 prev = PREV_INSN (marker);
4702 unlink_insn_chain (marker, marker);
4704 /* Adjust the exception region notes for loads and stores. */
4705 if (cfun->can_throw_non_call_exceptions && !CALL_P (insn))
4706 fixup_eh_region_note (insn, prev, next);
4708 /* Adjust the location of REG_ARGS_SIZE. */
4709 rtx p = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
4710 if (p)
4712 remove_note (insn, p);
4713 fixup_args_size_notes (prev, PREV_INSN (next),
4714 INTVAL (XEXP (p, 0)));
4717 /* If this was an ASM, make sure that all the reload insns
4718 we have generated are valid. If not, give an error
4719 and delete them. */
4720 if (asm_noperands (PATTERN (insn)) >= 0)
4721 for (rtx_insn *p = NEXT_INSN (prev);
4722 p != next;
4723 p = NEXT_INSN (p))
4724 if (p != insn && INSN_P (p)
4725 && GET_CODE (PATTERN (p)) != USE
4726 && (recog_memoized (p) < 0
4727 || (extract_insn (p),
4728 !(constrain_operands (1,
4729 get_enabled_alternatives (p))))))
4731 error_for_asm (insn,
4732 "%<asm%> operand requires "
4733 "impossible reload");
4734 delete_insn (p);
4738 if (num_eliminable && chain->need_elim)
4739 update_eliminable_offsets ();
4741 /* Any previously reloaded spilled pseudo reg, stored in this insn,
4742 is no longer validly lying around to save a future reload.
4743 Note that this does not detect pseudos that were reloaded
4744 for this insn in order to be stored in
4745 (obeying register constraints). That is correct; such reload
4746 registers ARE still valid. */
4747 forget_marked_reloads (&regs_to_forget);
4748 CLEAR_REG_SET (&regs_to_forget);
4750 /* There may have been CLOBBER insns placed after INSN. So scan
4751 between INSN and NEXT and use them to forget old reloads. */
4752 for (rtx_insn *x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
4753 if (NONJUMP_INSN_P (x) && GET_CODE (PATTERN (x)) == CLOBBER)
4754 note_stores (PATTERN (x), forget_old_reloads_1, NULL);
4756 #ifdef AUTO_INC_DEC
4757 /* Likewise for regs altered by auto-increment in this insn.
4758 REG_INC notes have been changed by reloading:
4759 find_reloads_address_1 records substitutions for them,
4760 which have been performed by subst_reloads above. */
4761 for (i = n_reloads - 1; i >= 0; i--)
4763 rtx in_reg = rld[i].in_reg;
4764 if (in_reg)
4766 enum rtx_code code = GET_CODE (in_reg);
4767 /* PRE_INC / PRE_DEC will have the reload register ending up
4768 with the same value as the stack slot, but that doesn't
4769 hold true for POST_INC / POST_DEC. Either we have to
4770 convert the memory access to a true POST_INC / POST_DEC,
4771 or we can't use the reload register for inheritance. */
4772 if ((code == POST_INC || code == POST_DEC)
4773 && TEST_HARD_REG_BIT (reg_reloaded_valid,
4774 REGNO (rld[i].reg_rtx))
4775 /* Make sure it is the inc/dec pseudo, and not
4776 some other (e.g. output operand) pseudo. */
4777 && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4778 == REGNO (XEXP (in_reg, 0))))
4781 rtx reload_reg = rld[i].reg_rtx;
4782 machine_mode mode = GET_MODE (reload_reg);
4783 int n = 0;
4784 rtx_insn *p;
4786 for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p))
4788 /* We really want to ignore REG_INC notes here, so
4789 use PATTERN (p) as argument to reg_set_p . */
4790 if (reg_set_p (reload_reg, PATTERN (p)))
4791 break;
4792 n = count_occurrences (PATTERN (p), reload_reg, 0);
4793 if (! n)
4794 continue;
4795 if (n == 1)
4797 rtx replace_reg
4798 = gen_rtx_fmt_e (code, mode, reload_reg);
4800 validate_replace_rtx_group (reload_reg,
4801 replace_reg, p);
4802 n = verify_changes (0);
4804 /* We must also verify that the constraints
4805 are met after the replacement. Make sure
4806 extract_insn is only called for an insn
4807 where the replacements were found to be
4808 valid so far. */
4809 if (n)
4811 extract_insn (p);
4812 n = constrain_operands (1,
4813 get_enabled_alternatives (p));
4816 /* If the constraints were not met, then
4817 undo the replacement, else confirm it. */
4818 if (!n)
4819 cancel_changes (0);
4820 else
4821 confirm_change_group ();
4823 break;
4825 if (n == 1)
4827 add_reg_note (p, REG_INC, reload_reg);
4828 /* Mark this as having an output reload so that the
4829 REG_INC processing code below won't invalidate
4830 the reload for inheritance. */
4831 SET_HARD_REG_BIT (reg_is_output_reload,
4832 REGNO (reload_reg));
4833 SET_REGNO_REG_SET (&reg_has_output_reload,
4834 REGNO (XEXP (in_reg, 0)));
4836 else
4837 forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX,
4838 NULL);
4840 else if ((code == PRE_INC || code == PRE_DEC)
4841 && TEST_HARD_REG_BIT (reg_reloaded_valid,
4842 REGNO (rld[i].reg_rtx))
4843 /* Make sure it is the inc/dec pseudo, and not
4844 some other (e.g. output operand) pseudo. */
4845 && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4846 == REGNO (XEXP (in_reg, 0))))
4848 SET_HARD_REG_BIT (reg_is_output_reload,
4849 REGNO (rld[i].reg_rtx));
4850 SET_REGNO_REG_SET (&reg_has_output_reload,
4851 REGNO (XEXP (in_reg, 0)));
4853 else if (code == PRE_INC || code == PRE_DEC
4854 || code == POST_INC || code == POST_DEC)
4856 int in_regno = REGNO (XEXP (in_reg, 0));
4858 if (reg_last_reload_reg[in_regno] != NULL_RTX)
4860 int in_hard_regno;
4861 bool forget_p = true;
4863 in_hard_regno = REGNO (reg_last_reload_reg[in_regno]);
4864 if (TEST_HARD_REG_BIT (reg_reloaded_valid,
4865 in_hard_regno))
4867 for (rtx_insn *x = (old_prev ?
4868 NEXT_INSN (old_prev) : insn);
4869 x != old_next;
4870 x = NEXT_INSN (x))
4871 if (x == reg_reloaded_insn[in_hard_regno])
4873 forget_p = false;
4874 break;
4877 /* If for some reasons, we didn't set up
4878 reg_last_reload_reg in this insn,
4879 invalidate inheritance from previous
4880 insns for the incremented/decremented
4881 register. Such registers will be not in
4882 reg_has_output_reload. Invalidate it
4883 also if the corresponding element in
4884 reg_reloaded_insn is also
4885 invalidated. */
4886 if (forget_p)
4887 forget_old_reloads_1 (XEXP (in_reg, 0),
4888 NULL_RTX, NULL);
4893 /* If a pseudo that got a hard register is auto-incremented,
4894 we must purge records of copying it into pseudos without
4895 hard registers. */
4896 for (rtx x = REG_NOTES (insn); x; x = XEXP (x, 1))
4897 if (REG_NOTE_KIND (x) == REG_INC)
4899 /* See if this pseudo reg was reloaded in this insn.
4900 If so, its last-reload info is still valid
4901 because it is based on this insn's reload. */
4902 for (i = 0; i < n_reloads; i++)
4903 if (rld[i].out == XEXP (x, 0))
4904 break;
4906 if (i == n_reloads)
4907 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX, NULL);
4909 #endif
4911 /* A reload reg's contents are unknown after a label. */
4912 if (LABEL_P (insn))
4913 CLEAR_HARD_REG_SET (reg_reloaded_valid);
4915 /* Don't assume a reload reg is still good after a call insn
4916 if it is a call-used reg, or if it contains a value that will
4917 be partially clobbered by the call. */
4918 else if (CALL_P (insn))
4920 AND_COMPL_HARD_REG_SET (reg_reloaded_valid, call_used_reg_set);
4921 AND_COMPL_HARD_REG_SET (reg_reloaded_valid, reg_reloaded_call_part_clobbered);
4923 /* If this is a call to a setjmp-type function, we must not
4924 reuse any reload reg contents across the call; that will
4925 just be clobbered by other uses of the register in later
4926 code, before the longjmp. */
4927 if (find_reg_note (insn, REG_SETJMP, NULL_RTX))
4928 CLEAR_HARD_REG_SET (reg_reloaded_valid);
4932 /* Clean up. */
4933 free (reg_last_reload_reg);
4934 CLEAR_REG_SET (&reg_has_output_reload);
4937 /* Discard all record of any value reloaded from X,
4938 or reloaded in X from someplace else;
4939 unless X is an output reload reg of the current insn.
4941 X may be a hard reg (the reload reg)
4942 or it may be a pseudo reg that was reloaded from.
4944 When DATA is non-NULL just mark the registers in regset
4945 to be forgotten later. */
4947 static void
4948 forget_old_reloads_1 (rtx x, const_rtx ignored ATTRIBUTE_UNUSED,
4949 void *data)
4951 unsigned int regno;
4952 unsigned int nr;
4953 regset regs = (regset) data;
4955 /* note_stores does give us subregs of hard regs,
4956 subreg_regno_offset requires a hard reg. */
4957 while (GET_CODE (x) == SUBREG)
4959 /* We ignore the subreg offset when calculating the regno,
4960 because we are using the entire underlying hard register
4961 below. */
4962 x = SUBREG_REG (x);
4965 if (!REG_P (x))
4966 return;
4968 regno = REGNO (x);
4970 if (regno >= FIRST_PSEUDO_REGISTER)
4971 nr = 1;
4972 else
4974 unsigned int i;
4976 nr = hard_regno_nregs[regno][GET_MODE (x)];
4977 /* Storing into a spilled-reg invalidates its contents.
4978 This can happen if a block-local pseudo is allocated to that reg
4979 and it wasn't spilled because this block's total need is 0.
4980 Then some insn might have an optional reload and use this reg. */
4981 if (!regs)
4982 for (i = 0; i < nr; i++)
4983 /* But don't do this if the reg actually serves as an output
4984 reload reg in the current instruction. */
4985 if (n_reloads == 0
4986 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
4988 CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
4989 spill_reg_store[regno + i] = 0;
4993 if (regs)
4994 while (nr-- > 0)
4995 SET_REGNO_REG_SET (regs, regno + nr);
4996 else
4998 /* Since value of X has changed,
4999 forget any value previously copied from it. */
5001 while (nr-- > 0)
5002 /* But don't forget a copy if this is the output reload
5003 that establishes the copy's validity. */
5004 if (n_reloads == 0
5005 || !REGNO_REG_SET_P (&reg_has_output_reload, regno + nr))
5006 reg_last_reload_reg[regno + nr] = 0;
5010 /* Forget the reloads marked in regset by previous function. */
5011 static void
5012 forget_marked_reloads (regset regs)
5014 unsigned int reg;
5015 reg_set_iterator rsi;
5016 EXECUTE_IF_SET_IN_REG_SET (regs, 0, reg, rsi)
5018 if (reg < FIRST_PSEUDO_REGISTER
5019 /* But don't do this if the reg actually serves as an output
5020 reload reg in the current instruction. */
5021 && (n_reloads == 0
5022 || ! TEST_HARD_REG_BIT (reg_is_output_reload, reg)))
5024 CLEAR_HARD_REG_BIT (reg_reloaded_valid, reg);
5025 spill_reg_store[reg] = 0;
5027 if (n_reloads == 0
5028 || !REGNO_REG_SET_P (&reg_has_output_reload, reg))
5029 reg_last_reload_reg[reg] = 0;
5033 /* The following HARD_REG_SETs indicate when each hard register is
5034 used for a reload of various parts of the current insn. */
5036 /* If reg is unavailable for all reloads. */
5037 static HARD_REG_SET reload_reg_unavailable;
5038 /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
5039 static HARD_REG_SET reload_reg_used;
5040 /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
5041 static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
5042 /* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I. */
5043 static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
5044 /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
5045 static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
5046 /* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I. */
5047 static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
5048 /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
5049 static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
5050 /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
5051 static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
5052 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
5053 static HARD_REG_SET reload_reg_used_in_op_addr;
5054 /* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
5055 static HARD_REG_SET reload_reg_used_in_op_addr_reload;
5056 /* If reg is in use for a RELOAD_FOR_INSN reload. */
5057 static HARD_REG_SET reload_reg_used_in_insn;
5058 /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
5059 static HARD_REG_SET reload_reg_used_in_other_addr;
5061 /* If reg is in use as a reload reg for any sort of reload. */
5062 static HARD_REG_SET reload_reg_used_at_all;
5064 /* If reg is use as an inherited reload. We just mark the first register
5065 in the group. */
5066 static HARD_REG_SET reload_reg_used_for_inherit;
5068 /* Records which hard regs are used in any way, either as explicit use or
5069 by being allocated to a pseudo during any point of the current insn. */
5070 static HARD_REG_SET reg_used_in_insn;
5072 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
5073 TYPE. MODE is used to indicate how many consecutive regs are
5074 actually used. */
5076 static void
5077 mark_reload_reg_in_use (unsigned int regno, int opnum, enum reload_type type,
5078 machine_mode mode)
5080 switch (type)
5082 case RELOAD_OTHER:
5083 add_to_hard_reg_set (&reload_reg_used, mode, regno);
5084 break;
5086 case RELOAD_FOR_INPUT_ADDRESS:
5087 add_to_hard_reg_set (&reload_reg_used_in_input_addr[opnum], mode, regno);
5088 break;
5090 case RELOAD_FOR_INPADDR_ADDRESS:
5091 add_to_hard_reg_set (&reload_reg_used_in_inpaddr_addr[opnum], mode, regno);
5092 break;
5094 case RELOAD_FOR_OUTPUT_ADDRESS:
5095 add_to_hard_reg_set (&reload_reg_used_in_output_addr[opnum], mode, regno);
5096 break;
5098 case RELOAD_FOR_OUTADDR_ADDRESS:
5099 add_to_hard_reg_set (&reload_reg_used_in_outaddr_addr[opnum], mode, regno);
5100 break;
5102 case RELOAD_FOR_OPERAND_ADDRESS:
5103 add_to_hard_reg_set (&reload_reg_used_in_op_addr, mode, regno);
5104 break;
5106 case RELOAD_FOR_OPADDR_ADDR:
5107 add_to_hard_reg_set (&reload_reg_used_in_op_addr_reload, mode, regno);
5108 break;
5110 case RELOAD_FOR_OTHER_ADDRESS:
5111 add_to_hard_reg_set (&reload_reg_used_in_other_addr, mode, regno);
5112 break;
5114 case RELOAD_FOR_INPUT:
5115 add_to_hard_reg_set (&reload_reg_used_in_input[opnum], mode, regno);
5116 break;
5118 case RELOAD_FOR_OUTPUT:
5119 add_to_hard_reg_set (&reload_reg_used_in_output[opnum], mode, regno);
5120 break;
5122 case RELOAD_FOR_INSN:
5123 add_to_hard_reg_set (&reload_reg_used_in_insn, mode, regno);
5124 break;
5127 add_to_hard_reg_set (&reload_reg_used_at_all, mode, regno);
5130 /* Similarly, but show REGNO is no longer in use for a reload. */
5132 static void
5133 clear_reload_reg_in_use (unsigned int regno, int opnum,
5134 enum reload_type type, machine_mode mode)
5136 unsigned int nregs = hard_regno_nregs[regno][mode];
5137 unsigned int start_regno, end_regno, r;
5138 int i;
5139 /* A complication is that for some reload types, inheritance might
5140 allow multiple reloads of the same types to share a reload register.
5141 We set check_opnum if we have to check only reloads with the same
5142 operand number, and check_any if we have to check all reloads. */
5143 int check_opnum = 0;
5144 int check_any = 0;
5145 HARD_REG_SET *used_in_set;
5147 switch (type)
5149 case RELOAD_OTHER:
5150 used_in_set = &reload_reg_used;
5151 break;
5153 case RELOAD_FOR_INPUT_ADDRESS:
5154 used_in_set = &reload_reg_used_in_input_addr[opnum];
5155 break;
5157 case RELOAD_FOR_INPADDR_ADDRESS:
5158 check_opnum = 1;
5159 used_in_set = &reload_reg_used_in_inpaddr_addr[opnum];
5160 break;
5162 case RELOAD_FOR_OUTPUT_ADDRESS:
5163 used_in_set = &reload_reg_used_in_output_addr[opnum];
5164 break;
5166 case RELOAD_FOR_OUTADDR_ADDRESS:
5167 check_opnum = 1;
5168 used_in_set = &reload_reg_used_in_outaddr_addr[opnum];
5169 break;
5171 case RELOAD_FOR_OPERAND_ADDRESS:
5172 used_in_set = &reload_reg_used_in_op_addr;
5173 break;
5175 case RELOAD_FOR_OPADDR_ADDR:
5176 check_any = 1;
5177 used_in_set = &reload_reg_used_in_op_addr_reload;
5178 break;
5180 case RELOAD_FOR_OTHER_ADDRESS:
5181 used_in_set = &reload_reg_used_in_other_addr;
5182 check_any = 1;
5183 break;
5185 case RELOAD_FOR_INPUT:
5186 used_in_set = &reload_reg_used_in_input[opnum];
5187 break;
5189 case RELOAD_FOR_OUTPUT:
5190 used_in_set = &reload_reg_used_in_output[opnum];
5191 break;
5193 case RELOAD_FOR_INSN:
5194 used_in_set = &reload_reg_used_in_insn;
5195 break;
5196 default:
5197 gcc_unreachable ();
5199 /* We resolve conflicts with remaining reloads of the same type by
5200 excluding the intervals of reload registers by them from the
5201 interval of freed reload registers. Since we only keep track of
5202 one set of interval bounds, we might have to exclude somewhat
5203 more than what would be necessary if we used a HARD_REG_SET here.
5204 But this should only happen very infrequently, so there should
5205 be no reason to worry about it. */
5207 start_regno = regno;
5208 end_regno = regno + nregs;
5209 if (check_opnum || check_any)
5211 for (i = n_reloads - 1; i >= 0; i--)
5213 if (rld[i].when_needed == type
5214 && (check_any || rld[i].opnum == opnum)
5215 && rld[i].reg_rtx)
5217 unsigned int conflict_start = true_regnum (rld[i].reg_rtx);
5218 unsigned int conflict_end
5219 = end_hard_regno (rld[i].mode, conflict_start);
5221 /* If there is an overlap with the first to-be-freed register,
5222 adjust the interval start. */
5223 if (conflict_start <= start_regno && conflict_end > start_regno)
5224 start_regno = conflict_end;
5225 /* Otherwise, if there is a conflict with one of the other
5226 to-be-freed registers, adjust the interval end. */
5227 if (conflict_start > start_regno && conflict_start < end_regno)
5228 end_regno = conflict_start;
5233 for (r = start_regno; r < end_regno; r++)
5234 CLEAR_HARD_REG_BIT (*used_in_set, r);
5237 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
5238 specified by OPNUM and TYPE. */
5240 static int
5241 reload_reg_free_p (unsigned int regno, int opnum, enum reload_type type)
5243 int i;
5245 /* In use for a RELOAD_OTHER means it's not available for anything. */
5246 if (TEST_HARD_REG_BIT (reload_reg_used, regno)
5247 || TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
5248 return 0;
5250 switch (type)
5252 case RELOAD_OTHER:
5253 /* In use for anything means we can't use it for RELOAD_OTHER. */
5254 if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
5255 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5256 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
5257 || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
5258 return 0;
5260 for (i = 0; i < reload_n_operands; i++)
5261 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5262 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
5263 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5264 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5265 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
5266 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5267 return 0;
5269 return 1;
5271 case RELOAD_FOR_INPUT:
5272 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5273 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
5274 return 0;
5276 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
5277 return 0;
5279 /* If it is used for some other input, can't use it. */
5280 for (i = 0; i < reload_n_operands; i++)
5281 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5282 return 0;
5284 /* If it is used in a later operand's address, can't use it. */
5285 for (i = opnum + 1; i < reload_n_operands; i++)
5286 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5287 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
5288 return 0;
5290 return 1;
5292 case RELOAD_FOR_INPUT_ADDRESS:
5293 /* Can't use a register if it is used for an input address for this
5294 operand or used as an input in an earlier one. */
5295 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
5296 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
5297 return 0;
5299 for (i = 0; i < opnum; i++)
5300 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5301 return 0;
5303 return 1;
5305 case RELOAD_FOR_INPADDR_ADDRESS:
5306 /* Can't use a register if it is used for an input address
5307 for this operand or used as an input in an earlier
5308 one. */
5309 if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
5310 return 0;
5312 for (i = 0; i < opnum; i++)
5313 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5314 return 0;
5316 return 1;
5318 case RELOAD_FOR_OUTPUT_ADDRESS:
5319 /* Can't use a register if it is used for an output address for this
5320 operand or used as an output in this or a later operand. Note
5321 that multiple output operands are emitted in reverse order, so
5322 the conflicting ones are those with lower indices. */
5323 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
5324 return 0;
5326 for (i = 0; i <= opnum; i++)
5327 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5328 return 0;
5330 return 1;
5332 case RELOAD_FOR_OUTADDR_ADDRESS:
5333 /* Can't use a register if it is used for an output address
5334 for this operand or used as an output in this or a
5335 later operand. Note that multiple output operands are
5336 emitted in reverse order, so the conflicting ones are
5337 those with lower indices. */
5338 if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
5339 return 0;
5341 for (i = 0; i <= opnum; i++)
5342 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5343 return 0;
5345 return 1;
5347 case RELOAD_FOR_OPERAND_ADDRESS:
5348 for (i = 0; i < reload_n_operands; i++)
5349 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5350 return 0;
5352 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5353 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
5355 case RELOAD_FOR_OPADDR_ADDR:
5356 for (i = 0; i < reload_n_operands; i++)
5357 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5358 return 0;
5360 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
5362 case RELOAD_FOR_OUTPUT:
5363 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
5364 outputs, or an operand address for this or an earlier output.
5365 Note that multiple output operands are emitted in reverse order,
5366 so the conflicting ones are those with higher indices. */
5367 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
5368 return 0;
5370 for (i = 0; i < reload_n_operands; i++)
5371 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5372 return 0;
5374 for (i = opnum; i < reload_n_operands; i++)
5375 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5376 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
5377 return 0;
5379 return 1;
5381 case RELOAD_FOR_INSN:
5382 for (i = 0; i < reload_n_operands; i++)
5383 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
5384 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5385 return 0;
5387 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5388 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
5390 case RELOAD_FOR_OTHER_ADDRESS:
5391 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
5393 default:
5394 gcc_unreachable ();
5398 /* Return 1 if the value in reload reg REGNO, as used by the reload with
5399 the number RELOADNUM, is still available in REGNO at the end of the insn.
5401 We can assume that the reload reg was already tested for availability
5402 at the time it is needed, and we should not check this again,
5403 in case the reg has already been marked in use. */
5405 static int
5406 reload_reg_reaches_end_p (unsigned int regno, int reloadnum)
5408 int opnum = rld[reloadnum].opnum;
5409 enum reload_type type = rld[reloadnum].when_needed;
5410 int i;
5412 /* See if there is a reload with the same type for this operand, using
5413 the same register. This case is not handled by the code below. */
5414 for (i = reloadnum + 1; i < n_reloads; i++)
5416 rtx reg;
5417 int nregs;
5419 if (rld[i].opnum != opnum || rld[i].when_needed != type)
5420 continue;
5421 reg = rld[i].reg_rtx;
5422 if (reg == NULL_RTX)
5423 continue;
5424 nregs = hard_regno_nregs[REGNO (reg)][GET_MODE (reg)];
5425 if (regno >= REGNO (reg) && regno < REGNO (reg) + nregs)
5426 return 0;
5429 switch (type)
5431 case RELOAD_OTHER:
5432 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
5433 its value must reach the end. */
5434 return 1;
5436 /* If this use is for part of the insn,
5437 its value reaches if no subsequent part uses the same register.
5438 Just like the above function, don't try to do this with lots
5439 of fallthroughs. */
5441 case RELOAD_FOR_OTHER_ADDRESS:
5442 /* Here we check for everything else, since these don't conflict
5443 with anything else and everything comes later. */
5445 for (i = 0; i < reload_n_operands; i++)
5446 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5447 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5448 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
5449 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5450 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
5451 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5452 return 0;
5454 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5455 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
5456 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5457 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
5459 case RELOAD_FOR_INPUT_ADDRESS:
5460 case RELOAD_FOR_INPADDR_ADDRESS:
5461 /* Similar, except that we check only for this and subsequent inputs
5462 and the address of only subsequent inputs and we do not need
5463 to check for RELOAD_OTHER objects since they are known not to
5464 conflict. */
5466 for (i = opnum; i < reload_n_operands; i++)
5467 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5468 return 0;
5470 /* Reload register of reload with type RELOAD_FOR_INPADDR_ADDRESS
5471 could be killed if the register is also used by reload with type
5472 RELOAD_FOR_INPUT_ADDRESS, so check it. */
5473 if (type == RELOAD_FOR_INPADDR_ADDRESS
5474 && TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno))
5475 return 0;
5477 for (i = opnum + 1; i < reload_n_operands; i++)
5478 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5479 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
5480 return 0;
5482 for (i = 0; i < reload_n_operands; i++)
5483 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5484 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5485 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5486 return 0;
5488 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
5489 return 0;
5491 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5492 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5493 && !TEST_HARD_REG_BIT (reload_reg_used, regno));
5495 case RELOAD_FOR_INPUT:
5496 /* Similar to input address, except we start at the next operand for
5497 both input and input address and we do not check for
5498 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
5499 would conflict. */
5501 for (i = opnum + 1; i < reload_n_operands; i++)
5502 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5503 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
5504 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5505 return 0;
5507 /* ... fall through ... */
5509 case RELOAD_FOR_OPERAND_ADDRESS:
5510 /* Check outputs and their addresses. */
5512 for (i = 0; i < reload_n_operands; i++)
5513 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5514 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5515 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5516 return 0;
5518 return (!TEST_HARD_REG_BIT (reload_reg_used, regno));
5520 case RELOAD_FOR_OPADDR_ADDR:
5521 for (i = 0; i < reload_n_operands; i++)
5522 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5523 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5524 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5525 return 0;
5527 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5528 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5529 && !TEST_HARD_REG_BIT (reload_reg_used, regno));
5531 case RELOAD_FOR_INSN:
5532 /* These conflict with other outputs with RELOAD_OTHER. So
5533 we need only check for output addresses. */
5535 opnum = reload_n_operands;
5537 /* ... fall through ... */
5539 case RELOAD_FOR_OUTPUT:
5540 case RELOAD_FOR_OUTPUT_ADDRESS:
5541 case RELOAD_FOR_OUTADDR_ADDRESS:
5542 /* We already know these can't conflict with a later output. So the
5543 only thing to check are later output addresses.
5544 Note that multiple output operands are emitted in reverse order,
5545 so the conflicting ones are those with lower indices. */
5546 for (i = 0; i < opnum; i++)
5547 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5548 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
5549 return 0;
5551 /* Reload register of reload with type RELOAD_FOR_OUTADDR_ADDRESS
5552 could be killed if the register is also used by reload with type
5553 RELOAD_FOR_OUTPUT_ADDRESS, so check it. */
5554 if (type == RELOAD_FOR_OUTADDR_ADDRESS
5555 && TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
5556 return 0;
5558 return 1;
5560 default:
5561 gcc_unreachable ();
5565 /* Like reload_reg_reaches_end_p, but check that the condition holds for
5566 every register in REG. */
5568 static bool
5569 reload_reg_rtx_reaches_end_p (rtx reg, int reloadnum)
5571 unsigned int i;
5573 for (i = REGNO (reg); i < END_REGNO (reg); i++)
5574 if (!reload_reg_reaches_end_p (i, reloadnum))
5575 return false;
5576 return true;
5580 /* Returns whether R1 and R2 are uniquely chained: the value of one
5581 is used by the other, and that value is not used by any other
5582 reload for this insn. This is used to partially undo the decision
5583 made in find_reloads when in the case of multiple
5584 RELOAD_FOR_OPERAND_ADDRESS reloads it converts all
5585 RELOAD_FOR_OPADDR_ADDR reloads into RELOAD_FOR_OPERAND_ADDRESS
5586 reloads. This code tries to avoid the conflict created by that
5587 change. It might be cleaner to explicitly keep track of which
5588 RELOAD_FOR_OPADDR_ADDR reload is associated with which
5589 RELOAD_FOR_OPERAND_ADDRESS reload, rather than to try to detect
5590 this after the fact. */
5591 static bool
5592 reloads_unique_chain_p (int r1, int r2)
5594 int i;
5596 /* We only check input reloads. */
5597 if (! rld[r1].in || ! rld[r2].in)
5598 return false;
5600 /* Avoid anything with output reloads. */
5601 if (rld[r1].out || rld[r2].out)
5602 return false;
5604 /* "chained" means one reload is a component of the other reload,
5605 not the same as the other reload. */
5606 if (rld[r1].opnum != rld[r2].opnum
5607 || rtx_equal_p (rld[r1].in, rld[r2].in)
5608 || rld[r1].optional || rld[r2].optional
5609 || ! (reg_mentioned_p (rld[r1].in, rld[r2].in)
5610 || reg_mentioned_p (rld[r2].in, rld[r1].in)))
5611 return false;
5613 /* The following loop assumes that r1 is the reload that feeds r2. */
5614 if (r1 > r2)
5615 std::swap (r1, r2);
5617 for (i = 0; i < n_reloads; i ++)
5618 /* Look for input reloads that aren't our two */
5619 if (i != r1 && i != r2 && rld[i].in)
5621 /* If our reload is mentioned at all, it isn't a simple chain. */
5622 if (reg_mentioned_p (rld[r1].in, rld[i].in))
5623 return false;
5625 return true;
5628 /* The recursive function change all occurrences of WHAT in *WHERE
5629 to REPL. */
5630 static void
5631 substitute (rtx *where, const_rtx what, rtx repl)
5633 const char *fmt;
5634 int i;
5635 enum rtx_code code;
5637 if (*where == 0)
5638 return;
5640 if (*where == what || rtx_equal_p (*where, what))
5642 /* Record the location of the changed rtx. */
5643 substitute_stack.safe_push (where);
5644 *where = repl;
5645 return;
5648 code = GET_CODE (*where);
5649 fmt = GET_RTX_FORMAT (code);
5650 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5652 if (fmt[i] == 'E')
5654 int j;
5656 for (j = XVECLEN (*where, i) - 1; j >= 0; j--)
5657 substitute (&XVECEXP (*where, i, j), what, repl);
5659 else if (fmt[i] == 'e')
5660 substitute (&XEXP (*where, i), what, repl);
5664 /* The function returns TRUE if chain of reload R1 and R2 (in any
5665 order) can be evaluated without usage of intermediate register for
5666 the reload containing another reload. It is important to see
5667 gen_reload to understand what the function is trying to do. As an
5668 example, let us have reload chain
5670 r2: const
5671 r1: <something> + const
5673 and reload R2 got reload reg HR. The function returns true if
5674 there is a correct insn HR = HR + <something>. Otherwise,
5675 gen_reload will use intermediate register (and this is the reload
5676 reg for R1) to reload <something>.
5678 We need this function to find a conflict for chain reloads. In our
5679 example, if HR = HR + <something> is incorrect insn, then we cannot
5680 use HR as a reload register for R2. If we do use it then we get a
5681 wrong code:
5683 HR = const
5684 HR = <something>
5685 HR = HR + HR
5688 static bool
5689 gen_reload_chain_without_interm_reg_p (int r1, int r2)
5691 /* Assume other cases in gen_reload are not possible for
5692 chain reloads or do need an intermediate hard registers. */
5693 bool result = true;
5694 int regno, code;
5695 rtx out, in;
5696 rtx_insn *insn;
5697 rtx_insn *last = get_last_insn ();
5699 /* Make r2 a component of r1. */
5700 if (reg_mentioned_p (rld[r1].in, rld[r2].in))
5701 std::swap (r1, r2);
5703 gcc_assert (reg_mentioned_p (rld[r2].in, rld[r1].in));
5704 regno = rld[r1].regno >= 0 ? rld[r1].regno : rld[r2].regno;
5705 gcc_assert (regno >= 0);
5706 out = gen_rtx_REG (rld[r1].mode, regno);
5707 in = rld[r1].in;
5708 substitute (&in, rld[r2].in, gen_rtx_REG (rld[r2].mode, regno));
5710 /* If IN is a paradoxical SUBREG, remove it and try to put the
5711 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
5712 strip_paradoxical_subreg (&in, &out);
5714 if (GET_CODE (in) == PLUS
5715 && (REG_P (XEXP (in, 0))
5716 || GET_CODE (XEXP (in, 0)) == SUBREG
5717 || MEM_P (XEXP (in, 0)))
5718 && (REG_P (XEXP (in, 1))
5719 || GET_CODE (XEXP (in, 1)) == SUBREG
5720 || CONSTANT_P (XEXP (in, 1))
5721 || MEM_P (XEXP (in, 1))))
5723 insn = emit_insn (gen_rtx_SET (out, in));
5724 code = recog_memoized (insn);
5725 result = false;
5727 if (code >= 0)
5729 extract_insn (insn);
5730 /* We want constrain operands to treat this insn strictly in
5731 its validity determination, i.e., the way it would after
5732 reload has completed. */
5733 result = constrain_operands (1, get_enabled_alternatives (insn));
5736 delete_insns_since (last);
5739 /* Restore the original value at each changed address within R1. */
5740 while (!substitute_stack.is_empty ())
5742 rtx *where = substitute_stack.pop ();
5743 *where = rld[r2].in;
5746 return result;
5749 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
5750 Return 0 otherwise.
5752 This function uses the same algorithm as reload_reg_free_p above. */
5754 static int
5755 reloads_conflict (int r1, int r2)
5757 enum reload_type r1_type = rld[r1].when_needed;
5758 enum reload_type r2_type = rld[r2].when_needed;
5759 int r1_opnum = rld[r1].opnum;
5760 int r2_opnum = rld[r2].opnum;
5762 /* RELOAD_OTHER conflicts with everything. */
5763 if (r2_type == RELOAD_OTHER)
5764 return 1;
5766 /* Otherwise, check conflicts differently for each type. */
5768 switch (r1_type)
5770 case RELOAD_FOR_INPUT:
5771 return (r2_type == RELOAD_FOR_INSN
5772 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
5773 || r2_type == RELOAD_FOR_OPADDR_ADDR
5774 || r2_type == RELOAD_FOR_INPUT
5775 || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
5776 || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
5777 && r2_opnum > r1_opnum));
5779 case RELOAD_FOR_INPUT_ADDRESS:
5780 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
5781 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5783 case RELOAD_FOR_INPADDR_ADDRESS:
5784 return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
5785 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5787 case RELOAD_FOR_OUTPUT_ADDRESS:
5788 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
5789 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
5791 case RELOAD_FOR_OUTADDR_ADDRESS:
5792 return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
5793 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
5795 case RELOAD_FOR_OPERAND_ADDRESS:
5796 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
5797 || (r2_type == RELOAD_FOR_OPERAND_ADDRESS
5798 && (!reloads_unique_chain_p (r1, r2)
5799 || !gen_reload_chain_without_interm_reg_p (r1, r2))));
5801 case RELOAD_FOR_OPADDR_ADDR:
5802 return (r2_type == RELOAD_FOR_INPUT
5803 || r2_type == RELOAD_FOR_OPADDR_ADDR);
5805 case RELOAD_FOR_OUTPUT:
5806 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
5807 || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
5808 || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
5809 && r2_opnum >= r1_opnum));
5811 case RELOAD_FOR_INSN:
5812 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
5813 || r2_type == RELOAD_FOR_INSN
5814 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
5816 case RELOAD_FOR_OTHER_ADDRESS:
5817 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
5819 case RELOAD_OTHER:
5820 return 1;
5822 default:
5823 gcc_unreachable ();
5827 /* Indexed by reload number, 1 if incoming value
5828 inherited from previous insns. */
5829 static char reload_inherited[MAX_RELOADS];
5831 /* For an inherited reload, this is the insn the reload was inherited from,
5832 if we know it. Otherwise, this is 0. */
5833 static rtx_insn *reload_inheritance_insn[MAX_RELOADS];
5835 /* If nonzero, this is a place to get the value of the reload,
5836 rather than using reload_in. */
5837 static rtx reload_override_in[MAX_RELOADS];
5839 /* For each reload, the hard register number of the register used,
5840 or -1 if we did not need a register for this reload. */
5841 static int reload_spill_index[MAX_RELOADS];
5843 /* Index X is the value of rld[X].reg_rtx, adjusted for the input mode. */
5844 static rtx reload_reg_rtx_for_input[MAX_RELOADS];
5846 /* Index X is the value of rld[X].reg_rtx, adjusted for the output mode. */
5847 static rtx reload_reg_rtx_for_output[MAX_RELOADS];
5849 /* Subroutine of free_for_value_p, used to check a single register.
5850 START_REGNO is the starting regno of the full reload register
5851 (possibly comprising multiple hard registers) that we are considering. */
5853 static int
5854 reload_reg_free_for_value_p (int start_regno, int regno, int opnum,
5855 enum reload_type type, rtx value, rtx out,
5856 int reloadnum, int ignore_address_reloads)
5858 int time1;
5859 /* Set if we see an input reload that must not share its reload register
5860 with any new earlyclobber, but might otherwise share the reload
5861 register with an output or input-output reload. */
5862 int check_earlyclobber = 0;
5863 int i;
5864 int copy = 0;
5866 if (TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
5867 return 0;
5869 if (out == const0_rtx)
5871 copy = 1;
5872 out = NULL_RTX;
5875 /* We use some pseudo 'time' value to check if the lifetimes of the
5876 new register use would overlap with the one of a previous reload
5877 that is not read-only or uses a different value.
5878 The 'time' used doesn't have to be linear in any shape or form, just
5879 monotonic.
5880 Some reload types use different 'buckets' for each operand.
5881 So there are MAX_RECOG_OPERANDS different time values for each
5882 such reload type.
5883 We compute TIME1 as the time when the register for the prospective
5884 new reload ceases to be live, and TIME2 for each existing
5885 reload as the time when that the reload register of that reload
5886 becomes live.
5887 Where there is little to be gained by exact lifetime calculations,
5888 we just make conservative assumptions, i.e. a longer lifetime;
5889 this is done in the 'default:' cases. */
5890 switch (type)
5892 case RELOAD_FOR_OTHER_ADDRESS:
5893 /* RELOAD_FOR_OTHER_ADDRESS conflicts with RELOAD_OTHER reloads. */
5894 time1 = copy ? 0 : 1;
5895 break;
5896 case RELOAD_OTHER:
5897 time1 = copy ? 1 : MAX_RECOG_OPERANDS * 5 + 5;
5898 break;
5899 /* For each input, we may have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
5900 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT. By adding 0 / 1 / 2 ,
5901 respectively, to the time values for these, we get distinct time
5902 values. To get distinct time values for each operand, we have to
5903 multiply opnum by at least three. We round that up to four because
5904 multiply by four is often cheaper. */
5905 case RELOAD_FOR_INPADDR_ADDRESS:
5906 time1 = opnum * 4 + 2;
5907 break;
5908 case RELOAD_FOR_INPUT_ADDRESS:
5909 time1 = opnum * 4 + 3;
5910 break;
5911 case RELOAD_FOR_INPUT:
5912 /* All RELOAD_FOR_INPUT reloads remain live till the instruction
5913 executes (inclusive). */
5914 time1 = copy ? opnum * 4 + 4 : MAX_RECOG_OPERANDS * 4 + 3;
5915 break;
5916 case RELOAD_FOR_OPADDR_ADDR:
5917 /* opnum * 4 + 4
5918 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
5919 time1 = MAX_RECOG_OPERANDS * 4 + 1;
5920 break;
5921 case RELOAD_FOR_OPERAND_ADDRESS:
5922 /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
5923 is executed. */
5924 time1 = copy ? MAX_RECOG_OPERANDS * 4 + 2 : MAX_RECOG_OPERANDS * 4 + 3;
5925 break;
5926 case RELOAD_FOR_OUTADDR_ADDRESS:
5927 time1 = MAX_RECOG_OPERANDS * 4 + 4 + opnum;
5928 break;
5929 case RELOAD_FOR_OUTPUT_ADDRESS:
5930 time1 = MAX_RECOG_OPERANDS * 4 + 5 + opnum;
5931 break;
5932 default:
5933 time1 = MAX_RECOG_OPERANDS * 5 + 5;
5936 for (i = 0; i < n_reloads; i++)
5938 rtx reg = rld[i].reg_rtx;
5939 if (reg && REG_P (reg)
5940 && ((unsigned) regno - true_regnum (reg)
5941 <= hard_regno_nregs[REGNO (reg)][GET_MODE (reg)] - (unsigned) 1)
5942 && i != reloadnum)
5944 rtx other_input = rld[i].in;
5946 /* If the other reload loads the same input value, that
5947 will not cause a conflict only if it's loading it into
5948 the same register. */
5949 if (true_regnum (reg) != start_regno)
5950 other_input = NULL_RTX;
5951 if (! other_input || ! rtx_equal_p (other_input, value)
5952 || rld[i].out || out)
5954 int time2;
5955 switch (rld[i].when_needed)
5957 case RELOAD_FOR_OTHER_ADDRESS:
5958 time2 = 0;
5959 break;
5960 case RELOAD_FOR_INPADDR_ADDRESS:
5961 /* find_reloads makes sure that a
5962 RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is only used
5963 by at most one - the first -
5964 RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS . If the
5965 address reload is inherited, the address address reload
5966 goes away, so we can ignore this conflict. */
5967 if (type == RELOAD_FOR_INPUT_ADDRESS && reloadnum == i + 1
5968 && ignore_address_reloads
5969 /* Unless the RELOAD_FOR_INPUT is an auto_inc expression.
5970 Then the address address is still needed to store
5971 back the new address. */
5972 && ! rld[reloadnum].out)
5973 continue;
5974 /* Likewise, if a RELOAD_FOR_INPUT can inherit a value, its
5975 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS
5976 reloads go away. */
5977 if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
5978 && ignore_address_reloads
5979 /* Unless we are reloading an auto_inc expression. */
5980 && ! rld[reloadnum].out)
5981 continue;
5982 time2 = rld[i].opnum * 4 + 2;
5983 break;
5984 case RELOAD_FOR_INPUT_ADDRESS:
5985 if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
5986 && ignore_address_reloads
5987 && ! rld[reloadnum].out)
5988 continue;
5989 time2 = rld[i].opnum * 4 + 3;
5990 break;
5991 case RELOAD_FOR_INPUT:
5992 time2 = rld[i].opnum * 4 + 4;
5993 check_earlyclobber = 1;
5994 break;
5995 /* rld[i].opnum * 4 + 4 <= (MAX_RECOG_OPERAND - 1) * 4 + 4
5996 == MAX_RECOG_OPERAND * 4 */
5997 case RELOAD_FOR_OPADDR_ADDR:
5998 if (type == RELOAD_FOR_OPERAND_ADDRESS && reloadnum == i + 1
5999 && ignore_address_reloads
6000 && ! rld[reloadnum].out)
6001 continue;
6002 time2 = MAX_RECOG_OPERANDS * 4 + 1;
6003 break;
6004 case RELOAD_FOR_OPERAND_ADDRESS:
6005 time2 = MAX_RECOG_OPERANDS * 4 + 2;
6006 check_earlyclobber = 1;
6007 break;
6008 case RELOAD_FOR_INSN:
6009 time2 = MAX_RECOG_OPERANDS * 4 + 3;
6010 break;
6011 case RELOAD_FOR_OUTPUT:
6012 /* All RELOAD_FOR_OUTPUT reloads become live just after the
6013 instruction is executed. */
6014 time2 = MAX_RECOG_OPERANDS * 4 + 4;
6015 break;
6016 /* The first RELOAD_FOR_OUTADDR_ADDRESS reload conflicts with
6017 the RELOAD_FOR_OUTPUT reloads, so assign it the same time
6018 value. */
6019 case RELOAD_FOR_OUTADDR_ADDRESS:
6020 if (type == RELOAD_FOR_OUTPUT_ADDRESS && reloadnum == i + 1
6021 && ignore_address_reloads
6022 && ! rld[reloadnum].out)
6023 continue;
6024 time2 = MAX_RECOG_OPERANDS * 4 + 4 + rld[i].opnum;
6025 break;
6026 case RELOAD_FOR_OUTPUT_ADDRESS:
6027 time2 = MAX_RECOG_OPERANDS * 4 + 5 + rld[i].opnum;
6028 break;
6029 case RELOAD_OTHER:
6030 /* If there is no conflict in the input part, handle this
6031 like an output reload. */
6032 if (! rld[i].in || rtx_equal_p (other_input, value))
6034 time2 = MAX_RECOG_OPERANDS * 4 + 4;
6035 /* Earlyclobbered outputs must conflict with inputs. */
6036 if (earlyclobber_operand_p (rld[i].out))
6037 time2 = MAX_RECOG_OPERANDS * 4 + 3;
6039 break;
6041 time2 = 1;
6042 /* RELOAD_OTHER might be live beyond instruction execution,
6043 but this is not obvious when we set time2 = 1. So check
6044 here if there might be a problem with the new reload
6045 clobbering the register used by the RELOAD_OTHER. */
6046 if (out)
6047 return 0;
6048 break;
6049 default:
6050 return 0;
6052 if ((time1 >= time2
6053 && (! rld[i].in || rld[i].out
6054 || ! rtx_equal_p (other_input, value)))
6055 || (out && rld[reloadnum].out_reg
6056 && time2 >= MAX_RECOG_OPERANDS * 4 + 3))
6057 return 0;
6062 /* Earlyclobbered outputs must conflict with inputs. */
6063 if (check_earlyclobber && out && earlyclobber_operand_p (out))
6064 return 0;
6066 return 1;
6069 /* Return 1 if the value in reload reg REGNO, as used by a reload
6070 needed for the part of the insn specified by OPNUM and TYPE,
6071 may be used to load VALUE into it.
6073 MODE is the mode in which the register is used, this is needed to
6074 determine how many hard regs to test.
6076 Other read-only reloads with the same value do not conflict
6077 unless OUT is nonzero and these other reloads have to live while
6078 output reloads live.
6079 If OUT is CONST0_RTX, this is a special case: it means that the
6080 test should not be for using register REGNO as reload register, but
6081 for copying from register REGNO into the reload register.
6083 RELOADNUM is the number of the reload we want to load this value for;
6084 a reload does not conflict with itself.
6086 When IGNORE_ADDRESS_RELOADS is set, we can not have conflicts with
6087 reloads that load an address for the very reload we are considering.
6089 The caller has to make sure that there is no conflict with the return
6090 register. */
6092 static int
6093 free_for_value_p (int regno, machine_mode mode, int opnum,
6094 enum reload_type type, rtx value, rtx out, int reloadnum,
6095 int ignore_address_reloads)
6097 int nregs = hard_regno_nregs[regno][mode];
6098 while (nregs-- > 0)
6099 if (! reload_reg_free_for_value_p (regno, regno + nregs, opnum, type,
6100 value, out, reloadnum,
6101 ignore_address_reloads))
6102 return 0;
6103 return 1;
6106 /* Return nonzero if the rtx X is invariant over the current function. */
6107 /* ??? Actually, the places where we use this expect exactly what is
6108 tested here, and not everything that is function invariant. In
6109 particular, the frame pointer and arg pointer are special cased;
6110 pic_offset_table_rtx is not, and we must not spill these things to
6111 memory. */
6114 function_invariant_p (const_rtx x)
6116 if (CONSTANT_P (x))
6117 return 1;
6118 if (x == frame_pointer_rtx || x == arg_pointer_rtx)
6119 return 1;
6120 if (GET_CODE (x) == PLUS
6121 && (XEXP (x, 0) == frame_pointer_rtx || XEXP (x, 0) == arg_pointer_rtx)
6122 && GET_CODE (XEXP (x, 1)) == CONST_INT)
6123 return 1;
6124 return 0;
6127 /* Determine whether the reload reg X overlaps any rtx'es used for
6128 overriding inheritance. Return nonzero if so. */
6130 static int
6131 conflicts_with_override (rtx x)
6133 int i;
6134 for (i = 0; i < n_reloads; i++)
6135 if (reload_override_in[i]
6136 && reg_overlap_mentioned_p (x, reload_override_in[i]))
6137 return 1;
6138 return 0;
6141 /* Give an error message saying we failed to find a reload for INSN,
6142 and clear out reload R. */
6143 static void
6144 failed_reload (rtx_insn *insn, int r)
6146 if (asm_noperands (PATTERN (insn)) < 0)
6147 /* It's the compiler's fault. */
6148 fatal_insn ("could not find a spill register", insn);
6150 /* It's the user's fault; the operand's mode and constraint
6151 don't match. Disable this reload so we don't crash in final. */
6152 error_for_asm (insn,
6153 "%<asm%> operand constraint incompatible with operand size");
6154 rld[r].in = 0;
6155 rld[r].out = 0;
6156 rld[r].reg_rtx = 0;
6157 rld[r].optional = 1;
6158 rld[r].secondary_p = 1;
6161 /* I is the index in SPILL_REG_RTX of the reload register we are to allocate
6162 for reload R. If it's valid, get an rtx for it. Return nonzero if
6163 successful. */
6164 static int
6165 set_reload_reg (int i, int r)
6167 /* regno is 'set but not used' if HARD_REGNO_MODE_OK doesn't use its first
6168 parameter. */
6169 int regno ATTRIBUTE_UNUSED;
6170 rtx reg = spill_reg_rtx[i];
6172 if (reg == 0 || GET_MODE (reg) != rld[r].mode)
6173 spill_reg_rtx[i] = reg
6174 = gen_rtx_REG (rld[r].mode, spill_regs[i]);
6176 regno = true_regnum (reg);
6178 /* Detect when the reload reg can't hold the reload mode.
6179 This used to be one `if', but Sequent compiler can't handle that. */
6180 if (HARD_REGNO_MODE_OK (regno, rld[r].mode))
6182 machine_mode test_mode = VOIDmode;
6183 if (rld[r].in)
6184 test_mode = GET_MODE (rld[r].in);
6185 /* If rld[r].in has VOIDmode, it means we will load it
6186 in whatever mode the reload reg has: to wit, rld[r].mode.
6187 We have already tested that for validity. */
6188 /* Aside from that, we need to test that the expressions
6189 to reload from or into have modes which are valid for this
6190 reload register. Otherwise the reload insns would be invalid. */
6191 if (! (rld[r].in != 0 && test_mode != VOIDmode
6192 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
6193 if (! (rld[r].out != 0
6194 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (rld[r].out))))
6196 /* The reg is OK. */
6197 last_spill_reg = i;
6199 /* Mark as in use for this insn the reload regs we use
6200 for this. */
6201 mark_reload_reg_in_use (spill_regs[i], rld[r].opnum,
6202 rld[r].when_needed, rld[r].mode);
6204 rld[r].reg_rtx = reg;
6205 reload_spill_index[r] = spill_regs[i];
6206 return 1;
6209 return 0;
6212 /* Find a spill register to use as a reload register for reload R.
6213 LAST_RELOAD is nonzero if this is the last reload for the insn being
6214 processed.
6216 Set rld[R].reg_rtx to the register allocated.
6218 We return 1 if successful, or 0 if we couldn't find a spill reg and
6219 we didn't change anything. */
6221 static int
6222 allocate_reload_reg (struct insn_chain *chain ATTRIBUTE_UNUSED, int r,
6223 int last_reload)
6225 int i, pass, count;
6227 /* If we put this reload ahead, thinking it is a group,
6228 then insist on finding a group. Otherwise we can grab a
6229 reg that some other reload needs.
6230 (That can happen when we have a 68000 DATA_OR_FP_REG
6231 which is a group of data regs or one fp reg.)
6232 We need not be so restrictive if there are no more reloads
6233 for this insn.
6235 ??? Really it would be nicer to have smarter handling
6236 for that kind of reg class, where a problem like this is normal.
6237 Perhaps those classes should be avoided for reloading
6238 by use of more alternatives. */
6240 int force_group = rld[r].nregs > 1 && ! last_reload;
6242 /* If we want a single register and haven't yet found one,
6243 take any reg in the right class and not in use.
6244 If we want a consecutive group, here is where we look for it.
6246 We use three passes so we can first look for reload regs to
6247 reuse, which are already in use for other reloads in this insn,
6248 and only then use additional registers which are not "bad", then
6249 finally any register.
6251 I think that maximizing reuse is needed to make sure we don't
6252 run out of reload regs. Suppose we have three reloads, and
6253 reloads A and B can share regs. These need two regs.
6254 Suppose A and B are given different regs.
6255 That leaves none for C. */
6256 for (pass = 0; pass < 3; pass++)
6258 /* I is the index in spill_regs.
6259 We advance it round-robin between insns to use all spill regs
6260 equally, so that inherited reloads have a chance
6261 of leapfrogging each other. */
6263 i = last_spill_reg;
6265 for (count = 0; count < n_spills; count++)
6267 int rclass = (int) rld[r].rclass;
6268 int regnum;
6270 i++;
6271 if (i >= n_spills)
6272 i -= n_spills;
6273 regnum = spill_regs[i];
6275 if ((reload_reg_free_p (regnum, rld[r].opnum,
6276 rld[r].when_needed)
6277 || (rld[r].in
6278 /* We check reload_reg_used to make sure we
6279 don't clobber the return register. */
6280 && ! TEST_HARD_REG_BIT (reload_reg_used, regnum)
6281 && free_for_value_p (regnum, rld[r].mode, rld[r].opnum,
6282 rld[r].when_needed, rld[r].in,
6283 rld[r].out, r, 1)))
6284 && TEST_HARD_REG_BIT (reg_class_contents[rclass], regnum)
6285 && HARD_REGNO_MODE_OK (regnum, rld[r].mode)
6286 /* Look first for regs to share, then for unshared. But
6287 don't share regs used for inherited reloads; they are
6288 the ones we want to preserve. */
6289 && (pass
6290 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
6291 regnum)
6292 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
6293 regnum))))
6295 int nr = hard_regno_nregs[regnum][rld[r].mode];
6297 /* During the second pass we want to avoid reload registers
6298 which are "bad" for this reload. */
6299 if (pass == 1
6300 && ira_bad_reload_regno (regnum, rld[r].in, rld[r].out))
6301 continue;
6303 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
6304 (on 68000) got us two FP regs. If NR is 1,
6305 we would reject both of them. */
6306 if (force_group)
6307 nr = rld[r].nregs;
6308 /* If we need only one reg, we have already won. */
6309 if (nr == 1)
6311 /* But reject a single reg if we demand a group. */
6312 if (force_group)
6313 continue;
6314 break;
6316 /* Otherwise check that as many consecutive regs as we need
6317 are available here. */
6318 while (nr > 1)
6320 int regno = regnum + nr - 1;
6321 if (!(TEST_HARD_REG_BIT (reg_class_contents[rclass], regno)
6322 && spill_reg_order[regno] >= 0
6323 && reload_reg_free_p (regno, rld[r].opnum,
6324 rld[r].when_needed)))
6325 break;
6326 nr--;
6328 if (nr == 1)
6329 break;
6333 /* If we found something on the current pass, omit later passes. */
6334 if (count < n_spills)
6335 break;
6338 /* We should have found a spill register by now. */
6339 if (count >= n_spills)
6340 return 0;
6342 /* I is the index in SPILL_REG_RTX of the reload register we are to
6343 allocate. Get an rtx for it and find its register number. */
6345 return set_reload_reg (i, r);
6348 /* Initialize all the tables needed to allocate reload registers.
6349 CHAIN is the insn currently being processed; SAVE_RELOAD_REG_RTX
6350 is the array we use to restore the reg_rtx field for every reload. */
6352 static void
6353 choose_reload_regs_init (struct insn_chain *chain, rtx *save_reload_reg_rtx)
6355 int i;
6357 for (i = 0; i < n_reloads; i++)
6358 rld[i].reg_rtx = save_reload_reg_rtx[i];
6360 memset (reload_inherited, 0, MAX_RELOADS);
6361 memset (reload_inheritance_insn, 0, MAX_RELOADS * sizeof (rtx));
6362 memset (reload_override_in, 0, MAX_RELOADS * sizeof (rtx));
6364 CLEAR_HARD_REG_SET (reload_reg_used);
6365 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
6366 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
6367 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
6368 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
6369 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
6371 CLEAR_HARD_REG_SET (reg_used_in_insn);
6373 HARD_REG_SET tmp;
6374 REG_SET_TO_HARD_REG_SET (tmp, &chain->live_throughout);
6375 IOR_HARD_REG_SET (reg_used_in_insn, tmp);
6376 REG_SET_TO_HARD_REG_SET (tmp, &chain->dead_or_set);
6377 IOR_HARD_REG_SET (reg_used_in_insn, tmp);
6378 compute_use_by_pseudos (&reg_used_in_insn, &chain->live_throughout);
6379 compute_use_by_pseudos (&reg_used_in_insn, &chain->dead_or_set);
6382 for (i = 0; i < reload_n_operands; i++)
6384 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
6385 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
6386 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
6387 CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
6388 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
6389 CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
6392 COMPL_HARD_REG_SET (reload_reg_unavailable, chain->used_spill_regs);
6394 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
6396 for (i = 0; i < n_reloads; i++)
6397 /* If we have already decided to use a certain register,
6398 don't use it in another way. */
6399 if (rld[i].reg_rtx)
6400 mark_reload_reg_in_use (REGNO (rld[i].reg_rtx), rld[i].opnum,
6401 rld[i].when_needed, rld[i].mode);
6404 #ifdef SECONDARY_MEMORY_NEEDED
6405 /* If X is not a subreg, return it unmodified. If it is a subreg,
6406 look up whether we made a replacement for the SUBREG_REG. Return
6407 either the replacement or the SUBREG_REG. */
6409 static rtx
6410 replaced_subreg (rtx x)
6412 if (GET_CODE (x) == SUBREG)
6413 return find_replacement (&SUBREG_REG (x));
6414 return x;
6416 #endif
6418 /* Compute the offset to pass to subreg_regno_offset, for a pseudo of
6419 mode OUTERMODE that is available in a hard reg of mode INNERMODE.
6420 SUBREG is non-NULL if the pseudo is a subreg whose reg is a pseudo,
6421 otherwise it is NULL. */
6423 static int
6424 compute_reload_subreg_offset (machine_mode outermode,
6425 rtx subreg,
6426 machine_mode innermode)
6428 int outer_offset;
6429 machine_mode middlemode;
6431 if (!subreg)
6432 return subreg_lowpart_offset (outermode, innermode);
6434 outer_offset = SUBREG_BYTE (subreg);
6435 middlemode = GET_MODE (SUBREG_REG (subreg));
6437 /* If SUBREG is paradoxical then return the normal lowpart offset
6438 for OUTERMODE and INNERMODE. Our caller has already checked
6439 that OUTERMODE fits in INNERMODE. */
6440 if (outer_offset == 0
6441 && GET_MODE_SIZE (outermode) > GET_MODE_SIZE (middlemode))
6442 return subreg_lowpart_offset (outermode, innermode);
6444 /* SUBREG is normal, but may not be lowpart; return OUTER_OFFSET
6445 plus the normal lowpart offset for MIDDLEMODE and INNERMODE. */
6446 return outer_offset + subreg_lowpart_offset (middlemode, innermode);
6449 /* Assign hard reg targets for the pseudo-registers we must reload
6450 into hard regs for this insn.
6451 Also output the instructions to copy them in and out of the hard regs.
6453 For machines with register classes, we are responsible for
6454 finding a reload reg in the proper class. */
6456 static void
6457 choose_reload_regs (struct insn_chain *chain)
6459 rtx_insn *insn = chain->insn;
6460 int i, j;
6461 unsigned int max_group_size = 1;
6462 enum reg_class group_class = NO_REGS;
6463 int pass, win, inheritance;
6465 rtx save_reload_reg_rtx[MAX_RELOADS];
6467 /* In order to be certain of getting the registers we need,
6468 we must sort the reloads into order of increasing register class.
6469 Then our grabbing of reload registers will parallel the process
6470 that provided the reload registers.
6472 Also note whether any of the reloads wants a consecutive group of regs.
6473 If so, record the maximum size of the group desired and what
6474 register class contains all the groups needed by this insn. */
6476 for (j = 0; j < n_reloads; j++)
6478 reload_order[j] = j;
6479 if (rld[j].reg_rtx != NULL_RTX)
6481 gcc_assert (REG_P (rld[j].reg_rtx)
6482 && HARD_REGISTER_P (rld[j].reg_rtx));
6483 reload_spill_index[j] = REGNO (rld[j].reg_rtx);
6485 else
6486 reload_spill_index[j] = -1;
6488 if (rld[j].nregs > 1)
6490 max_group_size = MAX (rld[j].nregs, max_group_size);
6491 group_class
6492 = reg_class_superunion[(int) rld[j].rclass][(int) group_class];
6495 save_reload_reg_rtx[j] = rld[j].reg_rtx;
6498 if (n_reloads > 1)
6499 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
6501 /* If -O, try first with inheritance, then turning it off.
6502 If not -O, don't do inheritance.
6503 Using inheritance when not optimizing leads to paradoxes
6504 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
6505 because one side of the comparison might be inherited. */
6506 win = 0;
6507 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
6509 choose_reload_regs_init (chain, save_reload_reg_rtx);
6511 /* Process the reloads in order of preference just found.
6512 Beyond this point, subregs can be found in reload_reg_rtx.
6514 This used to look for an existing reloaded home for all of the
6515 reloads, and only then perform any new reloads. But that could lose
6516 if the reloads were done out of reg-class order because a later
6517 reload with a looser constraint might have an old home in a register
6518 needed by an earlier reload with a tighter constraint.
6520 To solve this, we make two passes over the reloads, in the order
6521 described above. In the first pass we try to inherit a reload
6522 from a previous insn. If there is a later reload that needs a
6523 class that is a proper subset of the class being processed, we must
6524 also allocate a spill register during the first pass.
6526 Then make a second pass over the reloads to allocate any reloads
6527 that haven't been given registers yet. */
6529 for (j = 0; j < n_reloads; j++)
6531 int r = reload_order[j];
6532 rtx search_equiv = NULL_RTX;
6534 /* Ignore reloads that got marked inoperative. */
6535 if (rld[r].out == 0 && rld[r].in == 0
6536 && ! rld[r].secondary_p)
6537 continue;
6539 /* If find_reloads chose to use reload_in or reload_out as a reload
6540 register, we don't need to chose one. Otherwise, try even if it
6541 found one since we might save an insn if we find the value lying
6542 around.
6543 Try also when reload_in is a pseudo without a hard reg. */
6544 if (rld[r].in != 0 && rld[r].reg_rtx != 0
6545 && (rtx_equal_p (rld[r].in, rld[r].reg_rtx)
6546 || (rtx_equal_p (rld[r].out, rld[r].reg_rtx)
6547 && !MEM_P (rld[r].in)
6548 && true_regnum (rld[r].in) < FIRST_PSEUDO_REGISTER)))
6549 continue;
6551 #if 0 /* No longer needed for correct operation.
6552 It might give better code, or might not; worth an experiment? */
6553 /* If this is an optional reload, we can't inherit from earlier insns
6554 until we are sure that any non-optional reloads have been allocated.
6555 The following code takes advantage of the fact that optional reloads
6556 are at the end of reload_order. */
6557 if (rld[r].optional != 0)
6558 for (i = 0; i < j; i++)
6559 if ((rld[reload_order[i]].out != 0
6560 || rld[reload_order[i]].in != 0
6561 || rld[reload_order[i]].secondary_p)
6562 && ! rld[reload_order[i]].optional
6563 && rld[reload_order[i]].reg_rtx == 0)
6564 allocate_reload_reg (chain, reload_order[i], 0);
6565 #endif
6567 /* First see if this pseudo is already available as reloaded
6568 for a previous insn. We cannot try to inherit for reloads
6569 that are smaller than the maximum number of registers needed
6570 for groups unless the register we would allocate cannot be used
6571 for the groups.
6573 We could check here to see if this is a secondary reload for
6574 an object that is already in a register of the desired class.
6575 This would avoid the need for the secondary reload register.
6576 But this is complex because we can't easily determine what
6577 objects might want to be loaded via this reload. So let a
6578 register be allocated here. In `emit_reload_insns' we suppress
6579 one of the loads in the case described above. */
6581 if (inheritance)
6583 int byte = 0;
6584 int regno = -1;
6585 machine_mode mode = VOIDmode;
6586 rtx subreg = NULL_RTX;
6588 if (rld[r].in == 0)
6590 else if (REG_P (rld[r].in))
6592 regno = REGNO (rld[r].in);
6593 mode = GET_MODE (rld[r].in);
6595 else if (REG_P (rld[r].in_reg))
6597 regno = REGNO (rld[r].in_reg);
6598 mode = GET_MODE (rld[r].in_reg);
6600 else if (GET_CODE (rld[r].in_reg) == SUBREG
6601 && REG_P (SUBREG_REG (rld[r].in_reg)))
6603 regno = REGNO (SUBREG_REG (rld[r].in_reg));
6604 if (regno < FIRST_PSEUDO_REGISTER)
6605 regno = subreg_regno (rld[r].in_reg);
6606 else
6608 subreg = rld[r].in_reg;
6609 byte = SUBREG_BYTE (subreg);
6611 mode = GET_MODE (rld[r].in_reg);
6613 #ifdef AUTO_INC_DEC
6614 else if (GET_RTX_CLASS (GET_CODE (rld[r].in_reg)) == RTX_AUTOINC
6615 && REG_P (XEXP (rld[r].in_reg, 0)))
6617 regno = REGNO (XEXP (rld[r].in_reg, 0));
6618 mode = GET_MODE (XEXP (rld[r].in_reg, 0));
6619 rld[r].out = rld[r].in;
6621 #endif
6622 #if 0
6623 /* This won't work, since REGNO can be a pseudo reg number.
6624 Also, it takes much more hair to keep track of all the things
6625 that can invalidate an inherited reload of part of a pseudoreg. */
6626 else if (GET_CODE (rld[r].in) == SUBREG
6627 && REG_P (SUBREG_REG (rld[r].in)))
6628 regno = subreg_regno (rld[r].in);
6629 #endif
6631 if (regno >= 0
6632 && reg_last_reload_reg[regno] != 0
6633 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
6634 >= GET_MODE_SIZE (mode) + byte)
6635 #ifdef CANNOT_CHANGE_MODE_CLASS
6636 /* Verify that the register it's in can be used in
6637 mode MODE. */
6638 && !REG_CANNOT_CHANGE_MODE_P (REGNO (reg_last_reload_reg[regno]),
6639 GET_MODE (reg_last_reload_reg[regno]),
6640 mode)
6641 #endif
6644 enum reg_class rclass = rld[r].rclass, last_class;
6645 rtx last_reg = reg_last_reload_reg[regno];
6647 i = REGNO (last_reg);
6648 byte = compute_reload_subreg_offset (mode,
6649 subreg,
6650 GET_MODE (last_reg));
6651 i += subreg_regno_offset (i, GET_MODE (last_reg), byte, mode);
6652 last_class = REGNO_REG_CLASS (i);
6654 if (reg_reloaded_contents[i] == regno
6655 && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
6656 && HARD_REGNO_MODE_OK (i, rld[r].mode)
6657 && (TEST_HARD_REG_BIT (reg_class_contents[(int) rclass], i)
6658 /* Even if we can't use this register as a reload
6659 register, we might use it for reload_override_in,
6660 if copying it to the desired class is cheap
6661 enough. */
6662 || ((register_move_cost (mode, last_class, rclass)
6663 < memory_move_cost (mode, rclass, true))
6664 && (secondary_reload_class (1, rclass, mode,
6665 last_reg)
6666 == NO_REGS)
6667 #ifdef SECONDARY_MEMORY_NEEDED
6668 && ! SECONDARY_MEMORY_NEEDED (last_class, rclass,
6669 mode)
6670 #endif
6673 && (rld[r].nregs == max_group_size
6674 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
6676 && free_for_value_p (i, rld[r].mode, rld[r].opnum,
6677 rld[r].when_needed, rld[r].in,
6678 const0_rtx, r, 1))
6680 /* If a group is needed, verify that all the subsequent
6681 registers still have their values intact. */
6682 int nr = hard_regno_nregs[i][rld[r].mode];
6683 int k;
6685 for (k = 1; k < nr; k++)
6686 if (reg_reloaded_contents[i + k] != regno
6687 || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))
6688 break;
6690 if (k == nr)
6692 int i1;
6693 int bad_for_class;
6695 last_reg = (GET_MODE (last_reg) == mode
6696 ? last_reg : gen_rtx_REG (mode, i));
6698 bad_for_class = 0;
6699 for (k = 0; k < nr; k++)
6700 bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].rclass],
6701 i+k);
6703 /* We found a register that contains the
6704 value we need. If this register is the
6705 same as an `earlyclobber' operand of the
6706 current insn, just mark it as a place to
6707 reload from since we can't use it as the
6708 reload register itself. */
6710 for (i1 = 0; i1 < n_earlyclobbers; i1++)
6711 if (reg_overlap_mentioned_for_reload_p
6712 (reg_last_reload_reg[regno],
6713 reload_earlyclobbers[i1]))
6714 break;
6716 if (i1 != n_earlyclobbers
6717 || ! (free_for_value_p (i, rld[r].mode,
6718 rld[r].opnum,
6719 rld[r].when_needed, rld[r].in,
6720 rld[r].out, r, 1))
6721 /* Don't use it if we'd clobber a pseudo reg. */
6722 || (TEST_HARD_REG_BIT (reg_used_in_insn, i)
6723 && rld[r].out
6724 && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
6725 /* Don't clobber the frame pointer. */
6726 || (i == HARD_FRAME_POINTER_REGNUM
6727 && frame_pointer_needed
6728 && rld[r].out)
6729 /* Don't really use the inherited spill reg
6730 if we need it wider than we've got it. */
6731 || (GET_MODE_SIZE (rld[r].mode)
6732 > GET_MODE_SIZE (mode))
6733 || bad_for_class
6735 /* If find_reloads chose reload_out as reload
6736 register, stay with it - that leaves the
6737 inherited register for subsequent reloads. */
6738 || (rld[r].out && rld[r].reg_rtx
6739 && rtx_equal_p (rld[r].out, rld[r].reg_rtx)))
6741 if (! rld[r].optional)
6743 reload_override_in[r] = last_reg;
6744 reload_inheritance_insn[r]
6745 = reg_reloaded_insn[i];
6748 else
6750 int k;
6751 /* We can use this as a reload reg. */
6752 /* Mark the register as in use for this part of
6753 the insn. */
6754 mark_reload_reg_in_use (i,
6755 rld[r].opnum,
6756 rld[r].when_needed,
6757 rld[r].mode);
6758 rld[r].reg_rtx = last_reg;
6759 reload_inherited[r] = 1;
6760 reload_inheritance_insn[r]
6761 = reg_reloaded_insn[i];
6762 reload_spill_index[r] = i;
6763 for (k = 0; k < nr; k++)
6764 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6765 i + k);
6772 /* Here's another way to see if the value is already lying around. */
6773 if (inheritance
6774 && rld[r].in != 0
6775 && ! reload_inherited[r]
6776 && rld[r].out == 0
6777 && (CONSTANT_P (rld[r].in)
6778 || GET_CODE (rld[r].in) == PLUS
6779 || REG_P (rld[r].in)
6780 || MEM_P (rld[r].in))
6781 && (rld[r].nregs == max_group_size
6782 || ! reg_classes_intersect_p (rld[r].rclass, group_class)))
6783 search_equiv = rld[r].in;
6785 if (search_equiv)
6787 rtx equiv
6788 = find_equiv_reg (search_equiv, insn, rld[r].rclass,
6789 -1, NULL, 0, rld[r].mode);
6790 int regno = 0;
6792 if (equiv != 0)
6794 if (REG_P (equiv))
6795 regno = REGNO (equiv);
6796 else
6798 /* This must be a SUBREG of a hard register.
6799 Make a new REG since this might be used in an
6800 address and not all machines support SUBREGs
6801 there. */
6802 gcc_assert (GET_CODE (equiv) == SUBREG);
6803 regno = subreg_regno (equiv);
6804 equiv = gen_rtx_REG (rld[r].mode, regno);
6805 /* If we choose EQUIV as the reload register, but the
6806 loop below decides to cancel the inheritance, we'll
6807 end up reloading EQUIV in rld[r].mode, not the mode
6808 it had originally. That isn't safe when EQUIV isn't
6809 available as a spill register since its value might
6810 still be live at this point. */
6811 for (i = regno; i < regno + (int) rld[r].nregs; i++)
6812 if (TEST_HARD_REG_BIT (reload_reg_unavailable, i))
6813 equiv = 0;
6817 /* If we found a spill reg, reject it unless it is free
6818 and of the desired class. */
6819 if (equiv != 0)
6821 int regs_used = 0;
6822 int bad_for_class = 0;
6823 int max_regno = regno + rld[r].nregs;
6825 for (i = regno; i < max_regno; i++)
6827 regs_used |= TEST_HARD_REG_BIT (reload_reg_used_at_all,
6829 bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].rclass],
6833 if ((regs_used
6834 && ! free_for_value_p (regno, rld[r].mode,
6835 rld[r].opnum, rld[r].when_needed,
6836 rld[r].in, rld[r].out, r, 1))
6837 || bad_for_class)
6838 equiv = 0;
6841 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, rld[r].mode))
6842 equiv = 0;
6844 /* We found a register that contains the value we need.
6845 If this register is the same as an `earlyclobber' operand
6846 of the current insn, just mark it as a place to reload from
6847 since we can't use it as the reload register itself. */
6849 if (equiv != 0)
6850 for (i = 0; i < n_earlyclobbers; i++)
6851 if (reg_overlap_mentioned_for_reload_p (equiv,
6852 reload_earlyclobbers[i]))
6854 if (! rld[r].optional)
6855 reload_override_in[r] = equiv;
6856 equiv = 0;
6857 break;
6860 /* If the equiv register we have found is explicitly clobbered
6861 in the current insn, it depends on the reload type if we
6862 can use it, use it for reload_override_in, or not at all.
6863 In particular, we then can't use EQUIV for a
6864 RELOAD_FOR_OUTPUT_ADDRESS reload. */
6866 if (equiv != 0)
6868 if (regno_clobbered_p (regno, insn, rld[r].mode, 2))
6869 switch (rld[r].when_needed)
6871 case RELOAD_FOR_OTHER_ADDRESS:
6872 case RELOAD_FOR_INPADDR_ADDRESS:
6873 case RELOAD_FOR_INPUT_ADDRESS:
6874 case RELOAD_FOR_OPADDR_ADDR:
6875 break;
6876 case RELOAD_OTHER:
6877 case RELOAD_FOR_INPUT:
6878 case RELOAD_FOR_OPERAND_ADDRESS:
6879 if (! rld[r].optional)
6880 reload_override_in[r] = equiv;
6881 /* Fall through. */
6882 default:
6883 equiv = 0;
6884 break;
6886 else if (regno_clobbered_p (regno, insn, rld[r].mode, 1))
6887 switch (rld[r].when_needed)
6889 case RELOAD_FOR_OTHER_ADDRESS:
6890 case RELOAD_FOR_INPADDR_ADDRESS:
6891 case RELOAD_FOR_INPUT_ADDRESS:
6892 case RELOAD_FOR_OPADDR_ADDR:
6893 case RELOAD_FOR_OPERAND_ADDRESS:
6894 case RELOAD_FOR_INPUT:
6895 break;
6896 case RELOAD_OTHER:
6897 if (! rld[r].optional)
6898 reload_override_in[r] = equiv;
6899 /* Fall through. */
6900 default:
6901 equiv = 0;
6902 break;
6906 /* If we found an equivalent reg, say no code need be generated
6907 to load it, and use it as our reload reg. */
6908 if (equiv != 0
6909 && (regno != HARD_FRAME_POINTER_REGNUM
6910 || !frame_pointer_needed))
6912 int nr = hard_regno_nregs[regno][rld[r].mode];
6913 int k;
6914 rld[r].reg_rtx = equiv;
6915 reload_spill_index[r] = regno;
6916 reload_inherited[r] = 1;
6918 /* If reg_reloaded_valid is not set for this register,
6919 there might be a stale spill_reg_store lying around.
6920 We must clear it, since otherwise emit_reload_insns
6921 might delete the store. */
6922 if (! TEST_HARD_REG_BIT (reg_reloaded_valid, regno))
6923 spill_reg_store[regno] = NULL;
6924 /* If any of the hard registers in EQUIV are spill
6925 registers, mark them as in use for this insn. */
6926 for (k = 0; k < nr; k++)
6928 i = spill_reg_order[regno + k];
6929 if (i >= 0)
6931 mark_reload_reg_in_use (regno, rld[r].opnum,
6932 rld[r].when_needed,
6933 rld[r].mode);
6934 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6935 regno + k);
6941 /* If we found a register to use already, or if this is an optional
6942 reload, we are done. */
6943 if (rld[r].reg_rtx != 0 || rld[r].optional != 0)
6944 continue;
6946 #if 0
6947 /* No longer needed for correct operation. Might or might
6948 not give better code on the average. Want to experiment? */
6950 /* See if there is a later reload that has a class different from our
6951 class that intersects our class or that requires less register
6952 than our reload. If so, we must allocate a register to this
6953 reload now, since that reload might inherit a previous reload
6954 and take the only available register in our class. Don't do this
6955 for optional reloads since they will force all previous reloads
6956 to be allocated. Also don't do this for reloads that have been
6957 turned off. */
6959 for (i = j + 1; i < n_reloads; i++)
6961 int s = reload_order[i];
6963 if ((rld[s].in == 0 && rld[s].out == 0
6964 && ! rld[s].secondary_p)
6965 || rld[s].optional)
6966 continue;
6968 if ((rld[s].rclass != rld[r].rclass
6969 && reg_classes_intersect_p (rld[r].rclass,
6970 rld[s].rclass))
6971 || rld[s].nregs < rld[r].nregs)
6972 break;
6975 if (i == n_reloads)
6976 continue;
6978 allocate_reload_reg (chain, r, j == n_reloads - 1);
6979 #endif
6982 /* Now allocate reload registers for anything non-optional that
6983 didn't get one yet. */
6984 for (j = 0; j < n_reloads; j++)
6986 int r = reload_order[j];
6988 /* Ignore reloads that got marked inoperative. */
6989 if (rld[r].out == 0 && rld[r].in == 0 && ! rld[r].secondary_p)
6990 continue;
6992 /* Skip reloads that already have a register allocated or are
6993 optional. */
6994 if (rld[r].reg_rtx != 0 || rld[r].optional)
6995 continue;
6997 if (! allocate_reload_reg (chain, r, j == n_reloads - 1))
6998 break;
7001 /* If that loop got all the way, we have won. */
7002 if (j == n_reloads)
7004 win = 1;
7005 break;
7008 /* Loop around and try without any inheritance. */
7011 if (! win)
7013 /* First undo everything done by the failed attempt
7014 to allocate with inheritance. */
7015 choose_reload_regs_init (chain, save_reload_reg_rtx);
7017 /* Some sanity tests to verify that the reloads found in the first
7018 pass are identical to the ones we have now. */
7019 gcc_assert (chain->n_reloads == n_reloads);
7021 for (i = 0; i < n_reloads; i++)
7023 if (chain->rld[i].regno < 0 || chain->rld[i].reg_rtx != 0)
7024 continue;
7025 gcc_assert (chain->rld[i].when_needed == rld[i].when_needed);
7026 for (j = 0; j < n_spills; j++)
7027 if (spill_regs[j] == chain->rld[i].regno)
7028 if (! set_reload_reg (j, i))
7029 failed_reload (chain->insn, i);
7033 /* If we thought we could inherit a reload, because it seemed that
7034 nothing else wanted the same reload register earlier in the insn,
7035 verify that assumption, now that all reloads have been assigned.
7036 Likewise for reloads where reload_override_in has been set. */
7038 /* If doing expensive optimizations, do one preliminary pass that doesn't
7039 cancel any inheritance, but removes reloads that have been needed only
7040 for reloads that we know can be inherited. */
7041 for (pass = flag_expensive_optimizations; pass >= 0; pass--)
7043 for (j = 0; j < n_reloads; j++)
7045 int r = reload_order[j];
7046 rtx check_reg;
7047 #ifdef SECONDARY_MEMORY_NEEDED
7048 rtx tem;
7049 #endif
7050 if (reload_inherited[r] && rld[r].reg_rtx)
7051 check_reg = rld[r].reg_rtx;
7052 else if (reload_override_in[r]
7053 && (REG_P (reload_override_in[r])
7054 || GET_CODE (reload_override_in[r]) == SUBREG))
7055 check_reg = reload_override_in[r];
7056 else
7057 continue;
7058 if (! free_for_value_p (true_regnum (check_reg), rld[r].mode,
7059 rld[r].opnum, rld[r].when_needed, rld[r].in,
7060 (reload_inherited[r]
7061 ? rld[r].out : const0_rtx),
7062 r, 1))
7064 if (pass)
7065 continue;
7066 reload_inherited[r] = 0;
7067 reload_override_in[r] = 0;
7069 /* If we can inherit a RELOAD_FOR_INPUT, or can use a
7070 reload_override_in, then we do not need its related
7071 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
7072 likewise for other reload types.
7073 We handle this by removing a reload when its only replacement
7074 is mentioned in reload_in of the reload we are going to inherit.
7075 A special case are auto_inc expressions; even if the input is
7076 inherited, we still need the address for the output. We can
7077 recognize them because they have RELOAD_OUT set to RELOAD_IN.
7078 If we succeeded removing some reload and we are doing a preliminary
7079 pass just to remove such reloads, make another pass, since the
7080 removal of one reload might allow us to inherit another one. */
7081 else if (rld[r].in
7082 && rld[r].out != rld[r].in
7083 && remove_address_replacements (rld[r].in))
7085 if (pass)
7086 pass = 2;
7088 #ifdef SECONDARY_MEMORY_NEEDED
7089 /* If we needed a memory location for the reload, we also have to
7090 remove its related reloads. */
7091 else if (rld[r].in
7092 && rld[r].out != rld[r].in
7093 && (tem = replaced_subreg (rld[r].in), REG_P (tem))
7094 && REGNO (tem) < FIRST_PSEUDO_REGISTER
7095 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (tem)),
7096 rld[r].rclass, rld[r].inmode)
7097 && remove_address_replacements
7098 (get_secondary_mem (tem, rld[r].inmode, rld[r].opnum,
7099 rld[r].when_needed)))
7101 if (pass)
7102 pass = 2;
7104 #endif
7108 /* Now that reload_override_in is known valid,
7109 actually override reload_in. */
7110 for (j = 0; j < n_reloads; j++)
7111 if (reload_override_in[j])
7112 rld[j].in = reload_override_in[j];
7114 /* If this reload won't be done because it has been canceled or is
7115 optional and not inherited, clear reload_reg_rtx so other
7116 routines (such as subst_reloads) don't get confused. */
7117 for (j = 0; j < n_reloads; j++)
7118 if (rld[j].reg_rtx != 0
7119 && ((rld[j].optional && ! reload_inherited[j])
7120 || (rld[j].in == 0 && rld[j].out == 0
7121 && ! rld[j].secondary_p)))
7123 int regno = true_regnum (rld[j].reg_rtx);
7125 if (spill_reg_order[regno] >= 0)
7126 clear_reload_reg_in_use (regno, rld[j].opnum,
7127 rld[j].when_needed, rld[j].mode);
7128 rld[j].reg_rtx = 0;
7129 reload_spill_index[j] = -1;
7132 /* Record which pseudos and which spill regs have output reloads. */
7133 for (j = 0; j < n_reloads; j++)
7135 int r = reload_order[j];
7137 i = reload_spill_index[r];
7139 /* I is nonneg if this reload uses a register.
7140 If rld[r].reg_rtx is 0, this is an optional reload
7141 that we opted to ignore. */
7142 if (rld[r].out_reg != 0 && REG_P (rld[r].out_reg)
7143 && rld[r].reg_rtx != 0)
7145 int nregno = REGNO (rld[r].out_reg);
7146 int nr = 1;
7148 if (nregno < FIRST_PSEUDO_REGISTER)
7149 nr = hard_regno_nregs[nregno][rld[r].mode];
7151 while (--nr >= 0)
7152 SET_REGNO_REG_SET (&reg_has_output_reload,
7153 nregno + nr);
7155 if (i >= 0)
7156 add_to_hard_reg_set (&reg_is_output_reload, rld[r].mode, i);
7158 gcc_assert (rld[r].when_needed == RELOAD_OTHER
7159 || rld[r].when_needed == RELOAD_FOR_OUTPUT
7160 || rld[r].when_needed == RELOAD_FOR_INSN);
7165 /* Deallocate the reload register for reload R. This is called from
7166 remove_address_replacements. */
7168 void
7169 deallocate_reload_reg (int r)
7171 int regno;
7173 if (! rld[r].reg_rtx)
7174 return;
7175 regno = true_regnum (rld[r].reg_rtx);
7176 rld[r].reg_rtx = 0;
7177 if (spill_reg_order[regno] >= 0)
7178 clear_reload_reg_in_use (regno, rld[r].opnum, rld[r].when_needed,
7179 rld[r].mode);
7180 reload_spill_index[r] = -1;
7183 /* These arrays are filled by emit_reload_insns and its subroutines. */
7184 static rtx_insn *input_reload_insns[MAX_RECOG_OPERANDS];
7185 static rtx_insn *other_input_address_reload_insns = 0;
7186 static rtx_insn *other_input_reload_insns = 0;
7187 static rtx_insn *input_address_reload_insns[MAX_RECOG_OPERANDS];
7188 static rtx_insn *inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
7189 static rtx_insn *output_reload_insns[MAX_RECOG_OPERANDS];
7190 static rtx_insn *output_address_reload_insns[MAX_RECOG_OPERANDS];
7191 static rtx_insn *outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
7192 static rtx_insn *operand_reload_insns = 0;
7193 static rtx_insn *other_operand_reload_insns = 0;
7194 static rtx_insn *other_output_reload_insns[MAX_RECOG_OPERANDS];
7196 /* Values to be put in spill_reg_store are put here first. Instructions
7197 must only be placed here if the associated reload register reaches
7198 the end of the instruction's reload sequence. */
7199 static rtx_insn *new_spill_reg_store[FIRST_PSEUDO_REGISTER];
7200 static HARD_REG_SET reg_reloaded_died;
7202 /* Check if *RELOAD_REG is suitable as an intermediate or scratch register
7203 of class NEW_CLASS with mode NEW_MODE. Or alternatively, if alt_reload_reg
7204 is nonzero, if that is suitable. On success, change *RELOAD_REG to the
7205 adjusted register, and return true. Otherwise, return false. */
7206 static bool
7207 reload_adjust_reg_for_temp (rtx *reload_reg, rtx alt_reload_reg,
7208 enum reg_class new_class,
7209 machine_mode new_mode)
7212 rtx reg;
7214 for (reg = *reload_reg; reg; reg = alt_reload_reg, alt_reload_reg = 0)
7216 unsigned regno = REGNO (reg);
7218 if (!TEST_HARD_REG_BIT (reg_class_contents[(int) new_class], regno))
7219 continue;
7220 if (GET_MODE (reg) != new_mode)
7222 if (!HARD_REGNO_MODE_OK (regno, new_mode))
7223 continue;
7224 if (hard_regno_nregs[regno][new_mode]
7225 > hard_regno_nregs[regno][GET_MODE (reg)])
7226 continue;
7227 reg = reload_adjust_reg_for_mode (reg, new_mode);
7229 *reload_reg = reg;
7230 return true;
7232 return false;
7235 /* Check if *RELOAD_REG is suitable as a scratch register for the reload
7236 pattern with insn_code ICODE, or alternatively, if alt_reload_reg is
7237 nonzero, if that is suitable. On success, change *RELOAD_REG to the
7238 adjusted register, and return true. Otherwise, return false. */
7239 static bool
7240 reload_adjust_reg_for_icode (rtx *reload_reg, rtx alt_reload_reg,
7241 enum insn_code icode)
7244 enum reg_class new_class = scratch_reload_class (icode);
7245 machine_mode new_mode = insn_data[(int) icode].operand[2].mode;
7247 return reload_adjust_reg_for_temp (reload_reg, alt_reload_reg,
7248 new_class, new_mode);
7251 /* Generate insns to perform reload RL, which is for the insn in CHAIN and
7252 has the number J. OLD contains the value to be used as input. */
7254 static void
7255 emit_input_reload_insns (struct insn_chain *chain, struct reload *rl,
7256 rtx old, int j)
7258 rtx_insn *insn = chain->insn;
7259 rtx reloadreg;
7260 rtx oldequiv_reg = 0;
7261 rtx oldequiv = 0;
7262 int special = 0;
7263 machine_mode mode;
7264 rtx_insn **where;
7266 /* delete_output_reload is only invoked properly if old contains
7267 the original pseudo register. Since this is replaced with a
7268 hard reg when RELOAD_OVERRIDE_IN is set, see if we can
7269 find the pseudo in RELOAD_IN_REG. This is also used to
7270 determine whether a secondary reload is needed. */
7271 if (reload_override_in[j]
7272 && (REG_P (rl->in_reg)
7273 || (GET_CODE (rl->in_reg) == SUBREG
7274 && REG_P (SUBREG_REG (rl->in_reg)))))
7276 oldequiv = old;
7277 old = rl->in_reg;
7279 if (oldequiv == 0)
7280 oldequiv = old;
7281 else if (REG_P (oldequiv))
7282 oldequiv_reg = oldequiv;
7283 else if (GET_CODE (oldequiv) == SUBREG)
7284 oldequiv_reg = SUBREG_REG (oldequiv);
7286 reloadreg = reload_reg_rtx_for_input[j];
7287 mode = GET_MODE (reloadreg);
7289 /* If we are reloading from a register that was recently stored in
7290 with an output-reload, see if we can prove there was
7291 actually no need to store the old value in it. */
7293 if (optimize && REG_P (oldequiv)
7294 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
7295 && spill_reg_store[REGNO (oldequiv)]
7296 && REG_P (old)
7297 && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (oldequiv)])
7298 || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
7299 rl->out_reg)))
7300 delete_output_reload (insn, j, REGNO (oldequiv), reloadreg);
7302 /* Encapsulate OLDEQUIV into the reload mode, then load RELOADREG from
7303 OLDEQUIV. */
7305 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
7306 oldequiv = SUBREG_REG (oldequiv);
7307 if (GET_MODE (oldequiv) != VOIDmode
7308 && mode != GET_MODE (oldequiv))
7309 oldequiv = gen_lowpart_SUBREG (mode, oldequiv);
7311 /* Switch to the right place to emit the reload insns. */
7312 switch (rl->when_needed)
7314 case RELOAD_OTHER:
7315 where = &other_input_reload_insns;
7316 break;
7317 case RELOAD_FOR_INPUT:
7318 where = &input_reload_insns[rl->opnum];
7319 break;
7320 case RELOAD_FOR_INPUT_ADDRESS:
7321 where = &input_address_reload_insns[rl->opnum];
7322 break;
7323 case RELOAD_FOR_INPADDR_ADDRESS:
7324 where = &inpaddr_address_reload_insns[rl->opnum];
7325 break;
7326 case RELOAD_FOR_OUTPUT_ADDRESS:
7327 where = &output_address_reload_insns[rl->opnum];
7328 break;
7329 case RELOAD_FOR_OUTADDR_ADDRESS:
7330 where = &outaddr_address_reload_insns[rl->opnum];
7331 break;
7332 case RELOAD_FOR_OPERAND_ADDRESS:
7333 where = &operand_reload_insns;
7334 break;
7335 case RELOAD_FOR_OPADDR_ADDR:
7336 where = &other_operand_reload_insns;
7337 break;
7338 case RELOAD_FOR_OTHER_ADDRESS:
7339 where = &other_input_address_reload_insns;
7340 break;
7341 default:
7342 gcc_unreachable ();
7345 push_to_sequence (*where);
7347 /* Auto-increment addresses must be reloaded in a special way. */
7348 if (rl->out && ! rl->out_reg)
7350 /* We are not going to bother supporting the case where a
7351 incremented register can't be copied directly from
7352 OLDEQUIV since this seems highly unlikely. */
7353 gcc_assert (rl->secondary_in_reload < 0);
7355 if (reload_inherited[j])
7356 oldequiv = reloadreg;
7358 old = XEXP (rl->in_reg, 0);
7360 /* Prevent normal processing of this reload. */
7361 special = 1;
7362 /* Output a special code sequence for this case. */
7363 inc_for_reload (reloadreg, oldequiv, rl->out, rl->inc);
7366 /* If we are reloading a pseudo-register that was set by the previous
7367 insn, see if we can get rid of that pseudo-register entirely
7368 by redirecting the previous insn into our reload register. */
7370 else if (optimize && REG_P (old)
7371 && REGNO (old) >= FIRST_PSEUDO_REGISTER
7372 && dead_or_set_p (insn, old)
7373 /* This is unsafe if some other reload
7374 uses the same reg first. */
7375 && ! conflicts_with_override (reloadreg)
7376 && free_for_value_p (REGNO (reloadreg), rl->mode, rl->opnum,
7377 rl->when_needed, old, rl->out, j, 0))
7379 rtx_insn *temp = PREV_INSN (insn);
7380 while (temp && (NOTE_P (temp) || DEBUG_INSN_P (temp)))
7381 temp = PREV_INSN (temp);
7382 if (temp
7383 && NONJUMP_INSN_P (temp)
7384 && GET_CODE (PATTERN (temp)) == SET
7385 && SET_DEST (PATTERN (temp)) == old
7386 /* Make sure we can access insn_operand_constraint. */
7387 && asm_noperands (PATTERN (temp)) < 0
7388 /* This is unsafe if operand occurs more than once in current
7389 insn. Perhaps some occurrences aren't reloaded. */
7390 && count_occurrences (PATTERN (insn), old, 0) == 1)
7392 rtx old = SET_DEST (PATTERN (temp));
7393 /* Store into the reload register instead of the pseudo. */
7394 SET_DEST (PATTERN (temp)) = reloadreg;
7396 /* Verify that resulting insn is valid.
7398 Note that we have replaced the destination of TEMP with
7399 RELOADREG. If TEMP references RELOADREG within an
7400 autoincrement addressing mode, then the resulting insn
7401 is ill-formed and we must reject this optimization. */
7402 extract_insn (temp);
7403 if (constrain_operands (1, get_enabled_alternatives (temp))
7404 #ifdef AUTO_INC_DEC
7405 && ! find_reg_note (temp, REG_INC, reloadreg)
7406 #endif
7409 /* If the previous insn is an output reload, the source is
7410 a reload register, and its spill_reg_store entry will
7411 contain the previous destination. This is now
7412 invalid. */
7413 if (REG_P (SET_SRC (PATTERN (temp)))
7414 && REGNO (SET_SRC (PATTERN (temp))) < FIRST_PSEUDO_REGISTER)
7416 spill_reg_store[REGNO (SET_SRC (PATTERN (temp)))] = 0;
7417 spill_reg_stored_to[REGNO (SET_SRC (PATTERN (temp)))] = 0;
7420 /* If these are the only uses of the pseudo reg,
7421 pretend for GDB it lives in the reload reg we used. */
7422 if (REG_N_DEATHS (REGNO (old)) == 1
7423 && REG_N_SETS (REGNO (old)) == 1)
7425 reg_renumber[REGNO (old)] = REGNO (reloadreg);
7426 if (ira_conflicts_p)
7427 /* Inform IRA about the change. */
7428 ira_mark_allocation_change (REGNO (old));
7429 alter_reg (REGNO (old), -1, false);
7431 special = 1;
7433 /* Adjust any debug insns between temp and insn. */
7434 while ((temp = NEXT_INSN (temp)) != insn)
7435 if (DEBUG_INSN_P (temp))
7436 replace_rtx (PATTERN (temp), old, reloadreg);
7437 else
7438 gcc_assert (NOTE_P (temp));
7440 else
7442 SET_DEST (PATTERN (temp)) = old;
7447 /* We can't do that, so output an insn to load RELOADREG. */
7449 /* If we have a secondary reload, pick up the secondary register
7450 and icode, if any. If OLDEQUIV and OLD are different or
7451 if this is an in-out reload, recompute whether or not we
7452 still need a secondary register and what the icode should
7453 be. If we still need a secondary register and the class or
7454 icode is different, go back to reloading from OLD if using
7455 OLDEQUIV means that we got the wrong type of register. We
7456 cannot have different class or icode due to an in-out reload
7457 because we don't make such reloads when both the input and
7458 output need secondary reload registers. */
7460 if (! special && rl->secondary_in_reload >= 0)
7462 rtx second_reload_reg = 0;
7463 rtx third_reload_reg = 0;
7464 int secondary_reload = rl->secondary_in_reload;
7465 rtx real_oldequiv = oldequiv;
7466 rtx real_old = old;
7467 rtx tmp;
7468 enum insn_code icode;
7469 enum insn_code tertiary_icode = CODE_FOR_nothing;
7471 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
7472 and similarly for OLD.
7473 See comments in get_secondary_reload in reload.c. */
7474 /* If it is a pseudo that cannot be replaced with its
7475 equivalent MEM, we must fall back to reload_in, which
7476 will have all the necessary substitutions registered.
7477 Likewise for a pseudo that can't be replaced with its
7478 equivalent constant.
7480 Take extra care for subregs of such pseudos. Note that
7481 we cannot use reg_equiv_mem in this case because it is
7482 not in the right mode. */
7484 tmp = oldequiv;
7485 if (GET_CODE (tmp) == SUBREG)
7486 tmp = SUBREG_REG (tmp);
7487 if (REG_P (tmp)
7488 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
7489 && (reg_equiv_memory_loc (REGNO (tmp)) != 0
7490 || reg_equiv_constant (REGNO (tmp)) != 0))
7492 if (! reg_equiv_mem (REGNO (tmp))
7493 || num_not_at_initial_offset
7494 || GET_CODE (oldequiv) == SUBREG)
7495 real_oldequiv = rl->in;
7496 else
7497 real_oldequiv = reg_equiv_mem (REGNO (tmp));
7500 tmp = old;
7501 if (GET_CODE (tmp) == SUBREG)
7502 tmp = SUBREG_REG (tmp);
7503 if (REG_P (tmp)
7504 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
7505 && (reg_equiv_memory_loc (REGNO (tmp)) != 0
7506 || reg_equiv_constant (REGNO (tmp)) != 0))
7508 if (! reg_equiv_mem (REGNO (tmp))
7509 || num_not_at_initial_offset
7510 || GET_CODE (old) == SUBREG)
7511 real_old = rl->in;
7512 else
7513 real_old = reg_equiv_mem (REGNO (tmp));
7516 second_reload_reg = rld[secondary_reload].reg_rtx;
7517 if (rld[secondary_reload].secondary_in_reload >= 0)
7519 int tertiary_reload = rld[secondary_reload].secondary_in_reload;
7521 third_reload_reg = rld[tertiary_reload].reg_rtx;
7522 tertiary_icode = rld[secondary_reload].secondary_in_icode;
7523 /* We'd have to add more code for quartary reloads. */
7524 gcc_assert (rld[tertiary_reload].secondary_in_reload < 0);
7526 icode = rl->secondary_in_icode;
7528 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
7529 || (rl->in != 0 && rl->out != 0))
7531 secondary_reload_info sri, sri2;
7532 enum reg_class new_class, new_t_class;
7534 sri.icode = CODE_FOR_nothing;
7535 sri.prev_sri = NULL;
7536 new_class
7537 = (enum reg_class) targetm.secondary_reload (1, real_oldequiv,
7538 rl->rclass, mode,
7539 &sri);
7541 if (new_class == NO_REGS && sri.icode == CODE_FOR_nothing)
7542 second_reload_reg = 0;
7543 else if (new_class == NO_REGS)
7545 if (reload_adjust_reg_for_icode (&second_reload_reg,
7546 third_reload_reg,
7547 (enum insn_code) sri.icode))
7549 icode = (enum insn_code) sri.icode;
7550 third_reload_reg = 0;
7552 else
7554 oldequiv = old;
7555 real_oldequiv = real_old;
7558 else if (sri.icode != CODE_FOR_nothing)
7559 /* We currently lack a way to express this in reloads. */
7560 gcc_unreachable ();
7561 else
7563 sri2.icode = CODE_FOR_nothing;
7564 sri2.prev_sri = &sri;
7565 new_t_class
7566 = (enum reg_class) targetm.secondary_reload (1, real_oldequiv,
7567 new_class, mode,
7568 &sri);
7569 if (new_t_class == NO_REGS && sri2.icode == CODE_FOR_nothing)
7571 if (reload_adjust_reg_for_temp (&second_reload_reg,
7572 third_reload_reg,
7573 new_class, mode))
7575 third_reload_reg = 0;
7576 tertiary_icode = (enum insn_code) sri2.icode;
7578 else
7580 oldequiv = old;
7581 real_oldequiv = real_old;
7584 else if (new_t_class == NO_REGS && sri2.icode != CODE_FOR_nothing)
7586 rtx intermediate = second_reload_reg;
7588 if (reload_adjust_reg_for_temp (&intermediate, NULL,
7589 new_class, mode)
7590 && reload_adjust_reg_for_icode (&third_reload_reg, NULL,
7591 ((enum insn_code)
7592 sri2.icode)))
7594 second_reload_reg = intermediate;
7595 tertiary_icode = (enum insn_code) sri2.icode;
7597 else
7599 oldequiv = old;
7600 real_oldequiv = real_old;
7603 else if (new_t_class != NO_REGS && sri2.icode == CODE_FOR_nothing)
7605 rtx intermediate = second_reload_reg;
7607 if (reload_adjust_reg_for_temp (&intermediate, NULL,
7608 new_class, mode)
7609 && reload_adjust_reg_for_temp (&third_reload_reg, NULL,
7610 new_t_class, mode))
7612 second_reload_reg = intermediate;
7613 tertiary_icode = (enum insn_code) sri2.icode;
7615 else
7617 oldequiv = old;
7618 real_oldequiv = real_old;
7621 else
7623 /* This could be handled more intelligently too. */
7624 oldequiv = old;
7625 real_oldequiv = real_old;
7630 /* If we still need a secondary reload register, check
7631 to see if it is being used as a scratch or intermediate
7632 register and generate code appropriately. If we need
7633 a scratch register, use REAL_OLDEQUIV since the form of
7634 the insn may depend on the actual address if it is
7635 a MEM. */
7637 if (second_reload_reg)
7639 if (icode != CODE_FOR_nothing)
7641 /* We'd have to add extra code to handle this case. */
7642 gcc_assert (!third_reload_reg);
7644 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
7645 second_reload_reg));
7646 special = 1;
7648 else
7650 /* See if we need a scratch register to load the
7651 intermediate register (a tertiary reload). */
7652 if (tertiary_icode != CODE_FOR_nothing)
7654 emit_insn ((GEN_FCN (tertiary_icode)
7655 (second_reload_reg, real_oldequiv,
7656 third_reload_reg)));
7658 else if (third_reload_reg)
7660 gen_reload (third_reload_reg, real_oldequiv,
7661 rl->opnum,
7662 rl->when_needed);
7663 gen_reload (second_reload_reg, third_reload_reg,
7664 rl->opnum,
7665 rl->when_needed);
7667 else
7668 gen_reload (second_reload_reg, real_oldequiv,
7669 rl->opnum,
7670 rl->when_needed);
7672 oldequiv = second_reload_reg;
7677 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
7679 rtx real_oldequiv = oldequiv;
7681 if ((REG_P (oldequiv)
7682 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
7683 && (reg_equiv_memory_loc (REGNO (oldequiv)) != 0
7684 || reg_equiv_constant (REGNO (oldequiv)) != 0))
7685 || (GET_CODE (oldequiv) == SUBREG
7686 && REG_P (SUBREG_REG (oldequiv))
7687 && (REGNO (SUBREG_REG (oldequiv))
7688 >= FIRST_PSEUDO_REGISTER)
7689 && ((reg_equiv_memory_loc (REGNO (SUBREG_REG (oldequiv))) != 0)
7690 || (reg_equiv_constant (REGNO (SUBREG_REG (oldequiv))) != 0)))
7691 || (CONSTANT_P (oldequiv)
7692 && (targetm.preferred_reload_class (oldequiv,
7693 REGNO_REG_CLASS (REGNO (reloadreg)))
7694 == NO_REGS)))
7695 real_oldequiv = rl->in;
7696 gen_reload (reloadreg, real_oldequiv, rl->opnum,
7697 rl->when_needed);
7700 if (cfun->can_throw_non_call_exceptions)
7701 copy_reg_eh_region_note_forward (insn, get_insns (), NULL);
7703 /* End this sequence. */
7704 *where = get_insns ();
7705 end_sequence ();
7707 /* Update reload_override_in so that delete_address_reloads_1
7708 can see the actual register usage. */
7709 if (oldequiv_reg)
7710 reload_override_in[j] = oldequiv;
7713 /* Generate insns to for the output reload RL, which is for the insn described
7714 by CHAIN and has the number J. */
7715 static void
7716 emit_output_reload_insns (struct insn_chain *chain, struct reload *rl,
7717 int j)
7719 rtx reloadreg;
7720 rtx_insn *insn = chain->insn;
7721 int special = 0;
7722 rtx old = rl->out;
7723 machine_mode mode;
7724 rtx_insn *p;
7725 rtx rl_reg_rtx;
7727 if (rl->when_needed == RELOAD_OTHER)
7728 start_sequence ();
7729 else
7730 push_to_sequence (output_reload_insns[rl->opnum]);
7732 rl_reg_rtx = reload_reg_rtx_for_output[j];
7733 mode = GET_MODE (rl_reg_rtx);
7735 reloadreg = rl_reg_rtx;
7737 /* If we need two reload regs, set RELOADREG to the intermediate
7738 one, since it will be stored into OLD. We might need a secondary
7739 register only for an input reload, so check again here. */
7741 if (rl->secondary_out_reload >= 0)
7743 rtx real_old = old;
7744 int secondary_reload = rl->secondary_out_reload;
7745 int tertiary_reload = rld[secondary_reload].secondary_out_reload;
7747 if (REG_P (old) && REGNO (old) >= FIRST_PSEUDO_REGISTER
7748 && reg_equiv_mem (REGNO (old)) != 0)
7749 real_old = reg_equiv_mem (REGNO (old));
7751 if (secondary_reload_class (0, rl->rclass, mode, real_old) != NO_REGS)
7753 rtx second_reloadreg = reloadreg;
7754 reloadreg = rld[secondary_reload].reg_rtx;
7756 /* See if RELOADREG is to be used as a scratch register
7757 or as an intermediate register. */
7758 if (rl->secondary_out_icode != CODE_FOR_nothing)
7760 /* We'd have to add extra code to handle this case. */
7761 gcc_assert (tertiary_reload < 0);
7763 emit_insn ((GEN_FCN (rl->secondary_out_icode)
7764 (real_old, second_reloadreg, reloadreg)));
7765 special = 1;
7767 else
7769 /* See if we need both a scratch and intermediate reload
7770 register. */
7772 enum insn_code tertiary_icode
7773 = rld[secondary_reload].secondary_out_icode;
7775 /* We'd have to add more code for quartary reloads. */
7776 gcc_assert (tertiary_reload < 0
7777 || rld[tertiary_reload].secondary_out_reload < 0);
7779 if (GET_MODE (reloadreg) != mode)
7780 reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);
7782 if (tertiary_icode != CODE_FOR_nothing)
7784 rtx third_reloadreg = rld[tertiary_reload].reg_rtx;
7786 /* Copy primary reload reg to secondary reload reg.
7787 (Note that these have been swapped above, then
7788 secondary reload reg to OLD using our insn.) */
7790 /* If REAL_OLD is a paradoxical SUBREG, remove it
7791 and try to put the opposite SUBREG on
7792 RELOADREG. */
7793 strip_paradoxical_subreg (&real_old, &reloadreg);
7795 gen_reload (reloadreg, second_reloadreg,
7796 rl->opnum, rl->when_needed);
7797 emit_insn ((GEN_FCN (tertiary_icode)
7798 (real_old, reloadreg, third_reloadreg)));
7799 special = 1;
7802 else
7804 /* Copy between the reload regs here and then to
7805 OUT later. */
7807 gen_reload (reloadreg, second_reloadreg,
7808 rl->opnum, rl->when_needed);
7809 if (tertiary_reload >= 0)
7811 rtx third_reloadreg = rld[tertiary_reload].reg_rtx;
7813 gen_reload (third_reloadreg, reloadreg,
7814 rl->opnum, rl->when_needed);
7815 reloadreg = third_reloadreg;
7822 /* Output the last reload insn. */
7823 if (! special)
7825 rtx set;
7827 /* Don't output the last reload if OLD is not the dest of
7828 INSN and is in the src and is clobbered by INSN. */
7829 if (! flag_expensive_optimizations
7830 || !REG_P (old)
7831 || !(set = single_set (insn))
7832 || rtx_equal_p (old, SET_DEST (set))
7833 || !reg_mentioned_p (old, SET_SRC (set))
7834 || !((REGNO (old) < FIRST_PSEUDO_REGISTER)
7835 && regno_clobbered_p (REGNO (old), insn, rl->mode, 0)))
7836 gen_reload (old, reloadreg, rl->opnum,
7837 rl->when_needed);
7840 /* Look at all insns we emitted, just to be safe. */
7841 for (p = get_insns (); p; p = NEXT_INSN (p))
7842 if (INSN_P (p))
7844 rtx pat = PATTERN (p);
7846 /* If this output reload doesn't come from a spill reg,
7847 clear any memory of reloaded copies of the pseudo reg.
7848 If this output reload comes from a spill reg,
7849 reg_has_output_reload will make this do nothing. */
7850 note_stores (pat, forget_old_reloads_1, NULL);
7852 if (reg_mentioned_p (rl_reg_rtx, pat))
7854 rtx set = single_set (insn);
7855 if (reload_spill_index[j] < 0
7856 && set
7857 && SET_SRC (set) == rl_reg_rtx)
7859 int src = REGNO (SET_SRC (set));
7861 reload_spill_index[j] = src;
7862 SET_HARD_REG_BIT (reg_is_output_reload, src);
7863 if (find_regno_note (insn, REG_DEAD, src))
7864 SET_HARD_REG_BIT (reg_reloaded_died, src);
7866 if (HARD_REGISTER_P (rl_reg_rtx))
7868 int s = rl->secondary_out_reload;
7869 set = single_set (p);
7870 /* If this reload copies only to the secondary reload
7871 register, the secondary reload does the actual
7872 store. */
7873 if (s >= 0 && set == NULL_RTX)
7874 /* We can't tell what function the secondary reload
7875 has and where the actual store to the pseudo is
7876 made; leave new_spill_reg_store alone. */
7878 else if (s >= 0
7879 && SET_SRC (set) == rl_reg_rtx
7880 && SET_DEST (set) == rld[s].reg_rtx)
7882 /* Usually the next instruction will be the
7883 secondary reload insn; if we can confirm
7884 that it is, setting new_spill_reg_store to
7885 that insn will allow an extra optimization. */
7886 rtx s_reg = rld[s].reg_rtx;
7887 rtx_insn *next = NEXT_INSN (p);
7888 rld[s].out = rl->out;
7889 rld[s].out_reg = rl->out_reg;
7890 set = single_set (next);
7891 if (set && SET_SRC (set) == s_reg
7892 && reload_reg_rtx_reaches_end_p (s_reg, s))
7894 SET_HARD_REG_BIT (reg_is_output_reload,
7895 REGNO (s_reg));
7896 new_spill_reg_store[REGNO (s_reg)] = next;
7899 else if (reload_reg_rtx_reaches_end_p (rl_reg_rtx, j))
7900 new_spill_reg_store[REGNO (rl_reg_rtx)] = p;
7905 if (rl->when_needed == RELOAD_OTHER)
7907 emit_insn (other_output_reload_insns[rl->opnum]);
7908 other_output_reload_insns[rl->opnum] = get_insns ();
7910 else
7911 output_reload_insns[rl->opnum] = get_insns ();
7913 if (cfun->can_throw_non_call_exceptions)
7914 copy_reg_eh_region_note_forward (insn, get_insns (), NULL);
7916 end_sequence ();
7919 /* Do input reloading for reload RL, which is for the insn described by CHAIN
7920 and has the number J. */
7921 static void
7922 do_input_reload (struct insn_chain *chain, struct reload *rl, int j)
7924 rtx_insn *insn = chain->insn;
7925 rtx old = (rl->in && MEM_P (rl->in)
7926 ? rl->in_reg : rl->in);
7927 rtx reg_rtx = rl->reg_rtx;
7929 if (old && reg_rtx)
7931 machine_mode mode;
7933 /* Determine the mode to reload in.
7934 This is very tricky because we have three to choose from.
7935 There is the mode the insn operand wants (rl->inmode).
7936 There is the mode of the reload register RELOADREG.
7937 There is the intrinsic mode of the operand, which we could find
7938 by stripping some SUBREGs.
7939 It turns out that RELOADREG's mode is irrelevant:
7940 we can change that arbitrarily.
7942 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
7943 then the reload reg may not support QImode moves, so use SImode.
7944 If foo is in memory due to spilling a pseudo reg, this is safe,
7945 because the QImode value is in the least significant part of a
7946 slot big enough for a SImode. If foo is some other sort of
7947 memory reference, then it is impossible to reload this case,
7948 so previous passes had better make sure this never happens.
7950 Then consider a one-word union which has SImode and one of its
7951 members is a float, being fetched as (SUBREG:SF union:SI).
7952 We must fetch that as SFmode because we could be loading into
7953 a float-only register. In this case OLD's mode is correct.
7955 Consider an immediate integer: it has VOIDmode. Here we need
7956 to get a mode from something else.
7958 In some cases, there is a fourth mode, the operand's
7959 containing mode. If the insn specifies a containing mode for
7960 this operand, it overrides all others.
7962 I am not sure whether the algorithm here is always right,
7963 but it does the right things in those cases. */
7965 mode = GET_MODE (old);
7966 if (mode == VOIDmode)
7967 mode = rl->inmode;
7969 /* We cannot use gen_lowpart_common since it can do the wrong thing
7970 when REG_RTX has a multi-word mode. Note that REG_RTX must
7971 always be a REG here. */
7972 if (GET_MODE (reg_rtx) != mode)
7973 reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
7975 reload_reg_rtx_for_input[j] = reg_rtx;
7977 if (old != 0
7978 /* AUTO_INC reloads need to be handled even if inherited. We got an
7979 AUTO_INC reload if reload_out is set but reload_out_reg isn't. */
7980 && (! reload_inherited[j] || (rl->out && ! rl->out_reg))
7981 && ! rtx_equal_p (reg_rtx, old)
7982 && reg_rtx != 0)
7983 emit_input_reload_insns (chain, rld + j, old, j);
7985 /* When inheriting a wider reload, we have a MEM in rl->in,
7986 e.g. inheriting a SImode output reload for
7987 (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10))) */
7988 if (optimize && reload_inherited[j] && rl->in
7989 && MEM_P (rl->in)
7990 && MEM_P (rl->in_reg)
7991 && reload_spill_index[j] >= 0
7992 && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
7993 rl->in = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];
7995 /* If we are reloading a register that was recently stored in with an
7996 output-reload, see if we can prove there was
7997 actually no need to store the old value in it. */
7999 if (optimize
8000 && (reload_inherited[j] || reload_override_in[j])
8001 && reg_rtx
8002 && REG_P (reg_rtx)
8003 && spill_reg_store[REGNO (reg_rtx)] != 0
8004 #if 0
8005 /* There doesn't seem to be any reason to restrict this to pseudos
8006 and doing so loses in the case where we are copying from a
8007 register of the wrong class. */
8008 && !HARD_REGISTER_P (spill_reg_stored_to[REGNO (reg_rtx)])
8009 #endif
8010 /* The insn might have already some references to stackslots
8011 replaced by MEMs, while reload_out_reg still names the
8012 original pseudo. */
8013 && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (reg_rtx)])
8014 || rtx_equal_p (spill_reg_stored_to[REGNO (reg_rtx)], rl->out_reg)))
8015 delete_output_reload (insn, j, REGNO (reg_rtx), reg_rtx);
8018 /* Do output reloading for reload RL, which is for the insn described by
8019 CHAIN and has the number J.
8020 ??? At some point we need to support handling output reloads of
8021 JUMP_INSNs or insns that set cc0. */
8022 static void
8023 do_output_reload (struct insn_chain *chain, struct reload *rl, int j)
8025 rtx note, old;
8026 rtx_insn *insn = chain->insn;
8027 /* If this is an output reload that stores something that is
8028 not loaded in this same reload, see if we can eliminate a previous
8029 store. */
8030 rtx pseudo = rl->out_reg;
8031 rtx reg_rtx = rl->reg_rtx;
8033 if (rl->out && reg_rtx)
8035 machine_mode mode;
8037 /* Determine the mode to reload in.
8038 See comments above (for input reloading). */
8039 mode = GET_MODE (rl->out);
8040 if (mode == VOIDmode)
8042 /* VOIDmode should never happen for an output. */
8043 if (asm_noperands (PATTERN (insn)) < 0)
8044 /* It's the compiler's fault. */
8045 fatal_insn ("VOIDmode on an output", insn);
8046 error_for_asm (insn, "output operand is constant in %<asm%>");
8047 /* Prevent crash--use something we know is valid. */
8048 mode = word_mode;
8049 rl->out = gen_rtx_REG (mode, REGNO (reg_rtx));
8051 if (GET_MODE (reg_rtx) != mode)
8052 reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
8054 reload_reg_rtx_for_output[j] = reg_rtx;
8056 if (pseudo
8057 && optimize
8058 && REG_P (pseudo)
8059 && ! rtx_equal_p (rl->in_reg, pseudo)
8060 && REGNO (pseudo) >= FIRST_PSEUDO_REGISTER
8061 && reg_last_reload_reg[REGNO (pseudo)])
8063 int pseudo_no = REGNO (pseudo);
8064 int last_regno = REGNO (reg_last_reload_reg[pseudo_no]);
8066 /* We don't need to test full validity of last_regno for
8067 inherit here; we only want to know if the store actually
8068 matches the pseudo. */
8069 if (TEST_HARD_REG_BIT (reg_reloaded_valid, last_regno)
8070 && reg_reloaded_contents[last_regno] == pseudo_no
8071 && spill_reg_store[last_regno]
8072 && rtx_equal_p (pseudo, spill_reg_stored_to[last_regno]))
8073 delete_output_reload (insn, j, last_regno, reg_rtx);
8076 old = rl->out_reg;
8077 if (old == 0
8078 || reg_rtx == 0
8079 || rtx_equal_p (old, reg_rtx))
8080 return;
8082 /* An output operand that dies right away does need a reload,
8083 but need not be copied from it. Show the new location in the
8084 REG_UNUSED note. */
8085 if ((REG_P (old) || GET_CODE (old) == SCRATCH)
8086 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
8088 XEXP (note, 0) = reg_rtx;
8089 return;
8091 /* Likewise for a SUBREG of an operand that dies. */
8092 else if (GET_CODE (old) == SUBREG
8093 && REG_P (SUBREG_REG (old))
8094 && 0 != (note = find_reg_note (insn, REG_UNUSED,
8095 SUBREG_REG (old))))
8097 XEXP (note, 0) = gen_lowpart_common (GET_MODE (old), reg_rtx);
8098 return;
8100 else if (GET_CODE (old) == SCRATCH)
8101 /* If we aren't optimizing, there won't be a REG_UNUSED note,
8102 but we don't want to make an output reload. */
8103 return;
8105 /* If is a JUMP_INSN, we can't support output reloads yet. */
8106 gcc_assert (NONJUMP_INSN_P (insn));
8108 emit_output_reload_insns (chain, rld + j, j);
8111 /* A reload copies values of MODE from register SRC to register DEST.
8112 Return true if it can be treated for inheritance purposes like a
8113 group of reloads, each one reloading a single hard register. The
8114 caller has already checked that (reg:MODE SRC) and (reg:MODE DEST)
8115 occupy the same number of hard registers. */
8117 static bool
8118 inherit_piecemeal_p (int dest ATTRIBUTE_UNUSED,
8119 int src ATTRIBUTE_UNUSED,
8120 machine_mode mode ATTRIBUTE_UNUSED)
8122 #ifdef CANNOT_CHANGE_MODE_CLASS
8123 return (!REG_CANNOT_CHANGE_MODE_P (dest, mode, reg_raw_mode[dest])
8124 && !REG_CANNOT_CHANGE_MODE_P (src, mode, reg_raw_mode[src]));
8125 #else
8126 return true;
8127 #endif
8130 /* Output insns to reload values in and out of the chosen reload regs. */
8132 static void
8133 emit_reload_insns (struct insn_chain *chain)
8135 rtx_insn *insn = chain->insn;
8137 int j;
8139 CLEAR_HARD_REG_SET (reg_reloaded_died);
8141 for (j = 0; j < reload_n_operands; j++)
8142 input_reload_insns[j] = input_address_reload_insns[j]
8143 = inpaddr_address_reload_insns[j]
8144 = output_reload_insns[j] = output_address_reload_insns[j]
8145 = outaddr_address_reload_insns[j]
8146 = other_output_reload_insns[j] = 0;
8147 other_input_address_reload_insns = 0;
8148 other_input_reload_insns = 0;
8149 operand_reload_insns = 0;
8150 other_operand_reload_insns = 0;
8152 /* Dump reloads into the dump file. */
8153 if (dump_file)
8155 fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
8156 debug_reload_to_stream (dump_file);
8159 for (j = 0; j < n_reloads; j++)
8160 if (rld[j].reg_rtx && HARD_REGISTER_P (rld[j].reg_rtx))
8162 unsigned int i;
8164 for (i = REGNO (rld[j].reg_rtx); i < END_REGNO (rld[j].reg_rtx); i++)
8165 new_spill_reg_store[i] = 0;
8168 /* Now output the instructions to copy the data into and out of the
8169 reload registers. Do these in the order that the reloads were reported,
8170 since reloads of base and index registers precede reloads of operands
8171 and the operands may need the base and index registers reloaded. */
8173 for (j = 0; j < n_reloads; j++)
8175 do_input_reload (chain, rld + j, j);
8176 do_output_reload (chain, rld + j, j);
8179 /* Now write all the insns we made for reloads in the order expected by
8180 the allocation functions. Prior to the insn being reloaded, we write
8181 the following reloads:
8183 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
8185 RELOAD_OTHER reloads.
8187 For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
8188 by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
8189 RELOAD_FOR_INPUT reload for the operand.
8191 RELOAD_FOR_OPADDR_ADDRS reloads.
8193 RELOAD_FOR_OPERAND_ADDRESS reloads.
8195 After the insn being reloaded, we write the following:
8197 For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
8198 by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
8199 RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
8200 reloads for the operand. The RELOAD_OTHER output reloads are
8201 output in descending order by reload number. */
8203 emit_insn_before (other_input_address_reload_insns, insn);
8204 emit_insn_before (other_input_reload_insns, insn);
8206 for (j = 0; j < reload_n_operands; j++)
8208 emit_insn_before (inpaddr_address_reload_insns[j], insn);
8209 emit_insn_before (input_address_reload_insns[j], insn);
8210 emit_insn_before (input_reload_insns[j], insn);
8213 emit_insn_before (other_operand_reload_insns, insn);
8214 emit_insn_before (operand_reload_insns, insn);
8216 for (j = 0; j < reload_n_operands; j++)
8218 rtx_insn *x = emit_insn_after (outaddr_address_reload_insns[j], insn);
8219 x = emit_insn_after (output_address_reload_insns[j], x);
8220 x = emit_insn_after (output_reload_insns[j], x);
8221 emit_insn_after (other_output_reload_insns[j], x);
8224 /* For all the spill regs newly reloaded in this instruction,
8225 record what they were reloaded from, so subsequent instructions
8226 can inherit the reloads.
8228 Update spill_reg_store for the reloads of this insn.
8229 Copy the elements that were updated in the loop above. */
8231 for (j = 0; j < n_reloads; j++)
8233 int r = reload_order[j];
8234 int i = reload_spill_index[r];
8236 /* If this is a non-inherited input reload from a pseudo, we must
8237 clear any memory of a previous store to the same pseudo. Only do
8238 something if there will not be an output reload for the pseudo
8239 being reloaded. */
8240 if (rld[r].in_reg != 0
8241 && ! (reload_inherited[r] || reload_override_in[r]))
8243 rtx reg = rld[r].in_reg;
8245 if (GET_CODE (reg) == SUBREG)
8246 reg = SUBREG_REG (reg);
8248 if (REG_P (reg)
8249 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
8250 && !REGNO_REG_SET_P (&reg_has_output_reload, REGNO (reg)))
8252 int nregno = REGNO (reg);
8254 if (reg_last_reload_reg[nregno])
8256 int last_regno = REGNO (reg_last_reload_reg[nregno]);
8258 if (reg_reloaded_contents[last_regno] == nregno)
8259 spill_reg_store[last_regno] = 0;
8264 /* I is nonneg if this reload used a register.
8265 If rld[r].reg_rtx is 0, this is an optional reload
8266 that we opted to ignore. */
8268 if (i >= 0 && rld[r].reg_rtx != 0)
8270 int nr = hard_regno_nregs[i][GET_MODE (rld[r].reg_rtx)];
8271 int k;
8273 /* For a multi register reload, we need to check if all or part
8274 of the value lives to the end. */
8275 for (k = 0; k < nr; k++)
8276 if (reload_reg_reaches_end_p (i + k, r))
8277 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
8279 /* Maybe the spill reg contains a copy of reload_out. */
8280 if (rld[r].out != 0
8281 && (REG_P (rld[r].out)
8282 || (rld[r].out_reg
8283 ? REG_P (rld[r].out_reg)
8284 /* The reload value is an auto-modification of
8285 some kind. For PRE_INC, POST_INC, PRE_DEC
8286 and POST_DEC, we record an equivalence
8287 between the reload register and the operand
8288 on the optimistic assumption that we can make
8289 the equivalence hold. reload_as_needed must
8290 then either make it hold or invalidate the
8291 equivalence.
8293 PRE_MODIFY and POST_MODIFY addresses are reloaded
8294 somewhat differently, and allowing them here leads
8295 to problems. */
8296 : (GET_CODE (rld[r].out) != POST_MODIFY
8297 && GET_CODE (rld[r].out) != PRE_MODIFY))))
8299 rtx reg;
8301 reg = reload_reg_rtx_for_output[r];
8302 if (reload_reg_rtx_reaches_end_p (reg, r))
8304 machine_mode mode = GET_MODE (reg);
8305 int regno = REGNO (reg);
8306 int nregs = hard_regno_nregs[regno][mode];
8307 rtx out = (REG_P (rld[r].out)
8308 ? rld[r].out
8309 : rld[r].out_reg
8310 ? rld[r].out_reg
8311 /* AUTO_INC */ : XEXP (rld[r].in_reg, 0));
8312 int out_regno = REGNO (out);
8313 int out_nregs = (!HARD_REGISTER_NUM_P (out_regno) ? 1
8314 : hard_regno_nregs[out_regno][mode]);
8315 bool piecemeal;
8317 spill_reg_store[regno] = new_spill_reg_store[regno];
8318 spill_reg_stored_to[regno] = out;
8319 reg_last_reload_reg[out_regno] = reg;
8321 piecemeal = (HARD_REGISTER_NUM_P (out_regno)
8322 && nregs == out_nregs
8323 && inherit_piecemeal_p (out_regno, regno, mode));
8325 /* If OUT_REGNO is a hard register, it may occupy more than
8326 one register. If it does, say what is in the
8327 rest of the registers assuming that both registers
8328 agree on how many words the object takes. If not,
8329 invalidate the subsequent registers. */
8331 if (HARD_REGISTER_NUM_P (out_regno))
8332 for (k = 1; k < out_nregs; k++)
8333 reg_last_reload_reg[out_regno + k]
8334 = (piecemeal ? regno_reg_rtx[regno + k] : 0);
8336 /* Now do the inverse operation. */
8337 for (k = 0; k < nregs; k++)
8339 CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
8340 reg_reloaded_contents[regno + k]
8341 = (!HARD_REGISTER_NUM_P (out_regno) || !piecemeal
8342 ? out_regno
8343 : out_regno + k);
8344 reg_reloaded_insn[regno + k] = insn;
8345 SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
8346 if (HARD_REGNO_CALL_PART_CLOBBERED (regno + k, mode))
8347 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8348 regno + k);
8349 else
8350 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8351 regno + k);
8355 /* Maybe the spill reg contains a copy of reload_in. Only do
8356 something if there will not be an output reload for
8357 the register being reloaded. */
8358 else if (rld[r].out_reg == 0
8359 && rld[r].in != 0
8360 && ((REG_P (rld[r].in)
8361 && !HARD_REGISTER_P (rld[r].in)
8362 && !REGNO_REG_SET_P (&reg_has_output_reload,
8363 REGNO (rld[r].in)))
8364 || (REG_P (rld[r].in_reg)
8365 && !REGNO_REG_SET_P (&reg_has_output_reload,
8366 REGNO (rld[r].in_reg))))
8367 && !reg_set_p (reload_reg_rtx_for_input[r], PATTERN (insn)))
8369 rtx reg;
8371 reg = reload_reg_rtx_for_input[r];
8372 if (reload_reg_rtx_reaches_end_p (reg, r))
8374 machine_mode mode;
8375 int regno;
8376 int nregs;
8377 int in_regno;
8378 int in_nregs;
8379 rtx in;
8380 bool piecemeal;
8382 mode = GET_MODE (reg);
8383 regno = REGNO (reg);
8384 nregs = hard_regno_nregs[regno][mode];
8385 if (REG_P (rld[r].in)
8386 && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER)
8387 in = rld[r].in;
8388 else if (REG_P (rld[r].in_reg))
8389 in = rld[r].in_reg;
8390 else
8391 in = XEXP (rld[r].in_reg, 0);
8392 in_regno = REGNO (in);
8394 in_nregs = (!HARD_REGISTER_NUM_P (in_regno) ? 1
8395 : hard_regno_nregs[in_regno][mode]);
8397 reg_last_reload_reg[in_regno] = reg;
8399 piecemeal = (HARD_REGISTER_NUM_P (in_regno)
8400 && nregs == in_nregs
8401 && inherit_piecemeal_p (regno, in_regno, mode));
8403 if (HARD_REGISTER_NUM_P (in_regno))
8404 for (k = 1; k < in_nregs; k++)
8405 reg_last_reload_reg[in_regno + k]
8406 = (piecemeal ? regno_reg_rtx[regno + k] : 0);
8408 /* Unless we inherited this reload, show we haven't
8409 recently done a store.
8410 Previous stores of inherited auto_inc expressions
8411 also have to be discarded. */
8412 if (! reload_inherited[r]
8413 || (rld[r].out && ! rld[r].out_reg))
8414 spill_reg_store[regno] = 0;
8416 for (k = 0; k < nregs; k++)
8418 CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
8419 reg_reloaded_contents[regno + k]
8420 = (!HARD_REGISTER_NUM_P (in_regno) || !piecemeal
8421 ? in_regno
8422 : in_regno + k);
8423 reg_reloaded_insn[regno + k] = insn;
8424 SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
8425 if (HARD_REGNO_CALL_PART_CLOBBERED (regno + k, mode))
8426 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8427 regno + k);
8428 else
8429 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8430 regno + k);
8436 /* The following if-statement was #if 0'd in 1.34 (or before...).
8437 It's reenabled in 1.35 because supposedly nothing else
8438 deals with this problem. */
8440 /* If a register gets output-reloaded from a non-spill register,
8441 that invalidates any previous reloaded copy of it.
8442 But forget_old_reloads_1 won't get to see it, because
8443 it thinks only about the original insn. So invalidate it here.
8444 Also do the same thing for RELOAD_OTHER constraints where the
8445 output is discarded. */
8446 if (i < 0
8447 && ((rld[r].out != 0
8448 && (REG_P (rld[r].out)
8449 || (MEM_P (rld[r].out)
8450 && REG_P (rld[r].out_reg))))
8451 || (rld[r].out == 0 && rld[r].out_reg
8452 && REG_P (rld[r].out_reg))))
8454 rtx out = ((rld[r].out && REG_P (rld[r].out))
8455 ? rld[r].out : rld[r].out_reg);
8456 int out_regno = REGNO (out);
8457 machine_mode mode = GET_MODE (out);
8459 /* REG_RTX is now set or clobbered by the main instruction.
8460 As the comment above explains, forget_old_reloads_1 only
8461 sees the original instruction, and there is no guarantee
8462 that the original instruction also clobbered REG_RTX.
8463 For example, if find_reloads sees that the input side of
8464 a matched operand pair dies in this instruction, it may
8465 use the input register as the reload register.
8467 Calling forget_old_reloads_1 is a waste of effort if
8468 REG_RTX is also the output register.
8470 If we know that REG_RTX holds the value of a pseudo
8471 register, the code after the call will record that fact. */
8472 if (rld[r].reg_rtx && rld[r].reg_rtx != out)
8473 forget_old_reloads_1 (rld[r].reg_rtx, NULL_RTX, NULL);
8475 if (!HARD_REGISTER_NUM_P (out_regno))
8477 rtx src_reg;
8478 rtx_insn *store_insn = NULL;
8480 reg_last_reload_reg[out_regno] = 0;
8482 /* If we can find a hard register that is stored, record
8483 the storing insn so that we may delete this insn with
8484 delete_output_reload. */
8485 src_reg = reload_reg_rtx_for_output[r];
8487 if (src_reg)
8489 if (reload_reg_rtx_reaches_end_p (src_reg, r))
8490 store_insn = new_spill_reg_store[REGNO (src_reg)];
8491 else
8492 src_reg = NULL_RTX;
8494 else
8496 /* If this is an optional reload, try to find the
8497 source reg from an input reload. */
8498 rtx set = single_set (insn);
8499 if (set && SET_DEST (set) == rld[r].out)
8501 int k;
8503 src_reg = SET_SRC (set);
8504 store_insn = insn;
8505 for (k = 0; k < n_reloads; k++)
8507 if (rld[k].in == src_reg)
8509 src_reg = reload_reg_rtx_for_input[k];
8510 break;
8515 if (src_reg && REG_P (src_reg)
8516 && REGNO (src_reg) < FIRST_PSEUDO_REGISTER)
8518 int src_regno, src_nregs, k;
8519 rtx note;
8521 gcc_assert (GET_MODE (src_reg) == mode);
8522 src_regno = REGNO (src_reg);
8523 src_nregs = hard_regno_nregs[src_regno][mode];
8524 /* The place where to find a death note varies with
8525 PRESERVE_DEATH_INFO_REGNO_P . The condition is not
8526 necessarily checked exactly in the code that moves
8527 notes, so just check both locations. */
8528 note = find_regno_note (insn, REG_DEAD, src_regno);
8529 if (! note && store_insn)
8530 note = find_regno_note (store_insn, REG_DEAD, src_regno);
8531 for (k = 0; k < src_nregs; k++)
8533 spill_reg_store[src_regno + k] = store_insn;
8534 spill_reg_stored_to[src_regno + k] = out;
8535 reg_reloaded_contents[src_regno + k] = out_regno;
8536 reg_reloaded_insn[src_regno + k] = store_insn;
8537 CLEAR_HARD_REG_BIT (reg_reloaded_dead, src_regno + k);
8538 SET_HARD_REG_BIT (reg_reloaded_valid, src_regno + k);
8539 if (HARD_REGNO_CALL_PART_CLOBBERED (src_regno + k,
8540 mode))
8541 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8542 src_regno + k);
8543 else
8544 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8545 src_regno + k);
8546 SET_HARD_REG_BIT (reg_is_output_reload, src_regno + k);
8547 if (note)
8548 SET_HARD_REG_BIT (reg_reloaded_died, src_regno);
8549 else
8550 CLEAR_HARD_REG_BIT (reg_reloaded_died, src_regno);
8552 reg_last_reload_reg[out_regno] = src_reg;
8553 /* We have to set reg_has_output_reload here, or else
8554 forget_old_reloads_1 will clear reg_last_reload_reg
8555 right away. */
8556 SET_REGNO_REG_SET (&reg_has_output_reload,
8557 out_regno);
8560 else
8562 int k, out_nregs = hard_regno_nregs[out_regno][mode];
8564 for (k = 0; k < out_nregs; k++)
8565 reg_last_reload_reg[out_regno + k] = 0;
8569 IOR_HARD_REG_SET (reg_reloaded_dead, reg_reloaded_died);
8572 /* Go through the motions to emit INSN and test if it is strictly valid.
8573 Return the emitted insn if valid, else return NULL. */
8575 static rtx_insn *
8576 emit_insn_if_valid_for_reload (rtx pat)
8578 rtx_insn *last = get_last_insn ();
8579 int code;
8581 rtx_insn *insn = emit_insn (pat);
8582 code = recog_memoized (insn);
8584 if (code >= 0)
8586 extract_insn (insn);
8587 /* We want constrain operands to treat this insn strictly in its
8588 validity determination, i.e., the way it would after reload has
8589 completed. */
8590 if (constrain_operands (1, get_enabled_alternatives (insn)))
8591 return insn;
8594 delete_insns_since (last);
8595 return NULL;
8598 /* Emit code to perform a reload from IN (which may be a reload register) to
8599 OUT (which may also be a reload register). IN or OUT is from operand
8600 OPNUM with reload type TYPE.
8602 Returns first insn emitted. */
8604 static rtx_insn *
8605 gen_reload (rtx out, rtx in, int opnum, enum reload_type type)
8607 rtx_insn *last = get_last_insn ();
8608 rtx_insn *tem;
8609 #ifdef SECONDARY_MEMORY_NEEDED
8610 rtx tem1, tem2;
8611 #endif
8613 /* If IN is a paradoxical SUBREG, remove it and try to put the
8614 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
8615 if (!strip_paradoxical_subreg (&in, &out))
8616 strip_paradoxical_subreg (&out, &in);
8618 /* How to do this reload can get quite tricky. Normally, we are being
8619 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
8620 register that didn't get a hard register. In that case we can just
8621 call emit_move_insn.
8623 We can also be asked to reload a PLUS that adds a register or a MEM to
8624 another register, constant or MEM. This can occur during frame pointer
8625 elimination and while reloading addresses. This case is handled by
8626 trying to emit a single insn to perform the add. If it is not valid,
8627 we use a two insn sequence.
8629 Or we can be asked to reload an unary operand that was a fragment of
8630 an addressing mode, into a register. If it isn't recognized as-is,
8631 we try making the unop operand and the reload-register the same:
8632 (set reg:X (unop:X expr:Y))
8633 -> (set reg:Y expr:Y) (set reg:X (unop:X reg:Y)).
8635 Finally, we could be called to handle an 'o' constraint by putting
8636 an address into a register. In that case, we first try to do this
8637 with a named pattern of "reload_load_address". If no such pattern
8638 exists, we just emit a SET insn and hope for the best (it will normally
8639 be valid on machines that use 'o').
8641 This entire process is made complex because reload will never
8642 process the insns we generate here and so we must ensure that
8643 they will fit their constraints and also by the fact that parts of
8644 IN might be being reloaded separately and replaced with spill registers.
8645 Because of this, we are, in some sense, just guessing the right approach
8646 here. The one listed above seems to work.
8648 ??? At some point, this whole thing needs to be rethought. */
8650 if (GET_CODE (in) == PLUS
8651 && (REG_P (XEXP (in, 0))
8652 || GET_CODE (XEXP (in, 0)) == SUBREG
8653 || MEM_P (XEXP (in, 0)))
8654 && (REG_P (XEXP (in, 1))
8655 || GET_CODE (XEXP (in, 1)) == SUBREG
8656 || CONSTANT_P (XEXP (in, 1))
8657 || MEM_P (XEXP (in, 1))))
8659 /* We need to compute the sum of a register or a MEM and another
8660 register, constant, or MEM, and put it into the reload
8661 register. The best possible way of doing this is if the machine
8662 has a three-operand ADD insn that accepts the required operands.
8664 The simplest approach is to try to generate such an insn and see if it
8665 is recognized and matches its constraints. If so, it can be used.
8667 It might be better not to actually emit the insn unless it is valid,
8668 but we need to pass the insn as an operand to `recog' and
8669 `extract_insn' and it is simpler to emit and then delete the insn if
8670 not valid than to dummy things up. */
8672 rtx op0, op1, tem;
8673 rtx_insn *insn;
8674 enum insn_code code;
8676 op0 = find_replacement (&XEXP (in, 0));
8677 op1 = find_replacement (&XEXP (in, 1));
8679 /* Since constraint checking is strict, commutativity won't be
8680 checked, so we need to do that here to avoid spurious failure
8681 if the add instruction is two-address and the second operand
8682 of the add is the same as the reload reg, which is frequently
8683 the case. If the insn would be A = B + A, rearrange it so
8684 it will be A = A + B as constrain_operands expects. */
8686 if (REG_P (XEXP (in, 1))
8687 && REGNO (out) == REGNO (XEXP (in, 1)))
8688 tem = op0, op0 = op1, op1 = tem;
8690 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
8691 in = gen_rtx_PLUS (GET_MODE (in), op0, op1);
8693 insn = emit_insn_if_valid_for_reload (gen_rtx_SET (out, in));
8694 if (insn)
8695 return insn;
8697 /* If that failed, we must use a conservative two-insn sequence.
8699 Use a move to copy one operand into the reload register. Prefer
8700 to reload a constant, MEM or pseudo since the move patterns can
8701 handle an arbitrary operand. If OP1 is not a constant, MEM or
8702 pseudo and OP1 is not a valid operand for an add instruction, then
8703 reload OP1.
8705 After reloading one of the operands into the reload register, add
8706 the reload register to the output register.
8708 If there is another way to do this for a specific machine, a
8709 DEFINE_PEEPHOLE should be specified that recognizes the sequence
8710 we emit below. */
8712 code = optab_handler (add_optab, GET_MODE (out));
8714 if (CONSTANT_P (op1) || MEM_P (op1) || GET_CODE (op1) == SUBREG
8715 || (REG_P (op1)
8716 && REGNO (op1) >= FIRST_PSEUDO_REGISTER)
8717 || (code != CODE_FOR_nothing
8718 && !insn_operand_matches (code, 2, op1)))
8719 tem = op0, op0 = op1, op1 = tem;
8721 gen_reload (out, op0, opnum, type);
8723 /* If OP0 and OP1 are the same, we can use OUT for OP1.
8724 This fixes a problem on the 32K where the stack pointer cannot
8725 be used as an operand of an add insn. */
8727 if (rtx_equal_p (op0, op1))
8728 op1 = out;
8730 insn = emit_insn_if_valid_for_reload (gen_add2_insn (out, op1));
8731 if (insn)
8733 /* Add a REG_EQUIV note so that find_equiv_reg can find it. */
8734 set_dst_reg_note (insn, REG_EQUIV, in, out);
8735 return insn;
8738 /* If that failed, copy the address register to the reload register.
8739 Then add the constant to the reload register. */
8741 gcc_assert (!reg_overlap_mentioned_p (out, op0));
8742 gen_reload (out, op1, opnum, type);
8743 insn = emit_insn (gen_add2_insn (out, op0));
8744 set_dst_reg_note (insn, REG_EQUIV, in, out);
8747 #ifdef SECONDARY_MEMORY_NEEDED
8748 /* If we need a memory location to do the move, do it that way. */
8749 else if ((tem1 = replaced_subreg (in), tem2 = replaced_subreg (out),
8750 (REG_P (tem1) && REG_P (tem2)))
8751 && REGNO (tem1) < FIRST_PSEUDO_REGISTER
8752 && REGNO (tem2) < FIRST_PSEUDO_REGISTER
8753 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (tem1)),
8754 REGNO_REG_CLASS (REGNO (tem2)),
8755 GET_MODE (out)))
8757 /* Get the memory to use and rewrite both registers to its mode. */
8758 rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
8760 if (GET_MODE (loc) != GET_MODE (out))
8761 out = gen_rtx_REG (GET_MODE (loc), reg_or_subregno (out));
8763 if (GET_MODE (loc) != GET_MODE (in))
8764 in = gen_rtx_REG (GET_MODE (loc), reg_or_subregno (in));
8766 gen_reload (loc, in, opnum, type);
8767 gen_reload (out, loc, opnum, type);
8769 #endif
8770 else if (REG_P (out) && UNARY_P (in))
8772 rtx insn;
8773 rtx op1;
8774 rtx out_moded;
8775 rtx_insn *set;
8777 op1 = find_replacement (&XEXP (in, 0));
8778 if (op1 != XEXP (in, 0))
8779 in = gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in), op1);
8781 /* First, try a plain SET. */
8782 set = emit_insn_if_valid_for_reload (gen_rtx_SET (out, in));
8783 if (set)
8784 return set;
8786 /* If that failed, move the inner operand to the reload
8787 register, and try the same unop with the inner expression
8788 replaced with the reload register. */
8790 if (GET_MODE (op1) != GET_MODE (out))
8791 out_moded = gen_rtx_REG (GET_MODE (op1), REGNO (out));
8792 else
8793 out_moded = out;
8795 gen_reload (out_moded, op1, opnum, type);
8797 insn = gen_rtx_SET (out, gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in),
8798 out_moded));
8799 insn = emit_insn_if_valid_for_reload (insn);
8800 if (insn)
8802 set_unique_reg_note (insn, REG_EQUIV, in);
8803 return as_a <rtx_insn *> (insn);
8806 fatal_insn ("failure trying to reload:", set);
8808 /* If IN is a simple operand, use gen_move_insn. */
8809 else if (OBJECT_P (in) || GET_CODE (in) == SUBREG)
8811 tem = emit_insn (gen_move_insn (out, in));
8812 /* IN may contain a LABEL_REF, if so add a REG_LABEL_OPERAND note. */
8813 mark_jump_label (in, tem, 0);
8816 #ifdef HAVE_reload_load_address
8817 else if (HAVE_reload_load_address)
8818 emit_insn (gen_reload_load_address (out, in));
8819 #endif
8821 /* Otherwise, just write (set OUT IN) and hope for the best. */
8822 else
8823 emit_insn (gen_rtx_SET (out, in));
8825 /* Return the first insn emitted.
8826 We can not just return get_last_insn, because there may have
8827 been multiple instructions emitted. Also note that gen_move_insn may
8828 emit more than one insn itself, so we can not assume that there is one
8829 insn emitted per emit_insn_before call. */
8831 return last ? NEXT_INSN (last) : get_insns ();
8834 /* Delete a previously made output-reload whose result we now believe
8835 is not needed. First we double-check.
8837 INSN is the insn now being processed.
8838 LAST_RELOAD_REG is the hard register number for which we want to delete
8839 the last output reload.
8840 J is the reload-number that originally used REG. The caller has made
8841 certain that reload J doesn't use REG any longer for input.
8842 NEW_RELOAD_REG is reload register that reload J is using for REG. */
8844 static void
8845 delete_output_reload (rtx_insn *insn, int j, int last_reload_reg,
8846 rtx new_reload_reg)
8848 rtx_insn *output_reload_insn = spill_reg_store[last_reload_reg];
8849 rtx reg = spill_reg_stored_to[last_reload_reg];
8850 int k;
8851 int n_occurrences;
8852 int n_inherited = 0;
8853 rtx substed;
8854 unsigned regno;
8855 int nregs;
8857 /* It is possible that this reload has been only used to set another reload
8858 we eliminated earlier and thus deleted this instruction too. */
8859 if (output_reload_insn->deleted ())
8860 return;
8862 /* Get the raw pseudo-register referred to. */
8864 while (GET_CODE (reg) == SUBREG)
8865 reg = SUBREG_REG (reg);
8866 substed = reg_equiv_memory_loc (REGNO (reg));
8868 /* This is unsafe if the operand occurs more often in the current
8869 insn than it is inherited. */
8870 for (k = n_reloads - 1; k >= 0; k--)
8872 rtx reg2 = rld[k].in;
8873 if (! reg2)
8874 continue;
8875 if (MEM_P (reg2) || reload_override_in[k])
8876 reg2 = rld[k].in_reg;
8877 #ifdef AUTO_INC_DEC
8878 if (rld[k].out && ! rld[k].out_reg)
8879 reg2 = XEXP (rld[k].in_reg, 0);
8880 #endif
8881 while (GET_CODE (reg2) == SUBREG)
8882 reg2 = SUBREG_REG (reg2);
8883 if (rtx_equal_p (reg2, reg))
8885 if (reload_inherited[k] || reload_override_in[k] || k == j)
8886 n_inherited++;
8887 else
8888 return;
8891 n_occurrences = count_occurrences (PATTERN (insn), reg, 0);
8892 if (CALL_P (insn) && CALL_INSN_FUNCTION_USAGE (insn))
8893 n_occurrences += count_occurrences (CALL_INSN_FUNCTION_USAGE (insn),
8894 reg, 0);
8895 if (substed)
8896 n_occurrences += count_occurrences (PATTERN (insn),
8897 eliminate_regs (substed, VOIDmode,
8898 NULL_RTX), 0);
8899 for (rtx i1 = reg_equiv_alt_mem_list (REGNO (reg)); i1; i1 = XEXP (i1, 1))
8901 gcc_assert (!rtx_equal_p (XEXP (i1, 0), substed));
8902 n_occurrences += count_occurrences (PATTERN (insn), XEXP (i1, 0), 0);
8904 if (n_occurrences > n_inherited)
8905 return;
8907 regno = REGNO (reg);
8908 if (regno >= FIRST_PSEUDO_REGISTER)
8909 nregs = 1;
8910 else
8911 nregs = hard_regno_nregs[regno][GET_MODE (reg)];
8913 /* If the pseudo-reg we are reloading is no longer referenced
8914 anywhere between the store into it and here,
8915 and we're within the same basic block, then the value can only
8916 pass through the reload reg and end up here.
8917 Otherwise, give up--return. */
8918 for (rtx_insn *i1 = NEXT_INSN (output_reload_insn);
8919 i1 != insn; i1 = NEXT_INSN (i1))
8921 if (NOTE_INSN_BASIC_BLOCK_P (i1))
8922 return;
8923 if ((NONJUMP_INSN_P (i1) || CALL_P (i1))
8924 && refers_to_regno_p (regno, regno + nregs, PATTERN (i1), NULL))
8926 /* If this is USE in front of INSN, we only have to check that
8927 there are no more references than accounted for by inheritance. */
8928 while (NONJUMP_INSN_P (i1) && GET_CODE (PATTERN (i1)) == USE)
8930 n_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
8931 i1 = NEXT_INSN (i1);
8933 if (n_occurrences <= n_inherited && i1 == insn)
8934 break;
8935 return;
8939 /* We will be deleting the insn. Remove the spill reg information. */
8940 for (k = hard_regno_nregs[last_reload_reg][GET_MODE (reg)]; k-- > 0; )
8942 spill_reg_store[last_reload_reg + k] = 0;
8943 spill_reg_stored_to[last_reload_reg + k] = 0;
8946 /* The caller has already checked that REG dies or is set in INSN.
8947 It has also checked that we are optimizing, and thus some
8948 inaccuracies in the debugging information are acceptable.
8949 So we could just delete output_reload_insn. But in some cases
8950 we can improve the debugging information without sacrificing
8951 optimization - maybe even improving the code: See if the pseudo
8952 reg has been completely replaced with reload regs. If so, delete
8953 the store insn and forget we had a stack slot for the pseudo. */
8954 if (rld[j].out != rld[j].in
8955 && REG_N_DEATHS (REGNO (reg)) == 1
8956 && REG_N_SETS (REGNO (reg)) == 1
8957 && REG_BASIC_BLOCK (REGNO (reg)) >= NUM_FIXED_BLOCKS
8958 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
8960 rtx_insn *i2;
8962 /* We know that it was used only between here and the beginning of
8963 the current basic block. (We also know that the last use before
8964 INSN was the output reload we are thinking of deleting, but never
8965 mind that.) Search that range; see if any ref remains. */
8966 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
8968 rtx set = single_set (i2);
8970 /* Uses which just store in the pseudo don't count,
8971 since if they are the only uses, they are dead. */
8972 if (set != 0 && SET_DEST (set) == reg)
8973 continue;
8974 if (LABEL_P (i2) || JUMP_P (i2))
8975 break;
8976 if ((NONJUMP_INSN_P (i2) || CALL_P (i2))
8977 && reg_mentioned_p (reg, PATTERN (i2)))
8979 /* Some other ref remains; just delete the output reload we
8980 know to be dead. */
8981 delete_address_reloads (output_reload_insn, insn);
8982 delete_insn (output_reload_insn);
8983 return;
8987 /* Delete the now-dead stores into this pseudo. Note that this
8988 loop also takes care of deleting output_reload_insn. */
8989 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
8991 rtx set = single_set (i2);
8993 if (set != 0 && SET_DEST (set) == reg)
8995 delete_address_reloads (i2, insn);
8996 delete_insn (i2);
8998 if (LABEL_P (i2) || JUMP_P (i2))
8999 break;
9002 /* For the debugging info, say the pseudo lives in this reload reg. */
9003 reg_renumber[REGNO (reg)] = REGNO (new_reload_reg);
9004 if (ira_conflicts_p)
9005 /* Inform IRA about the change. */
9006 ira_mark_allocation_change (REGNO (reg));
9007 alter_reg (REGNO (reg), -1, false);
9009 else
9011 delete_address_reloads (output_reload_insn, insn);
9012 delete_insn (output_reload_insn);
9016 /* We are going to delete DEAD_INSN. Recursively delete loads of
9017 reload registers used in DEAD_INSN that are not used till CURRENT_INSN.
9018 CURRENT_INSN is being reloaded, so we have to check its reloads too. */
9019 static void
9020 delete_address_reloads (rtx_insn *dead_insn, rtx_insn *current_insn)
9022 rtx set = single_set (dead_insn);
9023 rtx set2, dst;
9024 rtx_insn *prev, *next;
9025 if (set)
9027 rtx dst = SET_DEST (set);
9028 if (MEM_P (dst))
9029 delete_address_reloads_1 (dead_insn, XEXP (dst, 0), current_insn);
9031 /* If we deleted the store from a reloaded post_{in,de}c expression,
9032 we can delete the matching adds. */
9033 prev = PREV_INSN (dead_insn);
9034 next = NEXT_INSN (dead_insn);
9035 if (! prev || ! next)
9036 return;
9037 set = single_set (next);
9038 set2 = single_set (prev);
9039 if (! set || ! set2
9040 || GET_CODE (SET_SRC (set)) != PLUS || GET_CODE (SET_SRC (set2)) != PLUS
9041 || !CONST_INT_P (XEXP (SET_SRC (set), 1))
9042 || !CONST_INT_P (XEXP (SET_SRC (set2), 1)))
9043 return;
9044 dst = SET_DEST (set);
9045 if (! rtx_equal_p (dst, SET_DEST (set2))
9046 || ! rtx_equal_p (dst, XEXP (SET_SRC (set), 0))
9047 || ! rtx_equal_p (dst, XEXP (SET_SRC (set2), 0))
9048 || (INTVAL (XEXP (SET_SRC (set), 1))
9049 != -INTVAL (XEXP (SET_SRC (set2), 1))))
9050 return;
9051 delete_related_insns (prev);
9052 delete_related_insns (next);
9055 /* Subfunction of delete_address_reloads: process registers found in X. */
9056 static void
9057 delete_address_reloads_1 (rtx_insn *dead_insn, rtx x, rtx_insn *current_insn)
9059 rtx_insn *prev, *i2;
9060 rtx set, dst;
9061 int i, j;
9062 enum rtx_code code = GET_CODE (x);
9064 if (code != REG)
9066 const char *fmt = GET_RTX_FORMAT (code);
9067 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9069 if (fmt[i] == 'e')
9070 delete_address_reloads_1 (dead_insn, XEXP (x, i), current_insn);
9071 else if (fmt[i] == 'E')
9073 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9074 delete_address_reloads_1 (dead_insn, XVECEXP (x, i, j),
9075 current_insn);
9078 return;
9081 if (spill_reg_order[REGNO (x)] < 0)
9082 return;
9084 /* Scan backwards for the insn that sets x. This might be a way back due
9085 to inheritance. */
9086 for (prev = PREV_INSN (dead_insn); prev; prev = PREV_INSN (prev))
9088 code = GET_CODE (prev);
9089 if (code == CODE_LABEL || code == JUMP_INSN)
9090 return;
9091 if (!INSN_P (prev))
9092 continue;
9093 if (reg_set_p (x, PATTERN (prev)))
9094 break;
9095 if (reg_referenced_p (x, PATTERN (prev)))
9096 return;
9098 if (! prev || INSN_UID (prev) < reload_first_uid)
9099 return;
9100 /* Check that PREV only sets the reload register. */
9101 set = single_set (prev);
9102 if (! set)
9103 return;
9104 dst = SET_DEST (set);
9105 if (!REG_P (dst)
9106 || ! rtx_equal_p (dst, x))
9107 return;
9108 if (! reg_set_p (dst, PATTERN (dead_insn)))
9110 /* Check if DST was used in a later insn -
9111 it might have been inherited. */
9112 for (i2 = NEXT_INSN (dead_insn); i2; i2 = NEXT_INSN (i2))
9114 if (LABEL_P (i2))
9115 break;
9116 if (! INSN_P (i2))
9117 continue;
9118 if (reg_referenced_p (dst, PATTERN (i2)))
9120 /* If there is a reference to the register in the current insn,
9121 it might be loaded in a non-inherited reload. If no other
9122 reload uses it, that means the register is set before
9123 referenced. */
9124 if (i2 == current_insn)
9126 for (j = n_reloads - 1; j >= 0; j--)
9127 if ((rld[j].reg_rtx == dst && reload_inherited[j])
9128 || reload_override_in[j] == dst)
9129 return;
9130 for (j = n_reloads - 1; j >= 0; j--)
9131 if (rld[j].in && rld[j].reg_rtx == dst)
9132 break;
9133 if (j >= 0)
9134 break;
9136 return;
9138 if (JUMP_P (i2))
9139 break;
9140 /* If DST is still live at CURRENT_INSN, check if it is used for
9141 any reload. Note that even if CURRENT_INSN sets DST, we still
9142 have to check the reloads. */
9143 if (i2 == current_insn)
9145 for (j = n_reloads - 1; j >= 0; j--)
9146 if ((rld[j].reg_rtx == dst && reload_inherited[j])
9147 || reload_override_in[j] == dst)
9148 return;
9149 /* ??? We can't finish the loop here, because dst might be
9150 allocated to a pseudo in this block if no reload in this
9151 block needs any of the classes containing DST - see
9152 spill_hard_reg. There is no easy way to tell this, so we
9153 have to scan till the end of the basic block. */
9155 if (reg_set_p (dst, PATTERN (i2)))
9156 break;
9159 delete_address_reloads_1 (prev, SET_SRC (set), current_insn);
9160 reg_reloaded_contents[REGNO (dst)] = -1;
9161 delete_insn (prev);
9164 /* Output reload-insns to reload VALUE into RELOADREG.
9165 VALUE is an autoincrement or autodecrement RTX whose operand
9166 is a register or memory location;
9167 so reloading involves incrementing that location.
9168 IN is either identical to VALUE, or some cheaper place to reload from.
9170 INC_AMOUNT is the number to increment or decrement by (always positive).
9171 This cannot be deduced from VALUE. */
9173 static void
9174 inc_for_reload (rtx reloadreg, rtx in, rtx value, int inc_amount)
9176 /* REG or MEM to be copied and incremented. */
9177 rtx incloc = find_replacement (&XEXP (value, 0));
9178 /* Nonzero if increment after copying. */
9179 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC
9180 || GET_CODE (value) == POST_MODIFY);
9181 rtx_insn *last;
9182 rtx inc;
9183 rtx_insn *add_insn;
9184 int code;
9185 rtx real_in = in == value ? incloc : in;
9187 /* No hard register is equivalent to this register after
9188 inc/dec operation. If REG_LAST_RELOAD_REG were nonzero,
9189 we could inc/dec that register as well (maybe even using it for
9190 the source), but I'm not sure it's worth worrying about. */
9191 if (REG_P (incloc))
9192 reg_last_reload_reg[REGNO (incloc)] = 0;
9194 if (GET_CODE (value) == PRE_MODIFY || GET_CODE (value) == POST_MODIFY)
9196 gcc_assert (GET_CODE (XEXP (value, 1)) == PLUS);
9197 inc = find_replacement (&XEXP (XEXP (value, 1), 1));
9199 else
9201 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
9202 inc_amount = -inc_amount;
9204 inc = GEN_INT (inc_amount);
9207 /* If this is post-increment, first copy the location to the reload reg. */
9208 if (post && real_in != reloadreg)
9209 emit_insn (gen_move_insn (reloadreg, real_in));
9211 if (in == value)
9213 /* See if we can directly increment INCLOC. Use a method similar to
9214 that in gen_reload. */
9216 last = get_last_insn ();
9217 add_insn = emit_insn (gen_rtx_SET (incloc,
9218 gen_rtx_PLUS (GET_MODE (incloc),
9219 incloc, inc)));
9221 code = recog_memoized (add_insn);
9222 if (code >= 0)
9224 extract_insn (add_insn);
9225 if (constrain_operands (1, get_enabled_alternatives (add_insn)))
9227 /* If this is a pre-increment and we have incremented the value
9228 where it lives, copy the incremented value to RELOADREG to
9229 be used as an address. */
9231 if (! post)
9232 emit_insn (gen_move_insn (reloadreg, incloc));
9233 return;
9236 delete_insns_since (last);
9239 /* If couldn't do the increment directly, must increment in RELOADREG.
9240 The way we do this depends on whether this is pre- or post-increment.
9241 For pre-increment, copy INCLOC to the reload register, increment it
9242 there, then save back. */
9244 if (! post)
9246 if (in != reloadreg)
9247 emit_insn (gen_move_insn (reloadreg, real_in));
9248 emit_insn (gen_add2_insn (reloadreg, inc));
9249 emit_insn (gen_move_insn (incloc, reloadreg));
9251 else
9253 /* Postincrement.
9254 Because this might be a jump insn or a compare, and because RELOADREG
9255 may not be available after the insn in an input reload, we must do
9256 the incrementation before the insn being reloaded for.
9258 We have already copied IN to RELOADREG. Increment the copy in
9259 RELOADREG, save that back, then decrement RELOADREG so it has
9260 the original value. */
9262 emit_insn (gen_add2_insn (reloadreg, inc));
9263 emit_insn (gen_move_insn (incloc, reloadreg));
9264 if (CONST_INT_P (inc))
9265 emit_insn (gen_add2_insn (reloadreg,
9266 gen_int_mode (-INTVAL (inc),
9267 GET_MODE (reloadreg))));
9268 else
9269 emit_insn (gen_sub2_insn (reloadreg, inc));
9273 #ifdef AUTO_INC_DEC
9274 static void
9275 add_auto_inc_notes (rtx_insn *insn, rtx x)
9277 enum rtx_code code = GET_CODE (x);
9278 const char *fmt;
9279 int i, j;
9281 if (code == MEM && auto_inc_p (XEXP (x, 0)))
9283 add_reg_note (insn, REG_INC, XEXP (XEXP (x, 0), 0));
9284 return;
9287 /* Scan all the operand sub-expressions. */
9288 fmt = GET_RTX_FORMAT (code);
9289 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9291 if (fmt[i] == 'e')
9292 add_auto_inc_notes (insn, XEXP (x, i));
9293 else if (fmt[i] == 'E')
9294 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9295 add_auto_inc_notes (insn, XVECEXP (x, i, j));
9298 #endif