* function.h (struct function): Add can_throw_non_call_exceptions bit.
[official-gcc.git] / gcc / reload1.c
blob85880f8210c0700073d3839adb649df73ff2e099
1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
27 #include "machmode.h"
28 #include "hard-reg-set.h"
29 #include "rtl.h"
30 #include "tm_p.h"
31 #include "obstack.h"
32 #include "insn-config.h"
33 #include "flags.h"
34 #include "function.h"
35 #include "expr.h"
36 #include "optabs.h"
37 #include "regs.h"
38 #include "addresses.h"
39 #include "basic-block.h"
40 #include "df.h"
41 #include "reload.h"
42 #include "recog.h"
43 #include "output.h"
44 #include "toplev.h"
45 #include "except.h"
46 #include "tree.h"
47 #include "ira.h"
48 #include "target.h"
49 #include "emit-rtl.h"
51 /* This file contains the reload pass of the compiler, which is
52 run after register allocation has been done. It checks that
53 each insn is valid (operands required to be in registers really
54 are in registers of the proper class) and fixes up invalid ones
55 by copying values temporarily into registers for the insns
56 that need them.
58 The results of register allocation are described by the vector
59 reg_renumber; the insns still contain pseudo regs, but reg_renumber
60 can be used to find which hard reg, if any, a pseudo reg is in.
62 The technique we always use is to free up a few hard regs that are
63 called ``reload regs'', and for each place where a pseudo reg
64 must be in a hard reg, copy it temporarily into one of the reload regs.
66 Reload regs are allocated locally for every instruction that needs
67 reloads. When there are pseudos which are allocated to a register that
68 has been chosen as a reload reg, such pseudos must be ``spilled''.
69 This means that they go to other hard regs, or to stack slots if no other
70 available hard regs can be found. Spilling can invalidate more
71 insns, requiring additional need for reloads, so we must keep checking
72 until the process stabilizes.
74 For machines with different classes of registers, we must keep track
75 of the register class needed for each reload, and make sure that
76 we allocate enough reload registers of each class.
78 The file reload.c contains the code that checks one insn for
79 validity and reports the reloads that it needs. This file
80 is in charge of scanning the entire rtl code, accumulating the
81 reload needs, spilling, assigning reload registers to use for
82 fixing up each insn, and generating the new insns to copy values
83 into the reload registers. */
85 /* During reload_as_needed, element N contains a REG rtx for the hard reg
86 into which reg N has been reloaded (perhaps for a previous insn). */
87 static rtx *reg_last_reload_reg;
89 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
90 for an output reload that stores into reg N. */
91 static regset_head reg_has_output_reload;
93 /* Indicates which hard regs are reload-registers for an output reload
94 in the current insn. */
95 static HARD_REG_SET reg_is_output_reload;
97 /* Element N is the constant value to which pseudo reg N is equivalent,
98 or zero if pseudo reg N is not equivalent to a constant.
99 find_reloads looks at this in order to replace pseudo reg N
100 with the constant it stands for. */
101 rtx *reg_equiv_constant;
103 /* Element N is an invariant value to which pseudo reg N is equivalent.
104 eliminate_regs_in_insn uses this to replace pseudos in particular
105 contexts. */
106 rtx *reg_equiv_invariant;
108 /* Element N is a memory location to which pseudo reg N is equivalent,
109 prior to any register elimination (such as frame pointer to stack
110 pointer). Depending on whether or not it is a valid address, this value
111 is transferred to either reg_equiv_address or reg_equiv_mem. */
112 rtx *reg_equiv_memory_loc;
114 /* We allocate reg_equiv_memory_loc inside a varray so that the garbage
115 collector can keep track of what is inside. */
116 VEC(rtx,gc) *reg_equiv_memory_loc_vec;
118 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
119 This is used when the address is not valid as a memory address
120 (because its displacement is too big for the machine.) */
121 rtx *reg_equiv_address;
123 /* Element N is the memory slot to which pseudo reg N is equivalent,
124 or zero if pseudo reg N is not equivalent to a memory slot. */
125 rtx *reg_equiv_mem;
127 /* Element N is an EXPR_LIST of REG_EQUIVs containing MEMs with
128 alternate representations of the location of pseudo reg N. */
129 rtx *reg_equiv_alt_mem_list;
131 /* Widest width in which each pseudo reg is referred to (via subreg). */
132 static unsigned int *reg_max_ref_width;
134 /* Element N is the list of insns that initialized reg N from its equivalent
135 constant or memory slot. */
136 rtx *reg_equiv_init;
137 int reg_equiv_init_size;
139 /* Vector to remember old contents of reg_renumber before spilling. */
140 static short *reg_old_renumber;
142 /* During reload_as_needed, element N contains the last pseudo regno reloaded
143 into hard register N. If that pseudo reg occupied more than one register,
144 reg_reloaded_contents points to that pseudo for each spill register in
145 use; all of these must remain set for an inheritance to occur. */
146 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
148 /* During reload_as_needed, element N contains the insn for which
149 hard register N was last used. Its contents are significant only
150 when reg_reloaded_valid is set for this register. */
151 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
153 /* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid. */
154 static HARD_REG_SET reg_reloaded_valid;
155 /* Indicate if the register was dead at the end of the reload.
156 This is only valid if reg_reloaded_contents is set and valid. */
157 static HARD_REG_SET reg_reloaded_dead;
159 /* Indicate whether the register's current value is one that is not
160 safe to retain across a call, even for registers that are normally
161 call-saved. This is only meaningful for members of reg_reloaded_valid. */
162 static HARD_REG_SET reg_reloaded_call_part_clobbered;
164 /* Number of spill-regs so far; number of valid elements of spill_regs. */
165 static int n_spills;
167 /* In parallel with spill_regs, contains REG rtx's for those regs.
168 Holds the last rtx used for any given reg, or 0 if it has never
169 been used for spilling yet. This rtx is reused, provided it has
170 the proper mode. */
171 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
173 /* In parallel with spill_regs, contains nonzero for a spill reg
174 that was stored after the last time it was used.
175 The precise value is the insn generated to do the store. */
176 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
178 /* This is the register that was stored with spill_reg_store. This is a
179 copy of reload_out / reload_out_reg when the value was stored; if
180 reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg. */
181 static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER];
183 /* This table is the inverse mapping of spill_regs:
184 indexed by hard reg number,
185 it contains the position of that reg in spill_regs,
186 or -1 for something that is not in spill_regs.
188 ?!? This is no longer accurate. */
189 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
191 /* This reg set indicates registers that can't be used as spill registers for
192 the currently processed insn. These are the hard registers which are live
193 during the insn, but not allocated to pseudos, as well as fixed
194 registers. */
195 static HARD_REG_SET bad_spill_regs;
197 /* These are the hard registers that can't be used as spill register for any
198 insn. This includes registers used for user variables and registers that
199 we can't eliminate. A register that appears in this set also can't be used
200 to retry register allocation. */
201 static HARD_REG_SET bad_spill_regs_global;
203 /* Describes order of use of registers for reloading
204 of spilled pseudo-registers. `n_spills' is the number of
205 elements that are actually valid; new ones are added at the end.
207 Both spill_regs and spill_reg_order are used on two occasions:
208 once during find_reload_regs, where they keep track of the spill registers
209 for a single insn, but also during reload_as_needed where they show all
210 the registers ever used by reload. For the latter case, the information
211 is calculated during finish_spills. */
212 static short spill_regs[FIRST_PSEUDO_REGISTER];
214 /* This vector of reg sets indicates, for each pseudo, which hard registers
215 may not be used for retrying global allocation because the register was
216 formerly spilled from one of them. If we allowed reallocating a pseudo to
217 a register that it was already allocated to, reload might not
218 terminate. */
219 static HARD_REG_SET *pseudo_previous_regs;
221 /* This vector of reg sets indicates, for each pseudo, which hard
222 registers may not be used for retrying global allocation because they
223 are used as spill registers during one of the insns in which the
224 pseudo is live. */
225 static HARD_REG_SET *pseudo_forbidden_regs;
227 /* All hard regs that have been used as spill registers for any insn are
228 marked in this set. */
229 static HARD_REG_SET used_spill_regs;
231 /* Index of last register assigned as a spill register. We allocate in
232 a round-robin fashion. */
233 static int last_spill_reg;
235 /* Nonzero if indirect addressing is supported on the machine; this means
236 that spilling (REG n) does not require reloading it into a register in
237 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
238 value indicates the level of indirect addressing supported, e.g., two
239 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
240 a hard register. */
241 static char spill_indirect_levels;
243 /* Nonzero if indirect addressing is supported when the innermost MEM is
244 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
245 which these are valid is the same as spill_indirect_levels, above. */
246 char indirect_symref_ok;
248 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
249 char double_reg_address_ok;
251 /* Record the stack slot for each spilled hard register. */
252 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
254 /* Width allocated so far for that stack slot. */
255 static unsigned int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
257 /* Record which pseudos needed to be spilled. */
258 static regset_head spilled_pseudos;
260 /* Record which pseudos changed their allocation in finish_spills. */
261 static regset_head changed_allocation_pseudos;
263 /* Used for communication between order_regs_for_reload and count_pseudo.
264 Used to avoid counting one pseudo twice. */
265 static regset_head pseudos_counted;
267 /* First uid used by insns created by reload in this function.
268 Used in find_equiv_reg. */
269 int reload_first_uid;
271 /* Flag set by local-alloc or global-alloc if anything is live in
272 a call-clobbered reg across calls. */
273 int caller_save_needed;
275 /* Set to 1 while reload_as_needed is operating.
276 Required by some machines to handle any generated moves differently. */
277 int reload_in_progress = 0;
279 /* These arrays record the insn_code of insns that may be needed to
280 perform input and output reloads of special objects. They provide a
281 place to pass a scratch register. */
282 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
283 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
285 /* This obstack is used for allocation of rtl during register elimination.
286 The allocated storage can be freed once find_reloads has processed the
287 insn. */
288 static struct obstack reload_obstack;
290 /* Points to the beginning of the reload_obstack. All insn_chain structures
291 are allocated first. */
292 static char *reload_startobj;
294 /* The point after all insn_chain structures. Used to quickly deallocate
295 memory allocated in copy_reloads during calculate_needs_all_insns. */
296 static char *reload_firstobj;
298 /* This points before all local rtl generated by register elimination.
299 Used to quickly free all memory after processing one insn. */
300 static char *reload_insn_firstobj;
302 /* List of insn_chain instructions, one for every insn that reload needs to
303 examine. */
304 struct insn_chain *reload_insn_chain;
306 /* List of all insns needing reloads. */
307 static struct insn_chain *insns_need_reload;
309 /* This structure is used to record information about register eliminations.
310 Each array entry describes one possible way of eliminating a register
311 in favor of another. If there is more than one way of eliminating a
312 particular register, the most preferred should be specified first. */
314 struct elim_table
316 int from; /* Register number to be eliminated. */
317 int to; /* Register number used as replacement. */
318 HOST_WIDE_INT initial_offset; /* Initial difference between values. */
319 int can_eliminate; /* Nonzero if this elimination can be done. */
320 int can_eliminate_previous; /* Value returned by TARGET_CAN_ELIMINATE
321 target hook in previous scan over insns
322 made by reload. */
323 HOST_WIDE_INT offset; /* Current offset between the two regs. */
324 HOST_WIDE_INT previous_offset;/* Offset at end of previous insn. */
325 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
326 rtx from_rtx; /* REG rtx for the register to be eliminated.
327 We cannot simply compare the number since
328 we might then spuriously replace a hard
329 register corresponding to a pseudo
330 assigned to the reg to be eliminated. */
331 rtx to_rtx; /* REG rtx for the replacement. */
334 static struct elim_table *reg_eliminate = 0;
336 /* This is an intermediate structure to initialize the table. It has
337 exactly the members provided by ELIMINABLE_REGS. */
338 static const struct elim_table_1
340 const int from;
341 const int to;
342 } reg_eliminate_1[] =
344 /* If a set of eliminable registers was specified, define the table from it.
345 Otherwise, default to the normal case of the frame pointer being
346 replaced by the stack pointer. */
348 #ifdef ELIMINABLE_REGS
349 ELIMINABLE_REGS;
350 #else
351 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
352 #endif
354 #define NUM_ELIMINABLE_REGS ARRAY_SIZE (reg_eliminate_1)
356 /* Record the number of pending eliminations that have an offset not equal
357 to their initial offset. If nonzero, we use a new copy of each
358 replacement result in any insns encountered. */
359 int num_not_at_initial_offset;
361 /* Count the number of registers that we may be able to eliminate. */
362 static int num_eliminable;
363 /* And the number of registers that are equivalent to a constant that
364 can be eliminated to frame_pointer / arg_pointer + constant. */
365 static int num_eliminable_invariants;
367 /* For each label, we record the offset of each elimination. If we reach
368 a label by more than one path and an offset differs, we cannot do the
369 elimination. This information is indexed by the difference of the
370 number of the label and the first label number. We can't offset the
371 pointer itself as this can cause problems on machines with segmented
372 memory. The first table is an array of flags that records whether we
373 have yet encountered a label and the second table is an array of arrays,
374 one entry in the latter array for each elimination. */
376 static int first_label_num;
377 static char *offsets_known_at;
378 static HOST_WIDE_INT (*offsets_at)[NUM_ELIMINABLE_REGS];
380 /* Stack of addresses where an rtx has been changed. We can undo the
381 changes by popping items off the stack and restoring the original
382 value at each location.
384 We use this simplistic undo capability rather than copy_rtx as copy_rtx
385 will not make a deep copy of a normally sharable rtx, such as
386 (const (plus (symbol_ref) (const_int))). If such an expression appears
387 as R1 in gen_reload_chain_without_interm_reg_p, then a shared
388 rtx expression would be changed. See PR 42431. */
390 typedef rtx *rtx_p;
391 DEF_VEC_P(rtx_p);
392 DEF_VEC_ALLOC_P(rtx_p,heap);
393 static VEC(rtx_p,heap) *substitute_stack;
395 /* Number of labels in the current function. */
397 static int num_labels;
399 static void replace_pseudos_in (rtx *, enum machine_mode, rtx);
400 static void maybe_fix_stack_asms (void);
401 static void copy_reloads (struct insn_chain *);
402 static void calculate_needs_all_insns (int);
403 static int find_reg (struct insn_chain *, int);
404 static void find_reload_regs (struct insn_chain *);
405 static void select_reload_regs (void);
406 static void delete_caller_save_insns (void);
408 static void spill_failure (rtx, enum reg_class);
409 static void count_spilled_pseudo (int, int, int);
410 static void delete_dead_insn (rtx);
411 static void alter_reg (int, int, bool);
412 static void set_label_offsets (rtx, rtx, int);
413 static void check_eliminable_occurrences (rtx);
414 static void elimination_effects (rtx, enum machine_mode);
415 static int eliminate_regs_in_insn (rtx, int);
416 static void update_eliminable_offsets (void);
417 static void mark_not_eliminable (rtx, const_rtx, void *);
418 static void set_initial_elim_offsets (void);
419 static bool verify_initial_elim_offsets (void);
420 static void set_initial_label_offsets (void);
421 static void set_offsets_for_label (rtx);
422 static void init_elim_table (void);
423 static void update_eliminables (HARD_REG_SET *);
424 static void spill_hard_reg (unsigned int, int);
425 static int finish_spills (int);
426 static void scan_paradoxical_subregs (rtx);
427 static void count_pseudo (int);
428 static void order_regs_for_reload (struct insn_chain *);
429 static void reload_as_needed (int);
430 static void forget_old_reloads_1 (rtx, const_rtx, void *);
431 static void forget_marked_reloads (regset);
432 static int reload_reg_class_lower (const void *, const void *);
433 static void mark_reload_reg_in_use (unsigned int, int, enum reload_type,
434 enum machine_mode);
435 static void clear_reload_reg_in_use (unsigned int, int, enum reload_type,
436 enum machine_mode);
437 static int reload_reg_free_p (unsigned int, int, enum reload_type);
438 static int reload_reg_free_for_value_p (int, int, int, enum reload_type,
439 rtx, rtx, int, int);
440 static int free_for_value_p (int, enum machine_mode, int, enum reload_type,
441 rtx, rtx, int, int);
442 static int reload_reg_reaches_end_p (unsigned int, int, enum reload_type);
443 static int allocate_reload_reg (struct insn_chain *, int, int);
444 static int conflicts_with_override (rtx);
445 static void failed_reload (rtx, int);
446 static int set_reload_reg (int, int);
447 static void choose_reload_regs_init (struct insn_chain *, rtx *);
448 static void choose_reload_regs (struct insn_chain *);
449 static void merge_assigned_reloads (rtx);
450 static void emit_input_reload_insns (struct insn_chain *, struct reload *,
451 rtx, int);
452 static void emit_output_reload_insns (struct insn_chain *, struct reload *,
453 int);
454 static void do_input_reload (struct insn_chain *, struct reload *, int);
455 static void do_output_reload (struct insn_chain *, struct reload *, int);
456 static void emit_reload_insns (struct insn_chain *);
457 static void delete_output_reload (rtx, int, int, rtx);
458 static void delete_address_reloads (rtx, rtx);
459 static void delete_address_reloads_1 (rtx, rtx, rtx);
460 static rtx inc_for_reload (rtx, rtx, rtx, int);
461 #ifdef AUTO_INC_DEC
462 static void add_auto_inc_notes (rtx, rtx);
463 #endif
464 static void substitute (rtx *, const_rtx, rtx);
465 static bool gen_reload_chain_without_interm_reg_p (int, int);
466 static int reloads_conflict (int, int);
467 static rtx gen_reload (rtx, rtx, int, enum reload_type);
468 static rtx emit_insn_if_valid_for_reload (rtx);
470 /* Initialize the reload pass. This is called at the beginning of compilation
471 and may be called again if the target is reinitialized. */
473 void
474 init_reload (void)
476 int i;
478 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
479 Set spill_indirect_levels to the number of levels such addressing is
480 permitted, zero if it is not permitted at all. */
482 rtx tem
483 = gen_rtx_MEM (Pmode,
484 gen_rtx_PLUS (Pmode,
485 gen_rtx_REG (Pmode,
486 LAST_VIRTUAL_REGISTER + 1),
487 GEN_INT (4)));
488 spill_indirect_levels = 0;
490 while (memory_address_p (QImode, tem))
492 spill_indirect_levels++;
493 tem = gen_rtx_MEM (Pmode, tem);
496 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
498 tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
499 indirect_symref_ok = memory_address_p (QImode, tem);
501 /* See if reg+reg is a valid (and offsettable) address. */
503 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
505 tem = gen_rtx_PLUS (Pmode,
506 gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
507 gen_rtx_REG (Pmode, i));
509 /* This way, we make sure that reg+reg is an offsettable address. */
510 tem = plus_constant (tem, 4);
512 if (memory_address_p (QImode, tem))
514 double_reg_address_ok = 1;
515 break;
519 /* Initialize obstack for our rtl allocation. */
520 gcc_obstack_init (&reload_obstack);
521 reload_startobj = XOBNEWVAR (&reload_obstack, char, 0);
523 INIT_REG_SET (&spilled_pseudos);
524 INIT_REG_SET (&changed_allocation_pseudos);
525 INIT_REG_SET (&pseudos_counted);
528 /* List of insn chains that are currently unused. */
529 static struct insn_chain *unused_insn_chains = 0;
531 /* Allocate an empty insn_chain structure. */
532 struct insn_chain *
533 new_insn_chain (void)
535 struct insn_chain *c;
537 if (unused_insn_chains == 0)
539 c = XOBNEW (&reload_obstack, struct insn_chain);
540 INIT_REG_SET (&c->live_throughout);
541 INIT_REG_SET (&c->dead_or_set);
543 else
545 c = unused_insn_chains;
546 unused_insn_chains = c->next;
548 c->is_caller_save_insn = 0;
549 c->need_operand_change = 0;
550 c->need_reload = 0;
551 c->need_elim = 0;
552 return c;
555 /* Small utility function to set all regs in hard reg set TO which are
556 allocated to pseudos in regset FROM. */
558 void
559 compute_use_by_pseudos (HARD_REG_SET *to, regset from)
561 unsigned int regno;
562 reg_set_iterator rsi;
564 EXECUTE_IF_SET_IN_REG_SET (from, FIRST_PSEUDO_REGISTER, regno, rsi)
566 int r = reg_renumber[regno];
568 if (r < 0)
570 /* reload_combine uses the information from DF_LIVE_IN,
571 which might still contain registers that have not
572 actually been allocated since they have an
573 equivalence. */
574 gcc_assert (ira_conflicts_p || reload_completed);
576 else
577 add_to_hard_reg_set (to, PSEUDO_REGNO_MODE (regno), r);
581 /* Replace all pseudos found in LOC with their corresponding
582 equivalences. */
584 static void
585 replace_pseudos_in (rtx *loc, enum machine_mode mem_mode, rtx usage)
587 rtx x = *loc;
588 enum rtx_code code;
589 const char *fmt;
590 int i, j;
592 if (! x)
593 return;
595 code = GET_CODE (x);
596 if (code == REG)
598 unsigned int regno = REGNO (x);
600 if (regno < FIRST_PSEUDO_REGISTER)
601 return;
603 x = eliminate_regs (x, mem_mode, usage);
604 if (x != *loc)
606 *loc = x;
607 replace_pseudos_in (loc, mem_mode, usage);
608 return;
611 if (reg_equiv_constant[regno])
612 *loc = reg_equiv_constant[regno];
613 else if (reg_equiv_mem[regno])
614 *loc = reg_equiv_mem[regno];
615 else if (reg_equiv_address[regno])
616 *loc = gen_rtx_MEM (GET_MODE (x), reg_equiv_address[regno]);
617 else
619 gcc_assert (!REG_P (regno_reg_rtx[regno])
620 || REGNO (regno_reg_rtx[regno]) != regno);
621 *loc = regno_reg_rtx[regno];
624 return;
626 else if (code == MEM)
628 replace_pseudos_in (& XEXP (x, 0), GET_MODE (x), usage);
629 return;
632 /* Process each of our operands recursively. */
633 fmt = GET_RTX_FORMAT (code);
634 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
635 if (*fmt == 'e')
636 replace_pseudos_in (&XEXP (x, i), mem_mode, usage);
637 else if (*fmt == 'E')
638 for (j = 0; j < XVECLEN (x, i); j++)
639 replace_pseudos_in (& XVECEXP (x, i, j), mem_mode, usage);
642 /* Determine if the current function has an exception receiver block
643 that reaches the exit block via non-exceptional edges */
645 static bool
646 has_nonexceptional_receiver (void)
648 edge e;
649 edge_iterator ei;
650 basic_block *tos, *worklist, bb;
652 /* If we're not optimizing, then just err on the safe side. */
653 if (!optimize)
654 return true;
656 /* First determine which blocks can reach exit via normal paths. */
657 tos = worklist = XNEWVEC (basic_block, n_basic_blocks + 1);
659 FOR_EACH_BB (bb)
660 bb->flags &= ~BB_REACHABLE;
662 /* Place the exit block on our worklist. */
663 EXIT_BLOCK_PTR->flags |= BB_REACHABLE;
664 *tos++ = EXIT_BLOCK_PTR;
666 /* Iterate: find everything reachable from what we've already seen. */
667 while (tos != worklist)
669 bb = *--tos;
671 FOR_EACH_EDGE (e, ei, bb->preds)
672 if (!(e->flags & EDGE_ABNORMAL))
674 basic_block src = e->src;
676 if (!(src->flags & BB_REACHABLE))
678 src->flags |= BB_REACHABLE;
679 *tos++ = src;
683 free (worklist);
685 /* Now see if there's a reachable block with an exceptional incoming
686 edge. */
687 FOR_EACH_BB (bb)
688 if (bb->flags & BB_REACHABLE)
689 FOR_EACH_EDGE (e, ei, bb->preds)
690 if (e->flags & EDGE_ABNORMAL)
691 return true;
693 /* No exceptional block reached exit unexceptionally. */
694 return false;
698 /* Global variables used by reload and its subroutines. */
700 /* Set during calculate_needs if an insn needs register elimination. */
701 static int something_needs_elimination;
702 /* Set during calculate_needs if an insn needs an operand changed. */
703 static int something_needs_operands_changed;
704 /* Set by alter_regs if we spilled a register to the stack. */
705 static bool something_was_spilled;
707 /* Nonzero means we couldn't get enough spill regs. */
708 static int failure;
710 /* Temporary array of pseudo-register number. */
711 static int *temp_pseudo_reg_arr;
713 /* Main entry point for the reload pass.
715 FIRST is the first insn of the function being compiled.
717 GLOBAL nonzero means we were called from global_alloc
718 and should attempt to reallocate any pseudoregs that we
719 displace from hard regs we will use for reloads.
720 If GLOBAL is zero, we do not have enough information to do that,
721 so any pseudo reg that is spilled must go to the stack.
723 Return value is nonzero if reload failed
724 and we must not do any more for this function. */
727 reload (rtx first, int global)
729 int i, n;
730 rtx insn;
731 struct elim_table *ep;
732 basic_block bb;
734 /* Make sure even insns with volatile mem refs are recognizable. */
735 init_recog ();
737 failure = 0;
739 reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
741 /* Make sure that the last insn in the chain
742 is not something that needs reloading. */
743 emit_note (NOTE_INSN_DELETED);
745 /* Enable find_equiv_reg to distinguish insns made by reload. */
746 reload_first_uid = get_max_uid ();
748 #ifdef SECONDARY_MEMORY_NEEDED
749 /* Initialize the secondary memory table. */
750 clear_secondary_mem ();
751 #endif
753 /* We don't have a stack slot for any spill reg yet. */
754 memset (spill_stack_slot, 0, sizeof spill_stack_slot);
755 memset (spill_stack_slot_width, 0, sizeof spill_stack_slot_width);
757 /* Initialize the save area information for caller-save, in case some
758 are needed. */
759 init_save_areas ();
761 /* Compute which hard registers are now in use
762 as homes for pseudo registers.
763 This is done here rather than (eg) in global_alloc
764 because this point is reached even if not optimizing. */
765 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
766 mark_home_live (i);
768 /* A function that has a nonlocal label that can reach the exit
769 block via non-exceptional paths must save all call-saved
770 registers. */
771 if (cfun->has_nonlocal_label
772 && has_nonexceptional_receiver ())
773 crtl->saves_all_registers = 1;
775 if (crtl->saves_all_registers)
776 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
777 if (! call_used_regs[i] && ! fixed_regs[i] && ! LOCAL_REGNO (i))
778 df_set_regs_ever_live (i, true);
780 /* Find all the pseudo registers that didn't get hard regs
781 but do have known equivalent constants or memory slots.
782 These include parameters (known equivalent to parameter slots)
783 and cse'd or loop-moved constant memory addresses.
785 Record constant equivalents in reg_equiv_constant
786 so they will be substituted by find_reloads.
787 Record memory equivalents in reg_mem_equiv so they can
788 be substituted eventually by altering the REG-rtx's. */
790 reg_equiv_constant = XCNEWVEC (rtx, max_regno);
791 reg_equiv_invariant = XCNEWVEC (rtx, max_regno);
792 reg_equiv_mem = XCNEWVEC (rtx, max_regno);
793 reg_equiv_alt_mem_list = XCNEWVEC (rtx, max_regno);
794 reg_equiv_address = XCNEWVEC (rtx, max_regno);
795 reg_max_ref_width = XCNEWVEC (unsigned int, max_regno);
796 reg_old_renumber = XCNEWVEC (short, max_regno);
797 memcpy (reg_old_renumber, reg_renumber, max_regno * sizeof (short));
798 pseudo_forbidden_regs = XNEWVEC (HARD_REG_SET, max_regno);
799 pseudo_previous_regs = XCNEWVEC (HARD_REG_SET, max_regno);
801 CLEAR_HARD_REG_SET (bad_spill_regs_global);
803 /* Look for REG_EQUIV notes; record what each pseudo is equivalent
804 to. Also find all paradoxical subregs and find largest such for
805 each pseudo. */
807 num_eliminable_invariants = 0;
808 for (insn = first; insn; insn = NEXT_INSN (insn))
810 rtx set = single_set (insn);
812 /* We may introduce USEs that we want to remove at the end, so
813 we'll mark them with QImode. Make sure there are no
814 previously-marked insns left by say regmove. */
815 if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
816 && GET_MODE (insn) != VOIDmode)
817 PUT_MODE (insn, VOIDmode);
819 if (NONDEBUG_INSN_P (insn))
820 scan_paradoxical_subregs (PATTERN (insn));
822 if (set != 0 && REG_P (SET_DEST (set)))
824 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
825 rtx x;
827 if (! note)
828 continue;
830 i = REGNO (SET_DEST (set));
831 x = XEXP (note, 0);
833 if (i <= LAST_VIRTUAL_REGISTER)
834 continue;
836 if (! function_invariant_p (x)
837 || ! flag_pic
838 /* A function invariant is often CONSTANT_P but may
839 include a register. We promise to only pass
840 CONSTANT_P objects to LEGITIMATE_PIC_OPERAND_P. */
841 || (CONSTANT_P (x)
842 && LEGITIMATE_PIC_OPERAND_P (x)))
844 /* It can happen that a REG_EQUIV note contains a MEM
845 that is not a legitimate memory operand. As later
846 stages of reload assume that all addresses found
847 in the reg_equiv_* arrays were originally legitimate,
848 we ignore such REG_EQUIV notes. */
849 if (memory_operand (x, VOIDmode))
851 /* Always unshare the equivalence, so we can
852 substitute into this insn without touching the
853 equivalence. */
854 reg_equiv_memory_loc[i] = copy_rtx (x);
856 else if (function_invariant_p (x))
858 if (GET_CODE (x) == PLUS)
860 /* This is PLUS of frame pointer and a constant,
861 and might be shared. Unshare it. */
862 reg_equiv_invariant[i] = copy_rtx (x);
863 num_eliminable_invariants++;
865 else if (x == frame_pointer_rtx || x == arg_pointer_rtx)
867 reg_equiv_invariant[i] = x;
868 num_eliminable_invariants++;
870 else if (LEGITIMATE_CONSTANT_P (x))
871 reg_equiv_constant[i] = x;
872 else
874 reg_equiv_memory_loc[i]
875 = force_const_mem (GET_MODE (SET_DEST (set)), x);
876 if (! reg_equiv_memory_loc[i])
877 reg_equiv_init[i] = NULL_RTX;
880 else
882 reg_equiv_init[i] = NULL_RTX;
883 continue;
886 else
887 reg_equiv_init[i] = NULL_RTX;
891 if (dump_file)
892 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
893 if (reg_equiv_init[i])
895 fprintf (dump_file, "init_insns for %u: ", i);
896 print_inline_rtx (dump_file, reg_equiv_init[i], 20);
897 fprintf (dump_file, "\n");
900 init_elim_table ();
902 first_label_num = get_first_label_num ();
903 num_labels = max_label_num () - first_label_num;
905 /* Allocate the tables used to store offset information at labels. */
906 /* We used to use alloca here, but the size of what it would try to
907 allocate would occasionally cause it to exceed the stack limit and
908 cause a core dump. */
909 offsets_known_at = XNEWVEC (char, num_labels);
910 offsets_at = (HOST_WIDE_INT (*)[NUM_ELIMINABLE_REGS]) xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (HOST_WIDE_INT));
912 /* Alter each pseudo-reg rtx to contain its hard reg number. Assign
913 stack slots to the pseudos that lack hard regs or equivalents.
914 Do not touch virtual registers. */
916 temp_pseudo_reg_arr = XNEWVEC (int, max_regno - LAST_VIRTUAL_REGISTER - 1);
917 for (n = 0, i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
918 temp_pseudo_reg_arr[n++] = i;
920 if (ira_conflicts_p)
921 /* Ask IRA to order pseudo-registers for better stack slot
922 sharing. */
923 ira_sort_regnos_for_alter_reg (temp_pseudo_reg_arr, n, reg_max_ref_width);
925 for (i = 0; i < n; i++)
926 alter_reg (temp_pseudo_reg_arr[i], -1, false);
928 /* If we have some registers we think can be eliminated, scan all insns to
929 see if there is an insn that sets one of these registers to something
930 other than itself plus a constant. If so, the register cannot be
931 eliminated. Doing this scan here eliminates an extra pass through the
932 main reload loop in the most common case where register elimination
933 cannot be done. */
934 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
935 if (INSN_P (insn))
936 note_stores (PATTERN (insn), mark_not_eliminable, NULL);
938 maybe_fix_stack_asms ();
940 insns_need_reload = 0;
941 something_needs_elimination = 0;
943 /* Initialize to -1, which means take the first spill register. */
944 last_spill_reg = -1;
946 /* Spill any hard regs that we know we can't eliminate. */
947 CLEAR_HARD_REG_SET (used_spill_regs);
948 /* There can be multiple ways to eliminate a register;
949 they should be listed adjacently.
950 Elimination for any register fails only if all possible ways fail. */
951 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; )
953 int from = ep->from;
954 int can_eliminate = 0;
957 can_eliminate |= ep->can_eliminate;
958 ep++;
960 while (ep < &reg_eliminate[NUM_ELIMINABLE_REGS] && ep->from == from);
961 if (! can_eliminate)
962 spill_hard_reg (from, 1);
965 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
966 if (frame_pointer_needed)
967 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, 1);
968 #endif
969 finish_spills (global);
971 /* From now on, we may need to generate moves differently. We may also
972 allow modifications of insns which cause them to not be recognized.
973 Any such modifications will be cleaned up during reload itself. */
974 reload_in_progress = 1;
976 /* This loop scans the entire function each go-round
977 and repeats until one repetition spills no additional hard regs. */
978 for (;;)
980 int something_changed;
981 int did_spill;
982 HOST_WIDE_INT starting_frame_size;
984 starting_frame_size = get_frame_size ();
985 something_was_spilled = false;
987 set_initial_elim_offsets ();
988 set_initial_label_offsets ();
990 /* For each pseudo register that has an equivalent location defined,
991 try to eliminate any eliminable registers (such as the frame pointer)
992 assuming initial offsets for the replacement register, which
993 is the normal case.
995 If the resulting location is directly addressable, substitute
996 the MEM we just got directly for the old REG.
998 If it is not addressable but is a constant or the sum of a hard reg
999 and constant, it is probably not addressable because the constant is
1000 out of range, in that case record the address; we will generate
1001 hairy code to compute the address in a register each time it is
1002 needed. Similarly if it is a hard register, but one that is not
1003 valid as an address register.
1005 If the location is not addressable, but does not have one of the
1006 above forms, assign a stack slot. We have to do this to avoid the
1007 potential of producing lots of reloads if, e.g., a location involves
1008 a pseudo that didn't get a hard register and has an equivalent memory
1009 location that also involves a pseudo that didn't get a hard register.
1011 Perhaps at some point we will improve reload_when_needed handling
1012 so this problem goes away. But that's very hairy. */
1014 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1015 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
1017 rtx x = eliminate_regs (reg_equiv_memory_loc[i], VOIDmode,
1018 NULL_RTX);
1020 if (strict_memory_address_addr_space_p
1021 (GET_MODE (regno_reg_rtx[i]), XEXP (x, 0),
1022 MEM_ADDR_SPACE (x)))
1023 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
1024 else if (CONSTANT_P (XEXP (x, 0))
1025 || (REG_P (XEXP (x, 0))
1026 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
1027 || (GET_CODE (XEXP (x, 0)) == PLUS
1028 && REG_P (XEXP (XEXP (x, 0), 0))
1029 && (REGNO (XEXP (XEXP (x, 0), 0))
1030 < FIRST_PSEUDO_REGISTER)
1031 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
1032 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
1033 else
1035 /* Make a new stack slot. Then indicate that something
1036 changed so we go back and recompute offsets for
1037 eliminable registers because the allocation of memory
1038 below might change some offset. reg_equiv_{mem,address}
1039 will be set up for this pseudo on the next pass around
1040 the loop. */
1041 reg_equiv_memory_loc[i] = 0;
1042 reg_equiv_init[i] = 0;
1043 alter_reg (i, -1, true);
1047 if (caller_save_needed)
1048 setup_save_areas ();
1050 /* If we allocated another stack slot, redo elimination bookkeeping. */
1051 if (something_was_spilled || starting_frame_size != get_frame_size ())
1052 continue;
1053 if (starting_frame_size && crtl->stack_alignment_needed)
1055 /* If we have a stack frame, we must align it now. The
1056 stack size may be a part of the offset computation for
1057 register elimination. So if this changes the stack size,
1058 then repeat the elimination bookkeeping. We don't
1059 realign when there is no stack, as that will cause a
1060 stack frame when none is needed should
1061 STARTING_FRAME_OFFSET not be already aligned to
1062 STACK_BOUNDARY. */
1063 assign_stack_local (BLKmode, 0, crtl->stack_alignment_needed);
1064 if (starting_frame_size != get_frame_size ())
1065 continue;
1068 if (caller_save_needed)
1070 save_call_clobbered_regs ();
1071 /* That might have allocated new insn_chain structures. */
1072 reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
1075 calculate_needs_all_insns (global);
1077 if (! ira_conflicts_p)
1078 /* Don't do it for IRA. We need this info because we don't
1079 change live_throughout and dead_or_set for chains when IRA
1080 is used. */
1081 CLEAR_REG_SET (&spilled_pseudos);
1083 did_spill = 0;
1085 something_changed = 0;
1087 /* If we allocated any new memory locations, make another pass
1088 since it might have changed elimination offsets. */
1089 if (something_was_spilled || starting_frame_size != get_frame_size ())
1090 something_changed = 1;
1092 /* Even if the frame size remained the same, we might still have
1093 changed elimination offsets, e.g. if find_reloads called
1094 force_const_mem requiring the back end to allocate a constant
1095 pool base register that needs to be saved on the stack. */
1096 else if (!verify_initial_elim_offsets ())
1097 something_changed = 1;
1100 HARD_REG_SET to_spill;
1101 CLEAR_HARD_REG_SET (to_spill);
1102 update_eliminables (&to_spill);
1103 AND_COMPL_HARD_REG_SET (used_spill_regs, to_spill);
1105 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1106 if (TEST_HARD_REG_BIT (to_spill, i))
1108 spill_hard_reg (i, 1);
1109 did_spill = 1;
1111 /* Regardless of the state of spills, if we previously had
1112 a register that we thought we could eliminate, but now can
1113 not eliminate, we must run another pass.
1115 Consider pseudos which have an entry in reg_equiv_* which
1116 reference an eliminable register. We must make another pass
1117 to update reg_equiv_* so that we do not substitute in the
1118 old value from when we thought the elimination could be
1119 performed. */
1120 something_changed = 1;
1124 select_reload_regs ();
1125 if (failure)
1126 goto failed;
1128 if (insns_need_reload != 0 || did_spill)
1129 something_changed |= finish_spills (global);
1131 if (! something_changed)
1132 break;
1134 if (caller_save_needed)
1135 delete_caller_save_insns ();
1137 obstack_free (&reload_obstack, reload_firstobj);
1140 /* If global-alloc was run, notify it of any register eliminations we have
1141 done. */
1142 if (global)
1143 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1144 if (ep->can_eliminate)
1145 mark_elimination (ep->from, ep->to);
1147 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1148 If that insn didn't set the register (i.e., it copied the register to
1149 memory), just delete that insn instead of the equivalencing insn plus
1150 anything now dead. If we call delete_dead_insn on that insn, we may
1151 delete the insn that actually sets the register if the register dies
1152 there and that is incorrect. */
1154 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1156 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0)
1158 rtx list;
1159 for (list = reg_equiv_init[i]; list; list = XEXP (list, 1))
1161 rtx equiv_insn = XEXP (list, 0);
1163 /* If we already deleted the insn or if it may trap, we can't
1164 delete it. The latter case shouldn't happen, but can
1165 if an insn has a variable address, gets a REG_EH_REGION
1166 note added to it, and then gets converted into a load
1167 from a constant address. */
1168 if (NOTE_P (equiv_insn)
1169 || can_throw_internal (equiv_insn))
1171 else if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn)))
1172 delete_dead_insn (equiv_insn);
1173 else
1174 SET_INSN_DELETED (equiv_insn);
1179 /* Use the reload registers where necessary
1180 by generating move instructions to move the must-be-register
1181 values into or out of the reload registers. */
1183 if (insns_need_reload != 0 || something_needs_elimination
1184 || something_needs_operands_changed)
1186 HOST_WIDE_INT old_frame_size = get_frame_size ();
1188 reload_as_needed (global);
1190 gcc_assert (old_frame_size == get_frame_size ());
1192 gcc_assert (verify_initial_elim_offsets ());
1195 /* If we were able to eliminate the frame pointer, show that it is no
1196 longer live at the start of any basic block. If it ls live by
1197 virtue of being in a pseudo, that pseudo will be marked live
1198 and hence the frame pointer will be known to be live via that
1199 pseudo. */
1201 if (! frame_pointer_needed)
1202 FOR_EACH_BB (bb)
1203 bitmap_clear_bit (df_get_live_in (bb), HARD_FRAME_POINTER_REGNUM);
1205 /* Come here (with failure set nonzero) if we can't get enough spill
1206 regs. */
1207 failed:
1209 CLEAR_REG_SET (&changed_allocation_pseudos);
1210 CLEAR_REG_SET (&spilled_pseudos);
1211 reload_in_progress = 0;
1213 /* Now eliminate all pseudo regs by modifying them into
1214 their equivalent memory references.
1215 The REG-rtx's for the pseudos are modified in place,
1216 so all insns that used to refer to them now refer to memory.
1218 For a reg that has a reg_equiv_address, all those insns
1219 were changed by reloading so that no insns refer to it any longer;
1220 but the DECL_RTL of a variable decl may refer to it,
1221 and if so this causes the debugging info to mention the variable. */
1223 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1225 rtx addr = 0;
1227 if (reg_equiv_mem[i])
1228 addr = XEXP (reg_equiv_mem[i], 0);
1230 if (reg_equiv_address[i])
1231 addr = reg_equiv_address[i];
1233 if (addr)
1235 if (reg_renumber[i] < 0)
1237 rtx reg = regno_reg_rtx[i];
1239 REG_USERVAR_P (reg) = 0;
1240 PUT_CODE (reg, MEM);
1241 XEXP (reg, 0) = addr;
1242 if (reg_equiv_memory_loc[i])
1243 MEM_COPY_ATTRIBUTES (reg, reg_equiv_memory_loc[i]);
1244 else
1246 MEM_IN_STRUCT_P (reg) = MEM_SCALAR_P (reg) = 0;
1247 MEM_ATTRS (reg) = 0;
1249 MEM_NOTRAP_P (reg) = 1;
1251 else if (reg_equiv_mem[i])
1252 XEXP (reg_equiv_mem[i], 0) = addr;
1255 /* We don't want complex addressing modes in debug insns
1256 if simpler ones will do, so delegitimize equivalences
1257 in debug insns. */
1258 if (MAY_HAVE_DEBUG_INSNS && reg_renumber[i] < 0)
1260 rtx reg = regno_reg_rtx[i];
1261 rtx equiv = 0;
1262 df_ref use, next;
1264 if (reg_equiv_constant[i])
1265 equiv = reg_equiv_constant[i];
1266 else if (reg_equiv_invariant[i])
1267 equiv = reg_equiv_invariant[i];
1268 else if (reg && MEM_P (reg))
1269 equiv = targetm.delegitimize_address (reg);
1270 else if (reg && REG_P (reg) && (int)REGNO (reg) != i)
1271 equiv = reg;
1273 if (equiv == reg)
1274 continue;
1276 for (use = DF_REG_USE_CHAIN (i); use; use = next)
1278 insn = DF_REF_INSN (use);
1280 /* Make sure the next ref is for a different instruction,
1281 so that we're not affected by the rescan. */
1282 next = DF_REF_NEXT_REG (use);
1283 while (next && DF_REF_INSN (next) == insn)
1284 next = DF_REF_NEXT_REG (next);
1286 if (DEBUG_INSN_P (insn))
1288 if (!equiv)
1290 INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
1291 df_insn_rescan_debug_internal (insn);
1293 else
1294 INSN_VAR_LOCATION_LOC (insn)
1295 = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (insn),
1296 reg, equiv);
1302 /* We must set reload_completed now since the cleanup_subreg_operands call
1303 below will re-recognize each insn and reload may have generated insns
1304 which are only valid during and after reload. */
1305 reload_completed = 1;
1307 /* Make a pass over all the insns and delete all USEs which we inserted
1308 only to tag a REG_EQUAL note on them. Remove all REG_DEAD and REG_UNUSED
1309 notes. Delete all CLOBBER insns, except those that refer to the return
1310 value and the special mem:BLK CLOBBERs added to prevent the scheduler
1311 from misarranging variable-array code, and simplify (subreg (reg))
1312 operands. Strip and regenerate REG_INC notes that may have been moved
1313 around. */
1315 for (insn = first; insn; insn = NEXT_INSN (insn))
1316 if (INSN_P (insn))
1318 rtx *pnote;
1320 if (CALL_P (insn))
1321 replace_pseudos_in (& CALL_INSN_FUNCTION_USAGE (insn),
1322 VOIDmode, CALL_INSN_FUNCTION_USAGE (insn));
1324 if ((GET_CODE (PATTERN (insn)) == USE
1325 /* We mark with QImode USEs introduced by reload itself. */
1326 && (GET_MODE (insn) == QImode
1327 || find_reg_note (insn, REG_EQUAL, NULL_RTX)))
1328 || (GET_CODE (PATTERN (insn)) == CLOBBER
1329 && (!MEM_P (XEXP (PATTERN (insn), 0))
1330 || GET_MODE (XEXP (PATTERN (insn), 0)) != BLKmode
1331 || (GET_CODE (XEXP (XEXP (PATTERN (insn), 0), 0)) != SCRATCH
1332 && XEXP (XEXP (PATTERN (insn), 0), 0)
1333 != stack_pointer_rtx))
1334 && (!REG_P (XEXP (PATTERN (insn), 0))
1335 || ! REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))))
1337 delete_insn (insn);
1338 continue;
1341 /* Some CLOBBERs may survive until here and still reference unassigned
1342 pseudos with const equivalent, which may in turn cause ICE in later
1343 passes if the reference remains in place. */
1344 if (GET_CODE (PATTERN (insn)) == CLOBBER)
1345 replace_pseudos_in (& XEXP (PATTERN (insn), 0),
1346 VOIDmode, PATTERN (insn));
1348 /* Discard obvious no-ops, even without -O. This optimization
1349 is fast and doesn't interfere with debugging. */
1350 if (NONJUMP_INSN_P (insn)
1351 && GET_CODE (PATTERN (insn)) == SET
1352 && REG_P (SET_SRC (PATTERN (insn)))
1353 && REG_P (SET_DEST (PATTERN (insn)))
1354 && (REGNO (SET_SRC (PATTERN (insn)))
1355 == REGNO (SET_DEST (PATTERN (insn)))))
1357 delete_insn (insn);
1358 continue;
1361 pnote = &REG_NOTES (insn);
1362 while (*pnote != 0)
1364 if (REG_NOTE_KIND (*pnote) == REG_DEAD
1365 || REG_NOTE_KIND (*pnote) == REG_UNUSED
1366 || REG_NOTE_KIND (*pnote) == REG_INC)
1367 *pnote = XEXP (*pnote, 1);
1368 else
1369 pnote = &XEXP (*pnote, 1);
1372 #ifdef AUTO_INC_DEC
1373 add_auto_inc_notes (insn, PATTERN (insn));
1374 #endif
1376 /* Simplify (subreg (reg)) if it appears as an operand. */
1377 cleanup_subreg_operands (insn);
1379 /* Clean up invalid ASMs so that they don't confuse later passes.
1380 See PR 21299. */
1381 if (asm_noperands (PATTERN (insn)) >= 0)
1383 extract_insn (insn);
1384 if (!constrain_operands (1))
1386 error_for_asm (insn,
1387 "%<asm%> operand has impossible constraints");
1388 delete_insn (insn);
1389 continue;
1394 /* If we are doing generic stack checking, give a warning if this
1395 function's frame size is larger than we expect. */
1396 if (flag_stack_check == GENERIC_STACK_CHECK)
1398 HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
1399 static int verbose_warned = 0;
1401 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1402 if (df_regs_ever_live_p (i) && ! fixed_regs[i] && call_used_regs[i])
1403 size += UNITS_PER_WORD;
1405 if (size > STACK_CHECK_MAX_FRAME_SIZE)
1407 warning (0, "frame size too large for reliable stack checking");
1408 if (! verbose_warned)
1410 warning (0, "try reducing the number of local variables");
1411 verbose_warned = 1;
1416 /* Indicate that we no longer have known memory locations or constants. */
1417 if (reg_equiv_constant)
1418 free (reg_equiv_constant);
1419 if (reg_equiv_invariant)
1420 free (reg_equiv_invariant);
1421 reg_equiv_constant = 0;
1422 reg_equiv_invariant = 0;
1423 VEC_free (rtx, gc, reg_equiv_memory_loc_vec);
1424 reg_equiv_memory_loc = 0;
1426 free (temp_pseudo_reg_arr);
1428 if (offsets_known_at)
1429 free (offsets_known_at);
1430 if (offsets_at)
1431 free (offsets_at);
1433 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1434 if (reg_equiv_alt_mem_list[i])
1435 free_EXPR_LIST_list (&reg_equiv_alt_mem_list[i]);
1436 free (reg_equiv_alt_mem_list);
1438 free (reg_equiv_mem);
1439 reg_equiv_init = 0;
1440 free (reg_equiv_address);
1441 free (reg_max_ref_width);
1442 free (reg_old_renumber);
1443 free (pseudo_previous_regs);
1444 free (pseudo_forbidden_regs);
1446 CLEAR_HARD_REG_SET (used_spill_regs);
1447 for (i = 0; i < n_spills; i++)
1448 SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
1450 /* Free all the insn_chain structures at once. */
1451 obstack_free (&reload_obstack, reload_startobj);
1452 unused_insn_chains = 0;
1453 fixup_abnormal_edges ();
1455 /* Replacing pseudos with their memory equivalents might have
1456 created shared rtx. Subsequent passes would get confused
1457 by this, so unshare everything here. */
1458 unshare_all_rtl_again (first);
1460 #ifdef STACK_BOUNDARY
1461 /* init_emit has set the alignment of the hard frame pointer
1462 to STACK_BOUNDARY. It is very likely no longer valid if
1463 the hard frame pointer was used for register allocation. */
1464 if (!frame_pointer_needed)
1465 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = BITS_PER_UNIT;
1466 #endif
1468 VEC_free (rtx_p, heap, substitute_stack);
1470 return failure;
1473 /* Yet another special case. Unfortunately, reg-stack forces people to
1474 write incorrect clobbers in asm statements. These clobbers must not
1475 cause the register to appear in bad_spill_regs, otherwise we'll call
1476 fatal_insn later. We clear the corresponding regnos in the live
1477 register sets to avoid this.
1478 The whole thing is rather sick, I'm afraid. */
1480 static void
1481 maybe_fix_stack_asms (void)
1483 #ifdef STACK_REGS
1484 const char *constraints[MAX_RECOG_OPERANDS];
1485 enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
1486 struct insn_chain *chain;
/* Walk every insn left on the reload chain, looking for asm statements.  */
1488 for (chain = reload_insn_chain; chain != 0; chain = chain->next)
1490 int i, noperands;
1491 HARD_REG_SET clobbered, allowed;
1492 rtx pat;
/* Only asm insns are of interest here; asm_noperands returns a
   negative value for anything that is not an asm.  */
1494 if (! INSN_P (chain->insn)
1495 || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
1496 continue;
1497 pat = PATTERN (chain->insn);
/* Clobbers appear as extra elements of a PARALLEL; an asm whose body
   is not a PARALLEL has no clobbers to fix up.  */
1498 if (GET_CODE (pat) != PARALLEL)
1499 continue;
1501 CLEAR_HARD_REG_SET (clobbered);
1502 CLEAR_HARD_REG_SET (allowed);
1504 /* First, make a mask of all stack regs that are clobbered. */
1505 for (i = 0; i < XVECLEN (pat, 0); i++)
1507 rtx t = XVECEXP (pat, 0, i);
1508 if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
1509 SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
1512 /* Get the operand values and constraints out of the insn. */
1513 decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc,
1514 constraints, operand_mode, NULL);
1516 /* For every operand, see what registers are allowed. */
1517 for (i = 0; i < noperands; i++)
1519 const char *p = constraints[i];
1520 /* For every alternative, we compute the class of registers allowed
1521 for reloading in CLS, and merge its contents into the reg set
1522 ALLOWED. */
1523 int cls = (int) NO_REGS;
1525 for (;;)
1527 char c = *p;
1529 if (c == '\0' || c == ',' || c == '#')
1531 /* End of one alternative - mark the regs in the current
1532 class, and reset the class. */
1533 IOR_HARD_REG_SET (allowed, reg_class_contents[cls]);
1534 cls = NO_REGS;
1535 p++;
/* '#' disparages the whole alternative, so skip the rest of it.  */
1536 if (c == '#')
1537 do {
1538 c = *p++;
1539 } while (c != '\0' && c != ',');
1540 if (c == '\0')
1541 break;
1542 continue;
/* Accumulate the register class implied by each constraint letter.  */
1545 switch (c)
/* These constraint letters say nothing about register classes.  */
1547 case '=': case '+': case '*': case '%': case '?': case '!':
1548 case '0': case '1': case '2': case '3': case '4': case '<':
1549 case '>': case 'V': case 'o': case '&': case 'E': case 'F':
1550 case 's': case 'i': case 'n': case 'X': case 'I': case 'J':
1551 case 'K': case 'L': case 'M': case 'N': case 'O': case 'P':
1552 case TARGET_MEM_CONSTRAINT:
1553 break;
1555 case 'p':
1556 cls = (int) reg_class_subunion[cls]
1557 [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
1558 break;
1560 case 'g':
1561 case 'r':
1562 cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
1563 break;
1565 default:
1566 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
1567 cls = (int) reg_class_subunion[cls]
1568 [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
1569 else
1570 cls = (int) reg_class_subunion[cls]
1571 [(int) REG_CLASS_FROM_CONSTRAINT (c, p)];
/* Constraints may be multi-character; advance by the full length.  */
1573 p += CONSTRAINT_LEN (c, p);
1576 /* Those of the registers which are clobbered, but allowed by the
1577 constraints, must be usable as reload registers. So clear them
1578 out of the life information. */
1579 AND_HARD_REG_SET (allowed, clobbered);
1580 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1581 if (TEST_HARD_REG_BIT (allowed, i))
1583 CLEAR_REGNO_REG_SET (&chain->live_throughout, i);
1584 CLEAR_REGNO_REG_SET (&chain->dead_or_set, i);
1588 #endif
1591 /* Copy the global variables n_reloads and rld into the corresponding elts
1592 of CHAIN. */
1593 static void
1594 copy_reloads (struct insn_chain *chain)
1596 chain->n_reloads = n_reloads;
1597 chain->rld = XOBNEWVEC (&reload_obstack, struct reload, n_reloads);
1598 memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1599 reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
1602 /* Walk the chain of insns, and determine for each whether it needs reloads
1603 and/or eliminations. Build the corresponding insns_need_reload list, and
1604 set something_needs_elimination as appropriate. */
1605 static void
1606 calculate_needs_all_insns (int global)
1608 struct insn_chain **pprev_reload = &insns_need_reload;
1609 struct insn_chain *chain, *next = 0;
1611 something_needs_elimination = 0;
1613 reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
/* NEXT is saved up front because the current chain element may be
   unlinked and recycled inside the loop.  */
1614 for (chain = reload_insn_chain; chain != 0; chain = next)
1616 rtx insn = chain->insn;
1618 next = chain->next;
1620 /* Clear out the shortcuts. */
1621 chain->n_reloads = 0;
1622 chain->need_elim = 0;
1623 chain->need_reload = 0;
1624 chain->need_operand_change = 0;
1626 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
1627 include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
1628 what effects this has on the known offsets at labels. */
1630 if (LABEL_P (insn) || JUMP_P (insn)
1631 || (INSN_P (insn) && REG_NOTES (insn) != 0))
1632 set_label_offsets (insn, insn, 0);
1634 if (INSN_P (insn))
/* Save the insn's body so it can be restored if elimination was
   done only tentatively (see "Discard any register replacements"
   below).  */
1636 rtx old_body = PATTERN (insn);
1637 int old_code = INSN_CODE (insn);
1638 rtx old_notes = REG_NOTES (insn);
1639 int did_elimination = 0;
1640 int operands_changed = 0;
1641 rtx set = single_set (insn);
1643 /* Skip insns that only set an equivalence. */
1644 if (set && REG_P (SET_DEST (set))
1645 && reg_renumber[REGNO (SET_DEST (set))] < 0
1646 && (reg_equiv_constant[REGNO (SET_DEST (set))]
1647 || (reg_equiv_invariant[REGNO (SET_DEST (set))]))
1648 && reg_equiv_init[REGNO (SET_DEST (set))])
1649 continue;
1651 /* If needed, eliminate any eliminable registers. */
1652 if (num_eliminable || num_eliminable_invariants)
1653 did_elimination = eliminate_regs_in_insn (insn, 0);
1655 /* Analyze the instruction. */
1656 operands_changed = find_reloads (insn, 0, spill_indirect_levels,
1657 global, spill_reg_order);
1659 /* If a no-op set needs more than one reload, this is likely
1660 to be something that needs input address reloads. We
1661 can't get rid of this cleanly later, and it is of no use
1662 anyway, so discard it now.
1663 We only do this when expensive_optimizations is enabled,
1664 since this complements reload inheritance / output
1665 reload deletion, and it can make debugging harder. */
1666 if (flag_expensive_optimizations && n_reloads > 1)
1668 rtx set = single_set (insn);
/* The insn is deleted when it is a no-op move: either a pseudo
   copied to itself, or two hard-reg-less pseudos sharing the same
   equivalent memory location.  NOTE(review): the "&&" joining SET
   with the condition below sits on a line (original 1670) that is
   not visible in this rendering -- confirm against upstream.  */
1669 if (set
1671 ((SET_SRC (set) == SET_DEST (set)
1672 && REG_P (SET_SRC (set))
1673 && REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER)
1674 || (REG_P (SET_SRC (set)) && REG_P (SET_DEST (set))
1675 && reg_renumber[REGNO (SET_SRC (set))] < 0
1676 && reg_renumber[REGNO (SET_DEST (set))] < 0
1677 && reg_equiv_memory_loc[REGNO (SET_SRC (set))] != NULL
1678 && reg_equiv_memory_loc[REGNO (SET_DEST (set))] != NULL
1679 && rtx_equal_p (reg_equiv_memory_loc
1680 [REGNO (SET_SRC (set))],
1681 reg_equiv_memory_loc
1682 [REGNO (SET_DEST (set))]))))
1684 if (ira_conflicts_p)
1685 /* Inform IRA about the insn deletion. */
1686 ira_mark_memory_move_deletion (REGNO (SET_DEST (set)),
1687 REGNO (SET_SRC (set)));
1688 delete_insn (insn);
1689 /* Delete it from the reload chain. */
1690 if (chain->prev)
1691 chain->prev->next = next;
1692 else
1693 reload_insn_chain = next;
1694 if (next)
1695 next->prev = chain->prev;
1696 chain->next = unused_insn_chains;
1697 unused_insn_chains = chain;
1698 continue;
1701 if (num_eliminable)
1702 update_eliminable_offsets ();
1704 /* Remember for later shortcuts which insns had any reloads or
1705 register eliminations. */
1706 chain->need_elim = did_elimination;
1707 chain->need_reload = n_reloads > 0;
1708 chain->need_operand_change = operands_changed;
1710 /* Discard any register replacements done. */
1711 if (did_elimination)
/* Elimination was only a trial: put back the original body, code
   and notes; the real substitution happens in reload_as_needed.  */
1713 obstack_free (&reload_obstack, reload_insn_firstobj)
1714 PATTERN (insn) = old_body;
1715 INSN_CODE (insn) = old_code;
1716 REG_NOTES (insn) = old_notes;
1717 something_needs_elimination = 1;
1720 something_needs_operands_changed |= operands_changed;
1722 if (n_reloads != 0)
/* Append this chain element to the insns_need_reload list.  */
1724 copy_reloads (chain);
1725 *pprev_reload = chain;
1726 pprev_reload = &chain->next_need_reload;
1730 *pprev_reload = 0;
1733 /* Comparison function for qsort to decide which of two reloads
1734 should be handled first. *P1 and *P2 are the reload numbers. */
1736 static int
1737 reload_reg_class_lower (const void *r1p, const void *r2p)
1739 int r1 = *(const short *) r1p, r2 = *(const short *) r2p;
1740 int t;
1742 /* Consider required reloads before optional ones. */
1743 t = rld[r1].optional - rld[r2].optional;
1744 if (t != 0)
1745 return t;
1747 /* Count all solitary classes before non-solitary ones. */
1748 t = ((reg_class_size[(int) rld[r2].rclass] == 1)
1749 - (reg_class_size[(int) rld[r1].rclass] == 1));
1750 if (t != 0)
1751 return t;
1753 /* Aside from solitaires, consider all multi-reg groups first. */
1754 t = rld[r2].nregs - rld[r1].nregs;
1755 if (t != 0)
1756 return t;
1758 /* Consider reloads in order of increasing reg-class number. */
1759 t = (int) rld[r1].rclass - (int) rld[r2].rclass;
1760 if (t != 0)
1761 return t;
1763 /* If reloads are equally urgent, sort by reload number,
1764 so that the results of qsort leave nothing to chance. */
1765 return r1 - r2;
1768 /* The cost of spilling each hard reg. */
1769 static int spill_cost[FIRST_PSEUDO_REGISTER];
1771 /* When spilling multiple hard registers, we use SPILL_COST for the first
1772 spilled hard reg and SPILL_ADD_COST for subsequent regs. SPILL_ADD_COST
1773 is incremented only for the first hard reg of a multi-reg pseudo. */
1774 static int spill_add_cost[FIRST_PSEUDO_REGISTER];
1776 /* Map of hard regno to pseudo regno currently occupying the hard
1777 reg. */
1778 static int hard_regno_to_pseudo_regno[FIRST_PSEUDO_REGISTER];
1780 /* Update the spill cost arrays, considering that pseudo REG is live. */
1782 static void
1783 count_pseudo (int reg)
1785 int freq = REG_FREQ (reg);
1786 int r = reg_renumber[reg];
1787 int nregs;
1789 if (REGNO_REG_SET_P (&pseudos_counted, reg)
1790 || REGNO_REG_SET_P (&spilled_pseudos, reg)
1791 /* Ignore spilled pseudo-registers which can be here only if IRA
1792 is used. */
1793 || (ira_conflicts_p && r < 0))
1794 return;
1796 SET_REGNO_REG_SET (&pseudos_counted, reg);
1798 gcc_assert (r >= 0);
1800 spill_add_cost[r] += freq;
1801 nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
1802 while (nregs-- > 0)
1804 hard_regno_to_pseudo_regno[r + nregs] = reg;
1805 spill_cost[r + nregs] += freq;
1809 /* Calculate the SPILL_COST and SPILL_ADD_COST arrays and determine the
1810 contents of BAD_SPILL_REGS for the insn described by CHAIN. */
1812 static void
1813 order_regs_for_reload (struct insn_chain *chain)
1815 unsigned i;
1816 HARD_REG_SET used_by_pseudos;
1817 HARD_REG_SET used_by_pseudos2;
1818 reg_set_iterator rsi;
1820 COPY_HARD_REG_SET (bad_spill_regs, fixed_reg_set);
1822 memset (spill_cost, 0, sizeof spill_cost);
1823 memset (spill_add_cost, 0, sizeof spill_add_cost);
1824 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1825 hard_regno_to_pseudo_regno[i] = -1;
1827 /* Count number of uses of each hard reg by pseudo regs allocated to it
1828 and then order them by decreasing use. First exclude hard registers
1829 that are live in or across this insn. */
1831 REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
1832 REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
1833 IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos);
1834 IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos2);
1836 /* Now find out which pseudos are allocated to it, and update
1837 hard_reg_n_uses. */
1838 CLEAR_REG_SET (&pseudos_counted);
1840 EXECUTE_IF_SET_IN_REG_SET
1841 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
1843 count_pseudo (i);
1845 EXECUTE_IF_SET_IN_REG_SET
1846 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
1848 count_pseudo (i);
1850 CLEAR_REG_SET (&pseudos_counted);
1853 /* Vector of reload-numbers showing the order in which the reloads should
1854 be processed. */
1855 static short reload_order[MAX_RELOADS];
1857 /* This is used to keep track of the spill regs used in one insn. */
1858 static HARD_REG_SET used_spill_regs_local;
/* We decided to spill hard register SPILLED, which has a size of
   SPILLED_NREGS.  Determine how pseudo REG, which is live during the insn,
   is affected.  We will add it to SPILLED_PSEUDOS if necessary, and we will
   update SPILL_COST/SPILL_ADD_COST.  */

static void
count_spilled_pseudo (int spilled, int spilled_nregs, int reg)
{
  int freq = REG_FREQ (reg);
  int r = reg_renumber[reg];
  int nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];

  /* Ignore spilled pseudo-registers which can be here only if IRA is
     used.  Also bail out if REG was already counted, or if its hard-reg
     range [r, r+nregs) does not overlap [spilled, spilled+spilled_nregs).  */
  if ((ira_conflicts_p && r < 0)
      || REGNO_REG_SET_P (&spilled_pseudos, reg)
      || spilled + spilled_nregs <= r || r + nregs <= spilled)
    return;

  SET_REGNO_REG_SET (&spilled_pseudos, reg);

  /* Undo the contribution count_pseudo made for this pseudo, since it
     is now being spilled along with the chosen hard register.  */
  spill_add_cost[r] -= freq;
  while (nregs-- > 0)
    {
      hard_regno_to_pseudo_regno[r + nregs] = -1;
      spill_cost[r + nregs] -= freq;
    }
}
/* Find reload register to use for reload number ORDER.

   CHAIN describes the insn being reloaded.  Returns 1 and fills in
   rl->regno / rl->nregs on success; returns 0 if no suitable hard
   register could be found.  Side effects: marks affected pseudos in
   spilled_pseudos (via count_spilled_pseudo) and records the chosen
   registers in used_spill_regs_local.  */

static int
find_reg (struct insn_chain *chain, int order)
{
  int rnum = reload_order[order];
  struct reload *rl = rld + rnum;
  int best_cost = INT_MAX;
  int best_reg = -1;
  unsigned int i, j, n;
  int k;
  HARD_REG_SET not_usable;
  HARD_REG_SET used_by_other_reload;
  reg_set_iterator rsi;
  static int regno_pseudo_regs[FIRST_PSEUDO_REGISTER];
  static int best_regno_pseudo_regs[FIRST_PSEUDO_REGISTER];

  /* A register is unusable if it is globally or locally bad, or not in
     the class this reload requires.  */
  COPY_HARD_REG_SET (not_usable, bad_spill_regs);
  IOR_HARD_REG_SET (not_usable, bad_spill_regs_global);
  IOR_COMPL_HARD_REG_SET (not_usable, reg_class_contents[rl->rclass]);

  /* Collect the registers already taken by earlier, conflicting reloads
     of this insn.  */
  CLEAR_HARD_REG_SET (used_by_other_reload);
  for (k = 0; k < order; k++)
    {
      int other = reload_order[k];

      if (rld[other].regno >= 0 && reloads_conflict (other, rnum))
        for (j = 0; j < rld[other].nregs; j++)
          SET_HARD_REG_BIT (used_by_other_reload, rld[other].regno + j);
    }

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
#ifdef REG_ALLOC_ORDER
      unsigned int regno = reg_alloc_order[i];
#else
      unsigned int regno = i;
#endif

      if (! TEST_HARD_REG_BIT (not_usable, regno)
          && ! TEST_HARD_REG_BIT (used_by_other_reload, regno)
          && HARD_REGNO_MODE_OK (regno, rl->mode))
        {
          int this_cost = spill_cost[regno];
          int ok = 1;
          unsigned int this_nregs = hard_regno_nregs[regno][rl->mode];

          /* A multi-register value must have every covered register
             usable; accumulate the extra cost of the trailing regs.  */
          for (j = 1; j < this_nregs; j++)
            {
              this_cost += spill_add_cost[regno + j];
              if ((TEST_HARD_REG_BIT (not_usable, regno + j))
                  || TEST_HARD_REG_BIT (used_by_other_reload, regno + j))
                ok = 0;
            }
          if (! ok)
            continue;

          if (ira_conflicts_p)
            {
              /* Ask IRA to find a better pseudo-register for
                 spilling.  */
              for (n = j = 0; j < this_nregs; j++)
                {
                  int r = hard_regno_to_pseudo_regno[regno + j];

                  if (r < 0)
                    continue;
                  if (n == 0 || regno_pseudo_regs[n - 1] != r)
                    regno_pseudo_regs[n++] = r;
                }
              regno_pseudo_regs[n++] = -1;
              if (best_reg < 0
                  || ira_better_spill_reload_regno_p (regno_pseudo_regs,
                                                      best_regno_pseudo_regs,
                                                      rl->in, rl->out,
                                                      chain->insn))
                {
                  best_reg = regno;
                  /* Remember the pseudo list (terminated by -1) that
                     made this register the best so far.  */
                  for (j = 0;; j++)
                    {
                      best_regno_pseudo_regs[j] = regno_pseudo_regs[j];
                      if (regno_pseudo_regs[j] < 0)
                        break;
                    }
                }
              continue;
            }

          /* Slightly prefer a register that already holds the reload's
             input or output value.  */
          if (rl->in && REG_P (rl->in) && REGNO (rl->in) == regno)
            this_cost--;
          if (rl->out && REG_P (rl->out) && REGNO (rl->out) == regno)
            this_cost--;
          if (this_cost < best_cost
              /* Among registers with equal cost, prefer caller-saved ones, or
                 use REG_ALLOC_ORDER if it is defined.  */
              || (this_cost == best_cost
#ifdef REG_ALLOC_ORDER
                  && (inv_reg_alloc_order[regno]
                      < inv_reg_alloc_order[best_reg])
#else
                  && call_used_regs[regno]
                  && ! call_used_regs[best_reg]
#endif
                  ))
            {
              best_reg = regno;
              best_cost = this_cost;
            }
        }
    }
  if (best_reg == -1)
    return 0;

  if (dump_file)
    fprintf (dump_file, "Using reg %d for reload %d\n", best_reg, rnum);

  rl->nregs = hard_regno_nregs[best_reg][rl->mode];
  rl->regno = best_reg;

  /* Account for every live pseudo displaced by this choice.  */
  EXECUTE_IF_SET_IN_REG_SET
    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, j, rsi)
    {
      count_spilled_pseudo (best_reg, rl->nregs, j);
    }

  EXECUTE_IF_SET_IN_REG_SET
    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, j, rsi)
    {
      count_spilled_pseudo (best_reg, rl->nregs, j);
    }

  for (i = 0; i < rl->nregs; i++)
    {
      /* count_spilled_pseudo must have cleared all bookkeeping for the
         registers we just took.  */
      gcc_assert (spill_cost[best_reg + i] == 0);
      gcc_assert (spill_add_cost[best_reg + i] == 0);
      gcc_assert (hard_regno_to_pseudo_regno[best_reg + i] == -1);
      SET_HARD_REG_BIT (used_spill_regs_local, best_reg + i);
    }

  return 1;
}
/* Find more reload regs to satisfy the remaining need of an insn, which
   is given by CHAIN.
   Do it by ascending class number, since otherwise a reg
   might be spilled for a big class and might fail to count
   for a smaller class even though it belongs to that class.

   On failure, sets the file-scope flag `failure' and returns early
   after reporting via spill_failure.  */

static void
find_reload_regs (struct insn_chain *chain)
{
  int i;

  /* In order to be certain of getting the registers we need,
     we must sort the reloads into order of increasing register class.
     Then our grabbing of reload registers will parallel the process
     that provided the reload registers.  */
  for (i = 0; i < chain->n_reloads; i++)
    {
      /* Show whether this reload already has a hard reg.  */
      if (chain->rld[i].reg_rtx)
        {
          int regno = REGNO (chain->rld[i].reg_rtx);
          chain->rld[i].regno = regno;
          chain->rld[i].nregs
            = hard_regno_nregs[regno][GET_MODE (chain->rld[i].reg_rtx)];
        }
      else
        chain->rld[i].regno = -1;
      reload_order[i] = i;
    }

  /* Work on a copy in the global rld array, which find_reg operates on.  */
  n_reloads = chain->n_reloads;
  memcpy (rld, chain->rld, n_reloads * sizeof (struct reload));

  CLEAR_HARD_REG_SET (used_spill_regs_local);

  if (dump_file)
    fprintf (dump_file, "Spilling for insn %d.\n", INSN_UID (chain->insn));

  qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);

  /* Compute the order of preference for hard registers to spill.  */

  order_regs_for_reload (chain);

  for (i = 0; i < n_reloads; i++)
    {
      int r = reload_order[i];

      /* Ignore reloads that got marked inoperative.  */
      if ((rld[r].out != 0 || rld[r].in != 0 || rld[r].secondary_p)
          && ! rld[r].optional
          && rld[r].regno == -1)
        if (! find_reg (chain, i))
          {
            if (dump_file)
              fprintf (dump_file, "reload failure for reload %d\n", r);
            spill_failure (chain->insn, rld[r].rclass);
            failure = 1;
            return;
          }
    }

  COPY_HARD_REG_SET (chain->used_spill_regs, used_spill_regs_local);
  IOR_HARD_REG_SET (used_spill_regs, used_spill_regs_local);

  /* Copy the (possibly updated) reloads back into the chain.  */
  memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
}
2098 static void
2099 select_reload_regs (void)
2101 struct insn_chain *chain;
2103 /* Try to satisfy the needs for each insn. */
2104 for (chain = insns_need_reload; chain != 0;
2105 chain = chain->next_need_reload)
2106 find_reload_regs (chain);
/* Delete all insns that were inserted by emit_caller_save_insns during
   this iteration.

   Removes each caller-save chain element from reload_insn_chain,
   deletes its insn from the rtl stream, and returns the chain node to
   the unused_insn_chains free list.  */
static void
delete_caller_save_insns (void)
{
  struct insn_chain *c = reload_insn_chain;

  while (c != 0)
    {
      /* Unlink and recycle every consecutive caller-save element.  */
      while (c != 0 && c->is_caller_save_insn)
        {
          struct insn_chain *next = c->next;
          rtx insn = c->insn;

          /* Keep the list head valid if we are deleting the first node.  */
          if (c == reload_insn_chain)
            reload_insn_chain = next;
          delete_insn (insn);

          if (next)
            next->prev = c->prev;
          if (c->prev)
            c->prev->next = next;
          /* Push the node onto the free list for reuse.  */
          c->next = unused_insn_chains;
          unused_insn_chains = c;
          c = next;
        }
      if (c != 0)
        c = c->next;
    }
}
/* Handle the failure to find a register to spill.
   INSN should be one of the insns which needed this particular spill reg.

   For an asm statement we only emit an error (the user's constraint is
   at fault); otherwise this is an internal error and we abort via
   fatal_insn after dumping the reloads.  */

static void
spill_failure (rtx insn, enum reg_class rclass)
{
  if (asm_noperands (PATTERN (insn)) >= 0)
    error_for_asm (insn, "can't find a register in class %qs while "
                   "reloading %<asm%>",
                   reg_class_names[rclass]);
  else
    {
      error ("unable to find a register to spill in class %qs",
             reg_class_names[rclass]);

      if (dump_file)
        {
          fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
          debug_reload_to_stream (dump_file);
        }
      fatal_insn ("this is the insn:", insn);
    }
}
/* Delete an unneeded INSN and any previous insns who sole purpose is loading
   data that is dead in INSN.

   Recurses backwards: if the previous real insn is a single SET of a
   register that dies in INSN and has no other side effects, it is
   deleted too.  */

static void
delete_dead_insn (rtx insn)
{
  rtx prev = prev_real_insn (insn);
  rtx prev_dest;

  /* If the previous insn sets a register that dies in our insn, delete it
     too.  */
  if (prev && GET_CODE (PATTERN (prev)) == SET
      && (prev_dest = SET_DEST (PATTERN (prev)), REG_P (prev_dest))
      && reg_mentioned_p (prev_dest, PATTERN (insn))
      && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
      /* Do not delete a SET whose source has side effects (e.g. a
         volatile access or autoincrement).  */
      && ! side_effects_p (SET_SRC (PATTERN (prev))))
    delete_dead_insn (prev);

  SET_INSN_DELETED (insn);
}
/* Modify the home of pseudo-reg I.
   The new home is present in reg_renumber[I].

   FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
   or it may be -1, meaning there is none or it is not relevant.
   This is used so that all pseudos spilled from a given hard reg
   can share one stack slot.

   DONT_SHARE_P suppresses stack-slot sharing (both the IRA reuse path
   and the spill_stack_slot path).  */

static void
alter_reg (int i, int from_reg, bool dont_share_p)
{
  /* When outputting an inline function, this can happen
     for a reg that isn't actually used.  */
  if (regno_reg_rtx[i] == 0)
    return;

  /* If the reg got changed to a MEM at rtl-generation time,
     ignore it.  */
  if (!REG_P (regno_reg_rtx[i]))
    return;

  /* Modify the reg-rtx to contain the new hard reg
     number or else to contain its pseudo reg number.  */
  SET_REGNO (regno_reg_rtx[i],
             reg_renumber[i] >= 0 ? reg_renumber[i] : i);

  /* If we have a pseudo that is needed but has no hard reg or equivalent,
     allocate a stack slot for it.  */

  if (reg_renumber[i] < 0
      && REG_N_REFS (i) > 0
      && reg_equiv_constant[i] == 0
      && (reg_equiv_invariant[i] == 0 || reg_equiv_init[i] == 0)
      && reg_equiv_memory_loc[i] == 0)
    {
      rtx x = NULL_RTX;
      enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
      unsigned int inherent_size = PSEUDO_REGNO_BYTES (i);
      unsigned int inherent_align = GET_MODE_ALIGNMENT (mode);
      /* Total size must also cover paradoxical-subreg references.  */
      unsigned int total_size = MAX (inherent_size, reg_max_ref_width[i]);
      unsigned int min_align = reg_max_ref_width[i] * BITS_PER_UNIT;
      int adjust = 0;

      something_was_spilled = true;

      if (ira_conflicts_p)
        {
          /* Mark the spill for IRA.  */
          SET_REGNO_REG_SET (&spilled_pseudos, i);
          if (!dont_share_p)
            x = ira_reuse_stack_slot (i, inherent_size, total_size);
        }

      /* If IRA supplied a reusable slot, nothing more to find.  */
      if (x)
        ;

      /* Each pseudo reg has an inherent size which comes from its own mode,
         and a total size which provides room for paradoxical subregs
         which refer to the pseudo reg in wider modes.

         We can use a slot already allocated if it provides both
         enough inherent space and enough total space.
         Otherwise, we allocate a new slot, making sure that it has no less
         inherent space, and no less total space, then the previous slot.  */
      else if (from_reg == -1 || (!dont_share_p && ira_conflicts_p))
        {
          rtx stack_slot;

          /* No known place to spill from => no slot to reuse.  */
          x = assign_stack_local (mode, total_size,
                                  min_align > inherent_align
                                  || total_size > inherent_size ? -1 : 0);

          stack_slot = x;

          /* Cancel the big-endian correction done in assign_stack_local.
             Get the address of the beginning of the slot.  This is so we
             can do a big-endian correction unconditionally below.  */
          if (BYTES_BIG_ENDIAN)
            {
              adjust = inherent_size - total_size;
              if (adjust)
                stack_slot
                  = adjust_address_nv (x, mode_for_size (total_size
                                                         * BITS_PER_UNIT,
                                                         MODE_INT, 1),
                                       adjust);
            }

          if (! dont_share_p && ira_conflicts_p)
            /* Inform IRA about allocation a new stack slot.  */
            ira_mark_new_stack_slot (stack_slot, i, total_size);
        }

      /* Reuse a stack slot if possible.  */
      else if (spill_stack_slot[from_reg] != 0
               && spill_stack_slot_width[from_reg] >= total_size
               && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
                   >= inherent_size)
               && MEM_ALIGN (spill_stack_slot[from_reg]) >= min_align)
        x = spill_stack_slot[from_reg];

      /* Allocate a bigger slot.  */
      else
        {
          /* Compute maximum size needed, both for inherent size
             and for total size.  */
          rtx stack_slot;

          if (spill_stack_slot[from_reg])
            {
              if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
                  > inherent_size)
                mode = GET_MODE (spill_stack_slot[from_reg]);
              if (spill_stack_slot_width[from_reg] > total_size)
                total_size = spill_stack_slot_width[from_reg];
              if (MEM_ALIGN (spill_stack_slot[from_reg]) > min_align)
                min_align = MEM_ALIGN (spill_stack_slot[from_reg]);
            }

          /* Make a slot with that size.  */
          x = assign_stack_local (mode, total_size,
                                  min_align > inherent_align
                                  || total_size > inherent_size ? -1 : 0);
          stack_slot = x;

          /* Cancel the big-endian correction done in assign_stack_local.
             Get the address of the beginning of the slot.  This is so we
             can do a big-endian correction unconditionally below.  */
          if (BYTES_BIG_ENDIAN)
            {
              adjust = GET_MODE_SIZE (mode) - total_size;
              if (adjust)
                stack_slot
                  = adjust_address_nv (x, mode_for_size (total_size
                                                         * BITS_PER_UNIT,
                                                         MODE_INT, 1),
                                       adjust);
            }

          spill_stack_slot[from_reg] = stack_slot;
          spill_stack_slot_width[from_reg] = total_size;
        }

      /* On a big endian machine, the "address" of the slot
         is the address of the low part that fits its inherent mode.  */
      if (BYTES_BIG_ENDIAN && inherent_size < total_size)
        adjust += (total_size - inherent_size);

      /* If we have any adjustment to make, or if the stack slot is the
         wrong mode, make a new stack slot.  */
      x = adjust_address_nv (x, GET_MODE (regno_reg_rtx[i]), adjust);

      /* Set all of the memory attributes as appropriate for a spill.  */
      set_mem_attrs_for_spill (x);

      /* Save the stack slot for later.  */
      reg_equiv_memory_loc[i] = x;
    }
}
2346 /* Mark the slots in regs_ever_live for the hard regs used by
2347 pseudo-reg number REGNO, accessed in MODE. */
2349 static void
2350 mark_home_live_1 (int regno, enum machine_mode mode)
2352 int i, lim;
2354 i = reg_renumber[regno];
2355 if (i < 0)
2356 return;
2357 lim = end_hard_regno (mode, i);
2358 while (i < lim)
2359 df_set_regs_ever_live(i++, true);
2362 /* Mark the slots in regs_ever_live for the hard regs
2363 used by pseudo-reg number REGNO. */
2365 void
2366 mark_home_live (int regno)
2368 if (reg_renumber[regno] >= 0)
2369 mark_home_live_1 (regno, PSEUDO_REGNO_MODE (regno));
/* This function handles the tracking of elimination offsets around branches.

   X is a piece of RTL being scanned.

   INSN is the insn that it came from, if any.

   INITIAL_P is nonzero if we are to set the offset to be the initial
   offset and zero if we are setting the offset of the label to be the
   current offset.  */

static void
set_label_offsets (rtx x, rtx insn, int initial_p)
{
  enum rtx_code code = GET_CODE (x);
  rtx tem;
  unsigned int i;
  struct elim_table *p;

  switch (code)
    {
    case LABEL_REF:
      /* Non-local labels are handled elsewhere; ignore them here.  */
      if (LABEL_REF_NONLOCAL_P (x))
        return;

      x = XEXP (x, 0);

      /* ... fall through ...  */

    case CODE_LABEL:
      /* If we know nothing about this label, set the desired offsets.  Note
         that this sets the offset at a label to be the offset before a label
         if we don't know anything about the label.  This is not correct for
         the label after a BARRIER, but is the best guess we can make.  If
         we guessed wrong, we will suppress an elimination that might have
         been possible had we been able to guess correctly.  */

      if (! offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num])
        {
          for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
            offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
              = (initial_p ? reg_eliminate[i].initial_offset
                 : reg_eliminate[i].offset);
          offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num] = 1;
        }

      /* Otherwise, if this is the definition of a label and it is
         preceded by a BARRIER, set our offsets to the known offset of
         that label.  */

      else if (x == insn
               && (tem = prev_nonnote_insn (insn)) != 0
               && BARRIER_P (tem))
        set_offsets_for_label (insn);
      else
        /* If neither of the above cases is true, compare each offset
           with those previously recorded and suppress any eliminations
           where the offsets disagree.  */

        for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
          if (offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
              != (initial_p ? reg_eliminate[i].initial_offset
                  : reg_eliminate[i].offset))
            reg_eliminate[i].can_eliminate = 0;

      return;

    case JUMP_INSN:
      set_label_offsets (PATTERN (insn), insn, initial_p);

      /* ... fall through ...  */

    case INSN:
    case CALL_INSN:
      /* Any labels mentioned in REG_LABEL_OPERAND notes can be branched
         to indirectly and hence must have all eliminations at their
         initial offsets.  */
      for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
        if (REG_NOTE_KIND (tem) == REG_LABEL_OPERAND)
          set_label_offsets (XEXP (tem, 0), insn, 1);
      return;

    case PARALLEL:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      /* Each of the labels in the parallel or address vector must be
         at their initial offsets.  We want the first field for PARALLEL
         and ADDR_VEC and the second field for ADDR_DIFF_VEC.  */

      for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
        set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
                           insn, initial_p);
      return;

    case SET:
      /* We only care about setting PC.  If the source is not RETURN,
         IF_THEN_ELSE, or a label, disable any eliminations not at
         their initial offsets.  Similarly if any arm of the IF_THEN_ELSE
         isn't one of those possibilities.  For branches to a label,
         call ourselves recursively.

         Note that this can disable elimination unnecessarily when we have
         a non-local goto since it will look like a non-constant jump to
         someplace in the current function.  This isn't a significant
         problem since such jumps will normally be when all elimination
         pairs are back to their initial offsets.  */

      if (SET_DEST (x) != pc_rtx)
        return;

      switch (GET_CODE (SET_SRC (x)))
        {
        case PC:
        case RETURN:
          return;

        case LABEL_REF:
          set_label_offsets (SET_SRC (x), insn, initial_p);
          return;

        case IF_THEN_ELSE:
          tem = XEXP (SET_SRC (x), 1);
          if (GET_CODE (tem) == LABEL_REF)
            set_label_offsets (XEXP (tem, 0), insn, initial_p);
          else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
            break;

          tem = XEXP (SET_SRC (x), 2);
          if (GET_CODE (tem) == LABEL_REF)
            set_label_offsets (XEXP (tem, 0), insn, initial_p);
          else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
            break;
          return;

        default:
          break;
        }

      /* If we reach here, all eliminations must be at their initial
         offset because we are doing a jump to a variable address.  */
      for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
        if (p->offset != p->initial_offset)
          p->can_eliminate = 0;
      break;

    default:
      break;
    }
}
/* Scan X and replace any eliminable registers (such as fp) with a
   replacement (such as sp), plus an offset.

   MEM_MODE is the mode of an enclosing MEM.  We need this to know how
   much to adjust a register for, e.g., PRE_DEC.  Also, if we are inside a
   MEM, we are allowed to replace a sum of a register and the constant zero
   with the register, which we cannot do outside a MEM.  In addition, we need
   to record the fact that a register is referenced outside a MEM.

   If INSN is an insn, it is the insn containing X.  If we replace a REG
   in a SET_DEST with an equivalent MEM and INSN is nonzero, write a
   CLOBBER of the pseudo after INSN so find_equiv_regs will know that
   the REG is being modified.

   Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
   That's used when we eliminate in expressions stored in notes.
   This means, do not set ref_outside_mem even if the reference
   is outside of MEMs.

   REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
   replacements done assuming all offsets are at their initial values.  If
   they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
   encounter, return the actual location so that find_reloads will do
   the proper thing.  */

static rtx
eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
                  bool may_use_invariant)
{
  enum rtx_code code = GET_CODE (x);
  struct elim_table *ep;
  int regno;
  rtx new_rtx;
  int i, j;
  const char *fmt;
  int copied = 0;

  if (! current_function_decl)
    return x;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case RETURN:
      return x;

    case REG:
      regno = REGNO (x);

      /* First handle the case where we encounter a bare register that
         is eliminable.  Replace it with a PLUS.  */
      if (regno < FIRST_PSEUDO_REGISTER)
        {
          for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
               ep++)
            if (ep->from_rtx == x && ep->can_eliminate)
              return plus_constant (ep->to_rtx, ep->previous_offset);

        }
      else if (reg_renumber && reg_renumber[regno] < 0
               && reg_equiv_invariant && reg_equiv_invariant[regno])
        {
          if (may_use_invariant || (insn && DEBUG_INSN_P (insn)))
            return eliminate_regs_1 (copy_rtx (reg_equiv_invariant[regno]),
                                     mem_mode, insn, true);
          /* There exists at least one use of REGNO that cannot be
             eliminated.  Prevent the defining insn from being deleted.  */
          reg_equiv_init[regno] = NULL_RTX;
          alter_reg (regno, -1, true);
        }
      return x;

    /* You might think handling MINUS in a manner similar to PLUS is a
       good idea.  It is not.  It has been tried multiple times and every
       time the change has had to have been reverted.

       Other parts of reload know a PLUS is special (gen_reload for example)
       and require special code to handle code a reloaded PLUS operand.

       Also consider backends where the flags register is clobbered by a
       MINUS, but we can emit a PLUS that does not clobber flags (IA-32,
       lea instruction comes to mind).  If we try to reload a MINUS, we
       may kill the flags register that was holding a useful value.

       So, please before trying to handle MINUS, consider reload as a
       whole instead of this little section as well as the backend issues.  */
    case PLUS:
      /* If this is the sum of an eliminable register and a constant, rework
         the sum.  */
      if (REG_P (XEXP (x, 0))
          && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
          && CONSTANT_P (XEXP (x, 1)))
        {
          for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
               ep++)
            if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
              {
                /* The only time we want to replace a PLUS with a REG (this
                   occurs when the constant operand of the PLUS is the negative
                   of the offset) is when we are inside a MEM.  We won't want
                   to do so at other times because that would change the
                   structure of the insn in a way that reload can't handle.
                   We special-case the commonest situation in
                   eliminate_regs_in_insn, so just replace a PLUS with a
                   PLUS here, unless inside a MEM.  */
                if (mem_mode != 0 && CONST_INT_P (XEXP (x, 1))
                    && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
                  return ep->to_rtx;
                else
                  return gen_rtx_PLUS (Pmode, ep->to_rtx,
                                       plus_constant (XEXP (x, 1),
                                                      ep->previous_offset));
              }

          /* If the register is not eliminable, we are done since the other
             operand is a constant.  */
          return x;
        }

      /* If this is part of an address, we want to bring any constant to the
         outermost PLUS.  We will do this by doing register replacement in
         our operands and seeing if a constant shows up in one of them.

         Note that there is no risk of modifying the structure of the insn,
         since we only get called for its operands, thus we are either
         modifying the address inside a MEM, or something like an address
         operand of a load-address insn.  */

      {
        rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true);
        rtx new1 = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true);

        if (reg_renumber && (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)))
          {
            /* If one side is a PLUS and the other side is a pseudo that
               didn't get a hard register but has a reg_equiv_constant,
               we must replace the constant here since it may no longer
               be in the position of any operand.  */
            if (GET_CODE (new0) == PLUS && REG_P (new1)
                && REGNO (new1) >= FIRST_PSEUDO_REGISTER
                && reg_renumber[REGNO (new1)] < 0
                && reg_equiv_constant != 0
                && reg_equiv_constant[REGNO (new1)] != 0)
              new1 = reg_equiv_constant[REGNO (new1)];
            else if (GET_CODE (new1) == PLUS && REG_P (new0)
                     && REGNO (new0) >= FIRST_PSEUDO_REGISTER
                     && reg_renumber[REGNO (new0)] < 0
                     && reg_equiv_constant[REGNO (new0)] != 0)
              new0 = reg_equiv_constant[REGNO (new0)];

            new_rtx = form_sum (GET_MODE (x), new0, new1);

            /* As above, if we are not inside a MEM we do not want to
               turn a PLUS into something else.  We might try to do so here
               for an addition of 0 if we aren't optimizing.  */
            if (! mem_mode && GET_CODE (new_rtx) != PLUS)
              return gen_rtx_PLUS (GET_MODE (x), new_rtx, const0_rtx);
            else
              return new_rtx;
          }
      }
      return x;

    case MULT:
      /* If this is the product of an eliminable register and a
         constant, apply the distribute law and move the constant out
         so that we have (plus (mult ..) ..).  This is needed in order
         to keep load-address insns valid.  This case is pathological.
         We ignore the possibility of overflow here.  */
      if (REG_P (XEXP (x, 0))
          && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
          && CONST_INT_P (XEXP (x, 1)))
        for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
             ep++)
          if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
            {
              if (! mem_mode
                  /* Refs inside notes or in DEBUG_INSNs don't count for
                     this purpose.  */
                  && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
                                      || GET_CODE (insn) == INSN_LIST
                                      || DEBUG_INSN_P (insn))))
                ep->ref_outside_mem = 1;

              return
                plus_constant (gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
                               ep->previous_offset * INTVAL (XEXP (x, 1)));
            }

      /* ... fall through ...  */

    case CALL:
    case COMPARE:
    /* See comments before PLUS about handling MINUS.  */
    case MINUS:
    case DIV:      case UDIV:
    case MOD:      case UMOD:
    case AND:      case IOR:      case XOR:
    case ROTATERT: case ROTATE:
    case ASHIFTRT: case LSHIFTRT: case ASHIFT:
    case NE:       case EQ:
    case GE:       case GT:       case GEU:    case GTU:
    case LE:       case LT:       case LEU:    case LTU:
      {
        rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false);
        rtx new1 = XEXP (x, 1)
                   ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, false) : 0;

        if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
          return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
      }
      return x;

    case EXPR_LIST:
      /* If we have something in XEXP (x, 0), the usual case, eliminate it.  */
      if (XEXP (x, 0))
        {
          new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true);
          if (new_rtx != XEXP (x, 0))
            {
              /* If this is a REG_DEAD note, it is not valid anymore.
                 Using the eliminated version could result in creating a
                 REG_DEAD note for the stack or frame pointer.  */
              if (REG_NOTE_KIND (x) == REG_DEAD)
                return (XEXP (x, 1)
                        ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true)
                        : NULL_RTX);

              x = alloc_reg_note (REG_NOTE_KIND (x), new_rtx, XEXP (x, 1));
            }
        }

      /* ... fall through ...  */

    case INSN_LIST:
      /* Now do eliminations in the rest of the chain.  If this was
         an EXPR_LIST, this might result in allocating more memory than is
         strictly needed, but it simplifies the code.  */
      if (XEXP (x, 1))
        {
          new_rtx = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true);
          if (new_rtx != XEXP (x, 1))
            return
              gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new_rtx);
        }
      return x;

    case PRE_INC:
    case POST_INC:
    case PRE_DEC:
    case POST_DEC:
      /* We do not support elimination of a register that is modified.
         elimination_effects has already make sure that this does not
         happen.  */
      return x;

    case PRE_MODIFY:
    case POST_MODIFY:
      /* We do not support elimination of a register that is modified.
         elimination_effects has already make sure that this does not
         happen.  The only remaining case we need to consider here is
         that the increment value may be an eliminable register.  */
      if (GET_CODE (XEXP (x, 1)) == PLUS
          && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
        {
          rtx new_rtx = eliminate_regs_1 (XEXP (XEXP (x, 1), 1), mem_mode,
                                          insn, true);

          if (new_rtx != XEXP (XEXP (x, 1), 1))
            return gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (x, 0),
                                   gen_rtx_PLUS (GET_MODE (x),
                                                 XEXP (x, 0), new_rtx));
        }
      return x;

    case STRICT_LOW_PART:
    case NEG:          case NOT:
    case SIGN_EXTEND:  case ZERO_EXTEND:
    case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
    case FLOAT:        case FIX:
    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
    case ABS:
    case SQRT:
    case FFS:
    case CLZ:
    case CTZ:
    case POPCOUNT:
    case PARITY:
    case BSWAP:
      new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false);
      if (new_rtx != XEXP (x, 0))
        return gen_rtx_fmt_e (code, GET_MODE (x), new_rtx);
      return x;

    case SUBREG:
      /* Similar to above processing, but preserve SUBREG_BYTE.
         Convert (subreg (mem)) to (mem) if not paradoxical.
         Also, if we have a non-paradoxical (subreg (pseudo)) and the
         pseudo didn't get a hard reg, we must replace this with the
         eliminated version of the memory location because push_reload
         may do the replacement in certain circumstances.  */
      if (REG_P (SUBREG_REG (x))
          && (GET_MODE_SIZE (GET_MODE (x))
              <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
          && reg_equiv_memory_loc != 0
          && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
        {
          new_rtx = SUBREG_REG (x);
        }
      else
        new_rtx = eliminate_regs_1 (SUBREG_REG (x), mem_mode, insn, false);

      if (new_rtx != SUBREG_REG (x))
        {
          int x_size = GET_MODE_SIZE (GET_MODE (x));
          int new_size = GET_MODE_SIZE (GET_MODE (new_rtx));

          if (MEM_P (new_rtx)
              && ((x_size < new_size
#ifdef WORD_REGISTER_OPERATIONS
                   /* On these machines, combine can create rtl of the form
                      (set (subreg:m1 (reg:m2 R) 0) ...)
                      where m1 < m2, and expects something interesting to
                      happen to the entire word.  Moreover, it will use the
                      (reg:m2 R) later, expecting all bits to be preserved.
                      So if the number of words is the same, preserve the
                      subreg so that push_reload can see it.  */
                   && ! ((x_size - 1) / UNITS_PER_WORD
                         == (new_size - 1) / UNITS_PER_WORD)
#endif
                   )
                  || x_size == new_size)
              )
            return adjust_address_nv (new_rtx, GET_MODE (x), SUBREG_BYTE (x));
          else
            return gen_rtx_SUBREG (GET_MODE (x), new_rtx, SUBREG_BYTE (x));
        }

      return x;

    case MEM:
      /* Our only special processing is to pass the mode of the MEM to our
         recursive call and copy the flags.  While we are here, handle this
         case more efficiently.  */
      return
        replace_equiv_address_nv (x,
                                  eliminate_regs_1 (XEXP (x, 0), GET_MODE (x),
                                                    insn, true));

    case USE:
      /* Handle insn_list USE that a call to a pure function may generate.  */
      new_rtx = eliminate_regs_1 (XEXP (x, 0), VOIDmode, insn, false);
      if (new_rtx != XEXP (x, 0))
        return gen_rtx_USE (GET_MODE (x), new_rtx);
      return x;

    case CLOBBER:
      /* Only CLOBBERs inside debug insns reach the generic scan below.  */
      gcc_assert (insn && DEBUG_INSN_P (insn));
      break;

    case ASM_OPERANDS:
    case SET:
      gcc_unreachable ();

    default:
      break;
    }

  /* Process each of our operands recursively.  If any have changed, make a
     copy of the rtx.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
        {
          new_rtx = eliminate_regs_1 (XEXP (x, i), mem_mode, insn, false);
          if (new_rtx != XEXP (x, i) && ! copied)
            {
              x = shallow_copy_rtx (x);
              copied = 1;
            }
          XEXP (x, i) = new_rtx;
        }
      else if (*fmt == 'E')
        {
          int copied_vec = 0;
          for (j = 0; j < XVECLEN (x, i); j++)
            {
              new_rtx = eliminate_regs_1 (XVECEXP (x, i, j), mem_mode, insn,
                                          false);
              if (new_rtx != XVECEXP (x, i, j) && ! copied_vec)
                {
                  rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
                                             XVEC (x, i)->elem);
                  if (! copied)
                    {
                      x = shallow_copy_rtx (x);
                      copied = 1;
                    }
                  XVEC (x, i) = new_v;
                  copied_vec = 1;
                }
              XVECEXP (x, i, j) = new_rtx;
            }
        }
    }

  return x;
}
2942 eliminate_regs (rtx x, enum machine_mode mem_mode, rtx insn)
2944 return eliminate_regs_1 (x, mem_mode, insn, false);
2947 /* Scan rtx X for modifications of elimination target registers. Update
2948 the table of eliminables to reflect the changed state. MEM_MODE is
2949 the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM. */
2951 static void
2952 elimination_effects (rtx x, enum machine_mode mem_mode)
2954 enum rtx_code code = GET_CODE (x);
2955 struct elim_table *ep;
2956 int regno;
2957 int i, j;
2958 const char *fmt;
2960 switch (code)
2962 case CONST_INT:
2963 case CONST_DOUBLE:
2964 case CONST_FIXED:
2965 case CONST_VECTOR:
2966 case CONST:
2967 case SYMBOL_REF:
2968 case CODE_LABEL:
2969 case PC:
2970 case CC0:
2971 case ASM_INPUT:
2972 case ADDR_VEC:
2973 case ADDR_DIFF_VEC:
2974 case RETURN:
2975 return;
2977 case REG:
2978 regno = REGNO (x);
2980 /* First handle the case where we encounter a bare register that
2981 is eliminable. Replace it with a PLUS. */
2982 if (regno < FIRST_PSEUDO_REGISTER)
2984 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2985 ep++)
2986 if (ep->from_rtx == x && ep->can_eliminate)
2988 if (! mem_mode)
2989 ep->ref_outside_mem = 1;
2990 return;
2994 else if (reg_renumber[regno] < 0 && reg_equiv_constant
2995 && reg_equiv_constant[regno]
2996 && ! function_invariant_p (reg_equiv_constant[regno]))
2997 elimination_effects (reg_equiv_constant[regno], mem_mode);
2998 return;
3000 case PRE_INC:
3001 case POST_INC:
3002 case PRE_DEC:
3003 case POST_DEC:
3004 case POST_MODIFY:
3005 case PRE_MODIFY:
3006 /* If we modify the source of an elimination rule, disable it. */
3007 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3008 if (ep->from_rtx == XEXP (x, 0))
3009 ep->can_eliminate = 0;
3011 /* If we modify the target of an elimination rule by adding a constant,
3012 update its offset. If we modify the target in any other way, we'll
3013 have to disable the rule as well. */
3014 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3015 if (ep->to_rtx == XEXP (x, 0))
3017 int size = GET_MODE_SIZE (mem_mode);
3019 /* If more bytes than MEM_MODE are pushed, account for them. */
3020 #ifdef PUSH_ROUNDING
3021 if (ep->to_rtx == stack_pointer_rtx)
3022 size = PUSH_ROUNDING (size);
3023 #endif
3024 if (code == PRE_DEC || code == POST_DEC)
3025 ep->offset += size;
3026 else if (code == PRE_INC || code == POST_INC)
3027 ep->offset -= size;
3028 else if (code == PRE_MODIFY || code == POST_MODIFY)
3030 if (GET_CODE (XEXP (x, 1)) == PLUS
3031 && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
3032 && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
3033 ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1));
3034 else
3035 ep->can_eliminate = 0;
3039 /* These two aren't unary operators. */
3040 if (code == POST_MODIFY || code == PRE_MODIFY)
3041 break;
3043 /* Fall through to generic unary operation case. */
3044 case STRICT_LOW_PART:
3045 case NEG: case NOT:
3046 case SIGN_EXTEND: case ZERO_EXTEND:
3047 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3048 case FLOAT: case FIX:
3049 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3050 case ABS:
3051 case SQRT:
3052 case FFS:
3053 case CLZ:
3054 case CTZ:
3055 case POPCOUNT:
3056 case PARITY:
3057 case BSWAP:
3058 elimination_effects (XEXP (x, 0), mem_mode);
3059 return;
3061 case SUBREG:
3062 if (REG_P (SUBREG_REG (x))
3063 && (GET_MODE_SIZE (GET_MODE (x))
3064 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3065 && reg_equiv_memory_loc != 0
3066 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
3067 return;
3069 elimination_effects (SUBREG_REG (x), mem_mode);
3070 return;
3072 case USE:
3073 /* If using a register that is the source of an eliminate we still
3074 think can be performed, note it cannot be performed since we don't
3075 know how this register is used. */
3076 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3077 if (ep->from_rtx == XEXP (x, 0))
3078 ep->can_eliminate = 0;
3080 elimination_effects (XEXP (x, 0), mem_mode);
3081 return;
3083 case CLOBBER:
3084 /* If clobbering a register that is the replacement register for an
3085 elimination we still think can be performed, note that it cannot
3086 be performed. Otherwise, we need not be concerned about it. */
3087 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3088 if (ep->to_rtx == XEXP (x, 0))
3089 ep->can_eliminate = 0;
3091 elimination_effects (XEXP (x, 0), mem_mode);
3092 return;
3094 case SET:
3095 /* Check for setting a register that we know about. */
3096 if (REG_P (SET_DEST (x)))
3098 /* See if this is setting the replacement register for an
3099 elimination.
3101 If DEST is the hard frame pointer, we do nothing because we
3102 assume that all assignments to the frame pointer are for
3103 non-local gotos and are being done at a time when they are valid
3104 and do not disturb anything else. Some machines want to
3105 eliminate a fake argument pointer (or even a fake frame pointer)
3106 with either the real frame or the stack pointer. Assignments to
3107 the hard frame pointer must not prevent this elimination. */
3109 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3110 ep++)
3111 if (ep->to_rtx == SET_DEST (x)
3112 && SET_DEST (x) != hard_frame_pointer_rtx)
3114 /* If it is being incremented, adjust the offset. Otherwise,
3115 this elimination can't be done. */
3116 rtx src = SET_SRC (x);
3118 if (GET_CODE (src) == PLUS
3119 && XEXP (src, 0) == SET_DEST (x)
3120 && CONST_INT_P (XEXP (src, 1)))
3121 ep->offset -= INTVAL (XEXP (src, 1));
3122 else
3123 ep->can_eliminate = 0;
3127 elimination_effects (SET_DEST (x), VOIDmode);
3128 elimination_effects (SET_SRC (x), VOIDmode);
3129 return;
3131 case MEM:
3132 /* Our only special processing is to pass the mode of the MEM to our
3133 recursive call. */
3134 elimination_effects (XEXP (x, 0), GET_MODE (x));
3135 return;
3137 default:
3138 break;
3141 fmt = GET_RTX_FORMAT (code);
3142 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3144 if (*fmt == 'e')
3145 elimination_effects (XEXP (x, i), mem_mode);
3146 else if (*fmt == 'E')
3147 for (j = 0; j < XVECLEN (x, i); j++)
3148 elimination_effects (XVECEXP (x, i, j), mem_mode);
3152 /* Descend through rtx X and verify that no references to eliminable registers
3153 remain. If any do remain, mark the involved register as not
3154 eliminable. */
3156 static void
3157 check_eliminable_occurrences (rtx x)
3159 const char *fmt;
3160 int i;
3161 enum rtx_code code;
3163 if (x == 0)
3164 return;
3166 code = GET_CODE (x);
3168 if (code == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3170 struct elim_table *ep;
3172 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3173 if (ep->from_rtx == x)
3174 ep->can_eliminate = 0;
3175 return;
3178 fmt = GET_RTX_FORMAT (code);
3179 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3181 if (*fmt == 'e')
3182 check_eliminable_occurrences (XEXP (x, i));
3183 else if (*fmt == 'E')
3185 int j;
3186 for (j = 0; j < XVECLEN (x, i); j++)
3187 check_eliminable_occurrences (XVECEXP (x, i, j));
3192 /* Scan INSN and eliminate all eliminable registers in it.
3194 If REPLACE is nonzero, do the replacement destructively. Also
3195 delete the insn as dead it if it is setting an eliminable register.
3197 If REPLACE is zero, do all our allocations in reload_obstack.
3199 If no eliminations were done and this insn doesn't require any elimination
3200 processing (these are not identical conditions: it might be updating sp,
3201 but not referencing fp; this needs to be seen during reload_as_needed so
3202 that the offset between fp and sp can be taken into consideration), zero
3203 is returned. Otherwise, 1 is returned. */
3205 static int
3206 eliminate_regs_in_insn (rtx insn, int replace)
3208 int icode = recog_memoized (insn);
3209 rtx old_body = PATTERN (insn);
3210 int insn_is_asm = asm_noperands (old_body) >= 0;
3211 rtx old_set = single_set (insn);
3212 rtx new_body;
3213 int val = 0;
3214 int i;
3215 rtx substed_operand[MAX_RECOG_OPERANDS];
3216 rtx orig_operand[MAX_RECOG_OPERANDS];
3217 struct elim_table *ep;
3218 rtx plus_src, plus_cst_src;
3220 if (! insn_is_asm && icode < 0)
3222 gcc_assert (GET_CODE (PATTERN (insn)) == USE
3223 || GET_CODE (PATTERN (insn)) == CLOBBER
3224 || GET_CODE (PATTERN (insn)) == ADDR_VEC
3225 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
3226 || GET_CODE (PATTERN (insn)) == ASM_INPUT
3227 || DEBUG_INSN_P (insn));
3228 if (DEBUG_INSN_P (insn))
3229 INSN_VAR_LOCATION_LOC (insn)
3230 = eliminate_regs (INSN_VAR_LOCATION_LOC (insn), VOIDmode, insn);
3231 return 0;
3234 if (old_set != 0 && REG_P (SET_DEST (old_set))
3235 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3237 /* Check for setting an eliminable register. */
3238 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3239 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3241 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3242 /* If this is setting the frame pointer register to the
3243 hardware frame pointer register and this is an elimination
3244 that will be done (tested above), this insn is really
3245 adjusting the frame pointer downward to compensate for
3246 the adjustment done before a nonlocal goto. */
3247 if (ep->from == FRAME_POINTER_REGNUM
3248 && ep->to == HARD_FRAME_POINTER_REGNUM)
3250 rtx base = SET_SRC (old_set);
3251 rtx base_insn = insn;
3252 HOST_WIDE_INT offset = 0;
3254 while (base != ep->to_rtx)
3256 rtx prev_insn, prev_set;
3258 if (GET_CODE (base) == PLUS
3259 && CONST_INT_P (XEXP (base, 1)))
3261 offset += INTVAL (XEXP (base, 1));
3262 base = XEXP (base, 0);
3264 else if ((prev_insn = prev_nonnote_insn (base_insn)) != 0
3265 && (prev_set = single_set (prev_insn)) != 0
3266 && rtx_equal_p (SET_DEST (prev_set), base))
3268 base = SET_SRC (prev_set);
3269 base_insn = prev_insn;
3271 else
3272 break;
3275 if (base == ep->to_rtx)
3277 rtx src
3278 = plus_constant (ep->to_rtx, offset - ep->offset);
3280 new_body = old_body;
3281 if (! replace)
3283 new_body = copy_insn (old_body);
3284 if (REG_NOTES (insn))
3285 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3287 PATTERN (insn) = new_body;
3288 old_set = single_set (insn);
3290 /* First see if this insn remains valid when we
3291 make the change. If not, keep the INSN_CODE
3292 the same and let reload fit it up. */
3293 validate_change (insn, &SET_SRC (old_set), src, 1);
3294 validate_change (insn, &SET_DEST (old_set),
3295 ep->to_rtx, 1);
3296 if (! apply_change_group ())
3298 SET_SRC (old_set) = src;
3299 SET_DEST (old_set) = ep->to_rtx;
3302 val = 1;
3303 goto done;
3306 #endif
3308 /* In this case this insn isn't serving a useful purpose. We
3309 will delete it in reload_as_needed once we know that this
3310 elimination is, in fact, being done.
3312 If REPLACE isn't set, we can't delete this insn, but needn't
3313 process it since it won't be used unless something changes. */
3314 if (replace)
3316 delete_dead_insn (insn);
3317 return 1;
3319 val = 1;
3320 goto done;
3324 /* We allow one special case which happens to work on all machines we
3325 currently support: a single set with the source or a REG_EQUAL
3326 note being a PLUS of an eliminable register and a constant. */
3327 plus_src = plus_cst_src = 0;
3328 if (old_set && REG_P (SET_DEST (old_set)))
3330 if (GET_CODE (SET_SRC (old_set)) == PLUS)
3331 plus_src = SET_SRC (old_set);
3332 /* First see if the source is of the form (plus (...) CST). */
3333 if (plus_src
3334 && CONST_INT_P (XEXP (plus_src, 1)))
3335 plus_cst_src = plus_src;
3336 else if (REG_P (SET_SRC (old_set))
3337 || plus_src)
3339 /* Otherwise, see if we have a REG_EQUAL note of the form
3340 (plus (...) CST). */
3341 rtx links;
3342 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
3344 if ((REG_NOTE_KIND (links) == REG_EQUAL
3345 || REG_NOTE_KIND (links) == REG_EQUIV)
3346 && GET_CODE (XEXP (links, 0)) == PLUS
3347 && CONST_INT_P (XEXP (XEXP (links, 0), 1)))
3349 plus_cst_src = XEXP (links, 0);
3350 break;
3355 /* Check that the first operand of the PLUS is a hard reg or
3356 the lowpart subreg of one. */
3357 if (plus_cst_src)
3359 rtx reg = XEXP (plus_cst_src, 0);
3360 if (GET_CODE (reg) == SUBREG && subreg_lowpart_p (reg))
3361 reg = SUBREG_REG (reg);
3363 if (!REG_P (reg) || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
3364 plus_cst_src = 0;
3367 if (plus_cst_src)
3369 rtx reg = XEXP (plus_cst_src, 0);
3370 HOST_WIDE_INT offset = INTVAL (XEXP (plus_cst_src, 1));
3372 if (GET_CODE (reg) == SUBREG)
3373 reg = SUBREG_REG (reg);
3375 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3376 if (ep->from_rtx == reg && ep->can_eliminate)
3378 rtx to_rtx = ep->to_rtx;
3379 offset += ep->offset;
3380 offset = trunc_int_for_mode (offset, GET_MODE (plus_cst_src));
3382 if (GET_CODE (XEXP (plus_cst_src, 0)) == SUBREG)
3383 to_rtx = gen_lowpart (GET_MODE (XEXP (plus_cst_src, 0)),
3384 to_rtx);
3385 /* If we have a nonzero offset, and the source is already
3386 a simple REG, the following transformation would
3387 increase the cost of the insn by replacing a simple REG
3388 with (plus (reg sp) CST). So try only when we already
3389 had a PLUS before. */
3390 if (offset == 0 || plus_src)
3392 rtx new_src = plus_constant (to_rtx, offset);
3394 new_body = old_body;
3395 if (! replace)
3397 new_body = copy_insn (old_body);
3398 if (REG_NOTES (insn))
3399 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3401 PATTERN (insn) = new_body;
3402 old_set = single_set (insn);
3404 /* First see if this insn remains valid when we make the
3405 change. If not, try to replace the whole pattern with
3406 a simple set (this may help if the original insn was a
3407 PARALLEL that was only recognized as single_set due to
3408 REG_UNUSED notes). If this isn't valid either, keep
3409 the INSN_CODE the same and let reload fix it up. */
3410 if (!validate_change (insn, &SET_SRC (old_set), new_src, 0))
3412 rtx new_pat = gen_rtx_SET (VOIDmode,
3413 SET_DEST (old_set), new_src);
3415 if (!validate_change (insn, &PATTERN (insn), new_pat, 0))
3416 SET_SRC (old_set) = new_src;
3419 else
3420 break;
3422 val = 1;
3423 /* This can't have an effect on elimination offsets, so skip right
3424 to the end. */
3425 goto done;
3429 /* Determine the effects of this insn on elimination offsets. */
3430 elimination_effects (old_body, VOIDmode);
3432 /* Eliminate all eliminable registers occurring in operands that
3433 can be handled by reload. */
3434 extract_insn (insn);
3435 for (i = 0; i < recog_data.n_operands; i++)
3437 orig_operand[i] = recog_data.operand[i];
3438 substed_operand[i] = recog_data.operand[i];
3440 /* For an asm statement, every operand is eliminable. */
3441 if (insn_is_asm || insn_data[icode].operand[i].eliminable)
3443 bool is_set_src, in_plus;
3445 /* Check for setting a register that we know about. */
3446 if (recog_data.operand_type[i] != OP_IN
3447 && REG_P (orig_operand[i]))
3449 /* If we are assigning to a register that can be eliminated, it
3450 must be as part of a PARALLEL, since the code above handles
3451 single SETs. We must indicate that we can no longer
3452 eliminate this reg. */
3453 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3454 ep++)
3455 if (ep->from_rtx == orig_operand[i])
3456 ep->can_eliminate = 0;
3459 /* Companion to the above plus substitution, we can allow
3460 invariants as the source of a plain move. */
3461 is_set_src = false;
3462 if (old_set && recog_data.operand_loc[i] == &SET_SRC (old_set))
3463 is_set_src = true;
3464 in_plus = false;
3465 if (plus_src
3466 && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
3467 || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
3468 in_plus = true;
3470 substed_operand[i]
3471 = eliminate_regs_1 (recog_data.operand[i], VOIDmode,
3472 replace ? insn : NULL_RTX,
3473 is_set_src || in_plus);
3474 if (substed_operand[i] != orig_operand[i])
3475 val = 1;
3476 /* Terminate the search in check_eliminable_occurrences at
3477 this point. */
3478 *recog_data.operand_loc[i] = 0;
3480 /* If an output operand changed from a REG to a MEM and INSN is an
3481 insn, write a CLOBBER insn. */
3482 if (recog_data.operand_type[i] != OP_IN
3483 && REG_P (orig_operand[i])
3484 && MEM_P (substed_operand[i])
3485 && replace)
3486 emit_insn_after (gen_clobber (orig_operand[i]), insn);
3490 for (i = 0; i < recog_data.n_dups; i++)
3491 *recog_data.dup_loc[i]
3492 = *recog_data.operand_loc[(int) recog_data.dup_num[i]];
3494 /* If any eliminable remain, they aren't eliminable anymore. */
3495 check_eliminable_occurrences (old_body);
3497 /* Substitute the operands; the new values are in the substed_operand
3498 array. */
3499 for (i = 0; i < recog_data.n_operands; i++)
3500 *recog_data.operand_loc[i] = substed_operand[i];
3501 for (i = 0; i < recog_data.n_dups; i++)
3502 *recog_data.dup_loc[i] = substed_operand[(int) recog_data.dup_num[i]];
3504 /* If we are replacing a body that was a (set X (plus Y Z)), try to
3505 re-recognize the insn. We do this in case we had a simple addition
3506 but now can do this as a load-address. This saves an insn in this
3507 common case.
3508 If re-recognition fails, the old insn code number will still be used,
3509 and some register operands may have changed into PLUS expressions.
3510 These will be handled by find_reloads by loading them into a register
3511 again. */
3513 if (val)
3515 /* If we aren't replacing things permanently and we changed something,
3516 make another copy to ensure that all the RTL is new. Otherwise
3517 things can go wrong if find_reload swaps commutative operands
3518 and one is inside RTL that has been copied while the other is not. */
3519 new_body = old_body;
3520 if (! replace)
3522 new_body = copy_insn (old_body);
3523 if (REG_NOTES (insn))
3524 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3526 PATTERN (insn) = new_body;
3528 /* If we had a move insn but now we don't, rerecognize it. This will
3529 cause spurious re-recognition if the old move had a PARALLEL since
3530 the new one still will, but we can't call single_set without
3531 having put NEW_BODY into the insn and the re-recognition won't
3532 hurt in this rare case. */
3533 /* ??? Why this huge if statement - why don't we just rerecognize the
3534 thing always? */
3535 if (! insn_is_asm
3536 && old_set != 0
3537 && ((REG_P (SET_SRC (old_set))
3538 && (GET_CODE (new_body) != SET
3539 || !REG_P (SET_SRC (new_body))))
3540 /* If this was a load from or store to memory, compare
3541 the MEM in recog_data.operand to the one in the insn.
3542 If they are not equal, then rerecognize the insn. */
3543 || (old_set != 0
3544 && ((MEM_P (SET_SRC (old_set))
3545 && SET_SRC (old_set) != recog_data.operand[1])
3546 || (MEM_P (SET_DEST (old_set))
3547 && SET_DEST (old_set) != recog_data.operand[0])))
3548 /* If this was an add insn before, rerecognize. */
3549 || GET_CODE (SET_SRC (old_set)) == PLUS))
3551 int new_icode = recog (PATTERN (insn), insn, 0);
3552 if (new_icode >= 0)
3553 INSN_CODE (insn) = new_icode;
3557 /* Restore the old body. If there were any changes to it, we made a copy
3558 of it while the changes were still in place, so we'll correctly return
3559 a modified insn below. */
3560 if (! replace)
3562 /* Restore the old body. */
3563 for (i = 0; i < recog_data.n_operands; i++)
3564 /* Restoring a top-level match_parallel would clobber the new_body
3565 we installed in the insn. */
3566 if (recog_data.operand_loc[i] != &PATTERN (insn))
3567 *recog_data.operand_loc[i] = orig_operand[i];
3568 for (i = 0; i < recog_data.n_dups; i++)
3569 *recog_data.dup_loc[i] = orig_operand[(int) recog_data.dup_num[i]];
3572 /* Update all elimination pairs to reflect the status after the current
3573 insn. The changes we make were determined by the earlier call to
3574 elimination_effects.
3576 We also detect cases where register elimination cannot be done,
3577 namely, if a register would be both changed and referenced outside a MEM
3578 in the resulting insn since such an insn is often undefined and, even if
3579 not, we cannot know what meaning will be given to it. Note that it is
3580 valid to have a register used in an address in an insn that changes it
3581 (presumably with a pre- or post-increment or decrement).
3583 If anything changes, return nonzero. */
3585 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3587 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3588 ep->can_eliminate = 0;
3590 ep->ref_outside_mem = 0;
3592 if (ep->previous_offset != ep->offset)
3593 val = 1;
3596 done:
3597 /* If we changed something, perform elimination in REG_NOTES. This is
3598 needed even when REPLACE is zero because a REG_DEAD note might refer
3599 to a register that we eliminate and could cause a different number
3600 of spill registers to be needed in the final reload pass than in
3601 the pre-passes. */
3602 if (val && REG_NOTES (insn) != 0)
3603 REG_NOTES (insn)
3604 = eliminate_regs_1 (REG_NOTES (insn), VOIDmode, REG_NOTES (insn), true);
3606 return val;
3609 /* Loop through all elimination pairs.
3610 Recalculate the number not at initial offset.
3612 Compute the maximum offset (minimum offset if the stack does not
3613 grow downward) for each elimination pair. */
3615 static void
3616 update_eliminable_offsets (void)
3618 struct elim_table *ep;
3620 num_not_at_initial_offset = 0;
3621 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3623 ep->previous_offset = ep->offset;
3624 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3625 num_not_at_initial_offset++;
3629 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3630 replacement we currently believe is valid, mark it as not eliminable if X
3631 modifies DEST in any way other than by adding a constant integer to it.
3633 If DEST is the frame pointer, we do nothing because we assume that
3634 all assignments to the hard frame pointer are nonlocal gotos and are being
3635 done at a time when they are valid and do not disturb anything else.
3636 Some machines want to eliminate a fake argument pointer with either the
3637 frame or stack pointer. Assignments to the hard frame pointer must not
3638 prevent this elimination.
3640 Called via note_stores from reload before starting its passes to scan
3641 the insns of the function. */
3643 static void
3644 mark_not_eliminable (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
3646 unsigned int i;
3648 /* A SUBREG of a hard register here is just changing its mode. We should
3649 not see a SUBREG of an eliminable hard register, but check just in
3650 case. */
3651 if (GET_CODE (dest) == SUBREG)
3652 dest = SUBREG_REG (dest);
3654 if (dest == hard_frame_pointer_rtx)
3655 return;
3657 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3658 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3659 && (GET_CODE (x) != SET
3660 || GET_CODE (SET_SRC (x)) != PLUS
3661 || XEXP (SET_SRC (x), 0) != dest
3662 || !CONST_INT_P (XEXP (SET_SRC (x), 1))))
3664 reg_eliminate[i].can_eliminate_previous
3665 = reg_eliminate[i].can_eliminate = 0;
3666 num_eliminable--;
3670 /* Verify that the initial elimination offsets did not change since the
3671 last call to set_initial_elim_offsets. This is used to catch cases
3672 where something illegal happened during reload_as_needed that could
3673 cause incorrect code to be generated if we did not check for it. */
3675 static bool
3676 verify_initial_elim_offsets (void)
3678 HOST_WIDE_INT t;
3680 if (!num_eliminable)
3681 return true;
3683 #ifdef ELIMINABLE_REGS
3685 struct elim_table *ep;
3687 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3689 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t);
3690 if (t != ep->initial_offset)
3691 return false;
3694 #else
3695 INITIAL_FRAME_POINTER_OFFSET (t);
3696 if (t != reg_eliminate[0].initial_offset)
3697 return false;
3698 #endif
3700 return true;
3703 /* Reset all offsets on eliminable registers to their initial values. */
3705 static void
3706 set_initial_elim_offsets (void)
3708 struct elim_table *ep = reg_eliminate;
3710 #ifdef ELIMINABLE_REGS
3711 for (; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3713 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
3714 ep->previous_offset = ep->offset = ep->initial_offset;
3716 #else
3717 INITIAL_FRAME_POINTER_OFFSET (ep->initial_offset);
3718 ep->previous_offset = ep->offset = ep->initial_offset;
3719 #endif
3721 num_not_at_initial_offset = 0;
3724 /* Subroutine of set_initial_label_offsets called via for_each_eh_label. */
3726 static void
3727 set_initial_eh_label_offset (rtx label)
3729 set_label_offsets (label, NULL_RTX, 1);
3732 /* Initialize the known label offsets.
3733 Set a known offset for each forced label to be at the initial offset
3734 of each elimination. We do this because we assume that all
3735 computed jumps occur from a location where each elimination is
3736 at its initial offset.
3737 For all other labels, show that we don't know the offsets. */
3739 static void
3740 set_initial_label_offsets (void)
3742 rtx x;
3743 memset (offsets_known_at, 0, num_labels);
3745 for (x = forced_labels; x; x = XEXP (x, 1))
3746 if (XEXP (x, 0))
3747 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
3749 for_each_eh_label (set_initial_eh_label_offset);
3752 /* Set all elimination offsets to the known values for the code label given
3753 by INSN. */
3755 static void
3756 set_offsets_for_label (rtx insn)
3758 unsigned int i;
3759 int label_nr = CODE_LABEL_NUMBER (insn);
3760 struct elim_table *ep;
3762 num_not_at_initial_offset = 0;
3763 for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++)
3765 ep->offset = ep->previous_offset
3766 = offsets_at[label_nr - first_label_num][i];
3767 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3768 num_not_at_initial_offset++;
3772 /* See if anything that happened changes which eliminations are valid.
3773 For example, on the SPARC, whether or not the frame pointer can
3774 be eliminated can depend on what registers have been used. We need
3775 not check some conditions again (such as flag_omit_frame_pointer)
3776 since they can't have changed. */
3778 static void
3779 update_eliminables (HARD_REG_SET *pset)
3781 int previous_frame_pointer_needed = frame_pointer_needed;
3782 struct elim_table *ep;
3784 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3785 if ((ep->from == HARD_FRAME_POINTER_REGNUM
3786 && targetm.frame_pointer_required ())
3787 #ifdef ELIMINABLE_REGS
3788 || ! targetm.can_eliminate (ep->from, ep->to)
3789 #endif
3791 ep->can_eliminate = 0;
3793 /* Look for the case where we have discovered that we can't replace
3794 register A with register B and that means that we will now be
3795 trying to replace register A with register C. This means we can
3796 no longer replace register C with register B and we need to disable
3797 such an elimination, if it exists. This occurs often with A == ap,
3798 B == sp, and C == fp. */
3800 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3802 struct elim_table *op;
3803 int new_to = -1;
3805 if (! ep->can_eliminate && ep->can_eliminate_previous)
3807 /* Find the current elimination for ep->from, if there is a
3808 new one. */
3809 for (op = reg_eliminate;
3810 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3811 if (op->from == ep->from && op->can_eliminate)
3813 new_to = op->to;
3814 break;
3817 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
3818 disable it. */
3819 for (op = reg_eliminate;
3820 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3821 if (op->from == new_to && op->to == ep->to)
3822 op->can_eliminate = 0;
3826 /* See if any registers that we thought we could eliminate the previous
3827 time are no longer eliminable. If so, something has changed and we
3828 must spill the register. Also, recompute the number of eliminable
3829 registers and see if the frame pointer is needed; it is if there is
3830 no elimination of the frame pointer that we can perform. */
3832 frame_pointer_needed = 1;
3833 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3835 if (ep->can_eliminate
3836 && ep->from == FRAME_POINTER_REGNUM
3837 && ep->to != HARD_FRAME_POINTER_REGNUM
3838 && (! SUPPORTS_STACK_ALIGNMENT
3839 || ! crtl->stack_realign_needed))
3840 frame_pointer_needed = 0;
3842 if (! ep->can_eliminate && ep->can_eliminate_previous)
3844 ep->can_eliminate_previous = 0;
3845 SET_HARD_REG_BIT (*pset, ep->from);
3846 num_eliminable--;
3850 /* If we didn't need a frame pointer last time, but we do now, spill
3851 the hard frame pointer. */
3852 if (frame_pointer_needed && ! previous_frame_pointer_needed)
3853 SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM);
3856 /* Return true if X is used as the target register of an elimination. */
3858 bool
3859 elimination_target_reg_p (rtx x)
3861 struct elim_table *ep;
3863 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3864 if (ep->to_rtx == x && ep->can_eliminate)
3865 return true;
3867 return false;
3870 /* Initialize the table of registers to eliminate.
3871 Pre-condition: global flag frame_pointer_needed has been set before
3872 calling this function. */
3874 static void
3875 init_elim_table (void)
3877 struct elim_table *ep;
3878 #ifdef ELIMINABLE_REGS
3879 const struct elim_table_1 *ep1;
3880 #endif
3882 if (!reg_eliminate)
3883 reg_eliminate = XCNEWVEC (struct elim_table, NUM_ELIMINABLE_REGS);
3885 num_eliminable = 0;
3887 #ifdef ELIMINABLE_REGS
3888 for (ep = reg_eliminate, ep1 = reg_eliminate_1;
3889 ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++, ep1++)
3891 ep->from = ep1->from;
3892 ep->to = ep1->to;
3893 ep->can_eliminate = ep->can_eliminate_previous
3894 = (targetm.can_eliminate (ep->from, ep->to)
3895 && ! (ep->to == STACK_POINTER_REGNUM
3896 && frame_pointer_needed
3897 && (! SUPPORTS_STACK_ALIGNMENT
3898 || ! stack_realign_fp)));
3900 #else
3901 reg_eliminate[0].from = reg_eliminate_1[0].from;
3902 reg_eliminate[0].to = reg_eliminate_1[0].to;
3903 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
3904 = ! frame_pointer_needed;
3905 #endif
3907 /* Count the number of eliminable registers and build the FROM and TO
3908 REG rtx's. Note that code in gen_rtx_REG will cause, e.g.,
3909 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
3910 We depend on this. */
3911 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3913 num_eliminable += ep->can_eliminate;
3914 ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
3915 ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
3919 /* Kick all pseudos out of hard register REGNO.
3921 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3922 because we found we can't eliminate some register. In that case, no pseudos
3923 are allowed to be in the register, even if they are only in a block that
3924 doesn't require spill registers, unlike the case when we are spilling this
3925 hard reg to produce another spill register.
3927 Any pseudos kicked out are recorded in the spilled_pseudos regset;
the caller picks them up from there (this function returns nothing). */
3929 static void
3930 spill_hard_reg (unsigned int regno, int cant_eliminate)
3932 int i;
3934 if (cant_eliminate)
/* The register is permanently off-limits for spilling, and since it is
   now in active use, record it as ever-live. */
3936 SET_HARD_REG_BIT (bad_spill_regs_global, regno);
3937 df_set_regs_ever_live (regno, true);
3940 /* Spill every pseudo reg that was allocated to this reg
3941 or to something that overlaps this reg. */
/* A pseudo overlaps REGNO when its first hard reg is <= REGNO and its
   last hard reg (per its mode) extends past REGNO. */
3943 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3944 if (reg_renumber[i] >= 0
3945 && (unsigned int) reg_renumber[i] <= regno
3946 && end_hard_regno (PSEUDO_REGNO_MODE (i), reg_renumber[i]) > regno)
3947 SET_REGNO_REG_SET (&spilled_pseudos, i);
3950 /* After find_reload_regs has been run for all insns that need reloads,
3951 and/or spill_hard_regs was called, this function is used to actually
3952 spill pseudo registers and try to reallocate them. It also sets up the
3953 spill_regs array for use by choose_reload_regs.
Returns nonzero if anything changed and another reload pass is needed. */
3955 static int
3956 finish_spills (int global)
3958 struct insn_chain *chain;
3959 int something_changed = 0;
3960 unsigned i;
3961 reg_set_iterator rsi;
3963 /* Build the spill_regs array for the function. */
3964 /* If there are some registers still to eliminate and one of the spill regs
3965 wasn't ever used before, additional stack space may have to be
3966 allocated to store this register. Thus, we may have changed the offset
3967 between the stack and frame pointers, so mark that something has changed.
3969 One might think that we need only set VAL to 1 if this is a call-used
3970 register. However, the set of registers that must be saved by the
3971 prologue is not identical to the call-used set. For example, the
3972 register used by the call insn for the return PC is a call-used register,
3973 but must be saved by the prologue. */
3975 n_spills = 0;
3976 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3977 if (TEST_HARD_REG_BIT (used_spill_regs, i))
3979 spill_reg_order[i] = n_spills;
3980 spill_regs[n_spills++] = i;
3981 if (num_eliminable && ! df_regs_ever_live_p (i))
3982 something_changed = 1;
3983 df_set_regs_ever_live (i, true);
3985 else
3986 spill_reg_order[i] = -1;
/* Kick each spilled pseudo off its hard register, remembering that
   register so a later pass does not re-choose it. */
3988 EXECUTE_IF_SET_IN_REG_SET (&spilled_pseudos, FIRST_PSEUDO_REGISTER, i, rsi)
3989 if (! ira_conflicts_p || reg_renumber[i] >= 0)
3991 /* Record the current hard register the pseudo is allocated to
3992 in pseudo_previous_regs so we avoid reallocating it to the
3993 same hard reg in a later pass. */
3994 gcc_assert (reg_renumber[i] >= 0);
3996 SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]);
3997 /* Mark it as no longer having a hard register home. */
3998 reg_renumber[i] = -1;
3999 if (ira_conflicts_p)
4000 /* Inform IRA about the change. */
4001 ira_mark_allocation_change (i);
4002 /* We will need to scan everything again. */
4003 something_changed = 1;
4006 /* Retry global register allocation if possible. */
4007 if (global && ira_conflicts_p)
4009 unsigned int n;
4011 memset (pseudo_forbidden_regs, 0, max_regno * sizeof (HARD_REG_SET));
4012 /* For every insn that needs reloads, set the registers used as spill
4013 regs in pseudo_forbidden_regs for every pseudo live across the
4014 insn. */
4015 for (chain = insns_need_reload; chain; chain = chain->next_need_reload)
4017 EXECUTE_IF_SET_IN_REG_SET
4018 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
4020 IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
4021 chain->used_spill_regs);
4023 EXECUTE_IF_SET_IN_REG_SET
4024 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
4026 IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
4027 chain->used_spill_regs);
4031 /* Retry allocating the pseudos spilled in IRA and the
4032 reload. For each reg, merge the various reg sets that
4033 indicate which hard regs can't be used, and call
4034 ira_reassign_pseudos. */
4035 for (n = 0, i = FIRST_PSEUDO_REGISTER; i < (unsigned) max_regno; i++)
4036 if (reg_old_renumber[i] != reg_renumber[i])
4038 if (reg_renumber[i] < 0)
4039 temp_pseudo_reg_arr[n++] = i;
4040 else
4041 CLEAR_REGNO_REG_SET (&spilled_pseudos, i);
4043 if (ira_reassign_pseudos (temp_pseudo_reg_arr, n,
4044 bad_spill_regs_global,
4045 pseudo_forbidden_regs, pseudo_previous_regs,
4046 &spilled_pseudos))
4047 something_changed = 1;
4049 /* Fix up the register information in the insn chain.
4050 This involves deleting those of the spilled pseudos which did not get
4051 a new hard register home from the live_{before,after} sets. */
4052 for (chain = reload_insn_chain; chain; chain = chain->next)
4054 HARD_REG_SET used_by_pseudos;
4055 HARD_REG_SET used_by_pseudos2;
4057 if (! ira_conflicts_p)
4059 /* Don't do it for IRA because IRA and the reload still can
4060 assign hard registers to the spilled pseudos on next
4061 reload iterations. */
4062 AND_COMPL_REG_SET (&chain->live_throughout, &spilled_pseudos);
4063 AND_COMPL_REG_SET (&chain->dead_or_set, &spilled_pseudos);
4065 /* Mark any unallocated hard regs as available for spills. That
4066 makes inheritance work somewhat better. */
4067 if (chain->need_reload)
4069 REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
4070 REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
4071 IOR_HARD_REG_SET (used_by_pseudos, used_by_pseudos2);
4073 compute_use_by_pseudos (&used_by_pseudos, &chain->live_throughout);
4074 compute_use_by_pseudos (&used_by_pseudos, &chain->dead_or_set);
4075 /* Value of chain->used_spill_regs from previous iteration
4076 may be not included in the value calculated here because
4077 of possible removing caller-saves insns (see function
4078 delete_caller_save_insns). */
4079 COMPL_HARD_REG_SET (chain->used_spill_regs, used_by_pseudos);
4080 AND_HARD_REG_SET (chain->used_spill_regs, used_spill_regs);
4084 CLEAR_REG_SET (&changed_allocation_pseudos);
4085 /* Let alter_reg modify the reg rtx's for the modified pseudos. */
4086 for (i = FIRST_PSEUDO_REGISTER; i < (unsigned)max_regno; i++)
4088 int regno = reg_renumber[i];
4089 if (reg_old_renumber[i] == regno)
4090 continue;
4092 SET_REGNO_REG_SET (&changed_allocation_pseudos, i);
4094 alter_reg (i, reg_old_renumber[i], false);
4095 reg_old_renumber[i] = regno;
4096 if (dump_file)
4098 if (regno == -1)
4099 fprintf (dump_file, " Register %d now on stack.\n\n", i);
4100 else
4101 fprintf (dump_file, " Register %d now in %d.\n\n",
4102 i, reg_renumber[i]);
4106 return something_changed;
4109 /* Find all paradoxical subregs within X and update reg_max_ref_width. */
4111 static void
4112 scan_paradoxical_subregs (rtx x)
4114 int i;
4115 const char *fmt;
4116 enum rtx_code code = GET_CODE (x);
4118 switch (code)
4120 case REG:
4121 case CONST_INT:
4122 case CONST:
4123 case SYMBOL_REF:
4124 case LABEL_REF:
4125 case CONST_DOUBLE:
4126 case CONST_FIXED:
4127 case CONST_VECTOR: /* shouldn't happen, but just in case. */
4128 case CC0:
4129 case PC:
4130 case USE:
4131 case CLOBBER:
4132 return;
4134 case SUBREG:
4135 if (REG_P (SUBREG_REG (x))
4136 && (GET_MODE_SIZE (GET_MODE (x))
4137 > reg_max_ref_width[REGNO (SUBREG_REG (x))]))
4139 reg_max_ref_width[REGNO (SUBREG_REG (x))]
4140 = GET_MODE_SIZE (GET_MODE (x));
4141 mark_home_live_1 (REGNO (SUBREG_REG (x)), GET_MODE (x));
4143 return;
4145 default:
4146 break;
4149 fmt = GET_RTX_FORMAT (code);
4150 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4152 if (fmt[i] == 'e')
4153 scan_paradoxical_subregs (XEXP (x, i));
4154 else if (fmt[i] == 'E')
4156 int j;
4157 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4158 scan_paradoxical_subregs (XVECEXP (x, i, j));
4163 /* A subroutine of reload_as_needed. If INSN has a REG_EH_REGION note,
4164 examine all of the reload insns between PREV and NEXT exclusive, and
4165 annotate all that may trap. */
4167 static void
4168 fixup_eh_region_note (rtx insn, rtx prev, rtx next)
4170 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
4171 if (note == NULL)
4172 return;
4173 if (!insn_could_throw_p (insn))
4174 remove_note (insn, note);
4175 copy_reg_eh_region_note_forward (note, NEXT_INSN (prev), next);
4178 /* Reload pseudo-registers into hard regs around each insn as needed.
4179 Additional register load insns are output before the insn that needs it
4180 and perhaps store insns after insns that modify the reloaded pseudo reg.
4182 reg_last_reload_reg and reg_reloaded_contents keep track of
4183 which registers are already available in reload registers.
4184 We update these for the reloads that we perform,
4185 as the insns are scanned.
LIVE_KNOWN is forwarded unchanged to find_reloads; presumably nonzero
when register liveness information is trustworthy — confirm at caller. */
4187 static void
4188 reload_as_needed (int live_known)
4190 struct insn_chain *chain;
4191 #if defined (AUTO_INC_DEC)
4192 int i;
4193 #endif
4194 rtx x;
/* Reset all per-function reload-tracking state before the scan. */
4196 memset (spill_reg_rtx, 0, sizeof spill_reg_rtx);
4197 memset (spill_reg_store, 0, sizeof spill_reg_store);
4198 reg_last_reload_reg = XCNEWVEC (rtx, max_regno);
4199 INIT_REG_SET (&reg_has_output_reload);
4200 CLEAR_HARD_REG_SET (reg_reloaded_valid);
4201 CLEAR_HARD_REG_SET (reg_reloaded_call_part_clobbered);
4203 set_initial_elim_offsets ();
4205 for (chain = reload_insn_chain; chain; chain = chain->next)
4207 rtx prev = 0;
4208 rtx insn = chain->insn;
4209 rtx old_next = NEXT_INSN (insn);
4210 #ifdef AUTO_INC_DEC
4211 rtx old_prev = PREV_INSN (insn);
4212 #endif
4214 /* If we pass a label, copy the offsets from the label information
4215 into the current offsets of each elimination. */
4216 if (LABEL_P (insn))
4217 set_offsets_for_label (insn);
4219 else if (INSN_P (insn))
4221 regset_head regs_to_forget;
4222 INIT_REG_SET (&regs_to_forget);
4223 note_stores (PATTERN (insn), forget_old_reloads_1, &regs_to_forget);
4225 /* If this is a USE and CLOBBER of a MEM, ensure that any
4226 references to eliminable registers have been removed. */
4228 if ((GET_CODE (PATTERN (insn)) == USE
4229 || GET_CODE (PATTERN (insn)) == CLOBBER)
4230 && MEM_P (XEXP (PATTERN (insn), 0)))
4231 XEXP (XEXP (PATTERN (insn), 0), 0)
4232 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
4233 GET_MODE (XEXP (PATTERN (insn), 0)),
4234 NULL_RTX);
4236 /* If we need to do register elimination processing, do so.
4237 This might delete the insn, in which case we are done. */
4238 if ((num_eliminable || num_eliminable_invariants) && chain->need_elim)
4240 eliminate_regs_in_insn (insn, 1);
4241 if (NOTE_P (insn))
/* The insn was deleted (turned into a note): just track offsets. */
4243 update_eliminable_offsets ();
4244 CLEAR_REG_SET (&regs_to_forget);
4245 continue;
4249 /* If need_elim is nonzero but need_reload is zero, one might think
4250 that we could simply set n_reloads to 0. However, find_reloads
4251 could have done some manipulation of the insn (such as swapping
4252 commutative operands), and these manipulations are lost during
4253 the first pass for every insn that needs register elimination.
4254 So the actions of find_reloads must be redone here. */
4256 if (! chain->need_elim && ! chain->need_reload
4257 && ! chain->need_operand_change)
4258 n_reloads = 0;
4259 /* First find the pseudo regs that must be reloaded for this insn.
4260 This info is returned in the tables reload_... (see reload.h).
4261 Also modify the body of INSN by substituting RELOAD
4262 rtx's for those pseudo regs. */
4263 else
4265 CLEAR_REG_SET (&reg_has_output_reload);
4266 CLEAR_HARD_REG_SET (reg_is_output_reload);
4268 find_reloads (insn, 1, spill_indirect_levels, live_known,
4269 spill_reg_order);
4272 if (n_reloads > 0)
4274 rtx next = NEXT_INSN (insn);
4275 rtx p;
4277 prev = PREV_INSN (insn);
4279 /* Now compute which reload regs to reload them into. Perhaps
4280 reusing reload regs from previous insns, or else output
4281 load insns to reload them. Maybe output store insns too.
4282 Record the choices of reload reg in reload_reg_rtx. */
4283 choose_reload_regs (chain);
4285 /* Merge any reloads that we didn't combine for fear of
4286 increasing the number of spill registers needed but now
4287 discover can be safely merged. */
4288 if (targetm.small_register_classes_for_mode_p (VOIDmode))
4289 merge_assigned_reloads (insn);
4291 /* Generate the insns to reload operands into or out of
4292 their reload regs. */
4293 emit_reload_insns (chain);
4295 /* Substitute the chosen reload regs from reload_reg_rtx
4296 into the insn's body (or perhaps into the bodies of other
4297 load and store insn that we just made for reloading
4298 and that we moved the structure into). */
4299 subst_reloads (insn);
4301 /* Adjust the exception region notes for loads and stores. */
4302 if (cfun->can_throw_non_call_exceptions && !CALL_P (insn))
4303 fixup_eh_region_note (insn, prev, next);
4305 /* If this was an ASM, make sure that all the reload insns
4306 we have generated are valid. If not, give an error
4307 and delete them. */
4308 if (asm_noperands (PATTERN (insn)) >= 0)
4309 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
4310 if (p != insn && INSN_P (p)
4311 && GET_CODE (PATTERN (p)) != USE
4312 && (recog_memoized (p) < 0
4313 || (extract_insn (p), ! constrain_operands (1))))
4315 error_for_asm (insn,
4316 "%<asm%> operand requires "
4317 "impossible reload");
4318 delete_insn (p);
4322 if (num_eliminable && chain->need_elim)
4323 update_eliminable_offsets ();
4325 /* Any previously reloaded spilled pseudo reg, stored in this insn,
4326 is no longer validly lying around to save a future reload.
4327 Note that this does not detect pseudos that were reloaded
4328 for this insn in order to be stored in
4329 (obeying register constraints). That is correct; such reload
4330 registers ARE still valid. */
4331 forget_marked_reloads (&regs_to_forget);
4332 CLEAR_REG_SET (&regs_to_forget);
4334 /* There may have been CLOBBER insns placed after INSN. So scan
4335 between INSN and NEXT and use them to forget old reloads. */
4336 for (x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
4337 if (NONJUMP_INSN_P (x) && GET_CODE (PATTERN (x)) == CLOBBER)
4338 note_stores (PATTERN (x), forget_old_reloads_1, NULL);
4340 #ifdef AUTO_INC_DEC
4341 /* Likewise for regs altered by auto-increment in this insn.
4342 REG_INC notes have been changed by reloading:
4343 find_reloads_address_1 records substitutions for them,
4344 which have been performed by subst_reloads above. */
4345 for (i = n_reloads - 1; i >= 0; i--)
4347 rtx in_reg = rld[i].in_reg;
4348 if (in_reg)
4350 enum rtx_code code = GET_CODE (in_reg);
4351 /* PRE_INC / PRE_DEC will have the reload register ending up
4352 with the same value as the stack slot, but that doesn't
4353 hold true for POST_INC / POST_DEC. Either we have to
4354 convert the memory access to a true POST_INC / POST_DEC,
4355 or we can't use the reload register for inheritance. */
4356 if ((code == POST_INC || code == POST_DEC)
4357 && TEST_HARD_REG_BIT (reg_reloaded_valid,
4358 REGNO (rld[i].reg_rtx))
4359 /* Make sure it is the inc/dec pseudo, and not
4360 some other (e.g. output operand) pseudo. */
4361 && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4362 == REGNO (XEXP (in_reg, 0))))
4365 rtx reload_reg = rld[i].reg_rtx;
4366 enum machine_mode mode = GET_MODE (reload_reg);
4367 int n = 0;
4368 rtx p;
/* Walk backward over the insns emitted for this reload looking for
   the single use of the reload reg that can become a POST_INC/DEC. */
4370 for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p))
4372 /* We really want to ignore REG_INC notes here, so
4373 use PATTERN (p) as argument to reg_set_p . */
4374 if (reg_set_p (reload_reg, PATTERN (p)))
4375 break;
4376 n = count_occurrences (PATTERN (p), reload_reg, 0);
4377 if (! n)
4378 continue;
4379 if (n == 1)
4381 rtx replace_reg
4382 = gen_rtx_fmt_e (code, mode, reload_reg);
4384 validate_replace_rtx_group (reload_reg,
4385 replace_reg, p);
4386 n = verify_changes (0);
4388 /* We must also verify that the constraints
4389 are met after the replacement. Make sure
4390 extract_insn is only called for an insn
4391 where the replacements were found to be
4392 valid so far. */
4393 if (n)
4395 extract_insn (p);
4396 n = constrain_operands (1);
4399 /* If the constraints were not met, then
4400 undo the replacement, else confirm it. */
4401 if (!n)
4402 cancel_changes (0);
4403 else
4404 confirm_change_group ();
4406 break;
4408 if (n == 1)
4410 add_reg_note (p, REG_INC, reload_reg);
4411 /* Mark this as having an output reload so that the
4412 REG_INC processing code below won't invalidate
4413 the reload for inheritance. */
4414 SET_HARD_REG_BIT (reg_is_output_reload,
4415 REGNO (reload_reg));
4416 SET_REGNO_REG_SET (&reg_has_output_reload,
4417 REGNO (XEXP (in_reg, 0)));
4419 else
4420 forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX,
4421 NULL);
4423 else if ((code == PRE_INC || code == PRE_DEC)
4424 && TEST_HARD_REG_BIT (reg_reloaded_valid,
4425 REGNO (rld[i].reg_rtx))
4426 /* Make sure it is the inc/dec pseudo, and not
4427 some other (e.g. output operand) pseudo. */
4428 && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4429 == REGNO (XEXP (in_reg, 0))))
4431 SET_HARD_REG_BIT (reg_is_output_reload,
4432 REGNO (rld[i].reg_rtx));
4433 SET_REGNO_REG_SET (&reg_has_output_reload,
4434 REGNO (XEXP (in_reg, 0)));
4436 else if (code == PRE_INC || code == PRE_DEC
4437 || code == POST_INC || code == POST_DEC)
4439 int in_regno = REGNO (XEXP (in_reg, 0));
4441 if (reg_last_reload_reg[in_regno] != NULL_RTX)
4443 int in_hard_regno;
4444 bool forget_p = true;
4446 in_hard_regno = REGNO (reg_last_reload_reg[in_regno]);
4447 if (TEST_HARD_REG_BIT (reg_reloaded_valid,
4448 in_hard_regno))
4450 for (x = old_prev ? NEXT_INSN (old_prev) : insn;
4451 x != old_next;
4452 x = NEXT_INSN (x))
4453 if (x == reg_reloaded_insn[in_hard_regno])
4455 forget_p = false;
4456 break;
4459 /* If for some reasons, we didn't set up
4460 reg_last_reload_reg in this insn,
4461 invalidate inheritance from previous
4462 insns for the incremented/decremented
4463 register. Such registers will be not in
4464 reg_has_output_reload. Invalidate it
4465 also if the corresponding element in
4466 reg_reloaded_insn is also
4467 invalidated. */
4468 if (forget_p)
4469 forget_old_reloads_1 (XEXP (in_reg, 0),
4470 NULL_RTX, NULL);
4475 /* If a pseudo that got a hard register is auto-incremented,
4476 we must purge records of copying it into pseudos without
4477 hard registers. */
4478 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
4479 if (REG_NOTE_KIND (x) == REG_INC)
4481 /* See if this pseudo reg was reloaded in this insn.
4482 If so, its last-reload info is still valid
4483 because it is based on this insn's reload. */
4484 for (i = 0; i < n_reloads; i++)
4485 if (rld[i].out == XEXP (x, 0))
4486 break;
4488 if (i == n_reloads)
4489 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX, NULL);
4491 #endif
4493 /* A reload reg's contents are unknown after a label. */
4494 if (LABEL_P (insn))
4495 CLEAR_HARD_REG_SET (reg_reloaded_valid);
4497 /* Don't assume a reload reg is still good after a call insn
4498 if it is a call-used reg, or if it contains a value that will
4499 be partially clobbered by the call. */
4500 else if (CALL_P (insn))
4502 AND_COMPL_HARD_REG_SET (reg_reloaded_valid, call_used_reg_set);
4503 AND_COMPL_HARD_REG_SET (reg_reloaded_valid, reg_reloaded_call_part_clobbered);
4507 /* Clean up. */
4508 free (reg_last_reload_reg);
4509 CLEAR_REG_SET (&reg_has_output_reload);
4512 /* Discard all record of any value reloaded from X,
4513 or reloaded in X from someplace else;
4514 unless X is an output reload reg of the current insn.
4516 X may be a hard reg (the reload reg)
4517 or it may be a pseudo reg that was reloaded from.
4519 When DATA is non-NULL just mark the registers in regset
4520 to be forgotten later.
Signature matches the note_stores callback convention; IGNORED is the
value being stored and is unused here. */
4522 static void
4523 forget_old_reloads_1 (rtx x, const_rtx ignored ATTRIBUTE_UNUSED,
4524 void *data)
4526 unsigned int regno;
4527 unsigned int nr;
4528 regset regs = (regset) data;
4530 /* note_stores does give us subregs of hard regs,
4531 subreg_regno_offset requires a hard reg. */
4532 while (GET_CODE (x) == SUBREG)
4534 /* We ignore the subreg offset when calculating the regno,
4535 because we are using the entire underlying hard register
4536 below. */
4537 x = SUBREG_REG (x);
4540 if (!REG_P (x))
4541 return;
4543 regno = REGNO (x);
/* A pseudo occupies one tracking slot; a hard reg may span several. */
4545 if (regno >= FIRST_PSEUDO_REGISTER)
4546 nr = 1;
4547 else
4549 unsigned int i;
4551 nr = hard_regno_nregs[regno][GET_MODE (x)];
4552 /* Storing into a spilled-reg invalidates its contents.
4553 This can happen if a block-local pseudo is allocated to that reg
4554 and it wasn't spilled because this block's total need is 0.
4555 Then some insn might have an optional reload and use this reg. */
4556 if (!regs)
4557 for (i = 0; i < nr; i++)
4558 /* But don't do this if the reg actually serves as an output
4559 reload reg in the current instruction. */
4560 if (n_reloads == 0
4561 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
4563 CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
4564 spill_reg_store[regno + i] = 0;
4568 if (regs)
/* Deferred mode: just record the regnos; forget_marked_reloads will
   do the actual invalidation later. */
4569 while (nr-- > 0)
4570 SET_REGNO_REG_SET (regs, regno + nr)
4571 else
4573 /* Since value of X has changed,
4574 forget any value previously copied from it. */
4576 while (nr-- > 0)
4577 /* But don't forget a copy if this is the output reload
4578 that establishes the copy's validity. */
4579 if (n_reloads == 0
4580 || !REGNO_REG_SET_P (&reg_has_output_reload, regno + nr))
4581 reg_last_reload_reg[regno + nr] = 0;
4585 /* Forget the reloads marked in regset by previous function. */
4586 static void
4587 forget_marked_reloads (regset regs)
4589 unsigned int reg;
4590 reg_set_iterator rsi;
4591 EXECUTE_IF_SET_IN_REG_SET (regs, 0, reg, rsi)
4593 if (reg < FIRST_PSEUDO_REGISTER
4594 /* But don't do this if the reg actually serves as an output
4595 reload reg in the current instruction. */
4596 && (n_reloads == 0
4597 || ! TEST_HARD_REG_BIT (reg_is_output_reload, reg)))
4599 CLEAR_HARD_REG_BIT (reg_reloaded_valid, reg);
4600 spill_reg_store[reg] = 0;
4602 if (n_reloads == 0
4603 || !REGNO_REG_SET_P (&reg_has_output_reload, reg))
4604 reg_last_reload_reg[reg] = 0;
4608 /* The following HARD_REG_SETs indicate when each hard register is
4609 used for a reload of various parts of the current insn. */
4611 /* If reg is unavailable for all reloads. */
4612 static HARD_REG_SET reload_reg_unavailable;
4613 /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4614 static HARD_REG_SET reload_reg_used;
4615 /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4616 static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4617 /* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I. */
4618 static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
4619 /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4620 static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4621 /* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I. */
4622 static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
4623 /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4624 static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4625 /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4626 static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4627 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4628 static HARD_REG_SET reload_reg_used_in_op_addr;
4629 /* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4630 static HARD_REG_SET reload_reg_used_in_op_addr_reload;
4631 /* If reg is in use for a RELOAD_FOR_INSN reload. */
4632 static HARD_REG_SET reload_reg_used_in_insn;
4633 /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4634 static HARD_REG_SET reload_reg_used_in_other_addr;
4636 /* If reg is in use as a reload reg for any sort of reload. */
4637 static HARD_REG_SET reload_reg_used_at_all;
4639 /* If reg is used as an inherited reload. We just mark the first register
4640 in the group. */
4641 static HARD_REG_SET reload_reg_used_for_inherit;
4643 /* Records which hard regs are used in any way, either as explicit use or
4644 by being allocated to a pseudo during any point of the current insn. */
4645 static HARD_REG_SET reg_used_in_insn;
4647 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4648 TYPE. MODE is used to indicate how many consecutive regs are
4649 actually used. */
4651 static void
4652 mark_reload_reg_in_use (unsigned int regno, int opnum, enum reload_type type,
4653 enum machine_mode mode)
4655 unsigned int nregs = hard_regno_nregs[regno][mode];
4656 unsigned int i;
4658 for (i = regno; i < nregs + regno; i++)
4660 switch (type)
4662 case RELOAD_OTHER:
4663 SET_HARD_REG_BIT (reload_reg_used, i);
4664 break;
4666 case RELOAD_FOR_INPUT_ADDRESS:
4667 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4668 break;
4670 case RELOAD_FOR_INPADDR_ADDRESS:
4671 SET_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4672 break;
4674 case RELOAD_FOR_OUTPUT_ADDRESS:
4675 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4676 break;
4678 case RELOAD_FOR_OUTADDR_ADDRESS:
4679 SET_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4680 break;
4682 case RELOAD_FOR_OPERAND_ADDRESS:
4683 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4684 break;
4686 case RELOAD_FOR_OPADDR_ADDR:
4687 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4688 break;
4690 case RELOAD_FOR_OTHER_ADDRESS:
4691 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4692 break;
4694 case RELOAD_FOR_INPUT:
4695 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4696 break;
4698 case RELOAD_FOR_OUTPUT:
4699 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4700 break;
4702 case RELOAD_FOR_INSN:
4703 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
4704 break;
4707 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4711 /* Similarly, but show REGNO is no longer in use for a reload. */
4713 static void
4714 clear_reload_reg_in_use (unsigned int regno, int opnum,
4715 enum reload_type type, enum machine_mode mode)
4717 unsigned int nregs = hard_regno_nregs[regno][mode];
4718 unsigned int start_regno, end_regno, r;
4719 int i;
4720 /* A complication is that for some reload types, inheritance might
4721 allow multiple reloads of the same types to share a reload register.
4722 We set check_opnum if we have to check only reloads with the same
4723 operand number, and check_any if we have to check all reloads. */
4724 int check_opnum = 0;
4725 int check_any = 0;
4726 HARD_REG_SET *used_in_set;
/* Select the HARD_REG_SET that tracks reloads of this TYPE. */
4728 switch (type)
4730 case RELOAD_OTHER:
4731 used_in_set = &reload_reg_used;
4732 break;
4734 case RELOAD_FOR_INPUT_ADDRESS:
4735 used_in_set = &reload_reg_used_in_input_addr[opnum];
4736 break;
4738 case RELOAD_FOR_INPADDR_ADDRESS:
4739 check_opnum = 1;
4740 used_in_set = &reload_reg_used_in_inpaddr_addr[opnum];
4741 break;
4743 case RELOAD_FOR_OUTPUT_ADDRESS:
4744 used_in_set = &reload_reg_used_in_output_addr[opnum];
4745 break;
4747 case RELOAD_FOR_OUTADDR_ADDRESS:
4748 check_opnum = 1;
4749 used_in_set = &reload_reg_used_in_outaddr_addr[opnum];
4750 break;
4752 case RELOAD_FOR_OPERAND_ADDRESS:
4753 used_in_set = &reload_reg_used_in_op_addr;
4754 break;
4756 case RELOAD_FOR_OPADDR_ADDR:
4757 check_any = 1;
4758 used_in_set = &reload_reg_used_in_op_addr_reload;
4759 break;
4761 case RELOAD_FOR_OTHER_ADDRESS:
4762 used_in_set = &reload_reg_used_in_other_addr;
4763 check_any = 1;
4764 break;
4766 case RELOAD_FOR_INPUT:
4767 used_in_set = &reload_reg_used_in_input[opnum];
4768 break;
4770 case RELOAD_FOR_OUTPUT:
4771 used_in_set = &reload_reg_used_in_output[opnum];
4772 break;
4774 case RELOAD_FOR_INSN:
4775 used_in_set = &reload_reg_used_in_insn;
4776 break;
4777 default:
4778 gcc_unreachable ();
4780 /* We resolve conflicts with remaining reloads of the same type by
4781 excluding the intervals of reload registers by them from the
4782 interval of freed reload registers. Since we only keep track of
4783 one set of interval bounds, we might have to exclude somewhat
4784 more than what would be necessary if we used a HARD_REG_SET here.
4785 But this should only happen very infrequently, so there should
4786 be no reason to worry about it. */
4788 start_regno = regno;
4789 end_regno = regno + nregs;
4790 if (check_opnum || check_any)
4792 for (i = n_reloads - 1; i >= 0; i--)
4794 if (rld[i].when_needed == type
4795 && (check_any || rld[i].opnum == opnum)
4796 && rld[i].reg_rtx)
4798 unsigned int conflict_start = true_regnum (rld[i].reg_rtx);
4799 unsigned int conflict_end
4800 = end_hard_regno (rld[i].mode, conflict_start);
4802 /* If there is an overlap with the first to-be-freed register,
4803 adjust the interval start. */
4804 if (conflict_start <= start_regno && conflict_end > start_regno)
4805 start_regno = conflict_end;
4806 /* Otherwise, if there is a conflict with one of the other
4807 to-be-freed registers, adjust the interval end. */
4808 if (conflict_start > start_regno && conflict_start < end_regno)
4809 end_regno = conflict_start;
/* Clear the (possibly narrowed) interval of freed reload registers. */
4814 for (r = start_regno; r < end_regno; r++)
4815 CLEAR_HARD_REG_BIT (*used_in_set, r);
4818 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
4819 specified by OPNUM and TYPE. */
4821 static int
4822 reload_reg_free_p (unsigned int regno, int opnum, enum reload_type type)
4824 int i;
4826 /* In use for a RELOAD_OTHER means it's not available for anything. */
4827 if (TEST_HARD_REG_BIT (reload_reg_used, regno)
4828 || TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
4829 return 0;
4831 switch (type)
4833 case RELOAD_OTHER:
4834 /* In use for anything means we can't use it for RELOAD_OTHER. */
4835 if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4836 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4837 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
4838 || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4839 return 0;
4841 for (i = 0; i < reload_n_operands; i++)
4842 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4843 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4844 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4845 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4846 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4847 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4848 return 0;
4850 return 1;
4852 case RELOAD_FOR_INPUT:
4853 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4854 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4855 return 0;
4857 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4858 return 0;
4860 /* If it is used for some other input, can't use it. */
4861 for (i = 0; i < reload_n_operands; i++)
4862 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4863 return 0;
4865 /* If it is used in a later operand's address, can't use it. */
4866 for (i = opnum + 1; i < reload_n_operands; i++)
4867 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4868 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4869 return 0;
4871 return 1;
4873 case RELOAD_FOR_INPUT_ADDRESS:
4874 /* Can't use a register if it is used for an input address for this
4875 operand or used as an input in an earlier one. */
4876 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
4877 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4878 return 0;
4880 for (i = 0; i < opnum; i++)
4881 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4882 return 0;
4884 return 1;
4886 case RELOAD_FOR_INPADDR_ADDRESS:
4887 /* Can't use a register if it is used for an input address
4888 for this operand or used as an input in an earlier
4889 one. */
4890 if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4891 return 0;
4893 for (i = 0; i < opnum; i++)
4894 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4895 return 0;
4897 return 1;
4899 case RELOAD_FOR_OUTPUT_ADDRESS:
4900 /* Can't use a register if it is used for an output address for this
4901 operand or used as an output in this or a later operand. Note
4902 that multiple output operands are emitted in reverse order, so
4903 the conflicting ones are those with lower indices. */
4904 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4905 return 0;
4907 for (i = 0; i <= opnum; i++)
4908 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4909 return 0;
4911 return 1;
4913 case RELOAD_FOR_OUTADDR_ADDRESS:
4914 /* Can't use a register if it is used for an output address
4915 for this operand or used as an output in this or a
4916 later operand. Note that multiple output operands are
4917 emitted in reverse order, so the conflicting ones are
4918 those with lower indices. */
4919 if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4920 return 0;
4922 for (i = 0; i <= opnum; i++)
4923 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4924 return 0;
4926 return 1;
4928 case RELOAD_FOR_OPERAND_ADDRESS:
4929 for (i = 0; i < reload_n_operands; i++)
4930 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4931 return 0;
4933 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4934 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4936 case RELOAD_FOR_OPADDR_ADDR:
4937 for (i = 0; i < reload_n_operands; i++)
4938 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4939 return 0;
4941 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
4943 case RELOAD_FOR_OUTPUT:
4944 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4945 outputs, or an operand address for this or an earlier output.
4946 Note that multiple output operands are emitted in reverse order,
4947 so the conflicting ones are those with higher indices. */
4948 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4949 return 0;
4951 for (i = 0; i < reload_n_operands; i++)
4952 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4953 return 0;
4955 for (i = opnum; i < reload_n_operands; i++)
4956 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4957 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4958 return 0;
4960 return 1;
4962 case RELOAD_FOR_INSN:
4963 for (i = 0; i < reload_n_operands; i++)
4964 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4965 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4966 return 0;
4968 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4969 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4971 case RELOAD_FOR_OTHER_ADDRESS:
4972 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4974 default:
4975 gcc_unreachable ();
/* Return 1 if the value in reload reg REGNO, as used by a reload
   needed for the part of the insn specified by OPNUM and TYPE,
   is still available in REGNO at the end of the insn.

   We can assume that the reload reg was already tested for availability
   at the time it is needed, and we should not check this again,
   in case the reg has already been marked in use.  */

static int
reload_reg_reaches_end_p (unsigned int regno, int opnum, enum reload_type type)
{
  int i;

  switch (type)
    {
    case RELOAD_OTHER:
      /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
	 its value must reach the end.  */
      return 1;

      /* If this use is for part of the insn,
	 its value reaches if no subsequent part uses the same register.
	 Just like the above function, don't try to do this with lots
	 of fallthroughs.  */

    case RELOAD_FOR_OTHER_ADDRESS:
      /* Here we check for everything else, since these don't conflict
	 with anything else and everything comes later.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INPUT_ADDRESS:
    case RELOAD_FOR_INPADDR_ADDRESS:
      /* Similar, except that we check only for this and subsequent inputs
	 and the address of only subsequent inputs and we do not need
	 to check for RELOAD_OTHER objects since they are known not to
	 conflict.  */

      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* Only the address reloads of strictly later operands can clobber
	 us; the address reload of operand OPNUM itself was consumed
	 before the input.  */
      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
	  return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
	return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INPUT:
      /* Similar to input address, except we start at the next operand for
	 both input and input address and we do not check for
	 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
	 would conflict.  */

      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* ... fall through ...  */

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Check outputs and their addresses.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_OPADDR_ADDR:
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INSN:
      /* These conflict with other outputs with RELOAD_OTHER.  So
	 we need only check for output addresses.  */

      opnum = reload_n_operands;

      /* ... fall through ...  */

    case RELOAD_FOR_OUTPUT:
    case RELOAD_FOR_OUTPUT_ADDRESS:
    case RELOAD_FOR_OUTADDR_ADDRESS:
      /* We already know these can't conflict with a later output.  So the
	 only thing to check are later output addresses.
	 Note that multiple output operands are emitted in reverse order,
	 so the conflicting ones are those with lower indices.  */
      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
	  return 0;

      return 1;

    default:
      gcc_unreachable ();
    }
}
5114 /* Like reload_reg_reaches_end_p, but check that the condition holds for
5115 every register in the range [REGNO, REGNO + NREGS). */
5117 static bool
5118 reload_regs_reach_end_p (unsigned int regno, int nregs,
5119 int opnum, enum reload_type type)
5121 int i;
5123 for (i = 0; i < nregs; i++)
5124 if (!reload_reg_reaches_end_p (regno + i, opnum, type))
5125 return false;
5126 return true;
5130 /* Returns whether R1 and R2 are uniquely chained: the value of one
5131 is used by the other, and that value is not used by any other
5132 reload for this insn. This is used to partially undo the decision
5133 made in find_reloads when in the case of multiple
5134 RELOAD_FOR_OPERAND_ADDRESS reloads it converts all
5135 RELOAD_FOR_OPADDR_ADDR reloads into RELOAD_FOR_OPERAND_ADDRESS
5136 reloads. This code tries to avoid the conflict created by that
5137 change. It might be cleaner to explicitly keep track of which
5138 RELOAD_FOR_OPADDR_ADDR reload is associated with which
5139 RELOAD_FOR_OPERAND_ADDRESS reload, rather than to try to detect
5140 this after the fact. */
5141 static bool
5142 reloads_unique_chain_p (int r1, int r2)
5144 int i;
5146 /* We only check input reloads. */
5147 if (! rld[r1].in || ! rld[r2].in)
5148 return false;
5150 /* Avoid anything with output reloads. */
5151 if (rld[r1].out || rld[r2].out)
5152 return false;
5154 /* "chained" means one reload is a component of the other reload,
5155 not the same as the other reload. */
5156 if (rld[r1].opnum != rld[r2].opnum
5157 || rtx_equal_p (rld[r1].in, rld[r2].in)
5158 || rld[r1].optional || rld[r2].optional
5159 || ! (reg_mentioned_p (rld[r1].in, rld[r2].in)
5160 || reg_mentioned_p (rld[r2].in, rld[r1].in)))
5161 return false;
5163 for (i = 0; i < n_reloads; i ++)
5164 /* Look for input reloads that aren't our two */
5165 if (i != r1 && i != r2 && rld[i].in)
5167 /* If our reload is mentioned at all, it isn't a simple chain. */
5168 if (reg_mentioned_p (rld[r1].in, rld[i].in))
5169 return false;
5171 return true;
5174 /* The recursive function change all occurrences of WHAT in *WHERE
5175 to REPL. */
5176 static void
5177 substitute (rtx *where, const_rtx what, rtx repl)
5179 const char *fmt;
5180 int i;
5181 enum rtx_code code;
5183 if (*where == 0)
5184 return;
5186 if (*where == what || rtx_equal_p (*where, what))
5188 /* Record the location of the changed rtx. */
5189 VEC_safe_push (rtx_p, heap, substitute_stack, where);
5190 *where = repl;
5191 return;
5194 code = GET_CODE (*where);
5195 fmt = GET_RTX_FORMAT (code);
5196 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5198 if (fmt[i] == 'E')
5200 int j;
5202 for (j = XVECLEN (*where, i) - 1; j >= 0; j--)
5203 substitute (&XVECEXP (*where, i, j), what, repl);
5205 else if (fmt[i] == 'e')
5206 substitute (&XEXP (*where, i), what, repl);
/* The function returns TRUE if chain of reload R1 and R2 (in any
   order) can be evaluated without usage of intermediate register for
   the reload containing another reload.  It is important to see
   gen_reload to understand what the function is trying to do.  As an
   example, let us have reload chain

      r2: const
      r1: <something> + const

   and reload R2 got reload reg HR.  The function returns true if
   there is a correct insn HR = HR + <something>.  Otherwise,
   gen_reload will use intermediate register (and this is the reload
   reg for R1) to reload <something>.

   We need this function to find a conflict for chain reloads.  In our
   example, if HR = HR + <something> is incorrect insn, then we cannot
   use HR as a reload register for R2.  If we do use it then we get a
   wrong code:

      HR = const
      HR = <something>
      HR = HR + HR
*/
static bool
gen_reload_chain_without_interm_reg_p (int r1, int r2)
{
  /* Assume other cases in gen_reload are not possible for
     chain reloads or do need an intermediate hard registers.  */
  bool result = true;
  int regno, n, code;
  rtx out, in, tem, insn;
  rtx last = get_last_insn ();

  /* Make r2 a component of r1 (swap if needed so that R2's value is
     mentioned inside R1's value, not the other way around).  */
  if (reg_mentioned_p (rld[r1].in, rld[r2].in))
    {
      n = r1;
      r1 = r2;
      r2 = n;
    }
  gcc_assert (reg_mentioned_p (rld[r2].in, rld[r1].in));
  regno = rld[r1].regno >= 0 ? rld[r1].regno : rld[r2].regno;
  gcc_assert (regno >= 0);
  out = gen_rtx_REG (rld[r1].mode, regno);
  in = rld[r1].in;
  /* Replace R2's value inside IN by the shared hard reg; the changed
     locations are pushed on substitute_stack and undone below.  */
  substitute (&in, rld[r2].in, gen_rtx_REG (rld[r2].mode, regno));

  /* If IN is a paradoxical SUBREG, remove it and try to put the
     opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.  */
  if (GET_CODE (in) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (in))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
      && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
    in = SUBREG_REG (in), out = tem;

  /* Only the PLUS shape handled by gen_reload can avoid an intermediate
     register; anything else keeps the conservative RESULT = true.  */
  if (GET_CODE (in) == PLUS
      && (REG_P (XEXP (in, 0))
	  || GET_CODE (XEXP (in, 0)) == SUBREG
	  || MEM_P (XEXP (in, 0)))
      && (REG_P (XEXP (in, 1))
	  || GET_CODE (XEXP (in, 1)) == SUBREG
	  || CONSTANT_P (XEXP (in, 1))
	  || MEM_P (XEXP (in, 1))))
    {
      /* Tentatively emit OUT = IN and ask the target whether it is a
	 recognizable, constraint-valid insn; the insn is deleted again
	 below regardless of the answer.  */
      insn = emit_insn (gen_rtx_SET (VOIDmode, out, in));
      code = recog_memoized (insn);
      result = false;

      if (code >= 0)
	{
	  extract_insn (insn);
	  /* We want constrain operands to treat this insn strictly in
	     its validity determination, i.e., the way it would after
	     reload has completed.  */
	  result = constrain_operands (1);
	}

      delete_insns_since (last);
    }

  /* Restore the original value at each changed address within R1.  */
  while (!VEC_empty (rtx_p, substitute_stack))
    {
      rtx *where = VEC_pop (rtx_p, substitute_stack);
      *where = rld[r2].in;
    }

  return result;
}
/* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
   Return 0 otherwise.

   This function uses the same algorithm as reload_reg_free_p above.  */

static int
reloads_conflict (int r1, int r2)
{
  enum reload_type r1_type = rld[r1].when_needed;
  enum reload_type r2_type = rld[r2].when_needed;
  int r1_opnum = rld[r1].opnum;
  int r2_opnum = rld[r2].opnum;

  /* RELOAD_OTHER conflicts with everything.  */
  if (r2_type == RELOAD_OTHER)
    return 1;

  /* Otherwise, check conflicts differently for each type.  Each case
     must stay in sync with the corresponding case of reload_reg_free_p;
     note that output operands are emitted in reverse order, which is why
     the output cases compare opnums with >= / <= rather than < / >.  */

  switch (r1_type)
    {
    case RELOAD_FOR_INPUT:
      return (r2_type == RELOAD_FOR_INSN
	      || r2_type == RELOAD_FOR_OPERAND_ADDRESS
	      || r2_type == RELOAD_FOR_OPADDR_ADDR
	      || r2_type == RELOAD_FOR_INPUT
	      || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
		   || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
		  && r2_opnum > r1_opnum));

    case RELOAD_FOR_INPUT_ADDRESS:
      return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
	      || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));

    case RELOAD_FOR_INPADDR_ADDRESS:
      return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
	      || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));

    case RELOAD_FOR_OUTPUT_ADDRESS:
      return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
	      || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));

    case RELOAD_FOR_OUTADDR_ADDRESS:
      return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
	      || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));

    case RELOAD_FOR_OPERAND_ADDRESS:
      return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
	      || (r2_type == RELOAD_FOR_OPERAND_ADDRESS
		  && (!reloads_unique_chain_p (r1, r2)
		      || !gen_reload_chain_without_interm_reg_p (r1, r2))));

    case RELOAD_FOR_OPADDR_ADDR:
      return (r2_type == RELOAD_FOR_INPUT
	      || r2_type == RELOAD_FOR_OPADDR_ADDR);

    case RELOAD_FOR_OUTPUT:
      return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
	      || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
		   || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
		  && r2_opnum >= r1_opnum));

    case RELOAD_FOR_INSN:
      return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
	      || r2_type == RELOAD_FOR_INSN
	      || r2_type == RELOAD_FOR_OPERAND_ADDRESS);

    case RELOAD_FOR_OTHER_ADDRESS:
      return r2_type == RELOAD_FOR_OTHER_ADDRESS;

    case RELOAD_OTHER:
      return 1;

    default:
      gcc_unreachable ();
    }
}
/* Per-reload bookkeeping used while choosing and emitting reload insns
   for a single instruction.  All arrays are indexed by reload number.  */

/* Indexed by reload number, 1 if incoming value
   inherited from previous insns.  */
static char reload_inherited[MAX_RELOADS];

/* For an inherited reload, this is the insn the reload was inherited from,
   if we know it.  Otherwise, this is 0.  */
static rtx reload_inheritance_insn[MAX_RELOADS];

/* If nonzero, this is a place to get the value of the reload,
   rather than using reload_in.  */
static rtx reload_override_in[MAX_RELOADS];

/* For each reload, the hard register number of the register used,
   or -1 if we did not need a register for this reload.  */
static int reload_spill_index[MAX_RELOADS];

/* Index X is the value of rld[X].reg_rtx, adjusted for the input mode.  */
static rtx reload_reg_rtx_for_input[MAX_RELOADS];

/* Index X is the value of rld[X].reg_rtx, adjusted for the output mode.  */
static rtx reload_reg_rtx_for_output[MAX_RELOADS];
/* Subroutine of free_for_value_p, used to check a single register.
   START_REGNO is the starting regno of the full reload register
   (possibly comprising multiple hard registers) that we are considering.  */

static int
reload_reg_free_for_value_p (int start_regno, int regno, int opnum,
			     enum reload_type type, rtx value, rtx out,
			     int reloadnum, int ignore_address_reloads)
{
  int time1;
  /* Set if we see an input reload that must not share its reload register
     with any new earlyclobber, but might otherwise share the reload
     register with an output or input-output reload.  */
  int check_earlyclobber = 0;
  int i;
  int copy = 0;

  if (TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
    return 0;

  /* OUT == const0_rtx is a flag from the caller: we are testing a copy
     FROM regno into the reload reg, not a use OF regno as reload reg.  */
  if (out == const0_rtx)
    {
      copy = 1;
      out = NULL_RTX;
    }

  /* We use some pseudo 'time' value to check if the lifetimes of the
     new register use would overlap with the one of a previous reload
     that is not read-only or uses a different value.
     The 'time' used doesn't have to be linear in any shape or form, just
     monotonic.
     Some reload types use different 'buckets' for each operand.
     So there are MAX_RECOG_OPERANDS different time values for each
     such reload type.
     We compute TIME1 as the time when the register for the prospective
     new reload ceases to be live, and TIME2 for each existing
     reload as the time when that the reload register of that reload
     becomes live.
     Where there is little to be gained by exact lifetime calculations,
     we just make conservative assumptions, i.e. a longer lifetime;
     this is done in the 'default:' cases.  */
  switch (type)
    {
    case RELOAD_FOR_OTHER_ADDRESS:
      /* RELOAD_FOR_OTHER_ADDRESS conflicts with RELOAD_OTHER reloads.  */
      time1 = copy ? 0 : 1;
      break;
    case RELOAD_OTHER:
      time1 = copy ? 1 : MAX_RECOG_OPERANDS * 5 + 5;
      break;
      /* For each input, we may have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
	 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT.  By adding 0 / 1 / 2 ,
	 respectively, to the time values for these, we get distinct time
	 values.  To get distinct time values for each operand, we have to
	 multiply opnum by at least three.  We round that up to four because
	 multiply by four is often cheaper.  */
    case RELOAD_FOR_INPADDR_ADDRESS:
      time1 = opnum * 4 + 2;
      break;
    case RELOAD_FOR_INPUT_ADDRESS:
      time1 = opnum * 4 + 3;
      break;
    case RELOAD_FOR_INPUT:
      /* All RELOAD_FOR_INPUT reloads remain live till the instruction
	 executes (inclusive).  */
      time1 = copy ? opnum * 4 + 4 : MAX_RECOG_OPERANDS * 4 + 3;
      break;
    case RELOAD_FOR_OPADDR_ADDR:
      /* opnum * 4 + 4
	 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4  */
      time1 = MAX_RECOG_OPERANDS * 4 + 1;
      break;
    case RELOAD_FOR_OPERAND_ADDRESS:
      /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
	 is executed.  */
      time1 = copy ? MAX_RECOG_OPERANDS * 4 + 2 : MAX_RECOG_OPERANDS * 4 + 3;
      break;
    case RELOAD_FOR_OUTADDR_ADDRESS:
      time1 = MAX_RECOG_OPERANDS * 4 + 4 + opnum;
      break;
    case RELOAD_FOR_OUTPUT_ADDRESS:
      time1 = MAX_RECOG_OPERANDS * 4 + 5 + opnum;
      break;
    default:
      time1 = MAX_RECOG_OPERANDS * 5 + 5;
    }

  /* Compare against every other reload of this insn whose reload reg
     overlaps the hard reg REGNO.  */
  for (i = 0; i < n_reloads; i++)
    {
      rtx reg = rld[i].reg_rtx;
      if (reg && REG_P (reg)
	  /* Unsigned trick: true iff REGNO falls within REG's hard regs.  */
	  && ((unsigned) regno - true_regnum (reg)
	      <= hard_regno_nregs[REGNO (reg)][GET_MODE (reg)] - (unsigned) 1)
	  && i != reloadnum)
	{
	  rtx other_input = rld[i].in;

	  /* If the other reload loads the same input value, that
	     will not cause a conflict only if it's loading it into
	     the same register.  */
	  if (true_regnum (reg) != start_regno)
	    other_input = NULL_RTX;
	  if (! other_input || ! rtx_equal_p (other_input, value)
	      || rld[i].out || out)
	    {
	      int time2;
	      switch (rld[i].when_needed)
		{
		case RELOAD_FOR_OTHER_ADDRESS:
		  time2 = 0;
		  break;
		case RELOAD_FOR_INPADDR_ADDRESS:
		  /* find_reloads makes sure that a
		     RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is only used
		     by at most one - the first -
		     RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS .  If the
		     address reload is inherited, the address address reload
		     goes away, so we can ignore this conflict.  */
		  if (type == RELOAD_FOR_INPUT_ADDRESS && reloadnum == i + 1
		      && ignore_address_reloads
		      /* Unless the RELOAD_FOR_INPUT is an auto_inc expression.
			 Then the address address is still needed to store
			 back the new address.  */
		      && ! rld[reloadnum].out)
		    continue;
		  /* Likewise, if a RELOAD_FOR_INPUT can inherit a value, its
		     RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS
		     reloads go away.  */
		  if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
		      && ignore_address_reloads
		      /* Unless we are reloading an auto_inc expression.  */
		      && ! rld[reloadnum].out)
		    continue;
		  time2 = rld[i].opnum * 4 + 2;
		  break;
		case RELOAD_FOR_INPUT_ADDRESS:
		  if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
		      && ignore_address_reloads
		      && ! rld[reloadnum].out)
		    continue;
		  time2 = rld[i].opnum * 4 + 3;
		  break;
		case RELOAD_FOR_INPUT:
		  time2 = rld[i].opnum * 4 + 4;
		  check_earlyclobber = 1;
		  break;
		  /* rld[i].opnum * 4 + 4 <= (MAX_RECOG_OPERAND - 1) * 4 + 4
		     == MAX_RECOG_OPERAND * 4  */
		case RELOAD_FOR_OPADDR_ADDR:
		  if (type == RELOAD_FOR_OPERAND_ADDRESS && reloadnum == i + 1
		      && ignore_address_reloads
		      && ! rld[reloadnum].out)
		    continue;
		  time2 = MAX_RECOG_OPERANDS * 4 + 1;
		  break;
		case RELOAD_FOR_OPERAND_ADDRESS:
		  time2 = MAX_RECOG_OPERANDS * 4 + 2;
		  check_earlyclobber = 1;
		  break;
		case RELOAD_FOR_INSN:
		  time2 = MAX_RECOG_OPERANDS * 4 + 3;
		  break;
		case RELOAD_FOR_OUTPUT:
		  /* All RELOAD_FOR_OUTPUT reloads become live just after the
		     instruction is executed.  */
		  time2 = MAX_RECOG_OPERANDS * 4 + 4;
		  break;
		  /* The first RELOAD_FOR_OUTADDR_ADDRESS reload conflicts with
		     the RELOAD_FOR_OUTPUT reloads, so assign it the same time
		     value.  */
		case RELOAD_FOR_OUTADDR_ADDRESS:
		  if (type == RELOAD_FOR_OUTPUT_ADDRESS && reloadnum == i + 1
		      && ignore_address_reloads
		      && ! rld[reloadnum].out)
		    continue;
		  time2 = MAX_RECOG_OPERANDS * 4 + 4 + rld[i].opnum;
		  break;
		case RELOAD_FOR_OUTPUT_ADDRESS:
		  time2 = MAX_RECOG_OPERANDS * 4 + 5 + rld[i].opnum;
		  break;
		case RELOAD_OTHER:
		  /* If there is no conflict in the input part, handle this
		     like an output reload.  */
		  if (! rld[i].in || rtx_equal_p (other_input, value))
		    {
		      time2 = MAX_RECOG_OPERANDS * 4 + 4;
		      /* Earlyclobbered outputs must conflict with inputs.  */
		      if (earlyclobber_operand_p (rld[i].out))
			time2 = MAX_RECOG_OPERANDS * 4 + 3;

		      break;
		    }
		  time2 = 1;
		  /* RELOAD_OTHER might be live beyond instruction execution,
		     but this is not obvious when we set time2 = 1.  So check
		     here if there might be a problem with the new reload
		     clobbering the register used by the RELOAD_OTHER.  */
		  if (out)
		    return 0;
		  break;
		default:
		  return 0;
		}
	      if ((time1 >= time2
		   && (! rld[i].in || rld[i].out
		       || ! rtx_equal_p (other_input, value)))
		  || (out && rld[reloadnum].out_reg
		      && time2 >= MAX_RECOG_OPERANDS * 4 + 3))
		return 0;
	    }
	}
    }

  /* Earlyclobbered outputs must conflict with inputs.  */
  if (check_earlyclobber && out && earlyclobber_operand_p (out))
    return 0;

  return 1;
}
5621 /* Return 1 if the value in reload reg REGNO, as used by a reload
5622 needed for the part of the insn specified by OPNUM and TYPE,
5623 may be used to load VALUE into it.
5625 MODE is the mode in which the register is used, this is needed to
5626 determine how many hard regs to test.
5628 Other read-only reloads with the same value do not conflict
5629 unless OUT is nonzero and these other reloads have to live while
5630 output reloads live.
5631 If OUT is CONST0_RTX, this is a special case: it means that the
5632 test should not be for using register REGNO as reload register, but
5633 for copying from register REGNO into the reload register.
5635 RELOADNUM is the number of the reload we want to load this value for;
5636 a reload does not conflict with itself.
5638 When IGNORE_ADDRESS_RELOADS is set, we can not have conflicts with
5639 reloads that load an address for the very reload we are considering.
5641 The caller has to make sure that there is no conflict with the return
5642 register. */
5644 static int
5645 free_for_value_p (int regno, enum machine_mode mode, int opnum,
5646 enum reload_type type, rtx value, rtx out, int reloadnum,
5647 int ignore_address_reloads)
5649 int nregs = hard_regno_nregs[regno][mode];
5650 while (nregs-- > 0)
5651 if (! reload_reg_free_for_value_p (regno, regno + nregs, opnum, type,
5652 value, out, reloadnum,
5653 ignore_address_reloads))
5654 return 0;
5655 return 1;
5658 /* Return nonzero if the rtx X is invariant over the current function. */
5659 /* ??? Actually, the places where we use this expect exactly what is
5660 tested here, and not everything that is function invariant. In
5661 particular, the frame pointer and arg pointer are special cased;
5662 pic_offset_table_rtx is not, and we must not spill these things to
5663 memory. */
5666 function_invariant_p (const_rtx x)
5668 if (CONSTANT_P (x))
5669 return 1;
5670 if (x == frame_pointer_rtx || x == arg_pointer_rtx)
5671 return 1;
5672 if (GET_CODE (x) == PLUS
5673 && (XEXP (x, 0) == frame_pointer_rtx || XEXP (x, 0) == arg_pointer_rtx)
5674 && CONSTANT_P (XEXP (x, 1)))
5675 return 1;
5676 return 0;
5679 /* Determine whether the reload reg X overlaps any rtx'es used for
5680 overriding inheritance. Return nonzero if so. */
5682 static int
5683 conflicts_with_override (rtx x)
5685 int i;
5686 for (i = 0; i < n_reloads; i++)
5687 if (reload_override_in[i]
5688 && reg_overlap_mentioned_p (x, reload_override_in[i]))
5689 return 1;
5690 return 0;
5693 /* Give an error message saying we failed to find a reload for INSN,
5694 and clear out reload R. */
5695 static void
5696 failed_reload (rtx insn, int r)
5698 if (asm_noperands (PATTERN (insn)) < 0)
5699 /* It's the compiler's fault. */
5700 fatal_insn ("could not find a spill register", insn);
5702 /* It's the user's fault; the operand's mode and constraint
5703 don't match. Disable this reload so we don't crash in final. */
5704 error_for_asm (insn,
5705 "%<asm%> operand constraint incompatible with operand size");
5706 rld[r].in = 0;
5707 rld[r].out = 0;
5708 rld[r].reg_rtx = 0;
5709 rld[r].optional = 1;
5710 rld[r].secondary_p = 1;
/* I is the index in SPILL_REG_RTX of the reload register we are to allocate
   for reload R.  If it's valid, get an rtx for it.  Return nonzero if
   successful.  */
static int
set_reload_reg (int i, int r)
{
  int regno;
  rtx reg = spill_reg_rtx[i];

  /* Cache a REG rtx for this spill reg in the reload's mode, reusing
     the previous one when the mode matches.  */
  if (reg == 0 || GET_MODE (reg) != rld[r].mode)
    spill_reg_rtx[i] = reg
      = gen_rtx_REG (rld[r].mode, spill_regs[i]);

  regno = true_regnum (reg);

  /* Detect when the reload reg can't hold the reload mode.
     This used to be one `if', but Sequent compiler can't handle that.  */
  if (HARD_REGNO_MODE_OK (regno, rld[r].mode))
    {
      enum machine_mode test_mode = VOIDmode;
      if (rld[r].in)
	test_mode = GET_MODE (rld[r].in);
      /* If rld[r].in has VOIDmode, it means we will load it
	 in whatever mode the reload reg has: to wit, rld[r].mode.
	 We have already tested that for validity.  */
      /* Aside from that, we need to test that the expressions
	 to reload from or into have modes which are valid for this
	 reload register.  Otherwise the reload insns would be invalid.  */
      if (! (rld[r].in != 0 && test_mode != VOIDmode
	     && ! HARD_REGNO_MODE_OK (regno, test_mode)))
	if (! (rld[r].out != 0
	       && ! HARD_REGNO_MODE_OK (regno, GET_MODE (rld[r].out))))
	  {
	    /* The reg is OK.  */
	    last_spill_reg = i;

	    /* Mark as in use for this insn the reload regs we use
	       for this.  */
	    mark_reload_reg_in_use (spill_regs[i], rld[r].opnum,
				    rld[r].when_needed, rld[r].mode);

	    rld[r].reg_rtx = reg;
	    reload_spill_index[r] = spill_regs[i];
	    return 1;
	  }
    }
  return 0;
}
/* Find a spill register to use as a reload register for reload R.
   LAST_RELOAD is nonzero if this is the last reload for the insn being
   processed.

   Set rld[R].reg_rtx to the register allocated.

   We return 1 if successful, or 0 if we couldn't find a spill reg and
   we didn't change anything.  */

static int
allocate_reload_reg (struct insn_chain *chain ATTRIBUTE_UNUSED, int r,
		     int last_reload)
{
  int i, pass, count;

  /* If we put this reload ahead, thinking it is a group,
     then insist on finding a group.  Otherwise we can grab a
     reg that some other reload needs.
     (That can happen when we have a 68000 DATA_OR_FP_REG
     which is a group of data regs or one fp reg.)
     We need not be so restrictive if there are no more reloads
     for this insn.

     ??? Really it would be nicer to have smarter handling
     for that kind of reg class, where a problem like this is normal.
     Perhaps those classes should be avoided for reloading
     by use of more alternatives.  */

  int force_group = rld[r].nregs > 1 && ! last_reload;

  /* If we want a single register and haven't yet found one,
     take any reg in the right class and not in use.
     If we want a consecutive group, here is where we look for it.

     We use two passes so we can first look for reload regs to
     reuse, which are already in use for other reloads in this insn,
     and only then use additional registers.
     I think that maximizing reuse is needed to make sure we don't
     run out of reload regs.  Suppose we have three reloads, and
     reloads A and B can share regs.  These need two regs.
     Suppose A and B are given different regs.
     That leaves none for C.  */
  for (pass = 0; pass < 2; pass++)
    {
      /* I is the index in spill_regs.
	 We advance it round-robin between insns to use all spill regs
	 equally, so that inherited reloads have a chance
	 of leapfrogging each other.  */

      i = last_spill_reg;

      for (count = 0; count < n_spills; count++)
	{
	  int rclass = (int) rld[r].rclass;
	  int regnum;

	  i++;
	  if (i >= n_spills)
	    i -= n_spills;
	  regnum = spill_regs[i];

	  if ((reload_reg_free_p (regnum, rld[r].opnum,
				  rld[r].when_needed)
	       || (rld[r].in
		   /* We check reload_reg_used to make sure we
		      don't clobber the return register.  */
		   && ! TEST_HARD_REG_BIT (reload_reg_used, regnum)
		   && free_for_value_p (regnum, rld[r].mode, rld[r].opnum,
					rld[r].when_needed, rld[r].in,
					rld[r].out, r, 1)))
	      && TEST_HARD_REG_BIT (reg_class_contents[rclass], regnum)
	      && HARD_REGNO_MODE_OK (regnum, rld[r].mode)
	      /* Look first for regs to share, then for unshared.  But
		 don't share regs used for inherited reloads; they are
		 the ones we want to preserve.  */
	      && (pass
		  || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
					 regnum)
		      && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
					      regnum))))
	    {
	      int nr = hard_regno_nregs[regnum][rld[r].mode];
	      /* Avoid the problem where spilling a GENERAL_OR_FP_REG
		 (on 68000) got us two FP regs.  If NR is 1,
		 we would reject both of them.  */
	      if (force_group)
		nr = rld[r].nregs;
	      /* If we need only one reg, we have already won.  */
	      if (nr == 1)
		{
		  /* But reject a single reg if we demand a group.  */
		  if (force_group)
		    continue;
		  break;
		}
	      /* Otherwise check that as many consecutive regs as we need
		 are available here.  */
	      while (nr > 1)
		{
		  int regno = regnum + nr - 1;
		  if (!(TEST_HARD_REG_BIT (reg_class_contents[rclass], regno)
			&& spill_reg_order[regno] >= 0
			&& reload_reg_free_p (regno, rld[r].opnum,
					      rld[r].when_needed)))
		    break;
		  nr--;
		}
	      if (nr == 1)
		break;
	    }
	}

      /* If we found something on pass 1, omit pass 2.  */
      if (count < n_spills)
	break;
    }

  /* We should have found a spill register by now.  */
  if (count >= n_spills)
    return 0;

  /* I is the index in SPILL_REG_RTX of the reload register we are to
     allocate.  Get an rtx for it and find its register number.  */

  return set_reload_reg (i, r);
}
/* Initialize all the tables needed to allocate reload registers.
   CHAIN is the insn currently being processed; SAVE_RELOAD_REG_RTX
   is the array we use to restore the reg_rtx field for every reload.  */

static void
choose_reload_regs_init (struct insn_chain *chain, rtx *save_reload_reg_rtx)
  int i;

  /* Restore each reload's register choice to the state the caller saved
     before the previous allocation attempt.  */
  for (i = 0; i < n_reloads; i++)
    rld[i].reg_rtx = save_reload_reg_rtx[i];

  /* Forget all inheritance and override decisions from the last attempt.  */
  memset (reload_inherited, 0, MAX_RELOADS);
  memset (reload_inheritance_insn, 0, MAX_RELOADS * sizeof (rtx));
  memset (reload_override_in, 0, MAX_RELOADS * sizeof (rtx));

  /* Clear every per-insn "reload register in use" tracking set.  */
  CLEAR_HARD_REG_SET (reload_reg_used);
  CLEAR_HARD_REG_SET (reload_reg_used_at_all);
  CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
  CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
  CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
  CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);

  /* Compute the hard registers occupied by this insn: regs live through
     it, regs dead or set in it, plus the hard regs backing any such
     pseudos (via compute_use_by_pseudos).  */
  CLEAR_HARD_REG_SET (reg_used_in_insn);
  HARD_REG_SET tmp;
  REG_SET_TO_HARD_REG_SET (tmp, &chain->live_throughout);
  IOR_HARD_REG_SET (reg_used_in_insn, tmp);
  REG_SET_TO_HARD_REG_SET (tmp, &chain->dead_or_set);
  IOR_HARD_REG_SET (reg_used_in_insn, tmp);
  compute_use_by_pseudos (&reg_used_in_insn, &chain->live_throughout);
  compute_use_by_pseudos (&reg_used_in_insn, &chain->dead_or_set);

  /* Clear the per-operand tracking sets as well.  */
  for (i = 0; i < reload_n_operands; i++)
      CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);

  /* Registers NOT in chain->used_spill_regs are unavailable as reloads.  */
  COMPL_HARD_REG_SET (reload_reg_unavailable, chain->used_spill_regs);

  CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);

  for (i = 0; i < n_reloads; i++)
    /* If we have already decided to use a certain register,
       don't use it in another way.  */
    if (rld[i].reg_rtx)
      mark_reload_reg_in_use (REGNO (rld[i].reg_rtx), rld[i].opnum,
			      rld[i].when_needed, rld[i].mode);
/* Assign hard reg targets for the pseudo-registers we must reload
   into hard regs for this insn.
   Also output the instructions to copy them in and out of the hard regs.

   For machines with register classes, we are responsible for
   finding a reload reg in the proper class.

   Overall structure: (1) order the reloads by register class and note
   group requirements; (2) try allocation with inheritance (reusing a
   register that already holds the needed value from a previous insn),
   falling back to a pass without inheritance; (3) validate inheritance
   and override decisions once everything is assigned; (4) record which
   pseudos/spill regs end up with output reloads.  */

static void
choose_reload_regs (struct insn_chain *chain)
  rtx insn = chain->insn;
  int i, j;
  unsigned int max_group_size = 1;
  enum reg_class group_class = NO_REGS;
  int pass, win, inheritance;

  rtx save_reload_reg_rtx[MAX_RELOADS];

  /* In order to be certain of getting the registers we need,
     we must sort the reloads into order of increasing register class.
     Then our grabbing of reload registers will parallel the process
     that provided the reload registers.

     Also note whether any of the reloads wants a consecutive group of regs.
     If so, record the maximum size of the group desired and what
     register class contains all the groups needed by this insn.  */

  for (j = 0; j < n_reloads; j++)
      reload_order[j] = j;
      if (rld[j].reg_rtx != NULL_RTX)
	  gcc_assert (REG_P (rld[j].reg_rtx)
		      && HARD_REGISTER_P (rld[j].reg_rtx));
	  reload_spill_index[j] = REGNO (rld[j].reg_rtx);
      else
	reload_spill_index[j] = -1;

      if (rld[j].nregs > 1)
	  max_group_size = MAX (rld[j].nregs, max_group_size);
	  group_class
	    = reg_class_superunion[(int) rld[j].rclass][(int) group_class];

      save_reload_reg_rtx[j] = rld[j].reg_rtx;

  if (n_reloads > 1)
    qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);

  /* If -O, try first with inheritance, then turning it off.
     If not -O, don't do inheritance.
     Using inheritance when not optimizing leads to paradoxes
     with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
     because one side of the comparison might be inherited.  */
  win = 0;
  for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
      choose_reload_regs_init (chain, save_reload_reg_rtx);

      /* Process the reloads in order of preference just found.
	 Beyond this point, subregs can be found in reload_reg_rtx.

	 This used to look for an existing reloaded home for all of the
	 reloads, and only then perform any new reloads.  But that could lose
	 if the reloads were done out of reg-class order because a later
	 reload with a looser constraint might have an old home in a register
	 needed by an earlier reload with a tighter constraint.

	 To solve this, we make two passes over the reloads, in the order
	 described above.  In the first pass we try to inherit a reload
	 from a previous insn.  If there is a later reload that needs a
	 class that is a proper subset of the class being processed, we must
	 also allocate a spill register during the first pass.

	 Then make a second pass over the reloads to allocate any reloads
	 that haven't been given registers yet.  */

      for (j = 0; j < n_reloads; j++)
	  int r = reload_order[j];
	  rtx search_equiv = NULL_RTX;

	  /* Ignore reloads that got marked inoperative.  */
	  if (rld[r].out == 0 && rld[r].in == 0
	      && ! rld[r].secondary_p)
	    continue;

	  /* If find_reloads chose to use reload_in or reload_out as a reload
	     register, we don't need to choose one.  Otherwise, try even if it
	     found one since we might save an insn if we find the value lying
	     around.
	     Try also when reload_in is a pseudo without a hard reg.  */
	  if (rld[r].in != 0 && rld[r].reg_rtx != 0
	      && (rtx_equal_p (rld[r].in, rld[r].reg_rtx)
		  || (rtx_equal_p (rld[r].out, rld[r].reg_rtx)
		      && !MEM_P (rld[r].in)
		      && true_regnum (rld[r].in) < FIRST_PSEUDO_REGISTER)))
	    continue;

#if 0 /* No longer needed for correct operation.
	 It might give better code, or might not; worth an experiment?  */
	  /* If this is an optional reload, we can't inherit from earlier insns
	     until we are sure that any non-optional reloads have been allocated.
	     The following code takes advantage of the fact that optional reloads
	     are at the end of reload_order.  */
	  if (rld[r].optional != 0)
	    for (i = 0; i < j; i++)
	      if ((rld[reload_order[i]].out != 0
		   || rld[reload_order[i]].in != 0
		   || rld[reload_order[i]].secondary_p)
		  && ! rld[reload_order[i]].optional
		  && rld[reload_order[i]].reg_rtx == 0)
		allocate_reload_reg (chain, reload_order[i], 0);
#endif

	  /* First see if this pseudo is already available as reloaded
	     for a previous insn.  We cannot try to inherit for reloads
	     that are smaller than the maximum number of registers needed
	     for groups unless the register we would allocate cannot be used
	     for the groups.

	     We could check here to see if this is a secondary reload for
	     an object that is already in a register of the desired class.
	     This would avoid the need for the secondary reload register.
	     But this is complex because we can't easily determine what
	     objects might want to be loaded via this reload.  So let a
	     register be allocated here.  In `emit_reload_insns' we suppress
	     one of the loads in the case described above.  */

	  if (inheritance)
	      int byte = 0;
	      int regno = -1;
	      enum machine_mode mode = VOIDmode;

	      /* REGNO, MODE and BYTE describe the value we might inherit:
		 the register (hard or pseudo) that last held the reload
		 input, and the subreg byte offset when the input is an
		 offset SUBREG of a pseudo.  */
	      if (rld[r].in == 0)
	      else if (REG_P (rld[r].in))
		  regno = REGNO (rld[r].in);
		  mode = GET_MODE (rld[r].in);
	      else if (REG_P (rld[r].in_reg))
		  regno = REGNO (rld[r].in_reg);
		  mode = GET_MODE (rld[r].in_reg);
	      else if (GET_CODE (rld[r].in_reg) == SUBREG
		       && REG_P (SUBREG_REG (rld[r].in_reg)))
		  regno = REGNO (SUBREG_REG (rld[r].in_reg));
		  if (regno < FIRST_PSEUDO_REGISTER)
		    regno = subreg_regno (rld[r].in_reg);
		  else
		    byte = SUBREG_BYTE (rld[r].in_reg);
		  mode = GET_MODE (rld[r].in_reg);
#ifdef AUTO_INC_DEC
	      else if (GET_RTX_CLASS (GET_CODE (rld[r].in_reg)) == RTX_AUTOINC
		       && REG_P (XEXP (rld[r].in_reg, 0)))
		  regno = REGNO (XEXP (rld[r].in_reg, 0));
		  mode = GET_MODE (XEXP (rld[r].in_reg, 0));
		  rld[r].out = rld[r].in;
#endif
#if 0
	      /* This won't work, since REGNO can be a pseudo reg number.
		 Also, it takes much more hair to keep track of all the things
		 that can invalidate an inherited reload of part of a pseudoreg.  */
	      else if (GET_CODE (rld[r].in) == SUBREG
		       && REG_P (SUBREG_REG (rld[r].in)))
		regno = subreg_regno (rld[r].in);
#endif

	      if (regno >= 0
		  && reg_last_reload_reg[regno] != 0
#ifdef CANNOT_CHANGE_MODE_CLASS
		  /* Verify that the register it's in can be used in
		     mode MODE.  */
		  && !REG_CANNOT_CHANGE_MODE_P (REGNO (reg_last_reload_reg[regno]),
						GET_MODE (reg_last_reload_reg[regno]),
						mode)
#endif
		  enum reg_class rclass = rld[r].rclass, last_class;
		  rtx last_reg = reg_last_reload_reg[regno];
		  enum machine_mode need_mode;

		  i = REGNO (last_reg);
		  i += subreg_regno_offset (i, GET_MODE (last_reg), byte, mode);
		  last_class = REGNO_REG_CLASS (i);

		  /* NEED_MODE covers the value plus any subreg byte offset.  */
		  if (byte == 0)
		    need_mode = mode;
		  else
		    need_mode
		      = smallest_mode_for_size
		        (GET_MODE_BITSIZE (mode) + byte * BITS_PER_UNIT,
			 GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
			 ? MODE_INT : GET_MODE_CLASS (mode));

		  if ((GET_MODE_SIZE (GET_MODE (last_reg))
		       >= GET_MODE_SIZE (need_mode))
		      && reg_reloaded_contents[i] == regno
		      && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
		      && HARD_REGNO_MODE_OK (i, rld[r].mode)
		      && (TEST_HARD_REG_BIT (reg_class_contents[(int) rclass], i)
			  /* Even if we can't use this register as a reload
			     register, we might use it for reload_override_in,
			     if copying it to the desired class is cheap
			     enough.  */
			  || ((REGISTER_MOVE_COST (mode, last_class, rclass)
			       < MEMORY_MOVE_COST (mode, rclass, 1))
			      && (secondary_reload_class (1, rclass, mode,
							  last_reg)
				  == NO_REGS)
#ifdef SECONDARY_MEMORY_NEEDED
			      && ! SECONDARY_MEMORY_NEEDED (last_class, rclass,
							    mode)
#endif
		      && (rld[r].nregs == max_group_size
			  || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
		      && free_for_value_p (i, rld[r].mode, rld[r].opnum,
					   rld[r].when_needed, rld[r].in,
					   const0_rtx, r, 1))
		      /* If a group is needed, verify that all the subsequent
			 registers still have their values intact.  */
		      int nr = hard_regno_nregs[i][rld[r].mode];
		      int k;

		      for (k = 1; k < nr; k++)
			if (reg_reloaded_contents[i + k] != regno
			    || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))
			  break;

		      if (k == nr)
			  int i1;
			  int bad_for_class;

			  last_reg = (GET_MODE (last_reg) == mode
				      ? last_reg : gen_rtx_REG (mode, i));

			  bad_for_class = 0;
			  for (k = 0; k < nr; k++)
			    bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].rclass],
								  i+k);

			  /* We found a register that contains the
			     value we need.  If this register is the
			     same as an `earlyclobber' operand of the
			     current insn, just mark it as a place to
			     reload from since we can't use it as the
			     reload register itself.  */

			  for (i1 = 0; i1 < n_earlyclobbers; i1++)
			    if (reg_overlap_mentioned_for_reload_p
				(reg_last_reload_reg[regno],
				 reload_earlyclobbers[i1]))
			      break;

			  if (i1 != n_earlyclobbers
			      || ! (free_for_value_p (i, rld[r].mode,
						      rld[r].opnum,
						      rld[r].when_needed, rld[r].in,
						      rld[r].out, r, 1))
			      /* Don't use it if we'd clobber a pseudo reg.  */
			      || (TEST_HARD_REG_BIT (reg_used_in_insn, i)
				  && rld[r].out
				  && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
			      /* Don't clobber the frame pointer.  */
			      || (i == HARD_FRAME_POINTER_REGNUM
				  && frame_pointer_needed
				  && rld[r].out)
			      /* Don't really use the inherited spill reg
				 if we need it wider than we've got it.  */
			      || (GET_MODE_SIZE (rld[r].mode)
				  > GET_MODE_SIZE (mode))
			      || bad_for_class

			      /* If find_reloads chose reload_out as reload
				 register, stay with it - that leaves the
				 inherited register for subsequent reloads.  */
			      || (rld[r].out && rld[r].reg_rtx
				  && rtx_equal_p (rld[r].out, rld[r].reg_rtx)))
			      if (! rld[r].optional)
				  reload_override_in[r] = last_reg;
				  reload_inheritance_insn[r]
				    = reg_reloaded_insn[i];
			  else
			      int k;
			      /* We can use this as a reload reg.  */
			      /* Mark the register as in use for this part of
				 the insn.  */
			      mark_reload_reg_in_use (i,
						      rld[r].opnum,
						      rld[r].when_needed,
						      rld[r].mode);
			      rld[r].reg_rtx = last_reg;
			      reload_inherited[r] = 1;
			      reload_inheritance_insn[r]
				= reg_reloaded_insn[i];
			      reload_spill_index[r] = i;
			      for (k = 0; k < nr; k++)
				SET_HARD_REG_BIT (reload_reg_used_for_inherit,
						  i + k);

	  /* Here's another way to see if the value is already lying around.  */
	  if (inheritance
	      && rld[r].in != 0
	      && ! reload_inherited[r]
	      && rld[r].out == 0
	      && (CONSTANT_P (rld[r].in)
		  || GET_CODE (rld[r].in) == PLUS
		  || REG_P (rld[r].in)
		  || MEM_P (rld[r].in))
	      && (rld[r].nregs == max_group_size
		  || ! reg_classes_intersect_p (rld[r].rclass, group_class)))
	    search_equiv = rld[r].in;
	  /* If this is an output reload from a simple move insn, look
	     if an equivalence for the input is available.  */
	  else if (inheritance && rld[r].in == 0 && rld[r].out != 0)
	      rtx set = single_set (insn);

	      if (set
		  && rtx_equal_p (rld[r].out, SET_DEST (set))
		  && CONSTANT_P (SET_SRC (set)))
		search_equiv = SET_SRC (set);

	  if (search_equiv)
	      rtx equiv
		= find_equiv_reg (search_equiv, insn, rld[r].rclass,
				  -1, NULL, 0, rld[r].mode);
	      int regno = 0;

	      if (equiv != 0)
		  if (REG_P (equiv))
		    regno = REGNO (equiv);
		  else
		      /* This must be a SUBREG of a hard register.
			 Make a new REG since this might be used in an
			 address and not all machines support SUBREGs
			 there.  */
		      gcc_assert (GET_CODE (equiv) == SUBREG);
		      regno = subreg_regno (equiv);
		      equiv = gen_rtx_REG (rld[r].mode, regno);
		      /* If we choose EQUIV as the reload register, but the
			 loop below decides to cancel the inheritance, we'll
			 end up reloading EQUIV in rld[r].mode, not the mode
			 it had originally.  That isn't safe when EQUIV isn't
			 available as a spill register since its value might
			 still be live at this point.  */
		      for (i = regno; i < regno + (int) rld[r].nregs; i++)
			if (TEST_HARD_REG_BIT (reload_reg_unavailable, i))
			  equiv = 0;

	      /* If we found a spill reg, reject it unless it is free
		 and of the desired class.  */
	      if (equiv != 0)
		  int regs_used = 0;
		  int bad_for_class = 0;
		  int max_regno = regno + rld[r].nregs;

		  for (i = regno; i < max_regno; i++)
		      regs_used |= TEST_HARD_REG_BIT (reload_reg_used_at_all,
		      bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].rclass],

		  if ((regs_used
		       && ! free_for_value_p (regno, rld[r].mode,
					      rld[r].opnum, rld[r].when_needed,
					      rld[r].in, rld[r].out, r, 1))
		      || bad_for_class)
		    equiv = 0;

	      if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, rld[r].mode))
		equiv = 0;

	      /* We found a register that contains the value we need.
		 If this register is the same as an `earlyclobber' operand
		 of the current insn, just mark it as a place to reload from
		 since we can't use it as the reload register itself.  */

	      if (equiv != 0)
		for (i = 0; i < n_earlyclobbers; i++)
		  if (reg_overlap_mentioned_for_reload_p (equiv,
							  reload_earlyclobbers[i]))
		      if (! rld[r].optional)
			reload_override_in[r] = equiv;
		      equiv = 0;
		      break;

	      /* If the equiv register we have found is explicitly clobbered
		 in the current insn, it depends on the reload type if we
		 can use it, use it for reload_override_in, or not at all.
		 In particular, we then can't use EQUIV for a
		 RELOAD_FOR_OUTPUT_ADDRESS reload.  */

	      if (equiv != 0)
		  if (regno_clobbered_p (regno, insn, rld[r].mode, 2))
		    switch (rld[r].when_needed)
			case RELOAD_FOR_OTHER_ADDRESS:
			case RELOAD_FOR_INPADDR_ADDRESS:
			case RELOAD_FOR_INPUT_ADDRESS:
			case RELOAD_FOR_OPADDR_ADDR:
			  break;
			case RELOAD_OTHER:
			case RELOAD_FOR_INPUT:
			case RELOAD_FOR_OPERAND_ADDRESS:
			  if (! rld[r].optional)
			    reload_override_in[r] = equiv;
			  /* Fall through.  */
			default:
			  equiv = 0;
			  break;
		  else if (regno_clobbered_p (regno, insn, rld[r].mode, 1))
		    switch (rld[r].when_needed)
			case RELOAD_FOR_OTHER_ADDRESS:
			case RELOAD_FOR_INPADDR_ADDRESS:
			case RELOAD_FOR_INPUT_ADDRESS:
			case RELOAD_FOR_OPADDR_ADDR:
			case RELOAD_FOR_OPERAND_ADDRESS:
			case RELOAD_FOR_INPUT:
			  break;
			case RELOAD_OTHER:
			  if (! rld[r].optional)
			    reload_override_in[r] = equiv;
			  /* Fall through.  */
			default:
			  equiv = 0;
			  break;

	      /* If we found an equivalent reg, say no code need be generated
		 to load it, and use it as our reload reg.  */
	      if (equiv != 0
		  && (regno != HARD_FRAME_POINTER_REGNUM
		      || !frame_pointer_needed))
		  int nr = hard_regno_nregs[regno][rld[r].mode];
		  int k;
		  rld[r].reg_rtx = equiv;
		  reload_spill_index[r] = regno;
		  reload_inherited[r] = 1;

		  /* If reg_reloaded_valid is not set for this register,
		     there might be a stale spill_reg_store lying around.
		     We must clear it, since otherwise emit_reload_insns
		     might delete the store.  */
		  if (! TEST_HARD_REG_BIT (reg_reloaded_valid, regno))
		    spill_reg_store[regno] = NULL_RTX;
		  /* If any of the hard registers in EQUIV are spill
		     registers, mark them as in use for this insn.  */
		  for (k = 0; k < nr; k++)
		      i = spill_reg_order[regno + k];
		      if (i >= 0)
			  mark_reload_reg_in_use (regno, rld[r].opnum,
						  rld[r].when_needed,
						  rld[r].mode);
			  SET_HARD_REG_BIT (reload_reg_used_for_inherit,
					    regno + k);

	  /* If we found a register to use already, or if this is an optional
	     reload, we are done.  */
	  if (rld[r].reg_rtx != 0 || rld[r].optional != 0)
	    continue;

#if 0
	  /* No longer needed for correct operation.  Might or might
	     not give better code on the average.  Want to experiment?  */

	  /* See if there is a later reload that has a class different from our
	     class that intersects our class or that requires less register
	     than our reload.  If so, we must allocate a register to this
	     reload now, since that reload might inherit a previous reload
	     and take the only available register in our class.  Don't do this
	     for optional reloads since they will force all previous reloads
	     to be allocated.  Also don't do this for reloads that have been
	     turned off.  */

	  for (i = j + 1; i < n_reloads; i++)
	      int s = reload_order[i];

	      if ((rld[s].in == 0 && rld[s].out == 0
		   && ! rld[s].secondary_p)
		  || rld[s].optional)
		continue;

	      if ((rld[s].rclass != rld[r].rclass
		   && reg_classes_intersect_p (rld[r].rclass,
					       rld[s].rclass))
		  || rld[s].nregs < rld[r].nregs)
		break;

	  if (i == n_reloads)
	    continue;

	  allocate_reload_reg (chain, r, j == n_reloads - 1);
#endif

      /* Now allocate reload registers for anything non-optional that
	 didn't get one yet.  */
      for (j = 0; j < n_reloads; j++)
	  int r = reload_order[j];

	  /* Ignore reloads that got marked inoperative.  */
	  if (rld[r].out == 0 && rld[r].in == 0 && ! rld[r].secondary_p)
	    continue;

	  /* Skip reloads that already have a register allocated or are
	     optional.  */
	  if (rld[r].reg_rtx != 0 || rld[r].optional)
	    continue;

	  if (! allocate_reload_reg (chain, r, j == n_reloads - 1))
	    break;

      /* If that loop got all the way, we have won.  */
      if (j == n_reloads)
	  win = 1;
	  break;

      /* Loop around and try without any inheritance.  */

  if (! win)
      /* First undo everything done by the failed attempt
	 to allocate with inheritance.  */
      choose_reload_regs_init (chain, save_reload_reg_rtx);

      /* Some sanity tests to verify that the reloads found in the first
	 pass are identical to the ones we have now.  */
      gcc_assert (chain->n_reloads == n_reloads);

      for (i = 0; i < n_reloads; i++)
	  if (chain->rld[i].regno < 0 || chain->rld[i].reg_rtx != 0)
	    continue;
	  gcc_assert (chain->rld[i].when_needed == rld[i].when_needed);
	  for (j = 0; j < n_spills; j++)
	    if (spill_regs[j] == chain->rld[i].regno)
	      if (! set_reload_reg (j, i))
		failed_reload (chain->insn, i);

  /* If we thought we could inherit a reload, because it seemed that
     nothing else wanted the same reload register earlier in the insn,
     verify that assumption, now that all reloads have been assigned.
     Likewise for reloads where reload_override_in has been set.  */

  /* If doing expensive optimizations, do one preliminary pass that doesn't
     cancel any inheritance, but removes reloads that have been needed only
     for reloads that we know can be inherited.  */
  for (pass = flag_expensive_optimizations; pass >= 0; pass--)
      for (j = 0; j < n_reloads; j++)
	  int r = reload_order[j];
	  rtx check_reg;
	  if (reload_inherited[r] && rld[r].reg_rtx)
	    check_reg = rld[r].reg_rtx;
	  else if (reload_override_in[r]
		   && (REG_P (reload_override_in[r])
		       || GET_CODE (reload_override_in[r]) == SUBREG))
	    check_reg = reload_override_in[r];
	  else
	    continue;
	  if (! free_for_value_p (true_regnum (check_reg), rld[r].mode,
				  rld[r].opnum, rld[r].when_needed, rld[r].in,
				  (reload_inherited[r]
				   ? rld[r].out : const0_rtx),
				  r, 1))
	      if (pass)
		continue;
	      reload_inherited[r] = 0;
	      reload_override_in[r] = 0;
	  /* If we can inherit a RELOAD_FOR_INPUT, or can use a
	     reload_override_in, then we do not need its related
	     RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
	     likewise for other reload types.
	     We handle this by removing a reload when its only replacement
	     is mentioned in reload_in of the reload we are going to inherit.
	     A special case are auto_inc expressions; even if the input is
	     inherited, we still need the address for the output.  We can
	     recognize them because they have RELOAD_OUT set to RELOAD_IN.
	     If we succeeded removing some reload and we are doing a preliminary
	     pass just to remove such reloads, make another pass, since the
	     removal of one reload might allow us to inherit another one.  */
	  else if (rld[r].in
		   && rld[r].out != rld[r].in
		   && remove_address_replacements (rld[r].in) && pass)
	    pass = 2;

  /* Now that reload_override_in is known valid,
     actually override reload_in.  */
  for (j = 0; j < n_reloads; j++)
    if (reload_override_in[j])
      rld[j].in = reload_override_in[j];

  /* If this reload won't be done because it has been canceled or is
     optional and not inherited, clear reload_reg_rtx so other
     routines (such as subst_reloads) don't get confused.  */
  for (j = 0; j < n_reloads; j++)
    if (rld[j].reg_rtx != 0
	&& ((rld[j].optional && ! reload_inherited[j])
	    || (rld[j].in == 0 && rld[j].out == 0
		&& ! rld[j].secondary_p)))
	int regno = true_regnum (rld[j].reg_rtx);

	if (spill_reg_order[regno] >= 0)
	  clear_reload_reg_in_use (regno, rld[j].opnum,
				   rld[j].when_needed, rld[j].mode);
	rld[j].reg_rtx = 0;
	reload_spill_index[j] = -1;

  /* Record which pseudos and which spill regs have output reloads.  */
  for (j = 0; j < n_reloads; j++)
      int r = reload_order[j];

      i = reload_spill_index[r];

      /* I is nonneg if this reload uses a register.
	 If rld[r].reg_rtx is 0, this is an optional reload
	 that we opted to ignore.  */
      if (rld[r].out_reg != 0 && REG_P (rld[r].out_reg)
	  && rld[r].reg_rtx != 0)
	  int nregno = REGNO (rld[r].out_reg);
	  int nr = 1;

	  if (nregno < FIRST_PSEUDO_REGISTER)
	    nr = hard_regno_nregs[nregno][rld[r].mode];

	  while (--nr >= 0)
	    SET_REGNO_REG_SET (&reg_has_output_reload,
			       nregno + nr);

	  if (i >= 0)
	      nr = hard_regno_nregs[i][rld[r].mode];
	      while (--nr >= 0)
		SET_HARD_REG_BIT (reg_is_output_reload, i + nr);

	  gcc_assert (rld[r].when_needed == RELOAD_OTHER
		      || rld[r].when_needed == RELOAD_FOR_OUTPUT
		      || rld[r].when_needed == RELOAD_FOR_INSN);
6656 /* Deallocate the reload register for reload R. This is called from
6657 remove_address_replacements. */
6659 void
6660 deallocate_reload_reg (int r)
6662 int regno;
6664 if (! rld[r].reg_rtx)
6665 return;
6666 regno = true_regnum (rld[r].reg_rtx);
6667 rld[r].reg_rtx = 0;
6668 if (spill_reg_order[regno] >= 0)
6669 clear_reload_reg_in_use (regno, rld[r].opnum, rld[r].when_needed,
6670 rld[r].mode);
6671 reload_spill_index[r] = -1;
/* If the small_register_classes_for_mode_p target hook returns true for
   some machine modes, we may not have merged two reloads of the same item
   for fear that we might not have enough reload registers.  However,
   normally they will get the same reload register and hence actually need
   not be loaded twice.

   Here we check for the most common case of this phenomenon: when we have
   a number of reloads for the same object, each of which were allocated
   the same reload_reg_rtx, that reload_reg_rtx is not used for any other
   reload, and is not modified in the insn itself.  If we find such,
   merge all the reloads and set the resulting reload to RELOAD_OTHER.
   This will not increase the number of spill registers needed and will
   prevent redundant code.  */

static void
merge_assigned_reloads (rtx insn)
  int i, j;

  /* Scan all the reloads looking for ones that only load values and
     are not already RELOAD_OTHER and ones whose reload_reg_rtx are
     assigned and not modified by INSN.  */

  for (i = 0; i < n_reloads; i++)
      /* Highest operand number among RELOAD_FOR_INPUT_ADDRESS reloads
	 sharing our reload register, and lowest operand number among
	 conflicting RELOAD_FOR_INPUT reloads; the merge below is only
	 accepted when the former does not exceed the latter.  */
      int conflicting_input = 0;
      int max_input_address_opnum = -1;
      int min_conflicting_input_opnum = MAX_RECOG_OPERANDS;

      if (rld[i].in == 0 || rld[i].when_needed == RELOAD_OTHER
	  || rld[i].out != 0 || rld[i].reg_rtx == 0
	  || reg_set_p (rld[i].reg_rtx, insn))
	continue;

      /* Look at all other reloads.  Ensure that the only use of this
	 reload_reg_rtx is in a reload that just loads the same value
	 as we do.  Note that any secondary reloads must be of the identical
	 class since the values, modes, and result registers are the
	 same, so we need not do anything with any secondary reloads.  */

      for (j = 0; j < n_reloads; j++)
	  if (i == j || rld[j].reg_rtx == 0
	      || ! reg_overlap_mentioned_p (rld[j].reg_rtx,
					    rld[i].reg_rtx))
	    continue;

	  if (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
	      && rld[j].opnum > max_input_address_opnum)
	    max_input_address_opnum = rld[j].opnum;

	  /* If the reload regs aren't exactly the same (e.g, different modes)
	     or if the values are different, we can't merge this reload.
	     But if it is an input reload, we might still merge
	     RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_OTHER_ADDRESS reloads.  */

	  if (! rtx_equal_p (rld[i].reg_rtx, rld[j].reg_rtx)
	      || rld[j].out != 0 || rld[j].in == 0
	      || ! rtx_equal_p (rld[i].in, rld[j].in))
	      if (rld[j].when_needed != RELOAD_FOR_INPUT
		  || ((rld[i].when_needed != RELOAD_FOR_INPUT_ADDRESS
		       || rld[i].opnum > rld[j].opnum)
		      && rld[i].when_needed != RELOAD_FOR_OTHER_ADDRESS))
		break;
	      conflicting_input = 1;
	      if (min_conflicting_input_opnum > rld[j].opnum)
		min_conflicting_input_opnum = rld[j].opnum;

      /* If all is OK, merge the reloads.  Only set this to RELOAD_OTHER if
	 we, in fact, found any matching reloads.  */

      if (j == n_reloads
	  && max_input_address_opnum <= min_conflicting_input_opnum)
	  gcc_assert (rld[i].when_needed != RELOAD_FOR_OUTPUT);

	  for (j = 0; j < n_reloads; j++)
	    if (i != j && rld[j].reg_rtx != 0
		&& rtx_equal_p (rld[i].reg_rtx, rld[j].reg_rtx)
		&& (! conflicting_input
		    || rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
		    || rld[j].when_needed == RELOAD_FOR_OTHER_ADDRESS))
		/* Fold reload J into reload I: I becomes RELOAD_OTHER and
		   J is deactivated, its replacements transferred to I.  */
		rld[i].when_needed = RELOAD_OTHER;
		rld[j].in = 0;
		reload_spill_index[j] = -1;
		transfer_replacements (i, j);

	  /* If this is now RELOAD_OTHER, look for any reloads that
	     load parts of this operand and set them to
	     RELOAD_FOR_OTHER_ADDRESS if they were for inputs,
	     RELOAD_OTHER for outputs.  Note that this test is
	     equivalent to looking for reloads for this operand
	     number.

	     We must take special care with RELOAD_FOR_OUTPUT_ADDRESS;
	     it may share registers with a RELOAD_FOR_INPUT, so we can
	     not change it to RELOAD_FOR_OTHER_ADDRESS.  We should
	     never need to, since we do not modify RELOAD_FOR_OUTPUT.

	     It is possible that the RELOAD_FOR_OPERAND_ADDRESS
	     instruction is assigned the same register as the earlier
	     RELOAD_FOR_OTHER_ADDRESS instruction.  Merging these two
	     instructions will cause the RELOAD_FOR_OTHER_ADDRESS
	     instruction to be deleted later on.  */

	  if (rld[i].when_needed == RELOAD_OTHER)
	    for (j = 0; j < n_reloads; j++)
	      if (rld[j].in != 0
		  && rld[j].when_needed != RELOAD_OTHER
		  && rld[j].when_needed != RELOAD_FOR_OTHER_ADDRESS
		  && rld[j].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
		  && rld[j].when_needed != RELOAD_FOR_OPERAND_ADDRESS
		  && (! conflicting_input
		      || rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
		      || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
		  && reg_overlap_mentioned_for_reload_p (rld[j].in,
							 rld[i].in))
		  int k;

		  rld[j].when_needed
		    = ((rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
			|| rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
		       ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER);

		  /* Check to see if we accidentally converted two
		     reloads that use the same reload register with
		     different inputs to the same type.  If so, the
		     resulting code won't work.  */
		  if (rld[j].reg_rtx)
		    for (k = 0; k < j; k++)
		      gcc_assert (rld[k].in == 0 || rld[k].reg_rtx == 0
				  || rld[k].when_needed != rld[j].when_needed
				  || !rtx_equal_p (rld[k].reg_rtx,
						   rld[j].reg_rtx)
				  || rtx_equal_p (rld[k].in,
						  rld[j].in));
6821 /* These arrays are filled by emit_reload_insns and its subroutines. */
/* One pending insn sequence per reload type (selected by the switch on
   rl->when_needed in emit_input_reload_insns); the per-operand arrays
   are indexed by the reload's operand number.  */
6822 static rtx input_reload_insns[MAX_RECOG_OPERANDS];
6823 static rtx other_input_address_reload_insns = 0;
6824 static rtx other_input_reload_insns = 0;
6825 static rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
6826 static rtx inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6827 static rtx output_reload_insns[MAX_RECOG_OPERANDS];
6828 static rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
6829 static rtx outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6830 static rtx operand_reload_insns = 0;
6831 static rtx other_operand_reload_insns = 0;
6832 static rtx other_output_reload_insns[MAX_RECOG_OPERANDS];
6834 /* Values to be put in spill_reg_store are put here first. */
6835 static rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
/* Hard regs used as output-reload registers that are noted REG_DEAD in
   the reloaded insn; cleared per insn by emit_reload_insns and set in
   emit_output_reload_insns.  */
6836 static HARD_REG_SET reg_reloaded_died;
6838 /* Check if *RELOAD_REG is suitable as an intermediate or scratch register
6839 of class NEW_CLASS with mode NEW_MODE. Or alternatively, if alt_reload_reg
6840 is nonzero, if that is suitable. On success, change *RELOAD_REG to the
6841 adjusted register, and return true. Otherwise, return false. */
6842 static bool
6843 reload_adjust_reg_for_temp (rtx *reload_reg, rtx alt_reload_reg,
6844 enum reg_class new_class,
6845 enum machine_mode new_mode)
6848 rtx reg;
/* Try *RELOAD_REG first, then (at most once) ALT_RELOAD_REG: the loop
   update swaps the alternative in and clears it, so at most two
   candidates are examined.  */
6850 for (reg = *reload_reg; reg; reg = alt_reload_reg, alt_reload_reg = 0)
6852 unsigned regno = REGNO (reg);
/* The candidate must belong to the requested register class.  */
6854 if (!TEST_HARD_REG_BIT (reg_class_contents[(int) new_class], regno))
6855 continue;
6856 if (GET_MODE (reg) != new_mode)
/* A mode change is only acceptable if the hard reg supports NEW_MODE
   and NEW_MODE does not need more hard registers than the candidate
   currently occupies.  */
6858 if (!HARD_REGNO_MODE_OK (regno, new_mode))
6859 continue;
6860 if (hard_regno_nregs[regno][new_mode]
6861 > hard_regno_nregs[regno][GET_MODE (reg)])
6862 continue;
6863 reg = reload_adjust_reg_for_mode (reg, new_mode);
6865 *reload_reg = reg;
6866 return true;
/* Neither candidate was usable.  */
6868 return false;
6871 /* Check if *RELOAD_REG is suitable as a scratch register for the reload
6872 pattern with insn_code ICODE, or alternatively, if alt_reload_reg is
6873 nonzero, if that is suitable. On success, change *RELOAD_REG to the
6874 adjusted register, and return true. Otherwise, return false. */
6875 static bool
6876 reload_adjust_reg_for_icode (rtx *reload_reg, rtx alt_reload_reg,
6877 enum insn_code icode)
/* A secondary-reload pattern's operand 2 is its scratch operand;
   require a register suitable for that operand's class and mode.  */
6880 enum reg_class new_class = scratch_reload_class (icode);
6881 enum machine_mode new_mode = insn_data[(int) icode].operand[2].mode;
6883 return reload_adjust_reg_for_temp (reload_reg, alt_reload_reg,
6884 new_class, new_mode);
6887 /* Generate insns to perform reload RL, which is for the insn in CHAIN and
6888 has the number J. OLD contains the value to be used as input. */
6890 static void
6891 emit_input_reload_insns (struct insn_chain *chain, struct reload *rl,
6892 rtx old, int j)
6894 rtx insn = chain->insn;
6895 rtx reloadreg;
6896 rtx oldequiv_reg = 0;
6897 rtx oldequiv = 0;
/* Set to 1 once one of the special-case paths below has already
   emitted (or proved unnecessary) the load, so the generic gen_reload
   near the end must be skipped.  */
6898 int special = 0;
6899 enum machine_mode mode;
/* Pending insn sequence, chosen by reload type, to append to.  */
6900 rtx *where;
6902 /* delete_output_reload is only invoked properly if old contains
6903 the original pseudo register. Since this is replaced with a
6904 hard reg when RELOAD_OVERRIDE_IN is set, see if we can
6905 find the pseudo in RELOAD_IN_REG. */
6906 if (reload_override_in[j]
6907 && REG_P (rl->in_reg))
6909 oldequiv = old;
6910 old = rl->in_reg;
6912 if (oldequiv == 0)
6913 oldequiv = old;
6914 else if (REG_P (oldequiv))
6915 oldequiv_reg = oldequiv;
6916 else if (GET_CODE (oldequiv) == SUBREG)
6917 oldequiv_reg = SUBREG_REG (oldequiv);
6919 reloadreg = reload_reg_rtx_for_input[j];
6920 mode = GET_MODE (reloadreg);
6922 /* If we are reloading from a register that was recently stored in
6923 with an output-reload, see if we can prove there was
6924 actually no need to store the old value in it. */
6926 if (optimize && REG_P (oldequiv)
6927 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6928 && spill_reg_store[REGNO (oldequiv)]
6929 && REG_P (old)
6930 && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (oldequiv)])
6931 || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
6932 rl->out_reg)))
6933 delete_output_reload (insn, j, REGNO (oldequiv), reloadreg);
6935 /* Encapsulate OLDEQUIV into the reload mode, then load RELOADREG from
6936 OLDEQUIV. */
6938 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
6939 oldequiv = SUBREG_REG (oldequiv);
6940 if (GET_MODE (oldequiv) != VOIDmode
6941 && mode != GET_MODE (oldequiv))
6942 oldequiv = gen_lowpart_SUBREG (mode, oldequiv);
6944 /* Switch to the right place to emit the reload insns. */
6945 switch (rl->when_needed)
6947 case RELOAD_OTHER:
6948 where = &other_input_reload_insns;
6949 break;
6950 case RELOAD_FOR_INPUT:
6951 where = &input_reload_insns[rl->opnum];
6952 break;
6953 case RELOAD_FOR_INPUT_ADDRESS:
6954 where = &input_address_reload_insns[rl->opnum];
6955 break;
6956 case RELOAD_FOR_INPADDR_ADDRESS:
6957 where = &inpaddr_address_reload_insns[rl->opnum];
6958 break;
6959 case RELOAD_FOR_OUTPUT_ADDRESS:
6960 where = &output_address_reload_insns[rl->opnum];
6961 break;
6962 case RELOAD_FOR_OUTADDR_ADDRESS:
6963 where = &outaddr_address_reload_insns[rl->opnum];
6964 break;
6965 case RELOAD_FOR_OPERAND_ADDRESS:
6966 where = &operand_reload_insns;
6967 break;
6968 case RELOAD_FOR_OPADDR_ADDR:
6969 where = &other_operand_reload_insns;
6970 break;
6971 case RELOAD_FOR_OTHER_ADDRESS:
6972 where = &other_input_address_reload_insns;
6973 break;
6974 default:
6975 gcc_unreachable ();
6978 push_to_sequence (*where);
6980 /* Auto-increment addresses must be reloaded in a special way. */
6981 if (rl->out && ! rl->out_reg)
6983 /* We are not going to bother supporting the case where a
6984 incremented register can't be copied directly from
6985 OLDEQUIV since this seems highly unlikely. */
6986 gcc_assert (rl->secondary_in_reload < 0);
6988 if (reload_inherited[j])
6989 oldequiv = reloadreg;
6991 old = XEXP (rl->in_reg, 0);
6993 if (optimize && REG_P (oldequiv)
6994 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6995 && spill_reg_store[REGNO (oldequiv)]
6996 && REG_P (old)
6997 && (dead_or_set_p (insn,
6998 spill_reg_stored_to[REGNO (oldequiv)])
6999 || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
7000 old)))
7001 delete_output_reload (insn, j, REGNO (oldequiv), reloadreg);
7003 /* Prevent normal processing of this reload. */
7004 special = 1;
7005 /* Output a special code sequence for this case. */
7006 new_spill_reg_store[REGNO (reloadreg)]
7007 = inc_for_reload (reloadreg, oldequiv, rl->out,
7008 rl->inc);
7011 /* If we are reloading a pseudo-register that was set by the previous
7012 insn, see if we can get rid of that pseudo-register entirely
7013 by redirecting the previous insn into our reload register. */
7015 else if (optimize && REG_P (old)
7016 && REGNO (old) >= FIRST_PSEUDO_REGISTER
7017 && dead_or_set_p (insn, old)
7018 /* This is unsafe if some other reload
7019 uses the same reg first. */
7020 && ! conflicts_with_override (reloadreg)
7021 && free_for_value_p (REGNO (reloadreg), rl->mode, rl->opnum,
7022 rl->when_needed, old, rl->out, j, 0))
7024 rtx temp = PREV_INSN (insn);
/* Skip over notes and debug insns to find the real previous insn.  */
7025 while (temp && (NOTE_P (temp) || DEBUG_INSN_P (temp)))
7026 temp = PREV_INSN (temp);
7027 if (temp
7028 && NONJUMP_INSN_P (temp)
7029 && GET_CODE (PATTERN (temp)) == SET
7030 && SET_DEST (PATTERN (temp)) == old
7031 /* Make sure we can access insn_operand_constraint. */
7032 && asm_noperands (PATTERN (temp)) < 0
7033 /* This is unsafe if operand occurs more than once in current
7034 insn. Perhaps some occurrences aren't reloaded. */
7035 && count_occurrences (PATTERN (insn), old, 0) == 1)
7037 rtx old = SET_DEST (PATTERN (temp));
7038 /* Store into the reload register instead of the pseudo. */
7039 SET_DEST (PATTERN (temp)) = reloadreg;
7041 /* Verify that resulting insn is valid. */
7042 extract_insn (temp);
7043 if (constrain_operands (1))
7045 /* If the previous insn is an output reload, the source is
7046 a reload register, and its spill_reg_store entry will
7047 contain the previous destination. This is now
7048 invalid. */
7049 if (REG_P (SET_SRC (PATTERN (temp)))
7050 && REGNO (SET_SRC (PATTERN (temp))) < FIRST_PSEUDO_REGISTER)
7052 spill_reg_store[REGNO (SET_SRC (PATTERN (temp)))] = 0;
7053 spill_reg_stored_to[REGNO (SET_SRC (PATTERN (temp)))] = 0;
7056 /* If these are the only uses of the pseudo reg,
7057 pretend for GDB it lives in the reload reg we used. */
7058 if (REG_N_DEATHS (REGNO (old)) == 1
7059 && REG_N_SETS (REGNO (old)) == 1)
7061 reg_renumber[REGNO (old)] = REGNO (reloadreg);
7062 if (ira_conflicts_p)
7063 /* Inform IRA about the change. */
7064 ira_mark_allocation_change (REGNO (old));
7065 alter_reg (REGNO (old), -1, false);
7067 special = 1;
7069 /* Adjust any debug insns between temp and insn. */
7070 while ((temp = NEXT_INSN (temp)) != insn)
7071 if (DEBUG_INSN_P (temp))
7072 replace_rtx (PATTERN (temp), old, reloadreg);
7073 else
7074 gcc_assert (NOTE_P (temp));
7076 else
/* Constraints were not satisfied; undo the redirection.  */
7078 SET_DEST (PATTERN (temp)) = old;
7083 /* We can't do that, so output an insn to load RELOADREG. */
7085 /* If we have a secondary reload, pick up the secondary register
7086 and icode, if any. If OLDEQUIV and OLD are different or
7087 if this is an in-out reload, recompute whether or not we
7088 still need a secondary register and what the icode should
7089 be. If we still need a secondary register and the class or
7090 icode is different, go back to reloading from OLD if using
7091 OLDEQUIV means that we got the wrong type of register. We
7092 cannot have different class or icode due to an in-out reload
7093 because we don't make such reloads when both the input and
7094 output need secondary reload registers. */
7096 if (! special && rl->secondary_in_reload >= 0)
7098 rtx second_reload_reg = 0;
7099 rtx third_reload_reg = 0;
7100 int secondary_reload = rl->secondary_in_reload;
7101 rtx real_oldequiv = oldequiv;
7102 rtx real_old = old;
7103 rtx tmp;
7104 enum insn_code icode;
7105 enum insn_code tertiary_icode = CODE_FOR_nothing;
7107 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
7108 and similarly for OLD.
7109 See comments in get_secondary_reload in reload.c. */
7110 /* If it is a pseudo that cannot be replaced with its
7111 equivalent MEM, we must fall back to reload_in, which
7112 will have all the necessary substitutions registered.
7113 Likewise for a pseudo that can't be replaced with its
7114 equivalent constant.
7116 Take extra care for subregs of such pseudos. Note that
7117 we cannot use reg_equiv_mem in this case because it is
7118 not in the right mode. */
7120 tmp = oldequiv;
7121 if (GET_CODE (tmp) == SUBREG)
7122 tmp = SUBREG_REG (tmp);
7123 if (REG_P (tmp)
7124 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
7125 && (reg_equiv_memory_loc[REGNO (tmp)] != 0
7126 || reg_equiv_constant[REGNO (tmp)] != 0))
7128 if (! reg_equiv_mem[REGNO (tmp)]
7129 || num_not_at_initial_offset
7130 || GET_CODE (oldequiv) == SUBREG)
7131 real_oldequiv = rl->in;
7132 else
7133 real_oldequiv = reg_equiv_mem[REGNO (tmp)];
7136 tmp = old;
7137 if (GET_CODE (tmp) == SUBREG)
7138 tmp = SUBREG_REG (tmp);
7139 if (REG_P (tmp)
7140 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
7141 && (reg_equiv_memory_loc[REGNO (tmp)] != 0
7142 || reg_equiv_constant[REGNO (tmp)] != 0))
7144 if (! reg_equiv_mem[REGNO (tmp)]
7145 || num_not_at_initial_offset
7146 || GET_CODE (old) == SUBREG)
7147 real_old = rl->in;
7148 else
7149 real_old = reg_equiv_mem[REGNO (tmp)];
7152 second_reload_reg = rld[secondary_reload].reg_rtx;
7153 if (rld[secondary_reload].secondary_in_reload >= 0)
7155 int tertiary_reload = rld[secondary_reload].secondary_in_reload;
7157 third_reload_reg = rld[tertiary_reload].reg_rtx;
7158 tertiary_icode = rld[secondary_reload].secondary_in_icode;
7159 /* We'd have to add more code for quaternary reloads. */
7160 gcc_assert (rld[tertiary_reload].secondary_in_reload < 0);
7162 icode = rl->secondary_in_icode;
7164 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
7165 || (rl->in != 0 && rl->out != 0))
7167 secondary_reload_info sri, sri2;
7168 enum reg_class new_class, new_t_class;
7170 sri.icode = CODE_FOR_nothing;
7171 sri.prev_sri = NULL;
/* Re-query the target for the secondary reload actually needed
   when loading REAL_OLDEQUIV (which may differ from OLD).  */
7172 new_class = targetm.secondary_reload (1, real_oldequiv, rl->rclass,
7173 mode, &sri);
7175 if (new_class == NO_REGS && sri.icode == CODE_FOR_nothing)
7176 second_reload_reg = 0;
7177 else if (new_class == NO_REGS)
7179 if (reload_adjust_reg_for_icode (&second_reload_reg,
7180 third_reload_reg,
7181 (enum insn_code) sri.icode))
7183 icode = (enum insn_code) sri.icode;
7184 third_reload_reg = 0;
7186 else
/* The available registers don't fit the new requirement;
   fall back to reloading from OLD.  */
7188 oldequiv = old;
7189 real_oldequiv = real_old;
7192 else if (sri.icode != CODE_FOR_nothing)
7193 /* We currently lack a way to express this in reloads. */
7194 gcc_unreachable ();
7195 else
7197 sri2.icode = CODE_FOR_nothing;
7198 sri2.prev_sri = &sri;
7199 new_t_class = targetm.secondary_reload (1, real_oldequiv,
7200 new_class, mode, &sri);
7201 if (new_t_class == NO_REGS && sri2.icode == CODE_FOR_nothing)
7203 if (reload_adjust_reg_for_temp (&second_reload_reg,
7204 third_reload_reg,
7205 new_class, mode))
7207 third_reload_reg = 0;
7208 tertiary_icode = (enum insn_code) sri2.icode;
7210 else
7212 oldequiv = old;
7213 real_oldequiv = real_old;
7216 else if (new_t_class == NO_REGS && sri2.icode != CODE_FOR_nothing)
7218 rtx intermediate = second_reload_reg;
7220 if (reload_adjust_reg_for_temp (&intermediate, NULL,
7221 new_class, mode)
7222 && reload_adjust_reg_for_icode (&third_reload_reg, NULL,
7223 ((enum insn_code)
7224 sri2.icode)))
7226 second_reload_reg = intermediate;
7227 tertiary_icode = (enum insn_code) sri2.icode;
7229 else
7231 oldequiv = old;
7232 real_oldequiv = real_old;
7235 else if (new_t_class != NO_REGS && sri2.icode == CODE_FOR_nothing)
7237 rtx intermediate = second_reload_reg;
7239 if (reload_adjust_reg_for_temp (&intermediate, NULL,
7240 new_class, mode)
7241 && reload_adjust_reg_for_temp (&third_reload_reg, NULL,
7242 new_t_class, mode))
7244 second_reload_reg = intermediate;
7245 tertiary_icode = (enum insn_code) sri2.icode;
7247 else
7249 oldequiv = old;
7250 real_oldequiv = real_old;
7253 else
7255 /* This could be handled more intelligently too. */
7256 oldequiv = old;
7257 real_oldequiv = real_old;
7262 /* If we still need a secondary reload register, check
7263 to see if it is being used as a scratch or intermediate
7264 register and generate code appropriately. If we need
7265 a scratch register, use REAL_OLDEQUIV since the form of
7266 the insn may depend on the actual address if it is
7267 a MEM. */
7269 if (second_reload_reg)
7271 if (icode != CODE_FOR_nothing)
7273 /* We'd have to add extra code to handle this case. */
7274 gcc_assert (!third_reload_reg);
7276 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
7277 second_reload_reg));
7278 special = 1;
7280 else
7282 /* See if we need a scratch register to load the
7283 intermediate register (a tertiary reload). */
7284 if (tertiary_icode != CODE_FOR_nothing)
7286 emit_insn ((GEN_FCN (tertiary_icode)
7287 (second_reload_reg, real_oldequiv,
7288 third_reload_reg)));
7290 else if (third_reload_reg)
7292 gen_reload (third_reload_reg, real_oldequiv,
7293 rl->opnum,
7294 rl->when_needed);
7295 gen_reload (second_reload_reg, third_reload_reg,
7296 rl->opnum,
7297 rl->when_needed);
7299 else
7300 gen_reload (second_reload_reg, real_oldequiv,
7301 rl->opnum,
7302 rl->when_needed);
/* Finish by loading RELOADREG from the intermediate below.  */
7304 oldequiv = second_reload_reg;
7309 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
7311 rtx real_oldequiv = oldequiv;
7313 if ((REG_P (oldequiv)
7314 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
7315 && (reg_equiv_memory_loc[REGNO (oldequiv)] != 0
7316 || reg_equiv_constant[REGNO (oldequiv)] != 0))
7317 || (GET_CODE (oldequiv) == SUBREG
7318 && REG_P (SUBREG_REG (oldequiv))
7319 && (REGNO (SUBREG_REG (oldequiv))
7320 >= FIRST_PSEUDO_REGISTER)
7321 && ((reg_equiv_memory_loc
7322 [REGNO (SUBREG_REG (oldequiv))] != 0)
7323 || (reg_equiv_constant
7324 [REGNO (SUBREG_REG (oldequiv))] != 0)))
7325 || (CONSTANT_P (oldequiv)
7326 && (PREFERRED_RELOAD_CLASS (oldequiv,
7327 REGNO_REG_CLASS (REGNO (reloadreg)))
7328 == NO_REGS)))
7329 real_oldequiv = rl->in;
7330 gen_reload (reloadreg, real_oldequiv, rl->opnum,
7331 rl->when_needed);
/* With non-call exceptions, forward the reloaded insn's EH region
   notes onto the insns just emitted.  */
7334 if (cfun->can_throw_non_call_exceptions)
7335 copy_reg_eh_region_note_forward (insn, get_insns (), NULL);
7337 /* End this sequence. */
7338 *where = get_insns ();
7339 end_sequence ();
7341 /* Update reload_override_in so that delete_address_reloads_1
7342 can see the actual register usage. */
7343 if (oldequiv_reg)
7344 reload_override_in[j] = oldequiv;
7347 /* Generate insns for the output reload RL, which is for the insn described
7348 by CHAIN and has the number J. */
7349 static void
7350 emit_output_reload_insns (struct insn_chain *chain, struct reload *rl,
7351 int j)
7353 rtx reloadreg;
7354 rtx insn = chain->insn;
/* Set to 1 once a secondary-reload path below has already emitted the
   final store, so the generic gen_reload further down is skipped.  */
7355 int special = 0;
7356 rtx old = rl->out;
7357 enum machine_mode mode;
7358 rtx p;
7359 rtx rl_reg_rtx;
7361 if (rl->when_needed == RELOAD_OTHER)
7362 start_sequence ();
7363 else
7364 push_to_sequence (output_reload_insns[rl->opnum]);
7366 rl_reg_rtx = reload_reg_rtx_for_output[j];
7367 mode = GET_MODE (rl_reg_rtx);
7369 reloadreg = rl_reg_rtx;
7371 /* If we need two reload regs, set RELOADREG to the intermediate
7372 one, since it will be stored into OLD. We might need a secondary
7373 register only for an input reload, so check again here. */
7375 if (rl->secondary_out_reload >= 0)
7377 rtx real_old = old;
7378 int secondary_reload = rl->secondary_out_reload;
7379 int tertiary_reload = rld[secondary_reload].secondary_out_reload;
7381 if (REG_P (old) && REGNO (old) >= FIRST_PSEUDO_REGISTER
7382 && reg_equiv_mem[REGNO (old)] != 0)
7383 real_old = reg_equiv_mem[REGNO (old)];
7385 if (secondary_reload_class (0, rl->rclass, mode, real_old) != NO_REGS)
7387 rtx second_reloadreg = reloadreg;
7388 reloadreg = rld[secondary_reload].reg_rtx;
7390 /* See if RELOADREG is to be used as a scratch register
7391 or as an intermediate register. */
7392 if (rl->secondary_out_icode != CODE_FOR_nothing)
7394 /* We'd have to add extra code to handle this case. */
7395 gcc_assert (tertiary_reload < 0);
7397 emit_insn ((GEN_FCN (rl->secondary_out_icode)
7398 (real_old, second_reloadreg, reloadreg)));
7399 special = 1;
7401 else
7403 /* See if we need both a scratch and intermediate reload
7404 register. */
7406 enum insn_code tertiary_icode
7407 = rld[secondary_reload].secondary_out_icode;
7409 /* We'd have to add more code for quaternary reloads. */
7410 gcc_assert (tertiary_reload < 0
7411 || rld[tertiary_reload].secondary_out_reload < 0);
7413 if (GET_MODE (reloadreg) != mode)
7414 reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);
7416 if (tertiary_icode != CODE_FOR_nothing)
7418 rtx third_reloadreg = rld[tertiary_reload].reg_rtx;
7419 rtx tem;
7421 /* Copy primary reload reg to secondary reload reg.
7422 (Note that these have been swapped above, then
7423 secondary reload reg to OLD using our insn.) */
7425 /* If REAL_OLD is a paradoxical SUBREG, remove it
7426 and try to put the opposite SUBREG on
7427 RELOADREG. */
7428 if (GET_CODE (real_old) == SUBREG
7429 && (GET_MODE_SIZE (GET_MODE (real_old))
7430 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
7431 && 0 != (tem = gen_lowpart_common
7432 (GET_MODE (SUBREG_REG (real_old)),
7433 reloadreg)))
7434 real_old = SUBREG_REG (real_old), reloadreg = tem;
7436 gen_reload (reloadreg, second_reloadreg,
7437 rl->opnum, rl->when_needed);
7438 emit_insn ((GEN_FCN (tertiary_icode)
7439 (real_old, reloadreg, third_reloadreg)));
7440 special = 1;
7443 else
7445 /* Copy between the reload regs here and then to
7446 OUT later. */
7448 gen_reload (reloadreg, second_reloadreg,
7449 rl->opnum, rl->when_needed);
7450 if (tertiary_reload >= 0)
7452 rtx third_reloadreg = rld[tertiary_reload].reg_rtx;
7454 gen_reload (third_reloadreg, reloadreg,
7455 rl->opnum, rl->when_needed);
7456 reloadreg = third_reloadreg;
7463 /* Output the last reload insn. */
7464 if (! special)
7466 rtx set;
7468 /* Don't output the last reload if OLD is not the dest of
7469 INSN and is in the src and is clobbered by INSN. */
7470 if (! flag_expensive_optimizations
7471 || !REG_P (old)
7472 || !(set = single_set (insn))
7473 || rtx_equal_p (old, SET_DEST (set))
7474 || !reg_mentioned_p (old, SET_SRC (set))
7475 || !((REGNO (old) < FIRST_PSEUDO_REGISTER)
7476 && regno_clobbered_p (REGNO (old), insn, rl->mode, 0)))
7477 gen_reload (old, reloadreg, rl->opnum,
7478 rl->when_needed);
7481 /* Look at all insns we emitted, just to be safe. */
7482 for (p = get_insns (); p; p = NEXT_INSN (p))
7483 if (INSN_P (p))
7485 rtx pat = PATTERN (p);
7487 /* If this output reload doesn't come from a spill reg,
7488 clear any memory of reloaded copies of the pseudo reg.
7489 If this output reload comes from a spill reg,
7490 reg_has_output_reload will make this do nothing. */
7491 note_stores (pat, forget_old_reloads_1, NULL);
7493 if (reg_mentioned_p (rl_reg_rtx, pat))
7495 rtx set = single_set (insn);
7496 if (reload_spill_index[j] < 0
7497 && set
7498 && SET_SRC (set) == rl_reg_rtx)
7500 int src = REGNO (SET_SRC (set));
7502 reload_spill_index[j] = src;
7503 SET_HARD_REG_BIT (reg_is_output_reload, src);
/* Remember output-reload regs that die in INSN so later code can
   treat them accordingly.  */
7504 if (find_regno_note (insn, REG_DEAD, src))
7505 SET_HARD_REG_BIT (reg_reloaded_died, src);
7507 if (HARD_REGISTER_P (rl_reg_rtx))
7509 int s = rl->secondary_out_reload;
7510 set = single_set (p);
7511 /* If this reload copies only to the secondary reload
7512 register, the secondary reload does the actual
7513 store. */
7514 if (s >= 0 && set == NULL_RTX)
7515 /* We can't tell what function the secondary reload
7516 has and where the actual store to the pseudo is
7517 made; leave new_spill_reg_store alone. */
7519 else if (s >= 0
7520 && SET_SRC (set) == rl_reg_rtx
7521 && SET_DEST (set) == rld[s].reg_rtx)
7523 /* Usually the next instruction will be the
7524 secondary reload insn; if we can confirm
7525 that it is, setting new_spill_reg_store to
7526 that insn will allow an extra optimization. */
7527 rtx s_reg = rld[s].reg_rtx;
7528 rtx next = NEXT_INSN (p);
7529 rld[s].out = rl->out;
7530 rld[s].out_reg = rl->out_reg;
7531 set = single_set (next);
7532 if (set && SET_SRC (set) == s_reg
7533 && ! new_spill_reg_store[REGNO (s_reg)])
7535 SET_HARD_REG_BIT (reg_is_output_reload,
7536 REGNO (s_reg));
7537 new_spill_reg_store[REGNO (s_reg)] = next;
7540 else
7541 new_spill_reg_store[REGNO (rl_reg_rtx)] = p;
7546 if (rl->when_needed == RELOAD_OTHER)
7548 emit_insn (other_output_reload_insns[rl->opnum]);
7549 other_output_reload_insns[rl->opnum] = get_insns ();
7551 else
7552 output_reload_insns[rl->opnum] = get_insns ();
/* With non-call exceptions, forward the reloaded insn's EH region
   notes onto the insns just emitted.  */
7554 if (cfun->can_throw_non_call_exceptions)
7555 copy_reg_eh_region_note_forward (insn, get_insns (), NULL);
7557 end_sequence ();
7560 /* Do input reloading for reload RL, which is for the insn described by CHAIN
7561 and has the number J. */
7562 static void
7563 do_input_reload (struct insn_chain *chain, struct reload *rl, int j)
7565 rtx insn = chain->insn;
/* Prefer the original pseudo (in_reg) over a MEM that replaced it,
   so downstream checks see the register.  */
7566 rtx old = (rl->in && MEM_P (rl->in)
7567 ? rl->in_reg : rl->in);
7568 rtx reg_rtx = rl->reg_rtx;
7570 if (old && reg_rtx)
7572 enum machine_mode mode;
7574 /* Determine the mode to reload in.
7575 This is very tricky because we have three to choose from.
7576 There is the mode the insn operand wants (rl->inmode).
7577 There is the mode of the reload register RELOADREG.
7578 There is the intrinsic mode of the operand, which we could find
7579 by stripping some SUBREGs.
7580 It turns out that RELOADREG's mode is irrelevant:
7581 we can change that arbitrarily.
7583 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
7584 then the reload reg may not support QImode moves, so use SImode.
7585 If foo is in memory due to spilling a pseudo reg, this is safe,
7586 because the QImode value is in the least significant part of a
7587 slot big enough for a SImode. If foo is some other sort of
7588 memory reference, then it is impossible to reload this case,
7589 so previous passes had better make sure this never happens.
7591 Then consider a one-word union which has SImode and one of its
7592 members is a float, being fetched as (SUBREG:SF union:SI).
7593 We must fetch that as SFmode because we could be loading into
7594 a float-only register. In this case OLD's mode is correct.
7596 Consider an immediate integer: it has VOIDmode. Here we need
7597 to get a mode from something else.
7599 In some cases, there is a fourth mode, the operand's
7600 containing mode. If the insn specifies a containing mode for
7601 this operand, it overrides all others.
7603 I am not sure whether the algorithm here is always right,
7604 but it does the right things in those cases. */
7606 mode = GET_MODE (old);
7607 if (mode == VOIDmode)
7608 mode = rl->inmode;
7610 /* We cannot use gen_lowpart_common since it can do the wrong thing
7611 when REG_RTX has a multi-word mode. Note that REG_RTX must
7612 always be a REG here. */
7613 if (GET_MODE (reg_rtx) != mode)
7614 reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
7616 reload_reg_rtx_for_input[j] = reg_rtx;
7618 if (old != 0
7619 /* AUTO_INC reloads need to be handled even if inherited. We got an
7620 AUTO_INC reload if reload_out is set but reload_out_reg isn't. */
7621 && (! reload_inherited[j] || (rl->out && ! rl->out_reg))
7622 && ! rtx_equal_p (reg_rtx, old)
7623 && reg_rtx != 0)
7624 emit_input_reload_insns (chain, rld + j, old, j);
7626 /* When inheriting a wider reload, we have a MEM in rl->in,
7627 e.g. inheriting a SImode output reload for
7628 (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10))) */
7629 if (optimize && reload_inherited[j] && rl->in
7630 && MEM_P (rl->in)
7631 && MEM_P (rl->in_reg)
7632 && reload_spill_index[j] >= 0
7633 && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
7634 rl->in = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];
7636 /* If we are reloading a register that was recently stored in with an
7637 output-reload, see if we can prove there was
7638 actually no need to store the old value in it. */
7640 if (optimize
7641 && (reload_inherited[j] || reload_override_in[j])
7642 && reg_rtx
7643 && REG_P (reg_rtx)
7644 && spill_reg_store[REGNO (reg_rtx)] != 0
7645 #if 0
7646 /* There doesn't seem to be any reason to restrict this to pseudos
7647 and doing so loses in the case where we are copying from a
7648 register of the wrong class. */
7649 && !HARD_REGISTER_P (spill_reg_stored_to[REGNO (reg_rtx)])
7650 #endif
7651 /* The insn might have already some references to stackslots
7652 replaced by MEMs, while reload_out_reg still names the
7653 original pseudo. */
7654 && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (reg_rtx)])
7655 || rtx_equal_p (spill_reg_stored_to[REGNO (reg_rtx)], rl->out_reg)))
7656 delete_output_reload (insn, j, REGNO (reg_rtx), reg_rtx);
7659 /* Do output reloading for reload RL, which is for the insn described by
7660 CHAIN and has the number J.
7661 ??? At some point we need to support handling output reloads of
7662 JUMP_INSNs or insns that set cc0. */
7663 static void
7664 do_output_reload (struct insn_chain *chain, struct reload *rl, int j)
7666 rtx note, old;
7667 rtx insn = chain->insn;
7668 /* If this is an output reload that stores something that is
7669 not loaded in this same reload, see if we can eliminate a previous
7670 store. */
7671 rtx pseudo = rl->out_reg;
7672 rtx reg_rtx = rl->reg_rtx;
7674 if (rl->out && reg_rtx)
7676 enum machine_mode mode;
7678 /* Determine the mode to reload in.
7679 See comments above (for input reloading). */
7680 mode = GET_MODE (rl->out);
7681 if (mode == VOIDmode)
7683 /* VOIDmode should never happen for an output. */
7684 if (asm_noperands (PATTERN (insn)) < 0)
7685 /* It's the compiler's fault. */
7686 fatal_insn ("VOIDmode on an output", insn);
7687 error_for_asm (insn, "output operand is constant in %<asm%>");
7688 /* Prevent crash--use something we know is valid. */
7689 mode = word_mode;
7690 rl->out = gen_rtx_REG (mode, REGNO (reg_rtx));
7692 if (GET_MODE (reg_rtx) != mode)
7693 reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
7695 reload_reg_rtx_for_output[j] = reg_rtx;
7697 if (pseudo
7698 && optimize
7699 && REG_P (pseudo)
7700 && ! rtx_equal_p (rl->in_reg, pseudo)
7701 && REGNO (pseudo) >= FIRST_PSEUDO_REGISTER
7702 && reg_last_reload_reg[REGNO (pseudo)])
7704 int pseudo_no = REGNO (pseudo);
7705 int last_regno = REGNO (reg_last_reload_reg[pseudo_no]);
7707 /* We don't need to test full validity of last_regno for
7708 inherit here; we only want to know if the store actually
7709 matches the pseudo. */
7710 if (TEST_HARD_REG_BIT (reg_reloaded_valid, last_regno)
7711 && reg_reloaded_contents[last_regno] == pseudo_no
7712 && spill_reg_store[last_regno]
7713 && rtx_equal_p (pseudo, spill_reg_stored_to[last_regno]))
7714 delete_output_reload (insn, j, last_regno, reg_rtx);
7717 old = rl->out_reg;
7718 if (old == 0
7719 || reg_rtx == 0
7720 || rtx_equal_p (old, reg_rtx))
7721 return;
7723 /* An output operand that dies right away does need a reload,
7724 but need not be copied from it. Show the new location in the
7725 REG_UNUSED note. */
7726 if ((REG_P (old) || GET_CODE (old) == SCRATCH)
7727 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
7729 XEXP (note, 0) = reg_rtx;
7730 return;
7732 /* Likewise for a SUBREG of an operand that dies. */
7733 else if (GET_CODE (old) == SUBREG
7734 && REG_P (SUBREG_REG (old))
7735 && 0 != (note = find_reg_note (insn, REG_UNUSED,
7736 SUBREG_REG (old))))
7738 XEXP (note, 0) = gen_lowpart_common (GET_MODE (old), reg_rtx);
7739 return;
7741 else if (GET_CODE (old) == SCRATCH)
7742 /* If we aren't optimizing, there won't be a REG_UNUSED note,
7743 but we don't want to make an output reload. */
7744 return;
7746 /* If this is a JUMP_INSN, we can't support output reloads yet. */
7747 gcc_assert (NONJUMP_INSN_P (insn));
7749 emit_output_reload_insns (chain, rld + j, j);
7752 /* A reload copies values of MODE from register SRC to register DEST.
7753 Return true if it can be treated for inheritance purposes like a
7754 group of reloads, each one reloading a single hard register. The
7755 caller has already checked that (reg:MODE SRC) and (reg:MODE DEST)
7756 occupy the same number of hard registers. */
7758 static bool
7759 inherit_piecemeal_p (int dest ATTRIBUTE_UNUSED,
7760 int src ATTRIBUTE_UNUSED,
7761 enum machine_mode mode ATTRIBUTE_UNUSED)
/* If the target restricts mode changes on some registers, piecemeal
   inheritance is only permitted when neither DEST nor SRC is affected
   for MODE; otherwise it is always permitted.  */
7763 #ifdef CANNOT_CHANGE_MODE_CLASS
7764 return (!REG_CANNOT_CHANGE_MODE_P (dest, mode, reg_raw_mode[dest])
7765 && !REG_CANNOT_CHANGE_MODE_P (src, mode, reg_raw_mode[src]));
7766 #else
7767 return true;
7768 #endif
/* Output insns to reload values in and out of the chosen reload regs.

   CHAIN describes the insn being reloaded.  This emits the reload
   insns around CHAIN->insn and then updates the global inheritance
   tables (reg_last_reload_reg, reg_reloaded_contents, spill_reg_store,
   and friends) so that later insns can reuse the reloaded values.  */

static void
emit_reload_insns (struct insn_chain *chain)
{
  rtx insn = chain->insn;

  /* Our iteration variable over the reloads of this insn.  */
  int j;

  CLEAR_HARD_REG_SET (reg_reloaded_died);

  /* Reset the per-operand sequence heads before generating anything.  */
  for (j = 0; j < reload_n_operands; j++)
    input_reload_insns[j] = input_address_reload_insns[j]
      = inpaddr_address_reload_insns[j]
      = output_reload_insns[j] = output_address_reload_insns[j]
      = outaddr_address_reload_insns[j]
      = other_output_reload_insns[j] = 0;
  other_input_address_reload_insns = 0;
  other_input_reload_insns = 0;
  operand_reload_insns = 0;
  other_operand_reload_insns = 0;

  /* Dump reloads into the dump file.  */
  if (dump_file)
    {
      fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
      debug_reload_to_stream (dump_file);
    }

  /* Now output the instructions to copy the data into and out of the
     reload registers.  Do these in the order that the reloads were reported,
     since reloads of base and index registers precede reloads of operands
     and the operands may need the base and index registers reloaded.  */

  for (j = 0; j < n_reloads; j++)
    {
      if (rld[j].reg_rtx && HARD_REGISTER_P (rld[j].reg_rtx))
	{
	  unsigned int i;

	  /* Forget any pending store records for the hard registers this
	     reload is about to overwrite.  */
	  for (i = REGNO (rld[j].reg_rtx); i < END_REGNO (rld[j].reg_rtx); i++)
	    new_spill_reg_store[i] = 0;
	}

      do_input_reload (chain, rld + j, j);
      do_output_reload (chain, rld + j, j);
    }

  /* Now write all the insns we made for reloads in the order expected by
     the allocation functions.  Prior to the insn being reloaded, we write
     the following reloads:

     RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.

     RELOAD_OTHER reloads.

     For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
     by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
     RELOAD_FOR_INPUT reload for the operand.

     RELOAD_FOR_OPADDR_ADDRS reloads.

     RELOAD_FOR_OPERAND_ADDRESS reloads.

     After the insn being reloaded, we write the following:

     For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
     by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
     RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
     reloads for the operand.  The RELOAD_OTHER output reloads are
     output in descending order by reload number.  */

  emit_insn_before (other_input_address_reload_insns, insn);
  emit_insn_before (other_input_reload_insns, insn);

  for (j = 0; j < reload_n_operands; j++)
    {
      emit_insn_before (inpaddr_address_reload_insns[j], insn);
      emit_insn_before (input_address_reload_insns[j], insn);
      emit_insn_before (input_reload_insns[j], insn);
    }

  emit_insn_before (other_operand_reload_insns, insn);
  emit_insn_before (operand_reload_insns, insn);

  for (j = 0; j < reload_n_operands; j++)
    {
      rtx x = emit_insn_after (outaddr_address_reload_insns[j], insn);
      x = emit_insn_after (output_address_reload_insns[j], x);
      x = emit_insn_after (output_reload_insns[j], x);
      emit_insn_after (other_output_reload_insns[j], x);
    }

  /* For all the spill regs newly reloaded in this instruction,
     record what they were reloaded from, so subsequent instructions
     can inherit the reloads.

     Update spill_reg_store for the reloads of this insn.
     Copy the elements that were updated in the loop above.  */

  for (j = 0; j < n_reloads; j++)
    {
      int r = reload_order[j];
      int i = reload_spill_index[r];

      /* If this is a non-inherited input reload from a pseudo, we must
	 clear any memory of a previous store to the same pseudo.  Only do
	 something if there will not be an output reload for the pseudo
	 being reloaded.  */
      if (rld[r].in_reg != 0
	  && ! (reload_inherited[r] || reload_override_in[r]))
	{
	  rtx reg = rld[r].in_reg;

	  if (GET_CODE (reg) == SUBREG)
	    reg = SUBREG_REG (reg);

	  if (REG_P (reg)
	      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
	      && !REGNO_REG_SET_P (&reg_has_output_reload, REGNO (reg)))
	    {
	      int nregno = REGNO (reg);

	      if (reg_last_reload_reg[nregno])
		{
		  int last_regno = REGNO (reg_last_reload_reg[nregno]);

		  if (reg_reloaded_contents[last_regno] == nregno)
		    spill_reg_store[last_regno] = 0;
		}
	    }
	}

      /* I is nonneg if this reload used a register.
	 If rld[r].reg_rtx is 0, this is an optional reload
	 that we opted to ignore.  */
      if (i >= 0 && rld[r].reg_rtx != 0)
	{
	  int nr = hard_regno_nregs[i][GET_MODE (rld[r].reg_rtx)];
	  int k;

	  /* For a multi register reload, we need to check if all or part
	     of the value lives to the end.  */
	  for (k = 0; k < nr; k++)
	    if (reload_reg_reaches_end_p (i + k, rld[r].opnum,
					  rld[r].when_needed))
	      CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);

	  /* Maybe the spill reg contains a copy of reload_out.  */
	  if (rld[r].out != 0
	      && (REG_P (rld[r].out)
#ifdef AUTO_INC_DEC
		  || ! rld[r].out_reg
#endif
		  || REG_P (rld[r].out_reg)))
	    {
	      rtx reg;
	      enum machine_mode mode;
	      int regno, nregs;

	      reg = reload_reg_rtx_for_output[r];
	      mode = GET_MODE (reg);
	      regno = REGNO (reg);
	      nregs = hard_regno_nregs[regno][mode];
	      if (reload_regs_reach_end_p (regno, nregs, rld[r].opnum,
					   rld[r].when_needed))
		{
		  rtx out = (REG_P (rld[r].out)
			     ? rld[r].out
			     : rld[r].out_reg
			     ? rld[r].out_reg
			     /* AUTO_INC */ : XEXP (rld[r].in_reg, 0));
		  int out_regno = REGNO (out);
		  int out_nregs = (!HARD_REGISTER_NUM_P (out_regno) ? 1
				   : hard_regno_nregs[out_regno][mode]);
		  bool piecemeal;

		  spill_reg_store[regno] = new_spill_reg_store[regno];
		  spill_reg_stored_to[regno] = out;
		  reg_last_reload_reg[out_regno] = reg;

		  piecemeal = (HARD_REGISTER_NUM_P (out_regno)
			       && nregs == out_nregs
			       && inherit_piecemeal_p (out_regno, regno, mode));

		  /* If OUT_REGNO is a hard register, it may occupy more than
		     one register.  If it does, say what is in the
		     rest of the registers assuming that both registers
		     agree on how many words the object takes.  If not,
		     invalidate the subsequent registers.  */

		  if (HARD_REGISTER_NUM_P (out_regno))
		    for (k = 1; k < out_nregs; k++)
		      reg_last_reload_reg[out_regno + k]
			= (piecemeal ? regno_reg_rtx[regno + k] : 0);

		  /* Now do the inverse operation.  */
		  for (k = 0; k < nregs; k++)
		    {
		      CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
		      reg_reloaded_contents[regno + k]
			= (!HARD_REGISTER_NUM_P (out_regno) || !piecemeal
			   ? out_regno
			   : out_regno + k);
		      reg_reloaded_insn[regno + k] = insn;
		      SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
		      if (HARD_REGNO_CALL_PART_CLOBBERED (regno + k, mode))
			SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
					  regno + k);
		      else
			CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
					    regno + k);
		    }
		}
	    }
	  /* Maybe the spill reg contains a copy of reload_in.  Only do
	     something if there will not be an output reload for
	     the register being reloaded.  */
	  else if (rld[r].out_reg == 0
		   && rld[r].in != 0
		   && ((REG_P (rld[r].in)
			&& !HARD_REGISTER_P (rld[r].in)
			&& !REGNO_REG_SET_P (&reg_has_output_reload,
					     REGNO (rld[r].in)))
		       || (REG_P (rld[r].in_reg)
			   && !REGNO_REG_SET_P (&reg_has_output_reload,
						REGNO (rld[r].in_reg))))
		   && !reg_set_p (reload_reg_rtx_for_input[r], PATTERN (insn)))
	    {
	      rtx reg;
	      enum machine_mode mode;
	      int regno, nregs;

	      reg = reload_reg_rtx_for_input[r];
	      mode = GET_MODE (reg);
	      regno = REGNO (reg);
	      nregs = hard_regno_nregs[regno][mode];
	      if (reload_regs_reach_end_p (regno, nregs, rld[r].opnum,
					   rld[r].when_needed))
		{
		  int in_regno;
		  int in_nregs;
		  rtx in;
		  bool piecemeal;

		  /* Pick the pseudo (or AUTO_INC address register) whose
		     value the reload register now holds.  */
		  if (REG_P (rld[r].in)
		      && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER)
		    in = rld[r].in;
		  else if (REG_P (rld[r].in_reg))
		    in = rld[r].in_reg;
		  else
		    in = XEXP (rld[r].in_reg, 0);
		  in_regno = REGNO (in);

		  in_nregs = (!HARD_REGISTER_NUM_P (in_regno) ? 1
			      : hard_regno_nregs[in_regno][mode]);

		  reg_last_reload_reg[in_regno] = reg;

		  piecemeal = (HARD_REGISTER_NUM_P (in_regno)
			       && nregs == in_nregs
			       && inherit_piecemeal_p (regno, in_regno, mode));

		  if (HARD_REGISTER_NUM_P (in_regno))
		    for (k = 1; k < in_nregs; k++)
		      reg_last_reload_reg[in_regno + k]
			= (piecemeal ? regno_reg_rtx[regno + k] : 0);

		  /* Unless we inherited this reload, show we haven't
		     recently done a store.
		     Previous stores of inherited auto_inc expressions
		     also have to be discarded.  */
		  if (! reload_inherited[r]
		      || (rld[r].out && ! rld[r].out_reg))
		    spill_reg_store[regno] = 0;

		  for (k = 0; k < nregs; k++)
		    {
		      CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
		      reg_reloaded_contents[regno + k]
			= (!HARD_REGISTER_NUM_P (in_regno) || !piecemeal
			   ? in_regno
			   : in_regno + k);
		      reg_reloaded_insn[regno + k] = insn;
		      SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
		      if (HARD_REGNO_CALL_PART_CLOBBERED (regno + k, mode))
			SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
					  regno + k);
		      else
			CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
					    regno + k);
		    }
		}
	    }
	}

      /* The following if-statement was #if 0'd in 1.34 (or before...).
	 It's reenabled in 1.35 because supposedly nothing else
	 deals with this problem.  */

      /* If a register gets output-reloaded from a non-spill register,
	 that invalidates any previous reloaded copy of it.
	 But forget_old_reloads_1 won't get to see it, because
	 it thinks only about the original insn.  So invalidate it here.
	 Also do the same thing for RELOAD_OTHER constraints where the
	 output is discarded.  */
      if (i < 0
	  && ((rld[r].out != 0
	       && (REG_P (rld[r].out)
		   || (MEM_P (rld[r].out)
		       && REG_P (rld[r].out_reg))))
	      || (rld[r].out == 0 && rld[r].out_reg
		  && REG_P (rld[r].out_reg))))
	{
	  rtx out = ((rld[r].out && REG_P (rld[r].out))
		     ? rld[r].out : rld[r].out_reg);
	  int out_regno = REGNO (out);
	  enum machine_mode mode = GET_MODE (out);

	  /* REG_RTX is now set or clobbered by the main instruction.
	     As the comment above explains, forget_old_reloads_1 only
	     sees the original instruction, and there is no guarantee
	     that the original instruction also clobbered REG_RTX.
	     For example, if find_reloads sees that the input side of
	     a matched operand pair dies in this instruction, it may
	     use the input register as the reload register.

	     Calling forget_old_reloads_1 is a waste of effort if
	     REG_RTX is also the output register.

	     If we know that REG_RTX holds the value of a pseudo
	     register, the code after the call will record that fact.  */
	  if (rld[r].reg_rtx && rld[r].reg_rtx != out)
	    forget_old_reloads_1 (rld[r].reg_rtx, NULL_RTX, NULL);

	  if (!HARD_REGISTER_NUM_P (out_regno))
	    {
	      rtx src_reg, store_insn = NULL_RTX;

	      reg_last_reload_reg[out_regno] = 0;

	      /* If we can find a hard register that is stored, record
		 the storing insn so that we may delete this insn with
		 delete_output_reload.  */
	      src_reg = reload_reg_rtx_for_output[r];

	      /* If this is an optional reload, try to find the source reg
		 from an input reload.  */
	      if (! src_reg)
		{
		  rtx set = single_set (insn);
		  if (set && SET_DEST (set) == rld[r].out)
		    {
		      int k;

		      src_reg = SET_SRC (set);
		      store_insn = insn;
		      for (k = 0; k < n_reloads; k++)
			{
			  if (rld[k].in == src_reg)
			    {
			      src_reg = reload_reg_rtx_for_input[k];
			      break;
			    }
			}
		    }
		}
	      else
		store_insn = new_spill_reg_store[REGNO (src_reg)];
	      if (src_reg && REG_P (src_reg)
		  && REGNO (src_reg) < FIRST_PSEUDO_REGISTER)
		{
		  int src_regno, src_nregs, k;
		  rtx note;

		  gcc_assert (GET_MODE (src_reg) == mode);
		  src_regno = REGNO (src_reg);
		  src_nregs = hard_regno_nregs[src_regno][mode];
		  /* The place where to find a death note varies with
		     PRESERVE_DEATH_INFO_REGNO_P .  The condition is not
		     necessarily checked exactly in the code that moves
		     notes, so just check both locations.  */
		  note = find_regno_note (insn, REG_DEAD, src_regno);
		  if (! note && store_insn)
		    note = find_regno_note (store_insn, REG_DEAD, src_regno);
		  for (k = 0; k < src_nregs; k++)
		    {
		      spill_reg_store[src_regno + k] = store_insn;
		      spill_reg_stored_to[src_regno + k] = out;
		      reg_reloaded_contents[src_regno + k] = out_regno;
		      reg_reloaded_insn[src_regno + k] = store_insn;
		      CLEAR_HARD_REG_BIT (reg_reloaded_dead, src_regno + k);
		      SET_HARD_REG_BIT (reg_reloaded_valid, src_regno + k);
		      if (HARD_REGNO_CALL_PART_CLOBBERED (src_regno + k,
							  mode))
			SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
					  src_regno + k);
		      else
			CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
					    src_regno + k);
		      SET_HARD_REG_BIT (reg_is_output_reload, src_regno + k);
		      /* NOTE(review): the died bit is set/cleared for
			 SRC_REGNO only, not SRC_REGNO + K — looks
			 intentional but worth confirming upstream.  */
		      if (note)
			SET_HARD_REG_BIT (reg_reloaded_died, src_regno);
		      else
			CLEAR_HARD_REG_BIT (reg_reloaded_died, src_regno);
		    }
		  reg_last_reload_reg[out_regno] = src_reg;
		  /* We have to set reg_has_output_reload here, or else
		     forget_old_reloads_1 will clear reg_last_reload_reg
		     right away.  */
		  SET_REGNO_REG_SET (&reg_has_output_reload,
				     out_regno);
		}
	    }
	  else
	    {
	      int k, out_nregs = hard_regno_nregs[out_regno][mode];

	      for (k = 0; k < out_nregs; k++)
		reg_last_reload_reg[out_regno + k] = 0;
	    }
	}
    }
  IOR_HARD_REG_SET (reg_reloaded_dead, reg_reloaded_died);
}
8198 /* Go through the motions to emit INSN and test if it is strictly valid.
8199 Return the emitted insn if valid, else return NULL. */
8201 static rtx
8202 emit_insn_if_valid_for_reload (rtx insn)
8204 rtx last = get_last_insn ();
8205 int code;
8207 insn = emit_insn (insn);
8208 code = recog_memoized (insn);
8210 if (code >= 0)
8212 extract_insn (insn);
8213 /* We want constrain operands to treat this insn strictly in its
8214 validity determination, i.e., the way it would after reload has
8215 completed. */
8216 if (constrain_operands (1))
8217 return insn;
8220 delete_insns_since (last);
8221 return NULL;
/* Emit code to perform a reload from IN (which may be a reload register) to
   OUT (which may also be a reload register).  IN or OUT is from operand
   OPNUM with reload type TYPE.

   Returns first insn emitted.  */

static rtx
gen_reload (rtx out, rtx in, int opnum, enum reload_type type)
{
  rtx last = get_last_insn ();
  rtx tem;

  /* If IN is a paradoxical SUBREG, remove it and try to put the
     opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.  */
  if (GET_CODE (in) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (in))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
      && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
    in = SUBREG_REG (in), out = tem;
  else if (GET_CODE (out) == SUBREG
	   && (GET_MODE_SIZE (GET_MODE (out))
	       > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
	   && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
    out = SUBREG_REG (out), in = tem;

  /* How to do this reload can get quite tricky.  Normally, we are being
     asked to reload a simple operand, such as a MEM, a constant, or a pseudo
     register that didn't get a hard register.  In that case we can just
     call emit_move_insn.

     We can also be asked to reload a PLUS that adds a register or a MEM to
     another register, constant or MEM.  This can occur during frame pointer
     elimination and while reloading addresses.  This case is handled by
     trying to emit a single insn to perform the add.  If it is not valid,
     we use a two insn sequence.

     Or we can be asked to reload an unary operand that was a fragment of
     an addressing mode, into a register.  If it isn't recognized as-is,
     we try making the unop operand and the reload-register the same:
     (set reg:X (unop:X expr:Y))
     -> (set reg:Y expr:Y) (set reg:X (unop:X reg:Y)).

     Finally, we could be called to handle an 'o' constraint by putting
     an address into a register.  In that case, we first try to do this
     with a named pattern of "reload_load_address".  If no such pattern
     exists, we just emit a SET insn and hope for the best (it will normally
     be valid on machines that use 'o').

     This entire process is made complex because reload will never
     process the insns we generate here and so we must ensure that
     they will fit their constraints and also by the fact that parts of
     IN might be being reloaded separately and replaced with spill registers.
     Because of this, we are, in some sense, just guessing the right approach
     here.  The one listed above seems to work.

     ??? At some point, this whole thing needs to be rethought.  */

  if (GET_CODE (in) == PLUS
      && (REG_P (XEXP (in, 0))
	  || GET_CODE (XEXP (in, 0)) == SUBREG
	  || MEM_P (XEXP (in, 0)))
      && (REG_P (XEXP (in, 1))
	  || GET_CODE (XEXP (in, 1)) == SUBREG
	  || CONSTANT_P (XEXP (in, 1))
	  || MEM_P (XEXP (in, 1))))
    {
      /* We need to compute the sum of a register or a MEM and another
	 register, constant, or MEM, and put it into the reload
	 register.  The best possible way of doing this is if the machine
	 has a three-operand ADD insn that accepts the required operands.

	 The simplest approach is to try to generate such an insn and see if it
	 is recognized and matches its constraints.  If so, it can be used.

	 It might be better not to actually emit the insn unless it is valid,
	 but we need to pass the insn as an operand to `recog' and
	 `extract_insn' and it is simpler to emit and then delete the insn if
	 not valid than to dummy things up.  */

      rtx op0, op1, tem, insn;
      int code;

      op0 = find_replacement (&XEXP (in, 0));
      op1 = find_replacement (&XEXP (in, 1));

      /* Since constraint checking is strict, commutativity won't be
	 checked, so we need to do that here to avoid spurious failure
	 if the add instruction is two-address and the second operand
	 of the add is the same as the reload reg, which is frequently
	 the case.  If the insn would be A = B + A, rearrange it so
	 it will be A = A + B as constrain_operands expects.  */

      if (REG_P (XEXP (in, 1))
	  && REGNO (out) == REGNO (XEXP (in, 1)))
	tem = op0, op0 = op1, op1 = tem;

      if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
	in = gen_rtx_PLUS (GET_MODE (in), op0, op1);

      insn = emit_insn_if_valid_for_reload (gen_rtx_SET (VOIDmode, out, in));
      if (insn)
	return insn;

      /* If that failed, we must use a conservative two-insn sequence.

	 Use a move to copy one operand into the reload register.  Prefer
	 to reload a constant, MEM or pseudo since the move patterns can
	 handle an arbitrary operand.  If OP1 is not a constant, MEM or
	 pseudo and OP1 is not a valid operand for an add instruction, then
	 reload OP1.

	 After reloading one of the operands into the reload register, add
	 the reload register to the output register.

	 If there is another way to do this for a specific machine, a
	 DEFINE_PEEPHOLE should be specified that recognizes the sequence
	 we emit below.  */

      code = (int) optab_handler (add_optab, GET_MODE (out))->insn_code;

      if (CONSTANT_P (op1) || MEM_P (op1) || GET_CODE (op1) == SUBREG
	  || (REG_P (op1)
	      && REGNO (op1) >= FIRST_PSEUDO_REGISTER)
	  || (code != CODE_FOR_nothing
	      && ! ((*insn_data[code].operand[2].predicate)
		    (op1, insn_data[code].operand[2].mode))))
	tem = op0, op0 = op1, op1 = tem;

      gen_reload (out, op0, opnum, type);

      /* If OP0 and OP1 are the same, we can use OUT for OP1.
	 This fixes a problem on the 32K where the stack pointer cannot
	 be used as an operand of an add insn.  */

      if (rtx_equal_p (op0, op1))
	op1 = out;

      insn = emit_insn_if_valid_for_reload (gen_add2_insn (out, op1));
      if (insn)
	{
	  /* Add a REG_EQUIV note so that find_equiv_reg can find it.  */
	  set_unique_reg_note (insn, REG_EQUIV, in);
	  return insn;
	}

      /* If that failed, copy the address register to the reload register.
	 Then add the constant to the reload register.  */

      gcc_assert (!reg_overlap_mentioned_p (out, op0));
      gen_reload (out, op1, opnum, type);
      insn = emit_insn (gen_add2_insn (out, op0));
      set_unique_reg_note (insn, REG_EQUIV, in);
    }

#ifdef SECONDARY_MEMORY_NEEDED
  /* If we need a memory location to do the move, do it that way.  */
  else if ((REG_P (in)
	    || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
	   && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER
	   && (REG_P (out)
	       || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
	   && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
	   && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (reg_or_subregno (in)),
				       REGNO_REG_CLASS (reg_or_subregno (out)),
				       GET_MODE (out)))
    {
      /* Get the memory to use and rewrite both registers to its mode.  */
      rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);

      if (GET_MODE (loc) != GET_MODE (out))
	out = gen_rtx_REG (GET_MODE (loc), REGNO (out));

      if (GET_MODE (loc) != GET_MODE (in))
	in = gen_rtx_REG (GET_MODE (loc), REGNO (in));

      /* Bounce the value through the secondary memory location.  */
      gen_reload (loc, in, opnum, type);
      gen_reload (out, loc, opnum, type);
    }
#endif
  else if (REG_P (out) && UNARY_P (in))
    {
      rtx insn;
      rtx op1;
      rtx out_moded;
      rtx set;

      op1 = find_replacement (&XEXP (in, 0));
      if (op1 != XEXP (in, 0))
	in = gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in), op1);

      /* First, try a plain SET.  */
      set = emit_insn_if_valid_for_reload (gen_rtx_SET (VOIDmode, out, in));
      if (set)
	return set;

      /* If that failed, move the inner operand to the reload
	 register, and try the same unop with the inner expression
	 replaced with the reload register.  */

      if (GET_MODE (op1) != GET_MODE (out))
	out_moded = gen_rtx_REG (GET_MODE (op1), REGNO (out));
      else
	out_moded = out;

      gen_reload (out_moded, op1, opnum, type);

      insn
	= gen_rtx_SET (VOIDmode, out,
		       gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in),
				      out_moded));
      insn = emit_insn_if_valid_for_reload (insn);
      if (insn)
	{
	  set_unique_reg_note (insn, REG_EQUIV, in);
	  return insn;
	}

      fatal_insn ("Failure trying to reload:", set);
    }
  /* If IN is a simple operand, use gen_move_insn.  */
  else if (OBJECT_P (in) || GET_CODE (in) == SUBREG)
    {
      tem = emit_insn (gen_move_insn (out, in));
      /* IN may contain a LABEL_REF, if so add a REG_LABEL_OPERAND note.  */
      mark_jump_label (in, tem, 0);
    }

#ifdef HAVE_reload_load_address
  else if (HAVE_reload_load_address)
    emit_insn (gen_reload_load_address (out, in));
#endif

  /* Otherwise, just write (set OUT IN) and hope for the best.  */
  else
    emit_insn (gen_rtx_SET (VOIDmode, out, in));

  /* Return the first insn emitted.
     We can not just return get_last_insn, because there may have
     been multiple instructions emitted.  Also note that gen_move_insn may
     emit more than one insn itself, so we can not assume that there is one
     insn emitted per emit_insn_before call.  */

  return last ? NEXT_INSN (last) : get_insns ();
}
/* Delete a previously made output-reload whose result we now believe
   is not needed.  First we double-check.

   INSN is the insn now being processed.
   LAST_RELOAD_REG is the hard register number for which we want to delete
   the last output reload.
   J is the reload-number that originally used REG.  The caller has made
   certain that reload J doesn't use REG any longer for input.
   NEW_RELOAD_REG is reload register that reload J is using for REG.  */

static void
delete_output_reload (rtx insn, int j, int last_reload_reg, rtx new_reload_reg)
{
  rtx output_reload_insn = spill_reg_store[last_reload_reg];
  rtx reg = spill_reg_stored_to[last_reload_reg];
  int k;
  int n_occurrences;
  int n_inherited = 0;
  rtx i1;
  rtx substed;

  /* It is possible that this reload has been only used to set another reload
     we eliminated earlier and thus deleted this instruction too.  */
  if (INSN_DELETED_P (output_reload_insn))
    return;

  /* Get the raw pseudo-register referred to.  */

  while (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);
  substed = reg_equiv_memory_loc[REGNO (reg)];

  /* This is unsafe if the operand occurs more often in the current
     insn than it is inherited.  */
  for (k = n_reloads - 1; k >= 0; k--)
    {
      rtx reg2 = rld[k].in;
      if (! reg2)
	continue;
      if (MEM_P (reg2) || reload_override_in[k])
	reg2 = rld[k].in_reg;
#ifdef AUTO_INC_DEC
      if (rld[k].out && ! rld[k].out_reg)
	reg2 = XEXP (rld[k].in_reg, 0);
#endif
      while (GET_CODE (reg2) == SUBREG)
	reg2 = SUBREG_REG (reg2);
      if (rtx_equal_p (reg2, reg))
	{
	  if (reload_inherited[k] || reload_override_in[k] || k == j)
	    n_inherited++;
	  else
	    return;
	}
    }
  /* Count how many times REG actually appears in INSN (including its
     memory-equivalent and alternate-memory forms).  */
  n_occurrences = count_occurrences (PATTERN (insn), reg, 0);
  if (CALL_P (insn) && CALL_INSN_FUNCTION_USAGE (insn))
    n_occurrences += count_occurrences (CALL_INSN_FUNCTION_USAGE (insn),
					reg, 0);
  if (substed)
    n_occurrences += count_occurrences (PATTERN (insn),
					eliminate_regs (substed, VOIDmode,
							NULL_RTX), 0);
  for (i1 = reg_equiv_alt_mem_list[REGNO (reg)]; i1; i1 = XEXP (i1, 1))
    {
      gcc_assert (!rtx_equal_p (XEXP (i1, 0), substed));
      n_occurrences += count_occurrences (PATTERN (insn), XEXP (i1, 0), 0);
    }
  if (n_occurrences > n_inherited)
    return;

  /* If the pseudo-reg we are reloading is no longer referenced
     anywhere between the store into it and here,
     and we're within the same basic block, then the value can only
     pass through the reload reg and end up here.
     Otherwise, give up--return.  */
  for (i1 = NEXT_INSN (output_reload_insn);
       i1 != insn; i1 = NEXT_INSN (i1))
    {
      if (NOTE_INSN_BASIC_BLOCK_P (i1))
	return;
      if ((NONJUMP_INSN_P (i1) || CALL_P (i1))
	  && reg_mentioned_p (reg, PATTERN (i1)))
	{
	  /* If this is USE in front of INSN, we only have to check that
	     there are no more references than accounted for by inheritance.  */
	  while (NONJUMP_INSN_P (i1) && GET_CODE (PATTERN (i1)) == USE)
	    {
	      n_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
	      i1 = NEXT_INSN (i1);
	    }
	  if (n_occurrences <= n_inherited && i1 == insn)
	    break;
	  return;
	}
    }

  /* We will be deleting the insn.  Remove the spill reg information.  */
  for (k = hard_regno_nregs[last_reload_reg][GET_MODE (reg)]; k-- > 0; )
    {
      spill_reg_store[last_reload_reg + k] = 0;
      spill_reg_stored_to[last_reload_reg + k] = 0;
    }

  /* The caller has already checked that REG dies or is set in INSN.
     It has also checked that we are optimizing, and thus some
     inaccuracies in the debugging information are acceptable.
     So we could just delete output_reload_insn.  But in some cases
     we can improve the debugging information without sacrificing
     optimization - maybe even improving the code: See if the pseudo
     reg has been completely replaced with reload regs.  If so, delete
     the store insn and forget we had a stack slot for the pseudo.  */
  if (rld[j].out != rld[j].in
      && REG_N_DEATHS (REGNO (reg)) == 1
      && REG_N_SETS (REGNO (reg)) == 1
      && REG_BASIC_BLOCK (REGNO (reg)) >= NUM_FIXED_BLOCKS
      && find_regno_note (insn, REG_DEAD, REGNO (reg)))
    {
      rtx i2;

      /* We know that it was used only between here and the beginning of
	 the current basic block.  (We also know that the last use before
	 INSN was the output reload we are thinking of deleting, but never
	 mind that.)  Search that range; see if any ref remains.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  /* Uses which just store in the pseudo don't count,
	     since if they are the only uses, they are dead.  */
	  if (set != 0 && SET_DEST (set) == reg)
	    continue;
	  if (LABEL_P (i2)
	      || JUMP_P (i2))
	    break;
	  if ((NONJUMP_INSN_P (i2) || CALL_P (i2))
	      && reg_mentioned_p (reg, PATTERN (i2)))
	    {
	      /* Some other ref remains; just delete the output reload we
		 know to be dead.  */
	      delete_address_reloads (output_reload_insn, insn);
	      delete_insn (output_reload_insn);
	      return;
	    }
	}

      /* Delete the now-dead stores into this pseudo.  Note that this
	 loop also takes care of deleting output_reload_insn.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  if (set != 0 && SET_DEST (set) == reg)
	    {
	      delete_address_reloads (i2, insn);
	      delete_insn (i2);
	    }
	  if (LABEL_P (i2)
	      || JUMP_P (i2))
	    break;
	}

      /* For the debugging info, say the pseudo lives in this reload reg.  */
      reg_renumber[REGNO (reg)] = REGNO (new_reload_reg);
      if (ira_conflicts_p)
	/* Inform IRA about the change.  */
	ira_mark_allocation_change (REGNO (reg));
      alter_reg (REGNO (reg), -1, false);
    }
  else
    {
      delete_address_reloads (output_reload_insn, insn);
      delete_insn (output_reload_insn);
    }
}
8645 /* We are going to delete DEAD_INSN. Recursively delete loads of
8646 reload registers used in DEAD_INSN that are not used till CURRENT_INSN.
8647 CURRENT_INSN is being reloaded, so we have to check its reloads too. */
8648 static void
8649 delete_address_reloads (rtx dead_insn, rtx current_insn)
8651 rtx set = single_set (dead_insn);
8652 rtx set2, dst, prev, next;
8653 if (set)
8655 rtx dst = SET_DEST (set);
8656 if (MEM_P (dst))
8657 delete_address_reloads_1 (dead_insn, XEXP (dst, 0), current_insn);
8659 /* If we deleted the store from a reloaded post_{in,de}c expression,
8660 we can delete the matching adds. */
8661 prev = PREV_INSN (dead_insn);
8662 next = NEXT_INSN (dead_insn);
8663 if (! prev || ! next)
8664 return;
8665 set = single_set (next);
8666 set2 = single_set (prev);
8667 if (! set || ! set2
8668 || GET_CODE (SET_SRC (set)) != PLUS || GET_CODE (SET_SRC (set2)) != PLUS
8669 || !CONST_INT_P (XEXP (SET_SRC (set), 1))
8670 || !CONST_INT_P (XEXP (SET_SRC (set2), 1)))
8671 return;
8672 dst = SET_DEST (set);
8673 if (! rtx_equal_p (dst, SET_DEST (set2))
8674 || ! rtx_equal_p (dst, XEXP (SET_SRC (set), 0))
8675 || ! rtx_equal_p (dst, XEXP (SET_SRC (set2), 0))
8676 || (INTVAL (XEXP (SET_SRC (set), 1))
8677 != -INTVAL (XEXP (SET_SRC (set2), 1))))
8678 return;
8679 delete_related_insns (prev);
8680 delete_related_insns (next);
/* Subfunction of delete_address_reloads: process registers found in X.

   X is (part of) the address of the MEM stored by DEAD_INSN.  If X is a
   spill register whose defining insn is now useless, delete that insn
   and recurse on its source.  */
static void
delete_address_reloads_1 (rtx dead_insn, rtx x, rtx current_insn)
{
  rtx prev, set, dst, i2;
  int i, j;
  enum rtx_code code = GET_CODE (x);

  /* For a non-REG, walk the expression tree and process each
     sub-expression.  */
  if (code != REG)
    {
      const char *fmt = GET_RTX_FORMAT (code);
      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	{
	  if (fmt[i] == 'e')
	    delete_address_reloads_1 (dead_insn, XEXP (x, i), current_insn);
	  else if (fmt[i] == 'E')
	    {
	      for (j = XVECLEN (x, i) - 1; j >= 0; j--)
		delete_address_reloads_1 (dead_insn, XVECEXP (x, i, j),
					  current_insn);
	    }
	}
      return;
    }

  /* Only spill registers can have been loaded by an address reload.  */
  if (spill_reg_order[REGNO (x)] < 0)
    return;

  /* Scan backwards for the insn that sets x.  This might be a way back due
     to inheritance.  */
  for (prev = PREV_INSN (dead_insn); prev; prev = PREV_INSN (prev))
    {
      code = GET_CODE (prev);
      if (code == CODE_LABEL || code == JUMP_INSN)
	return;
      if (!INSN_P (prev))
	continue;
      if (reg_set_p (x, PATTERN (prev)))
	break;
      /* A use of X before its setter means X is live into this region;
	 its setter is not deletable.  */
      if (reg_referenced_p (x, PATTERN (prev)))
	return;
    }
  /* The setter must be one of the reload insns we generated ourselves.  */
  if (! prev || INSN_UID (prev) < reload_first_uid)
    return;
  /* Check that PREV only sets the reload register.  */
  set = single_set (prev);
  if (! set)
    return;
  dst = SET_DEST (set);
  if (!REG_P (dst)
      || ! rtx_equal_p (dst, x))
    return;
  if (! reg_set_p (dst, PATTERN (dead_insn)))
    {
      /* Check if DST was used in a later insn -
	 it might have been inherited.  */
      for (i2 = NEXT_INSN (dead_insn); i2; i2 = NEXT_INSN (i2))
	{
	  if (LABEL_P (i2))
	    break;
	  if (! INSN_P (i2))
	    continue;
	  if (reg_referenced_p (dst, PATTERN (i2)))
	    {
	      /* If there is a reference to the register in the current insn,
		 it might be loaded in a non-inherited reload.  If no other
		 reload uses it, that means the register is set before
		 referenced.  */
	      if (i2 == current_insn)
		{
		  for (j = n_reloads - 1; j >= 0; j--)
		    if ((rld[j].reg_rtx == dst && reload_inherited[j])
			|| reload_override_in[j] == dst)
		      return;
		  for (j = n_reloads - 1; j >= 0; j--)
		    if (rld[j].in && rld[j].reg_rtx == dst)
		      break;
		  if (j >= 0)
		    break;
		}
	      return;
	    }
	  if (JUMP_P (i2))
	    break;
	  /* If DST is still live at CURRENT_INSN, check if it is used for
	     any reload.  Note that even if CURRENT_INSN sets DST, we still
	     have to check the reloads.  */
	  if (i2 == current_insn)
	    {
	      for (j = n_reloads - 1; j >= 0; j--)
		if ((rld[j].reg_rtx == dst && reload_inherited[j])
		    || reload_override_in[j] == dst)
		  return;
	      /* ??? We can't finish the loop here, because dst might be
		 allocated to a pseudo in this block if no reload in this
		 block needs any of the classes containing DST - see
		 spill_hard_reg.  There is no easy way to tell this, so we
		 have to scan till the end of the basic block.  */
	    }
	  if (reg_set_p (dst, PATTERN (i2)))
	    break;
	}
    }
  /* PREV is dead: recurse on its own address reloads, invalidate the
     inheritance record for DST, and delete it.  */
  delete_address_reloads_1 (prev, SET_SRC (set), current_insn);
  reg_reloaded_contents[REGNO (dst)] = -1;
  delete_insn (prev);
}
8791 /* Output reload-insns to reload VALUE into RELOADREG.
8792 VALUE is an autoincrement or autodecrement RTX whose operand
8793 is a register or memory location;
8794 so reloading involves incrementing that location.
8795 IN is either identical to VALUE, or some cheaper place to reload from.
8797 INC_AMOUNT is the number to increment or decrement by (always positive).
8798 This cannot be deduced from VALUE.
8800 Return the instruction that stores into RELOADREG. */
8802 static rtx
8803 inc_for_reload (rtx reloadreg, rtx in, rtx value, int inc_amount)
8805 /* REG or MEM to be copied and incremented. */
8806 rtx incloc = find_replacement (&XEXP (value, 0));
8807 /* Nonzero if increment after copying. */
8808 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC
8809 || GET_CODE (value) == POST_MODIFY);
8810 rtx last;
8811 rtx inc;
8812 rtx add_insn;
8813 int code;
8814 rtx store;
8815 rtx real_in = in == value ? incloc : in;
8817 /* No hard register is equivalent to this register after
8818 inc/dec operation. If REG_LAST_RELOAD_REG were nonzero,
8819 we could inc/dec that register as well (maybe even using it for
8820 the source), but I'm not sure it's worth worrying about. */
8821 if (REG_P (incloc))
8822 reg_last_reload_reg[REGNO (incloc)] = 0;
8824 if (GET_CODE (value) == PRE_MODIFY || GET_CODE (value) == POST_MODIFY)
8826 gcc_assert (GET_CODE (XEXP (value, 1)) == PLUS);
8827 inc = find_replacement (&XEXP (XEXP (value, 1), 1));
8829 else
8831 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
8832 inc_amount = -inc_amount;
8834 inc = GEN_INT (inc_amount);
8837 /* If this is post-increment, first copy the location to the reload reg. */
8838 if (post && real_in != reloadreg)
8839 emit_insn (gen_move_insn (reloadreg, real_in));
8841 if (in == value)
8843 /* See if we can directly increment INCLOC. Use a method similar to
8844 that in gen_reload. */
8846 last = get_last_insn ();
8847 add_insn = emit_insn (gen_rtx_SET (VOIDmode, incloc,
8848 gen_rtx_PLUS (GET_MODE (incloc),
8849 incloc, inc)));
8851 code = recog_memoized (add_insn);
8852 if (code >= 0)
8854 extract_insn (add_insn);
8855 if (constrain_operands (1))
8857 /* If this is a pre-increment and we have incremented the value
8858 where it lives, copy the incremented value to RELOADREG to
8859 be used as an address. */
8861 if (! post)
8862 emit_insn (gen_move_insn (reloadreg, incloc));
8864 return add_insn;
8867 delete_insns_since (last);
8870 /* If couldn't do the increment directly, must increment in RELOADREG.
8871 The way we do this depends on whether this is pre- or post-increment.
8872 For pre-increment, copy INCLOC to the reload register, increment it
8873 there, then save back. */
8875 if (! post)
8877 if (in != reloadreg)
8878 emit_insn (gen_move_insn (reloadreg, real_in));
8879 emit_insn (gen_add2_insn (reloadreg, inc));
8880 store = emit_insn (gen_move_insn (incloc, reloadreg));
8882 else
8884 /* Postincrement.
8885 Because this might be a jump insn or a compare, and because RELOADREG
8886 may not be available after the insn in an input reload, we must do
8887 the incrementation before the insn being reloaded for.
8889 We have already copied IN to RELOADREG. Increment the copy in
8890 RELOADREG, save that back, then decrement RELOADREG so it has
8891 the original value. */
8893 emit_insn (gen_add2_insn (reloadreg, inc));
8894 store = emit_insn (gen_move_insn (incloc, reloadreg));
8895 if (CONST_INT_P (inc))
8896 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-INTVAL (inc))));
8897 else
8898 emit_insn (gen_sub2_insn (reloadreg, inc));
8901 return store;
#ifdef AUTO_INC_DEC
/* Attach a REG_INC note to INSN for every auto-inc/dec address found
   anywhere inside the rtx X.  Recurses through all sub-expressions.  */

static void
add_auto_inc_notes (rtx insn, rtx x)
{
  enum rtx_code rc = GET_CODE (x);
  const char *fmt;
  int op, elt;

  /* A MEM whose address is an auto-inc/dec gets the note; no need to
     look inside it further.  */
  if (rc == MEM && auto_inc_p (XEXP (x, 0)))
    {
      add_reg_note (insn, REG_INC, XEXP (XEXP (x, 0), 0));
      return;
    }

  /* Otherwise walk every operand sub-expression (backwards, to keep the
     resulting note order unchanged).  */
  fmt = GET_RTX_FORMAT (rc);
  for (op = GET_RTX_LENGTH (rc) - 1; op >= 0; op--)
    {
      if (fmt[op] == 'e')
	add_auto_inc_notes (insn, XEXP (x, op));
      else if (fmt[op] == 'E')
	for (elt = XVECLEN (x, op) - 1; elt >= 0; elt--)
	  add_auto_inc_notes (insn, XVECEXP (x, op, elt));
    }
}
#endif
8931 /* This is used by reload pass, that does emit some instructions after
8932 abnormal calls moving basic block end, but in fact it wants to emit
8933 them on the edge. Looks for abnormal call edges, find backward the
8934 proper call and fix the damage.
8936 Similar handle instructions throwing exceptions internally. */
8937 void
8938 fixup_abnormal_edges (void)
8940 bool inserted = false;
8941 basic_block bb;
8943 FOR_EACH_BB (bb)
8945 edge e;
8946 edge_iterator ei;
8948 /* Look for cases we are interested in - calls or instructions causing
8949 exceptions. */
8950 FOR_EACH_EDGE (e, ei, bb->succs)
8952 if (e->flags & EDGE_ABNORMAL_CALL)
8953 break;
8954 if ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
8955 == (EDGE_ABNORMAL | EDGE_EH))
8956 break;
8958 if (e && !CALL_P (BB_END (bb))
8959 && !can_throw_internal (BB_END (bb)))
8961 rtx insn;
8963 /* Get past the new insns generated. Allow notes, as the insns
8964 may be already deleted. */
8965 insn = BB_END (bb);
8966 while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
8967 && !can_throw_internal (insn)
8968 && insn != BB_HEAD (bb))
8969 insn = PREV_INSN (insn);
8971 if (CALL_P (insn) || can_throw_internal (insn))
8973 rtx stop, next;
8975 stop = NEXT_INSN (BB_END (bb));
8976 BB_END (bb) = insn;
8977 insn = NEXT_INSN (insn);
8979 FOR_EACH_EDGE (e, ei, bb->succs)
8980 if (e->flags & EDGE_FALLTHRU)
8981 break;
8983 while (insn && insn != stop)
8985 next = NEXT_INSN (insn);
8986 if (INSN_P (insn))
8988 delete_insn (insn);
8990 /* Sometimes there's still the return value USE.
8991 If it's placed after a trapping call (i.e. that
8992 call is the last insn anyway), we have no fallthru
8993 edge. Simply delete this use and don't try to insert
8994 on the non-existent edge. */
8995 if (GET_CODE (PATTERN (insn)) != USE)
8997 /* We're not deleting it, we're moving it. */
8998 INSN_DELETED_P (insn) = 0;
8999 PREV_INSN (insn) = NULL_RTX;
9000 NEXT_INSN (insn) = NULL_RTX;
9002 insert_insn_on_edge (insn, e);
9003 inserted = true;
9006 else if (!BARRIER_P (insn))
9007 set_block_for_insn (insn, NULL);
9008 insn = next;
9012 /* It may be that we don't find any such trapping insn. In this
9013 case we discovered quite late that the insn that had been
9014 marked as can_throw_internal in fact couldn't trap at all.
9015 So we should in fact delete the EH edges out of the block. */
9016 else
9017 purge_dead_edges (bb);
9021 /* We've possibly turned single trapping insn into multiple ones. */
9022 if (cfun->can_throw_non_call_exceptions)
9024 sbitmap blocks;
9025 blocks = sbitmap_alloc (last_basic_block);
9026 sbitmap_ones (blocks);
9027 find_many_sub_basic_blocks (blocks);
9028 sbitmap_free (blocks);
9031 if (inserted)
9032 commit_edge_insertions ();
9034 #ifdef ENABLE_CHECKING
9035 /* Verify that we didn't turn one trapping insn into many, and that
9036 we found and corrected all of the problems wrt fixups on the
9037 fallthru edge. */
9038 verify_flow_info ();
9039 #endif