* reload1.c (find_optional_reg, inherit_one_chain): Always use
[official-gcc.git] / gcc / reload1.c
blobb4e109bf444b96c48804318490ba99065944e5cb
1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
27 #include "machmode.h"
28 #include "hard-reg-set.h"
29 #include "rtl.h"
30 #include "tm_p.h"
31 #include "obstack.h"
32 #include "insn-config.h"
33 #include "flags.h"
34 #include "function.h"
35 #include "expr.h"
36 #include "optabs.h"
37 #include "regs.h"
38 #include "basic-block.h"
39 #include "reload.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "real.h"
43 #include "toplev.h"
44 #include "except.h"
45 #include "tree.h"
47 /* This file contains the reload pass of the compiler, which is
48 run after register allocation has been done. It checks that
49 each insn is valid (operands required to be in registers really
50 are in registers of the proper class) and fixes up invalid ones
51 by copying values temporarily into registers for the insns
52 that need them.
54 The results of register allocation are described by the vector
55 reg_renumber; the insns still contain pseudo regs, but reg_renumber
56 can be used to find which hard reg, if any, a pseudo reg is in.
58 The technique we always use is to free up a few hard regs that are
59 called ``reload regs'', and for each place where a pseudo reg
60 must be in a hard reg, copy it temporarily into one of the reload regs.
62 Reload regs are allocated locally for every instruction that needs
63 reloads. When there are pseudos which are allocated to a register that
64 has been chosen as a reload reg, such pseudos must be ``spilled''.
65 This means that they go to other hard regs, or to stack slots if no other
66 available hard regs can be found. Spilling can invalidate more
67 insns, requiring additional need for reloads, so we must keep checking
68 until the process stabilizes.
70 For machines with different classes of registers, we must keep track
71 of the register class needed for each reload, and make sure that
72 we allocate enough reload registers of each class.
74 The file reload.c contains the code that checks one insn for
75 validity and reports the reloads that it needs. This file
76 is in charge of scanning the entire rtl code, accumulating the
77 reload needs, spilling, assigning reload registers to use for
78 fixing up each insn, and generating the new insns to copy values
79 into the reload registers. */
81 /* Element N is the constant value to which pseudo reg N is equivalent,
82 or zero if pseudo reg N is not equivalent to a constant.
83 find_reloads looks at this in order to replace pseudo reg N
84 with the constant it stands for. */
85 rtx *reg_equiv_constant;
87 /* Element N is a memory location to which pseudo reg N is equivalent,
88 prior to any register elimination (such as frame pointer to stack
89 pointer). Depending on whether or not it is a valid address, this value
90 is transferred to either reg_equiv_address or reg_equiv_mem. */
91 rtx *reg_equiv_memory_loc;
93 /* We allocate reg_equiv_memory_loc inside a varray so that the garbage
94 collector can keep track of what is inside. */
95 varray_type reg_equiv_memory_loc_varray;
97 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
98 This is used when the address is not valid as a memory address
99 (because its displacement is too big for the machine.) */
100 rtx *reg_equiv_address;
102 /* Element N is the memory slot to which pseudo reg N is equivalent,
103 or zero if pseudo reg N is not equivalent to a memory slot. */
104 rtx *reg_equiv_mem;
106 /* Widest width in which each pseudo reg is referred to (via subreg). */
107 static unsigned int *reg_max_ref_width;
109 /* Element N is the list of insns that initialized reg N from its equivalent
110 constant or memory slot. */
111 static rtx *reg_equiv_init;
113 /* Vector to remember old contents of reg_renumber before spilling. */
114 static short *reg_old_renumber;
116 /* Indicate whether the register's current value is one that is not
117 safe to retain across a call, even for registers that are normally
118 call-saved. */
119 static HARD_REG_SET reg_reloaded_call_part_clobbered;
121 /* Holds the last rtx used for any given reg, or 0 if it has never
122 been used for spilling yet. This rtx is reused, provided it has
123 the proper mode. */
124 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
126 /* This reg set indicates registers that can't be used as spill registers for
127 the currently processed insn. These are the hard registers which are live
128 during the insn, but not allocated to pseudos, as well as fixed
129 registers. */
130 static HARD_REG_SET bad_spill_regs;
132 /* These are the hard registers that can't be used as spill register for any
133 insn. This includes registers used for user variables and registers that
134 we can't eliminate. A register that appears in this set also can't be used
135 to retry register allocation. */
136 static HARD_REG_SET bad_spill_regs_global;
138 /* This vector of reg sets indicates, for each pseudo, which hard registers
139 may not be used for retrying global allocation because the register was
140 formerly spilled from one of them. If we allowed reallocating a pseudo to
141 a register that it was already allocated to, reload might not
142 terminate. */
143 static HARD_REG_SET *pseudo_previous_regs;
145 /* This vector of reg sets indicates, for each pseudo, which hard
146 registers may not be used for retrying global allocation because they
147 are used as spill registers during one of the insns in which the
148 pseudo is live. */
149 static HARD_REG_SET *pseudo_forbidden_regs;
151 /* All hard regs that have been used as spill registers for any insn are
152 marked in this set. This is used to update regs_ever_live in
153 finish_spills. */
154 static HARD_REG_SET used_spill_regs;
156 /* Nonzero if indirect addressing is supported on the machine; this means
157 that spilling (REG n) does not require reloading it into a register in
158 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
159 value indicates the level of indirect addressing supported, e.g., two
160 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
161 a hard register. */
162 static char spill_indirect_levels;
164 /* Nonzero if indirect addressing is supported when the innermost MEM is
165 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
166 which these are valid is the same as spill_indirect_levels, above. */
167 char indirect_symref_ok;
169 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
170 char double_reg_address_ok;
172 /* Record the stack slot for each spilled hard register. */
173 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
175 /* Width allocated so far for that stack slot. */
176 static unsigned int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
178 /* Record which pseudos needed to be spilled. */
179 static regset_head spilled_pseudos;
181 /* Used for communication between order_regs_for_reload and count_pseudo.
182 Used to avoid counting one pseudo twice. */
183 static regset_head pseudos_counted;
185 /* First uid used by insns created by reload in this function.
186 Used in find_equiv_reg. */
187 int reload_first_uid;
189 /* Flag set by local-alloc or global-alloc if anything is live in
190 a call-clobbered reg across calls. */
191 int caller_save_needed;
193 /* Set to 1 while reload_as_needed is operating.
194 Required by some machines to handle any generated moves differently. */
195 int reload_in_progress = 0;
197 /* These arrays record the insn_code of insns that may be needed to
198 perform input and output reloads of special objects. They provide a
199 place to pass a scratch register. */
200 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
201 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
203 /* This obstack is used for allocation of rtl during register elimination.
204 The allocated storage can be freed once find_reloads has processed the
205 insn. */
206 struct obstack reload_obstack;
208 /* Points to the beginning of the reload_obstack. All insn_chain structures
209 are allocated first. */
210 static char *reload_startobj;
212 /* The point after all insn_chain structures. Used to quickly deallocate
213 memory allocated in copy_reloads during calculate_needs_all_insns. */
214 static char *reload_firstobj;
216 /* This points before all local rtl generated by register elimination.
217 Used to quickly free all memory after processing one insn. */
218 static char *reload_insn_firstobj;
220 /* List of insn_chain instructions, one for every insn that reload needs to
221 examine. */
222 struct insn_chain *reload_insn_chain;
224 /* List of all insns needing reloads. */
225 static struct insn_chain *insns_need_reload;
227 /* This structure is used to record information about register eliminations.
228 Each array entry describes one possible way of eliminating a register
229 in favor of another. If there is more than one way of eliminating a
230 particular register, the most preferred should be specified first. */
232 struct elim_table
234 int from; /* Register number to be eliminated. */
235 int to; /* Register number used as replacement. */
236 HOST_WIDE_INT initial_offset; /* Initial difference between values. */
237 int can_eliminate; /* Nonzero if this elimination can be done. */
238 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
239 insns made by reload. */
240 HOST_WIDE_INT offset; /* Current offset between the two regs. */
241 HOST_WIDE_INT previous_offset;/* Offset at end of previous insn. */
242 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
243 rtx from_rtx; /* REG rtx for the register to be eliminated.
244 We cannot simply compare the number since
245 we might then spuriously replace a hard
246 register corresponding to a pseudo
247 assigned to the reg to be eliminated. */
248 rtx to_rtx; /* REG rtx for the replacement. */
251 static struct elim_table *reg_eliminate = 0;
253 /* This is an intermediate structure to initialize the table. It has
254 exactly the members provided by ELIMINABLE_REGS. */
255 static const struct elim_table_1
257 const int from;
258 const int to;
259 } reg_eliminate_1[] =
261 /* If a set of eliminable registers was specified, define the table from it.
262 Otherwise, default to the normal case of the frame pointer being
263 replaced by the stack pointer. */
265 #ifdef ELIMINABLE_REGS
266 ELIMINABLE_REGS;
267 #else
268 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
269 #endif
271 #define NUM_ELIMINABLE_REGS ARRAY_SIZE (reg_eliminate_1)
273 /* Record the number of pending eliminations that have an offset not equal
274 to their initial offset. If nonzero, we use a new copy of each
275 replacement result in any insns encountered. */
276 int num_not_at_initial_offset;
278 /* Count the number of registers that we may be able to eliminate. */
279 static int num_eliminable;
280 /* And the number of registers that are equivalent to a constant that
281 can be eliminated to frame_pointer / arg_pointer + constant. */
282 static int num_eliminable_invariants;
284 /* For each label, we record the offset of each elimination. If we reach
285 a label by more than one path and an offset differs, we cannot do the
286 elimination. This information is indexed by the difference of the
287 number of the label and the first label number. We can't offset the
288 pointer itself as this can cause problems on machines with segmented
289 memory. The first table is an array of flags that records whether we
290 have yet encountered a label and the second table is an array of arrays,
291 one entry in the latter array for each elimination. */
293 static int first_label_num;
294 static char *offsets_known_at;
295 static HOST_WIDE_INT (*offsets_at)[NUM_ELIMINABLE_REGS];
297 /* A preallocated REG rtx that is available throughout reload. */
298 static rtx permanent_test_reg;
300 /* Number of labels in the current function. */
302 static int num_labels;
304 static void replace_pseudos_in (rtx *, enum machine_mode, rtx);
305 static void maybe_fix_stack_asms (void);
306 static void calculate_needs_all_insns (int);
307 static void find_reload_regs (struct insn_chain *);
308 static void select_reload_regs (void);
309 static void delete_caller_save_insns (void);
311 static void spill_failure (rtx, enum reg_class);
312 static void count_spilled_pseudo (int, int, int);
313 static void delete_dead_insn (rtx);
314 static void alter_reg (int, int);
315 static void set_label_offsets (rtx, rtx, int);
316 static void check_eliminable_occurrences (rtx);
317 static void elimination_effects (rtx, enum machine_mode);
318 static int eliminate_regs_in_insn (rtx, int);
319 static void update_eliminable_offsets (void);
320 static void mark_not_eliminable (rtx, rtx, void *);
321 static void set_initial_elim_offsets (void);
322 static void verify_initial_elim_offsets (void);
323 static void set_initial_label_offsets (void);
324 static void set_offsets_for_label (rtx);
325 static void init_elim_table (void);
326 static void update_eliminables (HARD_REG_SET *);
327 static void spill_hard_reg (unsigned int, int);
328 static int finish_spills (int);
329 static void scan_paradoxical_subregs (rtx);
330 static void count_pseudo (int);
331 static void order_regs_for_reload (struct insn_chain *);
332 static void reload_as_needed (int);
333 static int reload_reg_class_lower (const void *, const void *);
334 static int function_invariant_p (rtx);
335 static void choose_reload_regs (struct insn_chain *);
336 static void emit_input_reload_insns (struct insn_chain *, struct reload *,
337 rtx, int);
338 static void emit_output_reload_insns (struct insn_chain *, struct reload *,
339 int);
340 static rtx do_input_reload (struct insn_chain *, struct reload *, int);
341 static rtx do_output_reload (struct insn_chain *, struct reload *, int);
342 static void emit_reload_insns (struct insn_chain *);
343 static rtx inc_for_reload (rtx, rtx, rtx, int);
344 #ifdef AUTO_INC_DEC
345 static void add_auto_inc_notes (rtx, rtx);
346 #endif
347 static void copy_eh_notes (rtx, rtx);
348 static rtx gen_reload (rtx, rtx, int);
350 /* Initialize the reload pass once per compilation. */
352 void
353 init_reload (void)
355 int i;
357 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
358 Set spill_indirect_levels to the number of levels such addressing is
359 permitted, zero if it is not permitted at all. */
361 rtx tem
362 = gen_rtx_MEM (Pmode,
363 gen_rtx_PLUS (Pmode,
364 gen_rtx_REG (Pmode,
365 LAST_VIRTUAL_REGISTER + 1),
366 GEN_INT (4)));
367 spill_indirect_levels = 0;
369 while (memory_address_p (QImode, tem))
371 spill_indirect_levels++;
372 tem = gen_rtx_MEM (Pmode, tem);
375 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
377 tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
378 indirect_symref_ok = memory_address_p (QImode, tem);
380 /* See if reg+reg is a valid (and offsettable) address. */
382 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
384 tem = gen_rtx_PLUS (Pmode,
385 gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
386 gen_rtx_REG (Pmode, i));
388 /* This way, we make sure that reg+reg is an offsettable address. */
389 tem = plus_constant (tem, 4);
391 if (memory_address_p (QImode, tem))
393 double_reg_address_ok = 1;
394 break;
398 /* Initialize obstack for our rtl allocation. */
399 gcc_obstack_init (&reload_obstack);
400 reload_startobj = obstack_alloc (&reload_obstack, 0);
402 INIT_REG_SET (&spilled_pseudos);
403 INIT_REG_SET (&pseudos_counted);
404 VARRAY_RTX_INIT (reg_equiv_memory_loc_varray, 0, "reg_equiv_memory_loc");
407 /* List of insn chains that are currently unused. */
408 static struct insn_chain *unused_insn_chains = 0;
410 /* Allocate an empty insn_chain structure. */
411 struct insn_chain *
412 new_insn_chain (void)
414 struct insn_chain *c;
416 if (unused_insn_chains == 0)
418 c = obstack_alloc (&reload_obstack, sizeof (struct insn_chain));
419 INIT_REG_SET (&c->live_before);
420 INIT_REG_SET (&c->live_after);
421 INIT_REG_SET (&c->unreloaded_sets);
422 INIT_REG_SET (&c->unreloaded_uses);
424 else
426 c = unused_insn_chains;
427 unused_insn_chains = c->next;
429 c->will_be_deleted = 0;
430 c->is_caller_save_insn = 0;
431 c->need_operand_change = 0;
432 c->need_elim = 0;
433 c->n_reloads = 0;
434 return c;
437 /* Small utility function to set all regs in hard reg set TO which are
438 allocated to pseudos in regset FROM. */
440 void
441 compute_use_by_pseudos (HARD_REG_SET *to, regset from)
443 unsigned int regno;
444 reg_set_iterator rsi;
446 EXECUTE_IF_SET_IN_REG_SET (from, FIRST_PSEUDO_REGISTER, regno, rsi)
448 int r = reg_renumber[regno];
449 int nregs;
451 if (r < 0)
453 /* reload_combine uses the information from
454 BASIC_BLOCK->global_live_at_start, which might still
455 contain registers that have not actually been allocated
456 since they have an equivalence. */
457 gcc_assert (reload_completed);
459 else
461 nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (regno)];
462 while (nregs-- > 0)
463 SET_HARD_REG_BIT (*to, r + nregs);
468 /* Replace all pseudos found in LOC with their corresponding
469 equivalences. */
471 static void
472 replace_pseudos_in (rtx *loc, enum machine_mode mem_mode, rtx usage)
474 rtx x = *loc;
475 enum rtx_code code;
476 const char *fmt;
477 int i, j;
479 if (! x)
480 return;
482 code = GET_CODE (x);
483 if (code == REG)
485 unsigned int regno = REGNO (x);
487 if (regno < FIRST_PSEUDO_REGISTER)
488 return;
490 x = eliminate_regs (x, mem_mode, usage);
491 if (x != *loc)
493 *loc = x;
494 replace_pseudos_in (loc, mem_mode, usage);
495 return;
498 if (reg_equiv_constant[regno])
499 *loc = reg_equiv_constant[regno];
500 else if (reg_equiv_mem[regno])
501 *loc = reg_equiv_mem[regno];
502 else if (reg_equiv_address[regno])
503 *loc = gen_rtx_MEM (GET_MODE (x), reg_equiv_address[regno]);
504 else
506 gcc_assert (!REG_P (regno_reg_rtx[regno])
507 || REGNO (regno_reg_rtx[regno]) != regno);
508 *loc = regno_reg_rtx[regno];
511 return;
513 else if (code == MEM)
515 replace_pseudos_in (& XEXP (x, 0), GET_MODE (x), usage);
516 return;
519 /* Process each of our operands recursively. */
520 fmt = GET_RTX_FORMAT (code);
521 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
522 if (*fmt == 'e')
523 replace_pseudos_in (&XEXP (x, i), mem_mode, usage);
524 else if (*fmt == 'E')
525 for (j = 0; j < XVECLEN (x, i); j++)
526 replace_pseudos_in (& XVECEXP (x, i, j), mem_mode, usage);
529 static void mark_dead_insns (void)
531 struct insn_chain *chain;
532 int i;
534 for (chain = reload_insn_chain; chain; chain = chain->next)
536 rtx insn = chain->insn;
537 chain->will_be_deleted = 0;
539 if (INSN_P (insn))
541 rtx set = single_set (insn);
542 PUT_MODE (insn, VOIDmode);
544 /* Skip insns that only set an equivalence. */
545 if (set && REG_P (SET_DEST (set))
546 && reg_renumber[REGNO (SET_DEST (set))] < 0
547 && reg_equiv_constant[REGNO (SET_DEST (set))])
548 chain->will_be_deleted = 1;
552 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
553 If that insn didn't set the register (i.e., it copied the register to
554 memory), just delete that insn instead of the equivalencing insn plus
555 anything now dead. If we call delete_dead_insn on that insn, we may
556 delete the insn that actually sets the register if the register dies
557 there and that is incorrect. */
559 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
561 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0)
563 rtx list;
564 for (list = reg_equiv_init[i]; list; list = XEXP (list, 1))
566 rtx equiv_insn = XEXP (list, 0);
568 /* If we already deleted the insn or if it may trap, we can't
569 delete it. The latter case shouldn't happen, but can
570 if an insn has a variable address, gets a REG_EH_REGION
571 note added to it, and then gets converted into an load
572 from a constant address. */
573 if (GET_CODE (equiv_insn) == NOTE
574 || can_throw_internal (equiv_insn))
575 continue;
576 #if 0
577 if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn)))
578 delete_dead_insn (equiv_insn);
579 else
580 #endif
581 PUT_MODE (equiv_insn, SImode);
587 /* Global variables used by reload and its subroutines. */
589 /* Set during calculate_needs if an insn needs register elimination. */
590 static int something_needs_elimination;
591 /* Set during calculate_needs if an insn needs an operand changed. */
592 static int something_needs_operands_changed;
594 /* Nonzero means we couldn't get enough spill regs. */
595 static int failure;
597 /* Main entry point for the reload pass.
599 FIRST is the first insn of the function being compiled.
601 GLOBAL nonzero means we were called from global_alloc
602 and should attempt to reallocate any pseudoregs that we
603 displace from hard regs we will use for reloads.
604 If GLOBAL is zero, we do not have enough information to do that,
605 so any pseudo reg that is spilled must go to the stack.
607 Return value is nonzero if reload failed
608 and we must not do any more for this function. */
611 reload (rtx first, int global)
613 int i;
614 rtx insn;
615 struct elim_table *ep;
616 basic_block bb;
618 /* Make sure even insns with volatile mem refs are recognizable. */
619 init_recog ();
621 failure = 0;
623 reload_firstobj = obstack_alloc (&reload_obstack, 0);
625 /* Make sure that the last insn in the chain
626 is not something that needs reloading. */
627 emit_note (NOTE_INSN_DELETED);
629 /* Enable find_equiv_reg to distinguish insns made by reload. */
630 reload_first_uid = get_max_uid ();
632 #ifdef SECONDARY_MEMORY_NEEDED
633 /* Initialize the secondary memory table. */
634 clear_secondary_mem ();
635 #endif
637 /* We don't have a stack slot for any spill reg yet. */
638 memset (spill_stack_slot, 0, sizeof spill_stack_slot);
639 memset (spill_stack_slot_width, 0, sizeof spill_stack_slot_width);
641 /* Initialize the save area information for caller-save, in case some
642 are needed. */
643 init_save_areas ();
645 /* Compute which hard registers are now in use
646 as homes for pseudo registers.
647 This is done here rather than (eg) in global_alloc
648 because this point is reached even if not optimizing. */
649 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
650 mark_home_live (i);
652 /* A function that receives a nonlocal goto must save all call-saved
653 registers. */
654 if (current_function_has_nonlocal_label)
655 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
656 if (! call_used_regs[i] && ! fixed_regs[i] && ! LOCAL_REGNO (i))
657 regs_ever_live[i] = 1;
659 /* Find all the pseudo registers that didn't get hard regs
660 but do have known equivalent constants or memory slots.
661 These include parameters (known equivalent to parameter slots)
662 and cse'd or loop-moved constant memory addresses.
664 Record constant equivalents in reg_equiv_constant
665 so they will be substituted by find_reloads.
666 Record memory equivalents in reg_mem_equiv so they can
667 be substituted eventually by altering the REG-rtx's. */
669 reg_equiv_constant = xcalloc (max_regno, sizeof (rtx));
670 reg_equiv_mem = xcalloc (max_regno, sizeof (rtx));
671 reg_equiv_init = xcalloc (max_regno, sizeof (rtx));
672 reg_equiv_address = xcalloc (max_regno, sizeof (rtx));
673 reg_max_ref_width = xcalloc (max_regno, sizeof (int));
674 reg_old_renumber = xcalloc (max_regno, sizeof (short));
675 memcpy (reg_old_renumber, reg_renumber, max_regno * sizeof (short));
676 pseudo_forbidden_regs = xmalloc (max_regno * sizeof (HARD_REG_SET));
677 pseudo_previous_regs = xcalloc (max_regno, sizeof (HARD_REG_SET));
679 COPY_HARD_REG_SET (bad_spill_regs_global, fixed_reg_set);
681 permanent_test_reg = gen_rtx_raw_REG (Pmode, 1);
683 /* Look for REG_EQUIV notes; record what each pseudo is equivalent
684 to. Also find all paradoxical subregs and find largest such for
685 each pseudo. */
687 num_eliminable_invariants = 0;
688 for (insn = first; insn; insn = NEXT_INSN (insn))
690 rtx set = single_set (insn);
692 /* We may introduce USEs that we want to remove at the end, so
693 we'll mark them with QImode. Make sure there are no
694 previously-marked insns left by say regmove. */
695 if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
696 && GET_MODE (insn) != VOIDmode)
697 PUT_MODE (insn, VOIDmode);
699 if (set != 0 && REG_P (SET_DEST (set)))
701 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
702 if (note
703 && (! function_invariant_p (XEXP (note, 0))
704 || ! flag_pic
705 /* A function invariant is often CONSTANT_P but may
706 include a register. We promise to only pass
707 CONSTANT_P objects to LEGITIMATE_PIC_OPERAND_P. */
708 || (CONSTANT_P (XEXP (note, 0))
709 && LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))))
711 rtx x = XEXP (note, 0);
712 i = REGNO (SET_DEST (set));
713 if (i > LAST_VIRTUAL_REGISTER)
715 /* It can happen that a REG_EQUIV note contains a MEM
716 that is not a legitimate memory operand. As later
717 stages of reload assume that all addresses found
718 in the reg_equiv_* arrays were originally legitimate,
719 we ignore such REG_EQUIV notes. */
720 if (memory_operand (x, VOIDmode))
722 /* Always unshare the equivalence, so we can
723 substitute into this insn without touching the
724 equivalence. */
725 reg_equiv_memory_loc[i] = copy_rtx (x);
727 else if (function_invariant_p (x))
729 if (GET_CODE (x) == PLUS)
731 /* This is PLUS of frame pointer and a constant,
732 and might be shared. Unshare it. */
733 reg_equiv_constant[i] = copy_rtx (x);
734 num_eliminable_invariants++;
736 else if (x == frame_pointer_rtx
737 || x == arg_pointer_rtx)
739 reg_equiv_constant[i] = x;
740 num_eliminable_invariants++;
742 else if (LEGITIMATE_CONSTANT_P (x))
743 reg_equiv_constant[i] = x;
744 else
746 reg_equiv_memory_loc[i]
747 = force_const_mem (GET_MODE (SET_DEST (set)), x);
748 if (!reg_equiv_memory_loc[i])
749 continue;
752 else
753 continue;
755 /* If this register is being made equivalent to a MEM
756 and the MEM is not SET_SRC, the equivalencing insn
757 is one with the MEM as a SET_DEST and it occurs later.
758 So don't mark this insn now. */
759 if (!MEM_P (x)
760 || rtx_equal_p (SET_SRC (set), x))
761 reg_equiv_init[i]
762 = gen_rtx_INSN_LIST (VOIDmode, insn, reg_equiv_init[i]);
767 /* If this insn is setting a MEM from a register equivalent to it,
768 this is the equivalencing insn. */
769 else if (set && MEM_P (SET_DEST (set))
770 && REG_P (SET_SRC (set))
771 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
772 && rtx_equal_p (SET_DEST (set),
773 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
774 reg_equiv_init[REGNO (SET_SRC (set))]
775 = gen_rtx_INSN_LIST (VOIDmode, insn,
776 reg_equiv_init[REGNO (SET_SRC (set))]);
778 if (INSN_P (insn))
779 scan_paradoxical_subregs (PATTERN (insn));
782 init_elim_table ();
784 first_label_num = get_first_label_num ();
785 num_labels = max_label_num () - first_label_num;
787 /* Allocate the tables used to store offset information at labels. */
788 /* We used to use alloca here, but the size of what it would try to
789 allocate would occasionally cause it to exceed the stack limit and
790 cause a core dump. */
791 offsets_known_at = xmalloc (num_labels);
792 offsets_at = xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (HOST_WIDE_INT));
794 /* Alter each pseudo-reg rtx to contain its hard reg number.
795 Assign stack slots to the pseudos that lack hard regs or equivalents.
796 Do not touch virtual registers. */
798 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
799 alter_reg (i, -1);
801 /* If we have some registers we think can be eliminated, scan all insns to
802 see if there is an insn that sets one of these registers to something
803 other than itself plus a constant. If so, the register cannot be
804 eliminated. Doing this scan here eliminates an extra pass through the
805 main reload loop in the most common case where register elimination
806 cannot be done. */
807 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
808 if (INSN_P (insn))
809 note_stores (PATTERN (insn), mark_not_eliminable, NULL);
811 maybe_fix_stack_asms ();
813 insns_need_reload = 0;
814 something_needs_elimination = 0;
816 /* Spill any hard regs that we know we can't eliminate. */
817 CLEAR_HARD_REG_SET (used_spill_regs);
818 /* There can be multiple ways to eliminate a register;
819 they should be listed adjacently.
820 Elimination for any register fails only if all possible ways fail. */
821 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; )
823 int from = ep->from;
824 int can_eliminate = 0;
827 can_eliminate |= ep->can_eliminate;
828 ep++;
830 while (ep < &reg_eliminate[NUM_ELIMINABLE_REGS] && ep->from == from);
831 if (! can_eliminate)
832 spill_hard_reg (from, 1);
835 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
836 if (frame_pointer_needed)
837 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, 1);
838 #endif
839 finish_spills (global);
841 /* From now on, we may need to generate moves differently. We may also
842 allow modifications of insns which cause them to not be recognized.
843 Any such modifications will be cleaned up during reload itself. */
844 reload_in_progress = 1;
846 /* This loop scans the entire function each go-round
847 and repeats until one repetition spills no additional hard regs. */
848 for (;;)
850 int something_changed;
851 int did_spill;
853 HOST_WIDE_INT starting_frame_size;
855 /* Round size of stack frame to stack_alignment_needed. This must be done
856 here because the stack size may be a part of the offset computation
857 for register elimination, and there might have been new stack slots
858 created in the last iteration of this loop. */
859 if (cfun->stack_alignment_needed)
860 assign_stack_local (BLKmode, 0, cfun->stack_alignment_needed);
862 starting_frame_size = get_frame_size ();
864 set_initial_elim_offsets ();
865 set_initial_label_offsets ();
867 /* For each pseudo register that has an equivalent location defined,
868 try to eliminate any eliminable registers (such as the frame pointer)
869 assuming initial offsets for the replacement register, which
870 is the normal case.
872 If the resulting location is directly addressable, substitute
873 the MEM we just got directly for the old REG.
875 If it is not addressable but is a constant or the sum of a hard reg
876 and constant, it is probably not addressable because the constant is
877 out of range, in that case record the address; we will generate
878 hairy code to compute the address in a register each time it is
879 needed. Similarly if it is a hard register, but one that is not
880 valid as an address register.
882 If the location is not addressable, but does not have one of the
883 above forms, assign a stack slot. We have to do this to avoid the
884 potential of producing lots of reloads if, e.g., a location involves
885 a pseudo that didn't get a hard register and has an equivalent memory
886 location that also involves a pseudo that didn't get a hard register.
888 Perhaps at some point we will improve reload_when_needed handling
889 so this problem goes away. But that's very hairy. */
891 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
892 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
894 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
896 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
897 XEXP (x, 0)))
898 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
899 else if (CONSTANT_P (XEXP (x, 0))
900 || (REG_P (XEXP (x, 0))
901 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
902 || (GET_CODE (XEXP (x, 0)) == PLUS
903 && REG_P (XEXP (XEXP (x, 0), 0))
904 && (REGNO (XEXP (XEXP (x, 0), 0))
905 < FIRST_PSEUDO_REGISTER)
906 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
907 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
908 else
910 /* Make a new stack slot. Then indicate that something
911 changed so we go back and recompute offsets for
912 eliminable registers because the allocation of memory
913 below might change some offset. reg_equiv_{mem,address}
914 will be set up for this pseudo on the next pass around
915 the loop. */
916 reg_equiv_memory_loc[i] = 0;
917 reg_equiv_init[i] = 0;
918 alter_reg (i, -1);
922 if (caller_save_needed)
923 setup_save_areas ();
925 /* If we allocated another stack slot, redo elimination bookkeeping. */
926 if (starting_frame_size != get_frame_size ())
927 continue;
929 if (caller_save_needed)
931 save_call_clobbered_regs ();
932 /* That might have allocated new insn_chain structures. */
933 reload_firstobj = obstack_alloc (&reload_obstack, 0);
936 mark_dead_insns ();
938 calculate_needs_all_insns (global);
940 CLEAR_REG_SET (&spilled_pseudos);
941 did_spill = 0;
943 something_changed = 0;
945 /* If we allocated any new memory locations, make another pass
946 since it might have changed elimination offsets. */
947 if (starting_frame_size != get_frame_size ())
948 something_changed = 1;
951 HARD_REG_SET to_spill;
952 CLEAR_HARD_REG_SET (to_spill);
953 update_eliminables (&to_spill);
954 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
955 if (TEST_HARD_REG_BIT (to_spill, i))
957 spill_hard_reg (i, 1);
958 did_spill = 1;
960 /* Regardless of the state of spills, if we previously had
961 a register that we thought we could eliminate, but now can
962 not eliminate, we must run another pass.
964 Consider pseudos which have an entry in reg_equiv_* which
965 reference an eliminable register. We must make another pass
966 to update reg_equiv_* so that we do not substitute in the
967 old value from when we thought the elimination could be
968 performed. */
969 something_changed = 1;
973 select_reload_regs ();
974 if (failure)
975 goto failed;
977 if (insns_need_reload != 0 || did_spill)
978 something_changed |= finish_spills (global);
980 if (! something_changed)
981 break;
983 if (caller_save_needed)
984 delete_caller_save_insns ();
986 obstack_free (&reload_obstack, reload_firstobj);
989 /* If global-alloc was run, notify it of any register eliminations we have
990 done. */
991 if (global)
992 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
993 if (ep->can_eliminate)
994 mark_elimination (ep->from, ep->to);
996 /* Use the reload registers where necessary
997 by generating move instructions to move the must-be-register
998 values into or out of the reload registers. */
1000 if (insns_need_reload != 0 || something_needs_elimination
1001 || something_needs_operands_changed)
1003 HOST_WIDE_INT old_frame_size = get_frame_size ();
1005 reload_as_needed (global);
1007 gcc_assert (old_frame_size == get_frame_size ());
1009 if (num_eliminable)
1010 verify_initial_elim_offsets ();
1013 /* If we were able to eliminate the frame pointer, show that it is no
1014 longer live at the start of any basic block. If it ls live by
1015 virtue of being in a pseudo, that pseudo will be marked live
1016 and hence the frame pointer will be known to be live via that
1017 pseudo. */
1019 if (! frame_pointer_needed)
1020 FOR_EACH_BB (bb)
1021 CLEAR_REGNO_REG_SET (bb->global_live_at_start,
1022 HARD_FRAME_POINTER_REGNUM);
1024 /* Come here (with failure set nonzero) if we can't get enough spill regs
1025 and we decide not to abort about it. */
1026 failed:
1028 CLEAR_REG_SET (&spilled_pseudos);
1029 reload_in_progress = 0;
1031 /* Now eliminate all pseudo regs by modifying them into
1032 their equivalent memory references.
1033 The REG-rtx's for the pseudos are modified in place,
1034 so all insns that used to refer to them now refer to memory.
1036 For a reg that has a reg_equiv_address, all those insns
1037 were changed by reloading so that no insns refer to it any longer;
1038 but the DECL_RTL of a variable decl may refer to it,
1039 and if so this causes the debugging info to mention the variable. */
1041 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1043 rtx addr = 0;
1045 if (reg_equiv_mem[i])
1046 addr = XEXP (reg_equiv_mem[i], 0);
1048 if (reg_equiv_address[i])
1049 addr = reg_equiv_address[i];
1051 if (addr)
1053 if (reg_renumber[i] < 0)
1055 rtx reg = regno_reg_rtx[i];
1057 REG_USERVAR_P (reg) = 0;
1058 PUT_CODE (reg, MEM);
1059 XEXP (reg, 0) = addr;
1060 if (reg_equiv_memory_loc[i])
1061 MEM_COPY_ATTRIBUTES (reg, reg_equiv_memory_loc[i]);
1062 else
1064 MEM_IN_STRUCT_P (reg) = MEM_SCALAR_P (reg) = 0;
1065 MEM_ATTRS (reg) = 0;
1068 else if (reg_equiv_mem[i])
1069 XEXP (reg_equiv_mem[i], 0) = addr;
1073 /* We must set reload_completed now since the cleanup_subreg_operands call
1074 below will re-recognize each insn and reload may have generated insns
1075 which are only valid during and after reload. */
1076 reload_completed = 1;
1078 /* Make a pass over all the insns and delete all USEs which we inserted
1079 only to tag a REG_EQUAL note on them. Remove all REG_DEAD and REG_UNUSED
1080 notes. Delete all CLOBBER insns, except those that refer to the return
1081 value and the special mem:BLK CLOBBERs added to prevent the scheduler
1082 from misarranging variable-array code, and simplify (subreg (reg))
1083 operands. Also remove all REG_RETVAL and REG_LIBCALL notes since they
1084 are no longer useful or accurate. Strip and regenerate REG_INC notes
1085 that may have been moved around. */
1087 for (insn = first; insn; insn = NEXT_INSN (insn))
1088 if (INSN_P (insn))
1090 rtx *pnote;
1092 if (CALL_P (insn))
1093 replace_pseudos_in (& CALL_INSN_FUNCTION_USAGE (insn),
1094 VOIDmode, CALL_INSN_FUNCTION_USAGE (insn));
1096 if ((GET_CODE (PATTERN (insn)) == USE
1097 /* We mark with QImode USEs introduced by reload itself. */
1098 && (GET_MODE (insn) == QImode
1099 || find_reg_note (insn, REG_EQUAL, NULL_RTX)))
1100 || (GET_CODE (PATTERN (insn)) == CLOBBER
1101 && (!MEM_P (XEXP (PATTERN (insn), 0))
1102 || GET_MODE (XEXP (PATTERN (insn), 0)) != BLKmode
1103 || (GET_CODE (XEXP (XEXP (PATTERN (insn), 0), 0)) != SCRATCH
1104 && XEXP (XEXP (PATTERN (insn), 0), 0)
1105 != stack_pointer_rtx))
1106 && (!REG_P (XEXP (PATTERN (insn), 0))
1107 || ! REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))))
1109 delete_insn (insn);
1110 continue;
1113 /* Some CLOBBERs may survive until here and still reference unassigned
1114 pseudos with const equivalent, which may in turn cause ICE in later
1115 passes if the reference remains in place. */
1116 if (GET_CODE (PATTERN (insn)) == CLOBBER)
1117 replace_pseudos_in (& XEXP (PATTERN (insn), 0),
1118 VOIDmode, PATTERN (insn));
1120 pnote = &REG_NOTES (insn);
1121 while (*pnote != 0)
1123 if (REG_NOTE_KIND (*pnote) == REG_DEAD
1124 || REG_NOTE_KIND (*pnote) == REG_UNUSED
1125 || REG_NOTE_KIND (*pnote) == REG_INC
1126 || REG_NOTE_KIND (*pnote) == REG_RETVAL
1127 || REG_NOTE_KIND (*pnote) == REG_LIBCALL)
1128 *pnote = XEXP (*pnote, 1);
1129 else
1130 pnote = &XEXP (*pnote, 1);
1133 #ifdef AUTO_INC_DEC
1134 add_auto_inc_notes (insn, PATTERN (insn));
1135 #endif
1137 /* And simplify (subreg (reg)) if it appears as an operand. */
1138 cleanup_subreg_operands (insn);
1141 /* If we are doing stack checking, give a warning if this function's
1142 frame size is larger than we expect. */
1143 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
1145 HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
1146 static int verbose_warned = 0;
1148 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1149 if (regs_ever_live[i] && ! fixed_regs[i] && call_used_regs[i])
1150 size += UNITS_PER_WORD;
1152 if (size > STACK_CHECK_MAX_FRAME_SIZE)
1154 warning ("frame size too large for reliable stack checking");
1155 if (! verbose_warned)
1157 warning ("try reducing the number of local variables");
1158 verbose_warned = 1;
1163 /* Indicate that we no longer have known memory locations or constants. */
1164 if (reg_equiv_constant)
1165 free (reg_equiv_constant);
1166 reg_equiv_constant = 0;
1167 VARRAY_GROW (reg_equiv_memory_loc_varray, 0);
1168 reg_equiv_memory_loc = 0;
1170 if (offsets_known_at)
1171 free (offsets_known_at);
1172 if (offsets_at)
1173 free (offsets_at);
1175 free (reg_equiv_mem);
1176 free (reg_equiv_init);
1177 free (reg_equiv_address);
1178 free (reg_max_ref_width);
1179 free (reg_old_renumber);
1180 free (pseudo_previous_regs);
1181 free (pseudo_forbidden_regs);
1183 /* Free all the insn_chain structures at once. */
1184 obstack_free (&reload_obstack, reload_startobj);
1185 unused_insn_chains = 0;
1186 fixup_abnormal_edges ();
1188 /* Replacing pseudos with their memory equivalents might have
1189 created shared rtx. Subsequent passes would get confused
1190 by this, so unshare everything here. */
1191 unshare_all_rtl_again (first);
1193 #ifdef STACK_BOUNDARY
1194 /* init_emit has set the alignment of the hard frame pointer
1195 to STACK_BOUNDARY. It is very likely no longer valid if
1196 the hard frame pointer was used for register allocation. */
1197 if (!frame_pointer_needed)
1198 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = BITS_PER_UNIT;
1199 #endif
1201 return failure;
1204 /* Yet another special case. Unfortunately, reg-stack forces people to
1205 write incorrect clobbers in asm statements. These clobbers must not
1206 cause the register to appear in bad_spill_regs, otherwise we'll call
1207 fatal_insn later. We clear the corresponding regnos in the live
1208 register sets to avoid this.
1209 The whole thing is rather sick, I'm afraid. */
1211 static void
1212 maybe_fix_stack_asms (void)
1214 #ifdef STACK_REGS
1215 const char *constraints[MAX_RECOG_OPERANDS];
1216 enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
1217 struct insn_chain *chain;
1219 for (chain = reload_insn_chain; chain != 0; chain = chain->next)
1221 int i, noperands;
1222 HARD_REG_SET clobbered, allowed;
1223 rtx pat;
1225 if (! INSN_P (chain->insn)
1226 || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
1227 continue;
1228 pat = PATTERN (chain->insn);
1229 if (GET_CODE (pat) != PARALLEL)
1230 continue;
1232 CLEAR_HARD_REG_SET (clobbered);
1233 CLEAR_HARD_REG_SET (allowed);
1235 /* First, make a mask of all stack regs that are clobbered. */
1236 for (i = 0; i < XVECLEN (pat, 0); i++)
1238 rtx t = XVECEXP (pat, 0, i);
1239 if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
1240 SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
1243 /* Get the operand values and constraints out of the insn. */
1244 decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc,
1245 constraints, operand_mode);
1247 /* For every operand, see what registers are allowed. */
1248 for (i = 0; i < noperands; i++)
1250 const char *p = constraints[i];
1251 /* For every alternative, we compute the class of registers allowed
1252 for reloading in CLS, and merge its contents into the reg set
1253 ALLOWED. */
1254 int cls = (int) NO_REGS;
1256 for (;;)
1258 char c = *p;
1260 if (c == '\0' || c == ',' || c == '#')
1262 /* End of one alternative - mark the regs in the current
1263 class, and reset the class. */
1264 IOR_HARD_REG_SET (allowed, reg_class_contents[cls]);
1265 cls = NO_REGS;
1266 p++;
1267 if (c == '#')
1268 do {
1269 c = *p++;
1270 } while (c != '\0' && c != ',');
1271 if (c == '\0')
1272 break;
1273 continue;
1276 switch (c)
1278 case '=': case '+': case '*': case '%': case '?': case '!':
1279 case '0': case '1': case '2': case '3': case '4': case 'm':
1280 case '<': case '>': case 'V': case 'o': case '&': case 'E':
1281 case 'F': case 's': case 'i': case 'n': case 'X': case 'I':
1282 case 'J': case 'K': case 'L': case 'M': case 'N': case 'O':
1283 case 'P':
1284 break;
1286 case 'p':
1287 cls = (int) reg_class_subunion[cls]
1288 [(int) MODE_BASE_REG_CLASS (VOIDmode)];
1289 break;
1291 case 'g':
1292 case 'r':
1293 cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
1294 break;
1296 default:
1297 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
1298 cls = (int) reg_class_subunion[cls]
1299 [(int) MODE_BASE_REG_CLASS (VOIDmode)];
1300 else
1301 cls = (int) reg_class_subunion[cls]
1302 [(int) REG_CLASS_FROM_CONSTRAINT (c, p)];
1304 p += CONSTRAINT_LEN (c, p);
1307 /* Those of the registers which are clobbered, but allowed by the
1308 constraints, must be usable as reload registers. So clear them
1309 out of the life information. */
1310 AND_HARD_REG_SET (allowed, clobbered);
1311 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1312 if (TEST_HARD_REG_BIT (allowed, i))
1314 CLEAR_REGNO_REG_SET (&chain->live_before, i);
1315 CLEAR_REGNO_REG_SET (&chain->live_after, i);
1319 #endif
1322 static int
1323 conflict (struct reload_reg_use *ru1, struct reload_reg_use *ru2)
1325 return (! ru1->ignored && ! ru2->ignored
1326 && ru1->birth < ru2->death && ru2->birth < ru1->death);
1329 /* Called from scan_rtx if that function is called for the purpose of marking
1330 the occurring registers. mark_reg will update CHAIN->SET_REGS or
1331 CHAIN->USED_REGS for an occurrence of register X. If it is inside a
1332 subreg, WORD contains the value of SUBREG_WORD. MODE is the mode of the
1333 access, IS_OUTPUT describes whether the register is being written to. */
1334 static void
1335 mark_reg (struct insn_chain *chain, int regno, int real_regno, enum machine_mode mode,
1336 int is_output, int is_reloaded)
1338 int nregs = 1;
1339 int i;
1341 if (real_regno < 0)
1343 if (! is_reloaded)
1345 if (is_output)
1346 SET_REGNO_REG_SET (&chain->unreloaded_sets, regno);
1347 else
1348 SET_REGNO_REG_SET (&chain->unreloaded_uses, regno);
1350 return;
1353 if (regno < FIRST_PSEUDO_REGISTER)
1355 nregs = HARD_REGNO_NREGS (real_regno, mode);
1357 else
1358 abort ();
1360 for (i = 0; i < nregs; i++)
1361 if (is_output)
1362 SET_HARD_REG_BIT (chain->set_regs, real_regno + i);
1363 else
1364 SET_HARD_REG_BIT (chain->used_regs, real_regno + i);
1367 static void
1368 add_feed (struct reload_insn *source, struct reload_insn *dest)
1370 int space = source->feeds_space;
1371 if (source->n_feeds == space)
1373 space += 5;
1374 if (source->n_feeds == 0)
1375 source->feeds = (struct reload_insn **) xmalloc (space * sizeof (struct reload_insn *));
1376 else
1377 source->feeds = (struct reload_insn **) xrealloc (source->feeds,
1378 space * sizeof (struct reload_insn *));
1379 source->feeds_space = space;
1381 source->feeds[source->n_feeds] = dest;
1382 source->n_feeds++;
1383 source->n_feeds_remaining++;
1384 source->status = RLIS_NOT_SCHEDULED;
1385 dest->status = RLIS_NOT_SCHEDULED;
1388 static void
1389 add_reg_feed (struct insn_chain *chain, int regno, int real_regno,
1390 enum machine_mode mode, struct reload_insn *rli, int is_output)
1392 int scan_start = 1 + 2 * chain->n_reloads + (is_output != 0 ? chain->n_input_regs : 0);
1393 int scan_count = (is_output == 0 ? chain->n_input_regs : chain->n_output_regs);
1394 int nregs, i;
1396 if (regno >= FIRST_PSEUDO_REGISTER)
1398 if (real_regno >= 0)
1399 abort ();
1400 return;
1403 nregs = HARD_REGNO_NREGS (real_regno, mode);
1404 for (i = 0; i < nregs; i++)
1406 int j;
1407 struct reload_insn *reg_insn = chain->rli + scan_start;
1409 for (j = 0; j < scan_count; j++, reg_insn++)
1411 if (reg_insn->nr == real_regno + i)
1412 break;
1414 if (j == scan_count)
1415 abort ();
1416 if (is_output == 0)
1417 add_feed (reg_insn, rli);
1418 else
1420 if (is_output == 2)
1421 chain->reg_usage[j + chain->n_input_regs].earlyclobber |= is_output == 2;
1422 add_feed (rli, reg_insn);
1427 /* Walk the rtx found in *LOC, which occurs inside the insn described by
1428 CHAIN.
1430 We are looking for register references; JUST_MARK determines
1431 what we do with them. If it is nonzero, we compute the two reg sets
1432 CHAIN->SET_REGS and CHAIN->USED_REGS, otherwise we compute
1433 dependence information for the reload_insn structures of CHAIN.
1435 If we compute dependency information, RLI contains the reload insn that
1436 we are "inside", this information is gathered by looking at the
1437 replacements set up by find_reloads. The caller must call subst_dummy
1438 before calling scan_rtx so that we can detect replacements.
1440 IS_OUTPUT determines whether the piece of rtl we are scanning is being
1441 written to; it is 1 for normal writes, 2 for earlyclobbers, and 0 for
1442 reads.
1444 NO_ADDRESSES can be used to skip memory addresses. */
1445 static void
1446 scan_rtx (struct insn_chain *chain, rtx *loc, int is_output,
1447 struct reload_insn *rli, int no_addresses, int just_mark,
1448 int is_reloaded)
1450 const char *fmt;
1451 rtx x = *loc;
1452 enum rtx_code code = GET_CODE (x);
1453 enum machine_mode mode = GET_MODE (x);
1454 int i, j, regno, real_regno;
1456 if (x == &dummy_replacement_rtx)
1458 /* Look up the reload for this replacement and continue scanning inside
1459 the replaced contents. */
1460 int k = replacement_nr (loc);
1461 int reload_nr = replacements[k].what;
1462 rtx contents = replacements[k].contents;
1463 struct reload_insn *reload_in_insn, *reload_out_insn;
1464 struct reload *rl;
1466 if (reload_nr < 0)
1467 abort ();
1468 rl = chain->rld + reload_nr;
1470 if ((! rl->in && ! rl->out) || rl->secondary_p)
1471 abort ();
1473 if (is_output != 0 && ! rl->out)
1474 abort ();
1475 if (is_output == 0 && ! rl->in)
1476 abort ();
1478 if (just_mark)
1480 if (is_output == 0)
1482 if (rl->optional)
1483 scan_rtx (chain, &contents, 0, 0, 0, 1, 1);
1484 else
1485 scan_rtx (chain, &rl->in, 0, 0, 0, 1, 1);
1487 else
1489 if (rl->optional)
1490 scan_rtx (chain, &contents, is_output, 0, 0, 1, 1);
1491 else
1492 scan_rtx (chain, &rl->out, is_output, 0, 0, 1, 1);
1494 return;
1497 reload_in_insn = chain->rli + 1 + reload_nr * 2;
1498 reload_out_insn = chain->rli + 2 + reload_nr * 2;
1500 if (is_output == 0)
1502 add_feed (reload_in_insn, rli);
1503 if (rl->scanned_input)
1504 return;
1505 if (! just_mark)
1506 rl->scanned_input = 1;
1507 if (rl->optional)
1508 scan_rtx (chain, &contents, 0, reload_in_insn, 0, 0, 1);
1509 else
1510 scan_rtx (chain, &rl->in, 0, reload_in_insn, 0, 0, 1);
1512 else
1514 rl->reginfo.earlyclobber |= is_output == 2;
1515 add_feed (rli, reload_out_insn);
1516 if (rl->scanned_output)
1517 return;
1518 if (! just_mark)
1519 rl->scanned_output = 1;
1520 if (rl->optional)
1521 scan_rtx (chain, &contents, is_output, reload_out_insn, 0, 0, 1);
1522 else
1523 scan_rtx (chain, &rl->out, is_output, reload_out_insn, 0, 0, 1);
1526 return;
1529 code = GET_CODE (x);
1530 fmt = GET_RTX_FORMAT (code);
1531 switch (code)
1533 case SUBREG:
1534 if (GET_CODE (SUBREG_REG (x)) != REG)
1535 break;
1536 regno = REGNO (SUBREG_REG (x));
1537 real_regno = (regno >= FIRST_PSEUDO_REGISTER
1538 ? reg_renumber[regno] : regno);
1540 if (real_regno >= 0)
1541 real_regno += subreg_regno_offset (real_regno,
1542 GET_MODE (SUBREG_REG (x)),
1543 SUBREG_BYTE (x), GET_MODE (x));
1545 /* @@@ This is probably going to trigger some day... */
1546 if (x == &dummy_replacement_rtx)
1547 abort ();
1549 goto reg_subreg_common;
1551 case REG:
1552 regno = REGNO (x);
1553 real_regno = (regno >= FIRST_PSEUDO_REGISTER
1554 ? reg_renumber[regno] : regno);
1555 reg_subreg_common:
1556 if (just_mark)
1557 mark_reg (chain, regno, real_regno, mode, is_output, is_reloaded);
1558 else
1559 add_reg_feed (chain, regno, real_regno, mode, rli, is_output);
1560 return;
1562 case MEM:
1563 if (! no_addresses)
1564 scan_rtx (chain, &XEXP (x, 0), 0, rli, 0, just_mark, 0);
1565 return;
1567 case SET:
1568 scan_rtx (chain, &SET_SRC (x), 0, rli, 0, just_mark, 0);
1569 scan_rtx (chain, &SET_DEST (x), 1, rli, 0, just_mark, 0);
1570 return;
1572 case STRICT_LOW_PART:
1573 scan_rtx (chain, &XEXP (x, 0), 0, rli, 0, just_mark, 0);
1574 scan_rtx (chain, &XEXP (x, 0), is_output, rli, 0, just_mark, 0);
1575 return;
1577 case ZERO_EXTRACT:
1578 case SIGN_EXTRACT:
1579 scan_rtx (chain, &XEXP (x, 0), 0, rli, 0, just_mark, 0);
1580 if (is_output)
1581 scan_rtx (chain, &XEXP (x, 0), is_output, rli, 0, just_mark, 0);
1582 scan_rtx (chain, &XEXP (x, 1), 0, rli, 0, just_mark, 0);
1583 scan_rtx (chain, &XEXP (x, 2), 0, rli, 0, just_mark, 0);
1584 return;
1586 case POST_INC:
1587 case PRE_INC:
1588 case POST_DEC:
1589 case PRE_DEC:
1590 scan_rtx (chain, &XEXP (x, 0), 0, rli, 0, just_mark, 0);
1591 scan_rtx (chain, &XEXP (x, 0), 1, rli, 0, just_mark, 0);
1592 return;
1594 case CLOBBER:
1595 scan_rtx (chain, &SET_DEST (x), 2, rli, 0, just_mark, 0);
1596 return;
1598 default:
1599 break;
1602 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1604 if (fmt[i] == 'e')
1605 scan_rtx (chain, &XEXP (x, i), is_output, rli, no_addresses,
1606 just_mark, 0);
1607 else if (fmt[i] == 'E')
1608 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1609 scan_rtx (chain, &XVECEXP (x, i, j), is_output, rli, no_addresses,
1610 just_mark, 0);
1614 /* Generate dependencies for the secondary memory rtx's. We need to do this
1615 because secondary memory may have address reloads. */
1616 static void
1617 scan_secondary_mem (struct insn_chain *chain, int just_mark)
1619 #ifdef SECONDARY_MEMORY_NEEDED
1620 int i;
1622 for (i = 0; i < chain->n_reloads; i++)
1624 struct reload *rl = chain->rld + i;
1625 if (rl->in
1626 && GET_CODE (rl->in) == REG
1627 && REGNO (rl->in) < FIRST_PSEUDO_REGISTER
1628 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (rl->in)),
1629 rl->class, rl->inmode))
1631 rtx mem = get_secondary_mem (NULL_RTX, rl->inmode, i, RELOAD_FOR_NONE);
1632 scan_rtx (chain, &XEXP (mem, 0), 0, chain->rli + 1 + i * 2, 0,
1633 just_mark, 0);
1635 if (rl->out
1636 && GET_CODE (rl->out) == REG
1637 && REGNO (rl->out) < FIRST_PSEUDO_REGISTER
1638 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (rl->out)),
1639 rl->class, rl->outmode))
1641 rtx mem = get_secondary_mem (NULL_RTX, rl->outmode, i, RELOAD_FOR_NONE);
1642 scan_rtx (chain, &XEXP (mem, 0), 0, chain->rli + 2 + i * 2, 0,
1643 just_mark, 0);
1646 #endif
1649 static inline struct reload_insn *
1650 rli_from_reload_nr (struct insn_chain *chain, int nr, int output)
1652 return chain->rli + (output ? 2 : 1) + nr * 2;
1655 /* Compute dependency information for the reloads and hard regs in CHAIN. */
1656 static void
1657 compute_feeds (struct insn_chain *chain)
1659 int count;
1660 int i;
1662 for (i = 0; i < chain->n_reloads; i++)
1664 chain->rld[i].scanned_input = 0;
1665 chain->rld[i].scanned_output = 0;
1666 chain->rld[i].reginfo.earlyclobber = 0;
1669 extract_insn (chain->insn_copy);
1671 subst_dummy ();
1673 for (i = 0; i < recog_data.n_operands; i++)
1675 if (recog_data.operand_type[i] != OP_OUT)
1676 scan_rtx (chain, recog_data.operand_loc[i], 0, chain->rli, 0, 0, 0);
1677 if (recog_data.operand_type[i] != OP_IN)
1679 int ec = earlyclobber_operand_p (recog_data.operand[i]);
1680 scan_rtx (chain, recog_data.operand_loc[i], 1 + ec, chain->rli,
1681 recog_data.operand_type[i] == OP_INOUT, 0, 0);
1685 scan_secondary_mem (chain, 0);
1687 undo_subst_dummy ();
1689 /* We're scanning only the operands of the insn. Hard register references
1690 may be outside the operands, so see if we have any RLIS_IGNORED
1691 registers. */
1692 count = chain->n_input_regs + chain->n_output_regs;
1693 for (i = 0; i < count; i++)
1695 struct reload_insn *rli = chain->rli + 1 + 2 * chain->n_reloads + i;
1696 if (rli->status != RLIS_IGNORED)
1697 continue;
1698 if (rli->type == RLI_INPUTREG)
1699 add_feed (rli, chain->rli);
1700 else if (rli->type == RLI_OUTPUTREG)
1701 add_feed (chain->rli, rli);
1702 else
1703 abort ();
1708 /* Determine whether we need to actually emit an output reload for RL. */
1709 static int
1710 output_reload_unnecessary (struct insn_chain *chain, struct reload *rl)
1712 rtx old = rl->out_reg;
1714 if (GET_CODE (old) == SCRATCH)
1715 return 1;
1716 if (GET_CODE (old) == SUBREG)
1717 old = SUBREG_REG (old);
1719 if (REG_P (old)
1720 && find_reg_note (chain->insn, REG_UNUSED, old) != 0)
1721 return 1;
1722 return 0;
1725 /* Generate a valid order for the reload insns, taking into account the
1726 feeder/user relationships between them. This is basically a "mini
1727 scheduler". It gives out time slots to every reload insn and the main
1728 insn we're reloading. Each reload insn gets two time slots, and the main
1729 insn three. */
1730 static void
1731 compute_reload_order (struct insn_chain *chain)
1733 int count = 1 + 2 * chain->n_reloads + chain->n_input_regs + chain->n_output_regs;
1734 int i, j;
1735 int last = -1;
1736 int pos;
1738 pos = 0;
1739 for (;;)
1741 struct reload_insn *rli;
1742 int any_available = 0;
1743 /* Find an insn that can be scheduled at this point. */
1744 for (i = 0; i < count; i++)
1746 rli = chain->rli + i;
1747 if (rli->status != RLIS_NOT_SCHEDULED)
1748 continue;
1749 any_available = 1;
1750 if (rli->n_feeds_remaining > 0)
1751 continue;
1752 break;
1754 if (i == count)
1756 if (any_available)
1757 abort ();
1758 else
1759 break;
1762 if (last == -1)
1763 chain->last_rlinsn = i;
1764 else
1765 chain->rli[last].prev_order = i;
1766 last = i;
1768 /* Leave three slots instead of two for the main insn. */
1769 if (i == 0)
1770 pos--;
1772 rli->order = pos;
1773 pos -= 2;
1774 rli->status = RLIS_SCHEDULED;
1775 /* Reduce the remaining feed count for all insns that feed this one. */
1776 for (j = 0; j < count; j++)
1778 int k;
1779 struct reload_insn *rlj = chain->rli + j;
1780 if (rlj->status != 1 || rlj->n_feeds_remaining == 0)
1781 continue;
1782 for (k = 0; k < rlj->n_feeds; k++)
1783 if (rlj->feeds[k] == rli)
1784 rlj->n_feeds_remaining--;
1787 if (last < 0)
1788 abort ();
1789 chain->rli[last].prev_order = -1;
1791 /* Make the order values start at 1. */
1792 for (i = 0; i < count; i++)
1793 if (chain->rli[i].status == RLIS_SCHEDULED)
1794 chain->rli[i].order -= pos + 1;
1796 /* Correct the order for unnecessary output reloads: these die within the
1797 main insn's final slot. */
1798 for (i = 0; i < chain->n_reloads; i++)
1800 struct reload *rl = chain->rld + i;
1801 if (rl->out_reg != 0
1802 && output_reload_unnecessary (chain, rl))
1804 struct reload_insn *rli = chain->rli + 2 + 2 * i;
1805 rli->order = chain->rli[0].order + 1;
1806 rli->unnecessary = 1;
1811 /* The following two functions deal with computing proper values of
1812 the "order" field in a reload_insn for reloads that are optional.
1813 An optional reload can be enabled or disabled; if it's enabled its
1814 order is used normally, otherwise we pretend it isn't there. */
1815 static int
1816 rlinsn_order (struct insn_chain *chain, struct reload_insn *rli)
1818 if (rli->type != RLI_OUTPUTRELOAD
1819 || ! chain->rld[rli->nr].optional
1820 || chain->rld[rli->nr].enabled)
1821 return rli->order;
1822 return chain->rli[0].order;
1825 static int
1826 rlinsn_last_use (struct insn_chain *chain, struct reload_insn *rli)
1828 int j;
1829 int retval = 0;
1831 /* An input reload dies no earlier than its last use. */
1832 for (j = 0; j < rli->n_feeds; j++)
1834 struct reload_insn *rlj = rli->feeds[j];
1835 int order;
1837 if (rlj->ignored)
1838 continue;
1840 order = rlinsn_order (chain, rlj);
1842 /* Optional reloads are transparent for this purpose. */
1843 if (rlj->type == RLI_INPUTRELOAD
1844 && chain->rld[rlj->nr].optional
1845 && ! chain->rld[rlj->nr].enabled)
1846 order = rlinsn_last_use (chain, rlj);
1848 if (order > retval)
1849 retval = order;
1851 return retval;
1854 /* This function computes the IGNORED fields of reload insns and hard register
1855 usage information. Ignored reload insns are never emitted, and ignored
1856 hard regs do not conflict with anything.
1857 The following cases are handled:
1858 - If an output reload is unnecessary (because the output register is dead
1859 after the insn), then it is ignored. There will be an output register
1860 for it, which will also be marked ignored.
1861 @@@ Still need to handle reg that dies, but has no output reload. This
1862 will give better code.
1863 - If we decide to perform an optional input reload, there may be address
1864 reloads for the now reloaded rtx. These are unnecessary and will be
1865 ignored. This can also affect input regs. */
1866 static void
1867 compute_ignored (struct insn_chain *chain)
1869 int count = 1 + 2 * chain->n_reloads + chain->n_input_regs + chain->n_output_regs;
1870 int i, changed;
1872 /* Initialization. */
1873 for (i = 0; i < count; i++)
1874 chain->rli[i].n_feeds_remaining = 0;
1876 for (i = 0; i < count; i++)
1878 int j;
1879 struct reload_insn *rli = chain->rli + i;
1880 rli->ignored = rli->unnecessary;
1882 if (! rli->ignored)
1883 for (j = 0; j < rli->n_feeds; j++)
1884 rli->feeds[j]->n_feeds_remaining++;
1887 /* Iterate until no more reload_insns are marked ignored. */
1890 changed = 0;
1891 for (i = 0; i < count; i++)
1893 struct reload_insn *rli = chain->rli + i;
1894 int j, needed = 0;
1896 if (rli->status == RLIS_IGNORED || rli->ignored)
1897 continue;
1899 /* Can't ignore an input reload that is the head for inheritance. */
1900 if (rli->type == RLI_INPUTRELOAD
1901 && chain->rld[rli->nr].out == 0
1902 && chain->rld[rli->nr].reginfo.live_until_end)
1903 needed = 1;
1905 if (rli->type == RLI_OUTPUTREG)
1907 if (rli->n_feeds_remaining != 0)
1908 needed = 1;
1910 else if (rli->type == RLI_INPUTRELOAD || rli->type == RLI_INPUTREG)
1912 for (j = 0; j < rli->n_feeds; j++)
1914 struct reload_insn *fed = rli->feeds[j];
1915 if ((fed->type != RLI_INPUTRELOAD
1916 || chain->rld[fed->nr].override_in == 0)
1917 && ! rli->feeds[j]->ignored)
1919 needed = 1;
1920 break;
1924 else
1925 continue;
1927 if (! needed)
1929 rli->ignored = 1;
1930 changed = 1;
1931 for (j = 0; j < rli->n_feeds; j++)
1932 rli->feeds[j]->n_feeds_remaining--;
1936 while (changed);
1938 /* Propagate the ignored flag from the reload_insns to the register life
1939 information for hard regs. */
1940 for (i = 0; i < chain->n_output_regs + chain->n_input_regs; i++)
1942 struct reload_reg_use *ru = chain->reg_usage + i;
1943 struct reload_insn *rli = chain->rli + 1 + 2 * chain->n_reloads + i;
1944 if (rli->ignored)
1945 ru->ignored = 1;
1949 /* Find out the birth and death positions for reloads and hard register
1950 references. They are computed from the relative positions of the
1951 reload_insn structures after these have been scheduled in
1952 compute_reload_order. */
1953 static void
1954 compute_birth_death (struct insn_chain *chain)
1956 int count = 1 + 2 * chain->n_reloads + chain->n_input_regs + chain->n_output_regs;
1957 int i;
1958 int maxpos;
1959 HARD_REG_SET live_after, live_before, tmp;
1961 REG_SET_TO_HARD_REG_SET (live_before, &chain->live_before);
1962 compute_use_by_pseudos (&live_before, &chain->live_before);
1963 REG_SET_TO_HARD_REG_SET (live_after, &chain->live_after);
1964 compute_use_by_pseudos (&live_after, &chain->live_after);
1966 /* This is an additional optimization. For pseudos allocated to a multi-word
1967 hard register, LIVE_BEFORE will contain each of the single hard regs, even
1968 if this insn stores into one of them. We can take them out again very
1969 easily with the following three lines. */
1970 COPY_HARD_REG_SET (tmp, chain->set_regs);
1971 AND_COMPL_HARD_REG_SET (tmp, chain->used_regs);
1972 AND_COMPL_HARD_REG_SET (live_before, tmp);
1974 maxpos = chain->rli[chain->last_rlinsn].order;
1976 compute_ignored (chain);
1978 /* Step 1: Initialize birth and death positions to default values. */
1979 for (i = 0; i < chain->n_reloads; i++)
1981 chain->rld[i].reginfo.birth = maxpos + 1;
1982 chain->rld[i].reginfo.death = chain->rld[i].out ? chain->rli[0].order + 1 : 0;
1984 for (i = 0; i < chain->n_input_regs; i++)
1986 struct reload_reg_use *ru = chain->reg_usage + i;
1987 int regno = ru->regno;
1988 ru->birth = 0;
1989 /* For a reg that dies in this insn, initialize death to 0;
1990 it will get increased for every use found.
1991 If the reg does not die in this insn, we may not record a
1992 death earlier than the last time slot. However, if we know
1993 that this insn sets the register, we know we'll have a
1994 corresponding entry in the output regs section which will
1995 properly record the fact that this reg is live beyond the insn. */
1996 ru->death = ((! TEST_HARD_REG_BIT (live_after, regno)
1997 || TEST_HARD_REG_BIT (chain->set_regs, regno))
1998 ? 0 : maxpos + 1);
2000 for (i = 0; i < chain->n_output_regs; i++)
2002 struct reload_reg_use *ru = chain->reg_usage + chain->n_input_regs + i;
2003 int regno = ru->regno;
2004 ru->death = maxpos + 1;
2005 /* Similar issue as for the input regs above. */
2006 ru->birth = ((! TEST_HARD_REG_BIT (live_before, regno)
2007 || TEST_HARD_REG_BIT (chain->used_regs, regno))
2008 ? maxpos + 1 : 0);
2011 /* Step 2: Look at all the reload insns, and adjust live ranges for the
2012 involved registers. */
2014 /* Process the inputs. */
2015 for (i = 0; i < count; i++)
2017 struct reload_insn *rli = chain->rli + i;
2018 struct reload_reg_use *ru;
2019 if (rli->ignored || rli->status != RLIS_SCHEDULED)
2020 continue;
2021 if (rli->type == RLI_INPUTRELOAD)
2023 ru = &chain->rld[rli->nr].reginfo;
2024 /* An input reload is born at the place where it is loaded. */
2025 ru->birth = rlinsn_order (chain, rli);
2027 else if (rli->type == RLI_INPUTREG)
2029 ru = chain->reg_usage + i - 2 * chain->n_reloads - 1;
2030 if (ru->regno != chain->rli[i].nr)
2031 abort ();
2033 else
2034 continue;
2036 /* An input reload dies no earlier than its last use. */
2037 ru->death = MAX (ru->death, rlinsn_last_use (chain, rli));
2040 /* Process the outputs. */
2041 for (i = 0; i < count; i++)
2043 int j;
2044 struct reload_insn *rli = chain->rli + i;
2045 int order;
2047 if (rli->ignored
2048 || rli->status != RLIS_SCHEDULED
2049 || (rli->type != RLI_OUTPUTRELOAD && rli->type != RLI_INSN))
2050 continue;
2052 order = rlinsn_order (chain, rli);
2053 /* An output reload dies at the place where it is stored into its
2054 final destination. */
2055 if (rli->type == RLI_OUTPUTRELOAD)
2056 chain->rld[rli->nr].reginfo.death = order;
2058 /* Anything fed by an output gets born no later than the time of the
2059 output. */
2060 for (j = 0; j < rli->n_feeds; j++)
2062 struct reload_insn *rlj = rli->feeds[j];
2063 struct reload_reg_use *ru_fed;
2064 if (rlj->type == RLI_OUTPUTRELOAD)
2065 ru_fed = &chain->rld[rlj->nr].reginfo;
2066 else if (rlj->type == RLI_OUTPUTREG)
2068 ru_fed = chain->reg_usage + (rlj - chain->rli) - 2 * chain->n_reloads - 1;
2069 if (ru_fed->regno != rlj->nr)
2070 abort ();
2072 else
2073 continue;
2074 ru_fed->birth = MIN (ru_fed->birth, order);
2078 /* Perform necessary adjustments for earlyclobbered operands. If they
2079 currently are marked as being born at the main insn, decrement their
2080 birth by one so it matches the earlyclobber slot. */
2081 for (i = 0; i < chain->n_reloads; i++)
2083 struct reload_reg_use *ru = &chain->rld[i].reginfo;
2084 if (ru->ignored || ! ru->earlyclobber)
2085 continue;
2086 if (ru->birth == chain->rli[0].order)
2087 ru->birth--;
2089 for (i = 0; i < chain->n_output_regs; i++)
2091 struct reload_reg_use *ru = chain->reg_usage + i + chain->n_input_regs;
2092 if (ru->ignored || ! ru->earlyclobber)
2093 continue;
2094 if (ru->birth == chain->rli[0].order)
2095 ru->birth--;
2098 /* Time slots for secondary reloads are computed from the time slots of
2099 their primary reloads.
2100 Walk reloads in reverse order so that we first see the primary, then
2101 the secondary, and finally any tertiary reloads. */
2102 i = chain->n_reloads;
2103 while (i-- > 0)
2105 struct reload *rl = chain->rld + i;
2107 if (rl->secondary_in_reload >= i || rl->secondary_out_reload >= i)
2108 abort ();
2110 if (rl->secondary_in_reload >= 0)
2112 struct reload *rl2 = chain->rld + rl->secondary_in_reload;
2113 rl2->reginfo.birth = rl->reginfo.birth;
2114 rl2->reginfo.death = rl->reginfo.birth;
2116 if (rl->secondary_out_reload >= 0)
2118 struct reload *rl2 = chain->rld + rl->secondary_out_reload;
2119 rl2->reginfo.birth = rl->reginfo.death;
2120 rl2->reginfo.death = rl->reginfo.death;
2123 /* Second pass over the secondary reloads, this time to make sure secondary
2124 reloads and their primaries conflict. */
2125 for (i = 0; i < chain->n_reloads; i++)
2127 struct reload *rl = chain->rld + i;
2128 if (rl->secondary_in_reload >= 0)
2130 struct reload *rl2 = chain->rld + rl->secondary_in_reload;
2131 /* Make the secondary conflict with the input register. */
2132 rl2->reginfo.birth--;
2133 /* Make secondary conflict with its primary. */
2134 rl->reginfo.birth--;
2136 if (rl->secondary_out_reload >= 0)
2138 struct reload *rl2 = chain->rld + rl->secondary_out_reload;
2139 rl2->reginfo.birth--;
2142 /* Extend lifetimes for things that need to live past the insn. */
2143 for (i = 0; i < chain->n_reloads; i++)
2145 struct reload_reg_use *ru = &chain->rld[i].reginfo;
2147 if (ru->live_until_end)
2148 ru->death = maxpos + 1;
2152 /* Enable an optional reload and recompute birth/death positions for
2153 CHAIN. */
2154 static void
2155 enable_optional (struct insn_chain *chain, struct reload *rl)
2157 if (! rl->optional || rl->enabled)
2158 return;
2159 if (rl->reginfo.allocated)
2160 abort ();
2161 rl->enabled = 1;
2162 compute_birth_death (chain);
2165 /* Disable an optional reload and recompute birth/death positions for
2166 CHAIN. */
2167 static void
2168 disable_optional (struct insn_chain *chain, struct reload *rl)
2170 if (! rl->optional || ! rl->enabled)
2171 return;
2172 rl->enabled = 0;
2173 rl->reginfo.allocated = 0;
2174 compute_birth_death (chain);
2177 /* For the insn described by CHAIN, set up all the data structures we need
2178 to allocate reload registers later on. */
2179 static void
2180 init_rlinsns (struct insn_chain *chain)
2182 struct reload_insn *rli;
2183 struct reload_reg_use *ru;
2184 int i, count;
2185 HARD_REG_SET tmp;
2186 int n_regs;
2188 REG_SET_TO_HARD_REG_SET (chain->hard_live_across, &chain->live_before);
2189 REG_SET_TO_HARD_REG_SET (tmp, &chain->live_after);
2190 AND_HARD_REG_SET (chain->hard_live_across, tmp);
2192 CLEAR_HARD_REG_SET (chain->pseudo_hard_live_across);
2193 compute_use_by_pseudos (&chain->pseudo_hard_live_across, &chain->live_before);
2194 compute_use_by_pseudos (&tmp, &chain->live_after);
2195 AND_HARD_REG_SET (chain->pseudo_hard_live_across, tmp);
2197 AND_COMPL_HARD_REG_SET (chain->hard_live_across, chain->set_regs);
2198 AND_COMPL_HARD_REG_SET (chain->hard_live_across, chain->used_regs);
2199 AND_COMPL_HARD_REG_SET (chain->pseudo_hard_live_across, chain->set_regs);
2200 AND_COMPL_HARD_REG_SET (chain->pseudo_hard_live_across, chain->used_regs);
2202 /* Count hard register references. */
2203 chain->n_input_regs = 0;
2204 chain->n_output_regs = 0;
2205 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2207 if (TEST_HARD_REG_BIT (chain->set_regs, i))
2208 chain->n_output_regs++;
2209 if (TEST_HARD_REG_BIT (chain->used_regs, i))
2210 chain->n_input_regs++;
2213 /* Set up reload_insn and reload_reg_use structures. */
2214 n_regs = chain->n_input_regs + chain->n_output_regs;
2215 count = n_regs + 2 * chain->n_reloads + 1;
2217 rli = (struct reload_insn *) xmalloc (sizeof (struct reload_insn) * count);
2218 chain->rli = rli;
2219 memset (rli, 0, sizeof (struct reload_insn) * count);
2220 ru = (struct reload_reg_use *) xmalloc (sizeof (struct reload_reg_use) * n_regs);
2221 chain->reg_usage = ru;
2222 memset (ru, 0, sizeof (struct reload_reg_use) * n_regs);
2223 rli->type = RLI_INSN;
2224 rli->status = RLIS_NOT_SCHEDULED;
2225 rli++;
2227 for (i = 0; i < chain->n_reloads; i++)
2229 rli->type = RLI_INPUTRELOAD;
2230 rli->status = RLIS_IGNORED;
2231 rli->nr = i;
2232 rli++;
2233 rli->type = RLI_OUTPUTRELOAD;
2234 rli->status = RLIS_IGNORED;
2235 rli->nr = i;
2236 rli++;
2238 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2239 if (TEST_HARD_REG_BIT (chain->used_regs, i))
2241 rli->type = RLI_INPUTREG;
2242 rli->status = RLIS_IGNORED;
2243 rli->nr = i;
2244 rli++;
2245 ru->regno = i;
2246 ru->hardreg = REGNO_REG_SET_P (&chain->live_before, i);
2247 ru++;
2249 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2250 if (TEST_HARD_REG_BIT (chain->set_regs, i))
2252 rli->type = RLI_OUTPUTREG;
2253 rli->status = RLIS_IGNORED;
2254 rli->nr = i;
2255 rli++;
2256 ru->regno = i;
2257 ru->hardreg = REGNO_REG_SET_P (&chain->live_after, i);
2258 ru++;
2262 /* After find_reloads has run for CHAIN, save the information it gathered
2263 for later. This will also set up lifetime information for the reloads
2264 and registers used by the insn. */
2265 static void
2266 copy_reloads (struct insn_chain *chain)
2268 int i;
2270 CLEAR_HARD_REG_SET (chain->used_regs);
2271 CLEAR_HARD_REG_SET (chain->set_regs);
2273 chain->rld = (struct reload *) xmalloc (chain->n_reloads
2274 * sizeof (struct reload));
2275 memcpy (chain->rld, rld, chain->n_reloads * sizeof (struct reload));
2277 /* Find out which hard regs are set or used inside this insn. */
2278 subst_dummy ();
2279 scan_rtx (chain, &PATTERN (chain->insn_copy), 0, 0, 0, 1, 0);
2280 scan_secondary_mem (chain, 1);
2281 undo_subst_dummy ();
2283 init_rlinsns (chain);
2285 for (i = 0; i < chain->n_reloads; i++)
2287 struct reload *rl = chain->rld + i;
2288 rl->reginfo.allocated = 0;
2289 rl->reginfo.live_until_end = 0;
2290 rl->override_in = rl->override_out = 0;
2291 #if 0
2292 if (rl->reg_rtx != 0)
2293 abort ();
2294 #endif
2297 compute_feeds (chain);
2298 compute_reload_order (chain);
2299 compute_birth_death (chain);
2302 /* Walk the chain of insns, and determine for each whether it needs reloads
2303 and/or eliminations. Build the corresponding insns_need_reload list, and
2304 set something_needs_elimination as appropriate. */
2305 static void
2306 calculate_needs_all_insns (int global)
2308 struct insn_chain **pprev_reload = &insns_need_reload;
2309 struct insn_chain *chain, *next = 0;
2311 something_needs_elimination = 0;
2313 reload_insn_firstobj = obstack_alloc (&reload_obstack, 0);
2314 for (chain = reload_insn_chain; chain != 0; chain = next)
2316 rtx insn = chain->insn;
2318 next = chain->next;
2320 /* Clear out the shortcuts. */
2321 chain->n_reloads = 0;
2322 chain->need_elim = 0;
2323 chain->need_operand_change = 0;
2325 CLEAR_HARD_REG_SET (chain->inherited_live);
2326 CLEAR_REG_SET (&chain->unreloaded_sets);
2327 CLEAR_REG_SET (&chain->unreloaded_uses);
2329 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
2330 include REG_LABEL), we need to see what effects this has on the
2331 known offsets at labels. */
2333 if (LABEL_P (insn) || JUMP_P (insn)
2334 || (INSN_P (insn) && REG_NOTES (insn) != 0))
2335 set_label_offsets (insn, insn, 0);
2337 if (INSN_P (insn))
2339 rtx copy;
2340 int did_elimination = 0;
2341 int operands_changed = 0;
2343 copy = copy_insn (insn);
2344 chain->insn_copy = copy;
2346 /* If needed, eliminate any eliminable registers. */
2347 if (num_eliminable || num_eliminable_invariants)
2348 did_elimination = eliminate_regs_in_insn (copy, 0);
2350 if (GET_MODE (copy) == SImode)
2352 chain->will_be_deleted = 1;
2353 /* Make sure we call reload_as_needed to delete this insn. */
2354 something_needs_elimination = 1;
2355 continue;
2358 /* Analyze the instruction. */
2359 operands_changed = find_reloads (chain, copy, spill_indirect_levels,
2360 global);
2362 if (num_eliminable)
2363 update_eliminable_offsets ();
2365 /* Remember for later shortcuts which insns had any reloads or
2366 register eliminations. */
2367 chain->need_elim = did_elimination;
2368 chain->need_operand_change = operands_changed;
2370 something_needs_elimination |= did_elimination;
2371 something_needs_operands_changed |= operands_changed;
2373 if (chain->n_reloads != 0)
2375 copy_reloads (chain);
2376 *pprev_reload = chain;
2377 pprev_reload = &chain->next_need_reload;
2379 else
2380 scan_rtx (chain, &PATTERN (chain->insn_copy), 0, 0, 0, 1, 0);
2383 *pprev_reload = 0;
2386 void
2387 visualize_chain (struct insn_chain *chain)
2392 /* Comparison function for qsort to decide which of two reloads
2393 should be handled first. *P1 and *P2 are the reload numbers. */
2395 static int
2396 reload_reg_class_lower (const void *r1p, const void *r2p)
2398 int r1 = *(const short *) r1p, r2 = *(const short *) r2p;
2399 int t;
2401 /* Consider required reloads before optional ones. */
2402 t = rld[r1].optional - rld[r2].optional;
2403 if (t != 0)
2404 return t;
2406 /* Count all solitary classes before non-solitary ones. */
2407 t = ((reg_class_size[(int) rld[r2].class] == 1)
2408 - (reg_class_size[(int) rld[r1].class] == 1));
2409 if (t != 0)
2410 return t;
2412 /* Aside from solitaires, consider all multi-reg groups first. */
2413 t = rld[r2].nregs - rld[r1].nregs;
2414 if (t != 0)
2415 return t;
2417 /* Consider reloads in order of increasing reg-class number. */
2418 t = (int) rld[r1].class - (int) rld[r2].class;
2419 if (t != 0)
2420 return t;
2422 /* If reloads are equally urgent, sort by reload number,
2423 so that the results of qsort leave nothing to chance. */
2424 return r1 - r2;
2427 /* The cost of spilling each hard reg. */
2428 static int spill_cost[FIRST_PSEUDO_REGISTER];
2430 /* When spilling multiple hard registers, we use SPILL_COST for the first
2431 spilled hard reg and SPILL_ADD_COST for subsequent regs. SPILL_ADD_COST
2432 only the first hard reg for a multi-reg pseudo. */
2433 static int spill_add_cost[FIRST_PSEUDO_REGISTER];
2435 /* Update the spill cost arrays, considering that pseudo REG is live. */
2437 static void
2438 count_pseudo (int reg)
2440 int freq = REG_FREQ (reg);
2441 int r = reg_renumber[reg];
2442 int nregs;
2444 if (REGNO_REG_SET_P (&pseudos_counted, reg)
2445 || REGNO_REG_SET_P (&spilled_pseudos, reg))
2446 return;
2448 SET_REGNO_REG_SET (&pseudos_counted, reg);
2450 gcc_assert (r >= 0);
2452 spill_add_cost[r] += freq;
2454 nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
2455 while (nregs-- > 0)
2456 spill_cost[r + nregs] += freq;
2459 /* Calculate the SPILL_COST and SPILL_ADD_COST arrays and determine the
2460 contents of BAD_SPILL_REGS for the insn described by CHAIN. */
2462 static void
2463 order_regs_for_reload (struct insn_chain *chain)
2465 unsigned i;
2466 HARD_REG_SET used_by_pseudos;
2467 HARD_REG_SET used_by_pseudos2;
2468 reg_set_iterator rsi;
2470 COPY_HARD_REG_SET (bad_spill_regs, fixed_reg_set);
2472 memset (spill_cost, 0, sizeof spill_cost);
2473 memset (spill_add_cost, 0, sizeof spill_add_cost);
2475 /* Count number of uses of each hard reg by pseudo regs allocated to it
2476 and then order them by decreasing use. First exclude hard registers
2477 that are live in or across this insn. */
2479 REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_before);
2480 REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->live_after);
2481 IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos);
2482 IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos2);
2484 /* Now find out which pseudos are allocated to it, and update
2485 hard_reg_n_uses. */
2486 CLEAR_REG_SET (&pseudos_counted);
2488 EXECUTE_IF_SET_IN_REG_SET
2489 (&chain->live_before, FIRST_PSEUDO_REGISTER, i, rsi)
2491 count_pseudo (i);
2493 EXECUTE_IF_SET_IN_REG_SET
2494 (&chain->live_after, FIRST_PSEUDO_REGISTER, i, rsi)
2496 count_pseudo (i);
2498 CLEAR_REG_SET (&pseudos_counted);
2501 /* Vector of reload-numbers showing the order in which the reloads should
2502 be processed. */
2503 static short reload_order[MAX_RELOADS];
2505 /* This is used to keep track of the spill regs used in one insn. */
2506 static HARD_REG_SET used_spill_regs_local;
2508 /* We decided to spill hard register SPILLED, which has a size of
2509 SPILLED_NREGS. Determine how pseudo REG, which is live during the insn,
2510 is affected. We will add it to SPILLED_PSEUDOS if necessary, and we will
2511 update SPILL_COST/SPILL_ADD_COST. */
2513 static void
2514 count_spilled_pseudo (int spilled, int spilled_nregs, int reg)
2516 int r = reg_renumber[reg];
2517 int nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
2519 if (REGNO_REG_SET_P (&spilled_pseudos, reg)
2520 || spilled + spilled_nregs <= r || r + nregs <= spilled)
2521 return;
2523 SET_REGNO_REG_SET (&spilled_pseudos, reg);
2525 spill_add_cost[r] -= REG_FREQ (reg);
2526 while (nregs-- > 0)
2527 spill_cost[r + nregs] -= REG_FREQ (reg);
2530 /* Determine whether pseudo REGNO conflicts with the reload RL.
2531 We have no information about lifetime of pseudos, but we do know about
2532 hard register lifetimes. We only check for hard register references of
2533 type TYPE (either RLI_INPUTREG or RLI_OUTPUTREG). */
2534 static int
2535 no_conflict_p (struct insn_chain *chain, int regno, enum rlinsn_type type,
2536 struct reload *rl)
2538 int hardreg = reg_renumber[regno];
2539 int i, nregs;
2540 int scan_end = chain->n_input_regs + chain->n_output_regs;
2542 if (hardreg < 0)
2543 abort ();
2545 nregs = HARD_REGNO_NREGS (hardreg, PSEUDO_REGNO_MODE (regno));
2546 for (i = 0; i < nregs; i++)
2548 /* It's possible that only parts of a pseudo are in
2549 pseudo_hard_live_across; this can happen if this insn stores into
2550 one reg of a multi-reg pseudo. There is also code in
2551 compute_birth_death to handle that case. */
2552 if (hardreg + i >= rl->reginfo.regno
2553 && hardreg + i < rl->reginfo.regno + rl->nregs
2554 && TEST_HARD_REG_BIT (chain->pseudo_hard_live_across, hardreg + i))
2555 return 0;
2557 for (i = 0; i < scan_end; i++)
2559 int test_reg = chain->reg_usage[i].regno;
2560 enum rlinsn_type t = chain->rli[i + 2 * chain->n_reloads + 1].type;
2561 if (test_reg < hardreg || test_reg >= hardreg + nregs)
2562 continue;
2563 if (t == type && conflict (chain->reg_usage + i, &rl->reginfo))
2564 return 0;
2566 return 1;
2569 /* Compute the set of hard registers which can be used for reload R in insn
2570 CHAIN without spilling a pseudo, even though they are used somewhere in
2571 the insn. */
2572 static void
2573 compute_not_conflicting (struct insn_chain *chain, struct reload *rl,
2574 HARD_REG_SET *set)
2576 int i;
2578 CLEAR_HARD_REG_SET (*set);
2580 /* Two loops are necessary because the same register may occur twice in
2581 our data structures, and one of them may conflict but not the other. */
2582 for (i = 0; i < chain->n_input_regs + chain->n_output_regs; i++)
2583 SET_HARD_REG_BIT (*set, chain->reg_usage[i].regno);
2585 for (i = 0; i < chain->n_input_regs + chain->n_output_regs; i++)
2586 if (conflict (&rl->reginfo, chain->reg_usage + i))
2587 CLEAR_HARD_REG_BIT (*set, chain->reg_usage[i].regno);
2589 AND_COMPL_HARD_REG_SET (*set, chain->pseudo_hard_live_across);
2592 static int
2593 new_reg_preferred (int old_reg, int new_reg, int old_cost, int new_cost)
2595 if (new_cost < old_cost
2596 /* Among registers with equal cost, prefer caller-saved ones, or
2597 use REG_ALLOC_ORDER if it is defined. */
2598 || (new_cost == old_cost
2599 #ifdef REG_ALLOC_ORDER
2600 && (inv_reg_alloc_order[new_reg]
2601 < inv_reg_alloc_order[old_reg])
2602 #else
2603 && call_used_regs[new_reg]
2604 && ! call_used_regs[old_reg]
2605 #endif
2607 return 1;
2608 return 0;
2611 /* Compute the set of registers which can't be used for reload RL of insn
2612 CHAIN. It consists of explicit hard registers used in the insn whose
2613 lifetime overlaps the reload, and hard registers already allocated to
2614 other reloads that conflict with RL. The set is stored in *SET. */
2615 static void
2616 compute_not_usable (struct insn_chain *chain, struct reload *rl,
2617 HARD_REG_SET *set)
2619 int i;
2621 COPY_HARD_REG_SET (*set, bad_spill_regs_global);
2622 IOR_COMPL_HARD_REG_SET (*set, reg_class_contents[rl->class]);
2623 IOR_HARD_REG_SET (*set, chain->hard_live_across);
2625 /* Process conflicts with explicit hard registers. */
2626 for (i = 0; i < chain->n_input_regs + chain->n_output_regs; i++)
2628 struct reload_reg_use *ru = chain->reg_usage + i;
2629 if (ru->hardreg && conflict (&rl->reginfo, ru))
2630 SET_HARD_REG_BIT (*set, ru->regno);
2633 /* Process conflicts with other reloads. */
2634 for (i = 0; i < chain->n_reloads; i++)
2636 unsigned int j;
2637 struct reload *other = chain->rld + i;
2638 unsigned int regno = other->reginfo.regno;
2639 unsigned int nregs;
2641 if (other == rl
2642 || ! other->reginfo.allocated
2643 || ! conflict (&other->reginfo, &rl->reginfo))
2644 continue;
2646 nregs = HARD_REGNO_NREGS (regno, other->mode);
2647 for (j = 0; j < nregs; j++)
2648 SET_HARD_REG_BIT (*set, regno + j);
2651 /* CALL_INSNS need some special handling. We could allocate proper
2652 reload_reg_use structures and all that for call-clobbered and
2653 argument registers, but this is easier. */
2654 if (GET_CODE (chain->insn) == CALL_INSN)
2656 HARD_REG_SET clobbered, used;
2657 rtx x;
2658 int call_order = chain->rli[0].order;
2660 COPY_HARD_REG_SET (clobbered, call_used_reg_set);
2661 CLEAR_HARD_REG_SET (used);
2663 for (x = CALL_INSN_FUNCTION_USAGE (chain->insn); x; x = XEXP (x, 1))
2665 rtx y = XEXP (x, 0);
2666 enum rtx_code code = GET_CODE (y);
2667 int regno, nregs;
2669 if (code != CLOBBER && code != USE)
2670 abort ();
2671 y = XEXP (y, 0);
2672 if (GET_CODE (y) == MEM)
2673 continue;
2674 if (GET_CODE (y) != REG)
2675 abort ();
2676 regno = REGNO (y);
2677 nregs = HARD_REGNO_NREGS (regno, GET_MODE (y));
2678 while (nregs-- > 0)
2679 if (code == CLOBBER)
2680 SET_HARD_REG_BIT (clobbered, regno + nregs);
2681 else
2682 SET_HARD_REG_BIT (used, regno + nregs);
2684 if (rl->reginfo.birth < call_order)
2686 IOR_HARD_REG_SET (*set, used);
2687 if (rl->reginfo.death > call_order)
2688 IOR_HARD_REG_SET (*set, clobbered);
2692 /* Process conflicts with inherited registers. */
2693 for (i = 0; i < chain->n_reloads; i++)
2695 unsigned int j;
2696 struct reload *other = chain->rld + i;
2697 unsigned int regno, nregs;
2698 if (other == rl
2699 || other->override_in == 0
2700 || other->reginfo.birth < rl->reginfo.birth)
2701 continue;
2703 regno = REGNO (other->override_in);
2704 nregs = HARD_REGNO_NREGS (regno, other->mode);
2705 for (j = 0; j < nregs; j++)
2706 SET_HARD_REG_BIT (*set, regno + j);
2708 IOR_HARD_REG_SET (*set, chain->inherited_live);
2711 static void
2712 set_reload_reg (struct reload *rl, unsigned regno)
2714 int i;
2716 rl->nregs = HARD_REGNO_NREGS (regno, rl->mode);
2717 rl->reginfo.regno = regno;
2718 rl->reginfo.allocated = 1;
2719 for (i = 0; i < rl->nregs; i++)
2720 SET_HARD_REG_BIT (used_spill_regs_local, regno + i);
2723 /* Find reload register to use for reload number ORDER. REG_RTX, if nonnull,
2724 was suggested by find_reloads. */
2726 static int
2727 find_reg (struct insn_chain *chain, int order, rtx reg_rtx)
2729 int rnum = reload_order[order];
2730 struct reload *rl = chain->rld + rnum;
2731 int best_cost = INT_MAX;
2732 int best_reg = -1;
2733 unsigned i;
2734 HARD_REG_SET not_conflicting;
2735 HARD_REG_SET not_usable;
2736 reg_set_iterator rsi;
2738 compute_not_conflicting (chain, rl, &not_conflicting);
2739 compute_not_usable (chain, rl, &not_usable);
2741 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2743 unsigned int regno = i;
2744 if (! TEST_HARD_REG_BIT (not_usable, regno)
2745 && HARD_REGNO_MODE_OK (regno, rl->mode))
2747 int exception = TEST_HARD_REG_BIT (not_conflicting, regno);
2748 int this_cost = exception ? 0 : spill_cost[regno];
2749 int ok = 1;
2750 int j, this_nregs = HARD_REGNO_NREGS (regno, rl->mode);
2752 for (j = 1; j < this_nregs; j++)
2754 exception = TEST_HARD_REG_BIT (not_conflicting, regno + j);
2755 this_cost += exception ? 0 : spill_add_cost[regno + j];
2756 if (TEST_HARD_REG_BIT (not_usable, regno + j))
2757 ok = 0;
2759 if (! ok)
2760 continue;
2761 if (reg_rtx && REGNO (reg_rtx) == regno)
2763 best_reg = regno;
2764 break;
2766 if (rl->in && REG_P (rl->in) && REGNO (rl->in) == regno)
2767 this_cost--;
2768 if (rl->out && REG_P (rl->out) && REGNO (rl->out) == regno)
2769 this_cost--;
2770 if (new_reg_preferred (best_reg, regno, best_cost, this_cost))
2772 best_reg = regno;
2773 best_cost = this_cost;
2778 if (best_reg == -1)
2779 return 0;
2781 if (dump_file)
2782 fprintf (dump_file, "Using reg %d for reload %d\n", best_reg, rnum);
2784 set_reload_reg (rl, best_reg);
2786 EXECUTE_IF_SET_IN_REG_SET
2787 (&chain->live_after, FIRST_PSEUDO_REGISTER, i, rsi)
2789 if (! no_conflict_p (chain, i, RLI_OUTPUTREG, rl))
2790 count_spilled_pseudo (best_reg, rl->nregs, i);
2793 EXECUTE_IF_SET_IN_REG_SET
2794 (&chain->live_before, FIRST_PSEUDO_REGISTER, i, rsi)
2796 if (! no_conflict_p (chain, i, RLI_INPUTREG, rl))
2797 count_spilled_pseudo (best_reg, rl->nregs, i);
2801 return 1;
2804 /* Find more reload regs to satisfy the remaining need of an insn, which
2805 is given by CHAIN.
2806 Do it by ascending class number, since otherwise a reg
2807 might be spilled for a big class and might fail to count
2808 for a smaller class even though it belongs to that class. */
2810 static void
2811 find_reload_regs (struct insn_chain *chain)
2813 int i;
2815 memcpy (rld, chain->rld, chain->n_reloads * sizeof (struct reload));
2817 CLEAR_HARD_REG_SET (used_spill_regs_local);
2819 if (dump_file)
2820 fprintf (dump_file, "Spilling for insn %d.\n", INSN_UID (chain->insn));
2822 /* In order to be certain of getting the registers we need,
2823 we must sort the reloads into order of increasing register class. */
2824 for (i = 0; i < chain->n_reloads; i++)
2825 reload_order[i] = i;
2827 qsort (reload_order, chain->n_reloads, sizeof (short),
2828 reload_reg_class_lower);
2830 /* Compute the order of preference for hard registers to spill. */
2831 order_regs_for_reload (chain);
2833 for (i = 0; i < chain->n_reloads; i++)
2835 struct reload *rl = chain->rld + reload_order[i];
2837 /* Ignore reloads that got marked inoperative. */
2838 if ((rl->out != 0 || rl->in != 0 || rl->secondary_p)
2839 && ! rl->optional
2840 && ! rl->reginfo.allocated)
2841 if (! find_reg (chain, i, rl->reg_rtx))
2843 spill_failure (chain->insn, rl->class);
2844 failure = 1;
2845 return;
2849 COPY_HARD_REG_SET (chain->used_spill_regs, used_spill_regs_local);
2850 IOR_HARD_REG_SET (used_spill_regs, used_spill_regs_local);
2853 /* The following functions all deal with reload inheritance. */
2855 /* Given a reload RL for the insn described by CHAIN, compute the set of
2856 hard registers which can be used without having to spill a pseudo. */
2857 static void
2858 compute_unallocated (struct insn_chain *chain, struct reload *rl, HARD_REG_SET *set)
2860 unsigned i;
2861 HARD_REG_SET tmp;
2862 reg_set_iterator rsi;
2864 CLEAR_HARD_REG_SET (tmp);
2866 EXECUTE_IF_SET_IN_REG_SET
2867 (&chain->live_before, FIRST_PSEUDO_REGISTER, i, rsi)
2869 int r = reg_renumber[i];
2870 int last = r + HARD_REGNO_NREGS (r, PSEUDO_REGNO_MODE (i));
2872 if (r < 0)
2873 abort ();
2876 SET_HARD_REG_BIT (tmp, r);
2877 while (++r != last);
2879 EXECUTE_IF_SET_IN_REG_SET
2880 (&chain->live_after, FIRST_PSEUDO_REGISTER, i, rsi)
2882 int r = reg_renumber[i];
2883 int last = r + HARD_REGNO_NREGS (r, PSEUDO_REGNO_MODE (i));
2885 if (r < 0)
2886 abort ();
2888 SET_HARD_REG_BIT (tmp, r);
2889 while (++r != last);
2891 COMPL_HARD_REG_SET (*set, tmp);
2893 compute_not_conflicting (chain, rl, &tmp);
2894 IOR_HARD_REG_SET (*set, tmp);
2896 compute_not_usable (chain, rl, &tmp);
2897 AND_COMPL_HARD_REG_SET (*set, tmp);
2900 /* Like find_reg, but modified to handle optional reloads during inheritance.
2901 This function does not need to worry about spilling pseudos, it only ever
2902 considers regs that are definitely free. */
2903 static int
2904 find_optional_reg (struct reload *rl, HARD_REG_SET *usable, int suggested,
2905 int do_allocate)
2907 int i;
2908 int best_reg = -1;
2909 int best_cost = INT_MAX;
2911 if (suggested >= 0)
2913 int nregs = HARD_REGNO_NREGS (suggested, rl->mode);
2914 for (i = 0; i < nregs; i++)
2915 if (! TEST_HARD_REG_BIT (*usable, suggested + i))
2916 break;
2917 if (i == nregs)
2919 if (do_allocate)
2920 set_reload_reg (rl, suggested);
2921 return 1;
2925 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2927 int nregs, k, cost;
2928 if (! HARD_REGNO_MODE_OK (i, rl->mode))
2929 continue;
2930 nregs = HARD_REGNO_NREGS (i, rl->mode);
2931 for (k = 0; k < nregs; k++)
2932 if (! TEST_HARD_REG_BIT (*usable, i + k))
2933 break;
2935 if (k < nregs)
2936 continue;
2937 cost = 0;
2938 if (new_reg_preferred (best_reg, i, best_cost, cost))
2939 best_cost = cost, best_reg = i;
2941 if (best_reg == -1)
2942 return 0;
2944 if (do_allocate)
2945 set_reload_reg (rl, best_reg);
2947 return 1;
2950 /* Subroutine of compute_set_between. Called via note_stores. */
2951 static void
2952 note_hard_reg_sets (rtx x, rtx setter ATTRIBUTE_UNUSED, void *data)
2954 enum machine_mode mode = GET_MODE (x);
2955 HARD_REG_SET *set = (HARD_REG_SET *)data;
2956 int offset = 0;
2957 int nregs, regno;
2959 if (GET_CODE (x) == SUBREG)
2961 offset = subreg_regno_offset (REGNO (SUBREG_REG (x)),
2962 GET_MODE (SUBREG_REG (x)),
2963 SUBREG_BYTE (x),
2964 GET_MODE (x));
2965 x = SUBREG_REG (x);
2968 if (GET_CODE (x) != REG || REGNO (x) >= FIRST_PSEUDO_REGISTER)
2969 return;
2971 regno = REGNO (x) + offset;
2972 nregs = HARD_REGNO_NREGS (regno, mode);
2974 while (nregs-- > 0)
2975 SET_HARD_REG_BIT (*set, regno + nregs);
2978 /* Compute the set of hard registers which are clobbered by insns between FROM
2979 and TO (excluding these boundary insns). */
2980 static void
2981 compute_set_between (HARD_REG_SET *set, struct insn_chain *from,
2982 struct insn_chain *to)
2984 CLEAR_HARD_REG_SET (*set);
2986 while (from->next != to)
2988 int i;
2990 from = from->next;
2992 if (! INSN_P (from->insn) || from->will_be_deleted)
2993 continue;
2995 if (from->n_reloads == 0)
2996 note_stores (PATTERN (from->insn), note_hard_reg_sets, set);
2997 else
2999 for (i = 0; i < from->n_reloads; i++)
3000 if (from->rld[i].reginfo.allocated)
3002 int nregs = HARD_REGNO_NREGS (from->rld[i].reginfo.regno, from->rld[i].mode);
3003 while (nregs-- > 0)
3004 SET_HARD_REG_BIT (*set, from->rld[i].reginfo.regno + nregs);
3006 for (i = 0; i < from->n_output_regs; i++)
3007 SET_HARD_REG_BIT (*set, from->reg_usage[from->n_input_regs + i].regno);
3009 if (GET_CODE (from->insn) == CALL_INSN)
3010 IOR_HARD_REG_SET (*set, call_used_reg_set);
3014 /* Subroutine of find_clobbered_between_reloads. This function handles the
3015 case of two reloads in the same insn. */
3016 static void
3017 find_clobbered_between_reloads_1 (HARD_REG_SET *total_conflicts,
3018 struct insn_chain *chain,
3019 struct reload *src_rl, struct reload *rl)
3021 int i;
3022 int inherit_start = src_rl->reginfo.birth;
3023 int inherit_end = rl->reginfo.birth;
3025 CLEAR_HARD_REG_SET (*total_conflicts);
3027 for (i = 0; i < chain->n_reloads; i++)
3029 struct reload *rl = chain->rld + i;
3030 struct reload_reg_use *ru = &rl->reginfo;
3031 int rl_set_order = chain->rli[0].order;
3033 if (! ru->allocated || ru->birth >= inherit_end)
3034 continue;
3035 if (rl->reginfo.earlyclobber)
3036 rl_set_order--;
3037 if (ru->birth > inherit_start
3038 || (rl->out && rl_set_order > inherit_start))
3040 int nregs = HARD_REGNO_NREGS (ru->regno, chain->rld[i].mode);
3041 while (nregs-- > 0)
3042 SET_HARD_REG_BIT (*total_conflicts, ru->regno + nregs);
3045 for (i = 0; i < chain->n_output_regs; i++)
3047 struct reload_reg_use *ru = chain->reg_usage + chain->n_input_regs + i;
3048 if (ru->birth > inherit_start && ru->birth < inherit_end)
3049 SET_HARD_REG_BIT (*total_conflicts, ru->regno);
3053 /* When inheriting reloads, we need to find out which hard registers will be
3054 clobbered between two given reloads. These reloads are described by
3055 SRC_RL (the earlier) and RL (the later reload), with SRC_CHAIN and CHAIN
3056 being the insns they appear in. Store the conflicts in
3057 TOTAL_CONFLICTS. */
3058 static void
3059 find_clobbered_between_reloads (HARD_REG_SET *total_conflicts,
3060 struct insn_chain *src_chain,
3061 struct reload *src_rl,
3062 struct insn_chain *chain, struct reload *rl,
3063 int src_is_head)
3065 int i;
3066 int inherit_start = src_rl->reginfo.birth;
3067 int inherit_end = rl->reginfo.birth;
3069 HARD_REG_SET in_conflicts;
3070 HARD_REG_SET out_conflicts;
3072 if (chain == src_chain)
3074 find_clobbered_between_reloads_1 (total_conflicts, chain, src_rl, rl);
3075 return;
3078 compute_set_between (total_conflicts, src_chain, chain);
3080 /* Compute the registers which conflict with the outgoing value on
3081 SRC_CHAIN. */
3082 CLEAR_HARD_REG_SET (out_conflicts);
3083 for (i = 0; i < src_chain->n_reloads; i++)
3085 struct reload *rli = src_chain->rld + i;
3086 int nregs;
3087 /* If the previous reload has an output part, its register will be
3088 clobbered, so we must count it as a conflict (unless it's the
3089 head reload). */
3090 if ((rli == src_rl && src_rl->out != 0 && ! src_is_head)
3091 || (rli != src_rl && rli->reginfo.allocated && rli->reginfo.birth >= inherit_start))
3093 nregs = HARD_REGNO_NREGS (rli->reginfo.regno, rli->mode);
3094 while (nregs-- > 0)
3095 SET_HARD_REG_BIT (out_conflicts, rli->reginfo.regno + nregs);
3098 for (i = 0; i < src_chain->n_output_regs; i++)
3100 struct reload_reg_use *ru = src_chain->reg_usage + src_chain->n_input_regs + i;
3101 if (! ru->ignored && ru->birth > inherit_start)
3102 SET_HARD_REG_BIT (out_conflicts, ru->regno);
3104 if (GET_CODE (src_chain->insn) == CALL_INSN
3105 && src_rl->reginfo.birth < src_chain->rli[0].order)
3106 IOR_HARD_REG_SET (out_conflicts, call_used_reg_set);
3108 /* Compute the registers which conflict with the incoming value on
3109 CHAIN. */
3110 CLEAR_HARD_REG_SET (in_conflicts);
3111 for (i = 0; i < chain->n_reloads; i++)
3113 struct reload *rli = chain->rld + i;
3114 int nregs;
3115 if (rli == rl)
3116 continue;
3117 if (rli->reginfo.birth > inherit_end || ! rli->reginfo.allocated)
3118 continue;
3119 nregs = HARD_REGNO_NREGS (rli->reginfo.regno, rli->mode);
3120 while (nregs-- > 0)
3121 SET_HARD_REG_BIT (in_conflicts, rli->reginfo.regno + nregs);
3123 for (i = 0; i < chain->n_output_regs; i++)
3125 struct reload_reg_use *ru = chain->reg_usage + chain->n_input_regs + i;
3126 if (! ru->ignored && ru->birth < inherit_end)
3127 SET_HARD_REG_BIT (in_conflicts, ru->regno);
3130 IOR_HARD_REG_SET (*total_conflicts, out_conflicts);
3131 IOR_HARD_REG_SET (*total_conflicts, in_conflicts);
3134 /* Describe an inheritance opportunity. For all reloaded rtx's, we create
3135 a list of all places in the current block which reload the same rtx.
3136 These lists are ordered; earlier uses precede later ones. */
3137 struct inherit_chain
3139 struct inherit_chain *next_chain;
3140 /* The next inheritance opportunity on this chain. */
3141 struct inherit_chain *next_same;
3143 /* The set of registers clobbered between the previous reload and this
3144 one. */
3145 HARD_REG_SET new_conflicts;
3146 /* The place for this inheritance opportunity. */
3147 struct insn_chain *chain;
3148 struct reload_insn *rli;
3149 unsigned int was_enabled:1;
3150 unsigned int used_after:1;
3153 /* All inheritance chains we found, linked through their NEXT_CHAIN
3154 fields. */
3155 static struct inherit_chain *all_inherit_chains;
3157 /* For post-processing, all chains that actually were used for inheritance. */
3158 static struct inherit_chain *done_inherit_chains;
3160 /* Indexed by pseudo register, gives the last inheritance opportunity we found
3161 for that pseudo. */
3162 static struct inherit_chain **pseudo_inherit;
3164 /* Undo actions done in inherit_one_chain: turn off all optional reloads that
3165 weren't enabled beforehand. Do that for all places recorded in the chain
3166 from HEAD until the end of the chain. */
3168 static void
3169 undo_enables (struct inherit_chain *head, struct inherit_chain *last)
3171 while (head != last)
3173 int rl_nr = head->rli->nr;
3174 struct reload *rl = head->chain->rld + rl_nr;
3176 if (rl->optional && ! head->was_enabled)
3177 disable_optional (head->chain, rl);
3179 head = head->next_same;
3183 /* Return nonzero if it's not too expensive to use hard reg REGNO, used in
3184 mode MODE, as an input override for a reload register in class CLASS. */
3186 static int
3187 suitable_for_copy (int regno, enum machine_mode mode, enum reg_class class)
3189 enum reg_class regno_class = REGNO_REG_CLASS (regno);
3190 int reg_move_cost = REGISTER_MOVE_COST (mode, regno_class, class);
3191 int mem_move_cost = MEMORY_MOVE_COST (mode, class, 1);
3193 /* This used to be a greater or equal comparison. At least for the i386,
3194 better code is generated by a greater than comparison, and I'm guessing
3195 other machines also benefit from this. */
3196 if (regno_class != class && reg_move_cost > mem_move_cost)
3197 return 0;
3199 #ifdef SECONDARY_INPUT_RELOAD_CLASS
3200 PUT_MODE (permanent_test_reg, mode);
3201 REGNO (permanent_test_reg) = regno;
3203 if (SECONDARY_INPUT_RELOAD_CLASS (class, mode, permanent_test_reg)
3204 != NO_REGS)
3205 return 0;
3206 #endif
3208 #ifdef SECONDARY_MEMORY_NEEDED
3209 if (SECONDARY_MEMORY_NEEDED (regno_class, class, mode))
3210 return 0;
3211 #endif
3213 return 1;
3216 enum inherit_type
3218 IT_NONE,
3219 IT_OVERRIDE_INPUT,
3220 IT_USE_REG
3223 /* We test for two things for each register: whether it can be used as input
3224 for this reload (if not, we can't use it at all), and whether it can even
3225 be used as the reload register itself (which gives better code if
3226 possible). */
3227 static enum inherit_type
3228 usable_for_inheritance (rtx head_rtx, struct reload *rl, rtx this_rtx,
3229 int regno, HARD_REG_SET *usable_regs,
3230 HARD_REG_SET *unallocated)
3232 enum machine_mode mode = rl->mode;
3233 enum machine_mode inmode = rl->inmode;
3234 int can_use_inheritance_reg = rl->out_reg == 0;
3235 int nregs, k;
3236 int offsetted_regno = regno;
3238 if (GET_CODE (head_rtx) == REG && GET_CODE (this_rtx) == SUBREG)
3239 offsetted_regno += subreg_regno_offset (regno,
3240 GET_MODE (SUBREG_REG (this_rtx)),
3241 SUBREG_BYTE (this_rtx),
3242 GET_MODE (this_rtx));
3244 if (GET_MODE (head_rtx) != inmode)
3246 #ifdef CLASS_CANNOT_CHANGE_MODE
3247 if (TEST_HARD_REG_BIT (reg_class_contents[CLASS_CANNOT_CHANGE_MODE],
3248 regno)
3249 && CLASS_CANNOT_CHANGE_MODE_P (regno, mode))
3250 return IT_NONE;
3251 #endif
3254 /* To be usable at all, the register must be valid in the mode we are
3255 going to use for the input. */
3256 if (! HARD_REGNO_MODE_OK (offsetted_regno, inmode))
3257 return IT_NONE;
3259 /* Check whether it's valid to use the register as reload reg. For this,
3260 it must be valid in MODE, not just INMODE. */
3262 if (mode != inmode && ! HARD_REGNO_MODE_OK (offsetted_regno, mode))
3264 nregs = HARD_REGNO_NREGS (offsetted_regno, inmode);
3266 for (k = 0; k < nregs; k++)
3267 if (! TEST_HARD_REG_BIT (*usable_regs, offsetted_regno + k))
3268 return IT_NONE;
3270 can_use_inheritance_reg = 0;
3272 else
3274 nregs = HARD_REGNO_NREGS (offsetted_regno, mode);
3276 for (k = 0; k < nregs; k++)
3278 if (! TEST_HARD_REG_BIT (*usable_regs, offsetted_regno + k))
3279 return IT_NONE;
3280 if (! TEST_HARD_REG_BIT (*unallocated, offsetted_regno + k))
3281 can_use_inheritance_reg = 0;
3285 /* Can we inherit for this opportunity? */
3286 if (! can_use_inheritance_reg
3287 && ! suitable_for_copy (offsetted_regno, inmode, rl->class))
3288 return IT_NONE;
3290 return can_use_inheritance_reg ? IT_USE_REG : IT_OVERRIDE_INPUT;
3293 /* Given the inheritance opportunity THIS, which is for reload THIS_RL,
3294 try to use BEST_REG for inheritance. USABLE_REGS contains all the
3295 registers which have not been clobbered since the head insn.
3296 Return zero if we could not do inheritance for this insn, nonzero
3297 otherwise. */
3299 static int
3300 perform_inheritance (rtx head_rtx, struct inherit_chain *this,
3301 struct reload *this_rl, int best_reg,
3302 HARD_REG_SET *usable_regs)
3304 rtx this_rtx = (this->rli->type == RLI_OUTPUTRELOAD
3305 ? this_rl->out_reg : this_rl->in_reg);
3306 enum machine_mode mode = this_rl->mode;
3307 enum inherit_type itype;
3308 HARD_REG_SET this_usable;
3309 int offsetted_regno = best_reg;
3311 if (GET_CODE (head_rtx) == REG && GET_CODE (this_rtx) == SUBREG)
3312 offsetted_regno += subreg_regno_offset (best_reg,
3313 GET_MODE (SUBREG_REG (this_rtx)),
3314 SUBREG_BYTE (this_rtx),
3315 GET_MODE (this_rtx));
3317 compute_unallocated (this->chain, this_rl, &this_usable);
3319 itype = usable_for_inheritance (head_rtx, this_rl, this_rtx, best_reg,
3320 usable_regs, &this_usable);
3321 if (itype == IT_NONE)
3322 return 0;
3324 if (this_rl->optional && ! this->was_enabled)
3325 find_optional_reg (this_rl, &this_usable, offsetted_regno, 1);
3326 else if (itype == IT_USE_REG)
3328 this_rl->nregs = HARD_REGNO_NREGS (offsetted_regno, mode);
3329 this_rl->reginfo.regno = offsetted_regno;
3330 /* We're overriding - this must already have been allocated earlier. */
3331 if (! this_rl->reginfo.allocated)
3332 abort ();
3335 this_rl->override_in = gen_rtx_REG (this_rl->inmode, offsetted_regno);
3337 return 1;
3340 /* While processing a chain of inheritance opportunities that starts with
3341 HEAD, we arrived at position THIS. Return nonzero if we must stop
3342 inheritance at this point. */
3343 static int
3344 must_terminate_inheritance (struct reload *head_rl,
3345 struct reload_insn *head_rli,
3346 struct reload *this_rl,
3347 struct reload_insn *this_rli)
3349 rtx head_rtx = (head_rli->type == RLI_OUTPUTRELOAD
3350 ? head_rl->out_reg : head_rl->in_reg);
3351 rtx this_rtx = (this_rli->type == RLI_OUTPUTRELOAD
3352 ? this_rl->out_reg : this_rl->in_reg);
3354 /* If the head rtx is a subreg, terminate as soon as we see the full reg
3355 or a different subreg.
3356 @@@ This may possibly lose a few opportunities, but it probably isn't
3357 too big a deal. */
3358 if (GET_CODE (head_rtx) == SUBREG
3359 && (GET_CODE (this_rtx) != SUBREG
3360 || SUBREG_BYTE (this_rtx) != SUBREG_BYTE (head_rtx)))
3361 return 1;
3363 /* Can't inherit from a narrower rtx. */
3364 if (GET_MODE_SIZE (GET_MODE (head_rtx))
3365 < GET_MODE_SIZE (GET_MODE (this_rtx)))
3366 return 1;
3368 return 0;
3371 /* This is the first pass done when trying to perform inheritance for a given
3372 chain of opportunities. It determines which registers are best suited for
3373 the optimization. */
3374 static struct inherit_chain *
3375 evaluate_regs_for_inherit (struct inherit_chain *head,
3376 HARD_REG_SET *usable_for_head, int *goodness)
3378 unsigned int i;
3379 struct inherit_chain *this, *prev = head;
3380 struct reload *head_rl = head->chain->rld + head->rli->nr;
3381 struct reload *prev_rl = head_rl;
3382 rtx head_rtx = (head->rli->type == RLI_OUTPUTRELOAD
3383 ? head_rl->out_reg : head_rl->in_reg);
3384 HARD_REG_SET usable_regs;
3386 prev->was_enabled = prev_rl->enabled;
3387 enable_optional (prev->chain, prev_rl);
3389 COPY_HARD_REG_SET (usable_regs, *usable_for_head);
3391 this = prev->next_same;
3392 /* In each iteration, try to extend the span during which the reload is
3393 inherited. We keep track of which registers we can use in the
3394 USABLE_FOR_HEAD set. */
3395 while (this != 0)
3397 HARD_REG_SET this_usable;
3398 struct reload *this_rl = this->chain->rld + this->rli->nr;
3399 rtx this_rtx = (this->rli->type == RLI_OUTPUTRELOAD
3400 ? this_rl->out_reg : this_rl->in_reg);
3402 if (dump_file)
3403 fprintf (dump_file, " at insn %d rl %d: ",
3404 INSN_UID (this->chain->insn), this->rli->nr);
3406 if (must_terminate_inheritance (head_rl, head->rli, this_rl, this->rli))
3408 if (dump_file)
3409 fprintf (dump_file, " must terminate\n");
3410 break;
3412 if (this->rli->type == RLI_OUTPUTRELOAD)
3414 if (dump_file)
3415 fprintf (dump_file, " is output reload\n");
3416 break;
3419 this->was_enabled = this_rl->enabled;
3420 enable_optional (this->chain, this_rl);
3422 /* We can't use a register that's clobbered between these two
3423 reloads. */
3424 find_clobbered_between_reloads (&this->new_conflicts, prev->chain,
3425 prev_rl, this->chain, this_rl, prev == head);
3426 AND_COMPL_HARD_REG_SET (usable_regs, this->new_conflicts);
3428 if (dump_file)
3430 int i;
3431 fprintf (dump_file, "new conflicts: ");
3432 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
3433 if (TEST_HARD_REG_BIT (this->new_conflicts, i))
3434 fprintf (dump_file, "%s ", reg_names[i]);
3435 fprintf (dump_file, "\n");
3437 /* Find out which registers we can use as reload register for this
3438 reload. */
3439 compute_unallocated (this->chain, this_rl, &this_usable);
3441 if (this->rli->ignored)
3442 goto ignore;
3444 /* No use trying to inherit for an optional reload for which no
3445 register is available anymore. */
3446 if (this_rl->optional && ! this->was_enabled)
3447 if (! find_optional_reg (this_rl, &this_usable, -1, 0))
3448 goto ignore;
3450 /* Walk through all hard regs and determine their desirability. */
3451 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3453 enum inherit_type itype;
3455 itype = usable_for_inheritance (head_rtx, this_rl, this_rtx, i,
3456 &usable_regs, &this_usable);
3457 if (itype != IT_NONE)
3458 goodness[i] += itype == IT_USE_REG ? 2 : 1;
3461 ignore:
3462 prev = this;
3463 prev_rl = this_rl;
3464 this = prev->next_same;
3467 return this;
3470 static int
3471 allocation_goodness (struct insn_chain *chain, struct reload *rl, unsigned regno)
3473 int goodness = 0;
3475 if (rl->override_in && REGNO (rl->override_in) == regno)
3476 goodness++;
3477 #if 0
3478 if (rl->out && GET_CODE (rl->out) == REG && REGNO (rl->out) == regno)
3479 goodness++;
3480 if (chain->n_reloads == 1)
3482 rtx set = single_set (chain->insn);
3483 if (set)
3485 if (rl->out && ! rl->in)
3489 #endif
3490 return goodness;
3493 static bool
3494 sets_full_reg_p (struct inherit_chain *head)
3496 struct reload *head_rl = head->chain->rld + head->rli->nr;
3497 rtx head_rtx = (head->rli->type == RLI_OUTPUTRELOAD
3498 ? head_rl->out_reg : head_rl->in_reg);
3500 if (head->rli->type != RLI_OUTPUTRELOAD)
3501 return false;
3502 if (GET_CODE (head_rtx) != REG)
3503 return false;
3504 return true;
3507 static bool
3508 jump_insn_between_p (struct insn_chain *first, struct insn_chain *last)
3510 struct insn_chain *chain;
3511 for (chain = first; chain != last; chain = chain->next)
3512 if (GET_CODE (chain->insn) == JUMP_INSN)
3513 return true;
3514 return false;
3517 /* Process one chain of inheritance opportunities. All entries on the
3518 chain reload the same rtx (modulo subregs).
3520 The strategy:
3521 - The first reload in the chain is the base from which all the others
3522 inherit. Its reload register will be used to override the inputs of
3523 the following reloads. We choose a suitable register for it here,
3524 even if we already allocated one. The goal is to maximize the
3525 lifetime of the inheritance register, and to make sure it is usable
3526 as input for the following reloads.
3527 - For every following reload, test the available possible inheritance
3528 registers to make sure they can be used as input. If it's an
3529 input-only reload, we can also determine whether we can use the
3530 inheritance register as reload register. This isn't necessary, but
3531 cheaper, and we count it that way.
3533 So, what about output reloads, then? In inherit_reloads, chains are
3534 generated for all kinds of reload insns, no matter whether they are for
3535 input for or output. If we have an in-out reload which is both loaded
3536 from a pseudo and stored to the same pseudo, we will see the same reload
3537 twice in this function, once for the input and once for the output.
3539 As soon as we find an output reload in the inherit chain, we stop
3540 processing. The chain is cut in half: the first half which we have
3541 already examined is processed, the other half which starts with the
3542 new output reload is put back into the ALL_INHERIT_CHAINS list so
3543 that we will process it during the next iteration.
3544 This means that such an in-out reload may be used for inheritance twice:
3545 first, inheriting from another reload; and second, as a base for other
3546 reloads that can inherit from it. */
3548 static void
3549 inherit_one_chain (struct inherit_chain *head)
3551 struct inherit_chain *last_performed;
3553 unsigned int i, noninherited_use;
3554 /* For each hard reg, count the number of inheritances we can perform if
3555 we select it. */
3556 int goodness[FIRST_PSEUDO_REGISTER];
3557 struct inherit_chain *this, *last, *prev = head;
3558 struct reload *head_rl = head->chain->rld + head->rli->nr;
3559 struct reload *prev_rl = head_rl;
3560 rtx head_rtx = (head->rli->type == RLI_OUTPUTRELOAD
3561 ? head_rl->out_reg : head_rl->in_reg);
3562 int best_reg;
3563 int best_value;
3564 /* This keeps track of the last insn we modified. Each time we find a
3565 new one that uses the inheritance register, we walk the chains in
3566 between to update their INHERITED_LIVE sets. */
3567 struct insn_chain *previous_chain = head->chain->next;
3569 HARD_REG_SET usable_for_head, usable_regs, tmp;
3571 if (dump_file)
3573 fprintf (dump_file, "*** inherit_one_chain: head insn: %d, rtx: ", INSN_UID (head->chain->insn));
3574 print_inline_rtx (dump_file, head_rtx, 0);
3575 fprintf (dump_file, "\n");
3578 memset (goodness, 0, sizeof goodness);
3580 /* Find out which registers we can use as reload register for the first
3581 entry on the chain. */
3582 compute_unallocated (prev->chain, prev_rl, &tmp);
3584 CLEAR_HARD_REG_SET (usable_for_head);
3585 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3587 unsigned int k, nregs;
3589 if (! HARD_REGNO_MODE_OK (i, head_rl->mode))
3590 continue;
3591 nregs = HARD_REGNO_NREGS (i, head_rl->mode);
3593 for (k = 0; k < nregs; k++)
3594 if (! TEST_HARD_REG_BIT (tmp, i + k))
3595 break;
3596 if (k == nregs)
3597 SET_HARD_REG_BIT (usable_for_head, i);
3600 last = evaluate_regs_for_inherit (head, &usable_for_head, goodness);
3602 best_reg = -1;
3603 best_value = INT_MAX;
3604 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3606 /* new_reg_preferred takes cost arguments, so negate the goodness. */
3607 int value = - goodness[i];
3609 /* Re-test against USABLE_FOR_HEAD. evaluate_regs_for_inherit does
3610 sometimes think a partial inheritance is possible, even though
3611 that would force the head into an unusable register. */
3612 if (! TEST_HARD_REG_BIT (usable_for_head, i) || value == 0)
3613 continue;
3615 /* Adjust the value if we know we can save a copy. We do it here
3616 to avoid choosing a register whose value would otherwise be
3617 zero. */
3618 value -= allocation_goodness (head->chain, head_rl, i);
3620 if (new_reg_preferred (best_reg, i, best_value, value))
3622 best_value = value;
3623 best_reg = i;
3627 if (best_reg == -1)
3629 /* Now that's very disappointing. */
3630 undo_enables (head, last);
3631 last = head->next_same;
3632 goto put_back_last;
3635 if (dump_file)
3637 int i;
3638 fprintf (dump_file, " best_reg is %d\n", best_reg);
3639 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
3640 if (TEST_HARD_REG_BIT (usable_for_head, i))
3641 fprintf (dump_file, "%s ", reg_names[i]);
3642 fprintf (dump_file, "\n");
3645 head->next_chain = done_inherit_chains;
3646 done_inherit_chains = head;
3648 /* Now, actually perform all the necessary modifications. This is a
3649 second pass over all inheritance opportunites, done in similar fashion
3650 as above. */
3652 /* Change the head reload to use the inheritance register. */
3653 set_reload_reg (head_rl, best_reg);
3655 /* Extend lifetime of original reload. Set the LIVE_UNTIL_END flag so
3656 that following calls to compute_birth_death will preserve this
3657 state. */
3658 head_rl->reginfo.death = head->chain->rli[head->chain->last_rlinsn].order + 1;
3659 head_rl->reginfo.live_until_end = 1;
3661 COPY_HARD_REG_SET (usable_regs, usable_for_head);
3663 /* Create a set of the hard registers we use to keep the inherited value. */
3665 unsigned int k, nregs = HARD_REGNO_NREGS (best_reg, head_rl->mode);
3667 CLEAR_HARD_REG_SET (tmp);
3668 for (k = 0; k < nregs; k++)
3669 SET_HARD_REG_BIT (tmp, best_reg + k);
3672 last_performed = head;
3673 prev = head;
3674 this = head->next_same;
3675 noninherited_use = head->used_after;
3676 while (this != last)
3678 struct insn_chain *chain;
3679 struct reload *rl = this->chain->rld + this->rli->nr;
3681 noninherited_use |= this->used_after;
3683 if (! noninherited_use)
3684 noninherited_use |= jump_insn_between_p (prev->chain, this->chain);
3686 AND_COMPL_HARD_REG_SET (usable_regs, this->new_conflicts);
3688 if (perform_inheritance (head_rtx, this, rl, best_reg, &usable_regs))
3690 /* Update register life information.
3691 @@@ This is slightly conservative in rare cases involving
3692 inheritance of partial registers. Fixing it is way too
3693 complicated for too little gain. */
3694 if (this->chain != head->chain)
3695 while (previous_chain != this->chain)
3697 IOR_HARD_REG_SET (previous_chain->inherited_live, tmp);
3698 previous_chain = previous_chain->next;
3700 if (dump_file)
3701 fprintf (dump_file, " Inherited insn: %d\n", INSN_UID (this->chain->insn));
3702 compute_ignored (this->chain);
3703 last_performed = this;
3705 else
3707 noninherited_use = 1;
3708 if (rl->optional && ! this->was_enabled)
3709 disable_optional (this->chain, rl);
3710 if (dump_file)
3711 fprintf (dump_file, " Ignored insn: %d\n", INSN_UID (this->chain->insn));
3714 if (! noninherited_use && this->next_same == last)
3716 if ((this->rli->type == RLI_OUTPUTRELOAD
3717 && GET_CODE (rl->out_reg) != REG)
3718 || (this->rli->type == RLI_INPUTRELOAD
3719 && ! find_regno_note (this->chain->insn, REG_DEAD,
3720 REGNO (head_rtx))
3721 && ! (last && sets_full_reg_p (last)
3722 && ! jump_insn_between_p (this->chain, last->chain))))
3723 noninherited_use = 1;
3726 prev = this;
3727 this = this->next_same;
3729 /* If we have a trail of unperformed opportunities, put them all back for
3730 another attempt. */
3731 if (last_performed->next_same != last)
3733 last = last_performed->next_same;
3736 /* If the head is an output, and we changed all the subsequent uses until
3737 the death or another store into the same register, we can disable the
3738 head reload. */
3739 if (head->rli->type == RLI_OUTPUTRELOAD
3740 && GET_CODE (head_rtx) == REG
3741 && ! noninherited_use)
3743 head->rli->order = head->chain->rli[0].order;
3744 head->rli->unnecessary = 1;
3745 head->rli->ignored = 1;
3746 compute_birth_death (head->chain);
3749 put_back_last:
3751 if (last)
3753 /* The caller has removed the whole chain from ALL_INHERIT_CHAINS.
3754 If LAST is nonzero at this point, it means that we stopped half-way
3755 through. The rest of the chain can still be used for inheritance,
3756 so put it back. */
3757 last->next_chain = all_inherit_chains;
3758 all_inherit_chains = last;
3762 /* Some inherit_chains are not suitable for starting inheritance. These
3763 are the ones that refer to the input part of an in-out reload: the
3764 reload register will be clobbered and thus cannot start inheritance. */
3765 static int
3766 usable_as_inheritance_head (struct inherit_chain *p)
3768 struct reload *p_rl = p->chain->rld + p->rli->nr;
3770 if (p->rli->type == RLI_INPUTRELOAD && p_rl->out_reg)
3771 return 0;
3772 return 1;
3775 static char *inherit_startobj;
3777 /* Called by inherit_reloads when a block boundary (CODE_LABEL, etc.) is
3778 seen. Process the collected data about inheritance opportunities.
3780 Upon return, all state will be reset and the caller can proceed to
3781 collect data for the remaining insns. */
3782 static void
3783 inherit_block (void)
3786 /* @@@ Todo: we might put some code here to select the most promising of
3787 the chains. */
3788 while (all_inherit_chains)
3790 struct inherit_chain *p = all_inherit_chains;
3791 all_inherit_chains = p->next_chain;
3793 while (p && ! usable_as_inheritance_head (p))
3794 p = p->next_same;
3796 if (p && p->next_same)
3797 inherit_one_chain (p);
3800 memset (pseudo_inherit, 0, (sizeof (struct inherit_chain *)
3801 * max_reg_num ()));
3802 all_inherit_chains = 0;
3803 done_inherit_chains = 0;
3804 obstack_free (&reload_obstack, inherit_startobj);
3807 /* Try to improve the allocation by inheriting reloads.
3809 This function is the entry point for the inheritance pass. It walks all
3810 insns, recording inheritance opportunties, and hands them off at the end
3811 of each extended basic block to the function inherit_block.
3813 Inheritance will modify the data we have gathered about reloads. It can
3814 change allocated registers. It will set the override_in fields for input
3815 reloads that want to inherit from a register. Life information will be
3816 updated. */
3817 static void
3818 inherit_reloads (void)
3820 struct insn_chain *chain;
3821 int prev_was_setjmp, this_is_setjmp = 0;
3822 done_inherit_chains = 0;
3823 all_inherit_chains = 0;
3824 pseudo_inherit
3825 = (struct inherit_chain **) xcalloc (sizeof (struct inherit_chain *),
3826 max_reg_num ());
3828 inherit_startobj = (char *) obstack_alloc (&reload_obstack, 0);
3830 for (chain = reload_insn_chain; chain; chain = chain->next)
3832 struct inherit_chain *this_inherit = 0;
3833 int i;
3834 unsigned r;
3835 rtx insn = chain->insn;
3836 reg_set_iterator rsi;
3838 prev_was_setjmp = this_is_setjmp;
3839 this_is_setjmp = (CALL_P (insn)
3840 && find_reg_note (insn, REG_SETJMP, NULL));
3843 if (GET_CODE (insn) == NOTE || chain->will_be_deleted)
3844 continue;
3846 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == BARRIER
3847 || prev_was_setjmp
3848 || (GET_CODE (insn) == INSN
3849 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
3850 && MEM_VOLATILE_P (PATTERN (insn))))
3852 inherit_block ();
3853 continue;
3856 EXECUTE_IF_SET_IN_REG_SET (&chain->unreloaded_sets, FIRST_PSEUDO_REGISTER,
3857 r, rsi)
3859 pseudo_inherit[r] = 0;
3861 EXECUTE_IF_SET_IN_REG_SET
3862 (&chain->unreloaded_uses, FIRST_PSEUDO_REGISTER, r, rsi)
3864 if (pseudo_inherit[r])
3865 pseudo_inherit[r]->used_after = 1;
3868 if (chain->n_reloads == 0)
3869 continue;
3871 /* See which reloads might be inherited. This loop walks the
3872 reload_insns of CHAIN in descending order and produces a list of
3873 inheritance opportunties that is sorted in ascending order. */
3874 for (i = chain->last_rlinsn; i >= 0;)
3876 struct reload_insn *rli = chain->rli + i;
3878 if (rli->type == RLI_OUTPUTRELOAD || rli->type == RLI_INPUTRELOAD)
3880 struct reload *rl = chain->rld + rli->nr;
3881 rtx which = (rli->type == RLI_OUTPUTRELOAD
3882 ? rl->out_reg : rl->in_reg);
3883 rtx reg = which;
3884 if (GET_CODE (reg) == SUBREG)
3885 reg = SUBREG_REG (reg);
3887 if (GET_CODE (reg) == REG
3888 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
3889 && ! REGNO_REG_SET_P (&chain->unreloaded_sets, REGNO (reg)))
3891 struct inherit_chain *new_inherit;
3892 new_inherit
3893 = (struct inherit_chain *) obstack_alloc (&reload_obstack,
3894 sizeof (struct inherit_chain));
3895 new_inherit->next_chain = this_inherit;
3896 new_inherit->next_same = 0;
3897 new_inherit->used_after = 0;
3898 new_inherit->chain = chain;
3899 new_inherit->rli = chain->rli + i;
3900 this_inherit = new_inherit;
3903 i = rli->prev_order;
3906 /* Now, walk again over all the new opportunities we found and add them
3907 to the global chains. */
3908 while (this_inherit)
3910 struct inherit_chain *tmp = this_inherit;
3911 struct inherit_chain *next = tmp->next_chain;
3912 struct reload *rl = chain->rld + this_inherit->rli->nr;
3913 int regno;
3914 rtx which = (this_inherit->rli->type == RLI_OUTPUTRELOAD
3915 ? rl->out_reg : rl->in_reg);
3916 rtx reg = which;
3918 if (GET_CODE (reg) == SUBREG)
3919 reg = SUBREG_REG (reg);
3920 regno = REGNO (reg);
3922 if (pseudo_inherit[regno] == 0)
3924 tmp->next_chain = all_inherit_chains;
3925 all_inherit_chains = tmp;
3927 else
3928 pseudo_inherit[regno]->next_same = tmp;
3929 pseudo_inherit[regno] = tmp;
3930 this_inherit = next;
3933 inherit_block ();
3935 free (pseudo_inherit);
3938 /* This is called after calculate_needs_all_insns. We have full information
3939 about the required reloads at this point, so try to find reload registers
3940 to satisfy the needs. */
3941 static void
3942 select_reload_regs (void)
3944 struct insn_chain *chain;
3946 CLEAR_HARD_REG_SET (used_spill_regs);
3948 /* Try to satisfy the needs for each insn. */
3949 for (chain = insns_need_reload; chain != 0;
3950 chain = chain->next_need_reload)
3951 find_reload_regs (chain);
3953 if (optimize)
3954 inherit_reloads ();
3957 /* Delete all insns that were inserted by emit_caller_save_insns during
3958 this iteration. */
3959 static void
3960 delete_caller_save_insns (void)
3962 struct insn_chain *c = reload_insn_chain;
3964 while (c != 0)
3966 while (c != 0 && c->is_caller_save_insn)
3968 struct insn_chain *next = c->next;
3969 rtx insn = c->insn;
3971 if (c == reload_insn_chain)
3972 reload_insn_chain = next;
3973 delete_insn (insn);
3975 if (next)
3976 next->prev = c->prev;
3977 if (c->prev)
3978 c->prev->next = next;
3979 c->next = unused_insn_chains;
3980 unused_insn_chains = c;
3981 c = next;
3983 if (c != 0)
3984 c = c->next;
3988 /* Handle the failure to find a register to spill.
3989 INSN should be one of the insns which needed this particular spill reg. */
3991 static void
3992 spill_failure (rtx insn, enum reg_class class)
3994 if (asm_noperands (PATTERN (insn)) >= 0)
3995 error_for_asm (insn, "can't find a register in class %qs while "
3996 "reloading %<asm%>",
3997 reg_class_names[class]);
3998 else
4000 error ("unable to find a register to spill in class %qs",
4001 reg_class_names[class]);
4002 fatal_insn ("this is the insn:", insn);
4006 /* Delete an unneeded INSN and any previous insns who sole purpose is loading
4007 data that is dead in INSN. */
4009 static void
4010 delete_dead_insn (rtx insn)
4012 rtx prev = prev_real_insn (insn);
4013 rtx prev_dest;
4015 /* If the previous insn sets a register that dies in our insn, delete it
4016 too. */
4017 if (prev && GET_CODE (PATTERN (prev)) == SET
4018 && (prev_dest = SET_DEST (PATTERN (prev)), REG_P (prev_dest))
4019 && reg_mentioned_p (prev_dest, PATTERN (insn))
4020 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
4021 && ! side_effects_p (SET_SRC (PATTERN (prev))))
4022 delete_dead_insn (prev);
4024 SET_INSN_DELETED (insn);
4027 /* Modify the home of pseudo-reg I.
4028 The new home is present in reg_renumber[I].
4030 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
4031 or it may be -1, meaning there is none or it is not relevant.
4032 This is used so that all pseudos spilled from a given hard reg
4033 can share one stack slot. */
4035 static void
4036 alter_reg (int i, int from_reg)
4038 /* When outputting an inline function, this can happen
4039 for a reg that isn't actually used. */
4040 if (regno_reg_rtx[i] == 0)
4041 return;
4043 /* If the reg got changed to a MEM at rtl-generation time,
4044 ignore it. */
4045 if (!REG_P (regno_reg_rtx[i]))
4046 return;
4048 /* Modify the reg-rtx to contain the new hard reg
4049 number or else to contain its pseudo reg number. */
4050 REGNO (regno_reg_rtx[i])
4051 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
4053 /* If we have a pseudo that is needed but has no hard reg or equivalent,
4054 allocate a stack slot for it. */
4056 if (reg_renumber[i] < 0
4057 && REG_N_REFS (i) > 0
4058 && reg_equiv_constant[i] == 0
4059 && reg_equiv_memory_loc[i] == 0)
4061 rtx x;
4062 unsigned int inherent_size = PSEUDO_REGNO_BYTES (i);
4063 unsigned int total_size = MAX (inherent_size, reg_max_ref_width[i]);
4064 int adjust = 0;
4066 /* Each pseudo reg has an inherent size which comes from its own mode,
4067 and a total size which provides room for paradoxical subregs
4068 which refer to the pseudo reg in wider modes.
4070 We can use a slot already allocated if it provides both
4071 enough inherent space and enough total space.
4072 Otherwise, we allocate a new slot, making sure that it has no less
4073 inherent space, and no less total space, then the previous slot. */
4074 if (from_reg == -1)
4076 /* No known place to spill from => no slot to reuse. */
4077 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size,
4078 inherent_size == total_size ? 0 : -1);
4079 if (BYTES_BIG_ENDIAN)
4080 /* Cancel the big-endian correction done in assign_stack_local.
4081 Get the address of the beginning of the slot.
4082 This is so we can do a big-endian correction unconditionally
4083 below. */
4084 adjust = inherent_size - total_size;
4086 /* Nothing can alias this slot except this pseudo. */
4087 set_mem_alias_set (x, new_alias_set ());
4090 /* Reuse a stack slot if possible. */
4091 else if (spill_stack_slot[from_reg] != 0
4092 && spill_stack_slot_width[from_reg] >= total_size
4093 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
4094 >= inherent_size))
4095 x = spill_stack_slot[from_reg];
4097 /* Allocate a bigger slot. */
4098 else
4100 /* Compute maximum size needed, both for inherent size
4101 and for total size. */
4102 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
4103 rtx stack_slot;
4105 if (spill_stack_slot[from_reg])
4107 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
4108 > inherent_size)
4109 mode = GET_MODE (spill_stack_slot[from_reg]);
4110 if (spill_stack_slot_width[from_reg] > total_size)
4111 total_size = spill_stack_slot_width[from_reg];
4114 /* Make a slot with that size. */
4115 x = assign_stack_local (mode, total_size,
4116 inherent_size == total_size ? 0 : -1);
4117 stack_slot = x;
4119 /* All pseudos mapped to this slot can alias each other. */
4120 if (spill_stack_slot[from_reg])
4121 set_mem_alias_set (x, MEM_ALIAS_SET (spill_stack_slot[from_reg]));
4122 else
4123 set_mem_alias_set (x, new_alias_set ());
4125 if (BYTES_BIG_ENDIAN)
4127 /* Cancel the big-endian correction done in assign_stack_local.
4128 Get the address of the beginning of the slot.
4129 This is so we can do a big-endian correction unconditionally
4130 below. */
4131 adjust = GET_MODE_SIZE (mode) - total_size;
4132 if (adjust)
4133 stack_slot
4134 = adjust_address_nv (x, mode_for_size (total_size
4135 * BITS_PER_UNIT,
4136 MODE_INT, 1),
4137 adjust);
4140 spill_stack_slot[from_reg] = stack_slot;
4141 spill_stack_slot_width[from_reg] = total_size;
4144 /* On a big endian machine, the "address" of the slot
4145 is the address of the low part that fits its inherent mode. */
4146 if (BYTES_BIG_ENDIAN && inherent_size < total_size)
4147 adjust += (total_size - inherent_size);
4149 /* If we have any adjustment to make, or if the stack slot is the
4150 wrong mode, make a new stack slot. */
4151 x = adjust_address_nv (x, GET_MODE (regno_reg_rtx[i]), adjust);
4153 /* If we have a decl for the original register, set it for the
4154 memory. If this is a shared MEM, make a copy. */
4155 if (REG_EXPR (regno_reg_rtx[i])
4156 && DECL_P (REG_EXPR (regno_reg_rtx[i])))
4158 rtx decl = DECL_RTL_IF_SET (REG_EXPR (regno_reg_rtx[i]));
4160 /* We can do this only for the DECLs home pseudo, not for
4161 any copies of it, since otherwise when the stack slot
4162 is reused, nonoverlapping_memrefs_p might think they
4163 cannot overlap. */
4164 if (decl && REG_P (decl) && REGNO (decl) == (unsigned) i)
4166 if (from_reg != -1 && spill_stack_slot[from_reg] == x)
4167 x = copy_rtx (x);
4169 set_mem_attrs_from_reg (x, regno_reg_rtx[i]);
4173 /* Save the stack slot for later. */
4174 reg_equiv_memory_loc[i] = x;
4178 /* Mark the slots in regs_ever_live for the hard regs
4179 used by pseudo-reg number REGNO. */
4181 void
4182 mark_home_live (int regno)
4184 int i, lim;
4186 i = reg_renumber[regno];
4187 if (i < 0)
4188 return;
4189 lim = i + hard_regno_nregs[i][PSEUDO_REGNO_MODE (regno)];
4190 while (i < lim)
4191 regs_ever_live[i++] = 1;
4194 /* This function handles the tracking of elimination offsets around branches.
4196 X is a piece of RTL being scanned.
4198 INSN is the insn that it came from, if any.
4200 INITIAL_P is nonzero if we are to set the offset to be the initial
4201 offset and zero if we are setting the offset of the label to be the
4202 current offset. */
4204 static void
4205 set_label_offsets (rtx x, rtx insn, int initial_p)
4207 enum rtx_code code = GET_CODE (x);
4208 rtx tem;
4209 unsigned int i;
4210 struct elim_table *p;
4212 switch (code)
4214 case LABEL_REF:
4215 if (LABEL_REF_NONLOCAL_P (x))
4216 return;
4218 x = XEXP (x, 0);
4220 /* ... fall through ... */
4222 case CODE_LABEL:
4223 /* If we know nothing about this label, set the desired offsets. Note
4224 that this sets the offset at a label to be the offset before a label
4225 if we don't know anything about the label. This is not correct for
4226 the label after a BARRIER, but is the best guess we can make. If
4227 we guessed wrong, we will suppress an elimination that might have
4228 been possible had we been able to guess correctly. */
4230 if (! offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num])
4232 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
4233 offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
4234 = (initial_p ? reg_eliminate[i].initial_offset
4235 : reg_eliminate[i].offset);
4236 offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num] = 1;
4239 /* Otherwise, if this is the definition of a label and it is
4240 preceded by a BARRIER, set our offsets to the known offset of
4241 that label. */
4243 else if (x == insn
4244 && (tem = prev_nonnote_insn (insn)) != 0
4245 && BARRIER_P (tem))
4246 set_offsets_for_label (insn);
4247 else
4248 /* If neither of the above cases is true, compare each offset
4249 with those previously recorded and suppress any eliminations
4250 where the offsets disagree. */
4252 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
4253 if (offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
4254 != (initial_p ? reg_eliminate[i].initial_offset
4255 : reg_eliminate[i].offset))
4256 reg_eliminate[i].can_eliminate = 0;
4258 return;
4260 case JUMP_INSN:
4261 set_label_offsets (PATTERN (insn), insn, initial_p);
4263 /* ... fall through ... */
4265 case INSN:
4266 case CALL_INSN:
4267 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
4268 and hence must have all eliminations at their initial offsets. */
4269 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
4270 if (REG_NOTE_KIND (tem) == REG_LABEL)
4271 set_label_offsets (XEXP (tem, 0), insn, 1);
4272 return;
4274 case PARALLEL:
4275 case ADDR_VEC:
4276 case ADDR_DIFF_VEC:
4277 /* Each of the labels in the parallel or address vector must be
4278 at their initial offsets. We want the first field for PARALLEL
4279 and ADDR_VEC and the second field for ADDR_DIFF_VEC. */
4281 for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
4282 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
4283 insn, initial_p);
4284 return;
4286 case SET:
4287 /* We only care about setting PC. If the source is not RETURN,
4288 IF_THEN_ELSE, or a label, disable any eliminations not at
4289 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
4290 isn't one of those possibilities. For branches to a label,
4291 call ourselves recursively.
4293 Note that this can disable elimination unnecessarily when we have
4294 a non-local goto since it will look like a non-constant jump to
4295 someplace in the current function. This isn't a significant
4296 problem since such jumps will normally be when all elimination
4297 pairs are back to their initial offsets. */
4299 if (SET_DEST (x) != pc_rtx)
4300 return;
4302 switch (GET_CODE (SET_SRC (x)))
4304 case PC:
4305 case RETURN:
4306 return;
4308 case LABEL_REF:
4309 set_label_offsets (SET_SRC (x), insn, initial_p);
4310 return;
4312 case IF_THEN_ELSE:
4313 tem = XEXP (SET_SRC (x), 1);
4314 if (GET_CODE (tem) == LABEL_REF)
4315 set_label_offsets (XEXP (tem, 0), insn, initial_p);
4316 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
4317 break;
4319 tem = XEXP (SET_SRC (x), 2);
4320 if (GET_CODE (tem) == LABEL_REF)
4321 set_label_offsets (XEXP (tem, 0), insn, initial_p);
4322 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
4323 break;
4324 return;
4326 default:
4327 break;
4330 /* If we reach here, all eliminations must be at their initial
4331 offset because we are doing a jump to a variable address. */
4332 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
4333 if (p->offset != p->initial_offset)
4334 p->can_eliminate = 0;
4335 break;
4337 default:
4338 break;
4342 /* Scan X and replace any eliminable registers (such as fp) with a
4343 replacement (such as sp), plus an offset.
4345 MEM_MODE is the mode of an enclosing MEM. We need this to know how
4346 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
4347 MEM, we are allowed to replace a sum of a register and the constant zero
4348 with the register, which we cannot do outside a MEM. In addition, we need
4349 to record the fact that a register is referenced outside a MEM.
4351 If INSN is an insn, it is the insn containing X. If we replace a REG
4352 in a SET_DEST with an equivalent MEM and INSN is nonzero, write a
4353 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
4354 the REG is being modified.
4356 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
4357 That's used when we eliminate in expressions stored in notes.
4358 This means, do not set ref_outside_mem even if the reference
4359 is outside of MEMs.
4361 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
4362 replacements done assuming all offsets are at their initial values. If
4363 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
4364 encounter, return the actual location so that find_reloads will do
4365 the proper thing. */
4368 eliminate_regs (rtx x, enum machine_mode mem_mode, rtx insn)
4370 enum rtx_code code = GET_CODE (x);
4371 struct elim_table *ep;
4372 int regno;
4373 rtx new;
4374 int i, j;
4375 const char *fmt;
4376 int copied = 0;
4378 if (! current_function_decl)
4379 return x;
4381 switch (code)
4383 case CONST_INT:
4384 case CONST_DOUBLE:
4385 case CONST_VECTOR:
4386 case CONST:
4387 case SYMBOL_REF:
4388 case CODE_LABEL:
4389 case PC:
4390 case CC0:
4391 case ASM_INPUT:
4392 case ADDR_VEC:
4393 case ADDR_DIFF_VEC:
4394 case RETURN:
4395 return x;
4397 case REG:
4398 regno = REGNO (x);
4400 /* First handle the case where we encounter a bare register that
4401 is eliminable. Replace it with a PLUS. */
4402 if (regno < FIRST_PSEUDO_REGISTER)
4404 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
4405 ep++)
4406 if (ep->from_rtx == x && ep->can_eliminate)
4407 return plus_constant (ep->to_rtx, ep->previous_offset);
4410 else if (reg_renumber && reg_renumber[regno] < 0
4411 && reg_equiv_constant && reg_equiv_constant[regno]
4412 && ! CONSTANT_P (reg_equiv_constant[regno]))
4413 return eliminate_regs (copy_rtx (reg_equiv_constant[regno]),
4414 mem_mode, insn);
4415 return x;
4417 /* You might think handling MINUS in a manner similar to PLUS is a
4418 good idea. It is not. It has been tried multiple times and every
4419 time the change has had to have been reverted.
4421 Other parts of reload know a PLUS is special (gen_reload for example)
4422 and require special code to handle code a reloaded PLUS operand.
4424 Also consider backends where the flags register is clobbered by a
4425 MINUS, but we can emit a PLUS that does not clobber flags (IA-32,
4426 lea instruction comes to mind). If we try to reload a MINUS, we
4427 may kill the flags register that was holding a useful value.
4429 So, please before trying to handle MINUS, consider reload as a
4430 whole instead of this little section as well as the backend issues. */
4431 case PLUS:
4432 /* If this is the sum of an eliminable register and a constant, rework
4433 the sum. */
4434 if (REG_P (XEXP (x, 0))
4435 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
4436 && CONSTANT_P (XEXP (x, 1)))
4438 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
4439 ep++)
4440 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
4442 /* The only time we want to replace a PLUS with a REG (this
4443 occurs when the constant operand of the PLUS is the negative
4444 of the offset) is when we are inside a MEM. We won't want
4445 to do so at other times because that would change the
4446 structure of the insn in a way that reload can't handle.
4447 We special-case the commonest situation in
4448 eliminate_regs_in_insn, so just replace a PLUS with a
4449 PLUS here, unless inside a MEM. */
4450 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
4451 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
4452 return ep->to_rtx;
4453 else
4454 return gen_rtx_PLUS (Pmode, ep->to_rtx,
4455 plus_constant (XEXP (x, 1),
4456 ep->previous_offset));
4459 /* If the register is not eliminable, we are done since the other
4460 operand is a constant. */
4461 return x;
4464 /* If this is part of an address, we want to bring any constant to the
4465 outermost PLUS. We will do this by doing register replacement in
4466 our operands and seeing if a constant shows up in one of them.
4468 Note that there is no risk of modifying the structure of the insn,
4469 since we only get called for its operands, thus we are either
4470 modifying the address inside a MEM, or something like an address
4471 operand of a load-address insn. */
4474 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
4475 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);
4477 if (reg_renumber && (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)))
4479 /* If one side is a PLUS and the other side is a pseudo that
4480 didn't get a hard register but has a reg_equiv_constant,
4481 we must replace the constant here since it may no longer
4482 be in the position of any operand. */
4483 if (GET_CODE (new0) == PLUS && REG_P (new1)
4484 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
4485 && reg_renumber[REGNO (new1)] < 0
4486 && reg_equiv_constant != 0
4487 && reg_equiv_constant[REGNO (new1)] != 0)
4488 new1 = reg_equiv_constant[REGNO (new1)];
4489 else if (GET_CODE (new1) == PLUS && REG_P (new0)
4490 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
4491 && reg_renumber[REGNO (new0)] < 0
4492 && reg_equiv_constant[REGNO (new0)] != 0)
4493 new0 = reg_equiv_constant[REGNO (new0)];
4495 new = form_sum (new0, new1);
4497 /* As above, if we are not inside a MEM we do not want to
4498 turn a PLUS into something else. We might try to do so here
4499 for an addition of 0 if we aren't optimizing. */
4500 if (! mem_mode && GET_CODE (new) != PLUS)
4501 return gen_rtx_PLUS (GET_MODE (x), new, const0_rtx);
4502 else
4503 return new;
4506 return x;
4508 case MULT:
4509 /* If this is the product of an eliminable register and a
4510 constant, apply the distribute law and move the constant out
4511 so that we have (plus (mult ..) ..). This is needed in order
4512 to keep load-address insns valid. This case is pathological.
4513 We ignore the possibility of overflow here. */
4514 if (REG_P (XEXP (x, 0))
4515 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
4516 && GET_CODE (XEXP (x, 1)) == CONST_INT)
4517 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
4518 ep++)
4519 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
4521 if (! mem_mode
4522 /* Refs inside notes don't count for this purpose. */
4523 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
4524 || GET_CODE (insn) == INSN_LIST)))
4525 ep->ref_outside_mem = 1;
4527 return
4528 plus_constant (gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
4529 ep->previous_offset * INTVAL (XEXP (x, 1)));
4532 /* ... fall through ... */
4534 case CALL:
4535 case COMPARE:
4536 /* See comments before PLUS about handling MINUS. */
4537 case MINUS:
4538 case DIV: case UDIV:
4539 case MOD: case UMOD:
4540 case AND: case IOR: case XOR:
4541 case ROTATERT: case ROTATE:
4542 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
4543 case NE: case EQ:
4544 case GE: case GT: case GEU: case GTU:
4545 case LE: case LT: case LEU: case LTU:
4547 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
4548 rtx new1
4549 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;
4551 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
4552 return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
4554 return x;
4556 case EXPR_LIST:
4557 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
4558 if (XEXP (x, 0))
4560 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
4561 if (new != XEXP (x, 0))
4563 /* If this is a REG_DEAD note, it is not valid anymore.
4564 Using the eliminated version could result in creating a
4565 REG_DEAD note for the stack or frame pointer. */
4566 if (GET_MODE (x) == REG_DEAD)
4567 return (XEXP (x, 1)
4568 ? eliminate_regs (XEXP (x, 1), mem_mode, insn)
4569 : NULL_RTX);
4571 x = gen_rtx_EXPR_LIST (REG_NOTE_KIND (x), new, XEXP (x, 1));
4575 /* ... fall through ... */
4577 case INSN_LIST:
4578 /* Now do eliminations in the rest of the chain. If this was
4579 an EXPR_LIST, this might result in allocating more memory than is
4580 strictly needed, but it simplifies the code. */
4581 if (XEXP (x, 1))
4583 new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
4584 if (new != XEXP (x, 1))
4585 return
4586 gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
4588 return x;
4590 case PRE_INC:
4591 case POST_INC:
4592 case PRE_DEC:
4593 case POST_DEC:
4594 case STRICT_LOW_PART:
4595 case NEG: case NOT:
4596 case SIGN_EXTEND: case ZERO_EXTEND:
4597 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
4598 case FLOAT: case FIX:
4599 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
4600 case ABS:
4601 case SQRT:
4602 case FFS:
4603 case CLZ:
4604 case CTZ:
4605 case POPCOUNT:
4606 case PARITY:
4607 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
4608 if (new != XEXP (x, 0))
4609 return gen_rtx_fmt_e (code, GET_MODE (x), new);
4610 return x;
4612 case SUBREG:
4613 /* Similar to above processing, but preserve SUBREG_BYTE.
4614 Convert (subreg (mem)) to (mem) if not paradoxical.
4615 Also, if we have a non-paradoxical (subreg (pseudo)) and the
4616 pseudo didn't get a hard reg, we must replace this with the
4617 eliminated version of the memory location because push_reload
4618 may do the replacement in certain circumstances. */
4619 if (REG_P (SUBREG_REG (x))
4620 && (GET_MODE_SIZE (GET_MODE (x))
4621 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4622 && reg_equiv_memory_loc != 0
4623 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
4625 new = SUBREG_REG (x);
4627 else
4628 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);
4630 if (new != SUBREG_REG (x))
4632 int x_size = GET_MODE_SIZE (GET_MODE (x));
4633 int new_size = GET_MODE_SIZE (GET_MODE (new));
4635 if (MEM_P (new)
4636 && ((x_size < new_size
4637 #ifdef WORD_REGISTER_OPERATIONS
4638 /* On these machines, combine can create rtl of the form
4639 (set (subreg:m1 (reg:m2 R) 0) ...)
4640 where m1 < m2, and expects something interesting to
4641 happen to the entire word. Moreover, it will use the
4642 (reg:m2 R) later, expecting all bits to be preserved.
4643 So if the number of words is the same, preserve the
4644 subreg so that push_reload can see it. */
4645 && ! ((x_size - 1) / UNITS_PER_WORD
4646 == (new_size -1 ) / UNITS_PER_WORD)
4647 #endif
4649 || x_size == new_size)
4651 return adjust_address_nv (new, GET_MODE (x), SUBREG_BYTE (x));
4652 else
4653 return gen_rtx_SUBREG (GET_MODE (x), new, SUBREG_BYTE (x));
4656 return x;
4658 case MEM:
4659 /* Our only special processing is to pass the mode of the MEM to our
4660 recursive call and copy the flags. While we are here, handle this
4661 case more efficiently. */
4662 return
4663 replace_equiv_address_nv (x,
4664 eliminate_regs (XEXP (x, 0),
4665 GET_MODE (x), insn));
4667 case USE:
4668 /* Handle insn_list USE that a call to a pure function may generate. */
4669 new = eliminate_regs (XEXP (x, 0), 0, insn);
4670 if (new != XEXP (x, 0))
4671 return gen_rtx_USE (GET_MODE (x), new);
4672 return x;
4674 case CLOBBER:
4675 case ASM_OPERANDS:
4676 case SET:
4677 gcc_unreachable ();
4679 default:
4680 break;
4683 /* Process each of our operands recursively. If any have changed, make a
4684 copy of the rtx. */
4685 fmt = GET_RTX_FORMAT (code);
4686 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
4688 if (*fmt == 'e')
4690 new = eliminate_regs (XEXP (x, i), mem_mode, insn);
4691 if (new != XEXP (x, i) && ! copied)
4693 rtx new_x = rtx_alloc (code);
4694 memcpy (new_x, x, RTX_SIZE (code));
4695 x = new_x;
4696 copied = 1;
4698 XEXP (x, i) = new;
4700 else if (*fmt == 'E')
4702 int copied_vec = 0;
4703 for (j = 0; j < XVECLEN (x, i); j++)
4705 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
4706 if (new != XVECEXP (x, i, j) && ! copied_vec)
4708 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
4709 XVEC (x, i)->elem);
4710 if (! copied)
4712 rtx new_x = rtx_alloc (code);
4713 memcpy (new_x, x, RTX_SIZE (code));
4714 x = new_x;
4715 copied = 1;
4717 XVEC (x, i) = new_v;
4718 copied_vec = 1;
4720 XVECEXP (x, i, j) = new;
4725 return x;
4728 /* Scan rtx X for modifications of elimination target registers. Update
4729 the table of eliminables to reflect the changed state. MEM_MODE is
4730 the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM. */
4732 static void
4733 elimination_effects (rtx x, enum machine_mode mem_mode)
4735 enum rtx_code code = GET_CODE (x);
4736 struct elim_table *ep;
4737 int regno;
4738 int i, j;
4739 const char *fmt;
4741 switch (code)
4743 case CONST_INT:
4744 case CONST_DOUBLE:
4745 case CONST_VECTOR:
4746 case CONST:
4747 case SYMBOL_REF:
4748 case CODE_LABEL:
4749 case PC:
4750 case CC0:
4751 case ASM_INPUT:
4752 case ADDR_VEC:
4753 case ADDR_DIFF_VEC:
4754 case RETURN:
4755 return;
4757 case REG:
4758 regno = REGNO (x);
4760 /* First handle the case where we encounter a bare register that
4761 is eliminable. Replace it with a PLUS. */
4762 if (regno < FIRST_PSEUDO_REGISTER)
4764 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
4765 ep++)
4766 if (ep->from_rtx == x && ep->can_eliminate)
4768 if (! mem_mode)
4769 ep->ref_outside_mem = 1;
4770 return;
4774 else if (reg_renumber[regno] < 0 && reg_equiv_constant
4775 && reg_equiv_constant[regno]
4776 && ! function_invariant_p (reg_equiv_constant[regno]))
4777 elimination_effects (reg_equiv_constant[regno], mem_mode);
4778 return;
4780 case PRE_INC:
4781 case POST_INC:
4782 case PRE_DEC:
4783 case POST_DEC:
4784 case POST_MODIFY:
4785 case PRE_MODIFY:
4786 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
4787 if (ep->to_rtx == XEXP (x, 0))
4789 int size = GET_MODE_SIZE (mem_mode);
4791 /* If more bytes than MEM_MODE are pushed, account for them. */
4792 #ifdef PUSH_ROUNDING
4793 if (ep->to_rtx == stack_pointer_rtx)
4794 size = PUSH_ROUNDING (size);
4795 #endif
4796 if (code == PRE_DEC || code == POST_DEC)
4797 ep->offset += size;
4798 else if (code == PRE_INC || code == POST_INC)
4799 ep->offset -= size;
4800 else if ((code == PRE_MODIFY || code == POST_MODIFY)
4801 && GET_CODE (XEXP (x, 1)) == PLUS
4802 && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
4803 && CONSTANT_P (XEXP (XEXP (x, 1), 1)))
4804 ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1));
4807 /* These two aren't unary operators. */
4808 if (code == POST_MODIFY || code == PRE_MODIFY)
4809 break;
4811 /* Fall through to generic unary operation case. */
4812 case STRICT_LOW_PART:
4813 case NEG: case NOT:
4814 case SIGN_EXTEND: case ZERO_EXTEND:
4815 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
4816 case FLOAT: case FIX:
4817 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
4818 case ABS:
4819 case SQRT:
4820 case FFS:
4821 case CLZ:
4822 case CTZ:
4823 case POPCOUNT:
4824 case PARITY:
4825 elimination_effects (XEXP (x, 0), mem_mode);
4826 return;
4828 case SUBREG:
4829 if (REG_P (SUBREG_REG (x))
4830 && (GET_MODE_SIZE (GET_MODE (x))
4831 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4832 && reg_equiv_memory_loc != 0
4833 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
4834 return;
4836 elimination_effects (SUBREG_REG (x), mem_mode);
4837 return;
4839 case USE:
4840 /* If using a register that is the source of an eliminate we still
4841 think can be performed, note it cannot be performed since we don't
4842 know how this register is used. */
4843 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
4844 if (ep->from_rtx == XEXP (x, 0))
4845 ep->can_eliminate = 0;
4847 elimination_effects (XEXP (x, 0), mem_mode);
4848 return;
4850 case CLOBBER:
4851 /* If clobbering a register that is the replacement register for an
4852 elimination we still think can be performed, note that it cannot
4853 be performed. Otherwise, we need not be concerned about it. */
4854 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
4855 if (ep->to_rtx == XEXP (x, 0))
4856 ep->can_eliminate = 0;
4858 elimination_effects (XEXP (x, 0), mem_mode);
4859 return;
4861 case SET:
4862 /* Check for setting a register that we know about. */
4863 if (REG_P (SET_DEST (x)))
4865 /* See if this is setting the replacement register for an
4866 elimination.
4868 If DEST is the hard frame pointer, we do nothing because we
4869 assume that all assignments to the frame pointer are for
4870 non-local gotos and are being done at a time when they are valid
4871 and do not disturb anything else. Some machines want to
4872 eliminate a fake argument pointer (or even a fake frame pointer)
4873 with either the real frame or the stack pointer. Assignments to
4874 the hard frame pointer must not prevent this elimination. */
4876 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
4877 ep++)
4878 if (ep->to_rtx == SET_DEST (x)
4879 && SET_DEST (x) != hard_frame_pointer_rtx)
4881 /* If it is being incremented, adjust the offset. Otherwise,
4882 this elimination can't be done. */
4883 rtx src = SET_SRC (x);
4885 if (GET_CODE (src) == PLUS
4886 && XEXP (src, 0) == SET_DEST (x)
4887 && GET_CODE (XEXP (src, 1)) == CONST_INT)
4888 ep->offset -= INTVAL (XEXP (src, 1));
4889 else
4890 ep->can_eliminate = 0;
4894 elimination_effects (SET_DEST (x), 0);
4895 elimination_effects (SET_SRC (x), 0);
4896 return;
4898 case MEM:
4899 /* Our only special processing is to pass the mode of the MEM to our
4900 recursive call. */
4901 elimination_effects (XEXP (x, 0), GET_MODE (x));
4902 return;
4904 default:
4905 break;
4908 fmt = GET_RTX_FORMAT (code);
4909 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
4911 if (*fmt == 'e')
4912 elimination_effects (XEXP (x, i), mem_mode);
4913 else if (*fmt == 'E')
4914 for (j = 0; j < XVECLEN (x, i); j++)
4915 elimination_effects (XVECEXP (x, i, j), mem_mode);
4919 /* Descend through rtx X and verify that no references to eliminable registers
4920 remain. If any do remain, mark the involved register as not
4921 eliminable. */
4923 static void
4924 check_eliminable_occurrences (rtx x)
4926 const char *fmt;
4927 int i;
4928 enum rtx_code code;
4930 if (x == 0)
4931 return;
4933 code = GET_CODE (x);
4935 if (code == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4937 struct elim_table *ep;
4939 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
4940 if (ep->from_rtx == x)
4941 ep->can_eliminate = 0;
4942 return;
4945 fmt = GET_RTX_FORMAT (code);
4946 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
4948 if (*fmt == 'e')
4949 check_eliminable_occurrences (XEXP (x, i));
4950 else if (*fmt == 'E')
4952 int j;
4953 for (j = 0; j < XVECLEN (x, i); j++)
4954 check_eliminable_occurrences (XVECEXP (x, i, j));
4959 /* Scan INSN and eliminate all eliminable registers in it.
4961 If REPLACE is nonzero, do the replacement destructively. Also
4962 delete the insn as dead it if it is setting an eliminable register.
4964 If REPLACE is zero, do all our allocations in reload_obstack.
4966 If no eliminations were done and this insn doesn't require any elimination
4967 processing (these are not identical conditions: it might be updating sp,
4968 but not referencing fp; this needs to be seen during reload_as_needed so
4969 that the offset between fp and sp can be taken into consideration), zero
4970 is returned. Otherwise, 1 is returned. */
4972 static int
4973 eliminate_regs_in_insn (rtx insn, int replace)
4975 int icode = recog_memoized (insn);
4976 rtx old_body = PATTERN (insn);
4977 int insn_is_asm = asm_noperands (old_body) >= 0;
4978 rtx old_set = single_set (insn);
4979 rtx new_body;
4980 int val = 0;
4981 int i;
4982 rtx substed_operand[MAX_RECOG_OPERANDS];
4983 rtx orig_operand[MAX_RECOG_OPERANDS];
4984 struct elim_table *ep;
4985 rtx plus_src;
4987 if (! insn_is_asm && icode < 0)
4989 gcc_assert (GET_CODE (PATTERN (insn)) == USE
4990 || GET_CODE (PATTERN (insn)) == CLOBBER
4991 || GET_CODE (PATTERN (insn)) == ADDR_VEC
4992 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
4993 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
4994 return 0;
4997 if (old_set != 0 && REG_P (SET_DEST (old_set))
4998 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
5000 /* Check for setting an eliminable register. */
5001 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
5002 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
5004 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
5005 /* If this is setting the frame pointer register to the
5006 hardware frame pointer register and this is an elimination
5007 that will be done (tested above), this insn is really
5008 adjusting the frame pointer downward to compensate for
5009 the adjustment done before a nonlocal goto. */
5010 if (ep->from == FRAME_POINTER_REGNUM
5011 && ep->to == HARD_FRAME_POINTER_REGNUM)
5013 rtx base = SET_SRC (old_set);
5014 rtx base_insn = insn;
5015 HOST_WIDE_INT offset = 0;
5017 while (base != ep->to_rtx)
5019 rtx prev_insn, prev_set;
5021 if (GET_CODE (base) == PLUS
5022 && GET_CODE (XEXP (base, 1)) == CONST_INT)
5024 offset += INTVAL (XEXP (base, 1));
5025 base = XEXP (base, 0);
5027 else if ((prev_insn = prev_nonnote_insn (base_insn)) != 0
5028 && (prev_set = single_set (prev_insn)) != 0
5029 && rtx_equal_p (SET_DEST (prev_set), base))
5031 base = SET_SRC (prev_set);
5032 base_insn = prev_insn;
5034 else
5035 break;
5038 if (base == ep->to_rtx)
5040 rtx src
5041 = plus_constant (ep->to_rtx, offset - ep->offset);
5043 new_body = old_body;
5044 if (! replace)
5046 new_body = copy_insn (old_body);
5047 if (REG_NOTES (insn))
5048 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
5050 PATTERN (insn) = new_body;
5051 old_set = single_set (insn);
5053 /* First see if this insn remains valid when we
5054 make the change. If not, keep the INSN_CODE
5055 the same and let reload fit it up. */
5056 validate_change (insn, &SET_SRC (old_set), src, 1);
5057 validate_change (insn, &SET_DEST (old_set),
5058 ep->to_rtx, 1);
5059 if (! apply_change_group ())
5061 SET_SRC (old_set) = src;
5062 SET_DEST (old_set) = ep->to_rtx;
5065 val = 1;
5066 goto done;
5069 #endif
5071 /* In this case this insn isn't serving a useful purpose. We
5072 will delete it in reload_as_needed once we know that this
5073 elimination is, in fact, being done. */
5074 PUT_MODE (insn, SImode);
5075 return 1;
5079 /* We allow one special case which happens to work on all machines we
5080 currently support: a single set with the source or a REG_EQUAL
5081 note being a PLUS of an eliminable register and a constant. */
5082 plus_src = 0;
5083 if (old_set && REG_P (SET_DEST (old_set)))
5085 /* First see if the source is of the form (plus (reg) CST). */
5086 if (GET_CODE (SET_SRC (old_set)) == PLUS
5087 && REG_P (XEXP (SET_SRC (old_set), 0))
5088 && GET_CODE (XEXP (SET_SRC (old_set), 1)) == CONST_INT
5089 && REGNO (XEXP (SET_SRC (old_set), 0)) < FIRST_PSEUDO_REGISTER)
5090 plus_src = SET_SRC (old_set);
5091 else if (REG_P (SET_SRC (old_set)))
5093 /* Otherwise, see if we have a REG_EQUAL note of the form
5094 (plus (reg) CST). */
5095 rtx links;
5096 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
5098 if (REG_NOTE_KIND (links) == REG_EQUAL
5099 && GET_CODE (XEXP (links, 0)) == PLUS
5100 && REG_P (XEXP (XEXP (links, 0), 0))
5101 && GET_CODE (XEXP (XEXP (links, 0), 1)) == CONST_INT
5102 && REGNO (XEXP (XEXP (links, 0), 0)) < FIRST_PSEUDO_REGISTER)
5104 plus_src = XEXP (links, 0);
5105 break;
5110 if (plus_src)
5112 rtx reg = XEXP (plus_src, 0);
5113 HOST_WIDE_INT offset = INTVAL (XEXP (plus_src, 1));
5115 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
5116 if (ep->from_rtx == reg && ep->can_eliminate)
5118 offset += ep->offset;
5120 if (offset == 0)
5122 int num_clobbers;
5123 /* We assume here that if we need a PARALLEL with
5124 CLOBBERs for this assignment, we can do with the
5125 MATCH_SCRATCHes that add_clobbers allocates.
5126 There's not much we can do if that doesn't work. */
5127 PATTERN (insn) = gen_rtx_SET (VOIDmode,
5128 SET_DEST (old_set),
5129 ep->to_rtx);
5130 num_clobbers = 0;
5131 INSN_CODE (insn) = recog (PATTERN (insn), insn, &num_clobbers);
5132 if (num_clobbers)
5134 rtvec vec = rtvec_alloc (num_clobbers + 1);
5136 vec->elem[0] = PATTERN (insn);
5137 PATTERN (insn) = gen_rtx_PARALLEL (VOIDmode, vec);
5138 add_clobbers (PATTERN (insn), INSN_CODE (insn));
5140 gcc_assert (INSN_CODE (insn) >= 0);
5142 /* If we have a nonzero offset, and the source is already
5143 a simple REG, the following transformation would
5144 increase the cost of the insn by replacing a simple REG
5145 with (plus (reg sp) CST). So try only when plus_src
5146 comes from old_set proper, not REG_NOTES. */
5147 else if (SET_SRC (old_set) == plus_src)
5149 new_body = old_body;
5150 if (! replace)
5152 new_body = copy_insn (old_body);
5153 if (REG_NOTES (insn))
5154 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
5156 PATTERN (insn) = new_body;
5157 old_set = single_set (insn);
5159 XEXP (SET_SRC (old_set), 0) = ep->to_rtx;
5160 XEXP (SET_SRC (old_set), 1) = GEN_INT (offset);
5162 else
5163 break;
5165 val = 1;
5166 /* This can't have an effect on elimination offsets, so skip right
5167 to the end. */
5168 goto done;
5172 /* Determine the effects of this insn on elimination offsets. */
5173 elimination_effects (old_body, 0);
5175 /* Eliminate all eliminable registers occurring in operands that
5176 can be handled by reload. */
5177 extract_insn (insn);
5178 for (i = 0; i < recog_data.n_operands; i++)
5180 orig_operand[i] = recog_data.operand[i];
5181 substed_operand[i] = recog_data.operand[i];
5183 /* For an asm statement, every operand is eliminable. */
5184 if (insn_is_asm || insn_data[icode].operand[i].eliminable)
5186 /* Check for setting a register that we know about. */
5187 if (recog_data.operand_type[i] != OP_IN
5188 && REG_P (orig_operand[i]))
5190 /* If we are assigning to a register that can be eliminated, it
5191 must be as part of a PARALLEL, since the code above handles
5192 single SETs. We must indicate that we can no longer
5193 eliminate this reg. */
5194 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
5195 ep++)
5196 if (ep->from_rtx == orig_operand[i])
5197 ep->can_eliminate = 0;
5200 substed_operand[i] = eliminate_regs (recog_data.operand[i], 0,
5201 replace ? insn : NULL_RTX);
5202 if (substed_operand[i] != orig_operand[i])
5203 val = 1;
5204 /* Terminate the search in check_eliminable_occurrences at
5205 this point. */
5206 *recog_data.operand_loc[i] = 0;
5208 /* If an output operand changed from a REG to a MEM and INSN is an
5209 insn, write a CLOBBER insn. */
5210 if (recog_data.operand_type[i] != OP_IN
5211 && REG_P (orig_operand[i])
5212 && MEM_P (substed_operand[i])
5213 && replace)
5214 emit_insn_after (gen_rtx_CLOBBER (VOIDmode, orig_operand[i]),
5215 insn);
5219 for (i = 0; i < recog_data.n_dups; i++)
5220 *recog_data.dup_loc[i]
5221 = *recog_data.operand_loc[(int) recog_data.dup_num[i]];
5223 /* If any eliminable remain, they aren't eliminable anymore. */
5224 check_eliminable_occurrences (old_body);
5226 /* Substitute the operands; the new values are in the substed_operand
5227 array. */
5228 for (i = 0; i < recog_data.n_operands; i++)
5229 *recog_data.operand_loc[i] = substed_operand[i];
5230 for (i = 0; i < recog_data.n_dups; i++)
5231 *recog_data.dup_loc[i] = substed_operand[(int) recog_data.dup_num[i]];
5233 /* If we are replacing a body that was a (set X (plus Y Z)), try to
5234 re-recognize the insn. We do this in case we had a simple addition
5235 but now can do this as a load-address. This saves an insn in this
5236 common case.
5237 If re-recognition fails, the old insn code number will still be used,
5238 and some register operands may have changed into PLUS expressions.
5239 These will be handled by find_reloads by loading them into a register
5240 again. */
5242 if (val)
5244 /* If we aren't replacing things permanently and we changed something,
5245 make another copy to ensure that all the RTL is new. Otherwise
5246 things can go wrong if find_reload swaps commutative operands
5247 and one is inside RTL that has been copied while the other is not. */
5248 new_body = old_body;
5249 if (! replace)
5251 new_body = copy_insn (old_body);
5252 if (REG_NOTES (insn))
5253 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
5255 PATTERN (insn) = new_body;
5257 /* If we had a move insn but now we don't, rerecognize it. This will
5258 cause spurious re-recognition if the old move had a PARALLEL since
5259 the new one still will, but we can't call single_set without
5260 having put NEW_BODY into the insn and the re-recognition won't
5261 hurt in this rare case. */
5262 /* ??? Why this huge if statement - why don't we just rerecognize the
5263 thing always? */
5264 if (! insn_is_asm
5265 && old_set != 0
5266 && ((REG_P (SET_SRC (old_set))
5267 && (GET_CODE (new_body) != SET
5268 || !REG_P (SET_SRC (new_body))))
5269 /* If this was a load from or store to memory, compare
5270 the MEM in recog_data.operand to the one in the insn.
5271 If they are not equal, then rerecognize the insn. */
5272 || (old_set != 0
5273 && ((MEM_P (SET_SRC (old_set))
5274 && SET_SRC (old_set) != recog_data.operand[1])
5275 || (MEM_P (SET_DEST (old_set))
5276 && SET_DEST (old_set) != recog_data.operand[0])))
5277 /* If this was an add insn before, rerecognize. */
5278 || GET_CODE (SET_SRC (old_set)) == PLUS))
5280 int new_icode = recog (PATTERN (insn), insn, 0);
5281 if (new_icode < 0)
5282 INSN_CODE (insn) = icode;
5286 /* Restore the old body. If there were any changes to it, we made a copy
5287 of it while the changes were still in place, so we'll correctly return
5288 a modified insn below. */
5289 if (! replace)
5291 /* Restore the old body. */
5292 for (i = 0; i < recog_data.n_operands; i++)
5293 *recog_data.operand_loc[i] = orig_operand[i];
5294 for (i = 0; i < recog_data.n_dups; i++)
5295 *recog_data.dup_loc[i] = orig_operand[(int) recog_data.dup_num[i]];
5298 /* Update all elimination pairs to reflect the status after the current
5299 insn. The changes we make were determined by the earlier call to
5300 elimination_effects.
5302 We also detect cases where register elimination cannot be done,
5303 namely, if a register would be both changed and referenced outside a MEM
5304 in the resulting insn since such an insn is often undefined and, even if
5305 not, we cannot know what meaning will be given to it. Note that it is
5306 valid to have a register used in an address in an insn that changes it
5307 (presumably with a pre- or post-increment or decrement).
5309 If anything changes, return nonzero. */
5311 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
5313 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
5314 ep->can_eliminate = 0;
5316 ep->ref_outside_mem = 0;
5318 if (ep->previous_offset != ep->offset)
5319 val = 1;
5322 done:
5323 /* If we changed something, perform elimination in REG_NOTES. This is
5324 needed even when REPLACE is zero because a REG_DEAD note might refer
5325 to a register that we eliminate and could cause a different number
5326 of spill registers to be needed in the final reload pass than in
5327 the pre-passes. */
5328 if (val && REG_NOTES (insn) != 0)
5329 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));
5331 return val;
5334 /* Loop through all elimination pairs.
5335 Recalculate the number not at initial offset.
5337 Compute the maximum offset (minimum offset if the stack does not
5338 grow downward) for each elimination pair. */
5340 static void
5341 update_eliminable_offsets (void)
5343 struct elim_table *ep;
5345 num_not_at_initial_offset = 0;
5346 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
5348 ep->previous_offset = ep->offset;
5349 if (ep->can_eliminate && ep->offset != ep->initial_offset)
5350 num_not_at_initial_offset++;
5354 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
5355 replacement we currently believe is valid, mark it as not eliminable if X
5356 modifies DEST in any way other than by adding a constant integer to it.
5358 If DEST is the frame pointer, we do nothing because we assume that
5359 all assignments to the hard frame pointer are nonlocal gotos and are being
5360 done at a time when they are valid and do not disturb anything else.
5361 Some machines want to eliminate a fake argument pointer with either the
5362 frame or stack pointer. Assignments to the hard frame pointer must not
5363 prevent this elimination.
5365 Called via note_stores from reload before starting its passes to scan
5366 the insns of the function. */
5368 static void
5369 mark_not_eliminable (rtx dest, rtx x, void *data ATTRIBUTE_UNUSED)
5371 unsigned int i;
5373 /* A SUBREG of a hard register here is just changing its mode. We should
5374 not see a SUBREG of an eliminable hard register, but check just in
5375 case. */
5376 if (GET_CODE (dest) == SUBREG)
5377 dest = SUBREG_REG (dest);
5379 if (dest == hard_frame_pointer_rtx)
5380 return;
5382 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
5383 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
5384 && (GET_CODE (x) != SET
5385 || GET_CODE (SET_SRC (x)) != PLUS
5386 || XEXP (SET_SRC (x), 0) != dest
5387 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
5389 reg_eliminate[i].can_eliminate_previous
5390 = reg_eliminate[i].can_eliminate = 0;
5391 num_eliminable--;
5395 /* Verify that the initial elimination offsets did not change since the
5396 last call to set_initial_elim_offsets. This is used to catch cases
5397 where something illegal happened during reload_as_needed that could
5398 cause incorrect code to be generated if we did not check for it. */
5400 static void
5401 verify_initial_elim_offsets (void)
5403 HOST_WIDE_INT t;
5405 #ifdef ELIMINABLE_REGS
5406 struct elim_table *ep;
5408 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
5410 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t);
5411 gcc_assert (t == ep->initial_offset);
5413 #else
5414 INITIAL_FRAME_POINTER_OFFSET (t);
5415 gcc_assert (t == reg_eliminate[0].initial_offset);
5416 #endif
5419 /* Reset all offsets on eliminable registers to their initial values. */
5421 static void
5422 set_initial_elim_offsets (void)
5424 struct elim_table *ep = reg_eliminate;
5426 #ifdef ELIMINABLE_REGS
5427 for (; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
5429 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
5430 ep->previous_offset = ep->offset = ep->initial_offset;
5432 #else
5433 INITIAL_FRAME_POINTER_OFFSET (ep->initial_offset);
5434 ep->previous_offset = ep->offset = ep->initial_offset;
5435 #endif
5437 num_not_at_initial_offset = 0;
5440 /* Subroutine of set_initial_label_offsets called via for_each_eh_label. */
5442 static void
5443 set_initial_eh_label_offset (rtx label)
5445 set_label_offsets (label, NULL_RTX, 1);
5448 /* Initialize the known label offsets.
5449 Set a known offset for each forced label to be at the initial offset
5450 of each elimination. We do this because we assume that all
5451 computed jumps occur from a location where each elimination is
5452 at its initial offset.
5453 For all other labels, show that we don't know the offsets. */
5455 static void
5456 set_initial_label_offsets (void)
5458 rtx x;
5459 memset (offsets_known_at, 0, num_labels);
5461 for (x = forced_labels; x; x = XEXP (x, 1))
5462 if (XEXP (x, 0))
5463 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
5465 for_each_eh_label (set_initial_eh_label_offset);
5468 /* Set all elimination offsets to the known values for the code label given
5469 by INSN. */
5471 static void
5472 set_offsets_for_label (rtx insn)
5474 unsigned int i;
5475 int label_nr = CODE_LABEL_NUMBER (insn);
5476 struct elim_table *ep;
5478 num_not_at_initial_offset = 0;
5479 for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++)
5481 ep->offset = ep->previous_offset
5482 = offsets_at[label_nr - first_label_num][i];
5483 if (ep->can_eliminate && ep->offset != ep->initial_offset)
5484 num_not_at_initial_offset++;
5488 /* See if anything that happened changes which eliminations are valid.
5489 For example, on the SPARC, whether or not the frame pointer can
5490 be eliminated can depend on what registers have been used. We need
5491 not check some conditions again (such as flag_omit_frame_pointer)
5492 since they can't have changed. */
5494 static void
5495 update_eliminables (HARD_REG_SET *pset)
5497 int previous_frame_pointer_needed = frame_pointer_needed;
5498 struct elim_table *ep;
5500 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
5501 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
5502 #ifdef ELIMINABLE_REGS
5503 || ! CAN_ELIMINATE (ep->from, ep->to)
5504 #endif
5506 ep->can_eliminate = 0;
5508 /* Look for the case where we have discovered that we can't replace
5509 register A with register B and that means that we will now be
5510 trying to replace register A with register C. This means we can
5511 no longer replace register C with register B and we need to disable
5512 such an elimination, if it exists. This occurs often with A == ap,
5513 B == sp, and C == fp. */
5515 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
5517 struct elim_table *op;
5518 int new_to = -1;
5520 if (! ep->can_eliminate && ep->can_eliminate_previous)
5522 /* Find the current elimination for ep->from, if there is a
5523 new one. */
5524 for (op = reg_eliminate;
5525 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
5526 if (op->from == ep->from && op->can_eliminate)
5528 new_to = op->to;
5529 break;
5532 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
5533 disable it. */
5534 for (op = reg_eliminate;
5535 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
5536 if (op->from == new_to && op->to == ep->to)
5537 op->can_eliminate = 0;
5541 /* See if any registers that we thought we could eliminate the previous
5542 time are no longer eliminable. If so, something has changed and we
5543 must spill the register. Also, recompute the number of eliminable
5544 registers and see if the frame pointer is needed; it is if there is
5545 no elimination of the frame pointer that we can perform. */
5547 frame_pointer_needed = 1;
5548 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
5550 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
5551 && ep->to != HARD_FRAME_POINTER_REGNUM)
5552 frame_pointer_needed = 0;
5554 if (! ep->can_eliminate && ep->can_eliminate_previous)
5556 ep->can_eliminate_previous = 0;
5557 SET_HARD_REG_BIT (*pset, ep->from);
5558 num_eliminable--;
5562 /* If we didn't need a frame pointer last time, but we do now, spill
5563 the hard frame pointer. */
5564 if (frame_pointer_needed && ! previous_frame_pointer_needed)
5565 SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM);
5568 /* Initialize the table of registers to eliminate. */
5570 static void
5571 init_elim_table (void)
5573 struct elim_table *ep;
5574 #ifdef ELIMINABLE_REGS
5575 const struct elim_table_1 *ep1;
5576 #endif
5578 if (!reg_eliminate)
5579 reg_eliminate = xcalloc (sizeof (struct elim_table), NUM_ELIMINABLE_REGS);
5581 /* Does this function require a frame pointer? */
5583 frame_pointer_needed = (! flag_omit_frame_pointer
5584 /* ?? If EXIT_IGNORE_STACK is set, we will not save
5585 and restore sp for alloca. So we can't eliminate
5586 the frame pointer in that case. At some point,
5587 we should improve this by emitting the
5588 sp-adjusting insns for this case. */
5589 || (current_function_calls_alloca
5590 && EXIT_IGNORE_STACK)
5591 || FRAME_POINTER_REQUIRED);
5593 num_eliminable = 0;
5595 #ifdef ELIMINABLE_REGS
5596 for (ep = reg_eliminate, ep1 = reg_eliminate_1;
5597 ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++, ep1++)
5599 ep->from = ep1->from;
5600 ep->to = ep1->to;
5601 ep->can_eliminate = ep->can_eliminate_previous
5602 = (CAN_ELIMINATE (ep->from, ep->to)
5603 && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
5605 #else
5606 reg_eliminate[0].from = reg_eliminate_1[0].from;
5607 reg_eliminate[0].to = reg_eliminate_1[0].to;
5608 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
5609 = ! frame_pointer_needed;
5610 #endif
5612 /* Count the number of eliminable registers and build the FROM and TO
5613 REG rtx's. Note that code in gen_rtx_REG will cause, e.g.,
5614 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
5615 We depend on this. */
5616 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
5618 num_eliminable += ep->can_eliminate;
5619 ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
5620 ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
5624 /* Kick all pseudos out of hard register REGNO.
5626 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
5627 because we found we can't eliminate some register. In the case, no pseudos
5628 are allowed to be in the register, even if they are only in a block that
5629 doesn't require spill registers, unlike the case when we are spilling this
5630 hard reg to produce another spill register.
5632 Return nonzero if any pseudos needed to be kicked out. */
5634 static void
5635 spill_hard_reg (unsigned int regno, int cant_eliminate)
5637 int i;
5639 if (cant_eliminate)
5641 SET_HARD_REG_BIT (bad_spill_regs_global, regno);
5642 regs_ever_live[regno] = 1;
5645 /* Spill every pseudo reg that was allocated to this reg
5646 or to something that overlaps this reg. */
5648 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
5649 if (reg_renumber[i] >= 0
5650 && (unsigned int) reg_renumber[i] <= regno
5651 && ((unsigned int) reg_renumber[i]
5652 + hard_regno_nregs[(unsigned int) reg_renumber[i]]
5653 [PSEUDO_REGNO_MODE (i)]
5654 > regno))
5655 SET_REGNO_REG_SET (&spilled_pseudos, i);
5658 /* After find_reload_regs has been run for all insn that need reloads,
5659 and/or spill_hard_regs was called, this function is used to actually
5660 spill pseudo registers and try to reallocate them. It also sets up the
5661 spill_regs array for use by choose_reload_regs. */
5663 static int
5664 finish_spills (int global)
5666 struct insn_chain *chain;
5667 int something_changed = 0;
5668 unsigned i;
5669 reg_set_iterator rsi;
5671 /* Build the spill_regs array for the function. */
5672 /* If there are some registers still to eliminate and one of the spill regs
5673 wasn't ever used before, additional stack space may have to be
5674 allocated to store this register. Thus, we may have changed the offset
5675 between the stack and frame pointers, so mark that something has changed.
5677 One might think that we need only set VAL to 1 if this is a call-used
5678 register. However, the set of registers that must be saved by the
5679 prologue is not identical to the call-used set. For example, the
5680 register used by the call insn for the return PC is a call-used register,
5681 but must be saved by the prologue. */
5683 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5684 if (TEST_HARD_REG_BIT (used_spill_regs, i))
5686 if (num_eliminable && ! regs_ever_live[i])
5687 something_changed = 1;
5688 regs_ever_live[i] = 1;
5691 EXECUTE_IF_SET_IN_REG_SET (&spilled_pseudos, FIRST_PSEUDO_REGISTER, i, rsi)
5693 /* Record the current hard register the pseudo is allocated to in
5694 pseudo_previous_regs so we avoid reallocating it to the same
5695 hard reg in a later pass. */
5696 gcc_assert (reg_renumber[i] >= 0);
5698 SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]);
5699 /* Mark it as no longer having a hard register home. */
5700 reg_renumber[i] = -1;
5701 /* We will need to scan everything again. */
5702 something_changed = 1;
5705 /* Retry global register allocation if possible. */
5706 if (global)
5708 memset (pseudo_forbidden_regs, 0, max_regno * sizeof (HARD_REG_SET));
5709 /* For every insn that needs reloads, set the registers used as spill
5710 regs in pseudo_forbidden_regs for every pseudo live across the
5711 insn. */
5712 for (chain = insns_need_reload; chain; chain = chain->next_need_reload)
5714 EXECUTE_IF_SET_IN_REG_SET
5715 (&chain->live_before, FIRST_PSEUDO_REGISTER, i, rsi)
5717 IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
5718 chain->used_spill_regs);
5720 EXECUTE_IF_SET_IN_REG_SET
5721 (&chain->live_after, FIRST_PSEUDO_REGISTER, i, rsi)
5723 IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
5724 chain->used_spill_regs);
5728 /* Retry allocating the spilled pseudos. For each reg, merge the
5729 various reg sets that indicate which hard regs can't be used,
5730 and call retry_global_alloc.
5731 We change spill_pseudos here to only contain pseudos that did not
5732 get a new hard register. */
5733 for (i = FIRST_PSEUDO_REGISTER; i < (unsigned)max_regno; i++)
5734 if (reg_old_renumber[i] != reg_renumber[i])
5736 HARD_REG_SET forbidden;
5737 COPY_HARD_REG_SET (forbidden, bad_spill_regs_global);
5738 IOR_HARD_REG_SET (forbidden, pseudo_forbidden_regs[i]);
5739 IOR_HARD_REG_SET (forbidden, pseudo_previous_regs[i]);
5740 retry_global_alloc (i, forbidden);
5741 if (reg_renumber[i] >= 0)
5742 CLEAR_REGNO_REG_SET (&spilled_pseudos, i);
5746 /* Fix up the register information in the insn chain.
5747 This involves deleting those of the spilled pseudos which did not get
5748 a new hard register home from the live_{before,after} sets. */
5749 for (chain = reload_insn_chain; chain; chain = chain->next)
5751 AND_COMPL_REG_SET (&chain->live_before, &spilled_pseudos);
5752 AND_COMPL_REG_SET (&chain->live_after, &spilled_pseudos);
5755 /* Let alter_reg modify the reg rtx's for the modified pseudos. */
5756 for (i = FIRST_PSEUDO_REGISTER; i < (unsigned)max_regno; i++)
5758 int regno = reg_renumber[i];
5759 if (reg_old_renumber[i] == regno)
5760 continue;
5762 alter_reg (i, reg_old_renumber[i]);
5763 reg_old_renumber[i] = regno;
5764 if (dump_file)
5766 if (regno == -1)
5767 fprintf (dump_file, " Register %d now on stack.\n\n", i);
5768 else
5769 fprintf (dump_file, " Register %d now in %d.\n\n",
5770 i, reg_renumber[i]);
5774 return something_changed;
5777 /* Find all paradoxical subregs within X and update reg_max_ref_width. */
5779 static void
5780 scan_paradoxical_subregs (rtx x)
5782 int i;
5783 const char *fmt;
5784 enum rtx_code code = GET_CODE (x);
5786 switch (code)
5788 case REG:
5789 case CONST_INT:
5790 case CONST:
5791 case SYMBOL_REF:
5792 case LABEL_REF:
5793 case CONST_DOUBLE:
5794 case CONST_VECTOR: /* shouldn't happen, but just in case. */
5795 case CC0:
5796 case PC:
5797 case USE:
5798 case CLOBBER:
5799 return;
5801 case SUBREG:
5802 if (REG_P (SUBREG_REG (x))
5803 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
5804 reg_max_ref_width[REGNO (SUBREG_REG (x))]
5805 = GET_MODE_SIZE (GET_MODE (x));
5806 return;
5808 default:
5809 break;
5812 fmt = GET_RTX_FORMAT (code);
5813 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5815 if (fmt[i] == 'e')
5816 scan_paradoxical_subregs (XEXP (x, i));
5817 else if (fmt[i] == 'E')
5819 int j;
5820 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5821 scan_paradoxical_subregs (XVECEXP (x, i, j));
5826 /* Reload pseudo-registers into hard regs around each insn as needed.
5827 Additional register load insns are output before the insn that needs it
5828 and perhaps store insns after insns that modify the reloaded pseudo reg. */
5830 static void
5831 reload_as_needed (int live_known)
5833 struct insn_chain *chain;
5834 #if defined (AUTO_INC_DEC)
5835 int i;
5836 #endif
5838 memset (spill_reg_rtx, 0, sizeof spill_reg_rtx);
5839 CLEAR_HARD_REG_SET (reg_reloaded_call_part_clobbered);
5841 set_initial_elim_offsets ();
5843 for (chain = reload_insn_chain; chain; chain = chain->next)
5845 rtx prev = 0;
5846 rtx insn = chain->insn;
5848 /* If we pass a label, copy the offsets from the label information
5849 into the current offsets of each elimination. */
5850 if (LABEL_P (insn))
5851 set_offsets_for_label (insn);
5853 else if (INSN_P (insn))
5855 /* If this is a USE and CLOBBER of a MEM, ensure that any
5856 references to eliminable registers have been removed. */
5858 if ((GET_CODE (PATTERN (insn)) == USE
5859 || GET_CODE (PATTERN (insn)) == CLOBBER)
5860 && MEM_P (XEXP (PATTERN (insn), 0)))
5861 XEXP (XEXP (PATTERN (insn), 0), 0)
5862 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
5863 GET_MODE (XEXP (PATTERN (insn), 0)),
5864 NULL_RTX);
5866 /* If we need to do register elimination processing, do so.
5867 This might delete the insn, in which case we are done. */
5868 if ((num_eliminable || num_eliminable_invariants) && chain->need_elim)
5870 eliminate_regs_in_insn (insn, 1);
5871 if (NOTE_P (insn))
5873 update_eliminable_offsets ();
5874 continue;
5878 if (chain->will_be_deleted)
5880 PUT_CODE (insn, NOTE);
5881 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
5882 NOTE_SOURCE_FILE (insn) = 0;
5883 update_eliminable_offsets ();
5884 continue;
5887 /* If any of these three conditions are true, the actions of
5888 find_reloads must be redone here; it may modify the insn. */
5890 if (chain->need_elim || chain->n_reloads > 0
5891 || chain->need_operand_change)
5892 /* First find the pseudo regs that must be reloaded for this insn.
5893 This info is returned in the tables reload_... (see reload.h).
5894 Also modify the body of INSN by substituting RELOAD
5895 rtx's for those pseudo regs. */
5896 find_reloads (chain, insn, spill_indirect_levels, live_known);
5898 if (chain->n_reloads > 0)
5900 rtx next = NEXT_INSN (insn);
5901 rtx p;
5903 prev = PREV_INSN (insn);
5905 /* Now compute which reload regs to reload them into. Perhaps
5906 reusing reload regs from previous insns, or else output
5907 load insns to reload them. Maybe output store insns too.
5908 Record the choices of reload reg in reload_reg_rtx. */
5909 choose_reload_regs (chain);
5911 /* Generate the insns to reload operands into or out of
5912 their reload regs. */
5913 emit_reload_insns (chain);
5915 /* Substitute the chosen reload regs from reload_reg_rtx
5916 into the insn's body (or perhaps into the bodies of other
5917 load and store insn that we just made for reloading
5918 and that we moved the structure into). */
5919 subst_reloads (insn);
5921 /* If this was an ASM, make sure that all the reload insns
5922 we have generated are valid. If not, give an error
5923 and delete them. */
5925 if (asm_noperands (PATTERN (insn)) >= 0)
5926 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
5927 if (p != insn && INSN_P (p)
5928 && GET_CODE (PATTERN (p)) != USE
5929 && (recog_memoized (p) < 0
5930 || (extract_insn (p), ! constrain_operands (1))))
5932 error_for_asm (insn,
5933 "%<asm%> operand requires "
5934 "impossible reload");
5935 delete_insn (p);
5939 if (num_eliminable && chain->need_elim)
5940 update_eliminable_offsets ();
5945 /* Return nonzero if the rtx X is invariant over the current function. */
5946 /* ??? Actually, the places where we use this expect exactly what
5947 * is tested here, and not everything that is function invariant. In
5948 * particular, the frame pointer and arg pointer are special cased;
5949 * pic_offset_table_rtx is not, and this will cause aborts when we
5950 * go to spill these things to memory. */
5952 static int
5953 function_invariant_p (rtx x)
5955 if (CONSTANT_P (x))
5956 return 1;
5957 if (x == frame_pointer_rtx || x == arg_pointer_rtx)
5958 return 1;
5959 if (GET_CODE (x) == PLUS
5960 && (XEXP (x, 0) == frame_pointer_rtx || XEXP (x, 0) == arg_pointer_rtx)
5961 && CONSTANT_P (XEXP (x, 1)))
5962 return 1;
5963 return 0;
5966 /* Assign hard reg targets for the pseudo-registers we must reload
5967 into hard regs for this insn. */
5969 static void
5970 choose_reload_regs (struct insn_chain *chain)
5972 int i;
5974 for (i = 0; i < chain->n_reloads; i++)
5976 if (rld[i].class != chain->rld[i].class
5977 || rld[i].inmode != chain->rld[i].inmode
5978 || rld[i].outmode != chain->rld[i].outmode)
5979 abort ();
5982 for (i = 0; i < chain->n_reloads; i++)
5984 rtx new;
5985 int regno = chain->rld[i].reginfo.regno;
5986 rld[i].reg_rtx = 0;
5988 if (! chain->rld[i].reginfo.allocated)
5989 continue;
5991 if (chain->rld[i].override_in)
5992 rld[i].in = chain->rld[i].override_in;
5993 if (chain->rld[i].override_out)
5994 rld[i].out = chain->rld[i].override_out;
5996 new = spill_reg_rtx[regno];
5998 if (new == 0 || GET_MODE (new) != rld[i].mode)
5999 new = gen_rtx_REG (rld[i].mode, regno);
6000 rld[i].reg_rtx = spill_reg_rtx[regno] = new;
6004 /* Generate insns to perform reload RL, which is for the insn in CHAIN and
6005 has the number J. OLD contains the value to be used as input. */
6007 static void
6008 emit_input_reload_insns (struct insn_chain *chain, struct reload *rl,
6009 rtx inval, int j)
6011 rtx insn = chain->insn;
6012 rtx reloadreg = rl->reg_rtx;
6013 enum machine_mode mode;
6015 /* Determine the mode to reload in.
6016 This is very tricky because we have three to choose from.
6017 There is the mode the insn operand wants (rl->inmode).
6018 There is the mode of the reload register RELOADREG.
6019 There is the intrinsic mode of the operand, which we could find
6020 by stripping some SUBREGs.
6021 It turns out that RELOADREG's mode is irrelevant:
6022 we can change that arbitrarily.
6024 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
6025 then the reload reg may not support QImode moves, so use SImode.
6026 If foo is in memory due to spilling a pseudo reg, this is safe,
6027 because the QImode value is in the least significant part of a
6028 slot big enough for a SImode. If foo is some other sort of
6029 memory reference, then it is impossible to reload this case,
6030 so previous passes had better make sure this never happens.
6032 Then consider a one-word union which has SImode and one of its
6033 members is a float, being fetched as (SUBREG:SF union:SI).
6034 We must fetch that as SFmode because we could be loading into
6035 a float-only register. In this case INVAL's mode is correct.
6037 Consider an immediate integer: it has VOIDmode. Here we need
6038 to get a mode from something else.
6040 In some cases, there is a fourth mode, the operand's
6041 containing mode. If the insn specifies a containing mode for
6042 this operand, it overrides all others.
6044 I am not sure whether the algorithm here is always right,
6045 but it does the right things in those cases. */
6047 mode = GET_MODE (inval);
6048 if (mode == VOIDmode)
6049 mode = rl->inmode;
6051 /* Encapsulate both RELOADREG and INVAL into that mode,
6052 then load RELOADREG from INVAL. Note that we cannot use
6053 gen_lowpart_common since it can do the wrong thing when
6054 RELOADREG has a multi-word mode. Note that RELOADREG
6055 must always be a REG here. */
6057 if (GET_MODE (reloadreg) != mode)
6058 reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);
6059 while (GET_CODE (inval) == SUBREG && GET_MODE (inval) != mode)
6060 inval = SUBREG_REG (inval);
6061 if (GET_MODE (inval) != VOIDmode
6062 && mode != GET_MODE (inval))
6063 inval = gen_lowpart_SUBREG (mode, inval);
6065 /* Auto-increment addresses must be reloaded in a special way. */
6066 if (rl->inc)
6068 /* We are not going to bother supporting the case where a
6069 incremented register can't be copied directly from
6070 INVAL since this seems highly unlikely. */
6071 if (rl->secondary_in_reload >= 0)
6072 abort ();
6074 /* Output a special code sequence for this case. */
6075 inc_for_reload (reloadreg, inval, inval, rl->inc);
6076 /* Prevent normal processing of this reload. */
6077 goto out;
6080 /* We can't do that, so output an insn to load RELOADREG. */
6082 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6083 if (rl->secondary_in_reload >= 0)
6085 struct reload *rl2 = rld + rl->secondary_in_reload;
6086 rtx tmp;
6087 enum insn_code icode = rl->secondary_in_icode;
6088 /* See if we need a scratch register to load the
6089 intermediate register (a tertiary reload). */
6090 enum insn_code tertiary_icode = rl2->secondary_in_icode;
6092 if (! rl2->reg_rtx)
6093 abort ();
6095 if (GET_CODE (inval) == REG && REGNO (inval) >= FIRST_PSEUDO_REGISTER
6096 && reg_equiv_mem[REGNO (inval)] != 0)
6097 inval = reg_equiv_mem[REGNO (inval)];
6099 if (icode != CODE_FOR_nothing)
6101 emit_insn (GEN_FCN (icode) (reloadreg, inval, rl2->reg_rtx));
6102 goto out;
6105 if (tertiary_icode != CODE_FOR_nothing)
6107 rtx third_reload_reg = rld[rl2->secondary_in_reload].reg_rtx;
6109 emit_insn (GEN_FCN (tertiary_icode) (rl2->reg_rtx, inval,
6110 third_reload_reg));
6112 else
6113 gen_reload (rl2->reg_rtx, inval, j);
6115 inval = rl2->reg_rtx;
6117 #endif
6119 if (! rtx_equal_p (reloadreg, inval))
6120 gen_reload (reloadreg, inval, j);
6122 out:
6123 if (flag_non_call_exceptions)
6124 copy_eh_notes (insn, get_insns ());
6127 /* Generate insns to for the output reload RL, which is for the insn described
6128 by CHAIN and has the number J. */
6129 static void
6130 emit_output_reload_insns (struct insn_chain *chain, struct reload *rl,
6131 int j)
6133 rtx reloadreg = rl->reg_rtx;
6134 rtx insn = chain->insn;
6135 rtx set, old = rl->out;
6136 enum machine_mode mode = GET_MODE (old);
6138 /* Determine the mode to reload in.
6139 See comments above (for input reloading). */
6141 if (mode == VOIDmode)
6143 /* VOIDmode should never happen for an output. */
6144 if (asm_noperands (PATTERN (insn)) < 0)
6145 /* It's the compiler's fault. */
6146 fatal_insn ("VOIDmode on an output", insn);
6147 error_for_asm (insn, "output operand is constant in %<asm%>");
6148 /* Prevent crash--use something we know is valid. */
6149 mode = word_mode;
6150 old = gen_rtx_REG (mode, REGNO (reloadreg));
6153 if (GET_MODE (reloadreg) != mode)
6154 reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);
6156 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6158 /* If we need two reload regs, set RELOADREG to the intermediate
6159 one, since it will be stored into OLD. We might need a secondary
6160 register only for an input reload, so check again here. */
6162 if (rl->secondary_out_reload >= 0)
6164 struct reload *rl2 = rld + rl->secondary_out_reload;
6165 rtx second_reloadreg = reloadreg;
6166 rtx real_old = old;
6167 enum insn_code tertiary_icode = rl2->secondary_out_icode;
6169 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
6170 && reg_equiv_mem[REGNO (old)] != 0)
6171 real_old = reg_equiv_mem[REGNO (old)];
6173 reloadreg = rl2->reg_rtx;
6175 /* See if RELOADREG is to be used as a scratch register
6176 or as an intermediate register. */
6177 if (rl->secondary_out_icode != CODE_FOR_nothing)
6179 emit_insn ((GEN_FCN (rl->secondary_out_icode)
6180 (real_old, second_reloadreg, reloadreg)));
6181 goto out;
6184 /* See if we need both a scratch and intermediate reload
6185 register. */
6187 if (GET_MODE (reloadreg) != mode)
6188 reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
6190 if (tertiary_icode != CODE_FOR_nothing)
6192 rtx third_reloadreg = rld[rl2->secondary_out_reload].reg_rtx;
6193 rtx tem;
6195 /* Copy primary reload reg to secondary reload reg.
6196 (Note that these have been swapped above, then
6197 secondary reload reg to OLD using our insn. */
6199 /* If REAL_OLD is a paradoxical SUBREG, remove it
6200 and try to put the opposite SUBREG on
6201 RELOADREG. */
6202 if (GET_CODE (real_old) == SUBREG
6203 && (GET_MODE_SIZE (GET_MODE (real_old))
6204 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
6205 && 0 != (tem = gen_lowpart_common
6206 (GET_MODE (SUBREG_REG (real_old)),
6207 reloadreg)))
6208 real_old = SUBREG_REG (real_old), reloadreg = tem;
6210 gen_reload (reloadreg, second_reloadreg, j);
6211 emit_insn ((GEN_FCN (tertiary_icode)
6212 (real_old, reloadreg, third_reloadreg)));
6213 goto out;
6216 /* Copy between the reload regs here and then to
6217 OUT later. */
6218 gen_reload (reloadreg, second_reloadreg, j);
6220 #endif
6222 /* Output the last reload insn. */
6224 /* Don't output the last reload if OLD is not the dest of
6225 INSN and is in the src and is clobbered by INSN. */
6226 if (! flag_expensive_optimizations
6227 || GET_CODE (old) != REG
6228 || ! (set = single_set (insn))
6229 || rtx_equal_p (old, SET_DEST (set))
6230 || ! reg_mentioned_p (old, SET_SRC (set))
6231 || !((REGNO (old) < FIRST_PSEUDO_REGISTER)
6232 && regno_clobbered_p (REGNO (old), insn, rl->mode, 0)))
6233 gen_reload (old, reloadreg, j);
6235 out:
6236 if (flag_non_call_exceptions)
6237 copy_eh_notes (insn, get_insns ());
6240 /* Do input reloading for reload RL, which is for the insn described by CHAIN
6241 and has the number J. */
6242 static rtx
6243 do_input_reload (struct insn_chain *chain, struct reload *rl, int j)
6245 register rtx retval;
6246 register rtx reloadreg = rl->reg_rtx;
6247 rtx old = rl->in;
6249 if (old == 0
6250 || reloadreg == 0
6251 || rtx_equal_p (reloadreg, old))
6252 return 0;
6254 start_sequence ();
6255 emit_input_reload_insns (chain, rl, old, j);
6256 retval = get_insns ();
6257 end_sequence ();
6258 return retval;
6261 /* Do output reloading for reload RL, which is for the insn described by
6262 CHAIN and has the number J.
6263 ??? At some point we need to support handling output reloads of
6264 JUMP_INSNs or insns that set cc0. */
6265 static rtx
6266 do_output_reload (struct insn_chain *chain, struct reload *rl, int j)
6268 rtx insn = chain->insn;
6269 register rtx old, retval;
6271 old = rl->out;
6272 if (old == 0
6273 || rl->reg_rtx == 0
6274 || rtx_equal_p (rl->reg_rtx, old))
6275 return 0;
6277 /* If is a JUMP_INSN, we can't support output reloads yet. */
6278 gcc_assert (!JUMP_P (insn));
6280 start_sequence ();
6281 emit_output_reload_insns (chain, rl, j);
6282 retval = get_insns ();
6283 end_sequence ();
6285 return retval;
6288 /* Output insns to reload values in and out of the chosen reload regs. */
6290 static void
6291 emit_reload_insns (struct insn_chain *chain)
6293 rtx insn = chain->insn;
6294 rtx mark = insn;
6295 int after = 1;
6296 int rli_nr = chain->last_rlinsn;
6298 /* Dump reloads into the dump file. */
6299 if (dump_file)
6301 fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
6302 debug_reload_to_stream (dump_file);
6305 while (rli_nr >= 0)
6307 struct reload_insn *rli = chain->rli + rli_nr;
6309 if (rli->status != RLIS_SCHEDULED)
6310 abort ();
6312 if (rli->type == RLI_INSN)
6313 after = 0;
6314 else if (! rli->ignored)
6316 rtx seq = 0;
6317 if (rli->type == RLI_OUTPUTRELOAD)
6318 seq = do_output_reload (chain, rld + rli->nr, rli->nr);
6319 else if (rli->type == RLI_INPUTRELOAD)
6320 seq = do_input_reload (chain, rld + rli->nr, rli->nr);
6321 if (seq)
6323 if (after)
6324 emit_insn_after (seq, mark);
6325 else
6327 emit_insn_before (seq, mark);
6328 mark = seq;
6332 rli_nr = rli->prev_order;
6336 /* Emit code to perform a reload from IN (which may be a reload register) to
6337 OUT (which may also be a reload register). IN or OUT is from operand
6338 OPNUM with reload type TYPE.
6340 Returns first insn emitted. */
6342 static rtx
6343 gen_reload (rtx out, rtx in, int opnum)
6345 rtx last = get_last_insn ();
6346 rtx tem;
6348 /* If IN is a paradoxical SUBREG, remove it and try to put the
6349 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
6350 if (GET_CODE (in) == SUBREG
6351 && (GET_MODE_SIZE (GET_MODE (in))
6352 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
6353 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
6354 in = SUBREG_REG (in), out = tem;
6355 else if (GET_CODE (out) == SUBREG
6356 && (GET_MODE_SIZE (GET_MODE (out))
6357 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
6358 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
6359 out = SUBREG_REG (out), in = tem;
6361 /* How to do this reload can get quite tricky. Normally, we are being
6362 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
6363 register that didn't get a hard register. In that case we can just
6364 call emit_move_insn.
6366 We can also be asked to reload a PLUS that adds a register or a MEM to
6367 another register, constant or MEM. This can occur during frame pointer
6368 elimination and while reloading addresses. This case is handled by
6369 trying to emit a single insn to perform the add. If it is not valid,
6370 we use a two insn sequence.
6372 Finally, we could be called to handle an 'o' constraint by putting
6373 an address into a register. In that case, we first try to do this
6374 with a named pattern of "reload_load_address". If no such pattern
6375 exists, we just emit a SET insn and hope for the best (it will normally
6376 be valid on machines that use 'o').
6378 This entire process is made complex because reload will never
6379 process the insns we generate here and so we must ensure that
6380 they will fit their constraints and also by the fact that parts of
6381 IN might be being reloaded separately and replaced with spill registers.
6382 Because of this, we are, in some sense, just guessing the right approach
6383 here. The one listed above seems to work.
6385 ??? At some point, this whole thing needs to be rethought. */
6387 if (GET_CODE (in) == PLUS
6388 && (REG_P (XEXP (in, 0))
6389 || GET_CODE (XEXP (in, 0)) == SUBREG
6390 || MEM_P (XEXP (in, 0)))
6391 && (REG_P (XEXP (in, 1))
6392 || GET_CODE (XEXP (in, 1)) == SUBREG
6393 || CONSTANT_P (XEXP (in, 1))
6394 || MEM_P (XEXP (in, 1))))
6396 /* We need to compute the sum of a register or a MEM and another
6397 register, constant, or MEM, and put it into the reload
6398 register. The best possible way of doing this is if the machine
6399 has a three-operand ADD insn that accepts the required operands.
6401 The simplest approach is to try to generate such an insn and see if it
6402 is recognized and matches its constraints. If so, it can be used.
6404 It might be better not to actually emit the insn unless it is valid,
6405 but we need to pass the insn as an operand to `recog' and
6406 `extract_insn' and it is simpler to emit and then delete the insn if
6407 not valid than to dummy things up. */
6409 rtx op0, op1, tem, insn;
6410 int code;
6412 op0 = find_replacement (&XEXP (in, 0));
6413 op1 = find_replacement (&XEXP (in, 1));
6415 /* Since constraint checking is strict, commutativity won't be
6416 checked, so we need to do that here to avoid spurious failure
6417 if the add instruction is two-address and the second operand
6418 of the add is the same as the reload reg, which is frequently
6419 the case. If the insn would be A = B + A, rearrange it so
6420 it will be A = A + B as constrain_operands expects. */
6422 if (REG_P (XEXP (in, 1))
6423 && REGNO (out) == REGNO (XEXP (in, 1)))
6424 tem = op0, op0 = op1, op1 = tem;
6426 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
6427 in = gen_rtx_PLUS (GET_MODE (in), op0, op1);
6429 insn = emit_insn (gen_rtx_SET (VOIDmode, out, in));
6430 code = recog_memoized (insn);
6432 if (code >= 0)
6434 extract_insn (insn);
6435 /* We want constrain operands to treat this insn strictly in
6436 its validity determination, i.e., the way it would after reload
6437 has completed. */
6438 if (constrain_operands (1))
6439 return insn;
6442 delete_insns_since (last);
6444 /* If that failed, we must use a conservative two-insn sequence.
6446 Use a move to copy one operand into the reload register. Prefer
6447 to reload a constant, MEM or pseudo since the move patterns can
6448 handle an arbitrary operand. If OP1 is not a constant, MEM or
6449 pseudo and OP1 is not a valid operand for an add instruction, then
6450 reload OP1.
6452 After reloading one of the operands into the reload register, add
6453 the reload register to the output register.
6455 If there is another way to do this for a specific machine, a
6456 DEFINE_PEEPHOLE should be specified that recognizes the sequence
6457 we emit below. */
6459 code = (int) add_optab->handlers[(int) GET_MODE (out)].insn_code;
6461 if (CONSTANT_P (op1) || MEM_P (op1) || GET_CODE (op1) == SUBREG
6462 || (REG_P (op1)
6463 && REGNO (op1) >= FIRST_PSEUDO_REGISTER)
6464 || (code != CODE_FOR_nothing
6465 && ! ((*insn_data[code].operand[2].predicate)
6466 (op1, insn_data[code].operand[2].mode))))
6467 tem = op0, op0 = op1, op1 = tem;
6469 gen_reload (out, op0, opnum);
6471 /* If OP0 and OP1 are the same, we can use OUT for OP1.
6472 This fixes a problem on the 32K where the stack pointer cannot
6473 be used as an operand of an add insn. */
6475 if (rtx_equal_p (op0, op1))
6476 op1 = out;
6478 insn = emit_insn (gen_add2_insn (out, op1));
6480 /* If that failed, copy the address register to the reload register.
6481 Then add the constant to the reload register. */
6483 code = recog_memoized (insn);
6485 if (code >= 0)
6487 extract_insn (insn);
6488 /* We want constrain operands to treat this insn strictly in
6489 its validity determination, i.e., the way it would after reload
6490 has completed. */
6491 if (constrain_operands (1))
6492 return insn;
6495 delete_insns_since (last);
6497 gen_reload (out, op1, opnum);
6498 insn = emit_insn (gen_add2_insn (out, op0));
6499 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
6502 #ifdef SECONDARY_MEMORY_NEEDED
6503 /* If we need a memory location to do the move, do it that way. */
6504 else if ((REG_P (in) || GET_CODE (in) == SUBREG)
6505 && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER
6506 && (REG_P (out) || GET_CODE (out) == SUBREG)
6507 && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
6508 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (reg_or_subregno (in)),
6509 REGNO_REG_CLASS (reg_or_subregno (out)),
6510 GET_MODE (out)))
6512 /* Get the memory to use and rewrite both registers to its mode. */
6513 rtx loc = get_secondary_mem (in, GET_MODE (out), opnum,
6514 RELOAD_FOR_NONE);
6516 if (GET_MODE (loc) != GET_MODE (out))
6517 out = gen_rtx_REG (GET_MODE (loc), REGNO (out));
6519 if (GET_MODE (loc) != GET_MODE (in))
6520 in = gen_rtx_REG (GET_MODE (loc), REGNO (in));
6522 gen_reload (loc, in, opnum);
6523 gen_reload (out, loc, opnum);
6525 #endif
6527 /* If IN is a simple operand, use gen_move_insn. */
6528 else if (OBJECT_P (in) || GET_CODE (in) == SUBREG)
6529 emit_insn (gen_move_insn (out, in));
6531 #ifdef HAVE_reload_load_address
6532 else if (HAVE_reload_load_address)
6533 emit_insn (gen_reload_load_address (out, in));
6534 #endif
6536 /* Otherwise, just write (set OUT IN) and hope for the best. */
6537 else
6538 emit_insn (gen_rtx_SET (VOIDmode, out, in));
6540 /* Return the first insn emitted.
6541 We can not just return get_last_insn, because there may have
6542 been multiple instructions emitted. Also note that gen_move_insn may
6543 emit more than one insn itself, so we can not assume that there is one
6544 insn emitted per emit_insn_before call. */
6546 return last ? NEXT_INSN (last) : get_insns ();
6549 /* Output reload-insns to reload VALUE into RELOADREG.
6550 VALUE is an autoincrement or autodecrement RTX whose operand
6551 is a register or memory location;
6552 so reloading involves incrementing that location.
6553 IN is either identical to VALUE, or some cheaper place to reload from.
6555 INC_AMOUNT is the number to increment or decrement by (always positive).
6556 This cannot be deduced from VALUE.
6558 Return the instruction that stores into RELOADREG. */
6560 static rtx
6561 inc_for_reload (rtx reloadreg, rtx in, rtx value, int inc_amount)
6563 /* REG or MEM to be copied and incremented. */
6564 rtx incloc = XEXP (value, 0);
6565 /* Nonzero if increment after copying. */
6566 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
6567 rtx last;
6568 rtx inc;
6569 rtx add_insn;
6570 int code;
6571 rtx store;
6572 rtx real_in = in == value ? XEXP (in, 0) : in;
6574 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
6575 inc_amount = -inc_amount;
6577 inc = GEN_INT (inc_amount);
6579 /* If this is post-increment, first copy the location to the reload reg. */
6580 if (post && real_in != reloadreg)
6581 emit_insn (gen_move_insn (reloadreg, real_in));
6583 if (in == value)
6585 /* See if we can directly increment INCLOC. Use a method similar to
6586 that in gen_reload. */
6588 last = get_last_insn ();
6589 add_insn = emit_insn (gen_rtx_SET (VOIDmode, incloc,
6590 gen_rtx_PLUS (GET_MODE (incloc),
6591 incloc, inc)));
6593 code = recog_memoized (add_insn);
6594 if (code >= 0)
6596 extract_insn (add_insn);
6597 if (constrain_operands (1))
6599 /* If this is a pre-increment and we have incremented the value
6600 where it lives, copy the incremented value to RELOADREG to
6601 be used as an address. */
6603 if (! post)
6604 emit_insn (gen_move_insn (reloadreg, incloc));
6606 return add_insn;
6609 delete_insns_since (last);
6612 /* If couldn't do the increment directly, must increment in RELOADREG.
6613 The way we do this depends on whether this is pre- or post-increment.
6614 For pre-increment, copy INCLOC to the reload register, increment it
6615 there, then save back. */
6617 if (! post)
6619 if (in != reloadreg)
6620 emit_insn (gen_move_insn (reloadreg, real_in));
6621 emit_insn (gen_add2_insn (reloadreg, inc));
6622 store = emit_insn (gen_move_insn (incloc, reloadreg));
6624 else
6626 /* Postincrement.
6627 Because this might be a jump insn or a compare, and because RELOADREG
6628 may not be available after the insn in an input reload, we must do
6629 the incrementation before the insn being reloaded for.
6631 We have already copied IN to RELOADREG. Increment the copy in
6632 RELOADREG, save that back, then decrement RELOADREG so it has
6633 the original value. */
6635 emit_insn (gen_add2_insn (reloadreg, inc));
6636 store = emit_insn (gen_move_insn (incloc, reloadreg));
6637 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
6640 return store;
6643 #ifdef AUTO_INC_DEC
6644 static void
6645 add_auto_inc_notes (rtx insn, rtx x)
6647 enum rtx_code code = GET_CODE (x);
6648 const char *fmt;
6649 int i, j;
6651 if (code == MEM && auto_inc_p (XEXP (x, 0)))
6653 REG_NOTES (insn)
6654 = gen_rtx_EXPR_LIST (REG_INC, XEXP (XEXP (x, 0), 0), REG_NOTES (insn));
6655 return;
6658 /* Scan all the operand sub-expressions. */
6659 fmt = GET_RTX_FORMAT (code);
6660 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6662 if (fmt[i] == 'e')
6663 add_auto_inc_notes (insn, XEXP (x, i));
6664 else if (fmt[i] == 'E')
6665 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6666 add_auto_inc_notes (insn, XVECEXP (x, i, j));
6669 #endif
6671 /* Copy EH notes from an insn to its reloads. */
6672 static void
6673 copy_eh_notes (rtx insn, rtx x)
6675 rtx eh_note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
6676 if (eh_note)
6678 for (; x != 0; x = NEXT_INSN (x))
6680 if (may_trap_p (PATTERN (x)))
6681 REG_NOTES (x)
6682 = gen_rtx_EXPR_LIST (REG_EH_REGION, XEXP (eh_note, 0),
6683 REG_NOTES (x));
6688 /* This is used by reload pass, that does emit some instructions after
6689 abnormal calls moving basic block end, but in fact it wants to emit
6690 them on the edge. Looks for abnormal call edges, find backward the
6691 proper call and fix the damage.
6693 Similar handle instructions throwing exceptions internally. */
6694 void
6695 fixup_abnormal_edges (void)
6697 bool inserted = false;
6698 basic_block bb;
6700 FOR_EACH_BB (bb)
6702 edge e;
6703 edge_iterator ei;
6705 /* Look for cases we are interested in - calls or instructions causing
6706 exceptions. */
6707 FOR_EACH_EDGE (e, ei, bb->succs)
6709 if (e->flags & EDGE_ABNORMAL_CALL)
6710 break;
6711 if ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
6712 == (EDGE_ABNORMAL | EDGE_EH))
6713 break;
6715 if (e && !CALL_P (BB_END (bb))
6716 && !can_throw_internal (BB_END (bb)))
6718 rtx insn = BB_END (bb), stop = NEXT_INSN (BB_END (bb));
6719 rtx next;
6720 FOR_EACH_EDGE (e, ei, bb->succs)
6721 if (e->flags & EDGE_FALLTHRU)
6722 break;
6723 /* Get past the new insns generated. Allow notes, as the insns may
6724 be already deleted. */
6725 while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
6726 && !can_throw_internal (insn)
6727 && insn != BB_HEAD (bb))
6728 insn = PREV_INSN (insn);
6729 gcc_assert (CALL_P (insn) || can_throw_internal (insn));
6730 BB_END (bb) = insn;
6731 inserted = true;
6732 insn = NEXT_INSN (insn);
6733 while (insn && insn != stop)
6735 next = NEXT_INSN (insn);
6736 if (INSN_P (insn))
6738 delete_insn (insn);
6740 /* Sometimes there's still the return value USE.
6741 If it's placed after a trapping call (i.e. that
6742 call is the last insn anyway), we have no fallthru
6743 edge. Simply delete this use and don't try to insert
6744 on the non-existent edge. */
6745 if (GET_CODE (PATTERN (insn)) != USE)
6747 /* We're not deleting it, we're moving it. */
6748 INSN_DELETED_P (insn) = 0;
6749 PREV_INSN (insn) = NULL_RTX;
6750 NEXT_INSN (insn) = NULL_RTX;
6752 insert_insn_on_edge (insn, e);
6755 insn = next;
6759 /* We've possibly turned single trapping insn into multiple ones. */
6760 if (flag_non_call_exceptions)
6762 sbitmap blocks;
6763 blocks = sbitmap_alloc (last_basic_block);
6764 sbitmap_ones (blocks);
6765 find_many_sub_basic_blocks (blocks);
6767 if (inserted)
6768 commit_edge_insertions ();