gcc/reload1.c
1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 88, 89, 92, 93, 1994 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
21 #include <stdio.h>
22 #include "config.h"
23 #include "rtl.h"
24 #include "obstack.h"
25 #include "insn-config.h"
26 #include "insn-flags.h"
27 #include "insn-codes.h"
28 #include "flags.h"
29 #include "expr.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "reload.h"
33 #include "recog.h"
34 #include "basic-block.h"
35 #include "output.h"
37 /* This file contains the reload pass of the compiler, which is
38 run after register allocation has been done. It checks that
39 each insn is valid (operands required to be in registers really
40 are in registers of the proper class) and fixes up invalid ones
41 by copying values temporarily into registers for the insns
42 that need them.
44 The results of register allocation are described by the vector
45 reg_renumber; the insns still contain pseudo regs, but reg_renumber
46 can be used to find which hard reg, if any, a pseudo reg is in.
48 The technique we always use is to free up a few hard regs that are
49 called ``reload regs'', and for each place where a pseudo reg
50 must be in a hard reg, copy it temporarily into one of the reload regs.
52 All the pseudos that were formerly allocated to the hard regs that
53 are now in use as reload regs must be ``spilled''. This means
54 that they go to other hard regs, or to stack slots if no other
55 available hard regs can be found. Spilling can invalidate more
56 insns, requiring additional need for reloads, so we must keep checking
57 until the process stabilizes.
59 For machines with different classes of registers, we must keep track
60 of the register class needed for each reload, and make sure that
61 we allocate enough reload registers of each class.
63 The file reload.c contains the code that checks one insn for
64 validity and reports the reloads that it needs. This file
65 is in charge of scanning the entire rtl code, accumulating the
66 reload needs, spilling, assigning reload registers to use for
67 fixing up each insn, and generating the new insns to copy values
68 into the reload registers. */
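/* To make the scheme above concrete (this is only an illustration of the
   process just described, not a description of any one code path below):
   if pseudo 100 ends up with no hard register but some insn requires it
   to be in one, reload picks a spill reg, evicts to memory whatever
   pseudo had been living in that hard reg, emits a copy from pseudo
   100's memory location into the spill reg just before the insn, and
   rewrites the insn to use the spill reg.  */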
71 #ifndef REGISTER_MOVE_COST
72 #define REGISTER_MOVE_COST(x, y) 2
73 #endif
75 #ifndef MEMORY_MOVE_COST
76 #define MEMORY_MOVE_COST(x) 4
77 #endif
79 /* During reload_as_needed, element N contains a REG rtx for the hard reg
80 into which reg N has been reloaded (perhaps for a previous insn). */
81 static rtx *reg_last_reload_reg;
83 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
84 for an output reload that stores into reg N. */
85 static char *reg_has_output_reload;
87 /* Indicates which hard regs are reload-registers for an output reload
88 in the current insn. */
89 static HARD_REG_SET reg_is_output_reload;
91 /* Element N is the constant value to which pseudo reg N is equivalent,
92 or zero if pseudo reg N is not equivalent to a constant.
93 find_reloads looks at this in order to replace pseudo reg N
94 with the constant it stands for. */
95 rtx *reg_equiv_constant;
97 /* Element N is a memory location to which pseudo reg N is equivalent,
98 prior to any register elimination (such as frame pointer to stack
99 pointer). Depending on whether or not it is a valid address, this value
100 is transferred to either reg_equiv_address or reg_equiv_mem. */
101 rtx *reg_equiv_memory_loc;
103 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
104 This is used when the address is not valid as a memory address
105 (because its displacement is too big for the machine.) */
106 rtx *reg_equiv_address;
108 /* Element N is the memory slot to which pseudo reg N is equivalent,
109 or zero if pseudo reg N is not equivalent to a memory slot. */
110 rtx *reg_equiv_mem;
112 /* Widest width in which each pseudo reg is referred to (via subreg). */
113 static int *reg_max_ref_width;
115 /* Element N is the insn that initialized reg N from its equivalent
116 constant or memory slot. */
117 static rtx *reg_equiv_init;
119 /* During reload_as_needed, element N contains the last pseudo regno
120 reloaded into the Nth reload register. This vector is in parallel
121 with spill_regs. If that pseudo reg occupied more than one register,
122 reg_reloaded_contents points to that pseudo for each spill register in
123 use; all of these must remain set for an inheritance to occur. */
124 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
126 /* During reload_as_needed, element N contains the insn for which
127 the Nth reload register was last used. This vector is in parallel
128 with spill_regs, and its contents are significant only when
129 reg_reloaded_contents is significant. */
130 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
132 /* Number of spill-regs so far; number of valid elements of spill_regs. */
133 static int n_spills;
135 /* In parallel with spill_regs, contains REG rtx's for those regs.
136 Holds the last rtx used for any given reg, or 0 if it has never
137 been used for spilling yet. This rtx is reused, provided it has
138 the proper mode. */
139 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
141 /* In parallel with spill_regs, contains nonzero for a spill reg
142 that was stored after the last time it was used.
143 The precise value is the insn generated to do the store. */
144 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
146 /* This table is the inverse mapping of spill_regs:
147 indexed by hard reg number,
148 it contains the position of that reg in spill_regs,
149 or -1 for something that is not in spill_regs. */
150 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
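/* For example, if spill_regs[2] is hard register 8, then
   spill_reg_order[8] is 2; spill_reg_order is -1 for every hard
   register that is not currently a spill register.  */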
152 /* This reg set indicates registers that may not be used for retrying global
153 allocation. The registers that may not be used include all spill registers
154 and the frame pointer (if we are using one). */
155 HARD_REG_SET forbidden_regs;
157 /* This reg set indicates registers that are not good for spill registers.
158 They will not be used to complete groups of spill registers. This includes
159 all fixed registers, registers that may be eliminated, and, if
160 SMALL_REGISTER_CLASSES is not defined, registers explicitly used in the rtl.
162 (spill_reg_order prevents these registers from being used to start a
163 group.) */
164 static HARD_REG_SET bad_spill_regs;
166 /* Describes order of use of registers for reloading
167 of spilled pseudo-registers. `spills' is the number of
168 elements that are actually valid; new ones are added at the end. */
169 static short spill_regs[FIRST_PSEUDO_REGISTER];
171 /* Describes order of preference for putting regs into spill_regs.
172 Contains the numbers of all the hard regs, in order most preferred first.
173 This order is different for each function.
174 It is set up by order_regs_for_reload.
175 Empty elements at the end contain -1. */
176 static short potential_reload_regs[FIRST_PSEUDO_REGISTER];
178 /* 1 for a hard register that appears explicitly in the rtl
179 (for example, function value registers, special registers
180 used by insns, structure value pointer registers). */
181 static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];
183 /* Indicates if a register was counted against the need for
184 groups. 0 means it can count against max_nongroup instead. */
185 static HARD_REG_SET counted_for_groups;
187 /* Indicates if a register was counted against the need for
188 non-groups. 0 means it can become part of a new group.
189 During choose_reload_regs, 1 here means don't use this reg
190 as part of a group, even if it seems to be otherwise ok. */
191 static HARD_REG_SET counted_for_nongroups;
193 /* Indexed by pseudo reg number N,
194 says may not delete stores into the real (memory) home of pseudo N.
195 This is set if we already substituted a memory equivalent in some uses,
196 which happens when we have to eliminate the fp from it. */
197 static char *cannot_omit_stores;
199 /* Nonzero if indirect addressing is supported on the machine; this means
200 that spilling (REG n) does not require reloading it into a register in
201 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
202 value indicates the level of indirect addressing supported, e.g., two
203 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
204 a hard register. */
206 static char spill_indirect_levels;
208 /* Nonzero if indirect addressing is supported when the innermost MEM is
209 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
210 which these are valid is the same as spill_indirect_levels, above. */
212 char indirect_symref_ok;
214 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
216 char double_reg_address_ok;
218 /* Record the stack slot for each spilled hard register. */
220 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
222 /* Width allocated so far for that stack slot. */
224 static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
226 /* Indexed by register class and basic block number, nonzero if there is
227 any need for a spill register of that class in that basic block.
228 The pointer is 0 if we did stupid allocation and don't know
229 the structure of basic blocks. */
231 char *basic_block_needs[N_REG_CLASSES];
233 /* First uid used by insns created by reload in this function.
234 Used in find_equiv_reg. */
235 int reload_first_uid;
237 /* Flag set by local-alloc or global-alloc if anything is live in
238 a call-clobbered reg across calls. */
240 int caller_save_needed;
242 /* Set to 1 while reload_as_needed is operating.
243 Required by some machines to handle any generated moves differently. */
245 int reload_in_progress = 0;
247 /* These arrays record the insn_code of insns that may be needed to
248 perform input and output reloads of special objects. They provide a
249 place to pass a scratch register. */
251 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
252 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
254 /* This obstack is used for allocation of rtl during register elimination.
255 The allocated storage can be freed once find_reloads has processed the
256 insn. */
258 struct obstack reload_obstack;
259 char *reload_firstobj;
261 #define obstack_chunk_alloc xmalloc
262 #define obstack_chunk_free free
264 /* List of labels that must never be deleted. */
265 extern rtx forced_labels;
267 /* This structure is used to record information about register eliminations.
268 Each array entry describes one possible way of eliminating a register
269 in favor of another. If there is more than one way of eliminating a
270 particular register, the most preferred should be specified first. */
272 static struct elim_table
274 int from; /* Register number to be eliminated. */
275 int to; /* Register number used as replacement. */
276 int initial_offset; /* Initial difference between values. */
277 int can_eliminate; /* Non-zero if this elimination can be done. */
278 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
279 insns made by reload. */
280 int offset; /* Current offset between the two regs. */
281 int max_offset; /* Maximum offset between the two regs. */
282 int previous_offset; /* Offset at end of previous insn. */
283 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
284 rtx from_rtx; /* REG rtx for the register to be eliminated.
285 We cannot simply compare the number since
286 we might then spuriously replace a hard
287 register corresponding to a pseudo
288 assigned to the reg to be eliminated. */
289 rtx to_rtx; /* REG rtx for the replacement. */
290 } reg_eliminate[] =
292 /* If a set of eliminable registers was specified, define the table from it.
293 Otherwise, default to the normal case of the frame pointer being
294 replaced by the stack pointer. */
296 #ifdef ELIMINABLE_REGS
297 ELIMINABLE_REGS;
298 #else
299 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
300 #endif
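/* A typical ELIMINABLE_REGS definition in a target's tm.h is a brace-list
   of pairs such as {ARG_POINTER_REGNUM, STACK_POINTER_REGNUM} and
   {FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}, most preferred first;
   the default above handles only the frame pointer.  */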
302 #define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
304 /* Record the number of pending eliminations that have an offset not equal
305 to their initial offset. If non-zero, we use a new copy of each
306 replacement result in any insns encountered. */
307 static int num_not_at_initial_offset;
309 /* Count the number of registers that we may be able to eliminate. */
310 static int num_eliminable;
312 /* For each label, we record the offset of each elimination. If we reach
313 a label by more than one path and an offset differs, we cannot do the
314 elimination. This information is indexed by the number of the label.
315 The first table is an array of flags that records whether we have yet
316 encountered a label and the second table is an array of arrays, one
317 entry in the latter array for each elimination. */
319 static char *offsets_known_at;
320 static int (*offsets_at)[NUM_ELIMINABLE_REGS];
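/* In other words, offsets_at[L][E] is the recorded offset of elimination
   E at the label whose number is L, and offsets_known_at[L] says whether
   label L has been reached yet.  */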
322 /* Number of labels in the current function. */
324 static int num_labels;
326 struct hard_reg_n_uses { int regno; int uses; };
328 static int possible_group_p PROTO((int, int *));
329 static void count_possible_groups PROTO((int *, enum machine_mode *,
330 int *));
331 static int modes_equiv_for_class_p PROTO((enum machine_mode,
332 enum machine_mode,
333 enum reg_class));
334 static void spill_failure PROTO((rtx));
335 static int new_spill_reg PROTO((int, int, int *, int *, int,
336 FILE *));
337 static void delete_dead_insn PROTO((rtx));
338 static void alter_reg PROTO((int, int));
339 static void mark_scratch_live PROTO((rtx));
340 static void set_label_offsets PROTO((rtx, rtx, int));
341 static int eliminate_regs_in_insn PROTO((rtx, int));
342 static void mark_not_eliminable PROTO((rtx, rtx));
343 static int spill_hard_reg PROTO((int, int, FILE *, int));
344 static void scan_paradoxical_subregs PROTO((rtx));
345 static int hard_reg_use_compare PROTO((struct hard_reg_n_uses *,
346 struct hard_reg_n_uses *));
347 static void order_regs_for_reload PROTO((void));
348 static int compare_spill_regs PROTO((short *, short *));
349 static void reload_as_needed PROTO((rtx, int));
350 static void forget_old_reloads_1 PROTO((rtx, rtx));
351 static int reload_reg_class_lower PROTO((short *, short *));
352 static void mark_reload_reg_in_use PROTO((int, int, enum reload_type,
353 enum machine_mode));
354 static void clear_reload_reg_in_use PROTO((int, int, enum reload_type,
355 enum machine_mode));
356 static int reload_reg_free_p PROTO((int, int, enum reload_type));
357 static int reload_reg_free_before_p PROTO((int, int, enum reload_type));
358 static int reload_reg_reaches_end_p PROTO((int, int, enum reload_type));
359 static int reloads_conflict PROTO((int, int));
360 static int allocate_reload_reg PROTO((int, rtx, int, int));
361 static void choose_reload_regs PROTO((rtx, rtx));
362 static void merge_assigned_reloads PROTO((rtx));
363 static void emit_reload_insns PROTO((rtx));
364 static void delete_output_reload PROTO((rtx, int, rtx));
365 static void inc_for_reload PROTO((rtx, rtx, int));
366 static int constraint_accepts_reg_p PROTO((char *, rtx));
367 static int count_occurrences PROTO((rtx, rtx));
369 /* Initialize the reload pass once per compilation. */
371 void
372 init_reload ()
374 register int i;
376 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
377 Set spill_indirect_levels to the number of levels such addressing is
378 permitted, zero if it is not permitted at all. */
380 register rtx tem
381 = gen_rtx (MEM, Pmode,
382 gen_rtx (PLUS, Pmode,
383 gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
384 GEN_INT (4)));
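/* At this point TEM is (mem:Pmode (plus:Pmode (reg:Pmode N) (const_int 4)))
   for a register number just past the virtual registers; each trip through
   the loop below wraps TEM in one more MEM to test one more level of
   indirection.  */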
385 spill_indirect_levels = 0;
387 while (memory_address_p (QImode, tem))
389 spill_indirect_levels++;
390 tem = gen_rtx (MEM, Pmode, tem);
393 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
395 tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
396 indirect_symref_ok = memory_address_p (QImode, tem);
398 /* See if reg+reg is a valid (and offsettable) address. */
400 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
402 tem = gen_rtx (PLUS, Pmode,
403 gen_rtx (REG, Pmode, HARD_FRAME_POINTER_REGNUM),
404 gen_rtx (REG, Pmode, i));
405 /* This way, we make sure that reg+reg is an offsettable address. */
406 tem = plus_constant (tem, 4);
408 if (memory_address_p (QImode, tem))
410 double_reg_address_ok = 1;
411 break;
415 /* Initialize obstack for our rtl allocation. */
416 gcc_obstack_init (&reload_obstack);
417 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
420 /* Main entry point for the reload pass.
422 FIRST is the first insn of the function being compiled.
424 GLOBAL nonzero means we were called from global_alloc
425 and should attempt to reallocate any pseudoregs that we
426 displace from hard regs we will use for reloads.
427 If GLOBAL is zero, we do not have enough information to do that,
428 so any pseudo reg that is spilled must go to the stack.
430 DUMPFILE is the global-reg debugging dump file stream, or 0.
431 If it is nonzero, messages are written to it to describe
432 which registers are seized as reload regs, which pseudo regs
433 are spilled from them, and where the pseudo regs are reallocated to.
435 Return value is nonzero if reload failed
436 and we must not do any more for this function. */
439 reload (first, global, dumpfile)
440 rtx first;
441 int global;
442 FILE *dumpfile;
444 register int class;
445 register int i, j, k;
446 register rtx insn;
447 register struct elim_table *ep;
449 int something_changed;
450 int something_needs_reloads;
451 int something_needs_elimination;
452 int new_basic_block_needs;
453 enum reg_class caller_save_spill_class = NO_REGS;
454 int caller_save_group_size = 1;
456 /* Nonzero means we couldn't get enough spill regs. */
457 int failure = 0;
459 /* The basic block number currently being processed for INSN. */
460 int this_block;
462 /* Make sure even insns with volatile mem refs are recognizable. */
463 init_recog ();
465 /* Enable find_equiv_reg to distinguish insns made by reload. */
466 reload_first_uid = get_max_uid ();
468 for (i = 0; i < N_REG_CLASSES; i++)
469 basic_block_needs[i] = 0;
471 #ifdef SECONDARY_MEMORY_NEEDED
472 /* Initialize the secondary memory table. */
473 clear_secondary_mem ();
474 #endif
476 /* Remember which hard regs appear explicitly
477 before we merge into `regs_ever_live' the ones in which
478 pseudo regs have been allocated. */
479 bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);
481 /* We don't have a stack slot for any spill reg yet. */
482 bzero ((char *) spill_stack_slot, sizeof spill_stack_slot);
483 bzero ((char *) spill_stack_slot_width, sizeof spill_stack_slot_width);
485 /* Initialize the save area information for caller-save, in case some
486 are needed. */
487 init_save_areas ();
489 /* Compute which hard registers are now in use
490 as homes for pseudo registers.
491 This is done here rather than (eg) in global_alloc
492 because this point is reached even if not optimizing. */
494 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
495 mark_home_live (i);
497 for (i = 0; i < scratch_list_length; i++)
498 if (scratch_list[i])
499 mark_scratch_live (scratch_list[i]);
501 /* Make sure that the last insn in the chain
502 is not something that needs reloading. */
503 emit_note (NULL_PTR, NOTE_INSN_DELETED);
505 /* Find all the pseudo registers that didn't get hard regs
506 but do have known equivalent constants or memory slots.
507 These include parameters (known equivalent to parameter slots)
508 and cse'd or loop-moved constant memory addresses.
510 Record constant equivalents in reg_equiv_constant
511 so they will be substituted by find_reloads.
512 Record memory equivalents in reg_mem_equiv so they can
513 be substituted eventually by altering the REG-rtx's. */
515 reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
516 bzero ((char *) reg_equiv_constant, max_regno * sizeof (rtx));
517 reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
518 bzero ((char *) reg_equiv_memory_loc, max_regno * sizeof (rtx));
519 reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
520 bzero ((char *) reg_equiv_mem, max_regno * sizeof (rtx));
521 reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
522 bzero ((char *) reg_equiv_init, max_regno * sizeof (rtx));
523 reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
524 bzero ((char *) reg_equiv_address, max_regno * sizeof (rtx));
525 reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
526 bzero ((char *) reg_max_ref_width, max_regno * sizeof (int));
527 cannot_omit_stores = (char *) alloca (max_regno);
528 bzero (cannot_omit_stores, max_regno);
530 #ifdef SMALL_REGISTER_CLASSES
531 CLEAR_HARD_REG_SET (forbidden_regs);
532 #endif
534 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
535 Also find all paradoxical subregs and find largest such for each pseudo.
536 On machines with small register classes, record hard registers that
537 are used for user variables. These can never be used for spills. */
539 for (insn = first; insn; insn = NEXT_INSN (insn))
541 rtx set = single_set (insn);
543 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
545 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
546 if (note
547 #ifdef LEGITIMATE_PIC_OPERAND_P
548 && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
549 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
550 #endif
553 rtx x = XEXP (note, 0);
554 i = REGNO (SET_DEST (set));
555 if (i > LAST_VIRTUAL_REGISTER)
557 if (GET_CODE (x) == MEM)
558 reg_equiv_memory_loc[i] = x;
559 else if (CONSTANT_P (x))
561 if (LEGITIMATE_CONSTANT_P (x))
562 reg_equiv_constant[i] = x;
563 else
564 reg_equiv_memory_loc[i]
565 = force_const_mem (GET_MODE (SET_DEST (set)), x);
567 else
568 continue;
570 /* If this register is being made equivalent to a MEM
571 and the MEM is not SET_SRC, the equivalencing insn
572 is one with the MEM as a SET_DEST and it occurs later.
573 So don't mark this insn now. */
574 if (GET_CODE (x) != MEM
575 || rtx_equal_p (SET_SRC (set), x))
576 reg_equiv_init[i] = insn;
581 /* If this insn is setting a MEM from a register equivalent to it,
582 this is the equivalencing insn. */
583 else if (set && GET_CODE (SET_DEST (set)) == MEM
584 && GET_CODE (SET_SRC (set)) == REG
585 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
586 && rtx_equal_p (SET_DEST (set),
587 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
588 reg_equiv_init[REGNO (SET_SRC (set))] = insn;
590 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
591 scan_paradoxical_subregs (PATTERN (insn));
594 /* Does this function require a frame pointer? */
596 frame_pointer_needed = (! flag_omit_frame_pointer
597 #ifdef EXIT_IGNORE_STACK
598 /* ?? If EXIT_IGNORE_STACK is set, we will not save
599 and restore sp for alloca. So we can't eliminate
600 the frame pointer in that case. At some point,
601 we should improve this by emitting the
602 sp-adjusting insns for this case. */
603 || (current_function_calls_alloca
604 && EXIT_IGNORE_STACK)
605 #endif
606 || FRAME_POINTER_REQUIRED);
608 num_eliminable = 0;
610 /* Initialize the table of registers to eliminate. The way we do this
611 depends on how the eliminable registers were defined. */
612 #ifdef ELIMINABLE_REGS
613 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
615 ep->can_eliminate = ep->can_eliminate_previous
616 = (CAN_ELIMINATE (ep->from, ep->to)
617 && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
619 #else
620 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
621 = ! frame_pointer_needed;
622 #endif
624 /* Count the number of eliminable registers and build the FROM and TO
625 REG rtx's. Note that code in gen_rtx will cause, e.g.,
626 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
627 We depend on this. */
628 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
630 num_eliminable += ep->can_eliminate;
631 ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
632 ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
635 num_labels = max_label_num () - get_first_label_num ();
637 /* Allocate the tables used to store offset information at labels. */
638 offsets_known_at = (char *) alloca (num_labels);
639 offsets_at
640 = (int (*)[NUM_ELIMINABLE_REGS])
641 alloca (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
643 offsets_known_at -= get_first_label_num ();
644 offsets_at -= get_first_label_num ();
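/* The two adjustments above bias the table pointers so that they can be
   indexed directly by label number, even though label numbers need not
   start at zero.  */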
646 /* Alter each pseudo-reg rtx to contain its hard reg number.
647 Assign stack slots to the pseudos that lack hard regs or equivalents.
648 Do not touch virtual registers. */
650 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
651 alter_reg (i, -1);
653 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done here
654 because the stack size may be a part of the offset computation for
655 register elimination. */
656 assign_stack_local (BLKmode, 0, 0);
658 /* If we have some registers we think can be eliminated, scan all insns to
659 see if there is an insn that sets one of these registers to something
660 other than itself plus a constant. If so, the register cannot be
661 eliminated. Doing this scan here eliminates an extra pass through the
662 main reload loop in the most common case where register elimination
663 cannot be done. */
664 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
665 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
666 || GET_CODE (insn) == CALL_INSN)
667 note_stores (PATTERN (insn), mark_not_eliminable);
669 #ifndef REGISTER_CONSTRAINTS
670 /* If all the pseudo regs have hard regs,
671 except for those that are never referenced,
672 we know that no reloads are needed. */
673 /* But that is not true if there are register constraints, since
674 in that case some pseudos might be in the wrong kind of hard reg. */
676 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
677 if (reg_renumber[i] == -1 && reg_n_refs[i] != 0)
678 break;
680 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
681 return;
682 #endif
684 /* Compute the order of preference for hard registers to spill.
685 Store them by decreasing preference in potential_reload_regs. */
687 order_regs_for_reload ();
689 /* So far, no hard regs have been spilled. */
690 n_spills = 0;
691 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
692 spill_reg_order[i] = -1;
694 /* On most machines, we can't use any register explicitly used in the
695 rtl as a spill register. But on some, we have to. Those will have
696 taken care to keep the life of hard regs as short as possible. */
698 #ifndef SMALL_REGISTER_CLASSES
699 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
700 #endif
702 /* Spill any hard regs that we know we can't eliminate. */
703 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
704 if (! ep->can_eliminate)
705 spill_hard_reg (ep->from, global, dumpfile, 1);
707 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
708 if (frame_pointer_needed)
709 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
710 #endif
712 if (global)
713 for (i = 0; i < N_REG_CLASSES; i++)
715 basic_block_needs[i] = (char *) alloca (n_basic_blocks);
716 bzero (basic_block_needs[i], n_basic_blocks);
719 /* From now on, we need to emit any moves without making new pseudos. */
720 reload_in_progress = 1;
722 /* This loop scans the entire function each go-round
723 and repeats until one repetition spills no additional hard regs. */
725 /* This flag is set when a pseudo reg is spilled,
726 to require another pass. Note that getting an additional reload
727 reg does not necessarily imply any pseudo reg was spilled;
728 sometimes we find a reload reg that no pseudo reg was allocated in. */
729 something_changed = 1;
730 /* This flag is set if there are any insns that require reloading. */
731 something_needs_reloads = 0;
732 /* This flag is set if there are any insns that require register
733 eliminations. */
734 something_needs_elimination = 0;
735 while (something_changed)
737 rtx after_call = 0;
739 /* For each class, number of reload regs needed in that class.
740 This is the maximum over all insns of the needs in that class
741 of the individual insn. */
742 int max_needs[N_REG_CLASSES];
743 /* For each class, size of group of consecutive regs
744 that is needed for the reloads of this class. */
745 int group_size[N_REG_CLASSES];
746 /* For each class, max number of consecutive groups needed.
747 (Each group contains group_size[CLASS] consecutive registers.) */
748 int max_groups[N_REG_CLASSES];
749 /* For each class, max number needed of regs that don't belong
750 to any of the groups. */
751 int max_nongroups[N_REG_CLASSES];
752 /* For each class, the machine mode which requires consecutive
753 groups of regs of that class.
754 If two different modes ever require groups of one class,
755 they must be the same size and equally restrictive for that class,
756 otherwise we can't handle the complexity. */
757 enum machine_mode group_mode[N_REG_CLASSES];
758 /* Record the insn where each maximum need is first found. */
759 rtx max_needs_insn[N_REG_CLASSES];
760 rtx max_groups_insn[N_REG_CLASSES];
761 rtx max_nongroups_insn[N_REG_CLASSES];
762 rtx x;
763 int starting_frame_size = get_frame_size ();
764 int previous_frame_pointer_needed = frame_pointer_needed;
765 static char *reg_class_names[] = REG_CLASS_NAMES;
767 something_changed = 0;
768 bzero ((char *) max_needs, sizeof max_needs);
769 bzero ((char *) max_groups, sizeof max_groups);
770 bzero ((char *) max_nongroups, sizeof max_nongroups);
771 bzero ((char *) max_needs_insn, sizeof max_needs_insn);
772 bzero ((char *) max_groups_insn, sizeof max_groups_insn);
773 bzero ((char *) max_nongroups_insn, sizeof max_nongroups_insn);
774 bzero ((char *) group_size, sizeof group_size);
775 for (i = 0; i < N_REG_CLASSES; i++)
776 group_mode[i] = VOIDmode;
778 /* Keep track of which basic blocks are needing the reloads. */
779 this_block = 0;
781 /* Remember whether any element of basic_block_needs
782 changes from 0 to 1 in this pass. */
783 new_basic_block_needs = 0;
785 /* Reset all offsets on eliminable registers to their initial values. */
786 #ifdef ELIMINABLE_REGS
787 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
789 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
790 ep->previous_offset = ep->offset
791 = ep->max_offset = ep->initial_offset;
793 #else
794 #ifdef INITIAL_FRAME_POINTER_OFFSET
795 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
796 #else
797 if (!FRAME_POINTER_REQUIRED)
798 abort ();
799 reg_eliminate[0].initial_offset = 0;
800 #endif
801 reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
802 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
803 #endif
805 num_not_at_initial_offset = 0;
807 bzero ((char *) &offsets_known_at[get_first_label_num ()], num_labels);
809 /* Set a known offset for each forced label to be at the initial offset
810 of each elimination. We do this because we assume that all
811 computed jumps occur from a location where each elimination is
812 at its initial offset. */
814 for (x = forced_labels; x; x = XEXP (x, 1))
815 if (XEXP (x, 0))
816 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
818 /* For each pseudo register that has an equivalent location defined,
819 try to eliminate any eliminable registers (such as the frame pointer)
820 assuming initial offsets for the replacement register, which
821 is the normal case.
823 If the resulting location is directly addressable, substitute
824 the MEM we just got directly for the old REG.
826 If it is not addressable but is a constant or the sum of a hard reg
827 and constant, it is probably not addressable because the constant is
828 out of range, in that case record the address; we will generate
829 hairy code to compute the address in a register each time it is
830 needed. Similarly if it is a hard register, but one that is not
831 valid as an address register.
833 If the location is not addressable, but does not have one of the
834 above forms, assign a stack slot. We have to do this to avoid the
835 potential of producing lots of reloads if, e.g., a location involves
836 a pseudo that didn't get a hard register and has an equivalent memory
837 location that also involves a pseudo that didn't get a hard register.
839 Perhaps at some point we will improve reload_when_needed handling
840 so this problem goes away. But that's very hairy. */
842 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
843 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
845 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
847 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
848 XEXP (x, 0)))
849 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
850 else if (CONSTANT_P (XEXP (x, 0))
851 || (GET_CODE (XEXP (x, 0)) == REG
852 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
853 || (GET_CODE (XEXP (x, 0)) == PLUS
854 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
855 && (REGNO (XEXP (XEXP (x, 0), 0))
856 < FIRST_PSEUDO_REGISTER)
857 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
858 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
859 else
861 /* Make a new stack slot. Then indicate that something
862 changed so we go back and recompute offsets for
863 eliminable registers because the allocation of memory
864 below might change some offset. reg_equiv_{mem,address}
865 will be set up for this pseudo on the next pass around
866 the loop. */
867 reg_equiv_memory_loc[i] = 0;
868 reg_equiv_init[i] = 0;
869 alter_reg (i, -1);
870 something_changed = 1;
874 /* If we allocated another pseudo to the stack, redo elimination
875 bookkeeping. */
876 if (something_changed)
877 continue;
879 /* If caller-saves needs a group, initialize the group to include
880 the size and mode required for caller-saves. */
882 if (caller_save_group_size > 1)
884 group_mode[(int) caller_save_spill_class] = Pmode;
885 group_size[(int) caller_save_spill_class] = caller_save_group_size;
888 /* Compute the most additional registers needed by any instruction.
889 Collect information separately for each class of regs. */
891 for (insn = first; insn; insn = NEXT_INSN (insn))
893 if (global && this_block + 1 < n_basic_blocks
894 && insn == basic_block_head[this_block+1])
895 ++this_block;
897 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
898 might include REG_LABEL), we need to see what effects this
899 has on the known offsets at labels. */
901 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
902 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
903 && REG_NOTES (insn) != 0))
904 set_label_offsets (insn, insn, 0);
906 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
908 /* Nonzero means don't use a reload reg that overlaps
909 the place where a function value can be returned. */
910 rtx avoid_return_reg = 0;
912 rtx old_body = PATTERN (insn);
913 int old_code = INSN_CODE (insn);
914 rtx old_notes = REG_NOTES (insn);
915 int did_elimination = 0;
917 /* To compute the number of reload registers of each class
918 needed for an insn, we must simulate what choose_reload_regs
919 can do. We do this by splitting an insn into an "input" and
920 an "output" part. RELOAD_OTHER reloads are used in both.
921 The input part uses those reloads, RELOAD_FOR_INPUT reloads,
922 which must be live over the entire input section of reloads,
923 and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
924 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
925 inputs.
927 The registers needed for output are RELOAD_OTHER and
928 RELOAD_FOR_OUTPUT, which are live for the entire output
929 portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
930 reloads for each operand.
932 The total number of registers needed is the maximum of the
933 inputs and outputs. */
935 struct needs
937 /* [0] is normal, [1] is nongroup. */
938 int regs[2][N_REG_CLASSES];
939 int groups[N_REG_CLASSES];
942 /* Each `struct needs' corresponds to one RELOAD_... type. */
943 struct {
944 struct needs other;
945 struct needs input;
946 struct needs output;
947 struct needs insn;
948 struct needs other_addr;
949 struct needs op_addr;
950 struct needs op_addr_reload;
951 struct needs in_addr[MAX_RECOG_OPERANDS];
952 struct needs out_addr[MAX_RECOG_OPERANDS];
953 } insn_needs;
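/* Note that in_addr and out_addr above are indexed by operand number,
   while each of the other members accumulates the needs of one
   RELOAD_... type for the insn as a whole.  */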
955 /* If needed, eliminate any eliminable registers. */
956 if (num_eliminable)
957 did_elimination = eliminate_regs_in_insn (insn, 0);
959 #ifdef SMALL_REGISTER_CLASSES
960 /* Set avoid_return_reg if this is an insn
961 that might use the value of a function call. */
962 if (GET_CODE (insn) == CALL_INSN)
964 if (GET_CODE (PATTERN (insn)) == SET)
965 after_call = SET_DEST (PATTERN (insn));
966 else if (GET_CODE (PATTERN (insn)) == PARALLEL
967 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
968 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
969 else
970 after_call = 0;
972 else if (after_call != 0
973 && !(GET_CODE (PATTERN (insn)) == SET
974 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
976 if (reg_referenced_p (after_call, PATTERN (insn)))
977 avoid_return_reg = after_call;
978 after_call = 0;
980 #endif /* SMALL_REGISTER_CLASSES */
982 /* Analyze the instruction. */
983 find_reloads (insn, 0, spill_indirect_levels, global,
984 spill_reg_order);
986 /* Remember for later shortcuts which insns had any reloads or
987 register eliminations.
989 One might think that it would be worthwhile to mark insns
990 that need register replacements but not reloads, but this is
991 not safe because find_reloads may do some manipulation of
992 the insn (such as swapping commutative operands), which would
993 be lost when we restore the old pattern after register
994 replacement. So the actions of find_reloads must be redone in
995 subsequent passes or in reload_as_needed.
997 However, it is safe to mark insns that need reloads
998 but not register replacement. */
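/* The marking is done through the insn's mode: QImode if register
   eliminations were done, else HImode if the insn needs reloads, else an
   existing DImode marking is preserved, else VOIDmode.  */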
1000 PUT_MODE (insn, (did_elimination ? QImode
1001 : n_reloads ? HImode
1002 : GET_MODE (insn) == DImode ? DImode
1003 : VOIDmode));
1005 /* Discard any register replacements done. */
1006 if (did_elimination)
1008 obstack_free (&reload_obstack, reload_firstobj);
1009 PATTERN (insn) = old_body;
1010 INSN_CODE (insn) = old_code;
1011 REG_NOTES (insn) = old_notes;
1012 something_needs_elimination = 1;
1015 /* If this insn has no reloads, we need not do anything except
1016 in the case of a CALL_INSN when we have caller-saves and
1017 caller-save needs reloads. */
1019 if (n_reloads == 0
1020 && ! (GET_CODE (insn) == CALL_INSN
1021 && caller_save_spill_class != NO_REGS))
1022 continue;
1024 something_needs_reloads = 1;
1025 bzero ((char *) &insn_needs, sizeof insn_needs);
1027 /* Count each reload once in every class
1028 containing the reload's own class. */
1030 for (i = 0; i < n_reloads; i++)
1032 register enum reg_class *p;
1033 enum reg_class class = reload_reg_class[i];
1034 int size;
1035 enum machine_mode mode;
1036 int nongroup_need;
1037 struct needs *this_needs;
1039 /* Don't count the dummy reloads, for which one of the
1040 regs mentioned in the insn can be used for reloading.
1041 Don't count optional reloads.
1042 Don't count reloads that got combined with others. */
1043 if (reload_reg_rtx[i] != 0
1044 || reload_optional[i] != 0
1045 || (reload_out[i] == 0 && reload_in[i] == 0
1046 && ! reload_secondary_p[i]))
1047 continue;
1049 /* Show that a reload register of this class is needed
1050 in this basic block. We do not use insn_needs and
1051 insn_groups because they are overly conservative for
1052 this purpose. */
1053 if (global && ! basic_block_needs[(int) class][this_block])
1055 basic_block_needs[(int) class][this_block] = 1;
1056 new_basic_block_needs = 1;
1060 mode = reload_inmode[i];
1061 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1062 mode = reload_outmode[i];
1063 size = CLASS_MAX_NREGS (class, mode);
1065 /* If this class doesn't want a group, determine if we have
1066 a nongroup need or a regular need. We have a nongroup
1067 need if this reload conflicts with a group reload whose
1068 class intersects with this reload's class. */
1070 nongroup_need = 0;
1071 if (size == 1)
1072 for (j = 0; j < n_reloads; j++)
1073 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1074 (GET_MODE_SIZE (reload_outmode[j])
1075 > GET_MODE_SIZE (reload_inmode[j]))
1076 ? reload_outmode[j]
1077 : reload_inmode[j])
1078 > 1)
1079 && (!reload_optional[j])
1080 && (reload_in[j] != 0 || reload_out[j] != 0
1081 || reload_secondary_p[j])
1082 && reloads_conflict (i, j)
1083 && reg_classes_intersect_p (class,
1084 reload_reg_class[j]))
1086 nongroup_need = 1;
1087 break;
1090 /* Decide which time-of-use to count this reload for. */
1091 switch (reload_when_needed[i])
1093 case RELOAD_OTHER:
1094 this_needs = &insn_needs.other;
1095 break;
1096 case RELOAD_FOR_INPUT:
1097 this_needs = &insn_needs.input;
1098 break;
1099 case RELOAD_FOR_OUTPUT:
1100 this_needs = &insn_needs.output;
1101 break;
1102 case RELOAD_FOR_INSN:
1103 this_needs = &insn_needs.insn;
1104 break;
1105 case RELOAD_FOR_OTHER_ADDRESS:
1106 this_needs = &insn_needs.other_addr;
1107 break;
1108 case RELOAD_FOR_INPUT_ADDRESS:
1109 this_needs = &insn_needs.in_addr[reload_opnum[i]];
1110 break;
1111 case RELOAD_FOR_OUTPUT_ADDRESS:
1112 this_needs = &insn_needs.out_addr[reload_opnum[i]];
1113 break;
1114 case RELOAD_FOR_OPERAND_ADDRESS:
1115 this_needs = &insn_needs.op_addr;
1116 break;
1117 case RELOAD_FOR_OPADDR_ADDR:
1118 this_needs = &insn_needs.op_addr_reload;
1119 break;
1122 if (size > 1)
1124 enum machine_mode other_mode, allocate_mode;
1126 /* Count number of groups needed separately from
1127 number of individual regs needed. */
1128 this_needs->groups[(int) class]++;
1129 p = reg_class_superclasses[(int) class];
1130 while (*p != LIM_REG_CLASSES)
1131 this_needs->groups[(int) *p++]++;
1133 /* Record size and mode of a group of this class. */
1134 /* If more than one size group is needed,
1135 make all groups the largest needed size. */
1136 if (group_size[(int) class] < size)
1138 other_mode = group_mode[(int) class];
1139 allocate_mode = mode;
1141 group_size[(int) class] = size;
1142 group_mode[(int) class] = mode;
1144 else
1146 other_mode = mode;
1147 allocate_mode = group_mode[(int) class];
1150 /* Crash if two dissimilar machine modes both need
1151 groups of consecutive regs of the same class. */
1153 if (other_mode != VOIDmode && other_mode != allocate_mode
1154 && ! modes_equiv_for_class_p (allocate_mode,
1155 other_mode, class))
1156 abort ();
1158 else if (size == 1)
1160 this_needs->regs[nongroup_need][(int) class] += 1;
1161 p = reg_class_superclasses[(int) class];
1162 while (*p != LIM_REG_CLASSES)
1163 this_needs->regs[nongroup_need][(int) *p++] += 1;
1165 else
1166 abort ();
1169 /* All reloads have been counted for this insn;
1170 now merge the various times of use.
1171 This sets insn_needs, etc., to the maximum total number
1172 of registers needed at any point in this insn. */
1174 for (i = 0; i < N_REG_CLASSES; i++)
1176 int in_max, out_max;
1178 /* Compute normal and nongroup needs. */
1179 for (j = 0; j <= 1; j++)
1181 for (in_max = 0, out_max = 0, k = 0;
1182 k < reload_n_operands; k++)
1184 in_max
1185 = MAX (in_max, insn_needs.in_addr[k].regs[j][i]);
1186 out_max
1187 = MAX (out_max, insn_needs.out_addr[k].regs[j][i]);
1190 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1191 and operand addresses but not things used to reload
1192 them. Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
1193 don't conflict with things needed to reload inputs or
1194 outputs. */
1196 in_max = MAX (MAX (insn_needs.op_addr.regs[j][i],
1197 insn_needs.op_addr_reload.regs[j][i]),
1198 in_max);
1200 out_max = MAX (out_max, insn_needs.insn.regs[j][i]);
1202 insn_needs.input.regs[j][i]
1203 = MAX (insn_needs.input.regs[j][i]
1204 + insn_needs.op_addr.regs[j][i]
1205 + insn_needs.insn.regs[j][i],
1206 in_max + insn_needs.input.regs[j][i]);
1208 insn_needs.output.regs[j][i] += out_max;
1209 insn_needs.other.regs[j][i]
1210 += MAX (MAX (insn_needs.input.regs[j][i],
1211 insn_needs.output.regs[j][i]),
1212 insn_needs.other_addr.regs[j][i]);
1216 /* Now compute group needs. */
1217 for (in_max = 0, out_max = 0, j = 0;
1218 j < reload_n_operands; j++)
1220 in_max = MAX (in_max, insn_needs.in_addr[j].groups[i]);
1221 out_max
1222 = MAX (out_max, insn_needs.out_addr[j].groups[i]);
1225 in_max = MAX (MAX (insn_needs.op_addr.groups[i],
1226 insn_needs.op_addr_reload.groups[i]),
1227 in_max);
1228 out_max = MAX (out_max, insn_needs.insn.groups[i]);
1230 insn_needs.input.groups[i]
1231 = MAX (insn_needs.input.groups[i]
1232 + insn_needs.op_addr.groups[i]
1233 + insn_needs.insn.groups[i],
1234 in_max + insn_needs.input.groups[i]);
1236 insn_needs.output.groups[i] += out_max;
1237 insn_needs.other.groups[i]
1238 += MAX (MAX (insn_needs.input.groups[i],
1239 insn_needs.output.groups[i]),
1240 insn_needs.other_addr.groups[i]);
1243 /* If this is a CALL_INSN and caller-saves will need
1244 a spill register, act as if the spill register is
1245 needed for this insn. However, the spill register
1246 can be used by any reload of this insn, so we only
1247 need do something if no need for that class has
1248 been recorded.
1250 The assumption that every CALL_INSN will trigger a
1251 caller-save is highly conservative; however, the number
1252 of cases where caller-saves will need a spill register but
1253 a block containing a CALL_INSN won't need a spill register
1254 of that class should be quite small.
1256 If a group is needed, the size and mode of the group will
1257 have been set up at the beginning of this loop. */
1259 if (GET_CODE (insn) == CALL_INSN
1260 && caller_save_spill_class != NO_REGS)
1262 /* See if this register would conflict with any reload
1263 that needs a group. */
1264 int nongroup_need = 0;
1265 int *caller_save_needs;
1267 for (j = 0; j < n_reloads; j++)
1268 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1269 (GET_MODE_SIZE (reload_outmode[j])
1270 > GET_MODE_SIZE (reload_inmode[j]))
1271 ? reload_outmode[j]
1272 : reload_inmode[j])
1273 > 1)
1274 && reg_classes_intersect_p (caller_save_spill_class,
1275 reload_reg_class[j]))
1277 nongroup_need = 1;
1278 break;
1281 caller_save_needs
1282 = (caller_save_group_size > 1
1283 ? insn_needs.other.groups
1284 : insn_needs.other.regs[nongroup_need]);
1286 if (caller_save_needs[(int) caller_save_spill_class] == 0)
1288 register enum reg_class *p
1289 = reg_class_superclasses[(int) caller_save_spill_class];
1291 caller_save_needs[(int) caller_save_spill_class]++;
1293 while (*p != LIM_REG_CLASSES)
1294 caller_save_needs[(int) *p++] += 1;
1297 /* Show that this basic block will need a register of
1298 this class. */
1300 if (global
1301 && ! (basic_block_needs[(int) caller_save_spill_class]
1302 [this_block]))
1304 basic_block_needs[(int) caller_save_spill_class]
1305 [this_block] = 1;
1306 new_basic_block_needs = 1;
1310 #ifdef SMALL_REGISTER_CLASSES
1311 /* If this insn stores the value of a function call,
1312 and that value is in a register that has been spilled,
1313 and if the insn needs a reload in a class
1314 that might use that register as the reload register,
1315 then add an extra need in that class.
1316 This makes sure we have a register available that does
1317 not overlap the return value. */
1319 if (avoid_return_reg)
1321 int regno = REGNO (avoid_return_reg);
1322 int nregs
1323 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
1324 int r;
1325 int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES];
1327 /* First compute the "basic needs", which counts a
1328 need only in the smallest class in which it
1329 is required. */
1331 bcopy (insn_needs.other.regs[0], basic_needs,
1332 sizeof basic_needs);
1333 bcopy (insn_needs.other.groups, basic_groups,
1334 sizeof basic_groups);
1336 for (i = 0; i < N_REG_CLASSES; i++)
1338 enum reg_class *p;
1340 if (basic_needs[i] >= 0)
1341 for (p = reg_class_superclasses[i];
1342 *p != LIM_REG_CLASSES; p++)
1343 basic_needs[(int) *p] -= basic_needs[i];
1345 if (basic_groups[i] >= 0)
1346 for (p = reg_class_superclasses[i];
1347 *p != LIM_REG_CLASSES; p++)
1348 basic_groups[(int) *p] -= basic_groups[i];
1351 /* Now count extra regs if there might be a conflict with
1352 the return value register.
1354 ??? This is not quite correct because we don't properly
1355 handle the case of groups, but if we end up doing
1356 something wrong, it either will end up not mattering or
1357 we will abort elsewhere. */
1359 for (r = regno; r < regno + nregs; r++)
1360 if (spill_reg_order[r] >= 0)
1361 for (i = 0; i < N_REG_CLASSES; i++)
1362 if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
1364 if (basic_needs[i] > 0 || basic_groups[i] > 0)
1366 enum reg_class *p;
1368 insn_needs.other.regs[0][i]++;
1369 p = reg_class_superclasses[i];
1370 while (*p != LIM_REG_CLASSES)
1371 insn_needs.other.regs[0][(int) *p++]++;
1375 #endif /* SMALL_REGISTER_CLASSES */
1377 /* For each class, collect maximum need of any insn. */
1379 for (i = 0; i < N_REG_CLASSES; i++)
1381 if (max_needs[i] < insn_needs.other.regs[0][i])
1383 max_needs[i] = insn_needs.other.regs[0][i];
1384 max_needs_insn[i] = insn;
1386 if (max_groups[i] < insn_needs.other.groups[i])
1388 max_groups[i] = insn_needs.other.groups[i];
1389 max_groups_insn[i] = insn;
1391 if (max_nongroups[i] < insn_needs.other.regs[1][i])
1393 max_nongroups[i] = insn_needs.other.regs[1][i];
1394 max_nongroups_insn[i] = insn;
1398 /* Note that there is a continue statement above. */
1401 /* If we allocated any new memory locations, make another pass
1402 since it might have changed elimination offsets. */
1403 if (starting_frame_size != get_frame_size ())
1404 something_changed = 1;
1406 if (dumpfile)
1407 for (i = 0; i < N_REG_CLASSES; i++)
1409 if (max_needs[i] > 0)
1410 fprintf (dumpfile,
1411 ";; Need %d reg%s of class %s (for insn %d).\n",
1412 max_needs[i], max_needs[i] == 1 ? "" : "s",
1413 reg_class_names[i], INSN_UID (max_needs_insn[i]));
1414 if (max_nongroups[i] > 0)
1415 fprintf (dumpfile,
1416 ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
1417 max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
1418 reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
1419 if (max_groups[i] > 0)
1420 fprintf (dumpfile,
1421 ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
1422 max_groups[i], max_groups[i] == 1 ? "" : "s",
1423 mode_name[(int) group_mode[i]],
1424 reg_class_names[i], INSN_UID (max_groups_insn[i]));
1427 /* If we have caller-saves, set up the save areas and see if caller-save
1428 will need a spill register. */
1430 if (caller_save_needed
1431 && ! setup_save_areas (&something_changed)
1432 && caller_save_spill_class == NO_REGS)
1434 /* The class we will need depends on whether the machine
1435 supports the sum of two registers for an address; see
1436 find_address_reloads for details. */
1438 caller_save_spill_class
1439 = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
1440 caller_save_group_size
1441 = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
1442 something_changed = 1;
1445 /* See if anything that happened changes which eliminations are valid.
1446 For example, on the Sparc, whether or not the frame pointer can
1447 be eliminated can depend on what registers have been used. We need
1448 not check some conditions again (such as flag_omit_frame_pointer)
1449 since they can't have changed. */
1451 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1452 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
1453 #ifdef ELIMINABLE_REGS
1454 || ! CAN_ELIMINATE (ep->from, ep->to)
1455 #endif
1457 ep->can_eliminate = 0;
1459 /* Look for the case where we have discovered that we can't replace
1460 register A with register B and that means that we will now be
1461 trying to replace register A with register C. This means we can
1462 no longer replace register C with register B and we need to disable
1463 such an elimination, if it exists. This occurs often with A == ap,
1464 B == sp, and C == fp. */
1466 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1468 struct elim_table *op;
1469 register int new_to = -1;
1471 if (! ep->can_eliminate && ep->can_eliminate_previous)
1473 /* Find the current elimination for ep->from, if there is a
1474 new one. */
1475 for (op = reg_eliminate;
1476 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1477 if (op->from == ep->from && op->can_eliminate)
1479 new_to = op->to;
1480 break;
1483 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1484 disable it. */
1485 for (op = reg_eliminate;
1486 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1487 if (op->from == new_to && op->to == ep->to)
1488 op->can_eliminate = 0;
1492 /* See if any registers that we thought we could eliminate the previous
1493 time are no longer eliminable. If so, something has changed and we
1494 must spill the register. Also, recompute the number of eliminable
1495 registers and see if the frame pointer is needed; it is if there is
1496 no elimination of the frame pointer that we can perform. */
1498 frame_pointer_needed = 1;
1499 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1501 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
1502 && ep->to != HARD_FRAME_POINTER_REGNUM)
1503 frame_pointer_needed = 0;
1505 if (! ep->can_eliminate && ep->can_eliminate_previous)
1507 ep->can_eliminate_previous = 0;
1508 spill_hard_reg (ep->from, global, dumpfile, 1);
1509 something_changed = 1;
1510 num_eliminable--;
1514 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1515 /* If we didn't need a frame pointer last time, but we do now, spill
1516 the hard frame pointer. */
1517 if (frame_pointer_needed && ! previous_frame_pointer_needed)
1519 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
1520 something_changed = 1;
1522 #endif
1524 /* If all needs are met, we win. */
1526 for (i = 0; i < N_REG_CLASSES; i++)
1527 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1528 break;
1529 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1530 break;
1532 /* Not all needs are met; must spill some hard regs. */
1534 /* Put all registers spilled so far back in potential_reload_regs, but
1535 put them at the front, since we've already spilled most of the
1536 pseudos in them (we might have left some pseudos unspilled if they
1537 were in a block that didn't need any spill registers of a conflicting
1538 class). We used to try to mark off the need for those registers,
1539 but doing so properly is very complex and reallocating them is the
1540 simpler approach. First, "pack" potential_reload_regs by pushing
1541 any nonnegative entries towards the end. That will leave room
1542 for the registers we already spilled.
1544 Also, undo the marking of the spill registers from the last time
1545 around in FORBIDDEN_REGS since we will probably be allocating
1546 them again below.
1548 ??? It is theoretically possible that we might end up not using one
1549 of our previously-spilled registers in this allocation, even though
1550 they are at the head of the list. It's not clear what to do about
1551 this, but it was no better before, when we marked off the needs met
1552 by the previously-spilled registers. With the current code, globals
1553 can be allocated into these registers, but locals cannot. */
1555 if (n_spills)
1557 for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1558 if (potential_reload_regs[i] != -1)
1559 potential_reload_regs[j--] = potential_reload_regs[i];
1561 for (i = 0; i < n_spills; i++)
1563 potential_reload_regs[i] = spill_regs[i];
1564 spill_reg_order[spill_regs[i]] = -1;
1565 CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
1568 n_spills = 0;
1571 /* Now find more reload regs to satisfy the remaining need.
1572 Do it by ascending class number, since otherwise a reg
1573 might be spilled for a big class and might fail to count
1574 for a smaller class even though it belongs to that class.
1576 Count spilled regs in `spills', and add entries to
1577 `spill_regs' and `spill_reg_order'.
1579 ??? Note there is a problem here.
1580 When there is a need for a group in a high-numbered class,
1581 and also need for non-group regs that come from a lower class,
1582 the non-group regs are chosen first. If there aren't many regs,
1583 they might leave no room for a group.
1585 This was happening on the 386. To fix it, we added the code
1586 that calls possible_group_p, so that the lower class won't
1587 break up the last possible group.
1589 Really fixing the problem would require changes above
1590 in counting the regs already spilled, and in choose_reload_regs.
1591 It might be hard to avoid introducing bugs there. */
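/* Concrete instance of the problem described above (hypothetical register
   layout): if the only consecutive pair left in a high class is {r2, r3}
   and a lower class takes r2 for a single-register need, no group can be
   formed at all.  possible_group_p, called when choosing single spill
   registers below, guards against exactly this. */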
1593 CLEAR_HARD_REG_SET (counted_for_groups);
1594 CLEAR_HARD_REG_SET (counted_for_nongroups);
1596 for (class = 0; class < N_REG_CLASSES; class++)
1598 /* First get the groups of registers.
1599 If we got single registers first, we might fragment
1600 possible groups. */
1601 while (max_groups[class] > 0)
1603 /* If any single spilled regs happen to form groups,
1604 count them now. Maybe we don't really need
1605 to spill another group. */
1606 count_possible_groups (group_size, group_mode, max_groups);
1608 if (max_groups[class] <= 0)
1609 break;
1611 /* Groups of size 2 (the only groups used on most machines)
1612 are treated specially. */
1613 if (group_size[class] == 2)
1615 /* First, look for a register that will complete a group. */
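/* In other words: find a register J, not in bad_spill_regs, whose neighbor
   (J-1 or J+1) is already a spill register, where both J and the neighbor
   are in CLASS, the pair (starting at its lower register) satisfies
   HARD_REGNO_MODE_OK for the group mode, and the neighbor has not already
   been claimed for some other group or for a non-group need. */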
1616 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1618 int other;
1620 j = potential_reload_regs[i];
1621 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1623 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1624 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1625 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1626 && HARD_REGNO_MODE_OK (other, group_mode[class])
1627 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1628 other)
1629 /* We don't want one part of another group.
1630 We could get "two groups" that overlap! */
1631 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1633 (j < FIRST_PSEUDO_REGISTER - 1
1634 && (other = j + 1, spill_reg_order[other] >= 0)
1635 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1636 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1637 && HARD_REGNO_MODE_OK (j, group_mode[class])
1638 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1639 other)
1640 && ! TEST_HARD_REG_BIT (counted_for_groups,
1641 other))))
1643 register enum reg_class *p;
1645 /* We have found one that will complete a group,
1646 so count off one group as provided. */
1647 max_groups[class]--;
1648 p = reg_class_superclasses[class];
1649 while (*p != LIM_REG_CLASSES)
1650 max_groups[(int) *p++]--;
1652 /* Indicate both these regs are part of a group. */
1653 SET_HARD_REG_BIT (counted_for_groups, j);
1654 SET_HARD_REG_BIT (counted_for_groups, other);
1655 break;
1658 /* We can't complete a group, so start one. */
1659 #ifdef SMALL_REGISTER_CLASSES
1660 /* Look for a pair neither of which is explicitly used. */
1661 if (i == FIRST_PSEUDO_REGISTER)
1662 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1664 int k;
1665 j = potential_reload_regs[i];
1666 /* Verify that J+1 is a potential reload reg. */
1667 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1668 if (potential_reload_regs[k] == j + 1)
1669 break;
1670 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1671 && k < FIRST_PSEUDO_REGISTER
1672 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1673 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1674 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1675 && HARD_REGNO_MODE_OK (j, group_mode[class])
1676 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1677 j + 1)
1678 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1)
1679 /* Reject J at this stage
1680 if J+1 was explicitly used. */
1681 && ! regs_explicitly_used[j + 1])
1682 break;
1684 #endif
1685 /* Now try any group at all
1686 whose registers are not in bad_spill_regs. */
1687 if (i == FIRST_PSEUDO_REGISTER)
1688 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1690 int k;
1691 j = potential_reload_regs[i];
1692 /* Verify that J+1 is a potential reload reg. */
1693 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1694 if (potential_reload_regs[k] == j + 1)
1695 break;
1696 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1697 && k < FIRST_PSEUDO_REGISTER
1698 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1699 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1700 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1701 && HARD_REGNO_MODE_OK (j, group_mode[class])
1702 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1703 j + 1)
1704 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
1705 break;
1708 /* I should be the index in potential_reload_regs
1709 of the new reload reg we have found. */
1711 if (i >= FIRST_PSEUDO_REGISTER)
1713 /* There are no groups left to spill. */
1714 spill_failure (max_groups_insn[class]);
1715 failure = 1;
1716 goto failed;
1718 else
1719 something_changed
1720 |= new_spill_reg (i, class, max_needs, NULL_PTR,
1721 global, dumpfile);
1723 else
1725 /* For groups of more than 2 registers,
1726 look for a sufficient sequence of unspilled registers,
1727 and spill them all at once. */
1728 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1730 int k;
1732 j = potential_reload_regs[i];
1733 if (j >= 0
1734 && j + group_size[class] <= FIRST_PSEUDO_REGISTER
1735 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1737 /* Check each reg in the sequence. */
1738 for (k = 0; k < group_size[class]; k++)
1739 if (! (spill_reg_order[j + k] < 0
1740 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1741 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1742 break;
1743 /* We got a full sequence, so spill them all. */
1744 if (k == group_size[class])
1746 register enum reg_class *p;
1747 for (k = 0; k < group_size[class]; k++)
1749 int idx;
1750 SET_HARD_REG_BIT (counted_for_groups, j + k);
1751 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1752 if (potential_reload_regs[idx] == j + k)
1753 break;
1754 something_changed
1755 |= new_spill_reg (idx, class,
1756 max_needs, NULL_PTR,
1757 global, dumpfile);
1760 /* We have found one that will complete a group,
1761 so count off one group as provided. */
1762 max_groups[class]--;
1763 p = reg_class_superclasses[class];
1764 while (*p != LIM_REG_CLASSES)
1765 max_groups[(int) *p++]--;
1767 break;
1771 /* We couldn't find any registers for this reload.
1772 Avoid going into an infinite loop. */
1773 if (i >= FIRST_PSEUDO_REGISTER)
1775 /* There are no groups left. */
1776 spill_failure (max_groups_insn[class]);
1777 failure = 1;
1778 goto failed;
1783 /* Now similarly satisfy all need for single registers. */
1785 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1787 #ifdef SMALL_REGISTER_CLASSES
1788 /* This should be right for all machines, but only the 386
1789 is known to need it, so this conditional plays safe.
1790 ??? For 2.5, try making this unconditional. */
1791 /* If we spilled enough regs, but they weren't counted
1792 against the non-group need, see if we can count them now.
1793 If so, we can avoid some actual spilling. */
1794 if (max_needs[class] <= 0 && max_nongroups[class] > 0)
1795 for (i = 0; i < n_spills; i++)
1796 if (TEST_HARD_REG_BIT (reg_class_contents[class],
1797 spill_regs[i])
1798 && !TEST_HARD_REG_BIT (counted_for_groups,
1799 spill_regs[i])
1800 && !TEST_HARD_REG_BIT (counted_for_nongroups,
1801 spill_regs[i])
1802 && max_nongroups[class] > 0)
1804 register enum reg_class *p;
1806 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1807 max_nongroups[class]--;
1808 p = reg_class_superclasses[class];
1809 while (*p != LIM_REG_CLASSES)
1810 max_nongroups[(int) *p++]--;
1812 if (max_needs[class] <= 0 && max_nongroups[class] <= 0)
1813 break;
1814 #endif
1816 /* Consider the potential reload regs that aren't
1817 yet in use as reload regs, in order of preference.
1818 Find the most preferred one that's in this class. */
1820 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1821 if (potential_reload_regs[i] >= 0
1822 && TEST_HARD_REG_BIT (reg_class_contents[class],
1823 potential_reload_regs[i])
1824 /* If this reg will not be available for groups,
1825 pick one that does not foreclose possible groups.
1826 This is a kludge, and not very general,
1827 but it should be sufficient to make the 386 work,
1828 and the problem should not occur on machines with
1829 more registers. */
1830 && (max_nongroups[class] == 0
1831 || possible_group_p (potential_reload_regs[i], max_groups)))
1832 break;
1834 /* If we couldn't get a register, try to get one even if we
1835 might foreclose possible groups. This may cause problems
1836 later, but that's better than aborting now, since it is
1837 possible that we will, in fact, be able to form the needed
1838 group even with this allocation. */
1840 if (i >= FIRST_PSEUDO_REGISTER
1841 && (asm_noperands (max_needs[class] > 0
1842 ? max_needs_insn[class]
1843 : max_nongroups_insn[class])
1844 < 0))
1845 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1846 if (potential_reload_regs[i] >= 0
1847 && TEST_HARD_REG_BIT (reg_class_contents[class],
1848 potential_reload_regs[i]))
1849 break;
1851 /* I should be the index in potential_reload_regs
1852 of the new reload reg we have found. */
1854 if (i >= FIRST_PSEUDO_REGISTER)
1856 /* There are no possible registers left to spill. */
1857 spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
1858 : max_nongroups_insn[class]);
1859 failure = 1;
1860 goto failed;
1862 else
1863 something_changed
1864 |= new_spill_reg (i, class, max_needs, max_nongroups,
1865 global, dumpfile);
1870 /* If global-alloc was run, notify it of any register eliminations we have
1871 done. */
1872 if (global)
1873 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1874 if (ep->can_eliminate)
1875 mark_elimination (ep->from, ep->to);
1877 /* Insert code to save and restore call-clobbered hard regs
1878 around calls. Tell what mode to use so that we will process
1879 those insns in reload_as_needed if we have to. */
1881 if (caller_save_needed)
1882 save_call_clobbered_regs (num_eliminable ? QImode
1883 : caller_save_spill_class != NO_REGS ? HImode
1884 : VOIDmode);
1886 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1887 If that insn didn't set the register (i.e., it copied the register to
1888 memory), just delete that insn instead of the equivalencing insn plus
1889 anything now dead. If we call delete_dead_insn on that insn, we may
1890 delete the insn that actually sets the register if the register dies
1891 there, and that is incorrect. */
1893 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1894 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
1895 && GET_CODE (reg_equiv_init[i]) != NOTE)
1897 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
1898 delete_dead_insn (reg_equiv_init[i]);
1899 else
1901 PUT_CODE (reg_equiv_init[i], NOTE);
1902 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
1903 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
1907 /* Use the reload registers where necessary
1908 by generating move instructions to move the must-be-register
1909 values into or out of the reload registers. */
1911 if (something_needs_reloads || something_needs_elimination
1912 || (caller_save_needed && num_eliminable)
1913 || caller_save_spill_class != NO_REGS)
1914 reload_as_needed (first, global);
1916 /* If we were able to eliminate the frame pointer, show that it is no
1917 longer live at the start of any basic block. If it is live by
1918 virtue of being in a pseudo, that pseudo will be marked live
1919 and hence the frame pointer will be known to be live via that
1920 pseudo. */
1922 if (! frame_pointer_needed)
1923 for (i = 0; i < n_basic_blocks; i++)
1924 basic_block_live_at_start[i][HARD_FRAME_POINTER_REGNUM / REGSET_ELT_BITS]
1925 &= ~ ((REGSET_ELT_TYPE) 1 << (HARD_FRAME_POINTER_REGNUM
1926 % REGSET_ELT_BITS));
1928 /* Come here (with failure set nonzero) if we can't get enough spill regs
1929 and we decide not to abort about it. */
1930 failed:
1932 reload_in_progress = 0;
1934 /* Now eliminate all pseudo regs by modifying them into
1935 their equivalent memory references.
1936 The REG-rtx's for the pseudos are modified in place,
1937 so all insns that used to refer to them now refer to memory.
1939 For a reg that has a reg_equiv_address, all those insns
1940 were changed by reloading so that no insns refer to it any longer;
1941 but the DECL_RTL of a variable decl may refer to it,
1942 and if so this causes the debugging info to mention the variable. */
1944 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1946 rtx addr = 0;
1947 int in_struct = 0;
1948 if (reg_equiv_mem[i])
1950 addr = XEXP (reg_equiv_mem[i], 0);
1951 in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
1953 if (reg_equiv_address[i])
1954 addr = reg_equiv_address[i];
1955 if (addr)
1957 if (reg_renumber[i] < 0)
1959 rtx reg = regno_reg_rtx[i];
1960 XEXP (reg, 0) = addr;
1961 REG_USERVAR_P (reg) = 0;
1962 MEM_IN_STRUCT_P (reg) = in_struct;
1963 PUT_CODE (reg, MEM);
1965 else if (reg_equiv_mem[i])
1966 XEXP (reg_equiv_mem[i], 0) = addr;
1970 #ifdef PRESERVE_DEATH_INFO_REGNO_P
1971 /* Make a pass over all the insns and remove death notes for things that
1972 are no longer registers or no longer die in the insn (e.g., an input
1973 and output pseudo being tied). */
1975 for (insn = first; insn; insn = NEXT_INSN (insn))
1976 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1978 rtx note, next;
1980 for (note = REG_NOTES (insn); note; note = next)
1982 next = XEXP (note, 1);
1983 if (REG_NOTE_KIND (note) == REG_DEAD
1984 && (GET_CODE (XEXP (note, 0)) != REG
1985 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
1986 remove_note (insn, note);
1989 #endif
1991 /* Indicate that we no longer have known memory locations or constants. */
1992 reg_equiv_constant = 0;
1993 reg_equiv_memory_loc = 0;
1995 if (scratch_list)
1996 free (scratch_list);
1997 scratch_list = 0;
1998 if (scratch_block)
1999 free (scratch_block);
2000 scratch_block = 0;
2002 return failure;
2005 /* Nonzero if, after spilling reg REGNO for non-groups,
2006 it will still be possible to find a group if we still need one. */
2008 static int
2009 possible_group_p (regno, max_groups)
2010 int regno;
2011 int *max_groups;
2013 int i;
2014 int class = (int) NO_REGS;
2016 for (i = 0; i < (int) N_REG_CLASSES; i++)
2017 if (max_groups[i] > 0)
2019 class = i;
2020 break;
2023 if (class == (int) NO_REGS)
2024 return 1;
2026 /* Consider each pair of consecutive registers. */
2027 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2029 /* Ignore pairs that include reg REGNO. */
2030 if (i == regno || i + 1 == regno)
2031 continue;
2033 /* Ignore pairs that are outside the class that needs the group.
2034 ??? Here we fail to handle the case where two different classes
2035 independently need groups. But this never happens with our
2036 current machine descriptions. */
2037 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2038 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2039 continue;
2041 /* A pair of consecutive regs we can still spill does the trick. */
2042 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2043 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2044 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2045 return 1;
2047 /* A pair of one already spilled and one we can spill does it
2048 provided the one already spilled is not otherwise reserved. */
2049 if (spill_reg_order[i] < 0
2050 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2051 && spill_reg_order[i + 1] >= 0
2052 && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
2053 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
2054 return 1;
2055 if (spill_reg_order[i + 1] < 0
2056 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2057 && spill_reg_order[i] >= 0
2058 && ! TEST_HARD_REG_BIT (counted_for_groups, i)
2059 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
2060 return 1;
2063 return 0;
2066 /* Count any groups that can be formed from the registers recently spilled.
2067 This is done class by class, in order of ascending class number. */
2069 static void
2070 count_possible_groups (group_size, group_mode, max_groups)
2071 int *group_size;
2072 enum machine_mode *group_mode;
2073 int *max_groups;
2075 int i;
2076 /* Now find all consecutive groups of spilled registers
2077 and mark each group off against the need for such groups.
2078 But don't count them against ordinary need, yet. */
2080 for (i = 0; i < N_REG_CLASSES; i++)
2081 if (group_size[i] > 1)
2083 HARD_REG_SET new;
2084 int j;
2086 CLEAR_HARD_REG_SET (new);
2088 /* Make a mask of all the regs that are spill regs in class I. */
2089 for (j = 0; j < n_spills; j++)
2090 if (TEST_HARD_REG_BIT (reg_class_contents[i], spill_regs[j])
2091 && ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[j])
2092 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
2093 spill_regs[j]))
2094 SET_HARD_REG_BIT (new, spill_regs[j]);
2096 /* Find each consecutive group of them. */
2097 for (j = 0; j < FIRST_PSEUDO_REGISTER && max_groups[i] > 0; j++)
2098 if (TEST_HARD_REG_BIT (new, j)
2099 && j + group_size[i] <= FIRST_PSEUDO_REGISTER
2100 /* Next line in case group-mode for this class
2101 demands an even-odd pair. */
2102 && HARD_REGNO_MODE_OK (j, group_mode[i]))
2104 int k;
2105 for (k = 1; k < group_size[i]; k++)
2106 if (! TEST_HARD_REG_BIT (new, j + k))
2107 break;
2108 if (k == group_size[i])
2110 /* We found a group. Mark it off against this class's
2111 need for groups, and against each superclass too. */
2112 register enum reg_class *p;
2113 max_groups[i]--;
2114 p = reg_class_superclasses[i];
2115 while (*p != LIM_REG_CLASSES)
2116 max_groups[(int) *p++]--;
2117 /* Don't count these registers again. */
2118 for (k = 0; k < group_size[i]; k++)
2119 SET_HARD_REG_BIT (counted_for_groups, j + k);
2121 /* Skip to the last reg in this group. When j is incremented
2122 above, it will then point to the first reg of the next
2123 possible group. */
2124 j += k - 1;
2130 /* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2131 another mode that needs to be reloaded for the same register class CLASS.
2132 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2133 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2135 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2136 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2137 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2138 causes unnecessary failures on machines requiring alignment of register
2139 groups when the two modes are different sizes, because the larger mode has
2140 more strict alignment rules than the smaller mode. */
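/* Hypothetical example: ALLOCATE_MODE is a two-register mode that a machine
   only allows starting at even register numbers, while OTHER_MODE occupies a
   single register.  An odd-numbered register in CLASS then allows OTHER_MODE
   but not ALLOCATE_MODE; the old, removed test would have treated the modes
   as inequivalent even though we never put an ALLOCATE_MODE value into such
   a register.  Only the reverse situation, checked below, matters. */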
2142 static int
2143 modes_equiv_for_class_p (allocate_mode, other_mode, class)
2144 enum machine_mode allocate_mode, other_mode;
2145 enum reg_class class;
2147 register int regno;
2148 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2150 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2151 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2152 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2153 return 0;
2155 return 1;
2158 /* Handle the failure to find a register to spill.
2159 INSN should be one of the insns which needed this particular spill reg. */
2161 static void
2162 spill_failure (insn)
2163 rtx insn;
2165 if (asm_noperands (PATTERN (insn)) >= 0)
2166 error_for_asm (insn, "`asm' needs too many reloads");
2167 else
2168 abort ();
2171 /* Add a new register to the tables of available spill-registers
2172 (as well as spilling all pseudos allocated to the register).
2173 I is the index of this register in potential_reload_regs.
2174 CLASS is the regclass whose need is being satisfied.
2175 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
2176 so that this register can count off against them.
2177 MAX_NONGROUPS is 0 if this register is part of a group.
2178 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
2180 static int
2181 new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
2182 int i;
2183 int class;
2184 int *max_needs;
2185 int *max_nongroups;
2186 int global;
2187 FILE *dumpfile;
2189 register enum reg_class *p;
2190 int val;
2191 int regno = potential_reload_regs[i];
2193 if (i >= FIRST_PSEUDO_REGISTER)
2194 abort (); /* Caller failed to find any register. */
2196 if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
2197 fatal ("fixed or forbidden register was spilled.\n\
2198 This may be due to a compiler bug or to impossible asm\n\
2199 statements or clauses.");
2201 /* Make reg REGNO an additional reload reg. */
2203 potential_reload_regs[i] = -1;
2204 spill_regs[n_spills] = regno;
2205 spill_reg_order[regno] = n_spills;
2206 if (dumpfile)
2207 fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);
2209 /* Clear off the needs we just satisfied. */
2211 max_needs[class]--;
2212 p = reg_class_superclasses[class];
2213 while (*p != LIM_REG_CLASSES)
2214 max_needs[(int) *p++]--;
2216 if (max_nongroups && max_nongroups[class] > 0)
2218 SET_HARD_REG_BIT (counted_for_nongroups, regno);
2219 max_nongroups[class]--;
2220 p = reg_class_superclasses[class];
2221 while (*p != LIM_REG_CLASSES)
2222 max_nongroups[(int) *p++]--;
2225 /* Spill every pseudo reg that was allocated to this reg
2226 or to something that overlaps this reg. */
2228 val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);
2230 /* If there are some registers still to eliminate and this register
2231 wasn't ever used before, additional stack space may have to be
2232 allocated to store this register. Thus, we may have changed the offset
2233 between the stack and frame pointers, so mark that something has changed.
2234 (If new pseudos were spilled, thus requiring more space, VAL would have
2235 been set non-zero by the call to spill_hard_reg above since additional
2236 reloads may be needed in that case.)
2238 One might think that we need only set VAL to 1 if this is a call-used
2239 register. However, the set of registers that must be saved by the
2240 prologue is not identical to the call-used set. For example, the
2241 register used by the call insn for the return PC is a call-used register,
2242 but must be saved by the prologue. */
2243 if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
2244 val = 1;
2246 regs_ever_live[spill_regs[n_spills]] = 1;
2247 n_spills++;
2249 return val;
2252 /* Delete an unneeded INSN and any previous insns whose sole purpose is loading
2253 data that is dead in INSN. */
2255 static void
2256 delete_dead_insn (insn)
2257 rtx insn;
2259 rtx prev = prev_real_insn (insn);
2260 rtx prev_dest;
2262 /* If the previous insn sets a register that dies in our insn, delete it
2263 too. */
2264 if (prev && GET_CODE (PATTERN (prev)) == SET
2265 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2266 && reg_mentioned_p (prev_dest, PATTERN (insn))
2267 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
2268 delete_dead_insn (prev);
2270 PUT_CODE (insn, NOTE);
2271 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2272 NOTE_SOURCE_FILE (insn) = 0;
2275 /* Modify the home of pseudo-reg I.
2276 The new home is present in reg_renumber[I].
2278 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2279 or it may be -1, meaning there is none or it is not relevant.
2280 This is used so that all pseudos spilled from a given hard reg
2281 can share one stack slot. */
2283 static void
2284 alter_reg (i, from_reg)
2285 register int i;
2286 int from_reg;
2288 /* When outputting an inline function, this can happen
2289 for a reg that isn't actually used. */
2290 if (regno_reg_rtx[i] == 0)
2291 return;
2293 /* If the reg got changed to a MEM at rtl-generation time,
2294 ignore it. */
2295 if (GET_CODE (regno_reg_rtx[i]) != REG)
2296 return;
2298 /* Modify the reg-rtx to contain the new hard reg
2299 number or else to contain its pseudo reg number. */
2300 REGNO (regno_reg_rtx[i])
2301 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
2303 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2304 allocate a stack slot for it. */
2306 if (reg_renumber[i] < 0
2307 && reg_n_refs[i] > 0
2308 && reg_equiv_constant[i] == 0
2309 && reg_equiv_memory_loc[i] == 0)
2311 register rtx x;
2312 int inherent_size = PSEUDO_REGNO_BYTES (i);
2313 int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2314 int adjust = 0;
2316 /* Each pseudo reg has an inherent size which comes from its own mode,
2317 and a total size which provides room for paradoxical subregs
2318 which refer to the pseudo reg in wider modes.
2320 We can use a slot already allocated if it provides both
2321 enough inherent space and enough total space.
2322 Otherwise, we allocate a new slot, making sure that it has no less
2323 inherent space, and no less total space, than the previous slot. */
2324 if (from_reg == -1)
2326 /* No known place to spill from => no slot to reuse. */
2327 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size, -1);
2328 #if BYTES_BIG_ENDIAN
2329 /* Cancel the big-endian correction done in assign_stack_local.
2330 Get the address of the beginning of the slot.
2331 This is so we can do a big-endian correction unconditionally
2332 below. */
2333 adjust = inherent_size - total_size;
2334 #endif
2336 /* Reuse a stack slot if possible. */
2337 else if (spill_stack_slot[from_reg] != 0
2338 && spill_stack_slot_width[from_reg] >= total_size
2339 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2340 >= inherent_size))
2341 x = spill_stack_slot[from_reg];
2342 /* Allocate a bigger slot. */
2343 else
2345 /* Compute maximum size needed, both for inherent size
2346 and for total size. */
2347 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2348 if (spill_stack_slot[from_reg])
2350 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2351 > inherent_size)
2352 mode = GET_MODE (spill_stack_slot[from_reg]);
2353 if (spill_stack_slot_width[from_reg] > total_size)
2354 total_size = spill_stack_slot_width[from_reg];
2356 /* Make a slot with that size. */
2357 x = assign_stack_local (mode, total_size, -1);
2358 #if BYTES_BIG_ENDIAN
2359 /* Cancel the big-endian correction done in assign_stack_local.
2360 Get the address of the beginning of the slot.
2361 This is so we can do a big-endian correction unconditionally
2362 below. */
2363 adjust = GET_MODE_SIZE (mode) - total_size;
2364 #endif
2365 spill_stack_slot[from_reg] = x;
2366 spill_stack_slot_width[from_reg] = total_size;
2369 #if BYTES_BIG_ENDIAN
2370 /* On a big endian machine, the "address" of the slot
2371 is the address of the low part that fits its inherent mode. */
2372 if (inherent_size < total_size)
2373 adjust += (total_size - inherent_size);
2374 #endif /* BYTES_BIG_ENDIAN */
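/* Illustration with hypothetical sizes: a pseudo whose own mode is 4 bytes
   but which is referenced through an 8-byte paradoxical SUBREG
   (inherent_size == 4, total_size == 8) reuses an 8-byte slot; on a
   big-endian target ADJUST becomes 4, so the MEM built below addresses the
   pseudo's 4 bytes at the high end of the slot, which is where big-endian
   layout places the part that fits its inherent mode. */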
2376 /* If we have any adjustment to make, or if the stack slot is the
2377 wrong mode, make a new stack slot. */
2378 if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
2380 x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]),
2381 plus_constant (XEXP (x, 0), adjust));
2382 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2385 /* Save the stack slot for later. */
2386 reg_equiv_memory_loc[i] = x;
2390 /* Mark the slots in regs_ever_live for the hard regs
2391 used by pseudo-reg number REGNO. */
2393 void
2394 mark_home_live (regno)
2395 int regno;
2397 register int i, lim;
2398 i = reg_renumber[regno];
2399 if (i < 0)
2400 return;
2401 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2402 while (i < lim)
2403 regs_ever_live[i++] = 1;
2406 /* Mark the registers used in SCRATCH as being live. */
2408 static void
2409 mark_scratch_live (scratch)
2410 rtx scratch;
2412 register int i;
2413 int regno = REGNO (scratch);
2414 int lim = regno + HARD_REGNO_NREGS (regno, GET_MODE (scratch));
2416 for (i = regno; i < lim; i++)
2417 regs_ever_live[i] = 1;
2420 /* This function handles the tracking of elimination offsets around branches.
2422 X is a piece of RTL being scanned.
2424 INSN is the insn that it came from, if any.
2426 INITIAL_P is non-zero if we are to set the offset to be the initial
2427 offset and zero if we are setting the offset of the label to be the
2428 current offset. */
2430 static void
2431 set_label_offsets (x, insn, initial_p)
2432 rtx x;
2433 rtx insn;
2434 int initial_p;
2436 enum rtx_code code = GET_CODE (x);
2437 rtx tem;
2438 int i;
2439 struct elim_table *p;
2441 switch (code)
2443 case LABEL_REF:
2444 if (LABEL_REF_NONLOCAL_P (x))
2445 return;
2447 x = XEXP (x, 0);
2449 /* ... fall through ... */
2451 case CODE_LABEL:
2452 /* If we know nothing about this label, set the desired offsets. Note
2453 that this sets the offset at a label to be the offset before a label
2454 if we don't know anything about the label. This is not correct for
2455 the label after a BARRIER, but is the best guess we can make. If
2456 we guessed wrong, we will suppress an elimination that might have
2457 been possible had we been able to guess correctly. */
2459 if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
2461 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2462 offsets_at[CODE_LABEL_NUMBER (x)][i]
2463 = (initial_p ? reg_eliminate[i].initial_offset
2464 : reg_eliminate[i].offset);
2465 offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
2468 /* Otherwise, if this is the definition of a label and it is
2469 preceded by a BARRIER, set our offsets to the known offset of
2470 that label. */
2472 else if (x == insn
2473 && (tem = prev_nonnote_insn (insn)) != 0
2474 && GET_CODE (tem) == BARRIER)
2476 num_not_at_initial_offset = 0;
2477 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2479 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
2480 = offsets_at[CODE_LABEL_NUMBER (x)][i];
2481 if (reg_eliminate[i].can_eliminate
2482 && (reg_eliminate[i].offset
2483 != reg_eliminate[i].initial_offset))
2484 num_not_at_initial_offset++;
2488 else
2489 /* If neither of the above cases is true, compare each offset
2490 with those previously recorded and suppress any eliminations
2491 where the offsets disagree. */
2493 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2494 if (offsets_at[CODE_LABEL_NUMBER (x)][i]
2495 != (initial_p ? reg_eliminate[i].initial_offset
2496 : reg_eliminate[i].offset))
2497 reg_eliminate[i].can_eliminate = 0;
2499 return;
2501 case JUMP_INSN:
2502 set_label_offsets (PATTERN (insn), insn, initial_p);
2504 /* ... fall through ... */
2506 case INSN:
2507 case CALL_INSN:
2508 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2509 and hence must have all eliminations at their initial offsets. */
2510 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2511 if (REG_NOTE_KIND (tem) == REG_LABEL)
2512 set_label_offsets (XEXP (tem, 0), insn, 1);
2513 return;
2515 case ADDR_VEC:
2516 case ADDR_DIFF_VEC:
2517 /* Each of the labels in the address vector must be at their initial
2518 offsets. We want the first field for ADDR_VEC and the second
2519 field for ADDR_DIFF_VEC. */
2521 for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2522 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2523 insn, initial_p);
2524 return;
2526 case SET:
2527 /* We only care about setting PC. If the source is not RETURN,
2528 IF_THEN_ELSE, or a label, disable any eliminations not at
2529 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2530 isn't one of those possibilities. For branches to a label,
2531 call ourselves recursively.
2533 Note that this can disable elimination unnecessarily when we have
2534 a non-local goto since it will look like a non-constant jump to
2535 someplace in the current function. This isn't a significant
2536 problem since such jumps will normally be when all elimination
2537 pairs are back to their initial offsets. */
2539 if (SET_DEST (x) != pc_rtx)
2540 return;
2542 switch (GET_CODE (SET_SRC (x)))
2544 case PC:
2545 case RETURN:
2546 return;
2548 case LABEL_REF:
2549 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
2550 return;
2552 case IF_THEN_ELSE:
2553 tem = XEXP (SET_SRC (x), 1);
2554 if (GET_CODE (tem) == LABEL_REF)
2555 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2556 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2557 break;
2559 tem = XEXP (SET_SRC (x), 2);
2560 if (GET_CODE (tem) == LABEL_REF)
2561 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2562 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2563 break;
2564 return;
2567 /* If we reach here, all eliminations must be at their initial
2568 offset because we are doing a jump to a variable address. */
2569 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2570 if (p->offset != p->initial_offset)
2571 p->can_eliminate = 0;
2575 /* Used for communication between the next two functions to properly share
2576 the vector for an ASM_OPERANDS. */
2578 static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2580 /* Scan X and replace any eliminable registers (such as fp) with a
2581 replacement (such as sp), plus an offset.
2583 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2584 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2585 MEM, we are allowed to replace a sum of a register and the constant zero
2586 with the register, which we cannot do outside a MEM. In addition, we need
2587 to record the fact that a register is referenced outside a MEM.
2589 If INSN is an insn, it is the insn containing X. If we replace a REG
2590 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2591 CLOBBER of the pseudo after INSN so find_equiv_regs will know
2592 that the REG is being modified.
2594 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2595 That's used when we eliminate in expressions stored in notes.
2596 This means, do not set ref_outside_mem even if the reference
2597 is outside of MEMs.
2599 If we see a modification to a register we know about, take the
2600 appropriate action (see case SET, below).
2602 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
2603 replacements done assuming all offsets are at their initial values. If
2604 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2605 encounter, return the actual location so that find_reloads will do
2606 the proper thing. */
2609 eliminate_regs (x, mem_mode, insn)
2610 rtx x;
2611 enum machine_mode mem_mode;
2612 rtx insn;
2614 enum rtx_code code = GET_CODE (x);
2615 struct elim_table *ep;
2616 int regno;
2617 rtx new;
2618 int i, j;
2619 char *fmt;
2620 int copied = 0;
2622 switch (code)
2624 case CONST_INT:
2625 case CONST_DOUBLE:
2626 case CONST:
2627 case SYMBOL_REF:
2628 case CODE_LABEL:
2629 case PC:
2630 case CC0:
2631 case ASM_INPUT:
2632 case ADDR_VEC:
2633 case ADDR_DIFF_VEC:
2634 case RETURN:
2635 return x;
2637 case REG:
2638 regno = REGNO (x);
2640 /* First handle the case where we encounter a bare register that
2641 is eliminable. Replace it with a PLUS. */
2642 if (regno < FIRST_PSEUDO_REGISTER)
2644 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2645 ep++)
2646 if (ep->from_rtx == x && ep->can_eliminate)
2648 if (! mem_mode
2649 /* Refs inside notes don't count for this purpose. */
2650 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2651 || GET_CODE (insn) == INSN_LIST)))
2652 ep->ref_outside_mem = 1;
2653 return plus_constant (ep->to_rtx, ep->previous_offset);
2657 else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
2658 && (reg_equiv_address[regno] || num_not_at_initial_offset))
2660 /* In this case, find_reloads would attempt to either use an
2661 incorrect address (if something is not at its initial offset)
2662 or substitute a replaced address into an insn (which loses
2663 if the offset is changed by some later action). So we simply
2664 return the replaced stack slot (assuming it is changed by
2665 elimination) and ignore the fact that this is actually a
2666 reference to the pseudo. Ensure we make a copy of the
2667 address in case it is shared. */
2668 new = eliminate_regs (reg_equiv_memory_loc[regno],
2669 mem_mode, insn);
2670 if (new != reg_equiv_memory_loc[regno])
2672 cannot_omit_stores[regno] = 1;
2673 return copy_rtx (new);
2676 return x;
2678 case PLUS:
2679 /* If this is the sum of an eliminable register and a constant, rework
2680 the sum. */
2681 if (GET_CODE (XEXP (x, 0)) == REG
2682 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2683 && CONSTANT_P (XEXP (x, 1)))
2685 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2686 ep++)
2687 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2689 if (! mem_mode
2690 /* Refs inside notes don't count for this purpose. */
2691 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2692 || GET_CODE (insn) == INSN_LIST)))
2693 ep->ref_outside_mem = 1;
2695 /* The only time we want to replace a PLUS with a REG (this
2696 occurs when the constant operand of the PLUS is the negative
2697 of the offset) is when we are inside a MEM. We won't want
2698 to do so at other times because that would change the
2699 structure of the insn in a way that reload can't handle.
2700 We special-case the commonest situation in
2701 eliminate_regs_in_insn, so just replace a PLUS with a
2702 PLUS here, unless inside a MEM. */
2703 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
2704 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2705 return ep->to_rtx;
2706 else
2707 return gen_rtx (PLUS, Pmode, ep->to_rtx,
2708 plus_constant (XEXP (x, 1),
2709 ep->previous_offset));
2712 /* If the register is not eliminable, we are done since the other
2713 operand is a constant. */
2714 return x;
2717 /* If this is part of an address, we want to bring any constant to the
2718 outermost PLUS. We will do this by doing register replacement in
2719 our operands and seeing if a constant shows up in one of them.
2721 We assume here this is part of an address (or a "load address" insn)
2722 since an eliminable register is not likely to appear in any other
2723 context.
2725 If we have (plus (eliminable) (reg)), we want to produce
2726 (plus (plus (replacement) (reg)) (const)). If this was part of a
2727 normal add insn, (plus (replacement) (reg)) will be pushed as a
2728 reload. This is the desired action. */
2731 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2732 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2734 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2736 /* If one side is a PLUS and the other side is a pseudo that
2737 didn't get a hard register but has a reg_equiv_constant,
2738 we must replace the constant here since it may no longer
2739 be in the position of any operand. */
2740 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2741 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2742 && reg_renumber[REGNO (new1)] < 0
2743 && reg_equiv_constant != 0
2744 && reg_equiv_constant[REGNO (new1)] != 0)
2745 new1 = reg_equiv_constant[REGNO (new1)];
2746 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2747 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2748 && reg_renumber[REGNO (new0)] < 0
2749 && reg_equiv_constant[REGNO (new0)] != 0)
2750 new0 = reg_equiv_constant[REGNO (new0)];
2752 new = form_sum (new0, new1);
2754 /* As above, if we are not inside a MEM we do not want to
2755 turn a PLUS into something else. We might try to do so here
2756 for an addition of 0 if we aren't optimizing. */
2757 if (! mem_mode && GET_CODE (new) != PLUS)
2758 return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx);
2759 else
2760 return new;
2763 return x;
2765 case MULT:
2766 /* If this is the product of an eliminable register and a
2767 constant, apply the distribute law and move the constant out
2768 so that we have (plus (mult ..) ..). This is needed in order
2769 to keep load-address insns valid. This case is pathological.
2770 We ignore the possibility of overflow here. */
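/* Illustration (hypothetical offsets): if fp eliminates to sp with a
   previous offset of 16, (mult fp 4) becomes (plus (mult sp 4) 64),
   keeping the expression in the shape a load-address insn expects. */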
2771 if (GET_CODE (XEXP (x, 0)) == REG
2772 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2773 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2774 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2775 ep++)
2776 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2778 if (! mem_mode
2779 /* Refs inside notes don't count for this purpose. */
2780 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2781 || GET_CODE (insn) == INSN_LIST)))
2782 ep->ref_outside_mem = 1;
2784 return
2785 plus_constant (gen_rtx (MULT, Pmode, ep->to_rtx, XEXP (x, 1)),
2786 ep->previous_offset * INTVAL (XEXP (x, 1)));
2789 /* ... fall through ... */
2791 case CALL:
2792 case COMPARE:
2793 case MINUS:
2794 case DIV: case UDIV:
2795 case MOD: case UMOD:
2796 case AND: case IOR: case XOR:
2797 case ROTATERT: case ROTATE:
2798 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2799 case NE: case EQ:
2800 case GE: case GT: case GEU: case GTU:
2801 case LE: case LT: case LEU: case LTU:
2803 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2804 rtx new1
2805 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;
2807 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2808 return gen_rtx (code, GET_MODE (x), new0, new1);
2810 return x;
2812 case EXPR_LIST:
2813 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2814 if (XEXP (x, 0))
2816 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2817 if (new != XEXP (x, 0))
2818 x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1));
2821 /* ... fall through ... */
2823 case INSN_LIST:
2824 /* Now do eliminations in the rest of the chain. If this was
2825 an EXPR_LIST, this might result in allocating more memory than is
2826 strictly needed, but it simplifies the code. */
2827 if (XEXP (x, 1))
2829 new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2830 if (new != XEXP (x, 1))
2831 return gen_rtx (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
2833 return x;
2835 case PRE_INC:
2836 case POST_INC:
2837 case PRE_DEC:
2838 case POST_DEC:
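/* The loop below keeps the elimination offsets in step with automatic
   pushes and pops.  For example (hypothetical), a (pre_dec sp) inside a
   4-byte MEM moves sp down by 4 (or by PUSH_ROUNDING (4) where that is
   defined), so the recorded offset for any elimination whose replacement
   is sp grows by 4; increments shrink it by the same amount. */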
2839 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2840 if (ep->to_rtx == XEXP (x, 0))
2842 int size = GET_MODE_SIZE (mem_mode);
2844 /* If more bytes than MEM_MODE are pushed, account for them. */
2845 #ifdef PUSH_ROUNDING
2846 if (ep->to_rtx == stack_pointer_rtx)
2847 size = PUSH_ROUNDING (size);
2848 #endif
2849 if (code == PRE_DEC || code == POST_DEC)
2850 ep->offset += size;
2851 else
2852 ep->offset -= size;
2855 /* Fall through to generic unary operation case. */
2856 case USE:
2857 case STRICT_LOW_PART:
2858 case NEG: case NOT:
2859 case SIGN_EXTEND: case ZERO_EXTEND:
2860 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2861 case FLOAT: case FIX:
2862 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2863 case ABS:
2864 case SQRT:
2865 case FFS:
2866 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2867 if (new != XEXP (x, 0))
2868 return gen_rtx (code, GET_MODE (x), new);
2869 return x;
2871 case SUBREG:
2872 /* Similar to above processing, but preserve SUBREG_WORD.
2873 Convert (subreg (mem)) to (mem) if not paradoxical.
2874 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2875 pseudo didn't get a hard reg, we must replace this with the
2876 eliminated version of the memory location because push_reloads
2877 may do the replacement in certain circumstances. */
2878 if (GET_CODE (SUBREG_REG (x)) == REG
2879 && (GET_MODE_SIZE (GET_MODE (x))
2880 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2881 && reg_equiv_memory_loc != 0
2882 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2884 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
2885 mem_mode, insn);
2887 /* If we didn't change anything, we must retain the pseudo. */
2888 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
2889 new = XEXP (x, 0);
2890 else
2891 /* Otherwise, ensure NEW isn't shared in case we have to reload
2892 it. */
2893 new = copy_rtx (new);
2895 else
2896 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);
2898 if (new != XEXP (x, 0))
2900 if (GET_CODE (new) == MEM
2901 && (GET_MODE_SIZE (GET_MODE (x))
2902 <= GET_MODE_SIZE (GET_MODE (new)))
2903 #ifdef LOAD_EXTEND_OP
2904 /* On these machines we will be reloading what is
2905 inside the SUBREG if it originally was a pseudo and
2906 the inner and outer modes are both a word or
2907 smaller. So leave the SUBREG then. */
2908 && ! (GET_CODE (SUBREG_REG (x)) == REG
2909 && GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
2910 && GET_MODE_SIZE (GET_MODE (new)) <= UNITS_PER_WORD)
2911 #endif
2914 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2915 enum machine_mode mode = GET_MODE (x);
2917 #if BYTES_BIG_ENDIAN
2918 offset += (MIN (UNITS_PER_WORD,
2919 GET_MODE_SIZE (GET_MODE (new)))
2920 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2921 #endif
2923 PUT_MODE (new, mode);
2924 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
2925 return new;
2927 else
2928 return gen_rtx (SUBREG, GET_MODE (x), new, SUBREG_WORD (x));
2931 return x;
2933 case CLOBBER:
2934 /* If clobbering a register that is the replacement register for an
2935 elimination we still think can be performed, note that it cannot
2936 be performed. Otherwise, we need not be concerned about it. */
2937 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2938 if (ep->to_rtx == XEXP (x, 0))
2939 ep->can_eliminate = 0;
2941 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2942 if (new != XEXP (x, 0))
2943 return gen_rtx (code, GET_MODE (x), new);
2944 return x;
2946 case ASM_OPERANDS:
2948 rtx *temp_vec;
2949 /* Properly handle sharing input and constraint vectors. */
2950 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
2952 /* When we come to a new vector not seen before,
2953 scan all its elements; keep the old vector if none
2954 of them changes; otherwise, make a copy. */
2955 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
2956 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
2957 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2958 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
2959 mem_mode, insn);
2961 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2962 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
2963 break;
2965 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
2966 new_asm_operands_vec = old_asm_operands_vec;
2967 else
2968 new_asm_operands_vec
2969 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
2972 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
2973 if (new_asm_operands_vec == old_asm_operands_vec)
2974 return x;
2976 new = gen_rtx (ASM_OPERANDS, VOIDmode, ASM_OPERANDS_TEMPLATE (x),
2977 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2978 ASM_OPERANDS_OUTPUT_IDX (x), new_asm_operands_vec,
2979 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
2980 ASM_OPERANDS_SOURCE_FILE (x),
2981 ASM_OPERANDS_SOURCE_LINE (x));
2982 new->volatil = x->volatil;
2983 return new;
2986 case SET:
2987 /* Check for setting a register that we know about. */
2988 if (GET_CODE (SET_DEST (x)) == REG)
2990 /* See if this is setting the replacement register for an
2991 elimination.
2993 If DEST is the hard frame pointer, we do nothing because we
2994 assume that all assignments to the frame pointer are for
2995 non-local gotos and are being done at a time when they are valid
2996 and do not disturb anything else. Some machines want to
2997 eliminate a fake argument pointer (or even a fake frame pointer)
2998 with either the real frame or the stack pointer. Assignments to
2999 the hard frame pointer must not prevent this elimination. */
3001 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3002 ep++)
3003 if (ep->to_rtx == SET_DEST (x)
3004 && SET_DEST (x) != hard_frame_pointer_rtx)
3006 /* If it is being incremented, adjust the offset. Otherwise,
3007 this elimination can't be done. */
3008 rtx src = SET_SRC (x);
3010 if (GET_CODE (src) == PLUS
3011 && XEXP (src, 0) == SET_DEST (x)
3012 && GET_CODE (XEXP (src, 1)) == CONST_INT)
3013 ep->offset -= INTVAL (XEXP (src, 1));
3014 else
3015 ep->can_eliminate = 0;
3018 /* Now check to see we are assigning to a register that can be
3019 eliminated. If so, it must be as part of a PARALLEL, since we
3020 will not have been called if this is a single SET. So indicate
3021 that we can no longer eliminate this reg. */
3022 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3023 ep++)
3024 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
3025 ep->can_eliminate = 0;
3028 /* Now avoid the loop below in this common case. */
3030 rtx new0 = eliminate_regs (SET_DEST (x), 0, insn);
3031 rtx new1 = eliminate_regs (SET_SRC (x), 0, insn);
3033 /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
3034 write a CLOBBER insn. */
3035 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
3036 && insn != 0 && GET_CODE (insn) != EXPR_LIST
3037 && GET_CODE (insn) != INSN_LIST)
3038 emit_insn_after (gen_rtx (CLOBBER, VOIDmode, SET_DEST (x)), insn);
3040 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
3041 return gen_rtx (SET, VOIDmode, new0, new1);
3044 return x;
3046 case MEM:
3047 /* Our only special processing is to pass the mode of the MEM to our
3048 recursive call and copy the flags. While we are here, handle this
3049 case more efficiently. */
3050 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn);
3051 if (new != XEXP (x, 0))
3053 new = gen_rtx (MEM, GET_MODE (x), new);
3054 new->volatil = x->volatil;
3055 new->unchanging = x->unchanging;
3056 new->in_struct = x->in_struct;
3057 return new;
3059 else
3060 return x;
3063 /* Process each of our operands recursively. If any have changed, make a
3064 copy of the rtx. */
3065 fmt = GET_RTX_FORMAT (code);
3066 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3068 if (*fmt == 'e')
3070 new = eliminate_regs (XEXP (x, i), mem_mode, insn);
3071 if (new != XEXP (x, i) && ! copied)
3073 rtx new_x = rtx_alloc (code);
3074 bcopy ((char *) x, (char *) new_x,
3075 (sizeof (*new_x) - sizeof (new_x->fld)
3076 + sizeof (new_x->fld[0]) * GET_RTX_LENGTH (code)));
3077 x = new_x;
3078 copied = 1;
3080 XEXP (x, i) = new;
3082 else if (*fmt == 'E')
3084 int copied_vec = 0;
3085 for (j = 0; j < XVECLEN (x, i); j++)
3087 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
3088 if (new != XVECEXP (x, i, j) && ! copied_vec)
3090 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
3091 &XVECEXP (x, i, 0));
3092 if (! copied)
3094 rtx new_x = rtx_alloc (code);
3095 bcopy ((char *) x, (char *) new_x,
3096 (sizeof (*new_x) - sizeof (new_x->fld)
3097 + (sizeof (new_x->fld[0])
3098 * GET_RTX_LENGTH (code))));
3099 x = new_x;
3100 copied = 1;
3102 XVEC (x, i) = new_v;
3103 copied_vec = 1;
3105 XVECEXP (x, i, j) = new;
3110 return x;
3113 /* Scan INSN and eliminate all eliminable registers in it.
3115 If REPLACE is nonzero, do the replacement destructively. Also
3116 delete the insn as dead if it is setting an eliminable register.
3118 If REPLACE is zero, do all our allocations in reload_obstack.
3120 If no eliminations were done and this insn doesn't require any elimination
3121 processing (these are not identical conditions: it might be updating sp,
3122 but not referencing fp; this needs to be seen during reload_as_needed so
3123 that the offset between fp and sp can be taken into consideration), zero
3124 is returned. Otherwise, 1 is returned. */
3126 static int
3127 eliminate_regs_in_insn (insn, replace)
3128 rtx insn;
3129 int replace;
3131 rtx old_body = PATTERN (insn);
3132 rtx old_set = single_set (insn);
3133 rtx new_body;
3134 int val = 0;
3135 struct elim_table *ep;
3137 if (! replace)
3138 push_obstacks (&reload_obstack, &reload_obstack);
3140 if (old_set != 0 && GET_CODE (SET_DEST (old_set)) == REG
3141 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3143 /* Check for setting an eliminable register. */
3144 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3145 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3147 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3148 /* If this is setting the frame pointer register to the
3149 hardware frame pointer register and this is an elimination
3150 that will be done (tested above), this insn is really
3151 adjusting the frame pointer downward to compensate for
3152 the adjustment done before a nonlocal goto. */
3153 if (ep->from == FRAME_POINTER_REGNUM
3154 && ep->to == HARD_FRAME_POINTER_REGNUM)
3156 rtx src = SET_SRC (old_set);
3157 int offset, ok = 0;
3159 if (src == ep->to_rtx)
3160 offset = 0, ok = 1;
3161 else if (GET_CODE (src) == PLUS
3162 && GET_CODE (XEXP (src, 0)) == CONST_INT)
3163 offset = INTVAL (XEXP (src, 0)), ok = 1;
3165 if (ok)
3167 if (replace)
3169 rtx src
3170 = plus_constant (ep->to_rtx, offset - ep->offset);
3172 /* First see if this insn remains valid when we
3173 make the change. If not, keep the INSN_CODE
3174 the same and let reload fix it up. */
3175 validate_change (insn, &SET_SRC (old_set), src, 1);
3176 validate_change (insn, &SET_DEST (old_set),
3177 ep->to_rtx, 1);
3178 if (! apply_change_group ())
3180 SET_SRC (old_set) = src;
3181 SET_DEST (old_set) = ep->to_rtx;
3185 val = 1;
3186 goto done;
3189 #endif
3191 /* In this case this insn isn't serving a useful purpose. We
3192 will delete it in reload_as_needed once we know that this
3193 elimination is, in fact, being done.
3195 If REPLACE isn't set, we can't delete this insn, but needn't
3196 process it since it won't be used unless something changes. */
3197 if (replace)
3198 delete_dead_insn (insn);
3199 val = 1;
3200 goto done;
3203 /* Check for (set (reg) (plus (reg from) (offset))) where the offset
3204 in the insn is the negative of the offset in FROM. Substitute
3205 (set (reg) (reg to)) for the insn and change its code.
3207 We have to do this here, rather than in eliminate_regs, so that we can
3208 change the insn code. */
3210 if (GET_CODE (SET_SRC (old_set)) == PLUS
3211 && GET_CODE (XEXP (SET_SRC (old_set), 0)) == REG
3212 && GET_CODE (XEXP (SET_SRC (old_set), 1)) == CONST_INT)
3213 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3214 ep++)
3215 if (ep->from_rtx == XEXP (SET_SRC (old_set), 0)
3216 && ep->can_eliminate)
3218 /* We must stop at the first elimination that will be used.
3219 If this one would replace the PLUS with a REG, do it
3220 now. Otherwise, quit the loop and let eliminate_regs
3221 do its normal replacement. */
3222 if (ep->offset == - INTVAL (XEXP (SET_SRC (old_set), 1)))
3224 /* We assume here that we don't need a PARALLEL of
3225 any CLOBBERs for this assignment. There's not
3226 much we can do if we do need it. */
3227 PATTERN (insn) = gen_rtx (SET, VOIDmode,
3228 SET_DEST (old_set), ep->to_rtx);
3229 INSN_CODE (insn) = -1;
3230 val = 1;
3231 goto done;
3234 break;
3238 old_asm_operands_vec = 0;
3240 /* Replace the body of this insn with a substituted form. If we changed
3241 something, return non-zero.
3243 If we are replacing a body that was a (set X (plus Y Z)), try to
3244 re-recognize the insn. We do this in case we had a simple addition
3245 but now can do this as a load-address. This saves an insn in this
3246 common case. */
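/* For instance (hypothetical elimination), (set (reg R) (plus fp 4)) may
   come back from eliminate_regs as (set (reg R) (plus sp 20)); if the
   target recognizes that directly as a load-address insn, re-recognizing
   here keeps it a single insn instead of an add that needs reloads. */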
3248 new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX);
3249 if (new_body != old_body)
3251 /* If we aren't replacing things permanently and we changed something,
3252 make another copy to ensure that all the RTL is new. Otherwise
3253 things can go wrong if find_reloads swaps commutative operands
3254 and one is inside RTL that has been copied while the other is not. */
3256 /* Don't copy an asm_operands because (1) there's no need and (2)
3257 copy_rtx can't do it properly when there are multiple outputs. */
3258 if (! replace && asm_noperands (old_body) < 0)
3259 new_body = copy_rtx (new_body);
3261 /* If we had a move insn but now we don't, rerecognize it. This will
3262 cause spurious re-recognition if the old move had a PARALLEL since
3263 the new one still will, but we can't call single_set without
3264 having put NEW_BODY into the insn and the re-recognition won't
3265 hurt in this rare case. */
3266 if (old_set != 0
3267 && ((GET_CODE (SET_SRC (old_set)) == REG
3268 && (GET_CODE (new_body) != SET
3269 || GET_CODE (SET_SRC (new_body)) != REG))
3270 /* If this was a load from or store to memory, compare
3271 the MEM in recog_operand to the one in the insn. If they
3272 are not equal, then rerecognize the insn. */
3273 || (old_set != 0
3274 && ((GET_CODE (SET_SRC (old_set)) == MEM
3275 && SET_SRC (old_set) != recog_operand[1])
3276 || (GET_CODE (SET_DEST (old_set)) == MEM
3277 && SET_DEST (old_set) != recog_operand[0])))
3278 /* If this was an add insn before, rerecognize. */
3279 || GET_CODE (SET_SRC (old_set)) == PLUS))
3281 if (! validate_change (insn, &PATTERN (insn), new_body, 0))
3282 /* If recognition fails, store the new body anyway.
3283 It's normal to have recognition failures here
3284 due to bizarre memory addresses; reloading will fix them. */
3285 PATTERN (insn) = new_body;
3287 else
3288 PATTERN (insn) = new_body;
3290 val = 1;
3293 /* Loop through all elimination pairs. See if any have changed and
3294 recalculate the number not at initial offset.
3296 Compute the maximum offset (minimum offset if the stack does not
3297 grow downward) for each elimination pair.
3299    We also detect cases where register elimination cannot be done,
3300 namely, if a register would be both changed and referenced outside a MEM
3301 in the resulting insn since such an insn is often undefined and, even if
3302 not, we cannot know what meaning will be given to it. Note that it is
3303 valid to have a register used in an address in an insn that changes it
3304 (presumably with a pre- or post-increment or decrement).
3306 If anything changes, return nonzero. */
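   /* For instance, on a machine where the frame pointer is eliminated in
      favor of the stack pointer, an insn that pushes arguments moves the
      stack pointer and so changes the offset of that elimination until the
      arguments are popped again; num_not_at_initial_offset counts how many
      eliminations currently differ from their value at function entry.  */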
3308 num_not_at_initial_offset = 0;
3309 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3311 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3312 ep->can_eliminate = 0;
3314 ep->ref_outside_mem = 0;
3316 if (ep->previous_offset != ep->offset)
3317 val = 1;
3319 ep->previous_offset = ep->offset;
3320 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3321 num_not_at_initial_offset++;
3323 #ifdef STACK_GROWS_DOWNWARD
3324 ep->max_offset = MAX (ep->max_offset, ep->offset);
3325 #else
3326 ep->max_offset = MIN (ep->max_offset, ep->offset);
3327 #endif
3330 done:
3331   /* If we changed something, perform elimination in REG_NOTES.  This is
3332 needed even when REPLACE is zero because a REG_DEAD note might refer
3333 to a register that we eliminate and could cause a different number
3334 of spill registers to be needed in the final reload pass than in
3335 the pre-passes. */
3336 if (val && REG_NOTES (insn) != 0)
3337 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));
3339 if (! replace)
3340 pop_obstacks ();
3342 return val;
3345 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3346 replacement we currently believe is valid, mark it as not eliminable if X
3347 modifies DEST in any way other than by adding a constant integer to it.
3349    If DEST is the hard frame pointer, we do nothing because we assume that
3350 all assignments to the hard frame pointer are nonlocal gotos and are being
3351 done at a time when they are valid and do not disturb anything else.
3352 Some machines want to eliminate a fake argument pointer with either the
3353 frame or stack pointer. Assignments to the hard frame pointer must not
3354 prevent this elimination.
3356 Called via note_stores from reload before starting its passes to scan
3357 the insns of the function. */
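/* For example, an insn such as
       (set (reg sp) (plus (reg sp) (const_int 8)))
   leaves the stack pointer usable as an elimination target, but
       (set (reg sp) (reg N))
   changes it in a way we cannot account for, so any elimination whose
   target is the stack pointer must be abandoned.  */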
3359 static void
3360 mark_not_eliminable (dest, x)
3361 rtx dest;
3362 rtx x;
3364 register int i;
3366 /* A SUBREG of a hard register here is just changing its mode. We should
3367 not see a SUBREG of an eliminable hard register, but check just in
3368 case. */
3369 if (GET_CODE (dest) == SUBREG)
3370 dest = SUBREG_REG (dest);
3372 if (dest == hard_frame_pointer_rtx)
3373 return;
3375 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3376 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3377 && (GET_CODE (x) != SET
3378 || GET_CODE (SET_SRC (x)) != PLUS
3379 || XEXP (SET_SRC (x), 0) != dest
3380 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3382 reg_eliminate[i].can_eliminate_previous
3383 = reg_eliminate[i].can_eliminate = 0;
3384 num_eliminable--;
3388 /* Kick all pseudos out of hard register REGNO.
3389 If GLOBAL is nonzero, try to find someplace else to put them.
3390 If DUMPFILE is nonzero, log actions taken on that file.
3392 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3393    because we found we can't eliminate some register.  In that case, no pseudos
3394 are allowed to be in the register, even if they are only in a block that
3395 doesn't require spill registers, unlike the case when we are spilling this
3396 hard reg to produce another spill register.
3398 Return nonzero if any pseudos needed to be kicked out. */
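/* For example, if we discover that the frame pointer cannot be eliminated
   after all, no pseudo may remain allocated to it; every such pseudo must
   be evicted, and CANT_ELIMINATE is passed as nonzero so that even pseudos
   confined to a single basic block (which would otherwise be left alone)
   are kicked out.  */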
3400 static int
3401 spill_hard_reg (regno, global, dumpfile, cant_eliminate)
3402 register int regno;
3403 int global;
3404 FILE *dumpfile;
3405 int cant_eliminate;
3407 enum reg_class class = REGNO_REG_CLASS (regno);
3408 int something_changed = 0;
3409 register int i;
3411 SET_HARD_REG_BIT (forbidden_regs, regno);
3413 if (cant_eliminate)
3414 regs_ever_live[regno] = 1;
3416 /* Spill every pseudo reg that was allocated to this reg
3417 or to something that overlaps this reg. */
3419 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3420 if (reg_renumber[i] >= 0
3421 && reg_renumber[i] <= regno
3422 && (reg_renumber[i]
3423 + HARD_REGNO_NREGS (reg_renumber[i],
3424 PSEUDO_REGNO_MODE (i))
3425 > regno))
3427 /* If this register belongs solely to a basic block which needed no
3428 spilling of any class that this register is contained in,
3429 leave it be, unless we are spilling this register because
3430 it was a hard register that can't be eliminated. */
3432 if (! cant_eliminate
3433 && basic_block_needs[0]
3434 && reg_basic_block[i] >= 0
3435 && basic_block_needs[(int) class][reg_basic_block[i]] == 0)
3437 enum reg_class *p;
3439 for (p = reg_class_superclasses[(int) class];
3440 *p != LIM_REG_CLASSES; p++)
3441 if (basic_block_needs[(int) *p][reg_basic_block[i]] > 0)
3442 break;
3444 if (*p == LIM_REG_CLASSES)
3445 continue;
3448 /* Mark it as no longer having a hard register home. */
3449 reg_renumber[i] = -1;
3450 /* We will need to scan everything again. */
3451 something_changed = 1;
3452 if (global)
3453 retry_global_alloc (i, forbidden_regs);
3455 alter_reg (i, regno);
3456 if (dumpfile)
3458 if (reg_renumber[i] == -1)
3459 fprintf (dumpfile, " Register %d now on stack.\n\n", i);
3460 else
3461 fprintf (dumpfile, " Register %d now in %d.\n\n",
3462 i, reg_renumber[i]);
3465 for (i = 0; i < scratch_list_length; i++)
3467 if (scratch_list[i] && REGNO (scratch_list[i]) == regno)
3469 if (! cant_eliminate && basic_block_needs[0]
3470 && ! basic_block_needs[(int) class][scratch_block[i]])
3472 enum reg_class *p;
3474 for (p = reg_class_superclasses[(int) class];
3475 *p != LIM_REG_CLASSES; p++)
3476 if (basic_block_needs[(int) *p][scratch_block[i]] > 0)
3477 break;
3479 if (*p == LIM_REG_CLASSES)
3480 continue;
3482 PUT_CODE (scratch_list[i], SCRATCH);
3483 scratch_list[i] = 0;
3484 something_changed = 1;
3485 continue;
3489 return something_changed;
3492 /* Find all paradoxical subregs within X and update reg_max_ref_width.
3493 Also mark any hard registers used to store user variables as
3494 forbidden from being used for spill registers. */
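/* A paradoxical subreg is one whose mode is wider than the mode of the
   register it contains, e.g. (subreg:SI (reg:QI N) 0), which accesses
   pseudo N in SImode even though N itself is QImode.  If such a pseudo
   ends up in a stack slot, the slot must accommodate the wider access,
   so the size of the outer mode is recorded in reg_max_ref_width.  */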
3496 static void
3497 scan_paradoxical_subregs (x)
3498 register rtx x;
3500 register int i;
3501 register char *fmt;
3502 register enum rtx_code code = GET_CODE (x);
3504 switch (code)
3506 case REG:
3507 #ifdef SMALL_REGISTER_CLASSES
3508 if (REGNO (x) < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
3509 SET_HARD_REG_BIT (forbidden_regs, REGNO (x));
3510 #endif
3511 return;
3513 case CONST_INT:
3514 case CONST:
3515 case SYMBOL_REF:
3516 case LABEL_REF:
3517 case CONST_DOUBLE:
3518 case CC0:
3519 case PC:
3520 case USE:
3521 case CLOBBER:
3522 return;
3524 case SUBREG:
3525 if (GET_CODE (SUBREG_REG (x)) == REG
3526 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3527 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3528 = GET_MODE_SIZE (GET_MODE (x));
3529 return;
3532 fmt = GET_RTX_FORMAT (code);
3533 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3535 if (fmt[i] == 'e')
3536 scan_paradoxical_subregs (XEXP (x, i));
3537 else if (fmt[i] == 'E')
3539 register int j;
3540 for (j = XVECLEN (x, i) - 1; j >=0; j--)
3541 scan_paradoxical_subregs (XVECEXP (x, i, j));
3546 static int
3547 hard_reg_use_compare (p1, p2)
3548 struct hard_reg_n_uses *p1, *p2;
3550 int tem = p1->uses - p2->uses;
3551 if (tem != 0) return tem;
3552 /* If regs are equally good, sort by regno,
3553 so that the results of qsort leave nothing to chance. */
3554 return p1->regno - p2->regno;
3557 /* Choose the order to consider regs for use as reload registers
3558 based on how much trouble would be caused by spilling one.
3559 Store them in order of decreasing preference in potential_reload_regs. */
3561 static void
3562 order_regs_for_reload ()
3564 register int i;
3565 register int o = 0;
3566 int large = 0;
3568 struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];
3570 CLEAR_HARD_REG_SET (bad_spill_regs);
3572 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3573 potential_reload_regs[i] = -1;
3575 /* Count number of uses of each hard reg by pseudo regs allocated to it
3576 and then order them by decreasing use. */
3578 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3580 hard_reg_n_uses[i].uses = 0;
3581 hard_reg_n_uses[i].regno = i;
3584 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3586 int regno = reg_renumber[i];
3587 if (regno >= 0)
3589 int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
3590 while (regno < lim)
3591 hard_reg_n_uses[regno++].uses += reg_n_refs[i];
3593 large += reg_n_refs[i];
3596 /* Now fixed registers (which cannot safely be used for reloading)
3597 get a very high use count so they will be considered least desirable.
3598 Registers used explicitly in the rtl code are almost as bad. */
3600 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3602 if (fixed_regs[i])
3604 hard_reg_n_uses[i].uses += 2 * large + 2;
3605 SET_HARD_REG_BIT (bad_spill_regs, i);
3607 else if (regs_explicitly_used[i])
3609 hard_reg_n_uses[i].uses += large + 1;
3610 #ifndef SMALL_REGISTER_CLASSES
3611 /* ??? We are doing this here because of the potential that
3612 bad code may be generated if a register explicitly used in
3613 an insn was used as a spill register for that insn. But
3614            not using these as spill registers may lose on some machines.
3615 We'll have to see how this works out. */
3616 SET_HARD_REG_BIT (bad_spill_regs, i);
3617 #endif
3620 hard_reg_n_uses[HARD_FRAME_POINTER_REGNUM].uses += 2 * large + 2;
3621 SET_HARD_REG_BIT (bad_spill_regs, HARD_FRAME_POINTER_REGNUM);
3623 #ifdef ELIMINABLE_REGS
3624 /* If registers other than the frame pointer are eliminable, mark them as
3625 poor choices. */
3626 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3628 hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
3629 SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
3631 #endif
3633 /* Prefer registers not so far used, for use in temporary loading.
3634 Among them, if REG_ALLOC_ORDER is defined, use that order.
3635 Otherwise, prefer registers not preserved by calls. */
3637 #ifdef REG_ALLOC_ORDER
3638 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3640 int regno = reg_alloc_order[i];
3642 if (hard_reg_n_uses[regno].uses == 0)
3643 potential_reload_regs[o++] = regno;
3645 #else
3646 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3648 if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
3649 potential_reload_regs[o++] = i;
3651 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3653 if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
3654 potential_reload_regs[o++] = i;
3656 #endif
3658 qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
3659 sizeof hard_reg_n_uses[0], hard_reg_use_compare);
3661 /* Now add the regs that are already used,
3662 preferring those used less often. The fixed and otherwise forbidden
3663 registers will be at the end of this list. */
3665 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3666 if (hard_reg_n_uses[i].uses != 0)
3667 potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
3670 /* Used in reload_as_needed to sort the spilled regs. */
3671 static int
3672 compare_spill_regs (r1, r2)
3673 short *r1, *r2;
3675 return *r1 < *r2 ? -1: 1;
3678 /* Reload pseudo-registers into hard regs around each insn as needed.
3679 Additional register load insns are output before the insn that needs it
3680 and perhaps store insns after insns that modify the reloaded pseudo reg.
3682 reg_last_reload_reg and reg_reloaded_contents keep track of
3683 which registers are already available in reload registers.
3684 We update these for the reloads that we perform,
3685 as the insns are scanned. */
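/* As an illustration of the book-keeping: if pseudo P was reloaded into
   hard reg H for one insn, reg_last_reload_reg[P] remembers H, so a later
   insn that needs P may reuse the copy already sitting in H instead of
   emitting another load, provided nothing in between has clobbered H or
   stored a new value into P.  */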
3687 static void
3688 reload_as_needed (first, live_known)
3689 rtx first;
3690 int live_known;
3692 register rtx insn;
3693 register int i;
3694 int this_block = 0;
3695 rtx x;
3696 rtx after_call = 0;
3698 bzero ((char *) spill_reg_rtx, sizeof spill_reg_rtx);
3699 bzero ((char *) spill_reg_store, sizeof spill_reg_store);
3700 reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
3701 bzero ((char *) reg_last_reload_reg, max_regno * sizeof (rtx));
3702 reg_has_output_reload = (char *) alloca (max_regno);
3703 for (i = 0; i < n_spills; i++)
3705 reg_reloaded_contents[i] = -1;
3706 reg_reloaded_insn[i] = 0;
3709 /* Reset all offsets on eliminable registers to their initial values. */
3710 #ifdef ELIMINABLE_REGS
3711 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3713 INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
3714 reg_eliminate[i].initial_offset);
3715 reg_eliminate[i].previous_offset
3716 = reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
3718 #else
3719 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
3720 reg_eliminate[0].previous_offset
3721 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
3722 #endif
3724 num_not_at_initial_offset = 0;
3726   /* Order the spilled regs, so that allocate_reload_reg can guarantee to
3727 pack registers with group needs. */
3728 if (n_spills > 1)
3730 qsort (spill_regs, n_spills, sizeof (short), compare_spill_regs);
3731 for (i = 0; i < n_spills; i++)
3732 spill_reg_order[spill_regs[i]] = i;
3735 for (insn = first; insn;)
3737 register rtx next = NEXT_INSN (insn);
3739 /* Notice when we move to a new basic block. */
3740 if (live_known && this_block + 1 < n_basic_blocks
3741 && insn == basic_block_head[this_block+1])
3742 ++this_block;
3744 /* If we pass a label, copy the offsets from the label information
3745 into the current offsets of each elimination. */
3746 if (GET_CODE (insn) == CODE_LABEL)
3748 num_not_at_initial_offset = 0;
3749 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3751 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
3752 = offsets_at[CODE_LABEL_NUMBER (insn)][i];
3753 if (reg_eliminate[i].can_eliminate
3754 && (reg_eliminate[i].offset
3755 != reg_eliminate[i].initial_offset))
3756 num_not_at_initial_offset++;
3760 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3762 rtx avoid_return_reg = 0;
3764 #ifdef SMALL_REGISTER_CLASSES
3765 /* Set avoid_return_reg if this is an insn
3766 that might use the value of a function call. */
3767 if (GET_CODE (insn) == CALL_INSN)
3769 if (GET_CODE (PATTERN (insn)) == SET)
3770 after_call = SET_DEST (PATTERN (insn));
3771 else if (GET_CODE (PATTERN (insn)) == PARALLEL
3772 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
3773 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
3774 else
3775 after_call = 0;
3777 else if (after_call != 0
3778 && !(GET_CODE (PATTERN (insn)) == SET
3779 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
3781 if (reg_referenced_p (after_call, PATTERN (insn)))
3782 avoid_return_reg = after_call;
3783 after_call = 0;
3785 #endif /* SMALL_REGISTER_CLASSES */
3787          /* If this is a USE or CLOBBER of a MEM, ensure that any
3788 references to eliminable registers have been removed. */
3790 if ((GET_CODE (PATTERN (insn)) == USE
3791 || GET_CODE (PATTERN (insn)) == CLOBBER)
3792 && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
3793 XEXP (XEXP (PATTERN (insn), 0), 0)
3794 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
3795 GET_MODE (XEXP (PATTERN (insn), 0)), NULL_RTX);
3797 /* If we need to do register elimination processing, do so.
3798 This might delete the insn, in which case we are done. */
3799 if (num_eliminable && GET_MODE (insn) == QImode)
3801 eliminate_regs_in_insn (insn, 1);
3802 if (GET_CODE (insn) == NOTE)
3804 insn = next;
3805 continue;
3809 if (GET_MODE (insn) == VOIDmode)
3810 n_reloads = 0;
3811 /* First find the pseudo regs that must be reloaded for this insn.
3812 This info is returned in the tables reload_... (see reload.h).
3813 Also modify the body of INSN by substituting RELOAD
3814 rtx's for those pseudo regs. */
3815 else
3817 bzero (reg_has_output_reload, max_regno);
3818 CLEAR_HARD_REG_SET (reg_is_output_reload);
3820 find_reloads (insn, 1, spill_indirect_levels, live_known,
3821 spill_reg_order);
3824 if (n_reloads > 0)
3826 rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
3827 rtx p;
3828 int class;
3830 /* If this block has not had spilling done for a
3831               particular class and we have any non-optionals that need a
3832 spill reg in that class, abort. */
3834 for (class = 0; class < N_REG_CLASSES; class++)
3835 if (basic_block_needs[class] != 0
3836 && basic_block_needs[class][this_block] == 0)
3837 for (i = 0; i < n_reloads; i++)
3838 if (class == (int) reload_reg_class[i]
3839 && reload_reg_rtx[i] == 0
3840 && ! reload_optional[i]
3841 && (reload_in[i] != 0 || reload_out[i] != 0
3842 || reload_secondary_p[i] != 0))
3843 abort ();
3845 /* Now compute which reload regs to reload them into. Perhaps
3846 reusing reload regs from previous insns, or else output
3847 load insns to reload them. Maybe output store insns too.
3848 Record the choices of reload reg in reload_reg_rtx. */
3849 choose_reload_regs (insn, avoid_return_reg);
3851 #ifdef SMALL_REGISTER_CLASSES
3852 /* Merge any reloads that we didn't combine for fear of
3853 increasing the number of spill registers needed but now
3854 discover can be safely merged. */
3855 merge_assigned_reloads (insn);
3856 #endif
3858 /* Generate the insns to reload operands into or out of
3859 their reload regs. */
3860 emit_reload_insns (insn);
3862 /* Substitute the chosen reload regs from reload_reg_rtx
3863 into the insn's body (or perhaps into the bodies of other
3864             load and store insns that we just made for reloading
3865 and that we moved the structure into). */
3866 subst_reloads ();
3868 /* If this was an ASM, make sure that all the reload insns
3869 we have generated are valid. If not, give an error
3870 and delete them. */
3872 if (asm_noperands (PATTERN (insn)) >= 0)
3873 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
3874 if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
3875 && (recog_memoized (p) < 0
3876 || (insn_extract (p),
3877 ! constrain_operands (INSN_CODE (p), 1))))
3879 error_for_asm (insn,
3880 "`asm' operand requires impossible reload");
3881 PUT_CODE (p, NOTE);
3882 NOTE_SOURCE_FILE (p) = 0;
3883 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3886 /* Any previously reloaded spilled pseudo reg, stored in this insn,
3887 is no longer validly lying around to save a future reload.
3888 Note that this does not detect pseudos that were reloaded
3889 for this insn in order to be stored in
3890 (obeying register constraints). That is correct; such reload
3891 registers ARE still valid. */
3892 note_stores (PATTERN (insn), forget_old_reloads_1);
3894 /* There may have been CLOBBER insns placed after INSN. So scan
3895 between INSN and NEXT and use them to forget old reloads. */
3896 for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
3897 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
3898 note_stores (PATTERN (x), forget_old_reloads_1);
3900 #ifdef AUTO_INC_DEC
3901 /* Likewise for regs altered by auto-increment in this insn.
3902 But note that the reg-notes are not changed by reloading:
3903 they still contain the pseudo-regs, not the spill regs. */
3904 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
3905 if (REG_NOTE_KIND (x) == REG_INC)
3907 /* See if this pseudo reg was reloaded in this insn.
3908 If so, its last-reload info is still valid
3909 because it is based on this insn's reload. */
3910 for (i = 0; i < n_reloads; i++)
3911 if (reload_out[i] == XEXP (x, 0))
3912 break;
3914 if (i == n_reloads)
3915 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
3917 #endif
3919 /* A reload reg's contents are unknown after a label. */
3920 if (GET_CODE (insn) == CODE_LABEL)
3921 for (i = 0; i < n_spills; i++)
3923 reg_reloaded_contents[i] = -1;
3924 reg_reloaded_insn[i] = 0;
3927 /* Don't assume a reload reg is still good after a call insn
3928 if it is a call-used reg. */
3929 else if (GET_CODE (insn) == CALL_INSN)
3930 for (i = 0; i < n_spills; i++)
3931 if (call_used_regs[spill_regs[i]])
3933 reg_reloaded_contents[i] = -1;
3934 reg_reloaded_insn[i] = 0;
3937 /* In case registers overlap, allow certain insns to invalidate
3938 particular hard registers. */
3940 #ifdef INSN_CLOBBERS_REGNO_P
3941 for (i = 0 ; i < n_spills ; i++)
3942 if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
3944 reg_reloaded_contents[i] = -1;
3945 reg_reloaded_insn[i] = 0;
3947 #endif
3949 insn = next;
3951 #ifdef USE_C_ALLOCA
3952 alloca (0);
3953 #endif
3957 /* Discard all record of any value reloaded from X,
3958 or reloaded in X from someplace else;
3959 unless X is an output reload reg of the current insn.
3961 X may be a hard reg (the reload reg)
3962 or it may be a pseudo reg that was reloaded from. */
3964 static void
3965 forget_old_reloads_1 (x, ignored)
3966 rtx x;
3967 rtx ignored;
3969 register int regno;
3970 int nr;
3971 int offset = 0;
3973 /* note_stores does give us subregs of hard regs. */
3974 while (GET_CODE (x) == SUBREG)
3976 offset += SUBREG_WORD (x);
3977 x = SUBREG_REG (x);
3980 if (GET_CODE (x) != REG)
3981 return;
3983 regno = REGNO (x) + offset;
3985 if (regno >= FIRST_PSEUDO_REGISTER)
3986 nr = 1;
3987 else
3989 int i;
3990 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
3991 /* Storing into a spilled-reg invalidates its contents.
3992 This can happen if a block-local pseudo is allocated to that reg
3993 and it wasn't spilled because this block's total need is 0.
3994 Then some insn might have an optional reload and use this reg. */
3995 for (i = 0; i < nr; i++)
3996 if (spill_reg_order[regno + i] >= 0
3997 /* But don't do this if the reg actually serves as an output
3998 reload reg in the current instruction. */
3999 && (n_reloads == 0
4000 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
4002 reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
4003 reg_reloaded_insn[spill_reg_order[regno + i]] = 0;
4007 /* Since value of X has changed,
4008 forget any value previously copied from it. */
4010 while (nr-- > 0)
4011 /* But don't forget a copy if this is the output reload
4012 that establishes the copy's validity. */
4013 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
4014 reg_last_reload_reg[regno + nr] = 0;
4017 /* For each reload, the mode of the reload register. */
4018 static enum machine_mode reload_mode[MAX_RELOADS];
4020 /* For each reload, the largest number of registers it will require. */
4021 static int reload_nregs[MAX_RELOADS];
4023 /* Comparison function for qsort to decide which of two reloads
4024 should be handled first. *P1 and *P2 are the reload numbers. */
4026 static int
4027 reload_reg_class_lower (p1, p2)
4028 short *p1, *p2;
4030 register int r1 = *p1, r2 = *p2;
4031 register int t;
4033 /* Consider required reloads before optional ones. */
4034 t = reload_optional[r1] - reload_optional[r2];
4035 if (t != 0)
4036 return t;
4038 /* Count all solitary classes before non-solitary ones. */
4039 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
4040 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
4041 if (t != 0)
4042 return t;
4044 /* Aside from solitaires, consider all multi-reg groups first. */
4045 t = reload_nregs[r2] - reload_nregs[r1];
4046 if (t != 0)
4047 return t;
4049 /* Consider reloads in order of increasing reg-class number. */
4050 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
4051 if (t != 0)
4052 return t;
4054 /* If reloads are equally urgent, sort by reload number,
4055 so that the results of qsort leave nothing to chance. */
4056 return r1 - r2;
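/* So, for example, among required reloads of the same class the one that
   needs the larger group of consecutive registers is allocated first, and
   optional reloads always sort after all required ones.  */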
4059 /* The following HARD_REG_SETs indicate when each hard register is
4060 used for a reload of various parts of the current insn. */
4062 /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4063 static HARD_REG_SET reload_reg_used;
4064 /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4065 static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4066 /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4067 static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4068 /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4069 static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4070 /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4071 static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4072 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4073 static HARD_REG_SET reload_reg_used_in_op_addr;
4074 /* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4075 static HARD_REG_SET reload_reg_used_in_op_addr_reload;
4076 /* If reg is in use for a RELOAD_FOR_INSN reload. */
4077 static HARD_REG_SET reload_reg_used_in_insn;
4078 /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4079 static HARD_REG_SET reload_reg_used_in_other_addr;
4081 /* If reg is in use as a reload reg for any sort of reload. */
4082 static HARD_REG_SET reload_reg_used_at_all;
4084 /* If reg is in use as an inherited reload.  We just mark the first register
4085 in the group. */
4086 static HARD_REG_SET reload_reg_used_for_inherit;
4088 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4089 TYPE. MODE is used to indicate how many consecutive regs are
4090 actually used. */
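/* For example, on a machine where MODE occupies two hard registers and
   REGNO is 4, both registers 4 and 5 are marked as in use.  */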
4092 static void
4093 mark_reload_reg_in_use (regno, opnum, type, mode)
4094 int regno;
4095 int opnum;
4096 enum reload_type type;
4097 enum machine_mode mode;
4099 int nregs = HARD_REGNO_NREGS (regno, mode);
4100 int i;
4102 for (i = regno; i < nregs + regno; i++)
4104 switch (type)
4106 case RELOAD_OTHER:
4107 SET_HARD_REG_BIT (reload_reg_used, i);
4108 break;
4110 case RELOAD_FOR_INPUT_ADDRESS:
4111 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4112 break;
4114 case RELOAD_FOR_OUTPUT_ADDRESS:
4115 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4116 break;
4118 case RELOAD_FOR_OPERAND_ADDRESS:
4119 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4120 break;
4122 case RELOAD_FOR_OPADDR_ADDR:
4123 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4124 break;
4126 case RELOAD_FOR_OTHER_ADDRESS:
4127 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4128 break;
4130 case RELOAD_FOR_INPUT:
4131 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4132 break;
4134 case RELOAD_FOR_OUTPUT:
4135 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4136 break;
4138 case RELOAD_FOR_INSN:
4139 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
4140 break;
4143 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4147 /* Similarly, but show REGNO is no longer in use for a reload. */
4149 static void
4150 clear_reload_reg_in_use (regno, opnum, type, mode)
4151 int regno;
4152 int opnum;
4153 enum reload_type type;
4154 enum machine_mode mode;
4156 int nregs = HARD_REGNO_NREGS (regno, mode);
4157 int i;
4159 for (i = regno; i < nregs + regno; i++)
4161 switch (type)
4163 case RELOAD_OTHER:
4164 CLEAR_HARD_REG_BIT (reload_reg_used, i);
4165 break;
4167 case RELOAD_FOR_INPUT_ADDRESS:
4168 CLEAR_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4169 break;
4171 case RELOAD_FOR_OUTPUT_ADDRESS:
4172 CLEAR_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4173 break;
4175 case RELOAD_FOR_OPERAND_ADDRESS:
4176 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4177 break;
4179 case RELOAD_FOR_OPADDR_ADDR:
4180 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4181 break;
4183 case RELOAD_FOR_OTHER_ADDRESS:
4184 CLEAR_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4185 break;
4187 case RELOAD_FOR_INPUT:
4188 CLEAR_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4189 break;
4191 case RELOAD_FOR_OUTPUT:
4192 CLEAR_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4193 break;
4195 case RELOAD_FOR_INSN:
4196 CLEAR_HARD_REG_BIT (reload_reg_used_in_insn, i);
4197 break;
4202 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
4203 specified by OPNUM and TYPE. */
4205 static int
4206 reload_reg_free_p (regno, opnum, type)
4207 int regno;
4208 int opnum;
4209 enum reload_type type;
4211 int i;
4213 /* In use for a RELOAD_OTHER means it's not available for anything except
4214 RELOAD_FOR_OTHER_ADDRESS. Recall that RELOAD_FOR_OTHER_ADDRESS is known
4215 to be used only for inputs. */
4217 if (type != RELOAD_FOR_OTHER_ADDRESS
4218 && TEST_HARD_REG_BIT (reload_reg_used, regno))
4219 return 0;
4221 switch (type)
4223 case RELOAD_OTHER:
4224 /* In use for anything except RELOAD_FOR_OTHER_ADDRESS means
4225 we can't use it for RELOAD_OTHER. */
4226 if (TEST_HARD_REG_BIT (reload_reg_used, regno)
4227 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4228 || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4229 return 0;
4231 for (i = 0; i < reload_n_operands; i++)
4232 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4233 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4234 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4235 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4236 return 0;
4238 return 1;
4240 case RELOAD_FOR_INPUT:
4241 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4242 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4243 return 0;
4245 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4246 return 0;
4248 /* If it is used for some other input, can't use it. */
4249 for (i = 0; i < reload_n_operands; i++)
4250 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4251 return 0;
4253 /* If it is used in a later operand's address, can't use it. */
4254 for (i = opnum + 1; i < reload_n_operands; i++)
4255 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4256 return 0;
4258 return 1;
4260 case RELOAD_FOR_INPUT_ADDRESS:
4261 /* Can't use a register if it is used for an input address for this
4262 operand or used as an input in an earlier one. */
4263 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno))
4264 return 0;
4266 for (i = 0; i < opnum; i++)
4267 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4268 return 0;
4270 return 1;
4272 case RELOAD_FOR_OUTPUT_ADDRESS:
4273 /* Can't use a register if it is used for an output address for this
4274 operand or used as an output in this or a later operand. */
4275 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4276 return 0;
4278 for (i = opnum; i < reload_n_operands; i++)
4279 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4280 return 0;
4282 return 1;
4284 case RELOAD_FOR_OPERAND_ADDRESS:
4285 for (i = 0; i < reload_n_operands; i++)
4286 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4287 return 0;
4289 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4290 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4292 case RELOAD_FOR_OPADDR_ADDR:
4293 for (i = 0; i < reload_n_operands; i++)
4294 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4295 return 0;
4297 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
4299 case RELOAD_FOR_OUTPUT:
4300 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4301 outputs, or an operand address for this or an earlier output. */
4302 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4303 return 0;
4305 for (i = 0; i < reload_n_operands; i++)
4306 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4307 return 0;
4309 for (i = 0; i <= opnum; i++)
4310 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
4311 return 0;
4313 return 1;
4315 case RELOAD_FOR_INSN:
4316 for (i = 0; i < reload_n_operands; i++)
4317 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4318 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4319 return 0;
4321 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4322 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4324 case RELOAD_FOR_OTHER_ADDRESS:
4325 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4327 abort ();
4330 /* Return 1 if the value in reload reg REGNO, as used by a reload
4331 needed for the part of the insn specified by OPNUM and TYPE,
4332 is not in use for a reload in any prior part of the insn.
4334 We can assume that the reload reg was already tested for availability
4335 at the time it is needed, and we should not check this again,
4336 in case the reg has already been marked in use. */
4338 static int
4339 reload_reg_free_before_p (regno, opnum, type)
4340 int regno;
4341 int opnum;
4342 enum reload_type type;
4344 int i;
4346 switch (type)
4348 case RELOAD_FOR_OTHER_ADDRESS:
4349 /* These always come first. */
4350 return 1;
4352 case RELOAD_OTHER:
4353 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4355 /* If this use is for part of the insn,
4356 check the reg is not in use for any prior part. It is tempting
4357       to try to do this by falling through from objects that occur
4358 later in the insn to ones that occur earlier, but that will not
4359 correctly take into account the fact that here we MUST ignore
4360 things that would prevent the register from being allocated in
4361 the first place, since we know that it was allocated. */
4363 case RELOAD_FOR_OUTPUT_ADDRESS:
4364 /* Earlier reloads are for earlier outputs or their addresses,
4365 any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
4366 RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
4367          RELOAD_OTHER).  */
4368 for (i = 0; i < opnum; i++)
4369 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4370 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4371 return 0;
4373 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4374 return 0;
4376 for (i = 0; i < reload_n_operands; i++)
4377 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4378 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4379 return 0;
4381 return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4382 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4383 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4385 case RELOAD_FOR_OUTPUT:
4386 /* This can't be used in the output address for this operand and
4387 anything that can't be used for it, except that we've already
4388 tested for RELOAD_FOR_INSN objects. */
4390 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4391 return 0;
4393 for (i = 0; i < opnum; i++)
4394 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4395 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4396 return 0;
4398 for (i = 0; i < reload_n_operands; i++)
4399 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4400 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4401 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4402 return 0;
4404 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4406 case RELOAD_FOR_OPERAND_ADDRESS:
4407 case RELOAD_FOR_OPADDR_ADDR:
4408 case RELOAD_FOR_INSN:
4409 /* These can't conflict with inputs, or each other, so all we have to
4410 test is input addresses and the addresses of OTHER items. */
4412 for (i = 0; i < reload_n_operands; i++)
4413 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4414 return 0;
4416 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4418 case RELOAD_FOR_INPUT:
4419 /* The only things earlier are the address for this and
4420 earlier inputs, other inputs (which we know we don't conflict
4421 with), and addresses of RELOAD_OTHER objects. */
4423 for (i = 0; i <= opnum; i++)
4424 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4425 return 0;
4427 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4429 case RELOAD_FOR_INPUT_ADDRESS:
4430 /* Similarly, all we have to check is for use in earlier inputs'
4431 addresses. */
4432 for (i = 0; i < opnum; i++)
4433 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4434 return 0;
4436 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4438 abort ();
4441 /* Return 1 if the value in reload reg REGNO, as used by a reload
4442 needed for the part of the insn specified by OPNUM and TYPE,
4443 is still available in REGNO at the end of the insn.
4445 We can assume that the reload reg was already tested for availability
4446 at the time it is needed, and we should not check this again,
4447 in case the reg has already been marked in use. */
4449 static int
4450 reload_reg_reaches_end_p (regno, opnum, type)
4451 int regno;
4452 int opnum;
4453 enum reload_type type;
4455 int i;
4457 switch (type)
4459 case RELOAD_OTHER:
4460 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4461 its value must reach the end. */
4462 return 1;
4464 /* If this use is for part of the insn,
4465 its value reaches if no subsequent part uses the same register.
4466 Just like the above function, don't try to do this with lots
4467 of fallthroughs. */
4469 case RELOAD_FOR_OTHER_ADDRESS:
4470 /* Here we check for everything else, since these don't conflict
4471 with anything else and everything comes later. */
4473 for (i = 0; i < reload_n_operands; i++)
4474 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4475 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4476 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4477 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4478 return 0;
4480 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4481 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4482 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4484 case RELOAD_FOR_INPUT_ADDRESS:
4485 /* Similar, except that we check only for this and subsequent inputs
4486 and the address of only subsequent inputs and we do not need
4487 to check for RELOAD_OTHER objects since they are known not to
4488 conflict. */
4490 for (i = opnum; i < reload_n_operands; i++)
4491 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4492 return 0;
4494 for (i = opnum + 1; i < reload_n_operands; i++)
4495 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4496 return 0;
4498 for (i = 0; i < reload_n_operands; i++)
4499 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4500 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4501 return 0;
4503 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4504 return 0;
4506 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4507 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4509 case RELOAD_FOR_INPUT:
4510 /* Similar to input address, except we start at the next operand for
4511 both input and input address and we do not check for
4512 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4513 would conflict. */
4515 for (i = opnum + 1; i < reload_n_operands; i++)
4516 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4517 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4518 return 0;
4520 /* ... fall through ... */
4522 case RELOAD_FOR_OPERAND_ADDRESS:
4523 /* Check outputs and their addresses. */
4525 for (i = 0; i < reload_n_operands; i++)
4526 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4527 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4528 return 0;
4530 return 1;
4532 case RELOAD_FOR_OPADDR_ADDR:
4533 for (i = 0; i < reload_n_operands; i++)
4534 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4535 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4536 return 0;
4538 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4539 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4541 case RELOAD_FOR_INSN:
4542 /* These conflict with other outputs with RELOAD_OTHER. So
4543 we need only check for output addresses. */
4545 opnum = -1;
4547 /* ... fall through ... */
4549 case RELOAD_FOR_OUTPUT:
4550 case RELOAD_FOR_OUTPUT_ADDRESS:
4551 /* We already know these can't conflict with a later output. So the
4552          only things to check are later output addresses.  */
4553 for (i = opnum + 1; i < reload_n_operands; i++)
4554 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
4555 return 0;
4557 return 1;
4560 abort ();
4563 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
4564 Return 0 otherwise.
4566 This function uses the same algorithm as reload_reg_free_p above. */
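/* For example, a RELOAD_FOR_INPUT reload may share its register with the
   RELOAD_FOR_INPUT_ADDRESS reload of the same or an earlier operand, since
   that address is no longer needed once the input value has been loaded,
   but it conflicts with the address reload of a later operand and with any
   RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_INSN reload.  */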
4568 static int
4569 reloads_conflict (r1, r2)
4570 int r1, r2;
4572 enum reload_type r1_type = reload_when_needed[r1];
4573 enum reload_type r2_type = reload_when_needed[r2];
4574 int r1_opnum = reload_opnum[r1];
4575 int r2_opnum = reload_opnum[r2];
4577 /* RELOAD_OTHER conflicts with everything except RELOAD_FOR_OTHER_ADDRESS. */
4579 if (r2_type == RELOAD_OTHER && r1_type != RELOAD_FOR_OTHER_ADDRESS)
4580 return 1;
4582 /* Otherwise, check conflicts differently for each type. */
4584 switch (r1_type)
4586 case RELOAD_FOR_INPUT:
4587 return (r2_type == RELOAD_FOR_INSN
4588 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
4589 || r2_type == RELOAD_FOR_OPADDR_ADDR
4590 || r2_type == RELOAD_FOR_INPUT
4591 || (r2_type == RELOAD_FOR_INPUT_ADDRESS && r2_opnum > r1_opnum));
4593 case RELOAD_FOR_INPUT_ADDRESS:
4594 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
4595 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4597 case RELOAD_FOR_OUTPUT_ADDRESS:
4598 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
4599 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4601 case RELOAD_FOR_OPERAND_ADDRESS:
4602 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
4603 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4605 case RELOAD_FOR_OPADDR_ADDR:
4606 return (r2_type == RELOAD_FOR_INPUT
4607 || r2_type == RELOAD_FOR_OPADDR_ADDR);
4609 case RELOAD_FOR_OUTPUT:
4610 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
4611 || (r2_type == RELOAD_FOR_OUTPUT_ADDRESS
4612 && r2_opnum >= r1_opnum));
4614 case RELOAD_FOR_INSN:
4615 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
4616 || r2_type == RELOAD_FOR_INSN
4617 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4619 case RELOAD_FOR_OTHER_ADDRESS:
4620 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
4622 case RELOAD_OTHER:
4623 return r2_type != RELOAD_FOR_OTHER_ADDRESS;
4625 default:
4626 abort ();
4630 /* Vector of reload-numbers showing the order in which the reloads should
4631 be processed. */
4632 short reload_order[MAX_RELOADS];
4634 /* Indexed by reload number, 1 if incoming value
4635 inherited from previous insns. */
4636 char reload_inherited[MAX_RELOADS];
4638 /* For an inherited reload, this is the insn the reload was inherited from,
4639 if we know it. Otherwise, this is 0. */
4640 rtx reload_inheritance_insn[MAX_RELOADS];
4642 /* If non-zero, this is a place to get the value of the reload,
4643 rather than using reload_in. */
4644 rtx reload_override_in[MAX_RELOADS];
4646 /* For each reload, the index in spill_regs of the spill register used,
4647 or -1 if we did not need one of the spill registers for this reload. */
4648 int reload_spill_index[MAX_RELOADS];
4650 /* Index of last register assigned as a spill register. We allocate in
4651    a round-robin fashion.  */
4653 static int last_spill_reg = 0;
4655 /* Find a spill register to use as a reload register for reload R.
4656 LAST_RELOAD is non-zero if this is the last reload for the insn being
4657 processed.
4659 Set reload_reg_rtx[R] to the register allocated.
4661 If NOERROR is nonzero, we return 1 if successful,
4662 or 0 if we couldn't find a spill reg and we didn't change anything. */
4664 static int
4665 allocate_reload_reg (r, insn, last_reload, noerror)
4666 int r;
4667 rtx insn;
4668 int last_reload;
4669 int noerror;
4671 int i;
4672 int pass;
4673 int count;
4674 rtx new;
4675 int regno;
4677 /* If we put this reload ahead, thinking it is a group,
4678 then insist on finding a group. Otherwise we can grab a
4679 reg that some other reload needs.
4680 (That can happen when we have a 68000 DATA_OR_FP_REG
4681 which is a group of data regs or one fp reg.)
4682 We need not be so restrictive if there are no more reloads
4683 for this insn.
4685 ??? Really it would be nicer to have smarter handling
4686 for that kind of reg class, where a problem like this is normal.
4687 Perhaps those classes should be avoided for reloading
4688 by use of more alternatives. */
4690 int force_group = reload_nregs[r] > 1 && ! last_reload;
4692 /* If we want a single register and haven't yet found one,
4693 take any reg in the right class and not in use.
4694 If we want a consecutive group, here is where we look for it.
4696 We use two passes so we can first look for reload regs to
4697 reuse, which are already in use for other reloads in this insn,
4698 and only then use additional registers.
4699 I think that maximizing reuse is needed to make sure we don't
4700 run out of reload regs. Suppose we have three reloads, and
4701 reloads A and B can share regs. These need two regs.
4702 Suppose A and B are given different regs.
4703 That leaves none for C. */
4704 for (pass = 0; pass < 2; pass++)
4706 /* I is the index in spill_regs.
4707 We advance it round-robin between insns to use all spill regs
4708 equally, so that inherited reloads have a chance
4709 of leapfrogging each other. Don't do this, however, when we have
4710 group needs and failure would be fatal; if we only have a relatively
4711 small number of spill registers, and more than one of them has
4712 group needs, then by starting in the middle, we may end up
4713 allocating the first one in such a way that we are not left with
4714 sufficient groups to handle the rest. */
4716 if (noerror || ! force_group)
4717 i = last_spill_reg;
4718 else
4719 i = -1;
4721 for (count = 0; count < n_spills; count++)
4723 int class = (int) reload_reg_class[r];
4725 i = (i + 1) % n_spills;
4727 if (reload_reg_free_p (spill_regs[i], reload_opnum[r],
4728 reload_when_needed[r])
4729 && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
4730 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
4731 /* Look first for regs to share, then for unshared. But
4732 don't share regs used for inherited reloads; they are
4733 the ones we want to preserve. */
4734 && (pass
4735 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
4736 spill_regs[i])
4737 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
4738 spill_regs[i]))))
4740 int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
4741 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
4742 (on 68000) got us two FP regs. If NR is 1,
4743 we would reject both of them. */
4744 if (force_group)
4745 nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
4746 /* If we need only one reg, we have already won. */
4747 if (nr == 1)
4749 /* But reject a single reg if we demand a group. */
4750 if (force_group)
4751 continue;
4752 break;
4754 /* Otherwise check that as many consecutive regs as we need
4755 are available here.
4756 Also, don't use for a group registers that are
4757 needed for nongroups. */
4758 if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
4759 while (nr > 1)
4761 regno = spill_regs[i] + nr - 1;
4762 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
4763 && spill_reg_order[regno] >= 0
4764 && reload_reg_free_p (regno, reload_opnum[r],
4765 reload_when_needed[r])
4766 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
4767 regno)))
4768 break;
4769 nr--;
4771 if (nr == 1)
4772 break;
4776 /* If we found something on pass 1, omit pass 2. */
4777 if (count < n_spills)
4778 break;
4781 /* We should have found a spill register by now. */
4782 if (count == n_spills)
4784 if (noerror)
4785 return 0;
4786 goto failure;
4789 /* I is the index in SPILL_REG_RTX of the reload register we are to
4790 allocate. Get an rtx for it and find its register number. */
4792 new = spill_reg_rtx[i];
4794 if (new == 0 || GET_MODE (new) != reload_mode[r])
4795 spill_reg_rtx[i] = new
4796 = gen_rtx (REG, reload_mode[r], spill_regs[i]);
4798 regno = true_regnum (new);
4800 /* Detect when the reload reg can't hold the reload mode.
4801      This used to be one `if', but the Sequent compiler can't handle that.  */
4802 if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
4804 enum machine_mode test_mode = VOIDmode;
4805 if (reload_in[r])
4806 test_mode = GET_MODE (reload_in[r]);
4807 /* If reload_in[r] has VOIDmode, it means we will load it
4808 in whatever mode the reload reg has: to wit, reload_mode[r].
4809 We have already tested that for validity. */
4810 /* Aside from that, we need to test that the expressions
4811 to reload from or into have modes which are valid for this
4812 reload register. Otherwise the reload insns would be invalid. */
4813 if (! (reload_in[r] != 0 && test_mode != VOIDmode
4814 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
4815 if (! (reload_out[r] != 0
4816 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
4818 /* The reg is OK. */
4819 last_spill_reg = i;
4821 /* Mark as in use for this insn the reload regs we use
4822 for this. */
4823 mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
4824 reload_when_needed[r], reload_mode[r]);
4826 reload_reg_rtx[r] = new;
4827 reload_spill_index[r] = i;
4828 return 1;
4832 /* The reg is not OK. */
4833 if (noerror)
4834 return 0;
4836 failure:
4837 if (asm_noperands (PATTERN (insn)) < 0)
4838 /* It's the compiler's fault. */
4839 abort ();
4841 /* It's the user's fault; the operand's mode and constraint
4842 don't match. Disable this reload so we don't crash in final. */
4843 error_for_asm (insn,
4844 "`asm' operand constraint incompatible with operand size");
4845 reload_in[r] = 0;
4846 reload_out[r] = 0;
4847 reload_reg_rtx[r] = 0;
4848 reload_optional[r] = 1;
4849 reload_secondary_p[r] = 1;
4851 return 1;
4854 /* Assign hard reg targets for the pseudo-registers we must reload
4855 into hard regs for this insn.
4856 Also output the instructions to copy them in and out of the hard regs.
4858 For machines with register classes, we are responsible for
4859 finding a reload reg in the proper class. */
4861 static void
4862 choose_reload_regs (insn, avoid_return_reg)
4863 rtx insn;
4864 rtx avoid_return_reg;
4866 register int i, j;
4867 int max_group_size = 1;
4868 enum reg_class group_class = NO_REGS;
4869 int inheritance;
4871 rtx save_reload_reg_rtx[MAX_RELOADS];
4872 char save_reload_inherited[MAX_RELOADS];
4873 rtx save_reload_inheritance_insn[MAX_RELOADS];
4874 rtx save_reload_override_in[MAX_RELOADS];
4875 int save_reload_spill_index[MAX_RELOADS];
4876 HARD_REG_SET save_reload_reg_used;
4877 HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4878 HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4879 HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4880 HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4881 HARD_REG_SET save_reload_reg_used_in_op_addr;
4882 HARD_REG_SET save_reload_reg_used_in_op_addr_reload;
4883 HARD_REG_SET save_reload_reg_used_in_insn;
4884 HARD_REG_SET save_reload_reg_used_in_other_addr;
4885 HARD_REG_SET save_reload_reg_used_at_all;
4887 bzero (reload_inherited, MAX_RELOADS);
4888 bzero ((char *) reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
4889 bzero ((char *) reload_override_in, MAX_RELOADS * sizeof (rtx));
4891 CLEAR_HARD_REG_SET (reload_reg_used);
4892 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
4893 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
4894 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
4895 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
4896 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
4898 for (i = 0; i < reload_n_operands; i++)
4900 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
4901 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
4902 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
4903 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
4906 #ifdef SMALL_REGISTER_CLASSES
4907 /* Don't bother with avoiding the return reg
4908 if we have no mandatory reload that could use it. */
4909 if (avoid_return_reg)
4911 int do_avoid = 0;
4912 int regno = REGNO (avoid_return_reg);
4913 int nregs
4914 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4915 int r;
4917 for (r = regno; r < regno + nregs; r++)
4918 if (spill_reg_order[r] >= 0)
4919 for (j = 0; j < n_reloads; j++)
4920 if (!reload_optional[j] && reload_reg_rtx[j] == 0
4921 && (reload_in[j] != 0 || reload_out[j] != 0
4922 || reload_secondary_p[j])
4924 TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
4925 do_avoid = 1;
4926 if (!do_avoid)
4927 avoid_return_reg = 0;
4929 #endif /* SMALL_REGISTER_CLASSES */
4931 #if 0 /* Not needed, now that we can always retry without inheritance. */
4932 /* See if we have more mandatory reloads than spill regs.
4933 If so, then we cannot risk optimizations that could prevent
4934 reloads from sharing one spill register.
4936 Since we will try finding a better register than reload_reg_rtx
4937 unless it is equal to reload_in or reload_out, count such reloads. */
4940 int tem = 0;
4941 #ifdef SMALL_REGISTER_CLASSES
4942 int tem = (avoid_return_reg != 0);
4943 #endif
4944 for (j = 0; j < n_reloads; j++)
4945 if (! reload_optional[j]
4946 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
4947 && (reload_reg_rtx[j] == 0
4948 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
4949 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
4950 tem++;
4951 if (tem > n_spills)
4952 must_reuse = 1;
4954 #endif
4956 #ifdef SMALL_REGISTER_CLASSES
4957 /* Don't use the subroutine call return reg for a reload
4958 if we are supposed to avoid it. */
4959 if (avoid_return_reg)
4961 int regno = REGNO (avoid_return_reg);
4962 int nregs
4963 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4964 int r;
4966 for (r = regno; r < regno + nregs; r++)
4967 if (spill_reg_order[r] >= 0)
4968 SET_HARD_REG_BIT (reload_reg_used, r);
4970 #endif /* SMALL_REGISTER_CLASSES */
4972 /* In order to be certain of getting the registers we need,
4973 we must sort the reloads into order of increasing register class.
4974 Then our grabbing of reload registers will parallel the process
4975 that provided the reload registers.
4977 Also note whether any of the reloads wants a consecutive group of regs.
4978 If so, record the maximum size of the group desired and what
4979 register class contains all the groups needed by this insn. */
4981 for (j = 0; j < n_reloads; j++)
4983 reload_order[j] = j;
4984 reload_spill_index[j] = -1;
4986 reload_mode[j]
4987 = (reload_inmode[j] == VOIDmode
4988 || (GET_MODE_SIZE (reload_outmode[j])
4989 > GET_MODE_SIZE (reload_inmode[j])))
4990 ? reload_outmode[j] : reload_inmode[j];
4992 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
4994 if (reload_nregs[j] > 1)
4996 max_group_size = MAX (reload_nregs[j], max_group_size);
4997 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
5000 /* If we have already decided to use a certain register,
5001 don't use it in another way. */
5002 if (reload_reg_rtx[j])
5003 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
5004 reload_when_needed[j], reload_mode[j]);
5007 if (n_reloads > 1)
5008 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
5010 bcopy ((char *) reload_reg_rtx, (char *) save_reload_reg_rtx,
5011 sizeof reload_reg_rtx);
5012 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
5013 bcopy ((char *) reload_inheritance_insn,
5014 (char *) save_reload_inheritance_insn,
5015 sizeof reload_inheritance_insn);
5016 bcopy ((char *) reload_override_in, (char *) save_reload_override_in,
5017 sizeof reload_override_in);
5018 bcopy ((char *) reload_spill_index, (char *) save_reload_spill_index,
5019 sizeof reload_spill_index);
5020 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
5021 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
5022 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
5023 reload_reg_used_in_op_addr);
5025 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr_reload,
5026 reload_reg_used_in_op_addr_reload);
5028 COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
5029 reload_reg_used_in_insn);
5030 COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
5031 reload_reg_used_in_other_addr);
5033 for (i = 0; i < reload_n_operands; i++)
5035 COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
5036 reload_reg_used_in_output[i]);
5037 COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
5038 reload_reg_used_in_input[i]);
5039 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
5040 reload_reg_used_in_input_addr[i]);
5041 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
5042 reload_reg_used_in_output_addr[i]);
5045 /* If -O, try first with inheritance, then turning it off.
5046 If not -O, don't do inheritance.
5047 Using inheritance when not optimizing leads to paradoxes
5048 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5049 because one side of the comparison might be inherited. */
5051 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
5053 /* Process the reloads in order of preference just found.
5054 Beyond this point, subregs can be found in reload_reg_rtx.
5056 This used to look for an existing reloaded home for all
5057 of the reloads, and only then perform any new reloads.
5058 But that could lose if the reloads were done out of reg-class order
5059 because a later reload with a looser constraint might have an old
5060 home in a register needed by an earlier reload with a tighter constraint.
5062 To solve this, we make two passes over the reloads, in the order
5063 described above. In the first pass we try to inherit a reload
5064 from a previous insn. If there is a later reload that needs a
5065 class that is a proper subset of the class being processed, we must
5066 also allocate a spill register during the first pass.
5068 Then make a second pass over the reloads to allocate any reloads
5069 that haven't been given registers yet. */
5071 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5073 for (j = 0; j < n_reloads; j++)
5075 register int r = reload_order[j];
5077 /* Ignore reloads that got marked inoperative. */
5078 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5079 continue;
5081 /* If find_reloads chose to use reload_in or reload_out as a reload
5082 register, we don't need to choose one. Otherwise, try even if it found
5083 one since we might save an insn if we find the value lying around. */
5084 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
5085 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
5086 || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
5087 continue;
5089 #if 0 /* No longer needed for correct operation.
5090 It might give better code, or might not; worth an experiment? */
5091 /* If this is an optional reload, we can't inherit from earlier insns
5092 until we are sure that any non-optional reloads have been allocated.
5093 The following code takes advantage of the fact that optional reloads
5094 are at the end of reload_order. */
5095 if (reload_optional[r] != 0)
5096 for (i = 0; i < j; i++)
5097 if ((reload_out[reload_order[i]] != 0
5098 || reload_in[reload_order[i]] != 0
5099 || reload_secondary_p[reload_order[i]])
5100 && ! reload_optional[reload_order[i]]
5101 && reload_reg_rtx[reload_order[i]] == 0)
5102 allocate_reload_reg (reload_order[i], insn, 0, inheritance);
5103 #endif
5105 /* First see if this pseudo is already available as reloaded
5106 for a previous insn. We cannot try to inherit for reloads
5107 that are smaller than the maximum number of registers needed
5108 for groups unless the register we would allocate cannot be used
5109 for the groups.
5111 We could check here to see if this is a secondary reload for
5112 an object that is already in a register of the desired class.
5113 This would avoid the need for the secondary reload register.
5114 But this is complex because we can't easily determine what
5115 objects might want to be loaded via this reload. So let a register
5116 be allocated here. In `emit_reload_insns' we suppress one of the
5117 loads in the case described above. */
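/* As a concrete sketch (hypothetical numbers): suppose the previous insn
   reloaded pseudo 105 into spill reg 2, so reg_last_reload_reg[105] is
   still (reg:SI 2) and reg_reloaded_contents for that spill reg is still
   105.  If this insn needs pseudo 105 as an input, the tests below let it
   reuse hard reg 2, marking it in use and setting reload_inherited,
   instead of emitting another load.  */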
5119 if (inheritance)
5121 register int regno = -1;
5122 enum machine_mode mode;
5124 if (reload_in[r] == 0)
5126 else if (GET_CODE (reload_in[r]) == REG)
5128 regno = REGNO (reload_in[r]);
5129 mode = GET_MODE (reload_in[r]);
5131 else if (GET_CODE (reload_in_reg[r]) == REG)
5133 regno = REGNO (reload_in_reg[r]);
5134 mode = GET_MODE (reload_in_reg[r]);
5136 #if 0
5137 /* This won't work, since REGNO can be a pseudo reg number.
5138 Also, it takes much more hair to keep track of all the things
5139 that can invalidate an inherited reload of part of a pseudoreg. */
5140 else if (GET_CODE (reload_in[r]) == SUBREG
5141 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
5142 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
5143 #endif
5145 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
5147 i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];
5149 if (reg_reloaded_contents[i] == regno
5150 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
5151 >= GET_MODE_SIZE (mode))
5152 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
5153 && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5154 spill_regs[i])
5155 && (reload_nregs[r] == max_group_size
5156 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
5157 spill_regs[i]))
5158 && reload_reg_free_p (spill_regs[i], reload_opnum[r],
5159 reload_when_needed[r])
5160 && reload_reg_free_before_p (spill_regs[i],
5161 reload_opnum[r],
5162 reload_when_needed[r]))
5164 /* If a group is needed, verify that all the subsequent
5165 registers still have their values intact. */
5166 int nr
5167 = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5168 int k;
5170 for (k = 1; k < nr; k++)
5171 if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
5172 != regno)
5173 break;
5175 if (k == nr)
5177 int i1;
5179 /* We found a register that contains the
5180 value we need. If this register is the
5181 same as an `earlyclobber' operand of the
5182 current insn, just mark it as a place to
5183 reload from since we can't use it as the
5184 reload register itself. */
5186 for (i1 = 0; i1 < n_earlyclobbers; i1++)
5187 if (reg_overlap_mentioned_for_reload_p
5188 (reg_last_reload_reg[regno],
5189 reload_earlyclobbers[i1]))
5190 break;
5192 if (i1 != n_earlyclobbers
5193 /* Don't really use the inherited spill reg
5194 if we need it wider than we've got it. */
5195 || (GET_MODE_SIZE (reload_mode[r])
5196 > GET_MODE_SIZE (mode)))
5197 reload_override_in[r] = reg_last_reload_reg[regno];
5198 else
5200 int k;
5201 /* We can use this as a reload reg. */
5202 /* Mark the register as in use for this part of
5203 the insn. */
5204 mark_reload_reg_in_use (spill_regs[i],
5205 reload_opnum[r],
5206 reload_when_needed[r],
5207 reload_mode[r]);
5208 reload_reg_rtx[r] = reg_last_reload_reg[regno];
5209 reload_inherited[r] = 1;
5210 reload_inheritance_insn[r]
5211 = reg_reloaded_insn[i];
5212 reload_spill_index[r] = i;
5213 for (k = 0; k < nr; k++)
5214 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5215 spill_regs[i + k]);
5222 /* Here's another way to see if the value is already lying around. */
5223 if (inheritance
5224 && reload_in[r] != 0
5225 && ! reload_inherited[r]
5226 && reload_out[r] == 0
5227 && (CONSTANT_P (reload_in[r])
5228 || GET_CODE (reload_in[r]) == PLUS
5229 || GET_CODE (reload_in[r]) == REG
5230 || GET_CODE (reload_in[r]) == MEM)
5231 && (reload_nregs[r] == max_group_size
5232 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
5234 register rtx equiv
5235 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
5236 -1, NULL_PTR, 0, reload_mode[r]);
5237 int regno;
5239 if (equiv != 0)
5241 if (GET_CODE (equiv) == REG)
5242 regno = REGNO (equiv);
5243 else if (GET_CODE (equiv) == SUBREG)
5245 /* This must be a SUBREG of a hard register.
5246 Make a new REG since this might be used in an
5247 address and not all machines support SUBREGs
5248 there. */
5249 regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
5250 equiv = gen_rtx (REG, reload_mode[r], regno);
5252 else
5253 abort ();
5256 /* If we found a spill reg, reject it unless it is free
5257 and of the desired class. */
5258 if (equiv != 0
5259 && ((spill_reg_order[regno] >= 0
5260 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5261 reload_when_needed[r]))
5262 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5263 regno)))
5264 equiv = 0;
5266 if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
5267 equiv = 0;
5269 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5270 equiv = 0;
5272 /* We found a register that contains the value we need.
5273 If this register is the same as an `earlyclobber' operand
5274 of the current insn, just mark it as a place to reload from
5275 since we can't use it as the reload register itself. */
5277 if (equiv != 0)
5278 for (i = 0; i < n_earlyclobbers; i++)
5279 if (reg_overlap_mentioned_for_reload_p (equiv,
5280 reload_earlyclobbers[i]))
5282 reload_override_in[r] = equiv;
5283 equiv = 0;
5284 break;
5287 /* JRV: If the equiv register we have found is explicitly
5288 clobbered in the current insn, mark but don't use, as above. */
5290 if (equiv != 0 && regno_clobbered_p (regno, insn))
5292 reload_override_in[r] = equiv;
5293 equiv = 0;
5296 /* If we found an equivalent reg, say no code need be generated
5297 to load it, and use it as our reload reg. */
5298 if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
5300 reload_reg_rtx[r] = equiv;
5301 reload_inherited[r] = 1;
5302 /* If it is a spill reg,
5303 mark the spill reg as in use for this insn. */
5304 i = spill_reg_order[regno];
5305 if (i >= 0)
5307 int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);
5308 int k;
5309 mark_reload_reg_in_use (regno, reload_opnum[r],
5310 reload_when_needed[r],
5311 reload_mode[r]);
5312 for (k = 0; k < nr; k++)
5313 SET_HARD_REG_BIT (reload_reg_used_for_inherit, regno + k);
5318 /* If we found a register to use already, or if this is an optional
5319 reload, we are done. */
5320 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
5321 continue;
5323 #if 0 /* No longer needed for correct operation. Might or might not
5324 give better code on the average. Want to experiment? */
5326 /* See if there is a later reload that has a class different from our
5327 class that intersects our class or that requires fewer registers
5328 than our reload. If so, we must allocate a register to this
5329 reload now, since that reload might inherit a previous reload
5330 and take the only available register in our class. Don't do this
5331 for optional reloads since they will force all previous reloads
5332 to be allocated. Also don't do this for reloads that have been
5333 turned off. */
5335 for (i = j + 1; i < n_reloads; i++)
5337 int s = reload_order[i];
5339 if ((reload_in[s] == 0 && reload_out[s] == 0
5340 && ! reload_secondary_p[s])
5341 || reload_optional[s])
5342 continue;
5344 if ((reload_reg_class[s] != reload_reg_class[r]
5345 && reg_classes_intersect_p (reload_reg_class[r],
5346 reload_reg_class[s]))
5347 || reload_nregs[s] < reload_nregs[r])
5348 break;
5351 if (i == n_reloads)
5352 continue;
5354 allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
5355 #endif
5358 /* Now allocate reload registers for anything non-optional that
5359 didn't get one yet. */
5360 for (j = 0; j < n_reloads; j++)
5362 register int r = reload_order[j];
5364 /* Ignore reloads that got marked inoperative. */
5365 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5366 continue;
5368 /* Skip reloads that already have a register allocated or are
5369 optional. */
5370 if (reload_reg_rtx[r] != 0 || reload_optional[r])
5371 continue;
5373 if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
5374 break;
5377 /* If that loop got all the way, we have won. */
5378 if (j == n_reloads)
5379 break;
5381 fail:
5382 /* Loop around and try without any inheritance. */
5383 /* First undo everything done by the failed attempt
5384 to allocate with inheritance. */
5385 bcopy ((char *) save_reload_reg_rtx, (char *) reload_reg_rtx,
5386 sizeof reload_reg_rtx);
5387 bcopy ((char *) save_reload_inherited, (char *) reload_inherited,
5388 sizeof reload_inherited);
5389 bcopy ((char *) save_reload_inheritance_insn,
5390 (char *) reload_inheritance_insn,
5391 sizeof reload_inheritance_insn);
5392 bcopy ((char *) save_reload_override_in, (char *) reload_override_in,
5393 sizeof reload_override_in);
5394 bcopy ((char *) save_reload_spill_index, (char *) reload_spill_index,
5395 sizeof reload_spill_index);
5396 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
5397 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
5398 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
5399 save_reload_reg_used_in_op_addr);
5400 COPY_HARD_REG_SET (reload_reg_used_in_op_addr_reload,
5401 save_reload_reg_used_in_op_addr_reload);
5402 COPY_HARD_REG_SET (reload_reg_used_in_insn,
5403 save_reload_reg_used_in_insn);
5404 COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
5405 save_reload_reg_used_in_other_addr);
5407 for (i = 0; i < reload_n_operands; i++)
5409 COPY_HARD_REG_SET (reload_reg_used_in_input[i],
5410 save_reload_reg_used_in_input[i]);
5411 COPY_HARD_REG_SET (reload_reg_used_in_output[i],
5412 save_reload_reg_used_in_output[i]);
5413 COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
5414 save_reload_reg_used_in_input_addr[i]);
5415 COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
5416 save_reload_reg_used_in_output_addr[i]);
5420 /* If we thought we could inherit a reload, because it seemed that
5421 nothing else wanted the same reload register earlier in the insn,
5422 verify that assumption, now that all reloads have been assigned. */
5424 for (j = 0; j < n_reloads; j++)
5426 register int r = reload_order[j];
5428 if (reload_inherited[r] && reload_reg_rtx[r] != 0
5429 && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
5430 reload_opnum[r],
5431 reload_when_needed[r]))
5432 reload_inherited[r] = 0;
5434 /* If we found a better place to reload from,
5435 validate it in the same fashion, if it is a reload reg. */
5436 if (reload_override_in[r]
5437 && (GET_CODE (reload_override_in[r]) == REG
5438 || GET_CODE (reload_override_in[r]) == SUBREG))
5440 int regno = true_regnum (reload_override_in[r]);
5441 if (spill_reg_order[regno] >= 0
5442 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5443 reload_when_needed[r]))
5444 reload_override_in[r] = 0;
5448 /* Now that reload_override_in is known valid,
5449 actually override reload_in. */
5450 for (j = 0; j < n_reloads; j++)
5451 if (reload_override_in[j])
5452 reload_in[j] = reload_override_in[j];
5454 /* If this reload won't be done because it has been cancelled or is
5455 optional and not inherited, clear reload_reg_rtx so other
5456 routines (such as subst_reloads) don't get confused. */
5457 for (j = 0; j < n_reloads; j++)
5458 if (reload_reg_rtx[j] != 0
5459 && ((reload_optional[j] && ! reload_inherited[j])
5460 || (reload_in[j] == 0 && reload_out[j] == 0
5461 && ! reload_secondary_p[j])))
5463 int regno = true_regnum (reload_reg_rtx[j]);
5465 if (spill_reg_order[regno] >= 0)
5466 clear_reload_reg_in_use (regno, reload_opnum[j],
5467 reload_when_needed[j], reload_mode[j]);
5468 reload_reg_rtx[j] = 0;
5471 /* Record which pseudos and which spill regs have output reloads. */
5472 for (j = 0; j < n_reloads; j++)
5474 register int r = reload_order[j];
5476 i = reload_spill_index[r];
5478 /* I is nonneg if this reload used one of the spill regs.
5479 If reload_reg_rtx[r] is 0, this is an optional reload
5480 that we opted to ignore. */
5481 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
5482 && reload_reg_rtx[r] != 0)
5484 register int nregno = REGNO (reload_out[r]);
5485 int nr = 1;
5487 if (nregno < FIRST_PSEUDO_REGISTER)
5488 nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
5490 while (--nr >= 0)
5491 reg_has_output_reload[nregno + nr] = 1;
5493 if (i >= 0)
5495 nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5496 while (--nr >= 0)
5497 SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
5500 if (reload_when_needed[r] != RELOAD_OTHER
5501 && reload_when_needed[r] != RELOAD_FOR_OUTPUT
5502 && reload_when_needed[r] != RELOAD_FOR_INSN)
5503 abort ();
5508 /* If SMALL_REGISTER_CLASSES is defined, we may not have merged two
5509 reloads of the same item for fear that we might not have enough reload
5510 registers. However, normally they will get the same reload register
5511 and hence actually need not be loaded twice.
5513 Here we check for the most common case of this phenomenon: when we have
5514 a number of reloads for the same object, each of which was allocated
5515 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
5516 reload, and is not modified in the insn itself. If we find such,
5517 merge all the reloads and set the resulting reload to RELOAD_OTHER.
5518 This will not increase the number of spill registers needed and will
5519 prevent redundant code. */
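/* For example (hypothetical register numbers): if two input reloads of the
   same pseudo were each assigned (reg:SI 3), and reg 3 is neither used by
   any other reload nor set by the insn itself, the loop below clears
   reload_in for all but one of them and marks the survivor RELOAD_OTHER,
   so the value is loaded only once.  */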
5521 #ifdef SMALL_REGISTER_CLASSES
5523 static void
5524 merge_assigned_reloads (insn)
5525 rtx insn;
5527 int i, j;
5529 /* Scan all the reloads looking for ones that only load values and
5530 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
5531 assigned and not modified by INSN. */
5533 for (i = 0; i < n_reloads; i++)
5535 if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
5536 || reload_out[i] != 0 || reload_reg_rtx[i] == 0
5537 || reg_set_p (reload_reg_rtx[i], insn))
5538 continue;
5540 /* Look at all other reloads. Ensure that the only use of this
5541 reload_reg_rtx is in a reload that just loads the same value
5542 as we do. Note that any secondary reloads must be of the identical
5543 class since the values, modes, and result registers are the
5544 same, so we need not do anything with any secondary reloads. */
5546 for (j = 0; j < n_reloads; j++)
5548 if (i == j || reload_reg_rtx[j] == 0
5549 || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
5550 reload_reg_rtx[i]))
5551 continue;
5553 /* If the reload regs aren't exactly the same (e.g, different modes)
5554 or if the values are different, we can't merge anything with this
5555 reload register. */
5557 if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
5558 || reload_out[j] != 0 || reload_in[j] == 0
5559 || ! rtx_equal_p (reload_in[i], reload_in[j]))
5560 break;
5563 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
5564 we, in fact, found any matching reloads. */
5566 if (j == n_reloads)
5568 for (j = 0; j < n_reloads; j++)
5569 if (i != j && reload_reg_rtx[j] != 0
5570 && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j]))
5572 reload_when_needed[i] = RELOAD_OTHER;
5573 reload_in[j] = 0;
5574 transfer_replacements (i, j);
5577 /* If this is now RELOAD_OTHER, look for any reloads that load
5578 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
5579 if they were for inputs, RELOAD_OTHER for outputs. Note that
5580 this test is equivalent to looking for reloads for this operand
5581 number. */
5583 if (reload_when_needed[i] == RELOAD_OTHER)
5584 for (j = 0; j < n_reloads; j++)
5585 if (reload_in[j] != 0
5586 && reload_when_needed[j] != RELOAD_OTHER
5587 && reg_overlap_mentioned_for_reload_p (reload_in[j],
5588 reload_in[i]))
5589 reload_when_needed[j]
5590 = reload_when_needed[j] == RELOAD_FOR_INPUT_ADDRESS
5591 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER;
5595 #endif /* SMALL_REGISTER_CLASSES */
5597 /* Output insns to reload values in and out of the chosen reload regs. */
5599 static void
5600 emit_reload_insns (insn)
5601 rtx insn;
5603 register int j;
5604 rtx input_reload_insns[MAX_RECOG_OPERANDS];
5605 rtx other_input_address_reload_insns = 0;
5606 rtx other_input_reload_insns = 0;
5607 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
5608 rtx output_reload_insns[MAX_RECOG_OPERANDS];
5609 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
5610 rtx operand_reload_insns = 0;
5611 rtx other_operand_reload_insns = 0;
5612 rtx following_insn = NEXT_INSN (insn);
5613 rtx before_insn = insn;
5614 int special;
5615 /* Values to be put in spill_reg_store are put here first. */
5616 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
5618 for (j = 0; j < reload_n_operands; j++)
5619 input_reload_insns[j] = input_address_reload_insns[j]
5620 = output_reload_insns[j] = output_address_reload_insns[j] = 0;
5622 /* Now output the instructions to copy the data into and out of the
5623 reload registers. Do these in the order that the reloads were reported,
5624 since reloads of base and index registers precede reloads of operands
5625 and the operands may need the base and index registers reloaded. */
5627 for (j = 0; j < n_reloads; j++)
5629 register rtx old;
5630 rtx oldequiv_reg = 0;
5631 rtx store_insn = 0;
5633 old = reload_in[j];
5634 if (old != 0 && ! reload_inherited[j]
5635 && ! rtx_equal_p (reload_reg_rtx[j], old)
5636 && reload_reg_rtx[j] != 0)
5638 register rtx reloadreg = reload_reg_rtx[j];
5639 rtx oldequiv = 0;
5640 enum machine_mode mode;
5641 rtx *where;
5643 /* Determine the mode to reload in.
5644 This is very tricky because we have three to choose from.
5645 There is the mode the insn operand wants (reload_inmode[J]).
5646 There is the mode of the reload register RELOADREG.
5647 There is the intrinsic mode of the operand, which we could find
5648 by stripping some SUBREGs.
5649 It turns out that RELOADREG's mode is irrelevant:
5650 we can change that arbitrarily.
5652 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
5653 then the reload reg may not support QImode moves, so use SImode.
5654 If foo is in memory due to spilling a pseudo reg, this is safe,
5655 because the QImode value is in the least significant part of a
5656 slot big enough for a SImode. If foo is some other sort of
5657 memory reference, then it is impossible to reload this case,
5658 so previous passes had better make sure this never happens.
5660 Then consider a one-word union which has SImode and one of its
5661 members is a float, being fetched as (SUBREG:SF union:SI).
5662 We must fetch that as SFmode because we could be loading into
5663 a float-only register. In this case OLD's mode is correct.
5665 Consider an immediate integer: it has VOIDmode. Here we need
5666 to get a mode from something else.
5668 In some cases, there is a fourth mode, the operand's
5669 containing mode. If the insn specifies a containing mode for
5670 this operand, it overrides all others.
5672 I am not sure whether the algorithm here is always right,
5673 but it does the right things in those cases. */
5675 mode = GET_MODE (old);
5676 if (mode == VOIDmode)
5677 mode = reload_inmode[j];
5679 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5680 /* If we need a secondary register for this operation, see if
5681 the value is already in a register in that class. Don't
5682 do this if the secondary register will be used as a scratch
5683 register. */
5685 if (reload_secondary_in_reload[j] >= 0
5686 && reload_secondary_in_icode[j] == CODE_FOR_nothing
5687 && optimize)
5688 oldequiv
5689 = find_equiv_reg (old, insn,
5690 reload_reg_class[reload_secondary_in_reload[j]],
5691 -1, NULL_PTR, 0, mode);
5692 #endif
5694 /* If reloading from memory, see if there is a register
5695 that already holds the same value. If so, reload from there.
5696 We can pass 0 as the reload_reg_p argument because
5697 any other reload has either already been emitted,
5698 in which case find_equiv_reg will see the reload-insn,
5699 or has yet to be emitted, in which case it doesn't matter
5700 because we will use this equiv reg right away. */
5702 if (oldequiv == 0 && optimize
5703 && (GET_CODE (old) == MEM
5704 || (GET_CODE (old) == REG
5705 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5706 && reg_renumber[REGNO (old)] < 0)))
5707 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
5708 -1, NULL_PTR, 0, mode);
5710 if (oldequiv)
5712 int regno = true_regnum (oldequiv);
5714 /* If OLDEQUIV is a spill register, don't use it for this
5715 if any other reload needs it at an earlier stage of this insn
5716 or at this stage. */
5717 if (spill_reg_order[regno] >= 0
5718 && (! reload_reg_free_p (regno, reload_opnum[j],
5719 reload_when_needed[j])
5720 || ! reload_reg_free_before_p (regno, reload_opnum[j],
5721 reload_when_needed[j])))
5722 oldequiv = 0;
5724 /* If OLDEQUIV is not a spill register,
5725 don't use it if any other reload wants it. */
5726 if (spill_reg_order[regno] < 0)
5728 int k;
5729 for (k = 0; k < n_reloads; k++)
5730 if (reload_reg_rtx[k] != 0 && k != j
5731 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
5732 oldequiv))
5734 oldequiv = 0;
5735 break;
5739 /* If it is no cheaper to copy from OLDEQUIV into the
5740 reload register than it would be to move from memory,
5741 don't use it. Likewise, if we need a secondary register
5742 or memory. */
5744 if (oldequiv != 0
5745 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
5746 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
5747 reload_reg_class[j])
5748 >= MEMORY_MOVE_COST (mode)))
5749 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5750 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5751 mode, oldequiv)
5752 != NO_REGS)
5753 #endif
5754 #ifdef SECONDARY_MEMORY_NEEDED
5755 || SECONDARY_MEMORY_NEEDED (reload_reg_class[j],
5756 REGNO_REG_CLASS (regno),
5757 mode)
5758 #endif
5760 oldequiv = 0;
5763 if (oldequiv == 0)
5764 oldequiv = old;
5765 else if (GET_CODE (oldequiv) == REG)
5766 oldequiv_reg = oldequiv;
5767 else if (GET_CODE (oldequiv) == SUBREG)
5768 oldequiv_reg = SUBREG_REG (oldequiv);
5770 /* If we are reloading from a register that was recently stored in
5771 with an output-reload, see if we can prove there was
5772 actually no need to store the old value in it. */
5774 if (optimize && GET_CODE (oldequiv) == REG
5775 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
5776 && spill_reg_order[REGNO (oldequiv)] >= 0
5777 && spill_reg_store[spill_reg_order[REGNO (oldequiv)]] != 0
5778 && find_reg_note (insn, REG_DEAD, reload_in[j])
5779 /* This is unsafe if operand occurs more than once in current
5780 insn. Perhaps some occurrences weren't reloaded. */
5781 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
5782 delete_output_reload
5783 (insn, j, spill_reg_store[spill_reg_order[REGNO (oldequiv)]]);
5785 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
5786 then load RELOADREG from OLDEQUIV. Note that we cannot use
5787 gen_lowpart_common since it can do the wrong thing when
5788 RELOADREG has a multi-word mode. Note that RELOADREG
5789 must always be a REG here. */
5791 if (GET_MODE (reloadreg) != mode)
5792 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
5793 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
5794 oldequiv = SUBREG_REG (oldequiv);
5795 if (GET_MODE (oldequiv) != VOIDmode
5796 && mode != GET_MODE (oldequiv))
5797 oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
5799 /* Switch to the right place to emit the reload insns. */
5800 switch (reload_when_needed[j])
5802 case RELOAD_OTHER:
5803 where = &other_input_reload_insns;
5804 break;
5805 case RELOAD_FOR_INPUT:
5806 where = &input_reload_insns[reload_opnum[j]];
5807 break;
5808 case RELOAD_FOR_INPUT_ADDRESS:
5809 where = &input_address_reload_insns[reload_opnum[j]];
5810 break;
5811 case RELOAD_FOR_OUTPUT_ADDRESS:
5812 where = &output_address_reload_insns[reload_opnum[j]];
5813 break;
5814 case RELOAD_FOR_OPERAND_ADDRESS:
5815 where = &operand_reload_insns;
5816 break;
5817 case RELOAD_FOR_OPADDR_ADDR:
5818 where = &other_operand_reload_insns;
5819 break;
5820 case RELOAD_FOR_OTHER_ADDRESS:
5821 where = &other_input_address_reload_insns;
5822 break;
5823 default:
5824 abort ();
5827 push_to_sequence (*where);
5828 special = 0;
5830 /* Auto-increment addresses must be reloaded in a special way. */
5831 if (GET_CODE (oldequiv) == POST_INC
5832 || GET_CODE (oldequiv) == POST_DEC
5833 || GET_CODE (oldequiv) == PRE_INC
5834 || GET_CODE (oldequiv) == PRE_DEC)
5836 /* We are not going to bother supporting the case where an
5837 incremented register can't be copied directly from
5838 OLDEQUIV since this seems highly unlikely. */
5839 if (reload_secondary_in_reload[j] >= 0)
5840 abort ();
5841 /* Prevent normal processing of this reload. */
5842 special = 1;
5843 /* Output a special code sequence for this case. */
5844 inc_for_reload (reloadreg, oldequiv, reload_inc[j]);
5847 /* If we are reloading a pseudo-register that was set by the previous
5848 insn, see if we can get rid of that pseudo-register entirely
5849 by redirecting the previous insn into our reload register. */
5851 else if (optimize && GET_CODE (old) == REG
5852 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5853 && dead_or_set_p (insn, old)
5854 /* This is unsafe if some other reload
5855 uses the same reg first. */
5856 && reload_reg_free_before_p (REGNO (reloadreg),
5857 reload_opnum[j],
5858 reload_when_needed[j]))
5860 rtx temp = PREV_INSN (insn);
5861 while (temp && GET_CODE (temp) == NOTE)
5862 temp = PREV_INSN (temp);
5863 if (temp
5864 && GET_CODE (temp) == INSN
5865 && GET_CODE (PATTERN (temp)) == SET
5866 && SET_DEST (PATTERN (temp)) == old
5867 /* Make sure we can access insn_operand_constraint. */
5868 && asm_noperands (PATTERN (temp)) < 0
5869 /* This is unsafe if prev insn rejects our reload reg. */
5870 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
5871 reloadreg)
5872 /* This is unsafe if operand occurs more than once in current
5873 insn. Perhaps some occurrences aren't reloaded. */
5874 && count_occurrences (PATTERN (insn), old) == 1
5875 /* Don't risk splitting a matching pair of operands. */
5876 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
5878 /* Store into the reload register instead of the pseudo. */
5879 SET_DEST (PATTERN (temp)) = reloadreg;
5880 /* If these are the only uses of the pseudo reg,
5881 pretend for GDB it lives in the reload reg we used. */
5882 if (reg_n_deaths[REGNO (old)] == 1
5883 && reg_n_sets[REGNO (old)] == 1)
5885 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
5886 alter_reg (REGNO (old), -1);
5888 special = 1;
5892 /* We can't do that, so output an insn to load RELOADREG. */
5894 if (! special)
5896 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5897 rtx second_reload_reg = 0;
5898 enum insn_code icode;
5900 /* If we have a secondary reload, pick up the secondary register
5901 and icode, if any. If OLDEQUIV and OLD are different or
5902 if this is an in-out reload, recompute whether or not we
5903 still need a secondary register and what the icode should
5904 be. If we still need a secondary register and the class or
5905 icode is different, go back to reloading from OLD if using
5906 OLDEQUIV means that we got the wrong type of register. We
5907 cannot have different class or icode due to an in-out reload
5908 because we don't make such reloads when both the input and
5909 output need secondary reload registers. */
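/* Concretely: when ICODE is a reload_in pattern, the secondary register is
   handed to that pattern as a scratch, as in
   GEN_FCN (icode) (reloadreg, real_oldequiv, second_reload_reg) below;
   when there is no such pattern, the secondary register serves as an
   intermediate stop, loaded from the input and then copied into RELOADREG
   by the ordinary input-reload code.  */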
5911 if (reload_secondary_in_reload[j] >= 0)
5913 int secondary_reload = reload_secondary_in_reload[j];
5914 rtx real_oldequiv = oldequiv;
5915 rtx real_old = old;
5917 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
5918 and similarly for OLD.
5919 See comments in get_secondary_reload in reload.c. */
5920 if (GET_CODE (oldequiv) == REG
5921 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
5922 && reg_equiv_mem[REGNO (oldequiv)] != 0)
5923 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
5925 if (GET_CODE (old) == REG
5926 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5927 && reg_equiv_mem[REGNO (old)] != 0)
5928 real_old = reg_equiv_mem[REGNO (old)];
5930 second_reload_reg = reload_reg_rtx[secondary_reload];
5931 icode = reload_secondary_in_icode[j];
5933 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
5934 || (reload_in[j] != 0 && reload_out[j] != 0))
5936 enum reg_class new_class
5937 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5938 mode, real_oldequiv);
5940 if (new_class == NO_REGS)
5941 second_reload_reg = 0;
5942 else
5944 enum insn_code new_icode;
5945 enum machine_mode new_mode;
5947 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
5948 REGNO (second_reload_reg)))
5949 oldequiv = old, real_oldequiv = real_old;
5950 else
5952 new_icode = reload_in_optab[(int) mode];
5953 if (new_icode != CODE_FOR_nothing
5954 && ((insn_operand_predicate[(int) new_icode][0]
5955 && ! ((*insn_operand_predicate[(int) new_icode][0])
5956 (reloadreg, mode)))
5957 || (insn_operand_predicate[(int) new_icode][1]
5958 && ! ((*insn_operand_predicate[(int) new_icode][1])
5959 (real_oldequiv, mode)))))
5960 new_icode = CODE_FOR_nothing;
5962 if (new_icode == CODE_FOR_nothing)
5963 new_mode = mode;
5964 else
5965 new_mode = insn_operand_mode[(int) new_icode][2];
5967 if (GET_MODE (second_reload_reg) != new_mode)
5969 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
5970 new_mode))
5971 oldequiv = old, real_oldequiv = real_old;
5972 else
5973 second_reload_reg
5974 = gen_rtx (REG, new_mode,
5975 REGNO (second_reload_reg));
5981 /* If we still need a secondary reload register, check
5982 to see if it is being used as a scratch or intermediate
5983 register and generate code appropriately. If we need
5984 a scratch register, use REAL_OLDEQUIV since the form of
5985 the insn may depend on the actual address if it is
5986 a MEM. */
5988 if (second_reload_reg)
5990 if (icode != CODE_FOR_nothing)
5992 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
5993 second_reload_reg));
5994 special = 1;
5996 else
5998 /* See if we need a scratch register to load the
5999 intermediate register (a tertiary reload). */
6000 enum insn_code tertiary_icode
6001 = reload_secondary_in_icode[secondary_reload];
6003 if (tertiary_icode != CODE_FOR_nothing)
6005 rtx third_reload_reg
6006 = reload_reg_rtx[reload_secondary_in_reload[secondary_reload]];
6008 emit_insn ((GEN_FCN (tertiary_icode)
6009 (second_reload_reg, real_oldequiv,
6010 third_reload_reg)));
6012 else
6013 gen_input_reload (second_reload_reg, oldequiv,
6014 reload_opnum[j],
6015 reload_when_needed[j]);
6017 oldequiv = second_reload_reg;
6021 #endif
6023 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
6024 gen_input_reload (reloadreg, oldequiv, reload_opnum[j],
6025 reload_when_needed[j]);
6027 #if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
6028 /* We may have to make a REG_DEAD note for the secondary reload
6029 register in the insns we just made. Find the last insn that
6030 mentioned the register. */
6031 if (! special && second_reload_reg
6032 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
6034 rtx prev;
6036 for (prev = get_last_insn (); prev;
6037 prev = PREV_INSN (prev))
6038 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6039 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
6040 PATTERN (prev)))
6042 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
6043 second_reload_reg,
6044 REG_NOTES (prev));
6045 break;
6048 #endif
6051 /* End this sequence. */
6052 *where = get_insns ();
6053 end_sequence ();
6056 /* Add a note saying the input reload reg
6057 dies in this insn, if anyone cares. */
6058 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6059 if (old != 0
6060 && reload_reg_rtx[j] != old
6061 && reload_reg_rtx[j] != 0
6062 && reload_out[j] == 0
6063 && ! reload_inherited[j]
6064 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
6066 register rtx reloadreg = reload_reg_rtx[j];
6068 #if 0
6069 /* We can't abort here because we need to support this for sched.c.
6070 It's not terrible to miss a REG_DEAD note, but we should try
6071 to figure out how to do this correctly. */
6072 /* The code below is incorrect for address-only reloads. */
6073 if (reload_when_needed[j] != RELOAD_OTHER
6074 && reload_when_needed[j] != RELOAD_FOR_INPUT)
6075 abort ();
6076 #endif
6078 /* Add a death note to this insn, for an input reload. */
6080 if ((reload_when_needed[j] == RELOAD_OTHER
6081 || reload_when_needed[j] == RELOAD_FOR_INPUT)
6082 && ! dead_or_set_p (insn, reloadreg))
6083 REG_NOTES (insn)
6084 = gen_rtx (EXPR_LIST, REG_DEAD,
6085 reloadreg, REG_NOTES (insn));
6088 /* When we inherit a reload, the last marked death of the reload reg
6089 may no longer really be a death. */
6090 if (reload_reg_rtx[j] != 0
6091 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
6092 && reload_inherited[j])
6094 /* Handle inheriting an output reload.
6095 Remove the death note from the output reload insn. */
6096 if (reload_spill_index[j] >= 0
6097 && GET_CODE (reload_in[j]) == REG
6098 && spill_reg_store[reload_spill_index[j]] != 0
6099 && find_regno_note (spill_reg_store[reload_spill_index[j]],
6100 REG_DEAD, REGNO (reload_reg_rtx[j])))
6101 remove_death (REGNO (reload_reg_rtx[j]),
6102 spill_reg_store[reload_spill_index[j]]);
6103 /* Likewise for input reloads that were inherited. */
6104 else if (reload_spill_index[j] >= 0
6105 && GET_CODE (reload_in[j]) == REG
6106 && spill_reg_store[reload_spill_index[j]] == 0
6107 && reload_inheritance_insn[j] != 0
6108 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
6109 REGNO (reload_reg_rtx[j])))
6110 remove_death (REGNO (reload_reg_rtx[j]),
6111 reload_inheritance_insn[j]);
6112 else
6114 rtx prev;
6116 /* We got this register from find_equiv_reg.
6117 Search back for its last death note and get rid of it.
6118 But don't search back too far.
6119 Don't go past a place where this reg is set,
6120 since a death note before that remains valid. */
6121 for (prev = PREV_INSN (insn);
6122 prev && GET_CODE (prev) != CODE_LABEL;
6123 prev = PREV_INSN (prev))
6124 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6125 && dead_or_set_p (prev, reload_reg_rtx[j]))
6127 if (find_regno_note (prev, REG_DEAD,
6128 REGNO (reload_reg_rtx[j])))
6129 remove_death (REGNO (reload_reg_rtx[j]), prev);
6130 break;
6135 /* We might have used find_equiv_reg above to choose an alternate
6136 place from which to reload. If so, and it died, we need to remove
6137 that death and move it to one of the insns we just made. */
6139 if (oldequiv_reg != 0
6140 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
6142 rtx prev, prev1;
6144 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
6145 prev = PREV_INSN (prev))
6146 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6147 && dead_or_set_p (prev, oldequiv_reg))
6149 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
6151 for (prev1 = this_reload_insn;
6152 prev1; prev1 = PREV_INSN (prev1))
6153 if (GET_RTX_CLASS (GET_CODE (prev1)) == 'i'
6154 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
6155 PATTERN (prev1)))
6157 REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
6158 oldequiv_reg,
6159 REG_NOTES (prev1));
6160 break;
6162 remove_death (REGNO (oldequiv_reg), prev);
6164 break;
6167 #endif
6169 /* If we are reloading a register that was recently stored in with an
6170 output-reload, see if we can prove there was
6171 actually no need to store the old value in it. */
6173 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
6174 && reload_in[j] != 0
6175 && GET_CODE (reload_in[j]) == REG
6176 #if 0
6177 /* There doesn't seem to be any reason to restrict this to pseudos
6178 and doing so loses in the case where we are copying from a
6179 register of the wrong class. */
6180 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
6181 #endif
6182 && spill_reg_store[reload_spill_index[j]] != 0
6183 /* This is unsafe if some other reload uses the same reg first. */
6184 && reload_reg_free_before_p (spill_regs[reload_spill_index[j]],
6185 reload_opnum[j], reload_when_needed[j])
6186 && dead_or_set_p (insn, reload_in[j])
6187 /* This is unsafe if operand occurs more than once in current
6188 insn. Perhaps some occurrences weren't reloaded. */
6189 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
6190 delete_output_reload (insn, j,
6191 spill_reg_store[reload_spill_index[j]]);
6193 /* Input-reloading is done. Now do output-reloading,
6194 storing the value from the reload-register after the main insn
6195 if reload_out[j] is nonzero.
6197 ??? At some point we need to support handling output reloads of
6198 JUMP_INSNs or insns that set cc0. */
6199 old = reload_out[j];
6200 if (old != 0
6201 && reload_reg_rtx[j] != old
6202 && reload_reg_rtx[j] != 0)
6204 register rtx reloadreg = reload_reg_rtx[j];
6205 register rtx second_reloadreg = 0;
6206 rtx note, p;
6207 enum machine_mode mode;
6208 int special = 0;
6210 /* An output operand that dies right away does need a reload,
6211 but need not be copied from it. Show the new location in the
6212 REG_UNUSED note. */
6213 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
6214 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
6216 XEXP (note, 0) = reload_reg_rtx[j];
6217 continue;
6219 else if (GET_CODE (old) == SCRATCH)
6220 /* If we aren't optimizing, there won't be a REG_UNUSED note,
6221 but we don't want to make an output reload. */
6222 continue;
6224 #if 0
6225 /* Strip off of OLD any size-increasing SUBREGs such as
6226 (SUBREG:SI foo:QI 0). */
6228 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
6229 && (GET_MODE_SIZE (GET_MODE (old))
6230 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
6231 old = SUBREG_REG (old);
6232 #endif
6234 /* If INSN is a JUMP_INSN, we can't support output reloads yet. */
6235 if (GET_CODE (insn) == JUMP_INSN)
6236 abort ();
6238 push_to_sequence (output_reload_insns[reload_opnum[j]]);
6240 /* Determine the mode to reload in.
6241 See comments above (for input reloading). */
6243 mode = GET_MODE (old);
6244 if (mode == VOIDmode)
6246 /* VOIDmode should never happen for an output. */
6247 if (asm_noperands (PATTERN (insn)) < 0)
6248 /* It's the compiler's fault. */
6249 abort ();
6250 error_for_asm (insn, "output operand is constant in `asm'");
6251 /* Prevent crash--use something we know is valid. */
6252 mode = word_mode;
6253 old = gen_rtx (REG, mode, REGNO (reloadreg));
6256 if (GET_MODE (reloadreg) != mode)
6257 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6259 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6261 /* If we need two reload regs, set RELOADREG to the intermediate
6262 one, since it will be stored into OUT. We might need a secondary
6263 register only for an input reload, so check again here. */
6265 if (reload_secondary_out_reload[j] >= 0)
6267 rtx real_old = old;
6269 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
6270 && reg_equiv_mem[REGNO (old)] != 0)
6271 real_old = reg_equiv_mem[REGNO (old)];
6273 if ((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
6274 mode, real_old)
6275 != NO_REGS))
6277 second_reloadreg = reloadreg;
6278 reloadreg = reload_reg_rtx[reload_secondary_out_reload[j]];
6280 /* See if RELOADREG is to be used as a scratch register
6281 or as an intermediate register. */
6282 if (reload_secondary_out_icode[j] != CODE_FOR_nothing)
6284 emit_insn ((GEN_FCN (reload_secondary_out_icode[j])
6285 (real_old, second_reloadreg, reloadreg)));
6286 special = 1;
6288 else
6290 /* See if we need both a scratch and intermediate reload
6291 register. */
6292 int secondary_reload = reload_secondary_out_reload[j];
6293 enum insn_code tertiary_icode
6294 = reload_secondary_out_icode[secondary_reload];
6295 rtx pat;
6297 if (GET_MODE (reloadreg) != mode)
6298 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6300 if (tertiary_icode != CODE_FOR_nothing)
6302 rtx third_reloadreg
6303 = reload_reg_rtx[reload_secondary_out_reload[secondary_reload]];
6304 pat = (GEN_FCN (tertiary_icode)
6305 (reloadreg, second_reloadreg, third_reloadreg));
6307 #ifdef SECONDARY_MEMORY_NEEDED
6308 /* If we need a memory location to do the move, do it that way. */
6309 else if (GET_CODE (reloadreg) == REG
6310 && REGNO (reloadreg) < FIRST_PSEUDO_REGISTER
6311 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (reloadreg)),
6312 REGNO_REG_CLASS (REGNO (second_reloadreg)),
6313 GET_MODE (second_reloadreg)))
6315 /* Get the memory to use and rewrite both registers
6316 to its mode. */
6317 rtx loc
6318 = get_secondary_mem (reloadreg,
6319 GET_MODE (second_reloadreg),
6320 reload_opnum[j],
6321 reload_when_needed[j]);
6322 rtx tmp_reloadreg;
6324 if (GET_MODE (loc) != GET_MODE (second_reloadreg))
6325 second_reloadreg = gen_rtx (REG, GET_MODE (loc),
6326 REGNO (second_reloadreg));
6328 if (GET_MODE (loc) != GET_MODE (reloadreg))
6329 tmp_reloadreg = gen_rtx (REG, GET_MODE (loc),
6330 REGNO (reloadreg));
6331 else
6332 tmp_reloadreg = reloadreg;
6334 emit_move_insn (loc, second_reloadreg);
6335 pat = gen_move_insn (tmp_reloadreg, loc);
6337 #endif
6338 else
6339 pat = gen_move_insn (reloadreg, second_reloadreg);
6341 emit_insn (pat);
6345 #endif
6347 /* Output the last reload insn. */
6348 if (! special)
6350 #ifdef SECONDARY_MEMORY_NEEDED
6351 /* If we need a memory location to do the move, do it that way. */
6352 if (GET_CODE (old) == REG && REGNO (old) < FIRST_PSEUDO_REGISTER
6353 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (old)),
6354 REGNO_REG_CLASS (REGNO (reloadreg)),
6355 GET_MODE (reloadreg)))
6357 /* Get the memory to use and rewrite both registers to
6358 its mode. */
6359 rtx loc = get_secondary_mem (old, GET_MODE (reloadreg),
6360 reload_opnum[j],
6361 reload_when_needed[j]);
6363 if (GET_MODE (loc) != GET_MODE (reloadreg))
6364 reloadreg = gen_rtx (REG, GET_MODE (loc),
6365 REGNO (reloadreg));
6367 if (GET_MODE (loc) != GET_MODE (old))
6368 old = gen_rtx (REG, GET_MODE (loc), REGNO (old));
6370 emit_insn (gen_move_insn (loc, reloadreg));
6371 emit_insn (gen_move_insn (old, loc));
6373 else
6374 #endif
6375 emit_insn (gen_move_insn (old, reloadreg));
6378 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6379 /* If final will look at death notes for this reg,
6380 put one on the last output-reload insn to use it. Similarly
6381 for any secondary register. */
6382 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
6383 for (p = get_last_insn (); p; p = PREV_INSN (p))
6384 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6385 && reg_overlap_mentioned_for_reload_p (reloadreg,
6386 PATTERN (p)))
6387 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6388 reloadreg, REG_NOTES (p));
6390 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6391 if (! special
6392 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
6393 for (p = get_last_insn (); p; p = PREV_INSN (p))
6394 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6395 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
6396 PATTERN (p)))
6397 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6398 second_reloadreg, REG_NOTES (p));
6399 #endif
6400 #endif
6401 /* Look at all insns we emitted, just to be safe. */
6402 for (p = get_insns (); p; p = NEXT_INSN (p))
6403 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
6405 /* If this output reload doesn't come from a spill reg,
6406 clear any memory of reloaded copies of the pseudo reg.
6407 If this output reload comes from a spill reg,
6408 reg_has_output_reload will make this do nothing. */
6409 note_stores (PATTERN (p), forget_old_reloads_1);
6411 if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p)))
6412 store_insn = p;
6415 output_reload_insns[reload_opnum[j]] = get_insns ();
6416 end_sequence ();
6420 if (reload_spill_index[j] >= 0)
6421 new_spill_reg_store[reload_spill_index[j]] = store_insn;
6424 /* Now write all the insns we made for reloads in the order expected by
6425 the allocation functions. Prior to the insn being reloaded, we write
6426 the following reloads:
6428 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
6430 RELOAD_OTHER reloads.
6432 For each operand, any RELOAD_FOR_INPUT_ADDRESS reloads followed by
6433 the RELOAD_FOR_INPUT reload for the operand.
6435 RELOAD_FOR_OPADDR_ADDR reloads.
6437 RELOAD_FOR_OPERAND_ADDRESS reloads.
6439 After the insn being reloaded, we write the following:
6441 For each operand, any RELOAD_FOR_OUTPUT_ADDRESS reload followed by
6442 the RELOAD_FOR_OUTPUT reload for that operand. */
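/* Schematically, for an insn with a single reloaded operand the final
   stream looks roughly like this (sequences that were never needed simply
   contribute nothing):

        other-address reloads
        RELOAD_OTHER input reloads
        input-address reloads for the operand
        input reload for the operand
        RELOAD_FOR_OPADDR_ADDR reloads
        operand-address reloads
        INSN
        output-address reloads for the operand
        output reload for the operand  */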
6444 emit_insns_before (other_input_address_reload_insns, before_insn);
6445 emit_insns_before (other_input_reload_insns, before_insn);
6447 for (j = 0; j < reload_n_operands; j++)
6449 emit_insns_before (input_address_reload_insns[j], before_insn);
6450 emit_insns_before (input_reload_insns[j], before_insn);
6453 emit_insns_before (other_operand_reload_insns, before_insn);
6454 emit_insns_before (operand_reload_insns, before_insn);
6456 for (j = 0; j < reload_n_operands; j++)
6458 emit_insns_before (output_address_reload_insns[j], following_insn);
6459 emit_insns_before (output_reload_insns[j], following_insn);
6462 /* Move death notes from INSN
6463 to output-operand-address and output reload insns. */
6464 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6466 rtx insn1;
6467 /* Loop over those insns, last ones first. */
6468 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
6469 insn1 = PREV_INSN (insn1))
6470 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
6472 rtx source = SET_SRC (PATTERN (insn1));
6473 rtx dest = SET_DEST (PATTERN (insn1));
6475 /* The note we will examine next. */
6476 rtx reg_notes = REG_NOTES (insn);
6477 /* The place that pointed to this note. */
6478 rtx *prev_reg_note = &REG_NOTES (insn);
6480 /* If the note is for something used in the source of this
6481 reload insn, or in the output address, move the note. */
6482 while (reg_notes)
6484 rtx next_reg_notes = XEXP (reg_notes, 1);
6485 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
6486 && GET_CODE (XEXP (reg_notes, 0)) == REG
6487 && ((GET_CODE (dest) != REG
6488 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6489 dest))
6490 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6491 source)))
6493 *prev_reg_note = next_reg_notes;
6494 XEXP (reg_notes, 1) = REG_NOTES (insn1);
6495 REG_NOTES (insn1) = reg_notes;
6497 else
6498 prev_reg_note = &XEXP (reg_notes, 1);
6500 reg_notes = next_reg_notes;
6504 #endif
6506 /* For all the spill regs newly reloaded in this instruction,
6507 record what they were reloaded from, so subsequent instructions
6508 can inherit the reloads.
6510 Update spill_reg_store for the reloads of this insn.
6511 Copy the elements that were updated in the loop above. */
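/* Continuing the hypothetical example above: once an input reload of
   pseudo 105 into spill reg 2 reaches the end of this insn, the loop below
   records reg_last_reload_reg[105] = (reg:SI 2),
   reg_reloaded_contents[spill_reg_order[2]] = 105 and the reloading insn
   in reg_reloaded_insn, which is exactly the state the inheritance tests
   earlier look for.  */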
6513 for (j = 0; j < n_reloads; j++)
6515 register int r = reload_order[j];
6516 register int i = reload_spill_index[r];
6518 /* I is nonneg if this reload used one of the spill regs.
6519 If reload_reg_rtx[r] is 0, this is an optional reload
6520 that we opted to ignore.
6522 Also ignore reloads that don't reach the end of the insn,
6523 since we will eventually see the one that does. */
6525 if (i >= 0 && reload_reg_rtx[r] != 0
6526 && reload_reg_reaches_end_p (spill_regs[i], reload_opnum[r],
6527 reload_when_needed[r]))
6529 /* First, clear out memory of what used to be in this spill reg.
6530 If consecutive registers are used, clear them all. */
6531 int nr
6532 = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
6533 int k;
6535 for (k = 0; k < nr; k++)
6537 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
6538 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
6541 /* Maybe the spill reg contains a copy of reload_out. */
6542 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6544 register int nregno = REGNO (reload_out[r]);
6545 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6546 : HARD_REGNO_NREGS (nregno,
6547 GET_MODE (reload_reg_rtx[r])));
6549 spill_reg_store[i] = new_spill_reg_store[i];
6550 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6552 /* If NREGNO is a hard register, it may occupy more than
6553 one register. If it does, say what is in the
6554 rest of the registers assuming that both registers
6555 agree on how many words the object takes. If not,
6556 invalidate the subsequent registers. */
6558 if (nregno < FIRST_PSEUDO_REGISTER)
6559 for (k = 1; k < nnr; k++)
6560 reg_last_reload_reg[nregno + k]
6561 = (nr == nnr ? gen_rtx (REG,
6562 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6563 REGNO (reload_reg_rtx[r]) + k)
6564 : 0);
6566 /* Now do the inverse operation. */
6567 for (k = 0; k < nr; k++)
6569 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6570 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6571 : nregno + k);
6572 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
6576 /* Maybe the spill reg contains a copy of reload_in. Only do
6577 something if there will not be an output reload for
6578 the register being reloaded. */
6579 else if (reload_out[r] == 0
6580 && reload_in[r] != 0
6581 && ((GET_CODE (reload_in[r]) == REG
6582 && ! reg_has_output_reload[REGNO (reload_in[r])]
6583 || (GET_CODE (reload_in_reg[r]) == REG
6584 && ! reg_has_output_reload[REGNO (reload_in_reg[r])]))))
6586 register int nregno;
6587 int nnr;
6589 if (GET_CODE (reload_in[r]) == REG)
6590 nregno = REGNO (reload_in[r]);
6591 else
6592 nregno = REGNO (reload_in_reg[r]);
6594 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6595 : HARD_REGNO_NREGS (nregno,
6596 GET_MODE (reload_reg_rtx[r])));
6598 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6600 if (nregno < FIRST_PSEUDO_REGISTER)
6601 for (k = 1; k < nnr; k++)
6602 reg_last_reload_reg[nregno + k]
6603 = (nr == nnr ? gen_rtx (REG,
6604 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6605 REGNO (reload_reg_rtx[r]) + k)
6606 : 0);
6608 /* Unless we inherited this reload, show we haven't
6609 recently done a store. */
6610 if (! reload_inherited[r])
6611 spill_reg_store[i] = 0;
6613 for (k = 0; k < nr; k++)
6615 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6616 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6617 : nregno + k);
6618 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
6619 = insn;
6624 /* The following if-statement was #if 0'd in 1.34 (or before...).
6625 It's reenabled in 1.35 because supposedly nothing else
6626 deals with this problem. */
6628 /* If a register gets output-reloaded from a non-spill register,
6629 that invalidates any previous reloaded copy of it.
6630 But forget_old_reloads_1 won't get to see it, because
6631 it thinks only about the original insn. So invalidate it here. */
6632 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6634 register int nregno = REGNO (reload_out[r]);
6635 int num_regs = HARD_REGNO_NREGS (nregno, GET_MODE (reload_out[r]));
6637 while (num_regs-- > 0)
6638 reg_last_reload_reg[nregno + num_regs] = 0;
6643 /* Emit code to perform an input reload of IN to RELOADREG. IN is from
6644 operand OPNUM with reload type TYPE.
6646 Returns first insn emitted. */
6649 gen_input_reload (reloadreg, in, opnum, type)
6650 rtx reloadreg;
6651 rtx in;
6652 int opnum;
6653 enum reload_type type;
6655 rtx last = get_last_insn ();
6657 /* How to do this reload can get quite tricky. Normally, we are being
6658 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
6659 register that didn't get a hard register. In that case we can just
6660 call emit_move_insn.
6662 We can also be asked to reload a PLUS that adds a register or a MEM to
6663 another register, constant or MEM. This can occur during frame pointer
6664 elimination and while reloading addresses. This case is handled by
6665 trying to emit a single insn to perform the add. If it is not valid,
6666 we use a two insn sequence.
6668 Finally, we could be called to handle an 'o' constraint by putting
6669 an address into a register. In that case, we first try to do this
6670 with a named pattern of "reload_load_address". If no such pattern
6671 exists, we just emit a SET insn and hope for the best (it will normally
6672 be valid on machines that use 'o').
6674 This entire process is made complex because reload will never
6675 process the insns we generate here and so we must ensure that
6676 they will fit their constraints and also by the fact that parts of
6677 IN might be being reloaded separately and replaced with spill registers.
6678 Because of this, we are, in some sense, just guessing the right approach
6679 here. The one listed above seems to work.
6681 ??? At some point, this whole thing needs to be rethought. */
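/* For instance (hypothetical RTL), frame pointer elimination can leave IN
   looking like (plus:SI (reg:SI FP) (const_int 40)).  The code below first
   tries the single insn

        (set (reg:SI RELOADREG) (plus:SI (reg:SI FP) (const_int 40)))

   and keeps it only if recog and constrain_operands accept it; otherwise
   it falls back to moving one operand into the reload register and then
   adding the other.  */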
6683 if (GET_CODE (in) == PLUS
6684 && (GET_CODE (XEXP (in, 0)) == REG
6685 || GET_CODE (XEXP (in, 0)) == MEM)
6686 && (GET_CODE (XEXP (in, 1)) == REG
6687 || CONSTANT_P (XEXP (in, 1))
6688 || GET_CODE (XEXP (in, 1)) == MEM))
6690 /* We need to compute the sum of a register or a MEM and another
6691 register, constant, or MEM, and put it into the reload
6692 register. The best possible way of doing this is if the machine
6693 has a three-operand ADD insn that accepts the required operands.
6695 The simplest approach is to try to generate such an insn and see if it
6696 is recognized and matches its constraints. If so, it can be used.
6698 It might be better not to actually emit the insn unless it is valid,
6699 but we need to pass the insn as an operand to `recog' and
6700 `insn_extract' and it is simpler to emit and then delete the insn if
6701 not valid than to dummy things up. */
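/* (recog_memoized returns the code number of the matching insn pattern,
   or -1 if the insn is not recognized; constrain_operands with a
   nonzero `strict' argument then checks the operands against that
   pattern's constraints as they must be satisfied after reload.)  */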
6703 rtx op0, op1, tem, insn;
6704 int code;
6706 op0 = find_replacement (&XEXP (in, 0));
6707 op1 = find_replacement (&XEXP (in, 1));
6709 /* Since constraint checking is strict, commutativity won't be
6710 checked, so we need to do that here to avoid spurious failure
6711 if the add instruction is two-address and the second operand
6712 of the add is the same as the reload reg, which is frequently
6713 the case. If the insn would be A = B + A, rearrange it so
6714 it will be A = A + B as constrain_operands expects. */
6716 if (GET_CODE (XEXP (in, 1)) == REG
6717 && REGNO (reloadreg) == REGNO (XEXP (in, 1)))
6718 tem = op0, op0 = op1, op1 = tem;
6720 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
6721 in = gen_rtx (PLUS, GET_MODE (in), op0, op1);
6723 insn = emit_insn (gen_rtx (SET, VOIDmode, reloadreg, in));
6724 code = recog_memoized (insn);
6726 if (code >= 0)
6728 insn_extract (insn);
6729 /* We want constrain_operands to treat this insn strictly in
6730 its validity determination, i.e., the way it would after reload
6731 has completed. */
6732 if (constrain_operands (code, 1))
6733 return insn;
6736 delete_insns_since (last);
6738 /* If that failed, we must use a conservative two-insn sequence:
6739 use a move to copy the constant, MEM, or pseudo register to the reload
6740 register, since a move can handle an arbitrary operand, unlike add,
6741 which in general cannot. Then add the registers.
6743 If there is another way to do this for a specific machine, a
6744 DEFINE_PEEPHOLE should be specified that recognizes the sequence
6745 we emit below. */
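/* If OP1 is the operand that only a general move can handle (a
   constant, MEM, or pseudo register), swap it into OP0: OP0 is what
   the move below copies into RELOADREG, and OP1 is what is added.  */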
6747 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM
6748 || (GET_CODE (op1) == REG
6749 && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
6750 tem = op0, op0 = op1, op1 = tem;
6752 emit_insn (gen_move_insn (reloadreg, op0));
6754 /* If OP0 and OP1 are the same, we can use RELOADREG for OP1.
6755 This fixes a problem on the 32K where the stack pointer cannot
6756 be used as an operand of an add insn. */
6758 if (rtx_equal_p (op0, op1))
6759 op1 = reloadreg;
6761 insn = emit_insn (gen_add2_insn (reloadreg, op1));
6763 /* If that failed, copy the address register to the reload register.
6764 Then add the constant to the reload register. */
6766 code = recog_memoized (insn);
6768 if (code >= 0)
6770 insn_extract (insn);
6771 /* We want constrain_operands to treat this insn strictly in
6772 its validity determination, i.e., the way it would after reload
6773 has completed. */
6774 if (constrain_operands (code, 1))
6775 return insn;
6778 delete_insns_since (last);
6780 emit_insn (gen_move_insn (reloadreg, op1));
6781 emit_insn (gen_add2_insn (reloadreg, op0));
6784 #ifdef SECONDARY_MEMORY_NEEDED
6785 /* If we need a memory location to do the move, do it that way. */
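/* (SECONDARY_MEMORY_NEEDED is nonzero for pairs of register classes
   whose contents cannot be copied directly in the given mode, e.g.
   general and floating-point registers on some machines; the copy is
   then staged through a memory location, typically a stack slot,
   obtained from get_secondary_mem.)  */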
6786 else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
6787 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
6788 REGNO_REG_CLASS (REGNO (reloadreg)),
6789 GET_MODE (reloadreg)))
6791 /* Get the memory to use and rewrite both registers to its mode. */
6792 rtx loc = get_secondary_mem (in, GET_MODE (reloadreg), opnum, type);
6794 if (GET_MODE (loc) != GET_MODE (reloadreg))
6795 reloadreg = gen_rtx (REG, GET_MODE (loc), REGNO (reloadreg));
6797 if (GET_MODE (loc) != GET_MODE (in))
6798 in = gen_rtx (REG, GET_MODE (loc), REGNO (in));
6800 emit_insn (gen_move_insn (loc, in));
6801 emit_insn (gen_move_insn (reloadreg, loc));
6803 #endif
6805 /* If IN is a simple operand, use gen_move_insn. */
6806 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
6807 emit_insn (gen_move_insn (reloadreg, in));
6809 #ifdef HAVE_reload_load_address
6810 else if (HAVE_reload_load_address)
6811 emit_insn (gen_reload_load_address (reloadreg, in));
6812 #endif
6814 /* Otherwise, just write (set RELOADREG IN) and hope for the best. */
6815 else
6816 emit_insn (gen_rtx (SET, VOIDmode, reloadreg, in));
6818 /* Return the first insn emitted.
6819 We cannot just return get_last_insn, because there may have
6820 been multiple instructions emitted. Also note that gen_move_insn may
6821 emit more than one insn itself, so we cannot assume that there is one
6822 insn emitted per emit_insn call. */
6824 return last ? NEXT_INSN (last) : get_insns ();
6827 /* Delete a previously made output-reload
6828 whose result we now believe is not needed.
6829 First we double-check.
6831 INSN is the insn now being processed.
6832 OUTPUT_RELOAD_INSN is the insn of the output reload.
6833 J is the reload-number for this insn. */
6835 static void
6836 delete_output_reload (insn, j, output_reload_insn)
6837 rtx insn;
6838 int j;
6839 rtx output_reload_insn;
6841 register rtx i1;
6843 /* Get the raw pseudo-register referred to. */
6845 rtx reg = reload_in[j];
6846 while (GET_CODE (reg) == SUBREG)
6847 reg = SUBREG_REG (reg);
6849 /* If the pseudo-reg we are reloading is no longer referenced
6850 anywhere between the store into it and here,
6851 and no jumps or labels intervene, then the value can get
6852 here through the reload reg alone.
6853 Otherwise, give up--return. */
6854 for (i1 = NEXT_INSN (output_reload_insn);
6855 i1 != insn; i1 = NEXT_INSN (i1))
6857 if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
6858 return;
6859 if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
6860 && reg_mentioned_p (reg, PATTERN (i1)))
6861 return;
6864 if (cannot_omit_stores[REGNO (reg)])
6865 return;
6867 /* If this insn will store in the pseudo again,
6868 the previous store can be removed. */
6869 if (reload_out[j] == reload_in[j])
6870 delete_insn (output_reload_insn);
6872 /* See if the pseudo reg has been completely replaced
6873 with reload regs. If so, delete the store insn
6874 and forget we had a stack slot for the pseudo. */
6875 else if (reg_n_deaths[REGNO (reg)] == 1
6876 && reg_basic_block[REGNO (reg)] >= 0
6877 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
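/* (That is: the pseudo is used in only one basic block, dies exactly
   once, and this insn carries its REG_DEAD note, so any remaining
   references must lie between the start of this block and INSN.)  */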
6879 rtx i2;
6881 /* We know that it was used only between here
6882 and the beginning of the current basic block.
6883 (We also know that the last use before INSN was
6884 the output reload we are thinking of deleting, but never mind that.)
6885 Search that range; see if any ref remains. */
6886 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
6888 rtx set = single_set (i2);
6890 /* Uses which just store in the pseudo don't count,
6891 since if they are the only uses, they are dead. */
6892 if (set != 0 && SET_DEST (set) == reg)
6893 continue;
6894 if (GET_CODE (i2) == CODE_LABEL
6895 || GET_CODE (i2) == JUMP_INSN)
6896 break;
6897 if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
6898 && reg_mentioned_p (reg, PATTERN (i2)))
6899 /* Some other ref remains;
6900 we can't do anything. */
6901 return;
6904 /* Delete the now-dead stores into this pseudo. */
6905 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
6907 rtx set = single_set (i2);
6909 if (set != 0 && SET_DEST (set) == reg)
6910 delete_insn (i2);
6911 if (GET_CODE (i2) == CODE_LABEL
6912 || GET_CODE (i2) == JUMP_INSN)
6913 break;
6916 /* For the debugging info,
6917 say the pseudo lives in this reload reg. */
6918 reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
6919 alter_reg (REGNO (reg), -1);
6923 /* Output reload-insns to reload VALUE into RELOADREG.
6924 VALUE is an autoincrement or autodecrement RTX whose operand
6925 is a register or memory location;
6926 so reloading involves incrementing that location.
6928 INC_AMOUNT is the number to increment or decrement by (always positive).
6929 This cannot be deduced from VALUE. */
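/* (VALUE has one of the forms (pre_inc X), (pre_dec X), (post_inc X)
   or (post_dec X), where X is the register or memory location to be
   incremented; RELOADREG must end up holding the address that the
   reloaded insn should actually use.)  */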
6931 static void
6932 inc_for_reload (reloadreg, value, inc_amount)
6933 rtx reloadreg;
6934 rtx value;
6935 int inc_amount;
6937 /* REG or MEM to be copied and incremented. */
6938 rtx incloc = XEXP (value, 0);
6939 /* Nonzero if increment after copying. */
6940 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
6941 rtx last;
6942 rtx inc;
6943 rtx add_insn;
6944 int code;
6946 /* No hard register is equivalent to this register after
6947 inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
6948 we could inc/dec that register as well (maybe even using it for
6949 the source), but I'm not sure it's worth worrying about. */
6950 if (GET_CODE (incloc) == REG)
6951 reg_last_reload_reg[REGNO (incloc)] = 0;
6953 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
6954 inc_amount = - inc_amount;
6956 inc = GEN_INT (inc_amount);
6958 /* If this is post-increment, first copy the location to the reload reg. */
6959 if (post)
6960 emit_insn (gen_move_insn (reloadreg, incloc));
6962 /* See if we can directly increment INCLOC. Use a method similar to that
6963 in gen_input_reload. */
6965 last = get_last_insn ();
6966 add_insn = emit_insn (gen_rtx (SET, VOIDmode, incloc,
6967 gen_rtx (PLUS, GET_MODE (incloc),
6968 incloc, inc)));
6970 code = recog_memoized (add_insn);
6971 if (code >= 0)
6973 insn_extract (add_insn);
6974 if (constrain_operands (code, 1))
6976 /* If this is a pre-increment and we have incremented the value
6977 where it lives, copy the incremented value to RELOADREG to
6978 be used as an address. */
6980 if (! post)
6981 emit_insn (gen_move_insn (reloadreg, incloc));
6983 return;
6987 delete_insns_since (last);
6989 /* If we couldn't do the increment directly, we must do it in RELOADREG.
6990 The way we do this depends on whether this is pre- or post-increment.
6991 For pre-increment, copy INCLOC to the reload register, increment it
6992 there, then save back. */
6994 if (! post)
6996 emit_insn (gen_move_insn (reloadreg, incloc));
6997 emit_insn (gen_add2_insn (reloadreg, inc));
6998 emit_insn (gen_move_insn (incloc, reloadreg));
7000 else
7002 /* Postincrement.
7003 Because this might be a jump insn or a compare, and because RELOADREG
7004 may not be available after the insn in an input reload, we must do
7005 the incrementation before the insn being reloaded for.
7007 We have already copied INCLOC to RELOADREG. Increment the copy in
7008 RELOADREG, save that back, then decrement RELOADREG so it has
7009 the original value. */
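/* For instance (illustrative): reloading (post_inc (reg A)) with
   INC_AMOUNT 4, when A cannot be incremented in place, emits (counting
   the copy already made above)
       R = A;  R = R + 4;  A = R;  R = R - 4;
   so A has been advanced but R still holds the original address for
   the insn being reloaded.  */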
7011 emit_insn (gen_add2_insn (reloadreg, inc));
7012 emit_insn (gen_move_insn (incloc, reloadreg));
7013 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
7016 return;
7019 /* Return 1 if we are certain that the constraint-string STRING allows
7020 the hard register REG. Return 0 if we can't be sure of this. */
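/* (STRING is an operand constraint such as "r,m": alternatives are
   separated by commas, and an alternative counts as accepting REG only
   if it contains `g', `r', or a register-class letter whose class
   contains REG's hard register.)  */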
7022 static int
7023 constraint_accepts_reg_p (string, reg)
7024 char *string;
7025 rtx reg;
7027 int value = 0;
7028 int regno = true_regnum (reg);
7029 int c;
7031 /* Initialize for first alternative. */
7032 value = 0;
7033 /* Check that each alternative accepts the register. */
7034 while (1)
7035 switch (c = *string++)
7037 case 0:
7038 /* End of string; whether we win depends on the last alternative. */
7039 return value;
7040 case ',':
7041 /* If the alternative just finished does not accept the register, we lose. */
7042 if (value == 0)
7043 return 0;
7044 /* Initialize for next alternative. */
7045 value = 0;
7046 break;
7047 case 'g':
7048 case 'r':
7049 /* Any general reg wins for this alternative. */
7050 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
7051 value = 1;
7052 break;
7053 default:
7054 /* Any reg in specified class wins for this alternative. */
7056 enum reg_class class = REG_CLASS_FROM_LETTER (c);
7058 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
7059 value = 1;
7064 /* Return the number of places FIND appears within X, but don't count
7065 an occurrence if some SET_DEST is FIND. */
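/* For example (illustrative, and assuming the usual sharing of REG
   rtx so that pointer comparison identifies the register): in
   (set (reg A) (plus (reg A) (reg B))), count_occurrences of (reg A)
   is 1, because the occurrence as the SET_DEST is skipped and only
   the use inside the SET_SRC is counted.  */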
7067 static int
7068 count_occurrences (x, find)
7069 register rtx x, find;
7071 register int i, j;
7072 register enum rtx_code code;
7073 register char *format_ptr;
7074 int count;
7076 if (x == find)
7077 return 1;
7078 if (x == 0)
7079 return 0;
7081 code = GET_CODE (x);
7083 switch (code)
7085 case REG:
7086 case QUEUED:
7087 case CONST_INT:
7088 case CONST_DOUBLE:
7089 case SYMBOL_REF:
7090 case CODE_LABEL:
7091 case PC:
7092 case CC0:
7093 return 0;
7095 case SET:
7096 if (SET_DEST (x) == find)
7097 return count_occurrences (SET_SRC (x), find);
7098 break;
7101 format_ptr = GET_RTX_FORMAT (code);
7102 count = 0;
7104 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7106 switch (*format_ptr++)
7108 case 'e':
7109 count += count_occurrences (XEXP (x, i), find);
7110 break;
7112 case 'E':
7113 if (XVEC (x, i) != NULL)
7115 for (j = 0; j < XVECLEN (x, i); j++)
7116 count += count_occurrences (XVECEXP (x, i, j), find);
7118 break;
7121 return count;