1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 88, 89, 92, 93, 1994 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
21 #include <stdio.h>
22 #include "config.h"
23 #include "rtl.h"
24 #include "obstack.h"
25 #include "insn-config.h"
26 #include "insn-flags.h"
27 #include "insn-codes.h"
28 #include "flags.h"
29 #include "expr.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "reload.h"
33 #include "recog.h"
34 #include "basic-block.h"
35 #include "output.h"
37 /* This file contains the reload pass of the compiler, which is
38 run after register allocation has been done. It checks that
39 each insn is valid (operands required to be in registers really
40 are in registers of the proper class) and fixes up invalid ones
41 by copying values temporarily into registers for the insns
42 that need them.
44 The results of register allocation are described by the vector
45 reg_renumber; the insns still contain pseudo regs, but reg_renumber
46 can be used to find which hard reg, if any, a pseudo reg is in.
48 The technique we always use is to free up a few hard regs that are
49 called ``reload regs'', and for each place where a pseudo reg
50 must be in a hard reg, copy it temporarily into one of the reload regs.
52 All the pseudos that were formerly allocated to the hard regs that
53 are now in use as reload regs must be ``spilled''. This means
54 that they go to other hard regs, or to stack slots if no other
55 available hard regs can be found. Spilling can invalidate more
56   insns, creating additional reload needs, so we must keep checking
57 until the process stabilizes.
59 For machines with different classes of registers, we must keep track
60 of the register class needed for each reload, and make sure that
61 we allocate enough reload registers of each class.
63 The file reload.c contains the code that checks one insn for
64 validity and reports the reloads that it needs. This file
65 is in charge of scanning the entire rtl code, accumulating the
66 reload needs, spilling, assigning reload registers to use for
67 fixing up each insn, and generating the new insns to copy values
68 into the reload registers. */
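/* Roughly, then, the pass below has this shape (a simplified sketch of
   the control flow, not the exact code):

	reload (first, global, dumpfile)
	  record reg_equiv_constant / reg_equiv_memory_loc for each pseudo;
	  set up the register-elimination table;
	  do
	    for each insn:
	      eliminate_regs_in_insn, then find_reloads, to collect its needs;
	      keep the per-class maxima of those needs;
	    spill hard regs (spill_hard_reg) until every maximum is met,
	      moving the displaced pseudos to other regs or stack slots;
	  while (something changed);
	  reload_as_needed ();   emit the actual reload insns.  */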
71 #ifndef REGISTER_MOVE_COST
72 #define REGISTER_MOVE_COST(x, y) 2
73 #endif
75 #ifndef MEMORY_MOVE_COST
76 #define MEMORY_MOVE_COST(x) 4
77 #endif
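/* With these defaults a register-register move is assumed to cost 2 and a
   move to or from memory 4, i.e. memory traffic is twice as expensive as a
   copy between registers; targets for which that is a poor estimate are
   expected to override these macros.  */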
79 /* During reload_as_needed, element N contains a REG rtx for the hard reg
80 into which reg N has been reloaded (perhaps for a previous insn). */
81 static rtx *reg_last_reload_reg;
83 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
84 for an output reload that stores into reg N. */
85 static char *reg_has_output_reload;
87 /* Indicates which hard regs are reload-registers for an output reload
88 in the current insn. */
89 static HARD_REG_SET reg_is_output_reload;
91 /* Element N is the constant value to which pseudo reg N is equivalent,
92 or zero if pseudo reg N is not equivalent to a constant.
93 find_reloads looks at this in order to replace pseudo reg N
94 with the constant it stands for. */
95 rtx *reg_equiv_constant;
97 /* Element N is a memory location to which pseudo reg N is equivalent,
98 prior to any register elimination (such as frame pointer to stack
99 pointer). Depending on whether or not it is a valid address, this value
100 is transferred to either reg_equiv_address or reg_equiv_mem. */
101 rtx *reg_equiv_memory_loc;
103 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
104 This is used when the address is not valid as a memory address
105    (because its displacement is too big for the machine).  */
106 rtx *reg_equiv_address;
108 /* Element N is the memory slot to which pseudo reg N is equivalent,
109 or zero if pseudo reg N is not equivalent to a memory slot. */
110 rtx *reg_equiv_mem;
112 /* Widest width in which each pseudo reg is referred to (via subreg). */
113 static int *reg_max_ref_width;
115 /* Element N is the insn that initialized reg N from its equivalent
116 constant or memory slot. */
117 static rtx *reg_equiv_init;
119 /* During reload_as_needed, element N contains the last pseudo regno
120 reloaded into the Nth reload register. This vector is in parallel
121 with spill_regs. If that pseudo reg occupied more than one register,
122 reg_reloaded_contents points to that pseudo for each spill register in
123 use; all of these must remain set for an inheritance to occur. */
124 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
126 /* During reload_as_needed, element N contains the insn for which
127 the Nth reload register was last used. This vector is in parallel
128 with spill_regs, and its contents are significant only when
129 reg_reloaded_contents is significant. */
130 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
132 /* Number of spill-regs so far; number of valid elements of spill_regs. */
133 static int n_spills;
135 /* In parallel with spill_regs, contains REG rtx's for those regs.
136 Holds the last rtx used for any given reg, or 0 if it has never
137 been used for spilling yet. This rtx is reused, provided it has
138 the proper mode. */
139 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
141 /* In parallel with spill_regs, contains nonzero for a spill reg
142 that was stored after the last time it was used.
143 The precise value is the insn generated to do the store. */
144 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
146 /* This table is the inverse mapping of spill_regs:
147 indexed by hard reg number,
148 it contains the position of that reg in spill_regs,
149 or -1 for something that is not in spill_regs. */
150 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
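/* For example, if hard reg 3 was the second register chosen for spilling,
   then spill_regs[1] == 3 and spill_reg_order[3] == 1; in general
   spill_regs[spill_reg_order[R]] == R whenever spill_reg_order[R] >= 0,
   and spill_reg_order[R] == -1 for every other hard reg R.  */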
152 /* This reg set indicates registers that may not be used for retrying global
153 allocation. The registers that may not be used include all spill registers
154 and the frame pointer (if we are using one). */
155 HARD_REG_SET forbidden_regs;
157 /* This reg set indicates registers that are not good for spill registers.
158 They will not be used to complete groups of spill registers. This includes
159 all fixed registers, registers that may be eliminated, and, if
160 SMALL_REGISTER_CLASSES is not defined, registers explicitly used in the rtl.
162 (spill_reg_order prevents these registers from being used to start a
163 group.) */
164 static HARD_REG_SET bad_spill_regs;
166 /* Describes order of use of registers for reloading
167 of spilled pseudo-registers. `spills' is the number of
168 elements that are actually valid; new ones are added at the end. */
169 static short spill_regs[FIRST_PSEUDO_REGISTER];
171 /* Describes order of preference for putting regs into spill_regs.
172 Contains the numbers of all the hard regs, in order most preferred first.
173 This order is different for each function.
174 It is set up by order_regs_for_reload.
175 Empty elements at the end contain -1. */
176 static short potential_reload_regs[FIRST_PSEUDO_REGISTER];
178 /* 1 for a hard register that appears explicitly in the rtl
179 (for example, function value registers, special registers
180 used by insns, structure value pointer registers). */
181 static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];
183 /* Indicates if a register was counted against the need for
184 groups. 0 means it can count against max_nongroup instead. */
185 static HARD_REG_SET counted_for_groups;
187 /* Indicates if a register was counted against the need for
188 non-groups. 0 means it can become part of a new group.
189 During choose_reload_regs, 1 here means don't use this reg
190 as part of a group, even if it seems to be otherwise ok. */
191 static HARD_REG_SET counted_for_nongroups;
193 /* Indexed by pseudo reg number N,
194    nonzero means we may not delete stores into the real (memory) home of pseudo N.
195 This is set if we already substituted a memory equivalent in some uses,
196 which happens when we have to eliminate the fp from it. */
197 static char *cannot_omit_stores;
199 /* Nonzero if indirect addressing is supported on the machine; this means
200 that spilling (REG n) does not require reloading it into a register in
201 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
202 value indicates the level of indirect addressing supported, e.g., two
203 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
204 a hard register. */
206 static char spill_indirect_levels;
208 /* Nonzero if indirect addressing is supported when the innermost MEM is
209 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
210 which these are valid is the same as spill_indirect_levels, above. */
212 char indirect_symref_ok;
214 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
216 char double_reg_address_ok;
218 /* Record the stack slot for each spilled hard register. */
220 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
222 /* Width allocated so far for that stack slot. */
224 static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
226 /* Indexed by register class and basic block number, nonzero if there is
227 any need for a spill register of that class in that basic block.
228 The pointer is 0 if we did stupid allocation and don't know
229 the structure of basic blocks. */
231 char *basic_block_needs[N_REG_CLASSES];
233 /* First uid used by insns created by reload in this function.
234 Used in find_equiv_reg. */
235 int reload_first_uid;
237 /* Flag set by local-alloc or global-alloc if anything is live in
238 a call-clobbered reg across calls. */
240 int caller_save_needed;
242 /* Set to 1 while reload_as_needed is operating.
243 Required by some machines to handle any generated moves differently. */
245 int reload_in_progress = 0;
247 /* These arrays record the insn_code of insns that may be needed to
248 perform input and output reloads of special objects. They provide a
249 place to pass a scratch register. */
251 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
252 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
254 /* This obstack is used for allocation of rtl during register elimination.
255 The allocated storage can be freed once find_reloads has processed the
256 insn. */
258 struct obstack reload_obstack;
259 char *reload_firstobj;
261 #define obstack_chunk_alloc xmalloc
262 #define obstack_chunk_free free
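/* The intended pattern: rtl built while eliminating registers in an insn
   lives on this obstack, and once find_reloads has processed that insn the
   storage is released again with

	obstack_free (&reload_obstack, reload_firstobj);

   (see the handling of did_elimination in the main loop below), so the
   temporary rtl never outlives the processing of one insn.  */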
264 /* List of labels that must never be deleted. */
265 extern rtx forced_labels;
267 /* This structure is used to record information about register eliminations.
268 Each array entry describes one possible way of eliminating a register
269 in favor of another. If there is more than one way of eliminating a
270 particular register, the most preferred should be specified first. */
272 static struct elim_table
274 int from; /* Register number to be eliminated. */
275 int to; /* Register number used as replacement. */
276 int initial_offset; /* Initial difference between values. */
277 int can_eliminate; /* Non-zero if this elimination can be done. */
278 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
279 insns made by reload. */
280 int offset; /* Current offset between the two regs. */
281 int max_offset; /* Maximum offset between the two regs. */
282 int previous_offset; /* Offset at end of previous insn. */
283 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
284 rtx from_rtx; /* REG rtx for the register to be eliminated.
285 We cannot simply compare the number since
286 we might then spuriously replace a hard
287 register corresponding to a pseudo
288 assigned to the reg to be eliminated. */
289 rtx to_rtx; /* REG rtx for the replacement. */
290 } reg_eliminate[] =
292 /* If a set of eliminable registers was specified, define the table from it.
293 Otherwise, default to the normal case of the frame pointer being
294 replaced by the stack pointer. */
296 #ifdef ELIMINABLE_REGS
297 ELIMINABLE_REGS;
298 #else
299 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
300 #endif
302 #define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
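/* With the default table above NUM_ELIMINABLE_REGS is 1.  A port that
   defines ELIMINABLE_REGS will usually list several entries; e.g. a
   hypothetical definition such as

	#define ELIMINABLE_REGS					\
	{{ ARG_POINTER_REGNUM,   STACK_POINTER_REGNUM },	\
	 { ARG_POINTER_REGNUM,   FRAME_POINTER_REGNUM },	\
	 { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM }}

   gives NUM_ELIMINABLE_REGS == 3 (the "A == ap, B == sp, C == fp"
   situation discussed further down).  */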
304 /* Record the number of pending eliminations that have an offset not equal
305 to their initial offset. If non-zero, we use a new copy of each
306 replacement result in any insns encountered. */
307 static int num_not_at_initial_offset;
309 /* Count the number of registers that we may be able to eliminate. */
310 static int num_eliminable;
312 /* For each label, we record the offset of each elimination. If we reach
313 a label by more than one path and an offset differs, we cannot do the
314 elimination. This information is indexed by the number of the label.
315 The first table is an array of flags that records whether we have yet
316 encountered a label and the second table is an array of arrays, one
317 entry in the latter array for each elimination. */
319 static char *offsets_known_at;
320 static int (*offsets_at)[NUM_ELIMINABLE_REGS];
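/* Once allocated (and biased by get_first_label_num () below) these are
   indexed directly by label number:

	offsets_known_at[CODE_LABEL_NUMBER (label)]	nonzero once the label
							has been reached
	offsets_at[CODE_LABEL_NUMBER (label)][i]	offset of
							reg_eliminate[i] there  */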
322 /* Number of labels in the current function. */
324 static int num_labels;
326 struct hard_reg_n_uses { int regno; int uses; };
328 static int possible_group_p PROTO((int, int *));
329 static void count_possible_groups PROTO((int *, enum machine_mode *,
330 int *));
331 static int modes_equiv_for_class_p PROTO((enum machine_mode,
332 enum machine_mode,
333 enum reg_class));
334 static void spill_failure PROTO((rtx));
335 static int new_spill_reg PROTO((int, int, int *, int *, int,
336 FILE *));
337 static void delete_dead_insn PROTO((rtx));
338 static void alter_reg PROTO((int, int));
339 static void mark_scratch_live PROTO((rtx));
340 static void set_label_offsets PROTO((rtx, rtx, int));
341 static int eliminate_regs_in_insn PROTO((rtx, int));
342 static void mark_not_eliminable PROTO((rtx, rtx));
343 static int spill_hard_reg PROTO((int, int, FILE *, int));
344 static void scan_paradoxical_subregs PROTO((rtx));
345 static int hard_reg_use_compare PROTO((struct hard_reg_n_uses *,
346 struct hard_reg_n_uses *));
347 static void order_regs_for_reload PROTO((void));
348 static int compare_spill_regs PROTO((short *, short *));
349 static void reload_as_needed PROTO((rtx, int));
350 static void forget_old_reloads_1 PROTO((rtx, rtx));
351 static int reload_reg_class_lower PROTO((short *, short *));
352 static void mark_reload_reg_in_use PROTO((int, int, enum reload_type,
353 enum machine_mode));
354 static void clear_reload_reg_in_use PROTO((int, int, enum reload_type,
355 enum machine_mode));
356 static int reload_reg_free_p PROTO((int, int, enum reload_type));
357 static int reload_reg_free_before_p PROTO((int, int, enum reload_type));
358 static int reload_reg_reaches_end_p PROTO((int, int, enum reload_type));
359 static int reloads_conflict PROTO((int, int));
360 static int allocate_reload_reg PROTO((int, rtx, int, int));
361 static void choose_reload_regs PROTO((rtx, rtx));
362 static void merge_assigned_reloads PROTO((rtx));
363 static void emit_reload_insns PROTO((rtx));
364 static void delete_output_reload PROTO((rtx, int, rtx));
365 static void inc_for_reload PROTO((rtx, rtx, int));
366 static int constraint_accepts_reg_p PROTO((char *, rtx));
367 static int count_occurrences PROTO((rtx, rtx));
369 /* Initialize the reload pass once per compilation. */
371 void
372 init_reload ()
374 register int i;
376 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
377 Set spill_indirect_levels to the number of levels such addressing is
378 permitted, zero if it is not permitted at all. */
380 register rtx tem
381 = gen_rtx (MEM, Pmode,
382 gen_rtx (PLUS, Pmode,
383 gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
384 GEN_INT (4)));
385 spill_indirect_levels = 0;
387 while (memory_address_p (QImode, tem))
389 spill_indirect_levels++;
390 tem = gen_rtx (MEM, Pmode, tem);
393 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
395 tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
396 indirect_symref_ok = memory_address_p (QImode, tem);
398 /* See if reg+reg is a valid (and offsettable) address. */
400 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
402 tem = gen_rtx (PLUS, Pmode,
403 gen_rtx (REG, Pmode, HARD_FRAME_POINTER_REGNUM),
404 gen_rtx (REG, Pmode, i));
405 /* This way, we make sure that reg+reg is an offsettable address. */
406 tem = plus_constant (tem, 4);
408 if (memory_address_p (QImode, tem))
410 double_reg_address_ok = 1;
411 break;
415 /* Initialize obstack for our rtl allocation. */
416 gcc_obstack_init (&reload_obstack);
417 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
420 /* Main entry point for the reload pass.
422 FIRST is the first insn of the function being compiled.
424 GLOBAL nonzero means we were called from global_alloc
425 and should attempt to reallocate any pseudoregs that we
426 displace from hard regs we will use for reloads.
427 If GLOBAL is zero, we do not have enough information to do that,
428 so any pseudo reg that is spilled must go to the stack.
430 DUMPFILE is the global-reg debugging dump file stream, or 0.
431 If it is nonzero, messages are written to it to describe
432 which registers are seized as reload regs, which pseudo regs
433 are spilled from them, and where the pseudo regs are reallocated to.
435 Return value is nonzero if reload failed
436 and we must not do any more for this function. */
439 reload (first, global, dumpfile)
440 rtx first;
441 int global;
442 FILE *dumpfile;
444 register int class;
445 register int i, j, k;
446 register rtx insn;
447 register struct elim_table *ep;
449 int something_changed;
450 int something_needs_reloads;
451 int something_needs_elimination;
452 int new_basic_block_needs;
453 enum reg_class caller_save_spill_class = NO_REGS;
454 int caller_save_group_size = 1;
456 /* Nonzero means we couldn't get enough spill regs. */
457 int failure = 0;
459 /* The basic block number currently being processed for INSN. */
460 int this_block;
462 /* Make sure even insns with volatile mem refs are recognizable. */
463 init_recog ();
465 /* Enable find_equiv_reg to distinguish insns made by reload. */
466 reload_first_uid = get_max_uid ();
468 for (i = 0; i < N_REG_CLASSES; i++)
469 basic_block_needs[i] = 0;
471 #ifdef SECONDARY_MEMORY_NEEDED
472 /* Initialize the secondary memory table. */
473 clear_secondary_mem ();
474 #endif
476 /* Remember which hard regs appear explicitly
477 before we merge into `regs_ever_live' the ones in which
478 pseudo regs have been allocated. */
479 bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);
481 /* We don't have a stack slot for any spill reg yet. */
482 bzero ((char *) spill_stack_slot, sizeof spill_stack_slot);
483 bzero ((char *) spill_stack_slot_width, sizeof spill_stack_slot_width);
485 /* Initialize the save area information for caller-save, in case some
486 are needed. */
487 init_save_areas ();
489 /* Compute which hard registers are now in use
490 as homes for pseudo registers.
491 This is done here rather than (eg) in global_alloc
492 because this point is reached even if not optimizing. */
494 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
495 mark_home_live (i);
497 for (i = 0; i < scratch_list_length; i++)
498 if (scratch_list[i])
499 mark_scratch_live (scratch_list[i]);
501 /* Make sure that the last insn in the chain
502 is not something that needs reloading. */
503 emit_note (NULL_PTR, NOTE_INSN_DELETED);
505 /* Find all the pseudo registers that didn't get hard regs
506 but do have known equivalent constants or memory slots.
507 These include parameters (known equivalent to parameter slots)
508 and cse'd or loop-moved constant memory addresses.
510 Record constant equivalents in reg_equiv_constant
511 so they will be substituted by find_reloads.
512 Record memory equivalents in reg_mem_equiv so they can
513 be substituted eventually by altering the REG-rtx's. */
515 reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
516 bzero ((char *) reg_equiv_constant, max_regno * sizeof (rtx));
517 reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
518 bzero ((char *) reg_equiv_memory_loc, max_regno * sizeof (rtx));
519 reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
520 bzero ((char *) reg_equiv_mem, max_regno * sizeof (rtx));
521 reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
522 bzero ((char *) reg_equiv_init, max_regno * sizeof (rtx));
523 reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
524 bzero ((char *) reg_equiv_address, max_regno * sizeof (rtx));
525 reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
526 bzero ((char *) reg_max_ref_width, max_regno * sizeof (int));
527 cannot_omit_stores = (char *) alloca (max_regno);
528 bzero (cannot_omit_stores, max_regno);
530 #ifdef SMALL_REGISTER_CLASSES
531 CLEAR_HARD_REG_SET (forbidden_regs);
532 #endif
534 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
535 Also find all paradoxical subregs and find largest such for each pseudo.
536 On machines with small register classes, record hard registers that
537 are used for user variables. These can never be used for spills. */
539 for (insn = first; insn; insn = NEXT_INSN (insn))
541 rtx set = single_set (insn);
543 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
545 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
546 if (note
547 #ifdef LEGITIMATE_PIC_OPERAND_P
548 && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
549 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
550 #endif
553 rtx x = XEXP (note, 0);
554 i = REGNO (SET_DEST (set));
555 if (i > LAST_VIRTUAL_REGISTER)
557 if (GET_CODE (x) == MEM)
558 reg_equiv_memory_loc[i] = x;
559 else if (CONSTANT_P (x))
561 if (LEGITIMATE_CONSTANT_P (x))
562 reg_equiv_constant[i] = x;
563 else
564 reg_equiv_memory_loc[i]
565 = force_const_mem (GET_MODE (SET_DEST (set)), x);
567 else
568 continue;
570 /* If this register is being made equivalent to a MEM
571 and the MEM is not SET_SRC, the equivalencing insn
572 is one with the MEM as a SET_DEST and it occurs later.
573 So don't mark this insn now. */
574 if (GET_CODE (x) != MEM
575 || rtx_equal_p (SET_SRC (set), x))
576 reg_equiv_init[i] = insn;
581 /* If this insn is setting a MEM from a register equivalent to it,
582 this is the equivalencing insn. */
583 else if (set && GET_CODE (SET_DEST (set)) == MEM
584 && GET_CODE (SET_SRC (set)) == REG
585 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
586 && rtx_equal_p (SET_DEST (set),
587 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
588 reg_equiv_init[REGNO (SET_SRC (set))] = insn;
590 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
591 scan_paradoxical_subregs (PATTERN (insn));
594 /* Does this function require a frame pointer? */
596 frame_pointer_needed = (! flag_omit_frame_pointer
597 #ifdef EXIT_IGNORE_STACK
598 /* ?? If EXIT_IGNORE_STACK is set, we will not save
599 and restore sp for alloca. So we can't eliminate
600 the frame pointer in that case. At some point,
601 we should improve this by emitting the
602 sp-adjusting insns for this case. */
603 || (current_function_calls_alloca
604 && EXIT_IGNORE_STACK)
605 #endif
606 || FRAME_POINTER_REQUIRED);
608 num_eliminable = 0;
610 /* Initialize the table of registers to eliminate. The way we do this
611 depends on how the eliminable registers were defined. */
612 #ifdef ELIMINABLE_REGS
613 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
615 ep->can_eliminate = ep->can_eliminate_previous
616 = (CAN_ELIMINATE (ep->from, ep->to)
617 && (ep->from != HARD_FRAME_POINTER_REGNUM
618 || ! frame_pointer_needed));
620 #else
621 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
622 = ! frame_pointer_needed;
623 #endif
625 /* Count the number of eliminable registers and build the FROM and TO
626 REG rtx's. Note that code in gen_rtx will cause, e.g.,
627 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
628 We depend on this. */
629 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
631 num_eliminable += ep->can_eliminate;
632 ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
633 ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
636 num_labels = max_label_num () - get_first_label_num ();
638 /* Allocate the tables used to store offset information at labels. */
639 offsets_known_at = (char *) alloca (num_labels);
640 offsets_at
641 = (int (*)[NUM_ELIMINABLE_REGS])
642 alloca (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
644 offsets_known_at -= get_first_label_num ();
645 offsets_at -= get_first_label_num ();
647 /* Alter each pseudo-reg rtx to contain its hard reg number.
648 Assign stack slots to the pseudos that lack hard regs or equivalents.
649 Do not touch virtual registers. */
651 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
652 alter_reg (i, -1);
654 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done here
655 because the stack size may be a part of the offset computation for
656 register elimination. */
657 assign_stack_local (BLKmode, 0, 0);
659 /* If we have some registers we think can be eliminated, scan all insns to
660 see if there is an insn that sets one of these registers to something
661 other than itself plus a constant. If so, the register cannot be
662 eliminated. Doing this scan here eliminates an extra pass through the
663 main reload loop in the most common case where register elimination
664 cannot be done. */
665 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
666 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
667 || GET_CODE (insn) == CALL_INSN)
668 note_stores (PATTERN (insn), mark_not_eliminable);
670 #ifndef REGISTER_CONSTRAINTS
671 /* If all the pseudo regs have hard regs,
672 except for those that are never referenced,
673 we know that no reloads are needed. */
674 /* But that is not true if there are register constraints, since
675 in that case some pseudos might be in the wrong kind of hard reg. */
677 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
678 if (reg_renumber[i] == -1 && reg_n_refs[i] != 0)
679 break;
681 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
682 return;
683 #endif
685 /* Compute the order of preference for hard registers to spill.
686 Store them by decreasing preference in potential_reload_regs. */
688 order_regs_for_reload ();
690 /* So far, no hard regs have been spilled. */
691 n_spills = 0;
692 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
693 spill_reg_order[i] = -1;
695 /* On most machines, we can't use any register explicitly used in the
696 rtl as a spill register. But on some, we have to. Those will have
697 taken care to keep the life of hard regs as short as possible. */
699 #ifndef SMALL_REGISTER_CLASSES
700 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
701 #endif
703 /* Spill any hard regs that we know we can't eliminate. */
704 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
705 if (! ep->can_eliminate)
707 spill_hard_reg (ep->from, global, dumpfile, 1);
708 regs_ever_live[ep->from] = 1;
711 if (global)
712 for (i = 0; i < N_REG_CLASSES; i++)
714 basic_block_needs[i] = (char *) alloca (n_basic_blocks);
715 bzero (basic_block_needs[i], n_basic_blocks);
718 /* From now on, we need to emit any moves without making new pseudos. */
719 reload_in_progress = 1;
721 /* This loop scans the entire function each go-round
722 and repeats until one repetition spills no additional hard regs. */
724 /* This flag is set when a pseudo reg is spilled,
725 to require another pass. Note that getting an additional reload
726 reg does not necessarily imply any pseudo reg was spilled;
727 sometimes we find a reload reg that no pseudo reg was allocated in. */
728 something_changed = 1;
729 /* This flag is set if there are any insns that require reloading. */
730 something_needs_reloads = 0;
731 /* This flag is set if there are any insns that require register
732 eliminations. */
733 something_needs_elimination = 0;
734 while (something_changed)
736 rtx after_call = 0;
738 /* For each class, number of reload regs needed in that class.
739 This is the maximum over all insns of the needs in that class
740 of the individual insn. */
741 int max_needs[N_REG_CLASSES];
742 /* For each class, size of group of consecutive regs
743 that is needed for the reloads of this class. */
744 int group_size[N_REG_CLASSES];
745 /* For each class, max number of consecutive groups needed.
746 (Each group contains group_size[CLASS] consecutive registers.) */
747 int max_groups[N_REG_CLASSES];
748 /* For each class, max number needed of regs that don't belong
749 to any of the groups. */
750 int max_nongroups[N_REG_CLASSES];
751 /* For each class, the machine mode which requires consecutive
752 groups of regs of that class.
753 If two different modes ever require groups of one class,
754 they must be the same size and equally restrictive for that class,
755 otherwise we can't handle the complexity. */
756 enum machine_mode group_mode[N_REG_CLASSES];
757 /* Record the insn where each maximum need is first found. */
758 rtx max_needs_insn[N_REG_CLASSES];
759 rtx max_groups_insn[N_REG_CLASSES];
760 rtx max_nongroups_insn[N_REG_CLASSES];
761 rtx x;
762 int starting_frame_size = get_frame_size ();
763 static char *reg_class_names[] = REG_CLASS_NAMES;
765 something_changed = 0;
766 bzero ((char *) max_needs, sizeof max_needs);
767 bzero ((char *) max_groups, sizeof max_groups);
768 bzero ((char *) max_nongroups, sizeof max_nongroups);
769 bzero ((char *) max_needs_insn, sizeof max_needs_insn);
770 bzero ((char *) max_groups_insn, sizeof max_groups_insn);
771 bzero ((char *) max_nongroups_insn, sizeof max_nongroups_insn);
772 bzero ((char *) group_size, sizeof group_size);
773 for (i = 0; i < N_REG_CLASSES; i++)
774 group_mode[i] = VOIDmode;
776 /* Keep track of which basic blocks are needing the reloads. */
777 this_block = 0;
779 /* Remember whether any element of basic_block_needs
780 changes from 0 to 1 in this pass. */
781 new_basic_block_needs = 0;
783 /* Reset all offsets on eliminable registers to their initial values. */
784 #ifdef ELIMINABLE_REGS
785 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
787 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
788 ep->previous_offset = ep->offset
789 = ep->max_offset = ep->initial_offset;
791 #else
792 #ifdef INITIAL_FRAME_POINTER_OFFSET
793 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
794 #else
795 if (!FRAME_POINTER_REQUIRED)
796 abort ();
797 reg_eliminate[0].initial_offset = 0;
798 #endif
799 reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
800 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
801 #endif
803 num_not_at_initial_offset = 0;
805 bzero ((char *) &offsets_known_at[get_first_label_num ()], num_labels);
807 /* Set a known offset for each forced label to be at the initial offset
808 of each elimination. We do this because we assume that all
809 computed jumps occur from a location where each elimination is
810 at its initial offset. */
812 for (x = forced_labels; x; x = XEXP (x, 1))
813 if (XEXP (x, 0))
814 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
816 /* For each pseudo register that has an equivalent location defined,
817 try to eliminate any eliminable registers (such as the frame pointer)
818 assuming initial offsets for the replacement register, which
819 is the normal case.
821 If the resulting location is directly addressable, substitute
822 the MEM we just got directly for the old REG.
824 If it is not addressable but is a constant or the sum of a hard reg
825 and constant, it is probably not addressable because the constant is
826 out of range, in that case record the address; we will generate
827 hairy code to compute the address in a register each time it is
828 needed. Similarly if it is a hard register, but one that is not
829 valid as an address register.
831 If the location is not addressable, but does not have one of the
832 above forms, assign a stack slot. We have to do this to avoid the
833 potential of producing lots of reloads if, e.g., a location involves
834 a pseudo that didn't get a hard register and has an equivalent memory
835 location that also involves a pseudo that didn't get a hard register.
837 Perhaps at some point we will improve reload_when_needed handling
838 so this problem goes away. But that's very hairy. */
840 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
841 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
843 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
845 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
846 XEXP (x, 0)))
847 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
848 else if (CONSTANT_P (XEXP (x, 0))
849 || (GET_CODE (XEXP (x, 0)) == REG
850 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
851 || (GET_CODE (XEXP (x, 0)) == PLUS
852 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
853 && (REGNO (XEXP (XEXP (x, 0), 0))
854 < FIRST_PSEUDO_REGISTER)
855 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
856 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
857 else
859 /* Make a new stack slot. Then indicate that something
860 changed so we go back and recompute offsets for
861 eliminable registers because the allocation of memory
862 below might change some offset. reg_equiv_{mem,address}
863 will be set up for this pseudo on the next pass around
864 the loop. */
865 reg_equiv_memory_loc[i] = 0;
866 reg_equiv_init[i] = 0;
867 alter_reg (i, -1);
868 something_changed = 1;
872 /* If we allocated another pseudo to the stack, redo elimination
873 bookkeeping. */
874 if (something_changed)
875 continue;
877 /* If caller-saves needs a group, initialize the group to include
878 the size and mode required for caller-saves. */
880 if (caller_save_group_size > 1)
882 group_mode[(int) caller_save_spill_class] = Pmode;
883 group_size[(int) caller_save_spill_class] = caller_save_group_size;
886 /* Compute the most additional registers needed by any instruction.
887 Collect information separately for each class of regs. */
889 for (insn = first; insn; insn = NEXT_INSN (insn))
891 if (global && this_block + 1 < n_basic_blocks
892 && insn == basic_block_head[this_block+1])
893 ++this_block;
895 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
896 might include REG_LABEL), we need to see what effects this
897 has on the known offsets at labels. */
899 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
900 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
901 && REG_NOTES (insn) != 0))
902 set_label_offsets (insn, insn, 0);
904 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
906 /* Nonzero means don't use a reload reg that overlaps
907 the place where a function value can be returned. */
908 rtx avoid_return_reg = 0;
910 rtx old_body = PATTERN (insn);
911 int old_code = INSN_CODE (insn);
912 rtx old_notes = REG_NOTES (insn);
913 int did_elimination = 0;
915 /* To compute the number of reload registers of each class
916	     needed for an insn, we must simulate what choose_reload_regs
917 can do. We do this by splitting an insn into an "input" and
918 an "output" part. RELOAD_OTHER reloads are used in both.
919 The input part uses those reloads, RELOAD_FOR_INPUT reloads,
920 which must be live over the entire input section of reloads,
921 and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
922 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
923 inputs.
925 The registers needed for output are RELOAD_OTHER and
926 RELOAD_FOR_OUTPUT, which are live for the entire output
927 portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
928 reloads for each operand.
930 The total number of registers needed is the maximum of the
931 inputs and outputs. */
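	  /* For instance, in some class an insn might need 1 RELOAD_OTHER
	     reg, 2 RELOAD_FOR_INPUT regs and 1 RELOAD_FOR_OUTPUT reg: the
	     input part then needs 1 + 2 = 3 and the output part 1 + 1 = 2,
	     so 3 regs of that class must be free for this insn.  */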
933 struct needs
935 /* [0] is normal, [1] is nongroup. */
936 int regs[2][N_REG_CLASSES];
937 int groups[N_REG_CLASSES];
940 /* Each `struct needs' corresponds to one RELOAD_... type. */
941 struct {
942 struct needs other;
943 struct needs input;
944 struct needs output;
945 struct needs insn;
946 struct needs other_addr;
947 struct needs op_addr;
948 struct needs op_addr_reload;
949 struct needs in_addr[MAX_RECOG_OPERANDS];
950 struct needs out_addr[MAX_RECOG_OPERANDS];
951 } insn_needs;
953 /* If needed, eliminate any eliminable registers. */
954 if (num_eliminable)
955 did_elimination = eliminate_regs_in_insn (insn, 0);
957 #ifdef SMALL_REGISTER_CLASSES
958 /* Set avoid_return_reg if this is an insn
959 that might use the value of a function call. */
960 if (GET_CODE (insn) == CALL_INSN)
962 if (GET_CODE (PATTERN (insn)) == SET)
963 after_call = SET_DEST (PATTERN (insn));
964 else if (GET_CODE (PATTERN (insn)) == PARALLEL
965 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
966 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
967 else
968 after_call = 0;
970 else if (after_call != 0
971 && !(GET_CODE (PATTERN (insn)) == SET
972 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
974 if (reg_referenced_p (after_call, PATTERN (insn)))
975 avoid_return_reg = after_call;
976 after_call = 0;
978 #endif /* SMALL_REGISTER_CLASSES */
980 /* Analyze the instruction. */
981 find_reloads (insn, 0, spill_indirect_levels, global,
982 spill_reg_order);
984 /* Remember for later shortcuts which insns had any reloads or
985 register eliminations.
987 One might think that it would be worthwhile to mark insns
988 that need register replacements but not reloads, but this is
989 not safe because find_reloads may do some manipulation of
990 the insn (such as swapping commutative operands), which would
991 be lost when we restore the old pattern after register
992 replacement. So the actions of find_reloads must be redone in
993 subsequent passes or in reload_as_needed.
995 However, it is safe to mark insns that need reloads
996 but not register replacement. */
998 PUT_MODE (insn, (did_elimination ? QImode
999 : n_reloads ? HImode
1000 : GET_MODE (insn) == DImode ? DImode
1001 : VOIDmode));
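	  /* That is, the insn's machine mode now records what later passes
	     must redo: QImode means register eliminations (and possibly
	     reloads) are needed, HImode means reloads only, an insn already
	     marked DImode stays DImode, and VOIDmode means nothing needs
	     to be done.  */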
1003 /* Discard any register replacements done. */
1004 if (did_elimination)
1006 obstack_free (&reload_obstack, reload_firstobj);
1007 PATTERN (insn) = old_body;
1008 INSN_CODE (insn) = old_code;
1009 REG_NOTES (insn) = old_notes;
1010 something_needs_elimination = 1;
1013 /* If this insn has no reloads, we need not do anything except
1014 in the case of a CALL_INSN when we have caller-saves and
1015 caller-save needs reloads. */
1017 if (n_reloads == 0
1018 && ! (GET_CODE (insn) == CALL_INSN
1019 && caller_save_spill_class != NO_REGS))
1020 continue;
1022 something_needs_reloads = 1;
1023 bzero ((char *) &insn_needs, sizeof insn_needs);
1025 /* Count each reload once in every class
1026 containing the reload's own class. */
1028 for (i = 0; i < n_reloads; i++)
1030 register enum reg_class *p;
1031 enum reg_class class = reload_reg_class[i];
1032 int size;
1033 enum machine_mode mode;
1034 int nongroup_need;
1035 struct needs *this_needs;
1037 /* Don't count the dummy reloads, for which one of the
1038 regs mentioned in the insn can be used for reloading.
1039 Don't count optional reloads.
1040 Don't count reloads that got combined with others. */
1041 if (reload_reg_rtx[i] != 0
1042 || reload_optional[i] != 0
1043 || (reload_out[i] == 0 && reload_in[i] == 0
1044 && ! reload_secondary_p[i]))
1045 continue;
1047 /* Show that a reload register of this class is needed
1048 in this basic block. We do not use insn_needs and
1049 insn_groups because they are overly conservative for
1050 this purpose. */
1051 if (global && ! basic_block_needs[(int) class][this_block])
1053 basic_block_needs[(int) class][this_block] = 1;
1054 new_basic_block_needs = 1;
1058 mode = reload_inmode[i];
1059 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1060 mode = reload_outmode[i];
1061 size = CLASS_MAX_NREGS (class, mode);
1063 /* If this class doesn't want a group, determine if we have
1064 a nongroup need or a regular need. We have a nongroup
1065 need if this reload conflicts with a group reload whose
1066 class intersects with this reload's class. */
1068 nongroup_need = 0;
1069 if (size == 1)
1070 for (j = 0; j < n_reloads; j++)
1071 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1072 (GET_MODE_SIZE (reload_outmode[j])
1073 > GET_MODE_SIZE (reload_inmode[j]))
1074 ? reload_outmode[j]
1075 : reload_inmode[j])
1076 > 1)
1077 && (!reload_optional[j])
1078 && (reload_in[j] != 0 || reload_out[j] != 0
1079 || reload_secondary_p[j])
1080 && reloads_conflict (i, j)
1081 && reg_classes_intersect_p (class,
1082 reload_reg_class[j]))
1084 nongroup_need = 1;
1085 break;
1088 /* Decide which time-of-use to count this reload for. */
1089 switch (reload_when_needed[i])
1091 case RELOAD_OTHER:
1092 this_needs = &insn_needs.other;
1093 break;
1094 case RELOAD_FOR_INPUT:
1095 this_needs = &insn_needs.input;
1096 break;
1097 case RELOAD_FOR_OUTPUT:
1098 this_needs = &insn_needs.output;
1099 break;
1100 case RELOAD_FOR_INSN:
1101 this_needs = &insn_needs.insn;
1102 break;
1103 case RELOAD_FOR_OTHER_ADDRESS:
1104 this_needs = &insn_needs.other_addr;
1105 break;
1106 case RELOAD_FOR_INPUT_ADDRESS:
1107 this_needs = &insn_needs.in_addr[reload_opnum[i]];
1108 break;
1109 case RELOAD_FOR_OUTPUT_ADDRESS:
1110 this_needs = &insn_needs.out_addr[reload_opnum[i]];
1111 break;
1112 case RELOAD_FOR_OPERAND_ADDRESS:
1113 this_needs = &insn_needs.op_addr;
1114 break;
1115 case RELOAD_FOR_OPADDR_ADDR:
1116 this_needs = &insn_needs.op_addr_reload;
1117 break;
1120 if (size > 1)
1122 enum machine_mode other_mode, allocate_mode;
1124 /* Count number of groups needed separately from
1125 number of individual regs needed. */
1126 this_needs->groups[(int) class]++;
1127 p = reg_class_superclasses[(int) class];
1128 while (*p != LIM_REG_CLASSES)
1129 this_needs->groups[(int) *p++]++;
1131 /* Record size and mode of a group of this class. */
1132 /* If more than one size group is needed,
1133 make all groups the largest needed size. */
1134 if (group_size[(int) class] < size)
1136 other_mode = group_mode[(int) class];
1137 allocate_mode = mode;
1139 group_size[(int) class] = size;
1140 group_mode[(int) class] = mode;
1142 else
1144 other_mode = mode;
1145 allocate_mode = group_mode[(int) class];
1148 /* Crash if two dissimilar machine modes both need
1149 groups of consecutive regs of the same class. */
1151 if (other_mode != VOIDmode && other_mode != allocate_mode
1152 && ! modes_equiv_for_class_p (allocate_mode,
1153 other_mode, class))
1154 abort ();
1156 else if (size == 1)
1158 this_needs->regs[nongroup_need][(int) class] += 1;
1159 p = reg_class_superclasses[(int) class];
1160 while (*p != LIM_REG_CLASSES)
1161 this_needs->regs[nongroup_need][(int) *p++] += 1;
1163 else
1164 abort ();
1167 /* All reloads have been counted for this insn;
1168 now merge the various times of use.
1169 This sets insn_needs, etc., to the maximum total number
1170 of registers needed at any point in this insn. */
1172 for (i = 0; i < N_REG_CLASSES; i++)
1174 int in_max, out_max;
1176 /* Compute normal and nongroup needs. */
1177 for (j = 0; j <= 1; j++)
1179 for (in_max = 0, out_max = 0, k = 0;
1180 k < reload_n_operands; k++)
1182 in_max
1183 = MAX (in_max, insn_needs.in_addr[k].regs[j][i]);
1184 out_max
1185 = MAX (out_max, insn_needs.out_addr[k].regs[j][i]);
1188 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1189 and operand addresses but not things used to reload
1190 them. Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
1191 don't conflict with things needed to reload inputs or
1192 outputs. */
1194 in_max = MAX (MAX (insn_needs.op_addr.regs[j][i],
1195 insn_needs.op_addr_reload.regs[j][i]),
1196 in_max);
1198 out_max = MAX (out_max, insn_needs.insn.regs[j][i]);
1200 insn_needs.input.regs[j][i]
1201 = MAX (insn_needs.input.regs[j][i]
1202 + insn_needs.op_addr.regs[j][i]
1203 + insn_needs.insn.regs[j][i],
1204 in_max + insn_needs.input.regs[j][i]);
1206 insn_needs.output.regs[j][i] += out_max;
1207 insn_needs.other.regs[j][i]
1208 += MAX (MAX (insn_needs.input.regs[j][i],
1209 insn_needs.output.regs[j][i]),
1210 insn_needs.other_addr.regs[j][i]);
1214 /* Now compute group needs. */
1215 for (in_max = 0, out_max = 0, j = 0;
1216 j < reload_n_operands; j++)
1218 in_max = MAX (in_max, insn_needs.in_addr[j].groups[i]);
1219 out_max
1220 = MAX (out_max, insn_needs.out_addr[j].groups[i]);
1223 in_max = MAX (MAX (insn_needs.op_addr.groups[i],
1224 insn_needs.op_addr_reload.groups[i]),
1225 in_max);
1226 out_max = MAX (out_max, insn_needs.insn.groups[i]);
1228 insn_needs.input.groups[i]
1229 = MAX (insn_needs.input.groups[i]
1230 + insn_needs.op_addr.groups[i]
1231 + insn_needs.insn.groups[i],
1232 in_max + insn_needs.input.groups[i]);
1234 insn_needs.output.groups[i] += out_max;
1235 insn_needs.other.groups[i]
1236 += MAX (MAX (insn_needs.input.groups[i],
1237 insn_needs.output.groups[i]),
1238 insn_needs.other_addr.groups[i]);
1241 /* If this is a CALL_INSN and caller-saves will need
1242 a spill register, act as if the spill register is
1243 needed for this insn. However, the spill register
1244 can be used by any reload of this insn, so we only
1245 need do something if no need for that class has
1246 been recorded.
1248 The assumption that every CALL_INSN will trigger a
1249 caller-save is highly conservative, however, the number
1250 of cases where caller-saves will need a spill register but
1251 a block containing a CALL_INSN won't need a spill register
1252 of that class should be quite rare.
1254 If a group is needed, the size and mode of the group will
1255 have been set up at the beginning of this loop. */
1257 if (GET_CODE (insn) == CALL_INSN
1258 && caller_save_spill_class != NO_REGS)
1260 /* See if this register would conflict with any reload
1261 that needs a group. */
1262 int nongroup_need = 0;
1263 int *caller_save_needs;
1265 for (j = 0; j < n_reloads; j++)
1266 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1267 (GET_MODE_SIZE (reload_outmode[j])
1268 > GET_MODE_SIZE (reload_inmode[j]))
1269 ? reload_outmode[j]
1270 : reload_inmode[j])
1271 > 1)
1272 && reg_classes_intersect_p (caller_save_spill_class,
1273 reload_reg_class[j]))
1275 nongroup_need = 1;
1276 break;
1279 caller_save_needs
1280 = (caller_save_group_size > 1
1281 ? insn_needs.other.groups
1282 : insn_needs.other.regs[nongroup_need]);
1284 if (caller_save_needs[(int) caller_save_spill_class] == 0)
1286 register enum reg_class *p
1287 = reg_class_superclasses[(int) caller_save_spill_class];
1289 caller_save_needs[(int) caller_save_spill_class]++;
1291 while (*p != LIM_REG_CLASSES)
1292 caller_save_needs[(int) *p++] += 1;
1295 /* Show that this basic block will need a register of
1296 this class. */
1298 if (global
1299 && ! (basic_block_needs[(int) caller_save_spill_class]
1300 [this_block]))
1302 basic_block_needs[(int) caller_save_spill_class]
1303 [this_block] = 1;
1304 new_basic_block_needs = 1;
1308 #ifdef SMALL_REGISTER_CLASSES
1309 /* If this insn stores the value of a function call,
1310 and that value is in a register that has been spilled,
1311 and if the insn needs a reload in a class
1312 that might use that register as the reload register,
1313	     then add an extra need in that class.
1314 This makes sure we have a register available that does
1315 not overlap the return value. */
1317 if (avoid_return_reg)
1319 int regno = REGNO (avoid_return_reg);
1320 int nregs
1321 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
1322 int r;
1323 int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES];
1325 /* First compute the "basic needs", which counts a
1326 need only in the smallest class in which it
1327 is required. */
1329 bcopy (insn_needs.other.regs[0], basic_needs,
1330 sizeof basic_needs);
1331 bcopy (insn_needs.other.groups, basic_groups,
1332 sizeof basic_groups);
1334 for (i = 0; i < N_REG_CLASSES; i++)
1336 enum reg_class *p;
1338 if (basic_needs[i] >= 0)
1339 for (p = reg_class_superclasses[i];
1340 *p != LIM_REG_CLASSES; p++)
1341 basic_needs[(int) *p] -= basic_needs[i];
1343 if (basic_groups[i] >= 0)
1344 for (p = reg_class_superclasses[i];
1345 *p != LIM_REG_CLASSES; p++)
1346 basic_groups[(int) *p] -= basic_groups[i];
1349 /* Now count extra regs if there might be a conflict with
1350 the return value register.
1352 ??? This is not quite correct because we don't properly
1353 handle the case of groups, but if we end up doing
1354 something wrong, it either will end up not mattering or
1355 we will abort elsewhere. */
1357 for (r = regno; r < regno + nregs; r++)
1358 if (spill_reg_order[r] >= 0)
1359 for (i = 0; i < N_REG_CLASSES; i++)
1360 if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
1362 if (basic_needs[i] > 0 || basic_groups[i] > 0)
1364 enum reg_class *p;
1366 insn_needs.other.regs[0][i]++;
1367 p = reg_class_superclasses[i];
1368 while (*p != LIM_REG_CLASSES)
1369 insn_needs.other.regs[0][(int) *p++]++;
1373 #endif /* SMALL_REGISTER_CLASSES */
1375 /* For each class, collect maximum need of any insn. */
1377 for (i = 0; i < N_REG_CLASSES; i++)
1379 if (max_needs[i] < insn_needs.other.regs[0][i])
1381 max_needs[i] = insn_needs.other.regs[0][i];
1382 max_needs_insn[i] = insn;
1384 if (max_groups[i] < insn_needs.other.groups[i])
1386 max_groups[i] = insn_needs.other.groups[i];
1387 max_groups_insn[i] = insn;
1389 if (max_nongroups[i] < insn_needs.other.regs[1][i])
1391 max_nongroups[i] = insn_needs.other.regs[1][i];
1392 max_nongroups_insn[i] = insn;
1396 /* Note that there is a continue statement above. */
1399 /* If we allocated any new memory locations, make another pass
1400 since it might have changed elimination offsets. */
1401 if (starting_frame_size != get_frame_size ())
1402 something_changed = 1;
1404 if (dumpfile)
1405 for (i = 0; i < N_REG_CLASSES; i++)
1407 if (max_needs[i] > 0)
1408 fprintf (dumpfile,
1409 ";; Need %d reg%s of class %s (for insn %d).\n",
1410 max_needs[i], max_needs[i] == 1 ? "" : "s",
1411 reg_class_names[i], INSN_UID (max_needs_insn[i]));
1412 if (max_nongroups[i] > 0)
1413 fprintf (dumpfile,
1414 ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
1415 max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
1416 reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
1417 if (max_groups[i] > 0)
1418 fprintf (dumpfile,
1419 ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
1420 max_groups[i], max_groups[i] == 1 ? "" : "s",
1421 mode_name[(int) group_mode[i]],
1422 reg_class_names[i], INSN_UID (max_groups_insn[i]));
1425 /* If we have caller-saves, set up the save areas and see if caller-save
1426 will need a spill register. */
1428 if (caller_save_needed
1429 && ! setup_save_areas (&something_changed)
1430 && caller_save_spill_class == NO_REGS)
1432 /* The class we will need depends on whether the machine
1433 supports the sum of two registers for an address; see
1434 find_address_reloads for details. */
1436 caller_save_spill_class
1437 = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
1438 caller_save_group_size
1439 = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
1440 something_changed = 1;
1443 /* See if anything that happened changes which eliminations are valid.
1444 For example, on the Sparc, whether or not the frame pointer can
1445 be eliminated can depend on what registers have been used. We need
1446 not check some conditions again (such as flag_omit_frame_pointer)
1447 since they can't have changed. */
1449 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1450 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
1451 #ifdef ELIMINABLE_REGS
1452 || ! CAN_ELIMINATE (ep->from, ep->to)
1453 #endif
1455 ep->can_eliminate = 0;
1457 /* Look for the case where we have discovered that we can't replace
1458 register A with register B and that means that we will now be
1459 trying to replace register A with register C. This means we can
1460 no longer replace register C with register B and we need to disable
1461 such an elimination, if it exists. This occurs often with A == ap,
1462 B == sp, and C == fp. */
1464 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1466 struct elim_table *op;
1467 register int new_to = -1;
1469 if (! ep->can_eliminate && ep->can_eliminate_previous)
1471 /* Find the current elimination for ep->from, if there is a
1472 new one. */
1473 for (op = reg_eliminate;
1474 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1475 if (op->from == ep->from && op->can_eliminate)
1477 new_to = op->to;
1478 break;
1481 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1482 disable it. */
1483 for (op = reg_eliminate;
1484 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1485 if (op->from == new_to && op->to == ep->to)
1486 op->can_eliminate = 0;
1490 /* See if any registers that we thought we could eliminate the previous
1491 time are no longer eliminable. If so, something has changed and we
1492 must spill the register. Also, recompute the number of eliminable
1493 registers and see if the frame pointer is needed; it is if there is
1494 no elimination of the frame pointer that we can perform. */
1496 frame_pointer_needed = 1;
1497 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1499 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
1500 && ep->to != HARD_FRAME_POINTER_REGNUM)
1501 frame_pointer_needed = 0;
1503 if (! ep->can_eliminate && ep->can_eliminate_previous)
1505 ep->can_eliminate_previous = 0;
1506 spill_hard_reg (ep->from, global, dumpfile, 1);
1507 regs_ever_live[ep->from] = 1;
1508 something_changed = 1;
1509 num_eliminable--;
1513 /* If all needs are met, we win. */
1515 for (i = 0; i < N_REG_CLASSES; i++)
1516 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1517 break;
1518 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1519 break;
1521 /* Not all needs are met; must spill some hard regs. */
1523 /* Put all registers spilled so far back in potential_reload_regs, but
1524 put them at the front, since we've already spilled most of the
1525	 pseudos in them (we might have left some pseudos unspilled if they
1526 were in a block that didn't need any spill registers of a conflicting
1527	 class).  We used to try to mark off the need for those registers,
1528 but doing so properly is very complex and reallocating them is the
1529 simpler approach. First, "pack" potential_reload_regs by pushing
1530 any nonnegative entries towards the end. That will leave room
1531 for the registers we already spilled.
1533 Also, undo the marking of the spill registers from the last time
1534	 around in FORBIDDEN_REGS since we will probably be allocating
1535 them again below.
1537 ??? It is theoretically possible that we might end up not using one
1538 of our previously-spilled registers in this allocation, even though
1539 they are at the head of the list. It's not clear what to do about
1540 this, but it was no better before, when we marked off the needs met
1541 by the previously-spilled registers. With the current code, globals
1542 can be allocated into these registers, but locals cannot. */
1544 if (n_spills)
1546 for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1547 if (potential_reload_regs[i] != -1)
1548 potential_reload_regs[j--] = potential_reload_regs[i];
1550 for (i = 0; i < n_spills; i++)
1552 potential_reload_regs[i] = spill_regs[i];
1553 spill_reg_order[spill_regs[i]] = -1;
1554 CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
1557 n_spills = 0;
1560 /* Now find more reload regs to satisfy the remaining need
1561 Do it by ascending class number, since otherwise a reg
1562 might be spilled for a big class and might fail to count
1563 for a smaller class even though it belongs to that class.
1565 Count spilled regs in `spills', and add entries to
1566 `spill_regs' and `spill_reg_order'.
1568 ??? Note there is a problem here.
1569 When there is a need for a group in a high-numbered class,
1570 and also need for non-group regs that come from a lower class,
1571 the non-group regs are chosen first. If there aren't many regs,
1572 they might leave no room for a group.
1574 This was happening on the 386. To fix it, we added the code
1575 that calls possible_group_p, so that the lower class won't
1576 break up the last possible group.
1578 Really fixing the problem would require changes above
1579 in counting the regs already spilled, and in choose_reload_regs.
1580 It might be hard to avoid introducing bugs there. */
1582 CLEAR_HARD_REG_SET (counted_for_groups);
1583 CLEAR_HARD_REG_SET (counted_for_nongroups);
1585 for (class = 0; class < N_REG_CLASSES; class++)
1587 /* First get the groups of registers.
1588 If we got single registers first, we might fragment
1589 possible groups. */
1590 while (max_groups[class] > 0)
1592 /* If any single spilled regs happen to form groups,
1593 count them now. Maybe we don't really need
1594 to spill another group. */
1595 count_possible_groups (group_size, group_mode, max_groups);
1597 if (max_groups[class] <= 0)
1598 break;
1600 /* Groups of size 2 (the only groups used on most machines)
1601 are treated specially. */
1602 if (group_size[class] == 2)
1604 /* First, look for a register that will complete a group. */
1605 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1607 int other;
1609 j = potential_reload_regs[i];
1610 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1612 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1613 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1614 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1615 && HARD_REGNO_MODE_OK (other, group_mode[class])
1616 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1617 other)
1618 /* We don't want one part of another group.
1619 We could get "two groups" that overlap! */
1620 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1622 (j < FIRST_PSEUDO_REGISTER - 1
1623 && (other = j + 1, spill_reg_order[other] >= 0)
1624 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1625 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1626 && HARD_REGNO_MODE_OK (j, group_mode[class])
1627 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1628 other)
1629 && ! TEST_HARD_REG_BIT (counted_for_groups,
1630 other))))
1632 register enum reg_class *p;
1634 /* We have found one that will complete a group,
1635 so count off one group as provided. */
1636 max_groups[class]--;
1637 p = reg_class_superclasses[class];
1638 while (*p != LIM_REG_CLASSES)
1639 max_groups[(int) *p++]--;
1641 /* Indicate both these regs are part of a group. */
1642 SET_HARD_REG_BIT (counted_for_groups, j);
1643 SET_HARD_REG_BIT (counted_for_groups, other);
1644 break;
1647 /* We can't complete a group, so start one. */
1648 #ifdef SMALL_REGISTER_CLASSES
1649 /* Look for a pair neither of which is explicitly used. */
1650 if (i == FIRST_PSEUDO_REGISTER)
1651 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1653 int k;
1654 j = potential_reload_regs[i];
1655 /* Verify that J+1 is a potential reload reg. */
1656 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1657 if (potential_reload_regs[k] == j + 1)
1658 break;
1659 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1660 && k < FIRST_PSEUDO_REGISTER
1661 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1662 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1663 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1664 && HARD_REGNO_MODE_OK (j, group_mode[class])
1665 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1666 j + 1)
1667 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1)
1668 /* Reject J at this stage
1669 if J+1 was explicitly used. */
1670 && ! regs_explicitly_used[j + 1])
1671 break;
1673 #endif
1674 /* Now try any group at all
1675 whose registers are not in bad_spill_regs. */
1676 if (i == FIRST_PSEUDO_REGISTER)
1677 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1679 int k;
1680 j = potential_reload_regs[i];
1681 /* Verify that J+1 is a potential reload reg. */
1682 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1683 if (potential_reload_regs[k] == j + 1)
1684 break;
1685 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1686 && k < FIRST_PSEUDO_REGISTER
1687 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1688 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1689 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1690 && HARD_REGNO_MODE_OK (j, group_mode[class])
1691 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1692 j + 1)
1693 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
1694 break;
1697 /* I should be the index in potential_reload_regs
1698 of the new reload reg we have found. */
1700 if (i >= FIRST_PSEUDO_REGISTER)
1702 /* There are no groups left to spill. */
1703 spill_failure (max_groups_insn[class]);
1704 failure = 1;
1705 goto failed;
1707 else
1708 something_changed
1709 |= new_spill_reg (i, class, max_needs, NULL_PTR,
1710 global, dumpfile);
1712 else
1714 /* For groups of more than 2 registers,
1715 look for a sufficient sequence of unspilled registers,
1716 and spill them all at once. */
1717 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1719 int k;
1721 j = potential_reload_regs[i];
1722 if (j >= 0
1723 && j + group_size[class] <= FIRST_PSEUDO_REGISTER
1724 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1726 /* Check each reg in the sequence. */
1727 for (k = 0; k < group_size[class]; k++)
1728 if (! (spill_reg_order[j + k] < 0
1729 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1730 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1731 break;
1732 /* We got a full sequence, so spill them all. */
1733 if (k == group_size[class])
1735 register enum reg_class *p;
1736 for (k = 0; k < group_size[class]; k++)
1738 int idx;
1739 SET_HARD_REG_BIT (counted_for_groups, j + k);
1740 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1741 if (potential_reload_regs[idx] == j + k)
1742 break;
1743 something_changed
1744 |= new_spill_reg (idx, class,
1745 max_needs, NULL_PTR,
1746 global, dumpfile);
1749 /* We have found one that will complete a group,
1750 so count off one group as provided. */
1751 max_groups[class]--;
1752 p = reg_class_superclasses[class];
1753 while (*p != LIM_REG_CLASSES)
1754 max_groups[(int) *p++]--;
1756 break;
1760 /* We couldn't find any registers for this reload.
1761 Avoid going into an infinite loop. */
1762 if (i >= FIRST_PSEUDO_REGISTER)
1764 /* There are no groups left. */
1765 spill_failure (max_groups_insn[class]);
1766 failure = 1;
1767 goto failed;
1772 /* Now similarly satisfy all need for single registers. */
1774 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1776 #ifdef SMALL_REGISTER_CLASSES
1777 /* This should be right for all machines, but only the 386
1778 is known to need it, so this conditional plays safe.
1779 ??? For 2.5, try making this unconditional. */
1780 /* If we spilled enough regs, but they weren't counted
1781 against the non-group need, see if we can count them now.
1782 If so, we can avoid some actual spilling. */
1783 if (max_needs[class] <= 0 && max_nongroups[class] > 0)
1784 for (i = 0; i < n_spills; i++)
1785 if (TEST_HARD_REG_BIT (reg_class_contents[class],
1786 spill_regs[i])
1787 && !TEST_HARD_REG_BIT (counted_for_groups,
1788 spill_regs[i])
1789 && !TEST_HARD_REG_BIT (counted_for_nongroups,
1790 spill_regs[i])
1791 && max_nongroups[class] > 0)
1793 register enum reg_class *p;
1795 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1796 max_nongroups[class]--;
1797 p = reg_class_superclasses[class];
1798 while (*p != LIM_REG_CLASSES)
1799 max_nongroups[(int) *p++]--;
1801 if (max_needs[class] <= 0 && max_nongroups[class] <= 0)
1802 break;
1803 #endif
1805 /* Consider the potential reload regs that aren't
1806 yet in use as reload regs, in order of preference.
1807 Find the most preferred one that's in this class. */
1809 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1810 if (potential_reload_regs[i] >= 0
1811 && TEST_HARD_REG_BIT (reg_class_contents[class],
1812 potential_reload_regs[i])
1813 /* If this reg will not be available for groups,
1814 pick one that does not foreclose possible groups.
1815 This is a kludge, and not very general,
1816 but it should be sufficient to make the 386 work,
1817 and the problem should not occur on machines with
1818 more registers. */
1819 && (max_nongroups[class] == 0
1820 || possible_group_p (potential_reload_regs[i], max_groups)))
1821 break;
1823 /* If we couldn't get a register, try to get one even if we
1824 might foreclose possible groups. This may cause problems
1825 later, but that's better than aborting now, since it is
1826 possible that we will, in fact, be able to form the needed
1827 group even with this allocation. */
1829 if (i >= FIRST_PSEUDO_REGISTER
1830 && (asm_noperands (max_needs[class] > 0
1831 ? max_needs_insn[class]
1832 : max_nongroups_insn[class])
1833 < 0))
1834 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1835 if (potential_reload_regs[i] >= 0
1836 && TEST_HARD_REG_BIT (reg_class_contents[class],
1837 potential_reload_regs[i]))
1838 break;
1840 /* I should be the index in potential_reload_regs
1841 of the new reload reg we have found. */
1843 if (i >= FIRST_PSEUDO_REGISTER)
1845 /* There are no possible registers left to spill. */
1846 spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
1847 : max_nongroups_insn[class]);
1848 failure = 1;
1849 goto failed;
1851 else
1852 something_changed
1853 |= new_spill_reg (i, class, max_needs, max_nongroups,
1854 global, dumpfile);
1859 /* If global-alloc was run, notify it of any register eliminations we have
1860 done. */
1861 if (global)
1862 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1863 if (ep->can_eliminate)
1864 mark_elimination (ep->from, ep->to);
1866 /* Insert code to save and restore call-clobbered hard regs
1867 around calls. Tell what mode to use so that we will process
1868 those insns in reload_as_needed if we have to. */
1870 if (caller_save_needed)
1871 save_call_clobbered_regs (num_eliminable ? QImode
1872 : caller_save_spill_class != NO_REGS ? HImode
1873 : VOIDmode);
1875 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1876 If that insn didn't set the register (i.e., it copied the register to
1877 memory), just delete that insn instead of the equivalencing insn plus
1878 anything now dead. If we call delete_dead_insn on that insn, we may
1879 delete the insn that actually sets the register if the register dies
1880 there and that is incorrect. */
1882 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1883 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
1884 && GET_CODE (reg_equiv_init[i]) != NOTE)
1886 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
1887 delete_dead_insn (reg_equiv_init[i]);
1888 else
1890 PUT_CODE (reg_equiv_init[i], NOTE);
1891 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
1892 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
1896 /* Use the reload registers where necessary
1897 by generating move instructions to move the must-be-register
1898 values into or out of the reload registers. */
1900 if (something_needs_reloads || something_needs_elimination
1901 || (caller_save_needed && num_eliminable)
1902 || caller_save_spill_class != NO_REGS)
1903 reload_as_needed (first, global);
1905 /* If we were able to eliminate the frame pointer, show that it is no
1906 longer live at the start of any basic block. If it is live by
1907 virtue of being in a pseudo, that pseudo will be marked live
1908 and hence the frame pointer will be known to be live via that
1909 pseudo. */
1911 if (! frame_pointer_needed)
1912 for (i = 0; i < n_basic_blocks; i++)
1913 basic_block_live_at_start[i][HARD_FRAME_POINTER_REGNUM / REGSET_ELT_BITS]
1914 &= ~ ((REGSET_ELT_TYPE) 1 << (HARD_FRAME_POINTER_REGNUM
1915 % REGSET_ELT_BITS));
1917 /* Come here (with failure set nonzero) if we can't get enough spill regs
1918 and we decide not to abort about it. */
1919 failed:
1921 reload_in_progress = 0;
1923 /* Now eliminate all pseudo regs by modifying them into
1924 their equivalent memory references.
1925 The REG-rtx's for the pseudos are modified in place,
1926 so all insns that used to refer to them now refer to memory.
1928 For a reg that has a reg_equiv_address, all those insns
1929 were changed by reloading so that no insns refer to it any longer;
1930 but the DECL_RTL of a variable decl may refer to it,
1931 and if so this causes the debugging info to mention the variable. */
1933 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1935 rtx addr = 0;
1936 int in_struct = 0;
1937 if (reg_equiv_mem[i])
1939 addr = XEXP (reg_equiv_mem[i], 0);
1940 in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
1942 if (reg_equiv_address[i])
1943 addr = reg_equiv_address[i];
1944 if (addr)
1946 if (reg_renumber[i] < 0)
1948 rtx reg = regno_reg_rtx[i];
1949 XEXP (reg, 0) = addr;
1950 REG_USERVAR_P (reg) = 0;
1951 MEM_IN_STRUCT_P (reg) = in_struct;
1952 PUT_CODE (reg, MEM);
1954 else if (reg_equiv_mem[i])
1955 XEXP (reg_equiv_mem[i], 0) = addr;
1959 #ifdef PRESERVE_DEATH_INFO_REGNO_P
1960 /* Make a pass over all the insns and remove death notes for things that
1961 are no longer registers or no longer die in the insn (e.g., an input
1962 and output pseudo being tied). */
1964 for (insn = first; insn; insn = NEXT_INSN (insn))
1965 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1967 rtx note, next;
1969 for (note = REG_NOTES (insn); note; note = next)
1971 next = XEXP (note, 1);
1972 if (REG_NOTE_KIND (note) == REG_DEAD
1973 && (GET_CODE (XEXP (note, 0)) != REG
1974 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
1975 remove_note (insn, note);
1978 #endif
1980 /* Indicate that we no longer have known memory locations or constants. */
1981 reg_equiv_constant = 0;
1982 reg_equiv_memory_loc = 0;
1984 if (scratch_list)
1985 free (scratch_list);
1986 scratch_list = 0;
1987 if (scratch_block)
1988 free (scratch_block);
1989 scratch_block = 0;
1991 return failure;
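/* Illustrative sketch only (hypothetical names; not part of the pass):
   the kind of scan the group-spilling code in reload above performs when
   a group larger than two registers is needed.  USABLE[R] is nonzero when
   hard reg R could still be spilled for the class in question; the
   function returns the lowest index starting a run of LEN consecutive
   usable registers, or -1 if no such run exists.  */

static int
find_register_run_sketch (usable, nregs, len)
     char *usable;
     int nregs;
     int len;
{
  int start, k;

  for (start = 0; start + len <= nregs; start++)
    {
      /* Check each register in the candidate sequence.  */
      for (k = 0; k < len; k++)
        if (! usable[start + k])
          break;
      if (k == len)
        return start;
    }
  return -1;
}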
1994 /* Nonzero if, after spilling reg REGNO for non-groups,
1995 it will still be possible to find a group if we still need one. */
1997 static int
1998 possible_group_p (regno, max_groups)
1999 int regno;
2000 int *max_groups;
2002 int i;
2003 int class = (int) NO_REGS;
2005 for (i = 0; i < (int) N_REG_CLASSES; i++)
2006 if (max_groups[i] > 0)
2008 class = i;
2009 break;
2012 if (class == (int) NO_REGS)
2013 return 1;
2015 /* Consider each pair of consecutive registers. */
2016 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2018 /* Ignore pairs that include reg REGNO. */
2019 if (i == regno || i + 1 == regno)
2020 continue;
2022 /* Ignore pairs that are outside the class that needs the group.
2023 ??? Here we fail to handle the case where two different classes
2024 independently need groups. But this never happens with our
2025 current machine descriptions. */
2026 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2027 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2028 continue;
2030 /* A pair of consecutive regs we can still spill does the trick. */
2031 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2032 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2033 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2034 return 1;
2036 /* A pair of one already spilled and one we can spill does it
2037 provided the one already spilled is not otherwise reserved. */
2038 if (spill_reg_order[i] < 0
2039 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2040 && spill_reg_order[i + 1] >= 0
2041 && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
2042 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
2043 return 1;
2044 if (spill_reg_order[i + 1] < 0
2045 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2046 && spill_reg_order[i] >= 0
2047 && ! TEST_HARD_REG_BIT (counted_for_groups, i)
2048 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
2049 return 1;
2052 return 0;
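/* Illustrative sketch only (hypothetical names, plain arrays): the pair
   test possible_group_p makes above.  A pair starting at I can still be
   provided when both registers can still be spilled, or when one can
   still be spilled and the other is already spilled but not reserved for
   a group or non-group need.  */

static int
pair_usable_sketch (spilled, forbidden, reserved, i)
     char *spilled;
     char *forbidden;
     char *reserved;
     int i;
{
  /* Both registers can still be spilled.  */
  if (! spilled[i] && ! forbidden[i]
      && ! spilled[i + 1] && ! forbidden[i + 1])
    return 1;
  /* One can still be spilled; the other is spilled but unreserved.  */
  if (! spilled[i] && ! forbidden[i]
      && spilled[i + 1] && ! reserved[i + 1])
    return 1;
  if (! spilled[i + 1] && ! forbidden[i + 1]
      && spilled[i] && ! reserved[i])
    return 1;
  return 0;
}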
2055 /* Count any groups that can be formed from the registers recently spilled.
2056 This is done class by class, in order of ascending class number. */
2058 static void
2059 count_possible_groups (group_size, group_mode, max_groups)
2060 int *group_size;
2061 enum machine_mode *group_mode;
2062 int *max_groups;
2064 int i;
2065 /* Now find all consecutive groups of spilled registers
2066 and mark each group off against the need for such groups.
2067 But don't count them against ordinary need, yet. */
2069 for (i = 0; i < N_REG_CLASSES; i++)
2070 if (group_size[i] > 1)
2072 HARD_REG_SET new;
2073 int j;
2075 CLEAR_HARD_REG_SET (new);
2077 /* Make a mask of all the regs that are spill regs in class I. */
2078 for (j = 0; j < n_spills; j++)
2079 if (TEST_HARD_REG_BIT (reg_class_contents[i], spill_regs[j])
2080 && ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[j])
2081 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
2082 spill_regs[j]))
2083 SET_HARD_REG_BIT (new, spill_regs[j]);
2085 /* Find each consecutive group of them. */
2086 for (j = 0; j < FIRST_PSEUDO_REGISTER && max_groups[i] > 0; j++)
2087 if (TEST_HARD_REG_BIT (new, j)
2088 && j + group_size[i] <= FIRST_PSEUDO_REGISTER
2089 /* Next line in case group-mode for this class
2090 demands an even-odd pair. */
2091 && HARD_REGNO_MODE_OK (j, group_mode[i]))
2093 int k;
2094 for (k = 1; k < group_size[i]; k++)
2095 if (! TEST_HARD_REG_BIT (new, j + k))
2096 break;
2097 if (k == group_size[i])
2099 /* We found a group. Mark it off against this class's
2100 need for groups, and against each superclass too. */
2101 register enum reg_class *p;
2102 max_groups[i]--;
2103 p = reg_class_superclasses[i];
2104 while (*p != LIM_REG_CLASSES)
2105 max_groups[(int) *p++]--;
2106 /* Don't count these registers again. */
2107 for (k = 0; k < group_size[i]; k++)
2108 SET_HARD_REG_BIT (counted_for_groups, j + k);
2110 /* Skip to the last reg in this group. When j is incremented
2111 above, it will then point to the first reg of the next
2112 possible group. */
2113 j += k - 1;
2119 /* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2120 another mode that needs to be reloaded for the same register class CLASS.
2121 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2122 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2124 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2125 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2126 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2127 causes unnecessary failures on machines requiring alignment of register
2128 groups when the two modes are different sizes, because the larger mode has
2129 more strict alignment rules than the smaller mode. */
2131 static int
2132 modes_equiv_for_class_p (allocate_mode, other_mode, class)
2133 enum machine_mode allocate_mode, other_mode;
2134 enum reg_class class;
2136 register int regno;
2137 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2139 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2140 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2141 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2142 return 0;
2144 return 1;
2147 /* Handle the failure to find a register to spill.
2148 INSN should be one of the insns which needed this particular spill reg. */
2150 static void
2151 spill_failure (insn)
2152 rtx insn;
2154 if (asm_noperands (PATTERN (insn)) >= 0)
2155 error_for_asm (insn, "`asm' needs too many reloads");
2156 else
2157 abort ();
2160 /* Add a new register to the tables of available spill-registers
2161 (as well as spilling all pseudos allocated to the register).
2162 I is the index of this register in potential_reload_regs.
2163 CLASS is the regclass whose need is being satisfied.
2164 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
2165 so that this register can count off against them.
2166 MAX_NONGROUPS is 0 if this register is part of a group.
2167 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
2169 static int
2170 new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
2171 int i;
2172 int class;
2173 int *max_needs;
2174 int *max_nongroups;
2175 int global;
2176 FILE *dumpfile;
2178 register enum reg_class *p;
2179 int val;
2180 int regno = potential_reload_regs[i];
2182 if (i >= FIRST_PSEUDO_REGISTER)
2183 abort (); /* Caller failed to find any register. */
2185 if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
2186 fatal ("fixed or forbidden register was spilled.\n\
2187 This may be due to a compiler bug or to impossible asm\n\
2188 statements or clauses.");
2190 /* Make reg REGNO an additional reload reg. */
2192 potential_reload_regs[i] = -1;
2193 spill_regs[n_spills] = regno;
2194 spill_reg_order[regno] = n_spills;
2195 if (dumpfile)
2196 fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);
2198 /* Clear off the needs we just satisfied. */
2200 max_needs[class]--;
2201 p = reg_class_superclasses[class];
2202 while (*p != LIM_REG_CLASSES)
2203 max_needs[(int) *p++]--;
2205 if (max_nongroups && max_nongroups[class] > 0)
2207 SET_HARD_REG_BIT (counted_for_nongroups, regno);
2208 max_nongroups[class]--;
2209 p = reg_class_superclasses[class];
2210 while (*p != LIM_REG_CLASSES)
2211 max_nongroups[(int) *p++]--;
2214 /* Spill every pseudo reg that was allocated to this reg
2215 or to something that overlaps this reg. */
2217 val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);
2219 /* If there are some registers still to eliminate and this register
2220 wasn't ever used before, additional stack space may have to be
2221 allocated to store this register. Thus, we may have changed the offset
2222 between the stack and frame pointers, so mark that something has changed.
2223 (If new pseudos were spilled, thus requiring more space, VAL would have
2224 been set non-zero by the call to spill_hard_reg above since additional
2225 reloads may be needed in that case.)
2227 One might think that we need only set VAL to 1 if this is a call-used
2228 register. However, the set of registers that must be saved by the
2229 prologue is not identical to the call-used set. For example, the
2230 register used by the call insn for the return PC is a call-used register,
2231 but must be saved by the prologue. */
2232 if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
2233 val = 1;
2235 regs_ever_live[spill_regs[n_spills]] = 1;
2236 n_spills++;
2238 return val;
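/* Illustrative sketch only (hypothetical names): the counting pattern
   new_spill_reg and several loops above use.  Satisfying a need for one
   class also counts against every superclass, where SUPERCLASSES[C] is
   assumed here to be a list terminated by -1.  */

static void
count_off_need_sketch (needs, class, superclasses)
     int *needs;
     int class;
     int **superclasses;
{
  int *p;

  needs[class]--;
  for (p = superclasses[class]; *p != -1; p++)
    needs[*p]--;
}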
2241 /* Delete an unneeded INSN and any previous insns whose sole purpose is loading
2242 data that is dead in INSN. */
2244 static void
2245 delete_dead_insn (insn)
2246 rtx insn;
2248 rtx prev = prev_real_insn (insn);
2249 rtx prev_dest;
2251 /* If the previous insn sets a register that dies in our insn, delete it
2252 too. */
2253 if (prev && GET_CODE (PATTERN (prev)) == SET
2254 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2255 && reg_mentioned_p (prev_dest, PATTERN (insn))
2256 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
2257 delete_dead_insn (prev);
2259 PUT_CODE (insn, NOTE);
2260 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2261 NOTE_SOURCE_FILE (insn) = 0;
2264 /* Modify the home of pseudo-reg I.
2265 The new home is present in reg_renumber[I].
2267 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2268 or it may be -1, meaning there is none or it is not relevant.
2269 This is used so that all pseudos spilled from a given hard reg
2270 can share one stack slot. */
2272 static void
2273 alter_reg (i, from_reg)
2274 register int i;
2275 int from_reg;
2277 /* When outputting an inline function, this can happen
2278 for a reg that isn't actually used. */
2279 if (regno_reg_rtx[i] == 0)
2280 return;
2282 /* If the reg got changed to a MEM at rtl-generation time,
2283 ignore it. */
2284 if (GET_CODE (regno_reg_rtx[i]) != REG)
2285 return;
2287 /* Modify the reg-rtx to contain the new hard reg
2288 number or else to contain its pseudo reg number. */
2289 REGNO (regno_reg_rtx[i])
2290 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
2292 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2293 allocate a stack slot for it. */
2295 if (reg_renumber[i] < 0
2296 && reg_n_refs[i] > 0
2297 && reg_equiv_constant[i] == 0
2298 && reg_equiv_memory_loc[i] == 0)
2300 register rtx x;
2301 int inherent_size = PSEUDO_REGNO_BYTES (i);
2302 int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2303 int adjust = 0;
2305 /* Each pseudo reg has an inherent size which comes from its own mode,
2306 and a total size which provides room for paradoxical subregs
2307 which refer to the pseudo reg in wider modes.
2309 We can use a slot already allocated if it provides both
2310 enough inherent space and enough total space.
2311 Otherwise, we allocate a new slot, making sure that it has no less
2312 inherent space, and no less total space, than the previous slot. */
2313 if (from_reg == -1)
2315 /* No known place to spill from => no slot to reuse. */
2316 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size, -1);
2317 #if BYTES_BIG_ENDIAN
2318 /* Cancel the big-endian correction done in assign_stack_local.
2319 Get the address of the beginning of the slot.
2320 This is so we can do a big-endian correction unconditionally
2321 below. */
2322 adjust = inherent_size - total_size;
2323 #endif
2325 /* Reuse a stack slot if possible. */
2326 else if (spill_stack_slot[from_reg] != 0
2327 && spill_stack_slot_width[from_reg] >= total_size
2328 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2329 >= inherent_size))
2330 x = spill_stack_slot[from_reg];
2331 /* Allocate a bigger slot. */
2332 else
2334 /* Compute maximum size needed, both for inherent size
2335 and for total size. */
2336 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2337 if (spill_stack_slot[from_reg])
2339 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2340 > inherent_size)
2341 mode = GET_MODE (spill_stack_slot[from_reg]);
2342 if (spill_stack_slot_width[from_reg] > total_size)
2343 total_size = spill_stack_slot_width[from_reg];
2345 /* Make a slot with that size. */
2346 x = assign_stack_local (mode, total_size, -1);
2347 #if BYTES_BIG_ENDIAN
2348 /* Cancel the big-endian correction done in assign_stack_local.
2349 Get the address of the beginning of the slot.
2350 This is so we can do a big-endian correction unconditionally
2351 below. */
2352 adjust = GET_MODE_SIZE (mode) - total_size;
2353 #endif
2354 spill_stack_slot[from_reg] = x;
2355 spill_stack_slot_width[from_reg] = total_size;
2358 #if BYTES_BIG_ENDIAN
2359 /* On a big endian machine, the "address" of the slot
2360 is the address of the low part that fits its inherent mode. */
2361 if (inherent_size < total_size)
2362 adjust += (total_size - inherent_size);
2363 #endif /* BYTES_BIG_ENDIAN */
2365 /* If we have any adjustment to make, or if the stack slot is the
2366 wrong mode, make a new stack slot. */
2367 if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
2369 x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]),
2370 plus_constant (XEXP (x, 0), adjust));
2371 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2374 /* Save the stack slot for later. */
2375 reg_equiv_memory_loc[i] = x;
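/* Illustrative sketch only (hypothetical names): the two rules alter_reg
   applies above when giving a pseudo a stack slot.  An existing slot can
   be reused when it is at least as wide as the total size needed and its
   mode covers the inherent size; and on a big-endian target a value
   narrower than its slot is addressed at the high end of the slot, so a
   4-byte pseudo in an 8-byte slot is addressed at slot + 4.  */

static int
slot_reusable_sketch (slot_width, slot_mode_size, inherent_size, total_size)
     int slot_width, slot_mode_size, inherent_size, total_size;
{
  return slot_width >= total_size && slot_mode_size >= inherent_size;
}

static int
big_endian_slot_offset_sketch (slot_size, value_size)
     int slot_size, value_size;
{
  /* Offset from the start of the slot to the bytes holding the value.  */
  return slot_size - value_size;
}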
2379 /* Mark the slots in regs_ever_live for the hard regs
2380 used by pseudo-reg number REGNO. */
2382 void
2383 mark_home_live (regno)
2384 int regno;
2386 register int i, lim;
2387 i = reg_renumber[regno];
2388 if (i < 0)
2389 return;
2390 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2391 while (i < lim)
2392 regs_ever_live[i++] = 1;
2395 /* Mark the registers used in SCRATCH as being live. */
2397 static void
2398 mark_scratch_live (scratch)
2399 rtx scratch;
2401 register int i;
2402 int regno = REGNO (scratch);
2403 int lim = regno + HARD_REGNO_NREGS (regno, GET_MODE (scratch));
2405 for (i = regno; i < lim; i++)
2406 regs_ever_live[i] = 1;
2409 /* This function handles the tracking of elimination offsets around branches.
2411 X is a piece of RTL being scanned.
2413 INSN is the insn that it came from, if any.
2415 INITIAL_P is non-zero if we are to set the offset to be the initial
2416 offset and zero if we are setting the offset of the label to be the
2417 current offset. */
2419 static void
2420 set_label_offsets (x, insn, initial_p)
2421 rtx x;
2422 rtx insn;
2423 int initial_p;
2425 enum rtx_code code = GET_CODE (x);
2426 rtx tem;
2427 int i;
2428 struct elim_table *p;
2430 switch (code)
2432 case LABEL_REF:
2433 if (LABEL_REF_NONLOCAL_P (x))
2434 return;
2436 x = XEXP (x, 0);
2438 /* ... fall through ... */
2440 case CODE_LABEL:
2441 /* If we know nothing about this label, set the desired offsets. Note
2442 that this sets the offset at a label to be the offset before a label
2443 if we don't know anything about the label. This is not correct for
2444 the label after a BARRIER, but is the best guess we can make. If
2445 we guessed wrong, we will suppress an elimination that might have
2446 been possible had we been able to guess correctly. */
2448 if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
2450 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2451 offsets_at[CODE_LABEL_NUMBER (x)][i]
2452 = (initial_p ? reg_eliminate[i].initial_offset
2453 : reg_eliminate[i].offset);
2454 offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
2457 /* Otherwise, if this is the definition of a label and it is
2458 preceded by a BARRIER, set our offsets to the known offset of
2459 that label. */
2461 else if (x == insn
2462 && (tem = prev_nonnote_insn (insn)) != 0
2463 && GET_CODE (tem) == BARRIER)
2465 num_not_at_initial_offset = 0;
2466 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2468 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
2469 = offsets_at[CODE_LABEL_NUMBER (x)][i];
2470 if (reg_eliminate[i].can_eliminate
2471 && (reg_eliminate[i].offset
2472 != reg_eliminate[i].initial_offset))
2473 num_not_at_initial_offset++;
2477 else
2478 /* If neither of the above cases is true, compare each offset
2479 with those previously recorded and suppress any eliminations
2480 where the offsets disagree. */
2482 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2483 if (offsets_at[CODE_LABEL_NUMBER (x)][i]
2484 != (initial_p ? reg_eliminate[i].initial_offset
2485 : reg_eliminate[i].offset))
2486 reg_eliminate[i].can_eliminate = 0;
2488 return;
2490 case JUMP_INSN:
2491 set_label_offsets (PATTERN (insn), insn, initial_p);
2493 /* ... fall through ... */
2495 case INSN:
2496 case CALL_INSN:
2497 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2498 and hence must have all eliminations at their initial offsets. */
2499 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2500 if (REG_NOTE_KIND (tem) == REG_LABEL)
2501 set_label_offsets (XEXP (tem, 0), insn, 1);
2502 return;
2504 case ADDR_VEC:
2505 case ADDR_DIFF_VEC:
2506 /* Each of the labels in the address vector must be at their initial
2507 offsets. We want the first field for ADDR_VEC and the second
2508 field for ADDR_DIFF_VEC. */
2510 for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2511 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2512 insn, initial_p);
2513 return;
2515 case SET:
2516 /* We only care about setting PC. If the source is not RETURN,
2517 IF_THEN_ELSE, or a label, disable any eliminations not at
2518 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2519 isn't one of those possibilities. For branches to a label,
2520 call ourselves recursively.
2522 Note that this can disable elimination unnecessarily when we have
2523 a non-local goto since it will look like a non-constant jump to
2524 someplace in the current function. This isn't a significant
2525 problem since such jumps will normally be when all elimination
2526 pairs are back to their initial offsets. */
2528 if (SET_DEST (x) != pc_rtx)
2529 return;
2531 switch (GET_CODE (SET_SRC (x)))
2533 case PC:
2534 case RETURN:
2535 return;
2537 case LABEL_REF:
2538 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
2539 return;
2541 case IF_THEN_ELSE:
2542 tem = XEXP (SET_SRC (x), 1);
2543 if (GET_CODE (tem) == LABEL_REF)
2544 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2545 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2546 break;
2548 tem = XEXP (SET_SRC (x), 2);
2549 if (GET_CODE (tem) == LABEL_REF)
2550 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2551 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2552 break;
2553 return;
2556 /* If we reach here, all eliminations must be at their initial
2557 offset because we are doing a jump to a variable address. */
2558 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2559 if (p->offset != p->initial_offset)
2560 p->can_eliminate = 0;
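/* Illustrative sketch only (hypothetical names and an arbitrary bound):
   the label bookkeeping set_label_offsets does above, for one elimination.
   The first time a label is seen its offset is recorded; a later arrival
   with a different offset returns zero, meaning the elimination must be
   disabled.  */

#define SKETCH_MAX_LABELS 1024

static char sketch_offset_known[SKETCH_MAX_LABELS];
static int sketch_offset_at[SKETCH_MAX_LABELS];

static int
record_label_offset_sketch (label, offset)
     int label;
     int offset;
{
  if (! sketch_offset_known[label])
    {
      sketch_offset_known[label] = 1;
      sketch_offset_at[label] = offset;
      return 1;
    }
  return sketch_offset_at[label] == offset;
}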
2564 /* Used for communication between the next two functions to properly share
2565 the vector for an ASM_OPERANDS. */
2567 static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2569 /* Scan X and replace any eliminable registers (such as fp) with a
2570 replacement (such as sp), plus an offset.
2572 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2573 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2574 MEM, we are allowed to replace a sum of a register and the constant zero
2575 with the register, which we cannot do outside a MEM. In addition, we need
2576 to record the fact that a register is referenced outside a MEM.
2578 If INSN is an insn, it is the insn containing X. If we replace a REG
2579 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2580 CLOBBER of the pseudo after INSN so find_equiv_regs will know
2581 that the REG is being modified.
2583 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2584 That's used when we eliminate in expressions stored in notes.
2585 This means, do not set ref_outside_mem even if the reference
2586 is outside of MEMs.
2588 If we see a modification to a register we know about, take the
2589 appropriate action (see case SET, below).
2591 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
2592 replacements done assuming all offsets are at their initial values. If
2593 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2594 encounter, return the actual location so that find_reloads will do
2595 the proper thing. */
2598 eliminate_regs (x, mem_mode, insn)
2599 rtx x;
2600 enum machine_mode mem_mode;
2601 rtx insn;
2603 enum rtx_code code = GET_CODE (x);
2604 struct elim_table *ep;
2605 int regno;
2606 rtx new;
2607 int i, j;
2608 char *fmt;
2609 int copied = 0;
2611 switch (code)
2613 case CONST_INT:
2614 case CONST_DOUBLE:
2615 case CONST:
2616 case SYMBOL_REF:
2617 case CODE_LABEL:
2618 case PC:
2619 case CC0:
2620 case ASM_INPUT:
2621 case ADDR_VEC:
2622 case ADDR_DIFF_VEC:
2623 case RETURN:
2624 return x;
2626 case REG:
2627 regno = REGNO (x);
2629 /* First handle the case where we encounter a bare register that
2630 is eliminable. Replace it with a PLUS. */
2631 if (regno < FIRST_PSEUDO_REGISTER)
2633 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2634 ep++)
2635 if (ep->from_rtx == x && ep->can_eliminate)
2637 if (! mem_mode
2638 /* Refs inside notes don't count for this purpose. */
2639 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2640 || GET_CODE (insn) == INSN_LIST)))
2641 ep->ref_outside_mem = 1;
2642 return plus_constant (ep->to_rtx, ep->previous_offset);
2646 else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
2647 && (reg_equiv_address[regno] || num_not_at_initial_offset))
2649 /* In this case, find_reloads would attempt to either use an
2650 incorrect address (if something is not at its initial offset)
2651 or substitute a replaced address into an insn (which loses
2652 if the offset is changed by some later action). So we simply
2653 return the replaced stack slot (assuming it is changed by
2654 elimination) and ignore the fact that this is actually a
2655 reference to the pseudo. Ensure we make a copy of the
2656 address in case it is shared. */
2657 new = eliminate_regs (reg_equiv_memory_loc[regno],
2658 mem_mode, insn);
2659 if (new != reg_equiv_memory_loc[regno])
2661 cannot_omit_stores[regno] = 1;
2662 return copy_rtx (new);
2665 return x;
2667 case PLUS:
2668 /* If this is the sum of an eliminable register and a constant, rework
2669 the sum. */
2670 if (GET_CODE (XEXP (x, 0)) == REG
2671 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2672 && CONSTANT_P (XEXP (x, 1)))
2674 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2675 ep++)
2676 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2678 if (! mem_mode
2679 /* Refs inside notes don't count for this purpose. */
2680 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2681 || GET_CODE (insn) == INSN_LIST)))
2682 ep->ref_outside_mem = 1;
2684 /* The only time we want to replace a PLUS with a REG (this
2685 occurs when the constant operand of the PLUS is the negative
2686 of the offset) is when we are inside a MEM. We won't want
2687 to do so at other times because that would change the
2688 structure of the insn in a way that reload can't handle.
2689 We special-case the commonest situation in
2690 eliminate_regs_in_insn, so just replace a PLUS with a
2691 PLUS here, unless inside a MEM. */
2692 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
2693 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2694 return ep->to_rtx;
2695 else
2696 return gen_rtx (PLUS, Pmode, ep->to_rtx,
2697 plus_constant (XEXP (x, 1),
2698 ep->previous_offset));
2701 /* If the register is not eliminable, we are done since the other
2702 operand is a constant. */
2703 return x;
2706 /* If this is part of an address, we want to bring any constant to the
2707 outermost PLUS. We will do this by doing register replacement in
2708 our operands and seeing if a constant shows up in one of them.
2710 We assume here this is part of an address (or a "load address" insn)
2711 since an eliminable register is not likely to appear in any other
2712 context.
2714 If we have (plus (eliminable) (reg)), we want to produce
2715 (plus (plus (replacement) (reg)) (const)). If this was part of a
2716 normal add insn, (plus (replacement) (reg)) will be pushed as a
2717 reload. This is the desired action. */
2720 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2721 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2723 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2725 /* If one side is a PLUS and the other side is a pseudo that
2726 didn't get a hard register but has a reg_equiv_constant,
2727 we must replace the constant here since it may no longer
2728 be in the position of any operand. */
2729 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2730 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2731 && reg_renumber[REGNO (new1)] < 0
2732 && reg_equiv_constant != 0
2733 && reg_equiv_constant[REGNO (new1)] != 0)
2734 new1 = reg_equiv_constant[REGNO (new1)];
2735 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2736 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2737 && reg_renumber[REGNO (new0)] < 0
2738 && reg_equiv_constant[REGNO (new0)] != 0)
2739 new0 = reg_equiv_constant[REGNO (new0)];
2741 new = form_sum (new0, new1);
2743 /* As above, if we are not inside a MEM we do not want to
2744 turn a PLUS into something else. We might try to do so here
2745 for an addition of 0 if we aren't optimizing. */
2746 if (! mem_mode && GET_CODE (new) != PLUS)
2747 return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx);
2748 else
2749 return new;
2752 return x;
2754 case MULT:
2755 /* If this is the product of an eliminable register and a
2756 constant, apply the distributive law and move the constant out
2757 so that we have (plus (mult ..) ..). This is needed in order
2758 to keep load-address insns valid. This case is pathological.
2759 We ignore the possibility of overflow here. */
2760 if (GET_CODE (XEXP (x, 0)) == REG
2761 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2762 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2763 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2764 ep++)
2765 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2767 if (! mem_mode
2768 /* Refs inside notes don't count for this purpose. */
2769 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2770 || GET_CODE (insn) == INSN_LIST)))
2771 ep->ref_outside_mem = 1;
2773 return
2774 plus_constant (gen_rtx (MULT, Pmode, ep->to_rtx, XEXP (x, 1)),
2775 ep->previous_offset * INTVAL (XEXP (x, 1)));
2778 /* ... fall through ... */
2780 case CALL:
2781 case COMPARE:
2782 case MINUS:
2783 case DIV: case UDIV:
2784 case MOD: case UMOD:
2785 case AND: case IOR: case XOR:
2786 case ROTATERT: case ROTATE:
2787 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2788 case NE: case EQ:
2789 case GE: case GT: case GEU: case GTU:
2790 case LE: case LT: case LEU: case LTU:
2792 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2793 rtx new1
2794 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;
2796 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2797 return gen_rtx (code, GET_MODE (x), new0, new1);
2799 return x;
2801 case EXPR_LIST:
2802 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2803 if (XEXP (x, 0))
2805 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2806 if (new != XEXP (x, 0))
2807 x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1));
2810 /* ... fall through ... */
2812 case INSN_LIST:
2813 /* Now do eliminations in the rest of the chain. If this was
2814 an EXPR_LIST, this might result in allocating more memory than is
2815 strictly needed, but it simplifies the code. */
2816 if (XEXP (x, 1))
2818 new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2819 if (new != XEXP (x, 1))
2820 return gen_rtx (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
2822 return x;
2824 case PRE_INC:
2825 case POST_INC:
2826 case PRE_DEC:
2827 case POST_DEC:
2828 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2829 if (ep->to_rtx == XEXP (x, 0))
2831 int size = GET_MODE_SIZE (mem_mode);
2833 /* If more bytes than MEM_MODE are pushed, account for them. */
2834 #ifdef PUSH_ROUNDING
2835 if (ep->to_rtx == stack_pointer_rtx)
2836 size = PUSH_ROUNDING (size);
2837 #endif
2838 if (code == PRE_DEC || code == POST_DEC)
2839 ep->offset += size;
2840 else
2841 ep->offset -= size;
2844 /* Fall through to generic unary operation case. */
2845 case USE:
2846 case STRICT_LOW_PART:
2847 case NEG: case NOT:
2848 case SIGN_EXTEND: case ZERO_EXTEND:
2849 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2850 case FLOAT: case FIX:
2851 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2852 case ABS:
2853 case SQRT:
2854 case FFS:
2855 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2856 if (new != XEXP (x, 0))
2857 return gen_rtx (code, GET_MODE (x), new);
2858 return x;
2860 case SUBREG:
2861 /* Similar to above processing, but preserve SUBREG_WORD.
2862 Convert (subreg (mem)) to (mem) if not paradoxical.
2863 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2864 pseudo didn't get a hard reg, we must replace this with the
2865 eliminated version of the memory location because push_reloads
2866 may do the replacement in certain circumstances. */
2867 if (GET_CODE (SUBREG_REG (x)) == REG
2868 && (GET_MODE_SIZE (GET_MODE (x))
2869 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2870 && reg_equiv_memory_loc != 0
2871 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2873 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
2874 mem_mode, insn);
2876 /* If we didn't change anything, we must retain the pseudo. */
2877 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
2878 new = XEXP (x, 0);
2879 else
2880 /* Otherwise, ensure NEW isn't shared in case we have to reload
2881 it. */
2882 new = copy_rtx (new);
2884 else
2885 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);
2887 if (new != XEXP (x, 0))
2889 if (GET_CODE (new) == MEM
2890 && (GET_MODE_SIZE (GET_MODE (x))
2891 <= GET_MODE_SIZE (GET_MODE (new)))
2892 #ifdef LOAD_EXTEND_OP
2893 /* On these machines we will be reloading what is
2894 inside the SUBREG if it originally was a pseudo and
2895 the inner and outer modes are both a word or
2896 smaller. So leave the SUBREG then. */
2897 && ! (GET_CODE (SUBREG_REG (x)) == REG
2898 && GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
2899 && GET_MODE_SIZE (GET_MODE (new)) <= UNITS_PER_WORD)
2900 #endif
2903 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2904 enum machine_mode mode = GET_MODE (x);
2906 #if BYTES_BIG_ENDIAN
2907 offset += (MIN (UNITS_PER_WORD,
2908 GET_MODE_SIZE (GET_MODE (new)))
2909 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2910 #endif
2912 PUT_MODE (new, mode);
2913 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
2914 return new;
2916 else
2917 return gen_rtx (SUBREG, GET_MODE (x), new, SUBREG_WORD (x));
2920 return x;
2922 case CLOBBER:
2923 /* If clobbering a register that is the replacement register for an
2924 elimination we still think can be performed, note that it cannot
2925 be performed. Otherwise, we need not be concerned about it. */
2926 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2927 if (ep->to_rtx == XEXP (x, 0))
2928 ep->can_eliminate = 0;
2930 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2931 if (new != XEXP (x, 0))
2932 return gen_rtx (code, GET_MODE (x), new);
2933 return x;
2935 case ASM_OPERANDS:
2937 rtx *temp_vec;
2938 /* Properly handle sharing input and constraint vectors. */
2939 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
2941 /* When we come to a new vector not seen before,
2942 scan all its elements; keep the old vector if none
2943 of them changes; otherwise, make a copy. */
2944 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
2945 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
2946 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2947 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
2948 mem_mode, insn);
2950 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2951 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
2952 break;
2954 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
2955 new_asm_operands_vec = old_asm_operands_vec;
2956 else
2957 new_asm_operands_vec
2958 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
2961 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
2962 if (new_asm_operands_vec == old_asm_operands_vec)
2963 return x;
2965 new = gen_rtx (ASM_OPERANDS, VOIDmode, ASM_OPERANDS_TEMPLATE (x),
2966 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2967 ASM_OPERANDS_OUTPUT_IDX (x), new_asm_operands_vec,
2968 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
2969 ASM_OPERANDS_SOURCE_FILE (x),
2970 ASM_OPERANDS_SOURCE_LINE (x));
2971 new->volatil = x->volatil;
2972 return new;
2975 case SET:
2976 /* Check for setting a register that we know about. */
2977 if (GET_CODE (SET_DEST (x)) == REG)
2979 /* See if this is setting the replacement register for an
2980 elimination.
2982 If DEST is the hard frame pointer, we do nothing because we
2983 assume that all assignments to the frame pointer are for
2984 non-local gotos and are being done at a time when they are valid
2985 and do not disturb anything else. Some machines want to
2986 eliminate a fake argument pointer (or even a fake frame pointer)
2987 with either the real frame or the stack pointer. Assignments to
2988 the hard frame pointer must not prevent this elimination. */
2990 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2991 ep++)
2992 if (ep->to_rtx == SET_DEST (x)
2993 && SET_DEST (x) != hard_frame_pointer_rtx)
2995 /* If it is being incremented, adjust the offset. Otherwise,
2996 this elimination can't be done. */
2997 rtx src = SET_SRC (x);
2999 if (GET_CODE (src) == PLUS
3000 && XEXP (src, 0) == SET_DEST (x)
3001 && GET_CODE (XEXP (src, 1)) == CONST_INT)
3002 ep->offset -= INTVAL (XEXP (src, 1));
3003 else
3004 ep->can_eliminate = 0;
3007 /* Now check to see we are assigning to a register that can be
3008 eliminated. If so, it must be as part of a PARALLEL, since we
3009 will not have been called if this is a single SET. So indicate
3010 that we can no longer eliminate this reg. */
3011 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3012 ep++)
3013 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
3014 ep->can_eliminate = 0;
3017 /* Now avoid the loop below in this common case. */
3019 rtx new0 = eliminate_regs (SET_DEST (x), 0, insn);
3020 rtx new1 = eliminate_regs (SET_SRC (x), 0, insn);
3022 /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
3023 write a CLOBBER insn. */
3024 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
3025 && insn != 0 && GET_CODE (insn) != EXPR_LIST
3026 && GET_CODE (insn) != INSN_LIST)
3027 emit_insn_after (gen_rtx (CLOBBER, VOIDmode, SET_DEST (x)), insn);
3029 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
3030 return gen_rtx (SET, VOIDmode, new0, new1);
3033 return x;
3035 case MEM:
3036 /* Our only special processing is to pass the mode of the MEM to our
3037 recursive call and copy the flags. While we are here, handle this
3038 case more efficiently. */
3039 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn);
3040 if (new != XEXP (x, 0))
3042 new = gen_rtx (MEM, GET_MODE (x), new);
3043 new->volatil = x->volatil;
3044 new->unchanging = x->unchanging;
3045 new->in_struct = x->in_struct;
3046 return new;
3048 else
3049 return x;
3052 /* Process each of our operands recursively. If any have changed, make a
3053 copy of the rtx. */
3054 fmt = GET_RTX_FORMAT (code);
3055 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3057 if (*fmt == 'e')
3059 new = eliminate_regs (XEXP (x, i), mem_mode, insn);
3060 if (new != XEXP (x, i) && ! copied)
3062 rtx new_x = rtx_alloc (code);
3063 bcopy ((char *) x, (char *) new_x,
3064 (sizeof (*new_x) - sizeof (new_x->fld)
3065 + sizeof (new_x->fld[0]) * GET_RTX_LENGTH (code)));
3066 x = new_x;
3067 copied = 1;
3069 XEXP (x, i) = new;
3071 else if (*fmt == 'E')
3073 int copied_vec = 0;
3074 for (j = 0; j < XVECLEN (x, i); j++)
3076 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
3077 if (new != XVECEXP (x, i, j) && ! copied_vec)
3079 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
3080 &XVECEXP (x, i, 0));
3081 if (! copied)
3083 rtx new_x = rtx_alloc (code);
3084 bcopy ((char *) x, (char *) new_x,
3085 (sizeof (*new_x) - sizeof (new_x->fld)
3086 + (sizeof (new_x->fld[0])
3087 * GET_RTX_LENGTH (code))));
3088 x = new_x;
3089 copied = 1;
3091 XVEC (x, i) = new_v;
3092 copied_vec = 1;
3094 XVECEXP (x, i, j) = new;
3099 return x;
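/* Illustrative sketch only (hypothetical names): the arithmetic at the
   heart of eliminate_regs above.  A reference at constant displacement
   DISP from the register being eliminated becomes a reference at
   DISP + OFFSET from its replacement; for example fp+8 with an fp-to-sp
   offset of 16 becomes sp+24.  */

struct address_sketch { int base_reg; int disp; };

static struct address_sketch
eliminate_address_sketch (addr, from_reg, to_reg, offset)
     struct address_sketch addr;
     int from_reg, to_reg, offset;
{
  if (addr.base_reg == from_reg)
    {
      addr.base_reg = to_reg;
      addr.disp += offset;
    }
  return addr;
}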
3102 /* Scan INSN and eliminate all eliminable registers in it.
3104 If REPLACE is nonzero, do the replacement destructively. Also
3105 delete the insn as dead if it is setting an eliminable register.
3107 If REPLACE is zero, do all our allocations in reload_obstack.
3109 If no eliminations were done and this insn doesn't require any elimination
3110 processing (these are not identical conditions: it might be updating sp,
3111 but not referencing fp; this needs to be seen during reload_as_needed so
3112 that the offset between fp and sp can be taken into consideration), zero
3113 is returned. Otherwise, 1 is returned. */
3115 static int
3116 eliminate_regs_in_insn (insn, replace)
3117 rtx insn;
3118 int replace;
3120 rtx old_body = PATTERN (insn);
3121 rtx new_body;
3122 int val = 0;
3123 struct elim_table *ep;
3125 if (! replace)
3126 push_obstacks (&reload_obstack, &reload_obstack);
3128 if (GET_CODE (old_body) == SET && GET_CODE (SET_DEST (old_body)) == REG
3129 && REGNO (SET_DEST (old_body)) < FIRST_PSEUDO_REGISTER)
3131 /* Check for setting an eliminable register. */
3132 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3133 if (ep->from_rtx == SET_DEST (old_body) && ep->can_eliminate)
3135 /* In this case this insn isn't serving a useful purpose. We
3136 will delete it in reload_as_needed once we know that this
3137 elimination is, in fact, being done.
3139 If REPLACE isn't set, we can't delete this insn, but needn't
3140 process it since it won't be used unless something changes. */
3141 if (replace)
3142 delete_dead_insn (insn);
3143 val = 1;
3144 goto done;
3147 /* Check for (set (reg) (plus (reg from) (offset))) where the offset
3148 in the insn is the negative of the offset in FROM. Substitute
3149 (set (reg) (reg to)) for the insn and change its code.
3151 We have to do this here, rather than in eliminate_regs, so that we can
3152 change the insn code. */
3154 if (GET_CODE (SET_SRC (old_body)) == PLUS
3155 && GET_CODE (XEXP (SET_SRC (old_body), 0)) == REG
3156 && GET_CODE (XEXP (SET_SRC (old_body), 1)) == CONST_INT)
3157 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3158 ep++)
3159 if (ep->from_rtx == XEXP (SET_SRC (old_body), 0)
3160 && ep->can_eliminate)
3162 /* We must stop at the first elimination that will be used.
3163 If this one would replace the PLUS with a REG, do it
3164 now. Otherwise, quit the loop and let eliminate_regs
3165 do its normal replacement. */
3166 if (ep->offset == - INTVAL (XEXP (SET_SRC (old_body), 1)))
3168 PATTERN (insn) = gen_rtx (SET, VOIDmode,
3169 SET_DEST (old_body), ep->to_rtx);
3170 INSN_CODE (insn) = -1;
3171 val = 1;
3172 goto done;
3175 break;
3179 old_asm_operands_vec = 0;
3181 /* Replace the body of this insn with a substituted form. If we changed
3182 something, return non-zero.
3184 If we are replacing a body that was a (set X (plus Y Z)), try to
3185 re-recognize the insn. We do this in case we had a simple addition
3186 but now can do this as a load-address. This saves an insn in this
3187 common case. */
3189 new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX);
3190 if (new_body != old_body)
3192 /* If we aren't replacing things permanently and we changed something,
3193 make another copy to ensure that all the RTL is new. Otherwise
3194 things can go wrong if find_reloads swaps commutative operands
3195 and one is inside RTL that has been copied while the other is not. */
3197 /* Don't copy an asm_operands because (1) there's no need and (2)
3198 copy_rtx can't do it properly when there are multiple outputs. */
3199 if (! replace && asm_noperands (old_body) < 0)
3200 new_body = copy_rtx (new_body);
3202 /* If we had a move insn but now we don't, rerecognize it. */
3203 if ((GET_CODE (old_body) == SET && GET_CODE (SET_SRC (old_body)) == REG
3204 && (GET_CODE (new_body) != SET
3205 || GET_CODE (SET_SRC (new_body)) != REG))
3206 /* If this was a load from or store to memory, compare
3207 the MEM in recog_operand to the one in the insn. If they
3208 are not equal, then rerecognize the insn. */
3209 || (GET_CODE (old_body) == SET
3210 && ((GET_CODE (SET_SRC (old_body)) == MEM
3211 && SET_SRC (old_body) != recog_operand[1])
3212 || (GET_CODE (SET_DEST (old_body)) == MEM
3213 && SET_DEST (old_body) != recog_operand[0])))
3214 /* If this was an add insn before, rerecognize. */
3216 (GET_CODE (old_body) == SET
3217 && GET_CODE (SET_SRC (old_body)) == PLUS))
3219 if (! validate_change (insn, &PATTERN (insn), new_body, 0))
3220 /* If recognition fails, store the new body anyway.
3221 It's normal to have recognition failures here
3222 due to bizarre memory addresses; reloading will fix them. */
3223 PATTERN (insn) = new_body;
3225 else
3226 PATTERN (insn) = new_body;
3228 val = 1;
3231 /* Loop through all elimination pairs. See if any have changed and
3232 recalculate the number not at initial offset.
3234 Compute the maximum offset (minimum offset if the stack does not
3235 grow downward) for each elimination pair.
3237 We also detect cases where register elimination cannot be done,
3238 namely, if a register would be both changed and referenced outside a MEM
3239 in the resulting insn since such an insn is often undefined and, even if
3240 not, we cannot know what meaning will be given to it. Note that it is
3241 valid to have a register used in an address in an insn that changes it
3242 (presumably with a pre- or post-increment or decrement).
3244 If anything changes, return nonzero. */
3246 num_not_at_initial_offset = 0;
3247 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3249 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3250 ep->can_eliminate = 0;
3252 ep->ref_outside_mem = 0;
3254 if (ep->previous_offset != ep->offset)
3255 val = 1;
3257 ep->previous_offset = ep->offset;
3258 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3259 num_not_at_initial_offset++;
3261 #ifdef STACK_GROWS_DOWNWARD
3262 ep->max_offset = MAX (ep->max_offset, ep->offset);
3263 #else
3264 ep->max_offset = MIN (ep->max_offset, ep->offset);
3265 #endif
3268 done:
3269 /* If we changed something, perform elimination in REG_NOTES. This is
3270 needed even when REPLACE is zero because a REG_DEAD note might refer
3271 to a register that we eliminate and could cause a different number
3272 of spill registers to be needed in the final reload pass than in
3273 the pre-passes. */
3274 if (val && REG_NOTES (insn) != 0)
3275 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));
3277 if (! replace)
3278 pop_obstacks ();
3280 return val;
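/* Illustrative sketch only (hypothetical names): the test behind the
   special case handled in eliminate_regs_in_insn above.  When an insn
   adds to the eliminable register exactly the negative of the elimination
   offset, the whole insn reduces to a simple register copy.  */

static int
addition_cancels_offset_sketch (addend, elimination_offset)
     int addend;
     int elimination_offset;
{
  return addend == -elimination_offset;
}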
3283 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3284 replacement we currently believe is valid, mark it as not eliminable if X
3285 modifies DEST in any way other than by adding a constant integer to it.
3287 If DEST is the frame pointer, we do nothing because we assume that
3288 all assignments to the hard frame pointer are nonlocal gotos and are being
3289 done at a time when they are valid and do not disturb anything else.
3290 Some machines want to eliminate a fake argument pointer with either the
3291 frame or stack pointer. Assignments to the hard frame pointer must not
3292 prevent this elimination.
3294 Called via note_stores from reload before starting its passes to scan
3295 the insns of the function. */
3297 static void
3298 mark_not_eliminable (dest, x)
3299 rtx dest;
3300 rtx x;
3302 register int i;
3304 /* A SUBREG of a hard register here is just changing its mode. We should
3305 not see a SUBREG of an eliminable hard register, but check just in
3306 case. */
3307 if (GET_CODE (dest) == SUBREG)
3308 dest = SUBREG_REG (dest);
3310 if (dest == hard_frame_pointer_rtx)
3311 return;
3313 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3314 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3315 && (GET_CODE (x) != SET
3316 || GET_CODE (SET_SRC (x)) != PLUS
3317 || XEXP (SET_SRC (x), 0) != dest
3318 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3320 reg_eliminate[i].can_eliminate_previous
3321 = reg_eliminate[i].can_eliminate = 0;
3322 num_eliminable--;
3326 /* Kick all pseudos out of hard register REGNO.
3327 If GLOBAL is nonzero, try to find someplace else to put them.
3328 If DUMPFILE is nonzero, log actions taken on that file.
3330 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3331 because we found we can't eliminate some register. In this case, no pseudos
3332 are allowed to be in the register, even if they are only in a block that
3333 doesn't require spill registers, unlike the case when we are spilling this
3334 hard reg to produce another spill register.
3336 Return nonzero if any pseudos needed to be kicked out. */
3338 static int
3339 spill_hard_reg (regno, global, dumpfile, cant_eliminate)
3340 register int regno;
3341 int global;
3342 FILE *dumpfile;
3343 int cant_eliminate;
3345 enum reg_class class = REGNO_REG_CLASS (regno);
3346 int something_changed = 0;
3347 register int i;
3349 SET_HARD_REG_BIT (forbidden_regs, regno);
3351 /* Spill every pseudo reg that was allocated to this reg
3352 or to something that overlaps this reg. */
3354 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3355 if (reg_renumber[i] >= 0
3356 && reg_renumber[i] <= regno
3357 && (reg_renumber[i]
3358 + HARD_REGNO_NREGS (reg_renumber[i],
3359 PSEUDO_REGNO_MODE (i))
3360 > regno))
3362 /* If this register belongs solely to a basic block which needed no
3363 spilling of any class that this register is contained in,
3364 leave it be, unless we are spilling this register because
3365 it was a hard register that can't be eliminated. */
3367 if (! cant_eliminate
3368 && basic_block_needs[0]
3369 && reg_basic_block[i] >= 0
3370 && basic_block_needs[(int) class][reg_basic_block[i]] == 0)
3372 enum reg_class *p;
3374 for (p = reg_class_superclasses[(int) class];
3375 *p != LIM_REG_CLASSES; p++)
3376 if (basic_block_needs[(int) *p][reg_basic_block[i]] > 0)
3377 break;
3379 if (*p == LIM_REG_CLASSES)
3380 continue;
3383 /* Mark it as no longer having a hard register home. */
3384 reg_renumber[i] = -1;
3385 /* We will need to scan everything again. */
3386 something_changed = 1;
3387 if (global)
3388 retry_global_alloc (i, forbidden_regs);
3390 alter_reg (i, regno);
3391 if (dumpfile)
3393 if (reg_renumber[i] == -1)
3394 fprintf (dumpfile, " Register %d now on stack.\n\n", i);
3395 else
3396 fprintf (dumpfile, " Register %d now in %d.\n\n",
3397 i, reg_renumber[i]);
3400 for (i = 0; i < scratch_list_length; i++)
3402 if (scratch_list[i] && REGNO (scratch_list[i]) == regno)
3404 if (! cant_eliminate && basic_block_needs[0]
3405 && ! basic_block_needs[(int) class][scratch_block[i]])
3407 enum reg_class *p;
3409 for (p = reg_class_superclasses[(int) class];
3410 *p != LIM_REG_CLASSES; p++)
3411 if (basic_block_needs[(int) *p][scratch_block[i]] > 0)
3412 break;
3414 if (*p == LIM_REG_CLASSES)
3415 continue;
3417 PUT_CODE (scratch_list[i], SCRATCH);
3418 scratch_list[i] = 0;
3419 something_changed = 1;
3420 continue;
3424 return something_changed;
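/* Illustrative example of the overlap test above (register numbers and
   modes are hypothetical): if pseudo 70 has PSEUDO_REGNO_MODE DImode and
   was renumbered to hard reg 2 on a target where
   HARD_REGNO_NREGS (2, DImode) == 2, it occupies hard regs 2 and 3, so
   spill_hard_reg (3, ...) kicks it out even though reg_renumber[70] != 3.  */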
3427 /* Find all paradoxical subregs within X and update reg_max_ref_width.
3428 Also mark any hard registers used to store user variables as
3429 forbidden from being used for spill registers. */
3431 static void
3432 scan_paradoxical_subregs (x)
3433 register rtx x;
3435 register int i;
3436 register char *fmt;
3437 register enum rtx_code code = GET_CODE (x);
3439 switch (code)
3441 case REG:
3442 #ifdef SMALL_REGISTER_CLASSES
3443 if (REGNO (x) < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
3444 SET_HARD_REG_BIT (forbidden_regs, REGNO (x));
3445 #endif
3446 return;
3448 case CONST_INT:
3449 case CONST:
3450 case SYMBOL_REF:
3451 case LABEL_REF:
3452 case CONST_DOUBLE:
3453 case CC0:
3454 case PC:
3455 case USE:
3456 case CLOBBER:
3457 return;
3459 case SUBREG:
3460 if (GET_CODE (SUBREG_REG (x)) == REG
3461 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3462 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3463 = GET_MODE_SIZE (GET_MODE (x));
3464 return;
3467 fmt = GET_RTX_FORMAT (code);
3468 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3470 if (fmt[i] == 'e')
3471 scan_paradoxical_subregs (XEXP (x, i));
3472 else if (fmt[i] == 'E')
3474 register int j;
3475 for (j = XVECLEN (x, i) - 1; j >=0; j--)
3476 scan_paradoxical_subregs (XVECEXP (x, i, j));
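/* For example, (subreg:SI (reg:HI 70) 0) is paradoxical when SImode is
   wider than HImode; the SUBREG case above then records
   GET_MODE_SIZE (SImode), typically 4, in reg_max_ref_width[70].  The
   mode sizes are purely illustrative and depend on the target.  */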
3481 static int
3482 hard_reg_use_compare (p1, p2)
3483 struct hard_reg_n_uses *p1, *p2;
3485 int tem = p1->uses - p2->uses;
3486 if (tem != 0) return tem;
3487 /* If regs are equally good, sort by regno,
3488 so that the results of qsort leave nothing to chance. */
3489 return p1->regno - p2->regno;
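/* A small illustration of this ordering (values hypothetical): entries
   { regno 2, uses 0 }, { regno 1, uses 5 } and { regno 0, uses 5 } sort
   as regno 2 first (fewest uses), then regno 0 before regno 1, since
   equal use counts fall back to the regno comparison.  */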
3492 /* Choose the order to consider regs for use as reload registers
3493 based on how much trouble would be caused by spilling one.
3494 Store them in order of decreasing preference in potential_reload_regs. */
3496 static void
3497 order_regs_for_reload ()
3499 register int i;
3500 register int o = 0;
3501 int large = 0;
3503 struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];
3505 CLEAR_HARD_REG_SET (bad_spill_regs);
3507 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3508 potential_reload_regs[i] = -1;
3510 /* Count number of uses of each hard reg by pseudo regs allocated to it
3511 and then order them by decreasing use. */
3513 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3515 hard_reg_n_uses[i].uses = 0;
3516 hard_reg_n_uses[i].regno = i;
3519 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3521 int regno = reg_renumber[i];
3522 if (regno >= 0)
3524 int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
3525 while (regno < lim)
3526 hard_reg_n_uses[regno++].uses += reg_n_refs[i];
3528 large += reg_n_refs[i];
3531 /* Now fixed registers (which cannot safely be used for reloading)
3532 get a very high use count so they will be considered least desirable.
3533 Registers used explicitly in the rtl code are almost as bad. */
3535 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3537 if (fixed_regs[i])
3539 hard_reg_n_uses[i].uses += 2 * large + 2;
3540 SET_HARD_REG_BIT (bad_spill_regs, i);
3542 else if (regs_explicitly_used[i])
3544 hard_reg_n_uses[i].uses += large + 1;
3545 #ifndef SMALL_REGISTER_CLASSES
3546 /* ??? We are doing this here because of the potential that
3547 bad code may be generated if a register explicitly used in
3548 an insn was used as a spill register for that insn. But
3549 not using these as spill registers may lose on some machines.
3550 We'll have to see how this works out. */
3551 SET_HARD_REG_BIT (bad_spill_regs, i);
3552 #endif
3555 hard_reg_n_uses[HARD_FRAME_POINTER_REGNUM].uses += 2 * large + 2;
3556 SET_HARD_REG_BIT (bad_spill_regs, HARD_FRAME_POINTER_REGNUM);
3558 #ifdef ELIMINABLE_REGS
3559 /* If registers other than the frame pointer are eliminable, mark them as
3560 poor choices. */
3561 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3563 hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
3564 SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
3566 #endif
3568 /* Prefer registers not so far used, for use in temporary loading.
3569 Among them, if REG_ALLOC_ORDER is defined, use that order.
3570 Otherwise, prefer registers not preserved by calls. */
3572 #ifdef REG_ALLOC_ORDER
3573 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3575 int regno = reg_alloc_order[i];
3577 if (hard_reg_n_uses[regno].uses == 0)
3578 potential_reload_regs[o++] = regno;
3580 #else
3581 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3583 if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
3584 potential_reload_regs[o++] = i;
3586 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3588 if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
3589 potential_reload_regs[o++] = i;
3591 #endif
3593 qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
3594 sizeof hard_reg_n_uses[0], hard_reg_use_compare);
3596 /* Now add the regs that are already used,
3597 preferring those used less often. The fixed and otherwise forbidden
3598 registers will be at the end of this list. */
3600 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3601 if (hard_reg_n_uses[i].uses != 0)
3602 potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
3605 /* Used in reload_as_needed to sort the spilled regs. */
3606 static int
3607 compare_spill_regs (r1, r2)
3608 short *r1, *r2;
3610 return *r1 < *r2 ? -1: 1;
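/* For instance, spill_regs of {6, 2, 9} (hypothetical values) sorts to
   {2, 6, 9}; the caller then rebuilds spill_reg_order so that
   spill_reg_order[2] == 0, spill_reg_order[6] == 1 and
   spill_reg_order[9] == 2.  Note the comparison never returns 0, which
   is safe because no hard reg appears twice in spill_regs.  */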
3613 /* Reload pseudo-registers into hard regs around each insn as needed.
3614 Additional register load insns are output before the insn that needs it
3615 and perhaps store insns after insns that modify the reloaded pseudo reg.
3617 reg_last_reload_reg and reg_reloaded_contents keep track of
3618 which registers are already available in reload registers.
3619 We update these for the reloads that we perform,
3620 as the insns are scanned. */
3622 static void
3623 reload_as_needed (first, live_known)
3624 rtx first;
3625 int live_known;
3627 register rtx insn;
3628 register int i;
3629 int this_block = 0;
3630 rtx x;
3631 rtx after_call = 0;
3633 bzero ((char *) spill_reg_rtx, sizeof spill_reg_rtx);
3634 bzero ((char *) spill_reg_store, sizeof spill_reg_store);
3635 reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
3636 bzero ((char *) reg_last_reload_reg, max_regno * sizeof (rtx));
3637 reg_has_output_reload = (char *) alloca (max_regno);
3638 for (i = 0; i < n_spills; i++)
3640 reg_reloaded_contents[i] = -1;
3641 reg_reloaded_insn[i] = 0;
3644 /* Reset all offsets on eliminable registers to their initial values. */
3645 #ifdef ELIMINABLE_REGS
3646 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3648 INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
3649 reg_eliminate[i].initial_offset);
3650 reg_eliminate[i].previous_offset
3651 = reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
3653 #else
3654 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
3655 reg_eliminate[0].previous_offset
3656 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
3657 #endif
3659 num_not_at_initial_offset = 0;
3661 /* Order the spilled regs, so that allocate_reload_reg can guarantee to
3662 pack registers with group needs. */
3663 if (n_spills > 1)
3665 qsort (spill_regs, n_spills, sizeof (short), compare_spill_regs);
3666 for (i = 0; i < n_spills; i++)
3667 spill_reg_order[spill_regs[i]] = i;
3670 for (insn = first; insn;)
3672 register rtx next = NEXT_INSN (insn);
3674 /* Notice when we move to a new basic block. */
3675 if (live_known && this_block + 1 < n_basic_blocks
3676 && insn == basic_block_head[this_block+1])
3677 ++this_block;
3679 /* If we pass a label, copy the offsets from the label information
3680 into the current offsets of each elimination. */
3681 if (GET_CODE (insn) == CODE_LABEL)
3683 num_not_at_initial_offset = 0;
3684 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3686 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
3687 = offsets_at[CODE_LABEL_NUMBER (insn)][i];
3688 if (reg_eliminate[i].can_eliminate
3689 && (reg_eliminate[i].offset
3690 != reg_eliminate[i].initial_offset))
3691 num_not_at_initial_offset++;
3695 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3697 rtx avoid_return_reg = 0;
3699 #ifdef SMALL_REGISTER_CLASSES
3700 /* Set avoid_return_reg if this is an insn
3701 that might use the value of a function call. */
3702 if (GET_CODE (insn) == CALL_INSN)
3704 if (GET_CODE (PATTERN (insn)) == SET)
3705 after_call = SET_DEST (PATTERN (insn));
3706 else if (GET_CODE (PATTERN (insn)) == PARALLEL
3707 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
3708 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
3709 else
3710 after_call = 0;
3712 else if (after_call != 0
3713 && !(GET_CODE (PATTERN (insn)) == SET
3714 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
3716 if (reg_referenced_p (after_call, PATTERN (insn)))
3717 avoid_return_reg = after_call;
3718 after_call = 0;
3720 #endif /* SMALL_REGISTER_CLASSES */
3722 /* If this is a USE or CLOBBER of a MEM, ensure that any
3723 references to eliminable registers have been removed. */
3725 if ((GET_CODE (PATTERN (insn)) == USE
3726 || GET_CODE (PATTERN (insn)) == CLOBBER)
3727 && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
3728 XEXP (XEXP (PATTERN (insn), 0), 0)
3729 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
3730 GET_MODE (XEXP (PATTERN (insn), 0)), NULL_RTX);
3732 /* If we need to do register elimination processing, do so.
3733 This might delete the insn, in which case we are done. */
3734 if (num_eliminable && GET_MODE (insn) == QImode)
3736 eliminate_regs_in_insn (insn, 1);
3737 if (GET_CODE (insn) == NOTE)
3739 insn = next;
3740 continue;
3744 if (GET_MODE (insn) == VOIDmode)
3745 n_reloads = 0;
3746 /* First find the pseudo regs that must be reloaded for this insn.
3747 This info is returned in the tables reload_... (see reload.h).
3748 Also modify the body of INSN by substituting RELOAD
3749 rtx's for those pseudo regs. */
3750 else
3752 bzero (reg_has_output_reload, max_regno);
3753 CLEAR_HARD_REG_SET (reg_is_output_reload);
3755 find_reloads (insn, 1, spill_indirect_levels, live_known,
3756 spill_reg_order);
3759 if (n_reloads > 0)
3761 rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
3762 rtx p;
3763 int class;
3765 /* If this block has not had spilling done for a
3766 particular class and we have any non-optionals that need a
3767 spill reg in that class, abort. */
3769 for (class = 0; class < N_REG_CLASSES; class++)
3770 if (basic_block_needs[class] != 0
3771 && basic_block_needs[class][this_block] == 0)
3772 for (i = 0; i < n_reloads; i++)
3773 if (class == (int) reload_reg_class[i]
3774 && reload_reg_rtx[i] == 0
3775 && ! reload_optional[i]
3776 && (reload_in[i] != 0 || reload_out[i] != 0
3777 || reload_secondary_p[i] != 0))
3778 abort ();
3780 /* Now compute which reload regs to reload them into. Perhaps
3781 reusing reload regs from previous insns, or else output
3782 load insns to reload them. Maybe output store insns too.
3783 Record the choices of reload reg in reload_reg_rtx. */
3784 choose_reload_regs (insn, avoid_return_reg);
3786 #ifdef SMALL_REGISTER_CLASSES
3787 /* Merge any reloads that we didn't combine for fear of
3788 increasing the number of spill registers needed but now
3789 discover can be safely merged. */
3790 merge_assigned_reloads (insn);
3791 #endif
3793 /* Generate the insns to reload operands into or out of
3794 their reload regs. */
3795 emit_reload_insns (insn);
3797 /* Substitute the chosen reload regs from reload_reg_rtx
3798 into the insn's body (or perhaps into the bodies of other
3799 load and store insns that we just made for reloading
3800 and that we moved the structure into). */
3801 subst_reloads ();
3803 /* If this was an ASM, make sure that all the reload insns
3804 we have generated are valid. If not, give an error
3805 and delete them. */
3807 if (asm_noperands (PATTERN (insn)) >= 0)
3808 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
3809 if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
3810 && (recog_memoized (p) < 0
3811 || (insn_extract (p),
3812 ! constrain_operands (INSN_CODE (p), 1))))
3814 error_for_asm (insn,
3815 "`asm' operand requires impossible reload");
3816 PUT_CODE (p, NOTE);
3817 NOTE_SOURCE_FILE (p) = 0;
3818 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3821 /* Any previously reloaded spilled pseudo reg, stored in this insn,
3822 is no longer validly lying around to save a future reload.
3823 Note that this does not detect pseudos that were reloaded
3824 for this insn in order to be stored into it
3825 (obeying register constraints). That is correct; such reload
3826 registers ARE still valid. */
3827 note_stores (PATTERN (insn), forget_old_reloads_1);
3829 /* There may have been CLOBBER insns placed after INSN. So scan
3830 between INSN and NEXT and use them to forget old reloads. */
3831 for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
3832 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
3833 note_stores (PATTERN (x), forget_old_reloads_1);
3835 #ifdef AUTO_INC_DEC
3836 /* Likewise for regs altered by auto-increment in this insn.
3837 But note that the reg-notes are not changed by reloading:
3838 they still contain the pseudo-regs, not the spill regs. */
3839 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
3840 if (REG_NOTE_KIND (x) == REG_INC)
3842 /* See if this pseudo reg was reloaded in this insn.
3843 If so, its last-reload info is still valid
3844 because it is based on this insn's reload. */
3845 for (i = 0; i < n_reloads; i++)
3846 if (reload_out[i] == XEXP (x, 0))
3847 break;
3849 if (i == n_reloads)
3850 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
3852 #endif
3854 /* A reload reg's contents are unknown after a label. */
3855 if (GET_CODE (insn) == CODE_LABEL)
3856 for (i = 0; i < n_spills; i++)
3858 reg_reloaded_contents[i] = -1;
3859 reg_reloaded_insn[i] = 0;
3862 /* Don't assume a reload reg is still good after a call insn
3863 if it is a call-used reg. */
3864 else if (GET_CODE (insn) == CALL_INSN)
3865 for (i = 0; i < n_spills; i++)
3866 if (call_used_regs[spill_regs[i]])
3868 reg_reloaded_contents[i] = -1;
3869 reg_reloaded_insn[i] = 0;
3872 /* In case registers overlap, allow certain insns to invalidate
3873 particular hard registers. */
3875 #ifdef INSN_CLOBBERS_REGNO_P
3876 for (i = 0 ; i < n_spills ; i++)
3877 if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
3879 reg_reloaded_contents[i] = -1;
3880 reg_reloaded_insn[i] = 0;
3882 #endif
3884 insn = next;
3886 #ifdef USE_C_ALLOCA
3887 alloca (0);
3888 #endif
3892 /* Discard all record of any value reloaded from X,
3893 or reloaded in X from someplace else;
3894 unless X is an output reload reg of the current insn.
3896 X may be a hard reg (the reload reg)
3897 or it may be a pseudo reg that was reloaded from. */
3899 static void
3900 forget_old_reloads_1 (x, ignored)
3901 rtx x;
3902 rtx ignored;
3904 register int regno;
3905 int nr;
3906 int offset = 0;
3908 /* note_stores does give us subregs of hard regs. */
3909 while (GET_CODE (x) == SUBREG)
3911 offset += SUBREG_WORD (x);
3912 x = SUBREG_REG (x);
3915 if (GET_CODE (x) != REG)
3916 return;
3918 regno = REGNO (x) + offset;
3920 if (regno >= FIRST_PSEUDO_REGISTER)
3921 nr = 1;
3922 else
3924 int i;
3925 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
3926 /* Storing into a spilled-reg invalidates its contents.
3927 This can happen if a block-local pseudo is allocated to that reg
3928 and it wasn't spilled because this block's total need is 0.
3929 Then some insn might have an optional reload and use this reg. */
3930 for (i = 0; i < nr; i++)
3931 if (spill_reg_order[regno + i] >= 0
3932 /* But don't do this if the reg actually serves as an output
3933 reload reg in the current instruction. */
3934 && (n_reloads == 0
3935 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
3937 reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
3938 reg_reloaded_insn[spill_reg_order[regno + i]] = 0;
3942 /* Since value of X has changed,
3943 forget any value previously copied from it. */
3945 while (nr-- > 0)
3946 /* But don't forget a copy if this is the output reload
3947 that establishes the copy's validity. */
3948 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
3949 reg_last_reload_reg[regno + nr] = 0;
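/* Example of the SUBREG handling above (word size hypothetical): on a
   machine with 32-bit words, a store into (subreg:SI (reg:DI 2) 1) is
   treated as a store starting at hard reg 3 (REGNO 2 plus SUBREG_WORD 1),
   so the recorded reload values for that register are discarded, unless
   it is serving as an output reload reg for the current insn.  */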
3952 /* For each reload, the mode of the reload register. */
3953 static enum machine_mode reload_mode[MAX_RELOADS];
3955 /* For each reload, the largest number of registers it will require. */
3956 static int reload_nregs[MAX_RELOADS];
3958 /* Comparison function for qsort to decide which of two reloads
3959 should be handled first. *P1 and *P2 are the reload numbers. */
3961 static int
3962 reload_reg_class_lower (p1, p2)
3963 short *p1, *p2;
3965 register int r1 = *p1, r2 = *p2;
3966 register int t;
3968 /* Consider required reloads before optional ones. */
3969 t = reload_optional[r1] - reload_optional[r2];
3970 if (t != 0)
3971 return t;
3973 /* Count all solitary classes before non-solitary ones. */
3974 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
3975 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
3976 if (t != 0)
3977 return t;
3979 /* Aside from solitaires, consider all multi-reg groups first. */
3980 t = reload_nregs[r2] - reload_nregs[r1];
3981 if (t != 0)
3982 return t;
3984 /* Consider reloads in order of increasing reg-class number. */
3985 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
3986 if (t != 0)
3987 return t;
3989 /* If reloads are equally urgent, sort by reload number,
3990 so that the results of qsort leave nothing to chance. */
3991 return r1 - r2;
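/* To illustrate the resulting order with some hypothetical reloads: a
   required reload whose class holds exactly one register sorts first;
   next comes a required two-register group, then a required
   single-register reload in a multi-register class (lower class numbers
   first); optional reloads always follow the required ones, and any
   remaining ties are broken by reload number.  */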
3994 /* The following HARD_REG_SETs indicate when each hard register is
3995 used for a reload of various parts of the current insn. */
3997 /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
3998 static HARD_REG_SET reload_reg_used;
3999 /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4000 static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4001 /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4002 static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4003 /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4004 static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4005 /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4006 static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4007 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4008 static HARD_REG_SET reload_reg_used_in_op_addr;
4009 /* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4010 static HARD_REG_SET reload_reg_used_in_op_addr_reload;
4011 /* If reg is in use for a RELOAD_FOR_INSN reload. */
4012 static HARD_REG_SET reload_reg_used_in_insn;
4013 /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4014 static HARD_REG_SET reload_reg_used_in_other_addr;
4016 /* If reg is in use as a reload reg for any sort of reload. */
4017 static HARD_REG_SET reload_reg_used_at_all;
4019 /* If reg is in use as an inherited reload. We just mark the first register
4020 in the group. */
4021 static HARD_REG_SET reload_reg_used_for_inherit;
4023 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4024 TYPE. MODE is used to indicate how many consecutive regs are
4025 actually used. */
4027 static void
4028 mark_reload_reg_in_use (regno, opnum, type, mode)
4029 int regno;
4030 int opnum;
4031 enum reload_type type;
4032 enum machine_mode mode;
4034 int nregs = HARD_REGNO_NREGS (regno, mode);
4035 int i;
4037 for (i = regno; i < nregs + regno; i++)
4039 switch (type)
4041 case RELOAD_OTHER:
4042 SET_HARD_REG_BIT (reload_reg_used, i);
4043 break;
4045 case RELOAD_FOR_INPUT_ADDRESS:
4046 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4047 break;
4049 case RELOAD_FOR_OUTPUT_ADDRESS:
4050 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4051 break;
4053 case RELOAD_FOR_OPERAND_ADDRESS:
4054 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4055 break;
4057 case RELOAD_FOR_OPADDR_ADDR:
4058 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4059 break;
4061 case RELOAD_FOR_OTHER_ADDRESS:
4062 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4063 break;
4065 case RELOAD_FOR_INPUT:
4066 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4067 break;
4069 case RELOAD_FOR_OUTPUT:
4070 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4071 break;
4073 case RELOAD_FOR_INSN:
4074 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
4075 break;
4078 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
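#if 0
  /* Purely illustrative call (the regno, operand number and mode are
     hypothetical): on a target where HARD_REGNO_NREGS (4, DImode) == 2,
     this would set bits 4 and 5 in reload_reg_used_in_input[0] and in
     reload_reg_used_at_all.  */
  mark_reload_reg_in_use (4, 0, RELOAD_FOR_INPUT, DImode);
#endif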
4082 /* Similarly, but show REGNO is no longer in use for a reload. */
4084 static void
4085 clear_reload_reg_in_use (regno, opnum, type, mode)
4086 int regno;
4087 int opnum;
4088 enum reload_type type;
4089 enum machine_mode mode;
4091 int nregs = HARD_REGNO_NREGS (regno, mode);
4092 int i;
4094 for (i = regno; i < nregs + regno; i++)
4096 switch (type)
4098 case RELOAD_OTHER:
4099 CLEAR_HARD_REG_BIT (reload_reg_used, i);
4100 break;
4102 case RELOAD_FOR_INPUT_ADDRESS:
4103 CLEAR_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4104 break;
4106 case RELOAD_FOR_OUTPUT_ADDRESS:
4107 CLEAR_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4108 break;
4110 case RELOAD_FOR_OPERAND_ADDRESS:
4111 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4112 break;
4114 case RELOAD_FOR_OPADDR_ADDR:
4115 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4116 break;
4118 case RELOAD_FOR_OTHER_ADDRESS:
4119 CLEAR_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4120 break;
4122 case RELOAD_FOR_INPUT:
4123 CLEAR_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4124 break;
4126 case RELOAD_FOR_OUTPUT:
4127 CLEAR_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4128 break;
4130 case RELOAD_FOR_INSN:
4131 CLEAR_HARD_REG_BIT (reload_reg_used_in_insn, i);
4132 break;
4137 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
4138 specified by OPNUM and TYPE. */
4140 static int
4141 reload_reg_free_p (regno, opnum, type)
4142 int regno;
4143 int opnum;
4144 enum reload_type type;
4146 int i;
4148 /* In use for a RELOAD_OTHER means it's not available for anything except
4149 RELOAD_FOR_OTHER_ADDRESS. Recall that RELOAD_FOR_OTHER_ADDRESS is known
4150 to be used only for inputs. */
4152 if (type != RELOAD_FOR_OTHER_ADDRESS
4153 && TEST_HARD_REG_BIT (reload_reg_used, regno))
4154 return 0;
4156 switch (type)
4158 case RELOAD_OTHER:
4159 /* In use for anything except RELOAD_FOR_OTHER_ADDRESS means
4160 we can't use it for RELOAD_OTHER. */
4161 if (TEST_HARD_REG_BIT (reload_reg_used, regno)
4162 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4163 || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4164 return 0;
4166 for (i = 0; i < reload_n_operands; i++)
4167 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4168 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4169 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4170 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4171 return 0;
4173 return 1;
4175 case RELOAD_FOR_INPUT:
4176 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4177 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4178 return 0;
4180 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4181 return 0;
4183 /* If it is used for some other input, can't use it. */
4184 for (i = 0; i < reload_n_operands; i++)
4185 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4186 return 0;
4188 /* If it is used in a later operand's address, can't use it. */
4189 for (i = opnum + 1; i < reload_n_operands; i++)
4190 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4191 return 0;
4193 return 1;
4195 case RELOAD_FOR_INPUT_ADDRESS:
4196 /* Can't use a register if it is used for an input address for this
4197 operand or used as an input in an earlier one. */
4198 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno))
4199 return 0;
4201 for (i = 0; i < opnum; i++)
4202 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4203 return 0;
4205 return 1;
4207 case RELOAD_FOR_OUTPUT_ADDRESS:
4208 /* Can't use a register if it is used for an output address for this
4209 operand or used as an output in this or a later operand. */
4210 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4211 return 0;
4213 for (i = opnum; i < reload_n_operands; i++)
4214 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4215 return 0;
4217 return 1;
4219 case RELOAD_FOR_OPERAND_ADDRESS:
4220 for (i = 0; i < reload_n_operands; i++)
4221 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4222 return 0;
4224 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4225 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4227 case RELOAD_FOR_OPADDR_ADDR:
4228 for (i = 0; i < reload_n_operands; i++)
4229 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4230 return 0;
4232 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
4234 case RELOAD_FOR_OUTPUT:
4235 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4236 outputs, or an operand address for this or an earlier output. */
4237 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4238 return 0;
4240 for (i = 0; i < reload_n_operands; i++)
4241 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4242 return 0;
4244 for (i = 0; i <= opnum; i++)
4245 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
4246 return 0;
4248 return 1;
4250 case RELOAD_FOR_INSN:
4251 for (i = 0; i < reload_n_operands; i++)
4252 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4253 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4254 return 0;
4256 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4257 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4259 case RELOAD_FOR_OTHER_ADDRESS:
4260 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4262 abort ();
4265 /* Return 1 if the value in reload reg REGNO, as used by a reload
4266 needed for the part of the insn specified by OPNUM and TYPE,
4267 is not in use for a reload in any prior part of the insn.
4269 We can assume that the reload reg was already tested for availability
4270 at the time it is needed, and we should not check this again,
4271 in case the reg has already been marked in use. */
4273 static int
4274 reload_reg_free_before_p (regno, opnum, type)
4275 int regno;
4276 int opnum;
4277 enum reload_type type;
4279 int i;
4281 switch (type)
4283 case RELOAD_FOR_OTHER_ADDRESS:
4284 /* These always come first. */
4285 return 1;
4287 case RELOAD_OTHER:
4288 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4290 /* If this use is for part of the insn,
4291 check that the reg is not in use for any prior part. It is tempting
4292 to try to do this by falling through from objects that occur
4293 later in the insn to ones that occur earlier, but that will not
4294 correctly take into account the fact that here we MUST ignore
4295 things that would prevent the register from being allocated in
4296 the first place, since we know that it was allocated. */
4298 case RELOAD_FOR_OUTPUT_ADDRESS:
4299 /* Earlier reloads are for earlier outputs or their addresses,
4300 any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
4301 RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
4302 RELOAD_OTHER). */
4303 for (i = 0; i < opnum; i++)
4304 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4305 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4306 return 0;
4308 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4309 return 0;
4311 for (i = 0; i < reload_n_operands; i++)
4312 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4313 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4314 return 0;
4316 return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4317 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4318 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4320 case RELOAD_FOR_OUTPUT:
4321 /* This can't be used in the output address for this operand and
4322 anything that can't be used for it, except that we've already
4323 tested for RELOAD_FOR_INSN objects. */
4325 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4326 return 0;
4328 for (i = 0; i < opnum; i++)
4329 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4330 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4331 return 0;
4333 for (i = 0; i < reload_n_operands; i++)
4334 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4335 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4336 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4337 return 0;
4339 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4341 case RELOAD_FOR_OPERAND_ADDRESS:
4342 case RELOAD_FOR_OPADDR_ADDR:
4343 case RELOAD_FOR_INSN:
4344 /* These can't conflict with inputs, or each other, so all we have to
4345 test is input addresses and the addresses of OTHER items. */
4347 for (i = 0; i < reload_n_operands; i++)
4348 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4349 return 0;
4351 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4353 case RELOAD_FOR_INPUT:
4354 /* The only things earlier are the address for this and
4355 earlier inputs, other inputs (which we know we don't conflict
4356 with), and addresses of RELOAD_OTHER objects. */
4358 for (i = 0; i <= opnum; i++)
4359 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4360 return 0;
4362 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4364 case RELOAD_FOR_INPUT_ADDRESS:
4365 /* Similarly, all we have to check is for use in earlier inputs'
4366 addresses. */
4367 for (i = 0; i < opnum; i++)
4368 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4369 return 0;
4371 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4373 abort ();
4376 /* Return 1 if the value in reload reg REGNO, as used by a reload
4377 needed for the part of the insn specified by OPNUM and TYPE,
4378 is still available in REGNO at the end of the insn.
4380 We can assume that the reload reg was already tested for availability
4381 at the time it is needed, and we should not check this again,
4382 in case the reg has already been marked in use. */
4384 static int
4385 reload_reg_reaches_end_p (regno, opnum, type)
4386 int regno;
4387 int opnum;
4388 enum reload_type type;
4390 int i;
4392 switch (type)
4394 case RELOAD_OTHER:
4395 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4396 its value must reach the end. */
4397 return 1;
4399 /* If this use is for part of the insn,
4400 its value reaches the end if no subsequent part uses the same register.
4401 Just like the above function, don't try to do this with lots
4402 of fallthroughs. */
4404 case RELOAD_FOR_OTHER_ADDRESS:
4405 /* Here we check for everything else, since these don't conflict
4406 with anything else and everything comes later. */
4408 for (i = 0; i < reload_n_operands; i++)
4409 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4410 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4411 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4412 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4413 return 0;
4415 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4416 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4417 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4419 case RELOAD_FOR_INPUT_ADDRESS:
4420 /* Similar, except that we check only for this and subsequent inputs
4421 and the address of only subsequent inputs and we do not need
4422 to check for RELOAD_OTHER objects since they are known not to
4423 conflict. */
4425 for (i = opnum; i < reload_n_operands; i++)
4426 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4427 return 0;
4429 for (i = opnum + 1; i < reload_n_operands; i++)
4430 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4431 return 0;
4433 for (i = 0; i < reload_n_operands; i++)
4434 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4435 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4436 return 0;
4438 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4439 return 0;
4441 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4442 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4444 case RELOAD_FOR_INPUT:
4445 /* Similar to input address, except we start at the next operand for
4446 both input and input address and we do not check for
4447 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4448 would conflict. */
4450 for (i = opnum + 1; i < reload_n_operands; i++)
4451 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4452 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4453 return 0;
4455 /* ... fall through ... */
4457 case RELOAD_FOR_OPERAND_ADDRESS:
4458 /* Check outputs and their addresses. */
4460 for (i = 0; i < reload_n_operands; i++)
4461 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4462 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4463 return 0;
4465 return 1;
4467 case RELOAD_FOR_OPADDR_ADDR:
4468 for (i = 0; i < reload_n_operands; i++)
4469 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4470 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4471 return 0;
4473 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4474 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4476 case RELOAD_FOR_INSN:
4477 /* These conflict with other outputs with RELOAD_OTHER. So
4478 we need only check for output addresses. */
4480 opnum = -1;
4482 /* ... fall through ... */
4484 case RELOAD_FOR_OUTPUT:
4485 case RELOAD_FOR_OUTPUT_ADDRESS:
4486 /* We already know these can't conflict with a later output. So the
4487 only things to check are later output addresses.
4488 for (i = opnum + 1; i < reload_n_operands; i++)
4489 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
4490 return 0;
4492 return 1;
4495 abort ();
4498 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
4499 Return 0 otherwise.
4501 This function uses the same algorithm as reload_reg_free_p above. */
4503 static int
4504 reloads_conflict (r1, r2)
4505 int r1, r2;
4507 enum reload_type r1_type = reload_when_needed[r1];
4508 enum reload_type r2_type = reload_when_needed[r2];
4509 int r1_opnum = reload_opnum[r1];
4510 int r2_opnum = reload_opnum[r2];
4512 /* RELOAD_OTHER conflicts with everything except
4513 RELOAD_FOR_OTHER_ADDRESS. */
4515 if ((r1_type == RELOAD_OTHER && r2_type != RELOAD_FOR_OTHER_ADDRESS)
4516 || (r2_type == RELOAD_OTHER && r1_type != RELOAD_FOR_OTHER_ADDRESS))
4517 return 1;
4519 /* Otherwise, check conflicts differently for each type. */
4521 switch (r1_type)
4523 case RELOAD_FOR_INPUT:
4524 return (r2_type == RELOAD_FOR_INSN
4525 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
4526 || r2_type == RELOAD_FOR_OPADDR_ADDR
4527 || r2_type == RELOAD_FOR_INPUT
4528 || (r2_type == RELOAD_FOR_INPUT_ADDRESS && r2_opnum > r1_opnum));
4530 case RELOAD_FOR_INPUT_ADDRESS:
4531 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
4532 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4534 case RELOAD_FOR_OUTPUT_ADDRESS:
4535 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
4536 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4538 case RELOAD_FOR_OPERAND_ADDRESS:
4539 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
4540 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4542 case RELOAD_FOR_OPADDR_ADDR:
4543 return (r2_type == RELOAD_FOR_INPUT
4544 || r2_type == RELOAD_FOR_OPADDR_ADDR);
4546 case RELOAD_FOR_OUTPUT:
4547 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
4548 || (r2_type == RELOAD_FOR_OUTPUT_ADDRESS
4549 && r2_opnum >= r1_opnum));
4551 case RELOAD_FOR_INSN:
4552 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
4553 || r2_type == RELOAD_FOR_INSN
4554 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4556 case RELOAD_FOR_OTHER_ADDRESS:
4557 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
4559 default:
4560 abort ();
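/* For example (operand numbers hypothetical), a RELOAD_FOR_INPUT reload
   for operand 1 conflicts with a RELOAD_FOR_INPUT_ADDRESS reload for
   operand 2 but not with one for operand 0, and a RELOAD_OTHER reload
   conflicts with everything except RELOAD_FOR_OTHER_ADDRESS.  */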
4564 /* Vector of reload-numbers showing the order in which the reloads should
4565 be processed. */
4566 short reload_order[MAX_RELOADS];
4568 /* Indexed by reload number, 1 if incoming value
4569 inherited from previous insns. */
4570 char reload_inherited[MAX_RELOADS];
4572 /* For an inherited reload, this is the insn the reload was inherited from,
4573 if we know it. Otherwise, this is 0. */
4574 rtx reload_inheritance_insn[MAX_RELOADS];
4576 /* If non-zero, this is a place to get the value of the reload,
4577 rather than using reload_in. */
4578 rtx reload_override_in[MAX_RELOADS];
4580 /* For each reload, the index in spill_regs of the spill register used,
4581 or -1 if we did not need one of the spill registers for this reload. */
4582 int reload_spill_index[MAX_RELOADS];
4584 /* Index of last register assigned as a spill register. We allocate in
4585 a round-robin fashion. */
4587 static int last_spill_reg = 0;
4589 /* Find a spill register to use as a reload register for reload R.
4590 LAST_RELOAD is non-zero if this is the last reload for the insn being
4591 processed.
4593 Set reload_reg_rtx[R] to the register allocated.
4595 If NOERROR is nonzero, we return 1 if successful,
4596 or 0 if we couldn't find a spill reg and we didn't change anything. */
4598 static int
4599 allocate_reload_reg (r, insn, last_reload, noerror)
4600 int r;
4601 rtx insn;
4602 int last_reload;
4603 int noerror;
4605 int i;
4606 int pass;
4607 int count;
4608 rtx new;
4609 int regno;
4611 /* If we put this reload ahead, thinking it is a group,
4612 then insist on finding a group. Otherwise we can grab a
4613 reg that some other reload needs.
4614 (That can happen when we have a 68000 DATA_OR_FP_REG
4615 which is a group of data regs or one fp reg.)
4616 We need not be so restrictive if there are no more reloads
4617 for this insn.
4619 ??? Really it would be nicer to have smarter handling
4620 for that kind of reg class, where a problem like this is normal.
4621 Perhaps those classes should be avoided for reloading
4622 by use of more alternatives. */
4624 int force_group = reload_nregs[r] > 1 && ! last_reload;
4626 /* If we want a single register and haven't yet found one,
4627 take any reg in the right class and not in use.
4628 If we want a consecutive group, here is where we look for it.
4630 We use two passes so we can first look for reload regs to
4631 reuse, which are already in use for other reloads in this insn,
4632 and only then use additional registers.
4633 I think that maximizing reuse is needed to make sure we don't
4634 run out of reload regs. Suppose we have three reloads, and
4635 reloads A and B can share regs. These need two regs.
4636 Suppose A and B are given different regs.
4637 That leaves none for C. */
4638 for (pass = 0; pass < 2; pass++)
4640 /* I is the index in spill_regs.
4641 We advance it round-robin between insns to use all spill regs
4642 equally, so that inherited reloads have a chance
4643 of leapfrogging each other. Don't do this, however, when we have
4644 group needs and failure would be fatal; if we only have a relatively
4645 small number of spill registers, and more than one of them has
4646 group needs, then by starting in the middle, we may end up
4647 allocating the first one in such a way that we are not left with
4648 sufficient groups to handle the rest. */
4650 if (noerror || ! force_group)
4651 i = last_spill_reg;
4652 else
4653 i = -1;
4655 for (count = 0; count < n_spills; count++)
4657 int class = (int) reload_reg_class[r];
4659 i = (i + 1) % n_spills;
4661 if (reload_reg_free_p (spill_regs[i], reload_opnum[r],
4662 reload_when_needed[r])
4663 && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
4664 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
4665 /* Look first for regs to share, then for unshared. But
4666 don't share regs used for inherited reloads; they are
4667 the ones we want to preserve. */
4668 && (pass
4669 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
4670 spill_regs[i])
4671 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
4672 spill_regs[i]))))
4674 int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
4675 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
4676 (on 68000) got us two FP regs. If NR is 1,
4677 we would reject both of them. */
4678 if (force_group)
4679 nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
4680 /* If we need only one reg, we have already won. */
4681 if (nr == 1)
4683 /* But reject a single reg if we demand a group. */
4684 if (force_group)
4685 continue;
4686 break;
4688 /* Otherwise check that as many consecutive regs as we need
4689 are available here.
4690 Also, don't use for a group registers that are
4691 needed for nongroups. */
4692 if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
4693 while (nr > 1)
4695 regno = spill_regs[i] + nr - 1;
4696 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
4697 && spill_reg_order[regno] >= 0
4698 && reload_reg_free_p (regno, reload_opnum[r],
4699 reload_when_needed[r])
4700 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
4701 regno)))
4702 break;
4703 nr--;
4705 if (nr == 1)
4706 break;
4710 /* If we found something on pass 1, omit pass 2. */
4711 if (count < n_spills)
4712 break;
4715 /* We should have found a spill register by now. */
4716 if (count == n_spills)
4718 if (noerror)
4719 return 0;
4720 goto failure;
4723 /* I is the index in SPILL_REG_RTX of the reload register we are to
4724 allocate. Get an rtx for it and find its register number. */
4726 new = spill_reg_rtx[i];
4728 if (new == 0 || GET_MODE (new) != reload_mode[r])
4729 spill_reg_rtx[i] = new
4730 = gen_rtx (REG, reload_mode[r], spill_regs[i]);
4732 regno = true_regnum (new);
4734 /* Detect when the reload reg can't hold the reload mode.
4735 This used to be one `if', but Sequent compiler can't handle that. */
4736 if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
4738 enum machine_mode test_mode = VOIDmode;
4739 if (reload_in[r])
4740 test_mode = GET_MODE (reload_in[r]);
4741 /* If reload_in[r] has VOIDmode, it means we will load it
4742 in whatever mode the reload reg has: to wit, reload_mode[r].
4743 We have already tested that for validity. */
4744 /* Aside from that, we need to test that the expressions
4745 to reload from or into have modes which are valid for this
4746 reload register. Otherwise the reload insns would be invalid. */
4747 if (! (reload_in[r] != 0 && test_mode != VOIDmode
4748 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
4749 if (! (reload_out[r] != 0
4750 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
4752 /* The reg is OK. */
4753 last_spill_reg = i;
4755 /* Mark as in use for this insn the reload regs we use
4756 for this. */
4757 mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
4758 reload_when_needed[r], reload_mode[r]);
4760 reload_reg_rtx[r] = new;
4761 reload_spill_index[r] = i;
4762 return 1;
4766 /* The reg is not OK. */
4767 if (noerror)
4768 return 0;
4770 failure:
4771 if (asm_noperands (PATTERN (insn)) < 0)
4772 /* It's the compiler's fault. */
4773 abort ();
4775 /* It's the user's fault; the operand's mode and constraint
4776 don't match. Disable this reload so we don't crash in final. */
4777 error_for_asm (insn,
4778 "`asm' operand constraint incompatible with operand size");
4779 reload_in[r] = 0;
4780 reload_out[r] = 0;
4781 reload_reg_rtx[r] = 0;
4782 reload_optional[r] = 1;
4783 reload_secondary_p[r] = 1;
4785 return 1;
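/* A sketch of the round-robin scan above, with hypothetical numbers: if
   n_spills is 4 and last_spill_reg is 2, the loop examines
   spill_regs[3], spill_regs[0], spill_regs[1] and spill_regs[2] in that
   order, so successive insns tend to start from different spill regs and
   inherited reloads get a chance to survive.  */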
4788 /* Assign hard reg targets for the pseudo-registers we must reload
4789 into hard regs for this insn.
4790 Also output the instructions to copy them in and out of the hard regs.
4792 For machines with register classes, we are responsible for
4793 finding a reload reg in the proper class. */
4795 static void
4796 choose_reload_regs (insn, avoid_return_reg)
4797 rtx insn;
4798 rtx avoid_return_reg;
4800 register int i, j;
4801 int max_group_size = 1;
4802 enum reg_class group_class = NO_REGS;
4803 int inheritance;
4805 rtx save_reload_reg_rtx[MAX_RELOADS];
4806 char save_reload_inherited[MAX_RELOADS];
4807 rtx save_reload_inheritance_insn[MAX_RELOADS];
4808 rtx save_reload_override_in[MAX_RELOADS];
4809 int save_reload_spill_index[MAX_RELOADS];
4810 HARD_REG_SET save_reload_reg_used;
4811 HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4812 HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4813 HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4814 HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4815 HARD_REG_SET save_reload_reg_used_in_op_addr;
4816 HARD_REG_SET save_reload_reg_used_in_op_addr_reload;
4817 HARD_REG_SET save_reload_reg_used_in_insn;
4818 HARD_REG_SET save_reload_reg_used_in_other_addr;
4819 HARD_REG_SET save_reload_reg_used_at_all;
4821 bzero (reload_inherited, MAX_RELOADS);
4822 bzero ((char *) reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
4823 bzero ((char *) reload_override_in, MAX_RELOADS * sizeof (rtx));
4825 CLEAR_HARD_REG_SET (reload_reg_used);
4826 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
4827 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
4828 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
4829 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
4830 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
4832 for (i = 0; i < reload_n_operands; i++)
4834 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
4835 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
4836 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
4837 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
4840 #ifdef SMALL_REGISTER_CLASSES
4841 /* Don't bother with avoiding the return reg
4842 if we have no mandatory reload that could use it. */
4843 if (avoid_return_reg)
4845 int do_avoid = 0;
4846 int regno = REGNO (avoid_return_reg);
4847 int nregs
4848 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4849 int r;
4851 for (r = regno; r < regno + nregs; r++)
4852 if (spill_reg_order[r] >= 0)
4853 for (j = 0; j < n_reloads; j++)
4854 if (!reload_optional[j] && reload_reg_rtx[j] == 0
4855 && (reload_in[j] != 0 || reload_out[j] != 0
4856 || reload_secondary_p[j])
4858 TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
4859 do_avoid = 1;
4860 if (!do_avoid)
4861 avoid_return_reg = 0;
4863 #endif /* SMALL_REGISTER_CLASSES */
4865 #if 0 /* Not needed, now that we can always retry without inheritance. */
4866 /* See if we have more mandatory reloads than spill regs.
4867 If so, then we cannot risk optimizations that could prevent
4868 reloads from sharing one spill register.
4870 Since we will try finding a better register than reload_reg_rtx
4871 unless it is equal to reload_in or reload_out, count such reloads. */
4874 int tem = 0;
4875 #ifdef SMALL_REGISTER_CLASSES
4876 int tem = (avoid_return_reg != 0);
4877 #endif
4878 for (j = 0; j < n_reloads; j++)
4879 if (! reload_optional[j]
4880 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
4881 && (reload_reg_rtx[j] == 0
4882 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
4883 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
4884 tem++;
4885 if (tem > n_spills)
4886 must_reuse = 1;
4888 #endif
4890 #ifdef SMALL_REGISTER_CLASSES
4891 /* Don't use the subroutine call return reg for a reload
4892 if we are supposed to avoid it. */
4893 if (avoid_return_reg)
4895 int regno = REGNO (avoid_return_reg);
4896 int nregs
4897 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4898 int r;
4900 for (r = regno; r < regno + nregs; r++)
4901 if (spill_reg_order[r] >= 0)
4902 SET_HARD_REG_BIT (reload_reg_used, r);
4904 #endif /* SMALL_REGISTER_CLASSES */
4906 /* In order to be certain of getting the registers we need,
4907 we must sort the reloads into order of increasing register class.
4908 Then our grabbing of reload registers will parallel the process
4909 that provided the reload registers.
4911 Also note whether any of the reloads wants a consecutive group of regs.
4912 If so, record the maximum size of the group desired and what
4913 register class contains all the groups needed by this insn. */
4915 for (j = 0; j < n_reloads; j++)
4917 reload_order[j] = j;
4918 reload_spill_index[j] = -1;
4920 reload_mode[j]
4921 = (reload_inmode[j] == VOIDmode
4922 || (GET_MODE_SIZE (reload_outmode[j])
4923 > GET_MODE_SIZE (reload_inmode[j])))
4924 ? reload_outmode[j] : reload_inmode[j];
4926 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
4928 if (reload_nregs[j] > 1)
4930 max_group_size = MAX (reload_nregs[j], max_group_size);
4931 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
4934 /* If we have already decided to use a certain register,
4935 don't use it in another way. */
4936 if (reload_reg_rtx[j])
4937 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
4938 reload_when_needed[j], reload_mode[j]);
4941 if (n_reloads > 1)
4942 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
4944 bcopy ((char *) reload_reg_rtx, (char *) save_reload_reg_rtx,
4945 sizeof reload_reg_rtx);
4946 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
4947 bcopy ((char *) reload_inheritance_insn,
4948 (char *) save_reload_inheritance_insn,
4949 sizeof reload_inheritance_insn);
4950 bcopy ((char *) reload_override_in, (char *) save_reload_override_in,
4951 sizeof reload_override_in);
4952 bcopy ((char *) reload_spill_index, (char *) save_reload_spill_index,
4953 sizeof reload_spill_index);
4954 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
4955 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
4956 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
4957 reload_reg_used_in_op_addr);
4959 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr_reload,
4960 reload_reg_used_in_op_addr_reload);
4962 COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
4963 reload_reg_used_in_insn);
4964 COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
4965 reload_reg_used_in_other_addr);
4967 for (i = 0; i < reload_n_operands; i++)
4969 COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
4970 reload_reg_used_in_output[i]);
4971 COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
4972 reload_reg_used_in_input[i]);
4973 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
4974 reload_reg_used_in_input_addr[i]);
4975 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
4976 reload_reg_used_in_output_addr[i]);
4979 /* If -O, try first with inheritance, then turning it off.
4980 If not -O, don't do inheritance.
4981 Using inheritance when not optimizing leads to paradoxes
4982 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
4983 because one side of the comparison might be inherited. */
4985 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
4987 /* Process the reloads in order of preference just found.
4988 Beyond this point, subregs can be found in reload_reg_rtx.
4990 This used to look for an existing reloaded home for all
4991 of the reloads, and only then perform any new reloads.
4992 But that could lose if the reloads were done out of reg-class order
4993 because a later reload with a looser constraint might have an old
4994 home in a register needed by an earlier reload with a tighter constraint.
4996 To solve this, we make two passes over the reloads, in the order
4997 described above. In the first pass we try to inherit a reload
4998 from a previous insn. If there is a later reload that needs a
4999 class that is a proper subset of the class being processed, we must
5000 also allocate a spill register during the first pass.
5002 Then make a second pass over the reloads to allocate any reloads
5003 that haven't been given registers yet. */
5005 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5007 for (j = 0; j < n_reloads; j++)
5009 register int r = reload_order[j];
5011 /* Ignore reloads that got marked inoperative. */
5012 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5013 continue;
5015 /* If find_reloads chose to use reload_in or reload_out as a reload
5016 register, we don't need to choose one. Otherwise, try even if it found
5017 one, since we might save an insn if we find the value lying around. */
5018 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
5019 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
5020 || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
5021 continue;
5023 #if 0 /* No longer needed for correct operation.
5024 It might give better code, or might not; worth an experiment? */
5025 /* If this is an optional reload, we can't inherit from earlier insns
5026 until we are sure that any non-optional reloads have been allocated.
5027 The following code takes advantage of the fact that optional reloads
5028 are at the end of reload_order. */
5029 if (reload_optional[r] != 0)
5030 for (i = 0; i < j; i++)
5031 if ((reload_out[reload_order[i]] != 0
5032 || reload_in[reload_order[i]] != 0
5033 || reload_secondary_p[reload_order[i]])
5034 && ! reload_optional[reload_order[i]]
5035 && reload_reg_rtx[reload_order[i]] == 0)
5036 allocate_reload_reg (reload_order[i], insn, 0, inheritance);
5037 #endif
5039 /* First see if this pseudo is already available as reloaded
5040 for a previous insn. We cannot try to inherit for reloads
5041 that are smaller than the maximum number of registers needed
5042 for groups unless the register we would allocate cannot be used
5043 for the groups.
5045 We could check here to see if this is a secondary reload for
5046 an object that is already in a register of the desired class.
5047 This would avoid the need for the secondary reload register.
5048 But this is complex because we can't easily determine what
5049 objects might want to be loaded via this reload. So let a register
5050 be allocated here. In `emit_reload_insns' we suppress one of the
5051 loads in the case described above. */
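/* For illustration (hypothetical numbers): if a previous insn reloaded
   pseudo 123 into spill reg 4, then reg_last_reload_reg[123] is that REG
   and reg_reloaded_contents for spill reg 4 is still 123.  Provided the
   register is free here, has a suitable mode and class, and does not
   conflict with the group requirements above, this insn can simply reuse
   spill reg 4 instead of emitting another load. */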
5053 if (inheritance)
5055 register int regno = -1;
5056 enum machine_mode mode;
5058 if (reload_in[r] == 0)
5060 else if (GET_CODE (reload_in[r]) == REG)
5062 regno = REGNO (reload_in[r]);
5063 mode = GET_MODE (reload_in[r]);
5065 else if (GET_CODE (reload_in_reg[r]) == REG)
5067 regno = REGNO (reload_in_reg[r]);
5068 mode = GET_MODE (reload_in_reg[r]);
5070 #if 0
5071 /* This won't work, since REGNO can be a pseudo reg number.
5072 Also, it takes much more hair to keep track of all the things
5073 that can invalidate an inherited reload of part of a pseudoreg. */
5074 else if (GET_CODE (reload_in[r]) == SUBREG
5075 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
5076 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
5077 #endif
5079 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
5081 i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];
5083 if (reg_reloaded_contents[i] == regno
5084 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
5085 >= GET_MODE_SIZE (mode))
5086 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
5087 && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5088 spill_regs[i])
5089 && (reload_nregs[r] == max_group_size
5090 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
5091 spill_regs[i]))
5092 && reload_reg_free_p (spill_regs[i], reload_opnum[r],
5093 reload_when_needed[r])
5094 && reload_reg_free_before_p (spill_regs[i],
5095 reload_opnum[r],
5096 reload_when_needed[r]))
5098 /* If a group is needed, verify that all the subsequent
5099 registers still have their values intact. */
5100 int nr
5101 = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5102 int k;
5104 for (k = 1; k < nr; k++)
5105 if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
5106 != regno)
5107 break;
5109 if (k == nr)
5111 int i1;
5113 /* We found a register that contains the
5114 value we need. If this register is the
5115 same as an `earlyclobber' operand of the
5116 current insn, just mark it as a place to
5117 reload from since we can't use it as the
5118 reload register itself. */
5120 for (i1 = 0; i1 < n_earlyclobbers; i1++)
5121 if (reg_overlap_mentioned_for_reload_p
5122 (reg_last_reload_reg[regno],
5123 reload_earlyclobbers[i1]))
5124 break;
5126 if (i1 != n_earlyclobbers
5127 /* Don't really use the inherited spill reg
5128 if we need it wider than we've got it. */
5129 || (GET_MODE_SIZE (reload_mode[r])
5130 > GET_MODE_SIZE (mode)))
5131 reload_override_in[r] = reg_last_reload_reg[regno];
5132 else
5134 int k;
5135 /* We can use this as a reload reg. */
5136 /* Mark the register as in use for this part of
5137 the insn. */
5138 mark_reload_reg_in_use (spill_regs[i],
5139 reload_opnum[r],
5140 reload_when_needed[r],
5141 reload_mode[r]);
5142 reload_reg_rtx[r] = reg_last_reload_reg[regno];
5143 reload_inherited[r] = 1;
5144 reload_inheritance_insn[r]
5145 = reg_reloaded_insn[i];
5146 reload_spill_index[r] = i;
5147 for (k = 0; k < nr; k++)
5148 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5149 spill_regs[i + k]);
5156 /* Here's another way to see if the value is already lying around. */
5157 if (inheritance
5158 && reload_in[r] != 0
5159 && ! reload_inherited[r]
5160 && reload_out[r] == 0
5161 && (CONSTANT_P (reload_in[r])
5162 || GET_CODE (reload_in[r]) == PLUS
5163 || GET_CODE (reload_in[r]) == REG
5164 || GET_CODE (reload_in[r]) == MEM)
5165 && (reload_nregs[r] == max_group_size
5166 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
5168 register rtx equiv
5169 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
5170 -1, NULL_PTR, 0, reload_mode[r]);
5171 int regno;
5173 if (equiv != 0)
5175 if (GET_CODE (equiv) == REG)
5176 regno = REGNO (equiv);
5177 else if (GET_CODE (equiv) == SUBREG)
5179 /* This must be a SUBREG of a hard register.
5180 Make a new REG since this might be used in an
5181 address and not all machines support SUBREGs
5182 there. */
5183 regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
5184 equiv = gen_rtx (REG, reload_mode[r], regno);
5186 else
5187 abort ();
5190 /* If we found a spill reg, reject it unless it is free
5191 and of the desired class. */
5192 if (equiv != 0
5193 && ((spill_reg_order[regno] >= 0
5194 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5195 reload_when_needed[r]))
5196 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5197 regno)))
5198 equiv = 0;
5200 if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
5201 equiv = 0;
5203 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5204 equiv = 0;
5206 /* We found a register that contains the value we need.
5207 If this register is the same as an `earlyclobber' operand
5208 of the current insn, just mark it as a place to reload from
5209 since we can't use it as the reload register itself. */
5211 if (equiv != 0)
5212 for (i = 0; i < n_earlyclobbers; i++)
5213 if (reg_overlap_mentioned_for_reload_p (equiv,
5214 reload_earlyclobbers[i]))
5216 reload_override_in[r] = equiv;
5217 equiv = 0;
5218 break;
5221 /* JRV: If the equiv register we have found is explicitly
5222 clobbered in the current insn, mark but don't use, as above. */
5224 if (equiv != 0 && regno_clobbered_p (regno, insn))
5226 reload_override_in[r] = equiv;
5227 equiv = 0;
5230 /* If we found an equivalent reg, say no code need be generated
5231 to load it, and use it as our reload reg. */
5232 if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
5234 reload_reg_rtx[r] = equiv;
5235 reload_inherited[r] = 1;
5236 /* If it is a spill reg,
5237 mark the spill reg as in use for this insn. */
5238 i = spill_reg_order[regno];
5239 if (i >= 0)
5241 int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);
5242 int k;
5243 mark_reload_reg_in_use (regno, reload_opnum[r],
5244 reload_when_needed[r],
5245 reload_mode[r]);
5246 for (k = 0; k < nr; k++)
5247 SET_HARD_REG_BIT (reload_reg_used_for_inherit, regno + k);
5252 /* If we found a register to use already, or if this is an optional
5253 reload, we are done. */
5254 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
5255 continue;
5257 #if 0 /* No longer needed for correct operation. Might or might not
5258 give better code on the average. Want to experiment? */
5260 /* See if there is a later reload that has a class different from our
5261 class that intersects our class or that requires less register
5262 than our reload. If so, we must allocate a register to this
5263 reload now, since that reload might inherit a previous reload
5264 and take the only available register in our class. Don't do this
5265 for optional reloads since they will force all previous reloads
5266 to be allocated. Also don't do this for reloads that have been
5267 turned off. */
5269 for (i = j + 1; i < n_reloads; i++)
5271 int s = reload_order[i];
5273 if ((reload_in[s] == 0 && reload_out[s] == 0
5274 && ! reload_secondary_p[s])
5275 || reload_optional[s])
5276 continue;
5278 if ((reload_reg_class[s] != reload_reg_class[r]
5279 && reg_classes_intersect_p (reload_reg_class[r],
5280 reload_reg_class[s]))
5281 || reload_nregs[s] < reload_nregs[r])
5282 break;
5285 if (i == n_reloads)
5286 continue;
5288 allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
5289 #endif
5292 /* Now allocate reload registers for anything non-optional that
5293 didn't get one yet. */
5294 for (j = 0; j < n_reloads; j++)
5296 register int r = reload_order[j];
5298 /* Ignore reloads that got marked inoperative. */
5299 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5300 continue;
5302 /* Skip reloads that already have a register allocated or are
5303 optional. */
5304 if (reload_reg_rtx[r] != 0 || reload_optional[r])
5305 continue;
5307 if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
5308 break;
5311 /* If that loop got all the way, we have won. */
5312 if (j == n_reloads)
5313 break;
5315 fail:
5316 /* Loop around and try without any inheritance. */
5317 /* First undo everything done by the failed attempt
5318 to allocate with inheritance. */
5319 bcopy ((char *) save_reload_reg_rtx, (char *) reload_reg_rtx,
5320 sizeof reload_reg_rtx);
5321 bcopy ((char *) save_reload_inherited, (char *) reload_inherited,
5322 sizeof reload_inherited);
5323 bcopy ((char *) save_reload_inheritance_insn,
5324 (char *) reload_inheritance_insn,
5325 sizeof reload_inheritance_insn);
5326 bcopy ((char *) save_reload_override_in, (char *) reload_override_in,
5327 sizeof reload_override_in);
5328 bcopy ((char *) save_reload_spill_index, (char *) reload_spill_index,
5329 sizeof reload_spill_index);
5330 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
5331 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
5332 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
5333 save_reload_reg_used_in_op_addr);
5334 COPY_HARD_REG_SET (reload_reg_used_in_op_addr_reload,
5335 save_reload_reg_used_in_op_addr_reload);
5336 COPY_HARD_REG_SET (reload_reg_used_in_insn,
5337 save_reload_reg_used_in_insn);
5338 COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
5339 save_reload_reg_used_in_other_addr);
5341 for (i = 0; i < reload_n_operands; i++)
5343 COPY_HARD_REG_SET (reload_reg_used_in_input[i],
5344 save_reload_reg_used_in_input[i]);
5345 COPY_HARD_REG_SET (reload_reg_used_in_output[i],
5346 save_reload_reg_used_in_output[i]);
5347 COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
5348 save_reload_reg_used_in_input_addr[i]);
5349 COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
5350 save_reload_reg_used_in_output_addr[i]);
5354 /* If we thought we could inherit a reload, because it seemed that
5355 nothing else wanted the same reload register earlier in the insn,
5356 verify that assumption, now that all reloads have been assigned. */
5358 for (j = 0; j < n_reloads; j++)
5360 register int r = reload_order[j];
5362 if (reload_inherited[r] && reload_reg_rtx[r] != 0
5363 && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
5364 reload_opnum[r],
5365 reload_when_needed[r]))
5366 reload_inherited[r] = 0;
5368 /* If we found a better place to reload from,
5369 validate it in the same fashion, if it is a reload reg. */
5370 if (reload_override_in[r]
5371 && (GET_CODE (reload_override_in[r]) == REG
5372 || GET_CODE (reload_override_in[r]) == SUBREG))
5374 int regno = true_regnum (reload_override_in[r]);
5375 if (spill_reg_order[regno] >= 0
5376 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5377 reload_when_needed[r]))
5378 reload_override_in[r] = 0;
5382 /* Now that reload_override_in is known valid,
5383 actually override reload_in. */
5384 for (j = 0; j < n_reloads; j++)
5385 if (reload_override_in[j])
5386 reload_in[j] = reload_override_in[j];
5388 /* If this reload won't be done because it has been cancelled or is
5389 optional and not inherited, clear reload_reg_rtx so other
5390 routines (such as subst_reloads) don't get confused. */
5391 for (j = 0; j < n_reloads; j++)
5392 if (reload_reg_rtx[j] != 0
5393 && ((reload_optional[j] && ! reload_inherited[j])
5394 || (reload_in[j] == 0 && reload_out[j] == 0
5395 && ! reload_secondary_p[j])))
5397 int regno = true_regnum (reload_reg_rtx[j]);
5399 if (spill_reg_order[regno] >= 0)
5400 clear_reload_reg_in_use (regno, reload_opnum[j],
5401 reload_when_needed[j], reload_mode[j]);
5402 reload_reg_rtx[j] = 0;
5405 /* Record which pseudos and which spill regs have output reloads. */
5406 for (j = 0; j < n_reloads; j++)
5408 register int r = reload_order[j];
5410 i = reload_spill_index[r];
5412 /* I is nonneg if this reload used one of the spill regs.
5413 If reload_reg_rtx[r] is 0, this is an optional reload
5414 that we opted to ignore. */
5415 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
5416 && reload_reg_rtx[r] != 0)
5418 register int nregno = REGNO (reload_out[r]);
5419 int nr = 1;
5421 if (nregno < FIRST_PSEUDO_REGISTER)
5422 nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
5424 while (--nr >= 0)
5425 reg_has_output_reload[nregno + nr] = 1;
5427 if (i >= 0)
5429 nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5430 while (--nr >= 0)
5431 SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
5434 if (reload_when_needed[r] != RELOAD_OTHER
5435 && reload_when_needed[r] != RELOAD_FOR_OUTPUT
5436 && reload_when_needed[r] != RELOAD_FOR_INSN)
5437 abort ();
5442 /* If SMALL_REGISTER_CLASSES is defined, we may not have merged two
5443 reloads of the same item for fear that we might not have enough reload
5444 registers. However, normally they will get the same reload register
5445 and hence actually need not be loaded twice.
5447 Here we check for the most common case of this phenomenon: when we have
5448 a number of reloads for the same object, each of which were allocated
5449 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
5450 reload, and is not modified in the insn itself. If we find such,
5451 merge all the reloads and set the resulting reload to RELOAD_OTHER.
5452 This will not increase the number of spill registers needed and will
5453 prevent redundant code. */
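/* For illustration (hypothetical numbers): if both an operand and an
   address in the insn needed pseudo 200 loaded, find_reloads may have
   created two separate input reloads for it under SMALL_REGISTER_CLASSES.
   When both were assigned the same reload register and that register is
   not otherwise used or modified by the insn, one of the loads is
   redundant; the merge below keeps a single RELOAD_OTHER reload. */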
5455 #ifdef SMALL_REGISTER_CLASSES
5457 static void
5458 merge_assigned_reloads (insn)
5459 rtx insn;
5461 int i, j;
5463 /* Scan all the reloads looking for ones that only load values and
5464 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
5465 assigned and not modified by INSN. */
5467 for (i = 0; i < n_reloads; i++)
5469 if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
5470 || reload_out[i] != 0 || reload_reg_rtx[i] == 0
5471 || reg_set_p (reload_reg_rtx[i], insn))
5472 continue;
5474 /* Look at all other reloads. Ensure that the only use of this
5475 reload_reg_rtx is in a reload that just loads the same value
5476 as we do. Note that any secondary reloads must be of the identical
5477 class since the values, modes, and result registers are the
5478 same, so we need not do anything with any secondary reloads. */
5480 for (j = 0; j < n_reloads; j++)
5482 if (i == j || reload_reg_rtx[j] == 0
5483 || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
5484 reload_reg_rtx[i]))
5485 continue;
5487 /* If the reload regs aren't exactly the same (e.g, different modes)
5488 or if the values are different, we can't merge anything with this
5489 reload register. */
5491 if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
5492 || reload_out[j] != 0 || reload_in[j] == 0
5493 || ! rtx_equal_p (reload_in[i], reload_in[j]))
5494 break;
5497 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
5498 we, in fact, found any matching reloads. */
5500 if (j == n_reloads)
5502 for (j = 0; j < n_reloads; j++)
5503 if (i != j && reload_reg_rtx[j] != 0
5504 && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j]))
5506 reload_when_needed[i] = RELOAD_OTHER;
5507 reload_in[j] = 0;
5508 transfer_replacements (i, j);
5511 /* If this is now RELOAD_OTHER, look for any reloads that load
5512 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
5513 if they were for inputs, RELOAD_OTHER for outputs. Note that
5514 this test is equivalent to looking for reloads for this operand
5515 number. */
5517 if (reload_when_needed[i] == RELOAD_OTHER)
5518 for (j = 0; j < n_reloads; j++)
5519 if (reload_in[j] != 0
5520 && reload_when_needed[j] != RELOAD_OTHER
5521 && reg_overlap_mentioned_for_reload_p (reload_in[j],
5522 reload_in[i]))
5523 reload_when_needed[j]
5524 = reload_when_needed[j] == RELOAD_FOR_INPUT_ADDRESS
5525 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER;
5529 #endif /* SMALL_REGISTER_CLASSES */
5531 /* Output insns to reload values in and out of the chosen reload regs. */
5533 static void
5534 emit_reload_insns (insn)
5535 rtx insn;
5537 register int j;
5538 rtx input_reload_insns[MAX_RECOG_OPERANDS];
5539 rtx other_input_address_reload_insns = 0;
5540 rtx other_input_reload_insns = 0;
5541 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
5542 rtx output_reload_insns[MAX_RECOG_OPERANDS];
5543 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
5544 rtx operand_reload_insns = 0;
5545 rtx other_operand_reload_insns = 0;
5546 rtx following_insn = NEXT_INSN (insn);
5547 rtx before_insn = insn;
5548 int special;
5549 /* Values to be put in spill_reg_store are put here first. */
5550 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
5552 for (j = 0; j < reload_n_operands; j++)
5553 input_reload_insns[j] = input_address_reload_insns[j]
5554 = output_reload_insns[j] = output_address_reload_insns[j] = 0;
5556 /* Now output the instructions to copy the data into and out of the
5557 reload registers. Do these in the order that the reloads were reported,
5558 since reloads of base and index registers precede reloads of operands
5559 and the operands may need the base and index registers reloaded. */
5561 for (j = 0; j < n_reloads; j++)
5563 register rtx old;
5564 rtx oldequiv_reg = 0;
5565 rtx store_insn = 0;
5567 old = reload_in[j];
5568 if (old != 0 && ! reload_inherited[j]
5569 && ! rtx_equal_p (reload_reg_rtx[j], old)
5570 && reload_reg_rtx[j] != 0)
5572 register rtx reloadreg = reload_reg_rtx[j];
5573 rtx oldequiv = 0;
5574 enum machine_mode mode;
5575 rtx *where;
5577 /* Determine the mode to reload in.
5578 This is very tricky because we have three to choose from.
5579 There is the mode the insn operand wants (reload_inmode[J]).
5580 There is the mode of the reload register RELOADREG.
5581 There is the intrinsic mode of the operand, which we could find
5582 by stripping some SUBREGs.
5583 It turns out that RELOADREG's mode is irrelevant:
5584 we can change that arbitrarily.
5586 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
5587 then the reload reg may not support QImode moves, so use SImode.
5588 If foo is in memory due to spilling a pseudo reg, this is safe,
5589 because the QImode value is in the least significant part of a
5590 slot big enough for a SImode. If foo is some other sort of
5591 memory reference, then it is impossible to reload this case,
5592 so previous passes had better make sure this never happens.
5594 Then consider a one-word union which has SImode and one of its
5595 members is a float, being fetched as (SUBREG:SF union:SI).
5596 We must fetch that as SFmode because we could be loading into
5597 a float-only register. In this case OLD's mode is correct.
5599 Consider an immediate integer: it has VOIDmode. Here we need
5600 to get a mode from something else.
5602 In some cases, there is a fourth mode, the operand's
5603 containing mode. If the insn specifies a containing mode for
5604 this operand, it overrides all others.
5606 I am not sure whether the algorithm here is always right,
5607 but it does the right things in those cases. */
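/* Concretely: for old = (subreg:SI (reg:QI N)) GET_MODE (old) is SImode,
   so we reload in SImode as described above; for a CONST_INT, GET_MODE is
   VOIDmode, so reload_inmode[j] supplies the mode. */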
5609 mode = GET_MODE (old);
5610 if (mode == VOIDmode)
5611 mode = reload_inmode[j];
5613 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5614 /* If we need a secondary register for this operation, see if
5615 the value is already in a register in that class. Don't
5616 do this if the secondary register will be used as a scratch
5617 register. */
5619 if (reload_secondary_in_reload[j] >= 0
5620 && reload_secondary_in_icode[j] == CODE_FOR_nothing
5621 && optimize)
5622 oldequiv
5623 = find_equiv_reg (old, insn,
5624 reload_reg_class[reload_secondary_in_reload[j]],
5625 -1, NULL_PTR, 0, mode);
5626 #endif
5628 /* If reloading from memory, see if there is a register
5629 that already holds the same value. If so, reload from there.
5630 We can pass 0 as the reload_reg_p argument because
5631 any other reload has either already been emitted,
5632 in which case find_equiv_reg will see the reload-insn,
5633 or has yet to be emitted, in which case it doesn't matter
5634 because we will use this equiv reg right away. */
5636 if (oldequiv == 0 && optimize
5637 && (GET_CODE (old) == MEM
5638 || (GET_CODE (old) == REG
5639 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5640 && reg_renumber[REGNO (old)] < 0)))
5641 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
5642 -1, NULL_PTR, 0, mode);
5644 if (oldequiv)
5646 int regno = true_regnum (oldequiv);
5648 /* If OLDEQUIV is a spill register, don't use it for this
5649 if any other reload needs it at an earlier stage of this insn
5650 or at this stage. */
5651 if (spill_reg_order[regno] >= 0
5652 && (! reload_reg_free_p (regno, reload_opnum[j],
5653 reload_when_needed[j])
5654 || ! reload_reg_free_before_p (regno, reload_opnum[j],
5655 reload_when_needed[j])))
5656 oldequiv = 0;
5658 /* If OLDEQUIV is not a spill register,
5659 don't use it if any other reload wants it. */
5660 if (spill_reg_order[regno] < 0)
5662 int k;
5663 for (k = 0; k < n_reloads; k++)
5664 if (reload_reg_rtx[k] != 0 && k != j
5665 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
5666 oldequiv))
5668 oldequiv = 0;
5669 break;
5673 /* If it is no cheaper to copy from OLDEQUIV into the
5674 reload register than it would be to move from memory,
5675 don't use it. Likewise, if we need a secondary register
5676 or memory. */
5678 if (oldequiv != 0
5679 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
5680 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
5681 reload_reg_class[j])
5682 >= MEMORY_MOVE_COST (mode)))
5683 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5684 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5685 mode, oldequiv)
5686 != NO_REGS)
5687 #endif
5688 #ifdef SECONDARY_MEMORY_NEEDED
5689 || SECONDARY_MEMORY_NEEDED (reload_reg_class[j],
5690 REGNO_REG_CLASS (regno),
5691 mode)
5692 #endif
5694 oldequiv = 0;
5697 if (oldequiv == 0)
5698 oldequiv = old;
5699 else if (GET_CODE (oldequiv) == REG)
5700 oldequiv_reg = oldequiv;
5701 else if (GET_CODE (oldequiv) == SUBREG)
5702 oldequiv_reg = SUBREG_REG (oldequiv);
5704 /* If we are reloading from a register that was recently stored in
5705 with an output-reload, see if we can prove there was
5706 actually no need to store the old value in it. */
5708 if (optimize && GET_CODE (oldequiv) == REG
5709 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
5710 && spill_reg_order[REGNO (oldequiv)] >= 0
5711 && spill_reg_store[spill_reg_order[REGNO (oldequiv)]] != 0
5712 && find_reg_note (insn, REG_DEAD, reload_in[j])
5713 /* This is unsafe if operand occurs more than once in current
5714 insn. Perhaps some occurrences weren't reloaded. */
5715 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
5716 delete_output_reload
5717 (insn, j, spill_reg_store[spill_reg_order[REGNO (oldequiv)]]);
5719 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
5720 then load RELOADREG from OLDEQUIV. Note that we cannot use
5721 gen_lowpart_common since it can do the wrong thing when
5722 RELOADREG has a multi-word mode. Note that RELOADREG
5723 must always be a REG here. */
5725 if (GET_MODE (reloadreg) != mode)
5726 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
5727 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
5728 oldequiv = SUBREG_REG (oldequiv);
5729 if (GET_MODE (oldequiv) != VOIDmode
5730 && mode != GET_MODE (oldequiv))
5731 oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
5733 /* Switch to the right place to emit the reload insns. */
5734 switch (reload_when_needed[j])
5736 case RELOAD_OTHER:
5737 where = &other_input_reload_insns;
5738 break;
5739 case RELOAD_FOR_INPUT:
5740 where = &input_reload_insns[reload_opnum[j]];
5741 break;
5742 case RELOAD_FOR_INPUT_ADDRESS:
5743 where = &input_address_reload_insns[reload_opnum[j]];
5744 break;
5745 case RELOAD_FOR_OUTPUT_ADDRESS:
5746 where = &output_address_reload_insns[reload_opnum[j]];
5747 break;
5748 case RELOAD_FOR_OPERAND_ADDRESS:
5749 where = &operand_reload_insns;
5750 break;
5751 case RELOAD_FOR_OPADDR_ADDR:
5752 where = &other_operand_reload_insns;
5753 break;
5754 case RELOAD_FOR_OTHER_ADDRESS:
5755 where = &other_input_address_reload_insns;
5756 break;
5757 default:
5758 abort ();
5761 push_to_sequence (*where);
5762 special = 0;
5764 /* Auto-increment addresses must be reloaded in a special way. */
5765 if (GET_CODE (oldequiv) == POST_INC
5766 || GET_CODE (oldequiv) == POST_DEC
5767 || GET_CODE (oldequiv) == PRE_INC
5768 || GET_CODE (oldequiv) == PRE_DEC)
5770 /* We are not going to bother supporting the case where an
5771 incremented register can't be copied directly from
5772 OLDEQUIV, since this seems highly unlikely. */
5773 if (reload_secondary_in_reload[j] >= 0)
5774 abort ();
5775 /* Prevent normal processing of this reload. */
5776 special = 1;
5777 /* Output a special code sequence for this case. */
5778 inc_for_reload (reloadreg, oldequiv, reload_inc[j]);
5781 /* If we are reloading a pseudo-register that was set by the previous
5782 insn, see if we can get rid of that pseudo-register entirely
5783 by redirecting the previous insn into our reload register. */
5785 else if (optimize && GET_CODE (old) == REG
5786 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5787 && dead_or_set_p (insn, old)
5788 /* This is unsafe if some other reload
5789 uses the same reg first. */
5790 && reload_reg_free_before_p (REGNO (reloadreg),
5791 reload_opnum[j],
5792 reload_when_needed[j]))
5794 rtx temp = PREV_INSN (insn);
5795 while (temp && GET_CODE (temp) == NOTE)
5796 temp = PREV_INSN (temp);
5797 if (temp
5798 && GET_CODE (temp) == INSN
5799 && GET_CODE (PATTERN (temp)) == SET
5800 && SET_DEST (PATTERN (temp)) == old
5801 /* Make sure we can access insn_operand_constraint. */
5802 && asm_noperands (PATTERN (temp)) < 0
5803 /* This is unsafe if prev insn rejects our reload reg. */
5804 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
5805 reloadreg)
5806 /* This is unsafe if operand occurs more than once in current
5807 insn. Perhaps some occurrences aren't reloaded. */
5808 && count_occurrences (PATTERN (insn), old) == 1
5809 /* Don't risk splitting a matching pair of operands. */
5810 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
5812 /* Store into the reload register instead of the pseudo. */
5813 SET_DEST (PATTERN (temp)) = reloadreg;
5814 /* If these are the only uses of the pseudo reg,
5815 pretend for GDB it lives in the reload reg we used. */
5816 if (reg_n_deaths[REGNO (old)] == 1
5817 && reg_n_sets[REGNO (old)] == 1)
5819 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
5820 alter_reg (REGNO (old), -1);
5822 special = 1;
5826 /* We can't do that, so output an insn to load RELOADREG. */
5828 if (! special)
5830 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5831 rtx second_reload_reg = 0;
5832 enum insn_code icode;
5834 /* If we have a secondary reload, pick up the secondary register
5835 and icode, if any. If OLDEQUIV and OLD are different or
5836 if this is an in-out reload, recompute whether or not we
5837 still need a secondary register and what the icode should
5838 be. If we still need a secondary register and the class or
5839 icode is different, go back to reloading from OLD if using
5840 OLDEQUIV means that we got the wrong type of register. We
5841 cannot have different class or icode due to an in-out reload
5842 because we don't make such reloads when both the input and
5843 output need secondary reload registers. */
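/* For illustration (hypothetical target): a machine might be unable to
   load a floating register directly from memory and require going through
   a general register first.  That intermediate register is the secondary
   input reload register; when instead a special instruction pattern with a
   scratch register is required, the icode identifies that pattern and the
   secondary register is used as a scratch rather than an intermediate. */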
5845 if (reload_secondary_in_reload[j] >= 0)
5847 int secondary_reload = reload_secondary_in_reload[j];
5848 rtx real_oldequiv = oldequiv;
5849 rtx real_old = old;
5851 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
5852 and similarly for OLD.
5853 See comments in get_secondary_reload in reload.c. */
5854 if (GET_CODE (oldequiv) == REG
5855 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
5856 && reg_equiv_mem[REGNO (oldequiv)] != 0)
5857 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
5859 if (GET_CODE (old) == REG
5860 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5861 && reg_equiv_mem[REGNO (old)] != 0)
5862 real_old = reg_equiv_mem[REGNO (old)];
5864 second_reload_reg = reload_reg_rtx[secondary_reload];
5865 icode = reload_secondary_in_icode[j];
5867 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
5868 || (reload_in[j] != 0 && reload_out[j] != 0))
5870 enum reg_class new_class
5871 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5872 mode, real_oldequiv);
5874 if (new_class == NO_REGS)
5875 second_reload_reg = 0;
5876 else
5878 enum insn_code new_icode;
5879 enum machine_mode new_mode;
5881 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
5882 REGNO (second_reload_reg)))
5883 oldequiv = old, real_oldequiv = real_old;
5884 else
5886 new_icode = reload_in_optab[(int) mode];
5887 if (new_icode != CODE_FOR_nothing
5888 && ((insn_operand_predicate[(int) new_icode][0]
5889 && ! ((*insn_operand_predicate[(int) new_icode][0])
5890 (reloadreg, mode)))
5891 || (insn_operand_predicate[(int) new_icode][1]
5892 && ! ((*insn_operand_predicate[(int) new_icode][1])
5893 (real_oldequiv, mode)))))
5894 new_icode = CODE_FOR_nothing;
5896 if (new_icode == CODE_FOR_nothing)
5897 new_mode = mode;
5898 else
5899 new_mode = insn_operand_mode[(int) new_icode][2];
5901 if (GET_MODE (second_reload_reg) != new_mode)
5903 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
5904 new_mode))
5905 oldequiv = old, real_oldequiv = real_old;
5906 else
5907 second_reload_reg
5908 = gen_rtx (REG, new_mode,
5909 REGNO (second_reload_reg));
5915 /* If we still need a secondary reload register, check
5916 to see if it is being used as a scratch or intermediate
5917 register and generate code appropriately. If we need
5918 a scratch register, use REAL_OLDEQUIV since the form of
5919 the insn may depend on the actual address if it is
5920 a MEM. */
5922 if (second_reload_reg)
5924 if (icode != CODE_FOR_nothing)
5926 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
5927 second_reload_reg));
5928 special = 1;
5930 else
5932 /* See if we need a scratch register to load the
5933 intermediate register (a tertiary reload). */
5934 enum insn_code tertiary_icode
5935 = reload_secondary_in_icode[secondary_reload];
5937 if (tertiary_icode != CODE_FOR_nothing)
5939 rtx third_reload_reg
5940 = reload_reg_rtx[reload_secondary_in_reload[secondary_reload]];
5942 emit_insn ((GEN_FCN (tertiary_icode)
5943 (second_reload_reg, real_oldequiv,
5944 third_reload_reg)));
5946 else
5947 gen_input_reload (second_reload_reg, oldequiv,
5948 reload_opnum[j],
5949 reload_when_needed[j]);
5951 oldequiv = second_reload_reg;
5955 #endif
5957 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
5958 gen_input_reload (reloadreg, oldequiv, reload_opnum[j],
5959 reload_when_needed[j]);
5961 #if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
5962 /* We may have to make a REG_DEAD note for the secondary reload
5963 register in the insns we just made. Find the last insn that
5964 mentioned the register. */
5965 if (! special && second_reload_reg
5966 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
5968 rtx prev;
5970 for (prev = get_last_insn (); prev;
5971 prev = PREV_INSN (prev))
5972 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
5973 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
5974 PATTERN (prev)))
5976 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
5977 second_reload_reg,
5978 REG_NOTES (prev));
5979 break;
5982 #endif
5985 /* End this sequence. */
5986 *where = get_insns ();
5987 end_sequence ();
5990 /* Add a note saying the input reload reg
5991 dies in this insn, if anyone cares. */
5992 #ifdef PRESERVE_DEATH_INFO_REGNO_P
5993 if (old != 0
5994 && reload_reg_rtx[j] != old
5995 && reload_reg_rtx[j] != 0
5996 && reload_out[j] == 0
5997 && ! reload_inherited[j]
5998 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
6000 register rtx reloadreg = reload_reg_rtx[j];
6002 #if 0
6003 /* We can't abort here because we need to support this for sched.c.
6004 It's not terrible to miss a REG_DEAD note, but we should try
6005 to figure out how to do this correctly. */
6006 /* The code below is incorrect for address-only reloads. */
6007 if (reload_when_needed[j] != RELOAD_OTHER
6008 && reload_when_needed[j] != RELOAD_FOR_INPUT)
6009 abort ();
6010 #endif
6012 /* Add a death note to this insn, for an input reload. */
6014 if ((reload_when_needed[j] == RELOAD_OTHER
6015 || reload_when_needed[j] == RELOAD_FOR_INPUT)
6016 && ! dead_or_set_p (insn, reloadreg))
6017 REG_NOTES (insn)
6018 = gen_rtx (EXPR_LIST, REG_DEAD,
6019 reloadreg, REG_NOTES (insn));
6022 /* When we inherit a reload, the last marked death of the reload reg
6023 may no longer really be a death. */
6024 if (reload_reg_rtx[j] != 0
6025 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
6026 && reload_inherited[j])
6028 /* Handle inheriting an output reload.
6029 Remove the death note from the output reload insn. */
6030 if (reload_spill_index[j] >= 0
6031 && GET_CODE (reload_in[j]) == REG
6032 && spill_reg_store[reload_spill_index[j]] != 0
6033 && find_regno_note (spill_reg_store[reload_spill_index[j]],
6034 REG_DEAD, REGNO (reload_reg_rtx[j])))
6035 remove_death (REGNO (reload_reg_rtx[j]),
6036 spill_reg_store[reload_spill_index[j]]);
6037 /* Likewise for input reloads that were inherited. */
6038 else if (reload_spill_index[j] >= 0
6039 && GET_CODE (reload_in[j]) == REG
6040 && spill_reg_store[reload_spill_index[j]] == 0
6041 && reload_inheritance_insn[j] != 0
6042 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
6043 REGNO (reload_reg_rtx[j])))
6044 remove_death (REGNO (reload_reg_rtx[j]),
6045 reload_inheritance_insn[j]);
6046 else
6048 rtx prev;
6050 /* We got this register from find_equiv_reg.
6051 Search back for its last death note and get rid of it.
6052 But don't search back too far.
6053 Don't go past a place where this reg is set,
6054 since a death note before that remains valid. */
6055 for (prev = PREV_INSN (insn);
6056 prev && GET_CODE (prev) != CODE_LABEL;
6057 prev = PREV_INSN (prev))
6058 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6059 && dead_or_set_p (prev, reload_reg_rtx[j]))
6061 if (find_regno_note (prev, REG_DEAD,
6062 REGNO (reload_reg_rtx[j])))
6063 remove_death (REGNO (reload_reg_rtx[j]), prev);
6064 break;
6069 /* We might have used find_equiv_reg above to choose an alternate
6070 place from which to reload. If so, and it died, we need to remove
6071 that death and move it to one of the insns we just made. */
6073 if (oldequiv_reg != 0
6074 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
6076 rtx prev, prev1;
6078 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
6079 prev = PREV_INSN (prev))
6080 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6081 && dead_or_set_p (prev, oldequiv_reg))
6083 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
6085 for (prev1 = this_reload_insn;
6086 prev1; prev1 = PREV_INSN (prev1))
6087 if (GET_RTX_CLASS (GET_CODE (prev1)) == 'i'
6088 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
6089 PATTERN (prev1)))
6091 REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
6092 oldequiv_reg,
6093 REG_NOTES (prev1));
6094 break;
6096 remove_death (REGNO (oldequiv_reg), prev);
6098 break;
6101 #endif
6103 /* If we are reloading a register that was recently stored in with an
6104 output-reload, see if we can prove there was
6105 actually no need to store the old value in it. */
6107 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
6108 && reload_in[j] != 0
6109 && GET_CODE (reload_in[j]) == REG
6110 #if 0
6111 /* There doesn't seem to be any reason to restrict this to pseudos
6112 and doing so loses in the case where we are copying from a
6113 register of the wrong class. */
6114 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
6115 #endif
6116 && spill_reg_store[reload_spill_index[j]] != 0
6117 /* This is unsafe if some other reload uses the same reg first. */
6118 && reload_reg_free_before_p (spill_regs[reload_spill_index[j]],
6119 reload_opnum[j], reload_when_needed[j])
6120 && dead_or_set_p (insn, reload_in[j])
6121 /* This is unsafe if operand occurs more than once in current
6122 insn. Perhaps some occurrences weren't reloaded. */
6123 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
6124 delete_output_reload (insn, j,
6125 spill_reg_store[reload_spill_index[j]]);
6127 /* Input-reloading is done. Now do output-reloading,
6128 storing the value from the reload-register after the main insn
6129 if reload_out[j] is nonzero.
6131 ??? At some point we need to support handling output reloads of
6132 JUMP_INSNs or insns that set cc0. */
6133 old = reload_out[j];
6134 if (old != 0
6135 && reload_reg_rtx[j] != old
6136 && reload_reg_rtx[j] != 0)
6138 register rtx reloadreg = reload_reg_rtx[j];
6139 register rtx second_reloadreg = 0;
6140 rtx note, p;
6141 enum machine_mode mode;
6142 int special = 0;
6144 /* An output operand that dies right away does need a reload,
6145 but need not be copied from it. Show the new location in the
6146 REG_UNUSED note. */
6147 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
6148 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
6150 XEXP (note, 0) = reload_reg_rtx[j];
6151 continue;
6153 else if (GET_CODE (old) == SCRATCH)
6154 /* If we aren't optimizing, there won't be a REG_UNUSED note,
6155 but we don't want to make an output reload. */
6156 continue;
6158 #if 0
6159 /* Strip off of OLD any size-increasing SUBREGs such as
6160 (SUBREG:SI foo:QI 0). */
6162 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
6163 && (GET_MODE_SIZE (GET_MODE (old))
6164 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
6165 old = SUBREG_REG (old);
6166 #endif
6168 /* If this is a JUMP_INSN, we can't support output reloads yet. */
6169 if (GET_CODE (insn) == JUMP_INSN)
6170 abort ();
6172 push_to_sequence (output_reload_insns[reload_opnum[j]]);
6174 /* Determine the mode to reload in.
6175 See comments above (for input reloading). */
6177 mode = GET_MODE (old);
6178 if (mode == VOIDmode)
6180 /* VOIDmode should never happen for an output. */
6181 if (asm_noperands (PATTERN (insn)) < 0)
6182 /* It's the compiler's fault. */
6183 abort ();
6184 error_for_asm (insn, "output operand is constant in `asm'");
6185 /* Prevent crash--use something we know is valid. */
6186 mode = word_mode;
6187 old = gen_rtx (REG, mode, REGNO (reloadreg));
6190 if (GET_MODE (reloadreg) != mode)
6191 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6193 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6195 /* If we need two reload regs, set RELOADREG to the intermediate
6196 one, since it will be stored into OUT. We might need a secondary
6197 register only for an input reload, so check again here. */
6199 if (reload_secondary_out_reload[j] >= 0)
6201 rtx real_old = old;
6203 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
6204 && reg_equiv_mem[REGNO (old)] != 0)
6205 real_old = reg_equiv_mem[REGNO (old)];
6207 if ((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
6208 mode, real_old)
6209 != NO_REGS))
6211 second_reloadreg = reloadreg;
6212 reloadreg = reload_reg_rtx[reload_secondary_out_reload[j]];
6214 /* See if RELOADREG is to be used as a scratch register
6215 or as an intermediate register. */
6216 if (reload_secondary_out_icode[j] != CODE_FOR_nothing)
6218 emit_insn ((GEN_FCN (reload_secondary_out_icode[j])
6219 (real_old, second_reloadreg, reloadreg)));
6220 special = 1;
6222 else
6224 /* See if we need both a scratch and intermediate reload
6225 register. */
6226 int secondary_reload = reload_secondary_out_reload[j];
6227 enum insn_code tertiary_icode
6228 = reload_secondary_out_icode[secondary_reload];
6229 rtx pat;
6231 if (GET_MODE (reloadreg) != mode)
6232 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6234 if (tertiary_icode != CODE_FOR_nothing)
6236 rtx third_reloadreg
6237 = reload_reg_rtx[reload_secondary_out_reload[secondary_reload]];
6238 pat = (GEN_FCN (tertiary_icode)
6239 (reloadreg, second_reloadreg, third_reloadreg));
6241 #ifdef SECONDARY_MEMORY_NEEDED
6242 /* If we need a memory location to do the move, do it that way. */
6243 else if (GET_CODE (reloadreg) == REG
6244 && REGNO (reloadreg) < FIRST_PSEUDO_REGISTER
6245 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (reloadreg)),
6246 REGNO_REG_CLASS (REGNO (second_reloadreg)),
6247 GET_MODE (second_reloadreg)))
6249 /* Get the memory to use and rewrite both registers
6250 to its mode. */
6251 rtx loc
6252 = get_secondary_mem (reloadreg,
6253 GET_MODE (second_reloadreg),
6254 reload_opnum[j],
6255 reload_when_needed[j]);
6256 rtx tmp_reloadreg;
6258 if (GET_MODE (loc) != GET_MODE (second_reloadreg))
6259 second_reloadreg = gen_rtx (REG, GET_MODE (loc),
6260 REGNO (second_reloadreg));
6262 if (GET_MODE (loc) != GET_MODE (reloadreg))
6263 tmp_reloadreg = gen_rtx (REG, GET_MODE (loc),
6264 REGNO (reloadreg));
6265 else
6266 tmp_reloadreg = reloadreg;
6268 emit_move_insn (loc, second_reloadreg);
6269 pat = gen_move_insn (tmp_reloadreg, loc);
6271 #endif
6272 else
6273 pat = gen_move_insn (reloadreg, second_reloadreg);
6275 emit_insn (pat);
6279 #endif
6281 /* Output the last reload insn. */
6282 if (! special)
6284 #ifdef SECONDARY_MEMORY_NEEDED
6285 /* If we need a memory location to do the move, do it that way. */
6286 if (GET_CODE (old) == REG && REGNO (old) < FIRST_PSEUDO_REGISTER
6287 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (old)),
6288 REGNO_REG_CLASS (REGNO (reloadreg)),
6289 GET_MODE (reloadreg)))
6291 /* Get the memory to use and rewrite both registers to
6292 its mode. */
6293 rtx loc = get_secondary_mem (old, GET_MODE (reloadreg),
6294 reload_opnum[j],
6295 reload_when_needed[j]);
6297 if (GET_MODE (loc) != GET_MODE (reloadreg))
6298 reloadreg = gen_rtx (REG, GET_MODE (loc),
6299 REGNO (reloadreg));
6301 if (GET_MODE (loc) != GET_MODE (old))
6302 old = gen_rtx (REG, GET_MODE (loc), REGNO (old));
6304 emit_insn (gen_move_insn (loc, reloadreg));
6305 emit_insn (gen_move_insn (old, loc));
6307 else
6308 #endif
6309 emit_insn (gen_move_insn (old, reloadreg));
6312 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6313 /* If final will look at death notes for this reg,
6314 put one on the last output-reload insn to use it. Similarly
6315 for any secondary register. */
6316 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
6317 for (p = get_last_insn (); p; p = PREV_INSN (p))
6318 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6319 && reg_overlap_mentioned_for_reload_p (reloadreg,
6320 PATTERN (p)))
6321 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6322 reloadreg, REG_NOTES (p));
6324 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6325 if (! special
6326 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
6327 for (p = get_last_insn (); p; p = PREV_INSN (p))
6328 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6329 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
6330 PATTERN (p)))
6331 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6332 second_reloadreg, REG_NOTES (p));
6333 #endif
6334 #endif
6335 /* Look at all insns we emitted, just to be safe. */
6336 for (p = get_insns (); p; p = NEXT_INSN (p))
6337 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
6339 /* If this output reload doesn't come from a spill reg,
6340 clear any memory of reloaded copies of the pseudo reg.
6341 If this output reload comes from a spill reg,
6342 reg_has_output_reload will make this do nothing. */
6343 note_stores (PATTERN (p), forget_old_reloads_1);
6345 if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p)))
6346 store_insn = p;
6349 output_reload_insns[reload_opnum[j]] = get_insns ();
6350 end_sequence ();
6354 if (reload_spill_index[j] >= 0)
6355 new_spill_reg_store[reload_spill_index[j]] = store_insn;
6358 /* Now write all the insns we made for reloads in the order expected by
6359 the allocation functions. Prior to the insn being reloaded, we write
6360 the following reloads:
6362 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
6364 RELOAD_OTHER reloads.
6366 For each operand, any RELOAD_FOR_INPUT_ADDRESS reloads followed by
6367 the RELOAD_FOR_INPUT reload for the operand.
6369 RELOAD_FOR_OPADDR_ADDR reloads.
6371 RELOAD_FOR_OPERAND_ADDRESS reloads.
6373 After the insn being reloaded, we write the following:
6375 For each operand, any RELOAD_FOR_OUTPUT_ADDRESS reload followed by
6376 the RELOAD_FOR_OUTPUT reload for that operand. */
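/* Schematically, for an insn I with two operands this produces
   (hypothetical layout):
       other-address reloads, RELOAD_OTHER input reloads,
       operand 0 input-address reloads, operand 0 input reload,
       operand 1 input-address reloads, operand 1 input reload,
       RELOAD_FOR_OPADDR_ADDR reloads, operand-address reloads,
       I,
       operand 0 output-address reloads, operand 0 output reload,
       operand 1 output-address reloads, operand 1 output reload. */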
6378 emit_insns_before (other_input_address_reload_insns, before_insn);
6379 emit_insns_before (other_input_reload_insns, before_insn);
6381 for (j = 0; j < reload_n_operands; j++)
6383 emit_insns_before (input_address_reload_insns[j], before_insn);
6384 emit_insns_before (input_reload_insns[j], before_insn);
6387 emit_insns_before (other_operand_reload_insns, before_insn);
6388 emit_insns_before (operand_reload_insns, before_insn);
6390 for (j = 0; j < reload_n_operands; j++)
6392 emit_insns_before (output_address_reload_insns[j], following_insn);
6393 emit_insns_before (output_reload_insns[j], following_insn);
6396 /* Move death notes from INSN
6397 to output-operand-address and output reload insns. */
6398 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6400 rtx insn1;
6401 /* Loop over those insns, last ones first. */
6402 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
6403 insn1 = PREV_INSN (insn1))
6404 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
6406 rtx source = SET_SRC (PATTERN (insn1));
6407 rtx dest = SET_DEST (PATTERN (insn1));
6409 /* The note we will examine next. */
6410 rtx reg_notes = REG_NOTES (insn);
6411 /* The place that pointed to this note. */
6412 rtx *prev_reg_note = &REG_NOTES (insn);
6414 /* If the note is for something used in the source of this
6415 reload insn, or in the output address, move the note. */
6416 while (reg_notes)
6418 rtx next_reg_notes = XEXP (reg_notes, 1);
6419 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
6420 && GET_CODE (XEXP (reg_notes, 0)) == REG
6421 && ((GET_CODE (dest) != REG
6422 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6423 dest))
6424 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6425 source)))
6427 *prev_reg_note = next_reg_notes;
6428 XEXP (reg_notes, 1) = REG_NOTES (insn1);
6429 REG_NOTES (insn1) = reg_notes;
6431 else
6432 prev_reg_note = &XEXP (reg_notes, 1);
6434 reg_notes = next_reg_notes;
6438 #endif
6440 /* For all the spill regs newly reloaded in this instruction,
6441 record what they were reloaded from, so subsequent instructions
6442 can inherit the reloads.
6444 Update spill_reg_store for the reloads of this insn.
6445 Copy the elements that were updated in the loop above. */
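/* For illustration (hypothetical numbers): if this insn reloaded pseudo
   150 into spill reg 3 and that value survives to the end of the insn,
   then reg_last_reload_reg[150] is set to that REG and
   reg_reloaded_contents for spill reg 3 records 150, so a later insn that
   needs pseudo 150 can inherit the value from spill reg 3 instead of
   loading it again. */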
6447 for (j = 0; j < n_reloads; j++)
6449 register int r = reload_order[j];
6450 register int i = reload_spill_index[r];
6452 /* I is nonneg if this reload used one of the spill regs.
6453 If reload_reg_rtx[r] is 0, this is an optional reload
6454 that we opted to ignore.
6456 Also ignore reloads that don't reach the end of the insn,
6457 since we will eventually see the one that does. */
6459 if (i >= 0 && reload_reg_rtx[r] != 0
6460 && reload_reg_reaches_end_p (spill_regs[i], reload_opnum[r],
6461 reload_when_needed[r]))
6463 /* First, clear out memory of what used to be in this spill reg.
6464 If consecutive registers are used, clear them all. */
6465 int nr
6466 = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
6467 int k;
6469 for (k = 0; k < nr; k++)
6471 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
6472 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
6475 /* Maybe the spill reg contains a copy of reload_out. */
6476 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6478 register int nregno = REGNO (reload_out[r]);
6479 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6480 : HARD_REGNO_NREGS (nregno,
6481 GET_MODE (reload_reg_rtx[r])));
6483 spill_reg_store[i] = new_spill_reg_store[i];
6484 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6486 /* If NREGNO is a hard register, it may occupy more than
6487 one register. If it does, say what is in the
6488 rest of the registers assuming that both registers
6489 agree on how many words the object takes. If not,
6490 invalidate the subsequent registers. */
6492 if (nregno < FIRST_PSEUDO_REGISTER)
6493 for (k = 1; k < nnr; k++)
6494 reg_last_reload_reg[nregno + k]
6495 = (nr == nnr ? gen_rtx (REG,
6496 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6497 REGNO (reload_reg_rtx[r]) + k)
6498 : 0);
6500 /* Now do the inverse operation. */
6501 for (k = 0; k < nr; k++)
6503 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6504 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6505 : nregno + k);
6506 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
6510 /* Maybe the spill reg contains a copy of reload_in. Only do
6511 something if there will not be an output reload for
6512 the register being reloaded. */
6513 else if (reload_out[r] == 0
6514 && reload_in[r] != 0
6515 && ((GET_CODE (reload_in[r]) == REG
6516 && ! reg_has_output_reload[REGNO (reload_in[r])]
6517 || (GET_CODE (reload_in_reg[r]) == REG
6518 && ! reg_has_output_reload[REGNO (reload_in_reg[r])]))))
6520 register int nregno;
6521 int nnr;
6523 if (GET_CODE (reload_in[r]) == REG)
6524 nregno = REGNO (reload_in[r]);
6525 else
6526 nregno = REGNO (reload_in_reg[r]);
6528 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6529 : HARD_REGNO_NREGS (nregno,
6530 GET_MODE (reload_reg_rtx[r])));
6532 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6534 if (nregno < FIRST_PSEUDO_REGISTER)
6535 for (k = 1; k < nnr; k++)
6536 reg_last_reload_reg[nregno + k]
6537 = (nr == nnr ? gen_rtx (REG,
6538 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6539 REGNO (reload_reg_rtx[r]) + k)
6540 : 0);
6542 /* Unless we inherited this reload, show we haven't
6543 recently done a store. */
6544 if (! reload_inherited[r])
6545 spill_reg_store[i] = 0;
6547 for (k = 0; k < nr; k++)
6549 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6550 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6551 : nregno + k);
6552 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
6553 = insn;
6558 /* The following if-statement was #if 0'd in 1.34 (or before...).
6559 It's reenabled in 1.35 because supposedly nothing else
6560 deals with this problem. */
6562 /* If a register gets output-reloaded from a non-spill register,
6563 that invalidates any previous reloaded copy of it.
6564 But forget_old_reloads_1 won't get to see it, because
6565 it thinks only about the original insn. So invalidate it here. */
6566 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6568 register int nregno = REGNO (reload_out[r]);
6569 int num_regs = HARD_REGNO_NREGS (nregno, GET_MODE (reload_out[r]));
6571 while (num_regs-- > 0)
6572 reg_last_reload_reg[nregno + num_regs] = 0;
6577 /* Emit code to perform an input reload of IN to RELOADREG. IN is from
6578 operand OPNUM with reload type TYPE.
6580 Returns first insn emitted. */
6582 rtx
6583 gen_input_reload (reloadreg, in, opnum, type)
6584 rtx reloadreg;
6585 rtx in;
6586 int opnum;
6587 enum reload_type type;
6589 rtx last = get_last_insn ();
6591 /* How to do this reload can get quite tricky. Normally, we are being
6592 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
6593 register that didn't get a hard register. In that case we can just
6594 call emit_move_insn.
6596 We can also be asked to reload a PLUS that adds a register or a MEM to
6597 another register, constant or MEM. This can occur during frame pointer
6598 elimination and while reloading addresses. This case is handled by
6599 trying to emit a single insn to perform the add. If it is not valid,
6600 we use a two insn sequence.
6602 Finally, we could be called to handle an 'o' constraint by putting
6603 an address into a register. In that case, we first try to do this
6604 with a named pattern of "reload_load_address". If no such pattern
6605 exists, we just emit a SET insn and hope for the best (it will normally
6606 be valid on machines that use 'o').
6608 This entire process is made complex because reload will never
6609 process the insns we generate here, so we must ensure that they
6610 will fit their constraints, and also because parts of IN might
6611 be being reloaded separately and replaced with spill registers.
6612 Because of this, we are, in some sense, just guessing the right approach
6613 here. The one listed above seems to work.
6615 ??? At some point, this whole thing needs to be rethought. */
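/* For illustration (hypothetical offset): after frame pointer elimination
   IN might be (plus:SI (reg:SI sp) (const_int 40)).  We first try the
   single insn (set reloadreg (plus sp 40)); if the target does not
   recognize or accept it, we fall back to moving one operand into
   reloadreg and then adding the other, as described above. */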
6617 if (GET_CODE (in) == PLUS
6618 && (GET_CODE (XEXP (in, 0)) == REG
6619 || GET_CODE (XEXP (in, 0)) == MEM)
6620 && (GET_CODE (XEXP (in, 1)) == REG
6621 || CONSTANT_P (XEXP (in, 1))
6622 || GET_CODE (XEXP (in, 1)) == MEM))
6624 /* We need to compute the sum of a register or a MEM and another
6625 register, constant, or MEM, and put it into the reload
6626 register. The best possible way of doing this is if the machine
6627 has a three-operand ADD insn that accepts the required operands.
6629 The simplest approach is to try to generate such an insn and see if it
6630 is recognized and matches its constraints. If so, it can be used.
6632 It might be better not to actually emit the insn unless it is valid,
6633 but we need to pass the insn as an operand to `recog' and
6634 `insn_extract' and it is simpler to emit and then delete the insn if
6635 not valid than to dummy things up. */
6637 rtx op0, op1, tem, insn;
6638 int code;
6640 op0 = find_replacement (&XEXP (in, 0));
6641 op1 = find_replacement (&XEXP (in, 1));
6643 /* Since constraint checking is strict, commutativity won't be
6644 checked, so we need to do that here to avoid spurious failure
6645 if the add instruction is two-address and the second operand
6646 of the add is the same as the reload reg, which is frequently
6647 the case. If the insn would be A = B + A, rearrange it so
6648 it will be A = A + B as constrain_operands expects. */
6650 if (GET_CODE (XEXP (in, 1)) == REG
6651 && REGNO (reloadreg) == REGNO (XEXP (in, 1)))
6652 tem = op0, op0 = op1, op1 = tem;
6654 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
6655 in = gen_rtx (PLUS, GET_MODE (in), op0, op1);
6657 insn = emit_insn (gen_rtx (SET, VOIDmode, reloadreg, in));
6658 code = recog_memoized (insn);
6660 if (code >= 0)
6662 insn_extract (insn);
6663 /* We want constrain operands to treat this insn strictly in
6664 its validity determination, i.e., the way it would after reload
6665 has completed. */
6666 if (constrain_operands (code, 1))
6667 return insn;
6670 delete_insns_since (last);
6672 /* If that failed, we must use a conservative two-insn sequence:
6673 use a move to copy the constant, MEM, or pseudo register into the reload
6674 register, since "move" will be able to handle an arbitrary operand,
6675 unlike add, which can't in general. Then add the registers.
6677 If there is another way to do this for a specific machine, a
6678 DEFINE_PEEPHOLE should be specified that recognizes the sequence
6679 we emit below. */
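/* For illustration (hypothetical operands): if op1 is (const_int 40) and
   op0 is a hard register, the swap below makes op0 the constant, so we
   emit a move of the constant into reloadreg followed by an add of the
   hard register into reloadreg. */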
6681 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM
6682 || (GET_CODE (op1) == REG
6683 && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
6684 tem = op0, op0 = op1, op1 = tem;
6686 emit_insn (gen_move_insn (reloadreg, op0));
6688 /* If OP0 and OP1 are the same, we can use RELOADREG for OP1.
6689 This fixes a problem on the 32K where the stack pointer cannot
6690 be used as an operand of an add insn. */
6692 if (rtx_equal_p (op0, op1))
6693 op1 = reloadreg;
6695 insn = emit_insn (gen_add2_insn (reloadreg, op1));
6697 /* If that failed, copy the address register to the reload register.
6698 Then add the constant to the reload register. */
6700 code = recog_memoized (insn);
6702 if (code >= 0)
6703 {
6704 insn_extract (insn);
6705 /* We want constrain operands to treat this insn strictly in
6706 its validity determination, i.e., the way it would after reload
6707 has completed. */
6708 if (constrain_operands (code, 1))
6709 return insn;
6710 }
6712 delete_insns_since (last);
6714 emit_insn (gen_move_insn (reloadreg, op1));
6715 emit_insn (gen_add2_insn (reloadreg, op0));
6716 }
6718 #ifdef SECONDARY_MEMORY_NEEDED
6719 /* If we need a memory location to do the move, do it that way. */
6720 else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
6721 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
6722 REGNO_REG_CLASS (REGNO (reloadreg)),
6723 GET_MODE (reloadreg)))
6724 {
6725 /* Get the memory to use and rewrite both registers to its mode. */
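/* A sketch of the result, for a hypothetical machine where a move between
   these two register classes must pass through memory: with
   IN = (reg:DF 20) and RELOADREG = (reg:DF 8), the two moves below would be
   roughly
   (set (mem:DF <secondary memory slot>) (reg:DF 20))
   (set (reg:DF 8) (mem:DF <secondary memory slot>))
   with the slot supplied by get_secondary_mem.  */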
6726 rtx loc = get_secondary_mem (in, GET_MODE (reloadreg), opnum, type);
6728 if (GET_MODE (loc) != GET_MODE (reloadreg))
6729 reloadreg = gen_rtx (REG, GET_MODE (loc), REGNO (reloadreg));
6731 if (GET_MODE (loc) != GET_MODE (in))
6732 in = gen_rtx (REG, GET_MODE (loc), REGNO (in));
6734 emit_insn (gen_move_insn (loc, in));
6735 emit_insn (gen_move_insn (reloadreg, loc));
6736 }
6737 #endif
6739 /* If IN is a simple operand, use gen_move_insn. */
6740 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
6741 emit_insn (gen_move_insn (reloadreg, in));
6743 #ifdef HAVE_reload_load_address
6744 else if (HAVE_reload_load_address)
6745 emit_insn (gen_reload_load_address (reloadreg, in));
6746 #endif
6748 /* Otherwise, just write (set RELOADREG IN) and hope for the best. */
6749 else
6750 emit_insn (gen_rtx (SET, VOIDmode, reloadreg, in));
6752 /* Return the first insn emitted.
6753 We cannot just return get_last_insn, because there may have
6754 been multiple instructions emitted. Also note that gen_move_insn may
6755 emit more than one insn itself, so we cannot assume that there is one
6756 insn emitted per emit_insn call. */
6758 return last ? NEXT_INSN (last) : get_insns ();
6759 }
6761 /* Delete a previously made output-reload
6762 whose result we now believe is not needed.
6763 First we double-check.
6765 INSN is the insn now being processed.
6766 OUTPUT_RELOAD_INSN is the insn of the output reload.
6767 J is the reload-number for this insn. */
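/* As a hypothetical example: suppose the output reload insn stored the
   reload register back into the pseudo, say
   (set (reg:SI 104) (reg:SI 0)),
   and pseudo 104 is not referenced again between that store and INSN,
   where it dies.  Then the store is useless and can be deleted, and if
   the pseudo turns out to be referenced only through reload registers,
   its stack slot can be discarded as well (see the end of this
   function).  */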
6769 static void
6770 delete_output_reload (insn, j, output_reload_insn)
6771 rtx insn;
6772 int j;
6773 rtx output_reload_insn;
6774 {
6775 register rtx i1;
6777 /* Get the raw pseudo-register referred to. */
6779 rtx reg = reload_in[j];
6780 while (GET_CODE (reg) == SUBREG)
6781 reg = SUBREG_REG (reg);
6783 /* If the pseudo-reg we are reloading is no longer referenced
6784 anywhere between the store into it and here,
6785 and no jumps or labels intervene, then the value can get
6786 here through the reload reg alone.
6787 Otherwise, give up--return. */
6788 for (i1 = NEXT_INSN (output_reload_insn);
6789 i1 != insn; i1 = NEXT_INSN (i1))
6790 {
6791 if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
6792 return;
6793 if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
6794 && reg_mentioned_p (reg, PATTERN (i1)))
6795 return;
6796 }
6798 if (cannot_omit_stores[REGNO (reg)])
6799 return;
6801 /* If this insn will store in the pseudo again,
6802 the previous store can be removed. */
6803 if (reload_out[j] == reload_in[j])
6804 delete_insn (output_reload_insn);
6806 /* See if the pseudo reg has been completely replaced
6807 with reload regs. If so, delete the store insn
6808 and forget we had a stack slot for the pseudo. */
6809 else if (reg_n_deaths[REGNO (reg)] == 1
6810 && reg_basic_block[REGNO (reg)] >= 0
6811 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
6812 {
6813 rtx i2;
6815 /* We know that it was used only between here
6816 and the beginning of the current basic block.
6817 (We also know that the last use before INSN was
6818 the output reload we are thinking of deleting, but never mind that.)
6819 Search that range; see if any ref remains. */
6820 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
6821 {
6822 rtx set = single_set (i2);
6824 /* Uses which just store in the pseudo don't count,
6825 since if they are the only uses, they are dead. */
6826 if (set != 0 && SET_DEST (set) == reg)
6827 continue;
6828 if (GET_CODE (i2) == CODE_LABEL
6829 || GET_CODE (i2) == JUMP_INSN)
6830 break;
6831 if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
6832 && reg_mentioned_p (reg, PATTERN (i2)))
6833 /* Some other ref remains;
6834 we can't do anything. */
6835 return;
6836 }
6838 /* Delete the now-dead stores into this pseudo. */
6839 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
6840 {
6841 rtx set = single_set (i2);
6843 if (set != 0 && SET_DEST (set) == reg)
6844 delete_insn (i2);
6845 if (GET_CODE (i2) == CODE_LABEL
6846 || GET_CODE (i2) == JUMP_INSN)
6847 break;
6848 }
6850 /* For the debugging info,
6851 say the pseudo lives in this reload reg. */
6852 reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
6853 alter_reg (REGNO (reg), -1);
6854 }
6855 }
6857 /* Output reload-insns to reload VALUE into RELOADREG.
6858 VALUE is an autoincrement or autodecrement RTX whose operand
6859 is a register or memory location;
6860 so reloading involves incrementing that location.
6862 INC_AMOUNT is the number to increment or decrement by (always positive).
6863 This cannot be deduced from VALUE. */
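/* For instance (hypothetical operands), VALUE might be
   (post_inc:SI (reg:SI 5)) with INC_AMOUNT 4: (reg:SI 5) must end up
   incremented by 4, while RELOADREG must hold the value it had before
   the increment.  A POST_INC rtx does not record the amount, so the
   caller has to pass it separately.  */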
6865 static void
6866 inc_for_reload (reloadreg, value, inc_amount)
6867 rtx reloadreg;
6868 rtx value;
6869 int inc_amount;
6870 {
6871 /* REG or MEM to be copied and incremented. */
6872 rtx incloc = XEXP (value, 0);
6873 /* Nonzero if increment after copying. */
6874 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
6875 rtx last;
6876 rtx inc;
6877 rtx add_insn;
6878 int code;
6880 /* No hard register is equivalent to this register after
6881 inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
6882 we could inc/dec that register as well (maybe even using it for
6883 the source), but I'm not sure it's worth worrying about. */
6884 if (GET_CODE (incloc) == REG)
6885 reg_last_reload_reg[REGNO (incloc)] = 0;
6887 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
6888 inc_amount = - inc_amount;
6890 inc = GEN_INT (inc_amount);
6892 /* If this is post-increment, first copy the location to the reload reg. */
6893 if (post)
6894 emit_insn (gen_move_insn (reloadreg, incloc));
6896 /* See if we can directly increment INCLOC. Use a method similar to that
6897 in gen_input_reload. */
6899 last = get_last_insn ();
6900 add_insn = emit_insn (gen_rtx (SET, VOIDmode, incloc,
6901 gen_rtx (PLUS, GET_MODE (incloc),
6902 incloc, inc)));
6904 code = recog_memoized (add_insn);
6905 if (code >= 0)
6906 {
6907 insn_extract (add_insn);
6908 if (constrain_operands (code, 1))
6909 {
6910 /* If this is a pre-increment and we have incremented the value
6911 where it lives, copy the incremented value to RELOADREG to
6912 be used as an address. */
6914 if (! post)
6915 emit_insn (gen_move_insn (reloadreg, incloc));
6917 return;
6918 }
6919 }
6921 delete_insns_since (last);
6923 /* If we couldn't do the increment directly, we must increment in RELOADREG.
6924 The way we do this depends on whether this is pre- or post-increment.
6925 For pre-increment, copy INCLOC to the reload register, increment it
6926 there, then save back. */
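/* A sketch of that pre-increment fallback, with hypothetical operands
   INCLOC = (reg:SI 5), INC_AMOUNT = 4 and RELOADREG = (reg:SI 2):
   (set (reg:SI 2) (reg:SI 5))
   (set (reg:SI 2) (plus:SI (reg:SI 2) (const_int 4)))
   (set (reg:SI 5) (reg:SI 2))
   leaving both INCLOC and RELOADREG holding the incremented value.  */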
6928 if (! post)
6929 {
6930 emit_insn (gen_move_insn (reloadreg, incloc));
6931 emit_insn (gen_add2_insn (reloadreg, inc));
6932 emit_insn (gen_move_insn (incloc, reloadreg));
6933 }
6934 else
6935 {
6936 /* Postincrement.
6937 Because this might be a jump insn or a compare, and because RELOADREG
6938 may not be available after the insn in an input reload, we must do
6939 the incrementation before the insn being reloaded for.
6941 We have already copied INCLOC to RELOADREG. Increment the copy in
6942 RELOADREG, save that back, then decrement RELOADREG so it has
6943 the original value. */
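/* Roughly, with the same hypothetical operands, the post-increment case
   emits (after the copy of INCLOC into RELOADREG made earlier):
   (set (reg:SI 2) (plus:SI (reg:SI 2) (const_int 4)))
   (set (reg:SI 5) (reg:SI 2))
   (set (reg:SI 2) (plus:SI (reg:SI 2) (const_int -4)))
   so that INCLOC ends up incremented while RELOADREG is restored to the
   original value for use by the insn being reloaded for.  */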
6945 emit_insn (gen_add2_insn (reloadreg, inc));
6946 emit_insn (gen_move_insn (incloc, reloadreg));
6947 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
6948 }
6950 return;
6951 }
6953 /* Return 1 if we are certain that the constraint-string STRING allows
6954 the hard register REG. Return 0 if we can't be sure of this. */
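/* For instance (the strings here are only illustrative), given a general
   hard register, a constraint string such as "g,r" makes every
   alternative accept the register, so 1 is returned; for "r,m" the
   second alternative gives us no certainty, so 0 is returned even though
   the first alternative would accept the register.  */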
6956 static int
6957 constraint_accepts_reg_p (string, reg)
6958 char *string;
6959 rtx reg;
6960 {
6961 int value = 0;
6962 int regno = true_regnum (reg);
6963 int c;
6965 /* Initialize for first alternative. */
6966 value = 0;
6967 /* Check that each alternative contains `g' or `r'. */
6968 while (1)
6969 switch (c = *string++)
6970 {
6971 case 0:
6972 /* If an alternative lacks `g' or `r', we lose. */
6973 return value;
6974 case ',':
6975 /* If an alternative lacks `g' or `r', we lose. */
6976 if (value == 0)
6977 return 0;
6978 /* Initialize for next alternative. */
6979 value = 0;
6980 break;
6981 case 'g':
6982 case 'r':
6983 /* Any general reg wins for this alternative. */
6984 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
6985 value = 1;
6986 break;
6987 default:
6988 /* Any reg in specified class wins for this alternative. */
6989 {
6990 enum reg_class class = REG_CLASS_FROM_LETTER (c);
6992 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
6993 value = 1;
6994 }
6995 }
6996 }
6998 /* Return the number of places FIND appears within X, but don't count
6999 an occurrence if some SET_DEST is FIND. */
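/* For example, if FIND is the rtx for pseudo 100 (an illustrative number)
   and X is the pattern (set (reg 100) (plus (reg 100) (const_int 1))),
   the result is 1: the source mentions the register once, while the
   SET_DEST occurrence is deliberately not counted.  */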
7001 static int
7002 count_occurrences (x, find)
7003 register rtx x, find;
7004 {
7005 register int i, j;
7006 register enum rtx_code code;
7007 register char *format_ptr;
7008 int count;
7010 if (x == find)
7011 return 1;
7012 if (x == 0)
7013 return 0;
7015 code = GET_CODE (x);
7017 switch (code)
7018 {
7019 case REG:
7020 case QUEUED:
7021 case CONST_INT:
7022 case CONST_DOUBLE:
7023 case SYMBOL_REF:
7024 case CODE_LABEL:
7025 case PC:
7026 case CC0:
7027 return 0;
7029 case SET:
7030 if (SET_DEST (x) == find)
7031 return count_occurrences (SET_SRC (x), find);
7032 break;
7033 }
7035 format_ptr = GET_RTX_FORMAT (code);
7036 count = 0;
7038 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7039 {
7040 switch (*format_ptr++)
7041 {
7042 case 'e':
7043 count += count_occurrences (XEXP (x, i), find);
7044 break;
7046 case 'E':
7047 if (XVEC (x, i) != NULL)
7048 {
7049 for (j = 0; j < XVECLEN (x, i); j++)
7050 count += count_occurrences (XVECEXP (x, i, j), find);
7051 }
7052 break;
7053 }
7054 }
7055 return count;