gcc/reload1.c
1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 88, 89, 92-6, 1997 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 #include <stdio.h>
23 #include "config.h"
24 #include "rtl.h"
25 #include "obstack.h"
26 #include "insn-config.h"
27 #include "insn-flags.h"
28 #include "insn-codes.h"
29 #include "flags.h"
30 #include "expr.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "reload.h"
34 #include "recog.h"
35 #include "basic-block.h"
36 #include "output.h"
37 #include "real.h"
39 /* This file contains the reload pass of the compiler, which is
40 run after register allocation has been done. It checks that
41 each insn is valid (operands required to be in registers really
42 are in registers of the proper class) and fixes up invalid ones
43 by copying values temporarily into registers for the insns
44 that need them.
46 The results of register allocation are described by the vector
47 reg_renumber; the insns still contain pseudo regs, but reg_renumber
48 can be used to find which hard reg, if any, a pseudo reg is in.
50 The technique we always use is to free up a few hard regs that are
51 called ``reload regs'', and for each place where a pseudo reg
52 must be in a hard reg, copy it temporarily into one of the reload regs.
54 All the pseudos that were formerly allocated to the hard regs that
55 are now in use as reload regs must be ``spilled''. This means
56 that they go to other hard regs, or to stack slots if no other
57 available hard regs can be found. Spilling can invalidate more
58 insns, creating additional reload needs, so we must keep checking
59 until the process stabilizes.
61 For machines with different classes of registers, we must keep track
62 of the register class needed for each reload, and make sure that
63 we allocate enough reload registers of each class.
65 The file reload.c contains the code that checks one insn for
66 validity and reports the reloads that it needs. This file
67 is in charge of scanning the entire rtl code, accumulating the
68 reload needs, spilling, assigning reload registers to use for
69 fixing up each insn, and generating the new insns to copy values
70 into the reload registers. */
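/* Illustrative sketch, not part of the original file: the paragraph above
   describes a fixed-point iteration.  Taking hard regs as reload regs
   spills the pseudos that lived in them, which can invalidate further
   insns and create new reload needs, so the whole scan repeats until a
   pass changes nothing.  The helpers below are hypothetical stand-ins for
   the real scanning and spilling code; the skeleton is roughly:  */
#if 0
void scan_insns_for_needs (void);	/* hypothetical: record max needs */
int all_needs_met (void);		/* hypothetical: enough spill regs? */
void take_another_spill_reg (void);	/* hypothetical: spill one more reg */

static void
reload_driver_sketch (void)
{
  int something_changed = 1;

  while (something_changed)
    {
      something_changed = 0;

      /* One full pass over the insns, recording the largest number of
	 reload regs of each class that any single insn needs.  */
      scan_insns_for_needs ();

      /* Set aside more hard regs until those needs are covered.  Each
	 reg taken forces its pseudos to be spilled, which may create
	 new needs -- hence the outer loop.  */
      while (! all_needs_met ())
	{
	  take_another_spill_reg ();
	  something_changed = 1;
	}
    }
}
#endif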
73 #ifndef REGISTER_MOVE_COST
74 #define REGISTER_MOVE_COST(x, y) 2
75 #endif
77 #ifndef MEMORY_MOVE_COST
78 #define MEMORY_MOVE_COST(x) 4
79 #endif
81 /* During reload_as_needed, element N contains a REG rtx for the hard reg
82 into which reg N has been reloaded (perhaps for a previous insn). */
83 static rtx *reg_last_reload_reg;
85 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
86 for an output reload that stores into reg N. */
87 static char *reg_has_output_reload;
89 /* Indicates which hard regs are reload-registers for an output reload
90 in the current insn. */
91 static HARD_REG_SET reg_is_output_reload;
93 /* Element N is the constant value to which pseudo reg N is equivalent,
94 or zero if pseudo reg N is not equivalent to a constant.
95 find_reloads looks at this in order to replace pseudo reg N
96 with the constant it stands for. */
97 rtx *reg_equiv_constant;
99 /* Element N is a memory location to which pseudo reg N is equivalent,
100 prior to any register elimination (such as frame pointer to stack
101 pointer). Depending on whether or not it is a valid address, this value
102 is transferred to either reg_equiv_address or reg_equiv_mem. */
103 rtx *reg_equiv_memory_loc;
105 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
106 This is used when the address is not valid as a memory address
107 (because its displacement is too big for the machine). */
108 rtx *reg_equiv_address;
110 /* Element N is the memory slot to which pseudo reg N is equivalent,
111 or zero if pseudo reg N is not equivalent to a memory slot. */
112 rtx *reg_equiv_mem;
114 /* Widest width in which each pseudo reg is referred to (via subreg). */
115 static int *reg_max_ref_width;
117 /* Element N is the insn that initialized reg N from its equivalent
118 constant or memory slot. */
119 static rtx *reg_equiv_init;
121 /* During reload_as_needed, element N contains the last pseudo regno
122 reloaded into the Nth reload register. This vector is in parallel
123 with spill_regs. If that pseudo reg occupied more than one register,
124 reg_reloaded_contents points to that pseudo for each spill register in
125 use; all of these must remain set for an inheritance to occur. */
126 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
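/* Illustrative sketch, not part of the original file: "inheritance" means
   reusing a value that is already sitting in spill registers instead of
   reloading it again.  For a pseudo that spans several hard regs, every
   one of those spill registers must still record that pseudo, as the
   comment above says.  A toy check, with contents[] shaped like
   reg_reloaded_contents and order[] like spill_reg_order:  */
#if 0
static int
can_inherit_sketch (const int *contents, const short *order,
		    int first_hard_regno, int nregs, int pseudo_regno)
{
  int k;

  for (k = 0; k < nregs; k++)
    {
      /* Position of this hard reg in spill_regs, or -1 if it is not
	 a spill reg at all.  */
      int slot = order[first_hard_regno + k];

      if (slot < 0 || contents[slot] != pseudo_regno)
	return 0;		/* Some piece is missing; cannot inherit.  */
    }
  return 1;
}
#endif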
128 /* During reload_as_needed, element N contains the insn for which
129 the Nth reload register was last used. This vector is in parallel
130 with spill_regs, and its contents are significant only when
131 reg_reloaded_contents is significant. */
132 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
134 /* Number of spill-regs so far; number of valid elements of spill_regs. */
135 static int n_spills;
137 /* In parallel with spill_regs, contains REG rtx's for those regs.
138 Holds the last rtx used for any given reg, or 0 if it has never
139 been used for spilling yet. This rtx is reused, provided it has
140 the proper mode. */
141 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
143 /* In parallel with spill_regs, contains nonzero for a spill reg
144 that was stored after the last time it was used.
145 The precise value is the insn generated to do the store. */
146 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
148 /* This table is the inverse mapping of spill_regs:
149 indexed by hard reg number,
150 it contains the position of that reg in spill_regs,
151 or -1 for something that is not in spill_regs. */
152 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
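/* Illustrative sketch, not part of the original file: spill_reg_order is
   simply the inverse of the valid prefix of spill_regs, with -1 marking
   hard regs that are not spill regs.  A minimal, self-contained rebuild
   of such an inverse mapping (with a toy register count) looks like:  */
#if 0
#define TOY_NREGS 16

static void
rebuild_inverse_map_sketch (const short *regs, int n_valid, short *order)
{
  int i;

  /* Every hard reg starts out "not a spill reg".  */
  for (i = 0; i < TOY_NREGS; i++)
    order[i] = -1;

  /* For each hard reg in the valid prefix, record its position.  */
  for (i = 0; i < n_valid; i++)
    order[regs[i]] = i;
}
#endif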
154 /* This reg set indicates registers that may not be used for retrying global
155 allocation. The registers that may not be used include all spill registers
156 and the frame pointer (if we are using one). */
157 HARD_REG_SET forbidden_regs;
159 /* This reg set indicates registers that are not good for spill registers.
160 They will not be used to complete groups of spill registers. This includes
161 all fixed registers, registers that may be eliminated, and, if
162 SMALL_REGISTER_CLASSES is not defined, registers explicitly used in the rtl.
164 (spill_reg_order prevents these registers from being used to start a
165 group.) */
166 static HARD_REG_SET bad_spill_regs;
168 /* Describes order of use of registers for reloading
169 of spilled pseudo-registers. `spills' is the number of
170 elements that are actually valid; new ones are added at the end. */
171 static short spill_regs[FIRST_PSEUDO_REGISTER];
173 /* This reg set indicates those registers that have been used as spill
174 registers. This information is used in reorg.c, to help figure out
175 what registers are live at any point. It is assumed that all spill_regs
176 are dead at every CODE_LABEL. */
178 HARD_REG_SET used_spill_regs;
180 /* Index of last register assigned as a spill register. We allocate in
181 a round-robin fashion. */
183 static int last_spill_reg;
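/* Illustrative sketch, not part of the original file: one simple way to
   implement such a round-robin choice is to resume the search just past
   the slot used last time and wrap around, so the same hard regs are not
   reused over and over.  This is only a sketch of the idea, with a
   hypothetical usable() predicate; it is not the code used here.  */
#if 0
static int
round_robin_pick_sketch (const short *candidates, int n, int *last_used,
			 int (*usable) (int regno))
{
  int i;

  for (i = 0; i < n; i++)
    {
      int slot = (*last_used + 1 + i) % n;

      if (candidates[slot] >= 0 && usable (candidates[slot]))
	{
	  *last_used = slot;
	  return candidates[slot];
	}
    }
  return -1;			/* Nothing usable was found.  */
}
#endif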
185 /* Describes order of preference for putting regs into spill_regs.
186 Contains the numbers of all the hard regs, in order most preferred first.
187 This order is different for each function.
188 It is set up by order_regs_for_reload.
189 Empty elements at the end contain -1. */
190 static short potential_reload_regs[FIRST_PSEUDO_REGISTER];
192 /* 1 for a hard register that appears explicitly in the rtl
193 (for example, function value registers, special registers
194 used by insns, structure value pointer registers). */
195 static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];
197 /* Indicates if a register was counted against the need for
198 groups. 0 means it can count against max_nongroup instead. */
199 static HARD_REG_SET counted_for_groups;
201 /* Indicates if a register was counted against the need for
202 non-groups. 0 means it can become part of a new group.
203 During choose_reload_regs, 1 here means don't use this reg
204 as part of a group, even if it seems to be otherwise ok. */
205 static HARD_REG_SET counted_for_nongroups;
207 /* Indexed by pseudo reg number N,
208 says we may not delete stores into the real (memory) home of pseudo N.
209 This is set if we already substituted a memory equivalent in some uses,
210 which happens when we have to eliminate the fp from it. */
211 static char *cannot_omit_stores;
213 /* Nonzero if indirect addressing is supported on the machine; this means
214 that spilling (REG n) does not require reloading it into a register in
215 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
216 value indicates the level of indirect addressing supported, e.g., two
217 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
218 a hard register. */
220 static char spill_indirect_levels;
222 /* Nonzero if indirect addressing is supported when the innermost MEM is
223 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
224 which these are valid is the same as spill_indirect_levels, above. */
226 char indirect_symref_ok;
228 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
230 char double_reg_address_ok;
232 /* Record the stack slot for each spilled hard register. */
234 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
236 /* Width allocated so far for that stack slot. */
238 static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
240 /* Indexed by register class and basic block number, nonzero if there is
241 any need for a spill register of that class in that basic block.
242 The pointer is 0 if we did stupid allocation and don't know
243 the structure of basic blocks. */
245 char *basic_block_needs[N_REG_CLASSES];
247 /* First uid used by insns created by reload in this function.
248 Used in find_equiv_reg. */
249 int reload_first_uid;
251 /* Flag set by local-alloc or global-alloc if anything is live in
252 a call-clobbered reg across calls. */
254 int caller_save_needed;
256 /* The register class to use for a base register when reloading an
257 address. This is normally BASE_REG_CLASS, but it may be different
258 when using SMALL_REGISTER_CLASSES and passing parameters in
259 registers. */
260 enum reg_class reload_address_base_reg_class;
262 /* The register class to use for an index register when reloading an
263 address. This is normally INDEX_REG_CLASS, but it may be different
264 when using SMALL_REGISTER_CLASSES and passing parameters in
265 registers. */
266 enum reg_class reload_address_index_reg_class;
268 /* Set to 1 while reload_as_needed is operating.
269 Required by some machines to handle any generated moves differently. */
271 int reload_in_progress = 0;
273 /* These arrays record the insn_code of insns that may be needed to
274 perform input and output reloads of special objects. They provide a
275 place to pass a scratch register. */
277 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
278 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
280 /* This obstack is used for allocation of rtl during register elimination.
281 The allocated storage can be freed once find_reloads has processed the
282 insn. */
284 struct obstack reload_obstack;
285 char *reload_firstobj;
287 #define obstack_chunk_alloc xmalloc
288 #define obstack_chunk_free free
290 /* List of labels that must never be deleted. */
291 extern rtx forced_labels;
293 /* Allocation number table from global register allocation. */
294 extern int *reg_allocno;
296 /* This structure is used to record information about register eliminations.
297 Each array entry describes one possible way of eliminating a register
298 in favor of another. If there is more than one way of eliminating a
299 particular register, the most preferred should be specified first. */
301 static struct elim_table
303 int from; /* Register number to be eliminated. */
304 int to; /* Register number used as replacement. */
305 int initial_offset; /* Initial difference between values. */
306 int can_eliminate; /* Non-zero if this elimination can be done. */
307 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
308 insns made by reload. */
309 int offset; /* Current offset between the two regs. */
310 int max_offset; /* Maximum offset between the two regs. */
311 int previous_offset; /* Offset at end of previous insn. */
312 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
313 rtx from_rtx; /* REG rtx for the register to be eliminated.
314 We cannot simply compare the number since
315 we might then spuriously replace a hard
316 register corresponding to a pseudo
317 assigned to the reg to be eliminated. */
318 rtx to_rtx; /* REG rtx for the replacement. */
319 } reg_eliminate[] =
321 /* If a set of eliminable registers was specified, define the table from it.
322 Otherwise, default to the normal case of the frame pointer being
323 replaced by the stack pointer. */
325 #ifdef ELIMINABLE_REGS
326 ELIMINABLE_REGS;
327 #else
328 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
329 #endif
331 #define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
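/* Illustrative sketch, not part of the original file: conceptually, an
   elimination rewrites every reference of the form FROM + k into
   TO + k + offset, where `offset' is the current difference between the
   two registers (it changes as the stack frame grows and shrinks).  In
   plain C, with a toy base-plus-displacement address:  */
#if 0
struct toy_address
{
  int base_regno;		/* Hard register used as the base.  */
  int displacement;		/* Constant displacement from the base.  */
};

static void
apply_elimination_sketch (struct toy_address *addr,
			  int from, int to, int offset)
{
  if (addr->base_regno == from)
    {
      addr->base_regno = to;
      addr->displacement += offset;
    }
}
#endif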
333 /* Record the number of pending eliminations that have an offset not equal
334 to their initial offset. If non-zero, we use a new copy of each
335 replacement result in any insns encountered. */
336 static int num_not_at_initial_offset;
338 /* Count the number of registers that we may be able to eliminate. */
339 static int num_eliminable;
341 /* For each label, we record the offset of each elimination. If we reach
342 a label by more than one path and an offset differs, we cannot do the
343 elimination. This information is indexed by the number of the label.
344 The first table is an array of flags that records whether we have yet
345 encountered a label and the second table is an array of arrays, one
346 entry in the latter array for each elimination. */
348 static char *offsets_known_at;
349 static int (*offsets_at)[NUM_ELIMINABLE_REGS];
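/* Illustrative sketch, not part of the original file: the rule described
   above -- an elimination stays valid only if every path reaching a label
   agrees on its offset -- boils down to a record-or-compare step per
   label.  A toy version for one elimination, with arrays shaped like
   offsets_known_at and one row of offsets_at:  */
#if 0
static int
note_label_offset_sketch (char *known_at, int *offset_at,
			  int label_no, int offset)
{
  if (! known_at[label_no])
    {
      /* First arrival at this label: just record the offset.  */
      known_at[label_no] = 1;
      offset_at[label_no] = offset;
      return 1;
    }

  /* Reached again: valid only if this path agrees with the earlier ones.  */
  return offset_at[label_no] == offset;
}
#endif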
351 /* Number of labels in the current function. */
353 static int num_labels;
355 struct hard_reg_n_uses { int regno; int uses; };
357 static int possible_group_p PROTO((int, int *));
358 static void count_possible_groups PROTO((int *, enum machine_mode *,
359 int *, int));
360 static int modes_equiv_for_class_p PROTO((enum machine_mode,
361 enum machine_mode,
362 enum reg_class));
363 static void spill_failure PROTO((rtx));
364 static int new_spill_reg PROTO((int, int, int *, int *, int,
365 FILE *));
366 static void delete_dead_insn PROTO((rtx));
367 static void alter_reg PROTO((int, int));
368 static void mark_scratch_live PROTO((rtx));
369 static void set_label_offsets PROTO((rtx, rtx, int));
370 static int eliminate_regs_in_insn PROTO((rtx, int));
371 static void mark_not_eliminable PROTO((rtx, rtx));
372 static int spill_hard_reg PROTO((int, int, FILE *, int));
373 static void scan_paradoxical_subregs PROTO((rtx));
374 static int hard_reg_use_compare PROTO((const GENERIC_PTR, const GENERIC_PTR));
375 static void order_regs_for_reload PROTO((int));
376 static int compare_spill_regs PROTO((const GENERIC_PTR, const GENERIC_PTR));
377 static void reload_as_needed PROTO((rtx, int));
378 static void forget_old_reloads_1 PROTO((rtx, rtx));
379 static int reload_reg_class_lower PROTO((const GENERIC_PTR, const GENERIC_PTR));
380 static void mark_reload_reg_in_use PROTO((int, int, enum reload_type,
381 enum machine_mode));
382 static void clear_reload_reg_in_use PROTO((int, int, enum reload_type,
383 enum machine_mode));
384 static int reload_reg_free_p PROTO((int, int, enum reload_type));
385 static int reload_reg_free_before_p PROTO((int, int, enum reload_type));
386 static int reload_reg_reaches_end_p PROTO((int, int, enum reload_type));
387 static int reloads_conflict PROTO((int, int));
388 static int allocate_reload_reg PROTO((int, rtx, int, int));
389 static void choose_reload_regs PROTO((rtx, rtx));
390 static void merge_assigned_reloads PROTO((rtx));
391 static void emit_reload_insns PROTO((rtx));
392 static void delete_output_reload PROTO((rtx, int, rtx));
393 static void inc_for_reload PROTO((rtx, rtx, int));
394 static int constraint_accepts_reg_p PROTO((char *, rtx));
395 static int count_occurrences PROTO((rtx, rtx));
397 /* Initialize the reload pass once per compilation. */
399 void
400 init_reload ()
402 register int i;
404 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
405 Set spill_indirect_levels to the number of levels such addressing is
406 permitted, zero if it is not permitted at all. */
408 register rtx tem
409 = gen_rtx (MEM, Pmode,
410 gen_rtx (PLUS, Pmode,
411 gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
412 GEN_INT (4)));
413 spill_indirect_levels = 0;
415 while (memory_address_p (QImode, tem))
417 spill_indirect_levels++;
418 tem = gen_rtx (MEM, Pmode, tem);
421 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
423 tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
424 indirect_symref_ok = memory_address_p (QImode, tem);
426 /* See if reg+reg is a valid (and offsettable) address. */
428 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
430 tem = gen_rtx (PLUS, Pmode,
431 gen_rtx (REG, Pmode, HARD_FRAME_POINTER_REGNUM),
432 gen_rtx (REG, Pmode, i));
433 /* This way, we make sure that reg+reg is an offsettable address. */
434 tem = plus_constant (tem, 4);
436 if (memory_address_p (QImode, tem))
438 double_reg_address_ok = 1;
439 break;
443 /* Initialize obstack for our rtl allocation. */
444 gcc_obstack_init (&reload_obstack);
445 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
447 /* Decide which register class should be used when reloading
448 addresses. If we are using SMALL_REGISTER_CLASSES, and any
449 parameters are passed in registers, then we do not want to use
450 those registers when reloading an address. Otherwise, if a
451 function argument needs a reload, we may wind up clobbering
452 another argument to the function which was already computed. If
453 we find a subset class which simply avoids those registers, we
454 use it instead. ??? It would be better to only use the
455 restricted class when we actually are loading function arguments,
456 but that is hard to determine. */
457 reload_address_base_reg_class = BASE_REG_CLASS;
458 reload_address_index_reg_class = INDEX_REG_CLASS;
459 #ifdef SMALL_REGISTER_CLASSES
460 if (SMALL_REGISTER_CLASSES)
462 int regno;
463 HARD_REG_SET base, index;
464 enum reg_class *p;
466 COPY_HARD_REG_SET (base, reg_class_contents[BASE_REG_CLASS]);
467 COPY_HARD_REG_SET (index, reg_class_contents[INDEX_REG_CLASS]);
468 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
470 if (FUNCTION_ARG_REGNO_P (regno))
472 CLEAR_HARD_REG_BIT (base, regno);
473 CLEAR_HARD_REG_BIT (index, regno);
477 GO_IF_HARD_REG_EQUAL (base, reg_class_contents[BASE_REG_CLASS],
478 baseok);
479 for (p = reg_class_subclasses[BASE_REG_CLASS];
480 *p != LIM_REG_CLASSES;
481 p++)
483 GO_IF_HARD_REG_EQUAL (base, reg_class_contents[*p], usebase);
484 continue;
485 usebase:
486 reload_address_base_reg_class = *p;
487 break;
489 baseok:;
491 GO_IF_HARD_REG_EQUAL (index, reg_class_contents[INDEX_REG_CLASS],
492 indexok);
493 for (p = reg_class_subclasses[INDEX_REG_CLASS];
494 *p != LIM_REG_CLASSES;
495 p++)
497 GO_IF_HARD_REG_EQUAL (index, reg_class_contents[*p], useindex);
498 continue;
499 useindex:
500 reload_address_index_reg_class = *p;
501 break;
503 indexok:;
505 #endif /* SMALL_REGISTER_CLASSES */
508 /* Main entry point for the reload pass.
510 FIRST is the first insn of the function being compiled.
512 GLOBAL nonzero means we were called from global_alloc
513 and should attempt to reallocate any pseudoregs that we
514 displace from hard regs we will use for reloads.
515 If GLOBAL is zero, we do not have enough information to do that,
516 so any pseudo reg that is spilled must go to the stack.
518 DUMPFILE is the global-reg debugging dump file stream, or 0.
519 If it is nonzero, messages are written to it to describe
520 which registers are seized as reload regs, which pseudo regs
521 are spilled from them, and where the pseudo regs are reallocated to.
523 Return value is nonzero if reload failed
524 and we must not do any more for this function. */
527 reload (first, global, dumpfile)
528 rtx first;
529 int global;
530 FILE *dumpfile;
532 register int class;
533 register int i, j, k;
534 register rtx insn;
535 register struct elim_table *ep;
537 int something_changed;
538 int something_needs_reloads;
539 int something_needs_elimination;
540 int new_basic_block_needs;
541 enum reg_class caller_save_spill_class = NO_REGS;
542 int caller_save_group_size = 1;
544 /* Nonzero means we couldn't get enough spill regs. */
545 int failure = 0;
547 /* The basic block number currently being processed for INSN. */
548 int this_block;
550 /* Make sure even insns with volatile mem refs are recognizable. */
551 init_recog ();
553 /* Enable find_equiv_reg to distinguish insns made by reload. */
554 reload_first_uid = get_max_uid ();
556 for (i = 0; i < N_REG_CLASSES; i++)
557 basic_block_needs[i] = 0;
559 #ifdef SECONDARY_MEMORY_NEEDED
560 /* Initialize the secondary memory table. */
561 clear_secondary_mem ();
562 #endif
564 /* Remember which hard regs appear explicitly
565 before we merge into `regs_ever_live' the ones in which
566 pseudo regs have been allocated. */
567 bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);
569 /* We don't have a stack slot for any spill reg yet. */
570 bzero ((char *) spill_stack_slot, sizeof spill_stack_slot);
571 bzero ((char *) spill_stack_slot_width, sizeof spill_stack_slot_width);
573 /* Initialize the save area information for caller-save, in case some
574 are needed. */
575 init_save_areas ();
577 /* Compute which hard registers are now in use
578 as homes for pseudo registers.
579 This is done here rather than (eg) in global_alloc
580 because this point is reached even if not optimizing. */
582 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
583 mark_home_live (i);
585 for (i = 0; i < scratch_list_length; i++)
586 if (scratch_list[i])
587 mark_scratch_live (scratch_list[i]);
589 /* Make sure that the last insn in the chain
590 is not something that needs reloading. */
591 emit_note (NULL_PTR, NOTE_INSN_DELETED);
593 /* Find all the pseudo registers that didn't get hard regs
594 but do have known equivalent constants or memory slots.
595 These include parameters (known equivalent to parameter slots)
596 and cse'd or loop-moved constant memory addresses.
598 Record constant equivalents in reg_equiv_constant
599 so they will be substituted by find_reloads.
600 Record memory equivalents in reg_mem_equiv so they can
601 be substituted eventually by altering the REG-rtx's. */
603 reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
604 bzero ((char *) reg_equiv_constant, max_regno * sizeof (rtx));
605 reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
606 bzero ((char *) reg_equiv_memory_loc, max_regno * sizeof (rtx));
607 reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
608 bzero ((char *) reg_equiv_mem, max_regno * sizeof (rtx));
609 reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
610 bzero ((char *) reg_equiv_init, max_regno * sizeof (rtx));
611 reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
612 bzero ((char *) reg_equiv_address, max_regno * sizeof (rtx));
613 reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
614 bzero ((char *) reg_max_ref_width, max_regno * sizeof (int));
615 cannot_omit_stores = (char *) alloca (max_regno);
616 bzero (cannot_omit_stores, max_regno);
618 #ifdef SMALL_REGISTER_CLASSES
619 if (SMALL_REGISTER_CLASSES)
620 CLEAR_HARD_REG_SET (forbidden_regs);
621 #endif
623 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
624 Also find all paradoxical subregs and find largest such for each pseudo.
625 On machines with small register classes, record hard registers that
626 are used for user variables. These can never be used for spills.
627 Also look for a "constant" NOTE_INSN_SETJMP. This means that all
628 caller-saved registers must be marked live. */
630 for (insn = first; insn; insn = NEXT_INSN (insn))
632 rtx set = single_set (insn);
634 if (GET_CODE (insn) == NOTE && CONST_CALL_P (insn)
635 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
636 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
637 if (! call_used_regs[i])
638 regs_ever_live[i] = 1;
640 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
642 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
643 if (note
644 #ifdef LEGITIMATE_PIC_OPERAND_P
645 && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
646 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
647 #endif
650 rtx x = XEXP (note, 0);
651 i = REGNO (SET_DEST (set));
652 if (i > LAST_VIRTUAL_REGISTER)
654 if (GET_CODE (x) == MEM)
655 reg_equiv_memory_loc[i] = x;
656 else if (CONSTANT_P (x))
658 if (LEGITIMATE_CONSTANT_P (x))
659 reg_equiv_constant[i] = x;
660 else
661 reg_equiv_memory_loc[i]
662 = force_const_mem (GET_MODE (SET_DEST (set)), x);
664 else
665 continue;
667 /* If this register is being made equivalent to a MEM
668 and the MEM is not SET_SRC, the equivalencing insn
669 is one with the MEM as a SET_DEST and it occurs later.
670 So don't mark this insn now. */
671 if (GET_CODE (x) != MEM
672 || rtx_equal_p (SET_SRC (set), x))
673 reg_equiv_init[i] = insn;
678 /* If this insn is setting a MEM from a register equivalent to it,
679 this is the equivalencing insn. */
680 else if (set && GET_CODE (SET_DEST (set)) == MEM
681 && GET_CODE (SET_SRC (set)) == REG
682 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
683 && rtx_equal_p (SET_DEST (set),
684 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
685 reg_equiv_init[REGNO (SET_SRC (set))] = insn;
687 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
688 scan_paradoxical_subregs (PATTERN (insn));
691 /* Does this function require a frame pointer? */
693 frame_pointer_needed = (! flag_omit_frame_pointer
694 #ifdef EXIT_IGNORE_STACK
695 /* ?? If EXIT_IGNORE_STACK is set, we will not save
696 and restore sp for alloca. So we can't eliminate
697 the frame pointer in that case. At some point,
698 we should improve this by emitting the
699 sp-adjusting insns for this case. */
700 || (current_function_calls_alloca
701 && EXIT_IGNORE_STACK)
702 #endif
703 || FRAME_POINTER_REQUIRED);
705 num_eliminable = 0;
707 /* Initialize the table of registers to eliminate. The way we do this
708 depends on how the eliminable registers were defined. */
709 #ifdef ELIMINABLE_REGS
710 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
712 ep->can_eliminate = ep->can_eliminate_previous
713 = (CAN_ELIMINATE (ep->from, ep->to)
714 && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
716 #else
717 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
718 = ! frame_pointer_needed;
719 #endif
721 /* Count the number of eliminable registers and build the FROM and TO
722 REG rtx's. Note that code in gen_rtx will cause, e.g.,
723 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
724 We depend on this. */
725 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
727 num_eliminable += ep->can_eliminate;
728 ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
729 ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
732 num_labels = max_label_num () - get_first_label_num ();
734 /* Allocate the tables used to store offset information at labels. */
735 offsets_known_at = (char *) alloca (num_labels);
736 offsets_at
737 = (int (*)[NUM_ELIMINABLE_REGS])
738 alloca (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
740 offsets_known_at -= get_first_label_num ();
741 offsets_at -= get_first_label_num ();
743 /* Alter each pseudo-reg rtx to contain its hard reg number.
744 Assign stack slots to the pseudos that lack hard regs or equivalents.
745 Do not touch virtual registers. */
747 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
748 alter_reg (i, -1);
750 /* If we have some registers we think can be eliminated, scan all insns to
751 see if there is an insn that sets one of these registers to something
752 other than itself plus a constant. If so, the register cannot be
753 eliminated. Doing this scan here eliminates an extra pass through the
754 main reload loop in the most common case where register elimination
755 cannot be done. */
756 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
757 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
758 || GET_CODE (insn) == CALL_INSN)
759 note_stores (PATTERN (insn), mark_not_eliminable);
761 #ifndef REGISTER_CONSTRAINTS
762 /* If all the pseudo regs have hard regs,
763 except for those that are never referenced,
764 we know that no reloads are needed. */
765 /* But that is not true if there are register constraints, since
766 in that case some pseudos might be in the wrong kind of hard reg. */
768 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
769 if (reg_renumber[i] == -1 && reg_n_refs[i] != 0)
770 break;
772 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
773 return;
774 #endif
776 /* Compute the order of preference for hard registers to spill.
777 Store them by decreasing preference in potential_reload_regs. */
779 order_regs_for_reload (global);
781 /* So far, no hard regs have been spilled. */
782 n_spills = 0;
783 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
784 spill_reg_order[i] = -1;
786 /* Initialize to -1, which means take the first spill register. */
787 last_spill_reg = -1;
789 /* On most machines, we can't use any register explicitly used in the
790 rtl as a spill register. But on some, we have to. Those will have
791 taken care to keep the life of hard regs as short as possible. */
793 #ifdef SMALL_REGISTER_CLASSES
794 if (! SMALL_REGISTER_CLASSES)
795 #endif
796 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
798 /* Spill any hard regs that we know we can't eliminate. */
799 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
800 if (! ep->can_eliminate)
801 spill_hard_reg (ep->from, global, dumpfile, 1);
803 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
804 if (frame_pointer_needed)
805 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
806 #endif
808 if (global)
809 for (i = 0; i < N_REG_CLASSES; i++)
811 basic_block_needs[i] = (char *) alloca (n_basic_blocks);
812 bzero (basic_block_needs[i], n_basic_blocks);
815 /* From now on, we need to emit any moves without making new pseudos. */
816 reload_in_progress = 1;
818 /* This loop scans the entire function each go-round
819 and repeats until one repetition spills no additional hard regs. */
821 /* This flag is set when a pseudo reg is spilled,
822 to require another pass. Note that getting an additional reload
823 reg does not necessarily imply any pseudo reg was spilled;
824 sometimes we find a reload reg that no pseudo reg was allocated in. */
825 something_changed = 1;
826 /* This flag is set if there are any insns that require reloading. */
827 something_needs_reloads = 0;
828 /* This flag is set if there are any insns that require register
829 eliminations. */
830 something_needs_elimination = 0;
831 while (something_changed)
833 rtx after_call = 0;
835 /* For each class, number of reload regs needed in that class.
836 This is the maximum over all insns of the needs in that class
837 of the individual insn. */
838 int max_needs[N_REG_CLASSES];
839 /* For each class, size of group of consecutive regs
840 that is needed for the reloads of this class. */
841 int group_size[N_REG_CLASSES];
842 /* For each class, max number of consecutive groups needed.
843 (Each group contains group_size[CLASS] consecutive registers.) */
844 int max_groups[N_REG_CLASSES];
845 /* For each class, max number needed of regs that don't belong
846 to any of the groups. */
847 int max_nongroups[N_REG_CLASSES];
848 /* For each class, the machine mode which requires consecutive
849 groups of regs of that class.
850 If two different modes ever require groups of one class,
851 they must be the same size and equally restrictive for that class,
852 otherwise we can't handle the complexity. */
853 enum machine_mode group_mode[N_REG_CLASSES];
854 /* Record the insn where each maximum need is first found. */
855 rtx max_needs_insn[N_REG_CLASSES];
856 rtx max_groups_insn[N_REG_CLASSES];
857 rtx max_nongroups_insn[N_REG_CLASSES];
858 rtx x;
859 HOST_WIDE_INT starting_frame_size;
860 int previous_frame_pointer_needed = frame_pointer_needed;
861 static char *reg_class_names[] = REG_CLASS_NAMES;
863 something_changed = 0;
864 bzero ((char *) max_needs, sizeof max_needs);
865 bzero ((char *) max_groups, sizeof max_groups);
866 bzero ((char *) max_nongroups, sizeof max_nongroups);
867 bzero ((char *) max_needs_insn, sizeof max_needs_insn);
868 bzero ((char *) max_groups_insn, sizeof max_groups_insn);
869 bzero ((char *) max_nongroups_insn, sizeof max_nongroups_insn);
870 bzero ((char *) group_size, sizeof group_size);
871 for (i = 0; i < N_REG_CLASSES; i++)
872 group_mode[i] = VOIDmode;
874 /* Keep track of which basic blocks are needing the reloads. */
875 this_block = 0;
877 /* Remember whether any element of basic_block_needs
878 changes from 0 to 1 in this pass. */
879 new_basic_block_needs = 0;
881 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done
882 here because the stack size may be a part of the offset computation
883 for register elimination, and there might have been new stack slots
884 created in the last iteration of this loop. */
885 assign_stack_local (BLKmode, 0, 0);
887 starting_frame_size = get_frame_size ();
889 /* Reset all offsets on eliminable registers to their initial values. */
890 #ifdef ELIMINABLE_REGS
891 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
893 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
894 ep->previous_offset = ep->offset
895 = ep->max_offset = ep->initial_offset;
897 #else
898 #ifdef INITIAL_FRAME_POINTER_OFFSET
899 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
900 #else
901 if (!FRAME_POINTER_REQUIRED)
902 abort ();
903 reg_eliminate[0].initial_offset = 0;
904 #endif
905 reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
906 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
907 #endif
909 num_not_at_initial_offset = 0;
911 bzero ((char *) &offsets_known_at[get_first_label_num ()], num_labels);
913 /* Set a known offset for each forced label to be at the initial offset
914 of each elimination. We do this because we assume that all
915 computed jumps occur from a location where each elimination is
916 at its initial offset. */
918 for (x = forced_labels; x; x = XEXP (x, 1))
919 if (XEXP (x, 0))
920 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
922 /* For each pseudo register that has an equivalent location defined,
923 try to eliminate any eliminable registers (such as the frame pointer)
924 assuming initial offsets for the replacement register, which
925 is the normal case.
927 If the resulting location is directly addressable, substitute
928 the MEM we just got directly for the old REG.
930 If it is not addressable but is a constant or the sum of a hard reg
931 and constant, it is probably not addressable because the constant is
932 out of range, in that case record the address; we will generate
933 hairy code to compute the address in a register each time it is
934 needed. Similarly if it is a hard register, but one that is not
935 valid as an address register.
937 If the location is not addressable, but does not have one of the
938 above forms, assign a stack slot. We have to do this to avoid the
939 potential of producing lots of reloads if, e.g., a location involves
940 a pseudo that didn't get a hard register and has an equivalent memory
941 location that also involves a pseudo that didn't get a hard register.
943 Perhaps at some point we will improve reload_when_needed handling
944 so this problem goes away. But that's very hairy. */
946 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
947 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
949 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX, 0);
951 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
952 XEXP (x, 0)))
953 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
954 else if (CONSTANT_P (XEXP (x, 0))
955 || (GET_CODE (XEXP (x, 0)) == REG
956 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
957 || (GET_CODE (XEXP (x, 0)) == PLUS
958 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
959 && (REGNO (XEXP (XEXP (x, 0), 0))
960 < FIRST_PSEUDO_REGISTER)
961 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
962 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
963 else
965 /* Make a new stack slot. Then indicate that something
966 changed so we go back and recompute offsets for
967 eliminable registers because the allocation of memory
968 below might change some offset. reg_equiv_{mem,address}
969 will be set up for this pseudo on the next pass around
970 the loop. */
971 reg_equiv_memory_loc[i] = 0;
972 reg_equiv_init[i] = 0;
973 alter_reg (i, -1);
974 something_changed = 1;
978 /* If we allocated another pseudo to the stack, redo elimination
979 bookkeeping. */
980 if (something_changed)
981 continue;
983 /* If caller-saves needs a group, initialize the group to include
984 the size and mode required for caller-saves. */
986 if (caller_save_group_size > 1)
988 group_mode[(int) caller_save_spill_class] = Pmode;
989 group_size[(int) caller_save_spill_class] = caller_save_group_size;
992 /* Compute the most additional registers needed by any instruction.
993 Collect information separately for each class of regs. */
995 for (insn = first; insn; insn = NEXT_INSN (insn))
997 if (global && this_block + 1 < n_basic_blocks
998 && insn == basic_block_head[this_block+1])
999 ++this_block;
1001 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
1002 might include REG_LABEL), we need to see what effects this
1003 has on the known offsets at labels. */
1005 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
1006 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1007 && REG_NOTES (insn) != 0))
1008 set_label_offsets (insn, insn, 0);
1010 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1012 /* Nonzero means don't use a reload reg that overlaps
1013 the place where a function value can be returned. */
1014 rtx avoid_return_reg = 0;
1016 rtx old_body = PATTERN (insn);
1017 int old_code = INSN_CODE (insn);
1018 rtx old_notes = REG_NOTES (insn);
1019 int did_elimination = 0;
1021 /* To compute the number of reload registers of each class
1022 needed for an insn, we must simulate what choose_reload_regs
1023 can do. We do this by splitting an insn into an "input" and
1024 an "output" part. RELOAD_OTHER reloads are used in both.
1025 The input part uses those reloads, RELOAD_FOR_INPUT reloads,
1026 which must be live over the entire input section of reloads,
1027 and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
1028 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
1029 inputs.
1031 The registers needed for output are RELOAD_OTHER and
1032 RELOAD_FOR_OUTPUT, which are live for the entire output
1033 portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
1034 reloads for each operand.
1036 The total number of registers needed is the maximum of the
1037 inputs and outputs. */
1039 struct needs
1041 /* [0] is normal, [1] is nongroup. */
1042 int regs[2][N_REG_CLASSES];
1043 int groups[N_REG_CLASSES];
1046 /* Each `struct needs' corresponds to one RELOAD_... type. */
1047 struct {
1048 struct needs other;
1049 struct needs input;
1050 struct needs output;
1051 struct needs insn;
1052 struct needs other_addr;
1053 struct needs op_addr;
1054 struct needs op_addr_reload;
1055 struct needs in_addr[MAX_RECOG_OPERANDS];
1056 struct needs in_addr_addr[MAX_RECOG_OPERANDS];
1057 struct needs out_addr[MAX_RECOG_OPERANDS];
1058 struct needs out_addr_addr[MAX_RECOG_OPERANDS];
1059 } insn_needs;
1061 /* If needed, eliminate any eliminable registers. */
1062 if (num_eliminable)
1063 did_elimination = eliminate_regs_in_insn (insn, 0);
1065 #ifdef SMALL_REGISTER_CLASSES
1066 /* Set avoid_return_reg if this is an insn
1067 that might use the value of a function call. */
1068 if (SMALL_REGISTER_CLASSES && GET_CODE (insn) == CALL_INSN)
1070 if (GET_CODE (PATTERN (insn)) == SET)
1071 after_call = SET_DEST (PATTERN (insn));
1072 else if (GET_CODE (PATTERN (insn)) == PARALLEL
1073 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1074 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1075 else
1076 after_call = 0;
1078 else if (SMALL_REGISTER_CLASSES
1079 && after_call != 0
1080 && !(GET_CODE (PATTERN (insn)) == SET
1081 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
1083 if (reg_referenced_p (after_call, PATTERN (insn)))
1084 avoid_return_reg = after_call;
1085 after_call = 0;
1087 #endif /* SMALL_REGISTER_CLASSES */
1089 /* Analyze the instruction. */
1090 find_reloads (insn, 0, spill_indirect_levels, global,
1091 spill_reg_order);
1093 /* Remember for later shortcuts which insns had any reloads or
1094 register eliminations.
1096 One might think that it would be worthwhile to mark insns
1097 that need register replacements but not reloads, but this is
1098 not safe because find_reloads may do some manipulation of
1099 the insn (such as swapping commutative operands), which would
1100 be lost when we restore the old pattern after register
1101 replacement. So the actions of find_reloads must be redone in
1102 subsequent passes or in reload_as_needed.
1104 However, it is safe to mark insns that need reloads
1105 but not register replacement. */
1107 PUT_MODE (insn, (did_elimination ? QImode
1108 : n_reloads ? HImode
1109 : GET_MODE (insn) == DImode ? DImode
1110 : VOIDmode));
1112 /* Discard any register replacements done. */
1113 if (did_elimination)
1115 obstack_free (&reload_obstack, reload_firstobj);
1116 PATTERN (insn) = old_body;
1117 INSN_CODE (insn) = old_code;
1118 REG_NOTES (insn) = old_notes;
1119 something_needs_elimination = 1;
1122 /* If this insn has no reloads, we need not do anything except
1123 in the case of a CALL_INSN when we have caller-saves and
1124 caller-save needs reloads. */
1126 if (n_reloads == 0
1127 && ! (GET_CODE (insn) == CALL_INSN
1128 && caller_save_spill_class != NO_REGS))
1129 continue;
1131 something_needs_reloads = 1;
1132 bzero ((char *) &insn_needs, sizeof insn_needs);
1134 /* Count each reload once in every class
1135 containing the reload's own class. */
1137 for (i = 0; i < n_reloads; i++)
1139 register enum reg_class *p;
1140 enum reg_class class = reload_reg_class[i];
1141 int size;
1142 enum machine_mode mode;
1143 int nongroup_need;
1144 struct needs *this_needs;
1146 /* Don't count the dummy reloads, for which one of the
1147 regs mentioned in the insn can be used for reloading.
1148 Don't count optional reloads.
1149 Don't count reloads that got combined with others. */
1150 if (reload_reg_rtx[i] != 0
1151 || reload_optional[i] != 0
1152 || (reload_out[i] == 0 && reload_in[i] == 0
1153 && ! reload_secondary_p[i]))
1154 continue;
1156 /* Show that a reload register of this class is needed
1157 in this basic block. We do not use insn_needs and
1158 insn_groups because they are overly conservative for
1159 this purpose. */
1160 if (global && ! basic_block_needs[(int) class][this_block])
1162 basic_block_needs[(int) class][this_block] = 1;
1163 new_basic_block_needs = 1;
1167 mode = reload_inmode[i];
1168 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1169 mode = reload_outmode[i];
1170 size = CLASS_MAX_NREGS (class, mode);
1172 /* If this class doesn't want a group, determine if we have
1173 a nongroup need or a regular need. We have a nongroup
1174 need if this reload conflicts with a group reload whose
1175 class intersects with this reload's class. */
1177 nongroup_need = 0;
1178 if (size == 1)
1179 for (j = 0; j < n_reloads; j++)
1180 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1181 (GET_MODE_SIZE (reload_outmode[j])
1182 > GET_MODE_SIZE (reload_inmode[j]))
1183 ? reload_outmode[j]
1184 : reload_inmode[j])
1185 > 1)
1186 && (!reload_optional[j])
1187 && (reload_in[j] != 0 || reload_out[j] != 0
1188 || reload_secondary_p[j])
1189 && reloads_conflict (i, j)
1190 && reg_classes_intersect_p (class,
1191 reload_reg_class[j]))
1193 nongroup_need = 1;
1194 break;
1197 /* Decide which time-of-use to count this reload for. */
1198 switch (reload_when_needed[i])
1200 case RELOAD_OTHER:
1201 this_needs = &insn_needs.other;
1202 break;
1203 case RELOAD_FOR_INPUT:
1204 this_needs = &insn_needs.input;
1205 break;
1206 case RELOAD_FOR_OUTPUT:
1207 this_needs = &insn_needs.output;
1208 break;
1209 case RELOAD_FOR_INSN:
1210 this_needs = &insn_needs.insn;
1211 break;
1212 case RELOAD_FOR_OTHER_ADDRESS:
1213 this_needs = &insn_needs.other_addr;
1214 break;
1215 case RELOAD_FOR_INPUT_ADDRESS:
1216 this_needs = &insn_needs.in_addr[reload_opnum[i]];
1217 break;
1218 case RELOAD_FOR_INPADDR_ADDRESS:
1219 this_needs = &insn_needs.in_addr_addr[reload_opnum[i]];
1220 break;
1221 case RELOAD_FOR_OUTPUT_ADDRESS:
1222 this_needs = &insn_needs.out_addr[reload_opnum[i]];
1223 break;
1224 case RELOAD_FOR_OUTADDR_ADDRESS:
1225 this_needs = &insn_needs.out_addr_addr[reload_opnum[i]];
1226 break;
1227 case RELOAD_FOR_OPERAND_ADDRESS:
1228 this_needs = &insn_needs.op_addr;
1229 break;
1230 case RELOAD_FOR_OPADDR_ADDR:
1231 this_needs = &insn_needs.op_addr_reload;
1232 break;
1235 if (size > 1)
1237 enum machine_mode other_mode, allocate_mode;
1239 /* Count number of groups needed separately from
1240 number of individual regs needed. */
1241 this_needs->groups[(int) class]++;
1242 p = reg_class_superclasses[(int) class];
1243 while (*p != LIM_REG_CLASSES)
1244 this_needs->groups[(int) *p++]++;
1246 /* Record size and mode of a group of this class. */
1247 /* If more than one size group is needed,
1248 make all groups the largest needed size. */
1249 if (group_size[(int) class] < size)
1251 other_mode = group_mode[(int) class];
1252 allocate_mode = mode;
1254 group_size[(int) class] = size;
1255 group_mode[(int) class] = mode;
1257 else
1259 other_mode = mode;
1260 allocate_mode = group_mode[(int) class];
1263 /* Crash if two dissimilar machine modes both need
1264 groups of consecutive regs of the same class. */
1266 if (other_mode != VOIDmode && other_mode != allocate_mode
1267 && ! modes_equiv_for_class_p (allocate_mode,
1268 other_mode, class))
1269 fatal_insn ("Two dissimilar machine modes both need groups of consecutive regs of the same class",
1270 insn);
1272 else if (size == 1)
1274 this_needs->regs[nongroup_need][(int) class] += 1;
1275 p = reg_class_superclasses[(int) class];
1276 while (*p != LIM_REG_CLASSES)
1277 this_needs->regs[nongroup_need][(int) *p++] += 1;
1279 else
1280 abort ();
1283 /* All reloads have been counted for this insn;
1284 now merge the various times of use.
1285 This sets insn_needs, etc., to the maximum total number
1286 of registers needed at any point in this insn. */
1288 for (i = 0; i < N_REG_CLASSES; i++)
1290 int in_max, out_max;
1292 /* Compute normal and nongroup needs. */
1293 for (j = 0; j <= 1; j++)
1295 for (in_max = 0, out_max = 0, k = 0;
1296 k < reload_n_operands; k++)
1298 in_max
1299 = MAX (in_max, insn_needs.in_addr[k].regs[j][i]);
1300 in_max
1301 = MAX (in_max,
1302 insn_needs.in_addr_addr[k].regs[j][i]);
1303 out_max
1304 = MAX (out_max, insn_needs.out_addr[k].regs[j][i]);
1305 out_max
1306 = MAX (out_max,
1307 insn_needs.out_addr_addr[k].regs[j][i]);
1310 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1311 and operand addresses but not things used to reload
1312 them. Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
1313 don't conflict with things needed to reload inputs or
1314 outputs. */
1316 in_max = MAX (MAX (insn_needs.op_addr.regs[j][i],
1317 insn_needs.op_addr_reload.regs[j][i]),
1318 in_max);
1320 out_max = MAX (out_max, insn_needs.insn.regs[j][i]);
1322 insn_needs.input.regs[j][i]
1323 = MAX (insn_needs.input.regs[j][i]
1324 + insn_needs.op_addr.regs[j][i]
1325 + insn_needs.insn.regs[j][i],
1326 in_max + insn_needs.input.regs[j][i]);
1328 insn_needs.output.regs[j][i] += out_max;
1329 insn_needs.other.regs[j][i]
1330 += MAX (MAX (insn_needs.input.regs[j][i],
1331 insn_needs.output.regs[j][i]),
1332 insn_needs.other_addr.regs[j][i]);
1336 /* Now compute group needs. */
1337 for (in_max = 0, out_max = 0, j = 0;
1338 j < reload_n_operands; j++)
1340 in_max = MAX (in_max, insn_needs.in_addr[j].groups[i]);
1341 in_max = MAX (in_max,
1342 insn_needs.in_addr_addr[j].groups[i]);
1343 out_max
1344 = MAX (out_max, insn_needs.out_addr[j].groups[i]);
1345 out_max
1346 = MAX (out_max, insn_needs.out_addr_addr[j].groups[i]);
1349 in_max = MAX (MAX (insn_needs.op_addr.groups[i],
1350 insn_needs.op_addr_reload.groups[i]),
1351 in_max);
1352 out_max = MAX (out_max, insn_needs.insn.groups[i]);
1354 insn_needs.input.groups[i]
1355 = MAX (insn_needs.input.groups[i]
1356 + insn_needs.op_addr.groups[i]
1357 + insn_needs.insn.groups[i],
1358 in_max + insn_needs.input.groups[i]);
1360 insn_needs.output.groups[i] += out_max;
1361 insn_needs.other.groups[i]
1362 += MAX (MAX (insn_needs.input.groups[i],
1363 insn_needs.output.groups[i]),
1364 insn_needs.other_addr.groups[i]);
1367 /* If this is a CALL_INSN and caller-saves will need
1368 a spill register, act as if the spill register is
1369 needed for this insn. However, the spill register
1370 can be used by any reload of this insn, so we only
1371 need do something if no need for that class has
1372 been recorded.
1374 The assumption that every CALL_INSN will trigger a
1375 caller-save is highly conservative; however, the number
1376 of cases where caller-saves will need a spill register but
1377 a block containing a CALL_INSN won't need a spill register
1378 of that class should be quite rare.
1380 If a group is needed, the size and mode of the group will
1381 have been set up at the beginning of this loop. */
1383 if (GET_CODE (insn) == CALL_INSN
1384 && caller_save_spill_class != NO_REGS)
1386 /* See if this register would conflict with any reload
1387 that needs a group. */
1388 int nongroup_need = 0;
1389 int *caller_save_needs;
1391 for (j = 0; j < n_reloads; j++)
1392 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1393 (GET_MODE_SIZE (reload_outmode[j])
1394 > GET_MODE_SIZE (reload_inmode[j]))
1395 ? reload_outmode[j]
1396 : reload_inmode[j])
1397 > 1)
1398 && reg_classes_intersect_p (caller_save_spill_class,
1399 reload_reg_class[j]))
1401 nongroup_need = 1;
1402 break;
1405 caller_save_needs
1406 = (caller_save_group_size > 1
1407 ? insn_needs.other.groups
1408 : insn_needs.other.regs[nongroup_need]);
1410 if (caller_save_needs[(int) caller_save_spill_class] == 0)
1412 register enum reg_class *p
1413 = reg_class_superclasses[(int) caller_save_spill_class];
1415 caller_save_needs[(int) caller_save_spill_class]++;
1417 while (*p != LIM_REG_CLASSES)
1418 caller_save_needs[(int) *p++] += 1;
1421 /* Show that this basic block will need a register of
1422 this class. */
1424 if (global
1425 && ! (basic_block_needs[(int) caller_save_spill_class]
1426 [this_block]))
1428 basic_block_needs[(int) caller_save_spill_class]
1429 [this_block] = 1;
1430 new_basic_block_needs = 1;
1434 #ifdef SMALL_REGISTER_CLASSES
1435 /* If this insn stores the value of a function call,
1436 and that value is in a register that has been spilled,
1437 and if the insn needs a reload in a class
1438 that might use that register as the reload register,
1439 then add an extra need in that class.
1440 This makes sure we have a register available that does
1441 not overlap the return value. */
1443 if (SMALL_REGISTER_CLASSES && avoid_return_reg)
1445 int regno = REGNO (avoid_return_reg);
1446 int nregs
1447 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
1448 int r;
1449 int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES];
1451 /* First compute the "basic needs", which counts a
1452 need only in the smallest class in which it
1453 is required. */
1455 bcopy ((char *) insn_needs.other.regs[0],
1456 (char *) basic_needs, sizeof basic_needs);
1457 bcopy ((char *) insn_needs.other.groups,
1458 (char *) basic_groups, sizeof basic_groups);
1460 for (i = 0; i < N_REG_CLASSES; i++)
1462 enum reg_class *p;
1464 if (basic_needs[i] >= 0)
1465 for (p = reg_class_superclasses[i];
1466 *p != LIM_REG_CLASSES; p++)
1467 basic_needs[(int) *p] -= basic_needs[i];
1469 if (basic_groups[i] >= 0)
1470 for (p = reg_class_superclasses[i];
1471 *p != LIM_REG_CLASSES; p++)
1472 basic_groups[(int) *p] -= basic_groups[i];
1475 /* Now count extra regs if there might be a conflict with
1476 the return value register. */
1478 for (r = regno; r < regno + nregs; r++)
1479 if (spill_reg_order[r] >= 0)
1480 for (i = 0; i < N_REG_CLASSES; i++)
1481 if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
1483 if (basic_needs[i] > 0)
1485 enum reg_class *p;
1487 insn_needs.other.regs[0][i]++;
1488 p = reg_class_superclasses[i];
1489 while (*p != LIM_REG_CLASSES)
1490 insn_needs.other.regs[0][(int) *p++]++;
1492 if (basic_groups[i] > 0)
1494 enum reg_class *p;
1496 insn_needs.other.groups[i]++;
1497 p = reg_class_superclasses[i];
1498 while (*p != LIM_REG_CLASSES)
1499 insn_needs.other.groups[(int) *p++]++;
1503 #endif /* SMALL_REGISTER_CLASSES */
1505 /* For each class, collect maximum need of any insn. */
1507 for (i = 0; i < N_REG_CLASSES; i++)
1509 if (max_needs[i] < insn_needs.other.regs[0][i])
1511 max_needs[i] = insn_needs.other.regs[0][i];
1512 max_needs_insn[i] = insn;
1514 if (max_groups[i] < insn_needs.other.groups[i])
1516 max_groups[i] = insn_needs.other.groups[i];
1517 max_groups_insn[i] = insn;
1519 if (max_nongroups[i] < insn_needs.other.regs[1][i])
1521 max_nongroups[i] = insn_needs.other.regs[1][i];
1522 max_nongroups_insn[i] = insn;
1526 /* Note that there is a continue statement above. */
1529 /* If we allocated any new memory locations, make another pass
1530 since it might have changed elimination offsets. */
1531 if (starting_frame_size != get_frame_size ())
1532 something_changed = 1;
1534 if (dumpfile)
1535 for (i = 0; i < N_REG_CLASSES; i++)
1537 if (max_needs[i] > 0)
1538 fprintf (dumpfile,
1539 ";; Need %d reg%s of class %s (for insn %d).\n",
1540 max_needs[i], max_needs[i] == 1 ? "" : "s",
1541 reg_class_names[i], INSN_UID (max_needs_insn[i]));
1542 if (max_nongroups[i] > 0)
1543 fprintf (dumpfile,
1544 ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
1545 max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
1546 reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
1547 if (max_groups[i] > 0)
1548 fprintf (dumpfile,
1549 ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
1550 max_groups[i], max_groups[i] == 1 ? "" : "s",
1551 mode_name[(int) group_mode[i]],
1552 reg_class_names[i], INSN_UID (max_groups_insn[i]));
1555 /* If we have caller-saves, set up the save areas and see if caller-save
1556 will need a spill register. */
1558 if (caller_save_needed)
1560 /* Set the offsets for setup_save_areas. */
1561 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
1562 ep++)
1563 ep->previous_offset = ep->max_offset;
1565 if ( ! setup_save_areas (&something_changed)
1566 && caller_save_spill_class == NO_REGS)
1568 /* The class we will need depends on whether the machine
1569 supports the sum of two registers for an address; see
1570 find_address_reloads for details. */
1572 caller_save_spill_class
1573 = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
1574 caller_save_group_size
1575 = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
1576 something_changed = 1;
1580 /* See if anything that happened changes which eliminations are valid.
1581 For example, on the Sparc, whether or not the frame pointer can
1582 be eliminated can depend on what registers have been used. We need
1583 not check some conditions again (such as flag_omit_frame_pointer)
1584 since they can't have changed. */
1586 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1587 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
1588 #ifdef ELIMINABLE_REGS
1589 || ! CAN_ELIMINATE (ep->from, ep->to)
1590 #endif
1592 ep->can_eliminate = 0;
1594 /* Look for the case where we have discovered that we can't replace
1595 register A with register B and that means that we will now be
1596 trying to replace register A with register C. This means we can
1597 no longer replace register C with register B and we need to disable
1598 such an elimination, if it exists. This occurs often with A == ap,
1599 B == sp, and C == fp. */
1601 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1603 struct elim_table *op;
1604 register int new_to = -1;
1606 if (! ep->can_eliminate && ep->can_eliminate_previous)
1608 /* Find the current elimination for ep->from, if there is a
1609 new one. */
1610 for (op = reg_eliminate;
1611 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1612 if (op->from == ep->from && op->can_eliminate)
1614 new_to = op->to;
1615 break;
1618 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1619 disable it. */
1620 for (op = reg_eliminate;
1621 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1622 if (op->from == new_to && op->to == ep->to)
1623 op->can_eliminate = 0;
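 /* A minimal standalone sketch of the rule enforced just above, reduced to a
    plain array of from/to pairs.  toy_elim and toy_disable_chained are
    hypothetical names that do not exist elsewhere in GCC, and the #if 0
    guard keeps the sketch out of the build.  */
 #if 0
 struct toy_elim { int from, to, can_eliminate, can_eliminate_previous; };

 /* If FROM can no longer go to its old target and will now go to NEW_TO,
    then eliminating NEW_TO into that old target must be disabled too
    (the A == ap, B == sp, C == fp case described above).  */
 static void
 toy_disable_chained (struct toy_elim *tab, int n)
 {
   int i, j, new_to;

   for (i = 0; i < n; i++)
     if (! tab[i].can_eliminate && tab[i].can_eliminate_previous)
       {
	 new_to = -1;
	 for (j = 0; j < n; j++)
	   if (tab[j].from == tab[i].from && tab[j].can_eliminate)
	     {
	       new_to = tab[j].to;
	       break;
	     }
	 for (j = 0; j < n; j++)
	   if (tab[j].from == new_to && tab[j].to == tab[i].to)
	     tab[j].can_eliminate = 0;
       }
 }
 #endif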
1627 /* See if any registers that we thought we could eliminate the previous
1628 time are no longer eliminable. If so, something has changed and we
1629 must spill the register. Also, recompute the number of eliminable
1630 registers and see if the frame pointer is needed; it is if there is
1631 no elimination of the frame pointer that we can perform. */
1633 frame_pointer_needed = 1;
1634 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1636 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
1637 && ep->to != HARD_FRAME_POINTER_REGNUM)
1638 frame_pointer_needed = 0;
1640 if (! ep->can_eliminate && ep->can_eliminate_previous)
1642 ep->can_eliminate_previous = 0;
1643 spill_hard_reg (ep->from, global, dumpfile, 1);
1644 something_changed = 1;
1645 num_eliminable--;
1649 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1650 /* If we didn't need a frame pointer last time, but we do now, spill
1651 the hard frame pointer. */
1652 if (frame_pointer_needed && ! previous_frame_pointer_needed)
1654 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
1655 something_changed = 1;
1657 #endif
1659 /* If all needs are met, we win. */
1661 for (i = 0; i < N_REG_CLASSES; i++)
1662 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1663 break;
1664 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1665 break;
1667 /* Not all needs are met; must spill some hard regs. */
1669 /* Put all registers spilled so far back in potential_reload_regs, but
1670 put them at the front, since we've already spilled most of the
1671 pseudos in them (we might have left some pseudos unspilled if they
1672 were in a block that didn't need any spill registers of a conflicting
1673 class. We used to try to mark off the need for those registers,
1674 but doing so properly is very complex and reallocating them is the
1675 simpler approach. First, "pack" potential_reload_regs by pushing
1676 any nonnegative entries towards the end. That will leave room
1677 for the registers we already spilled.
1679 Also, undo the marking of the spill registers from the last time
 1680 around in FORBIDDEN_REGS since we will probably be allocating
1681 them again below.
1683 ??? It is theoretically possible that we might end up not using one
1684 of our previously-spilled registers in this allocation, even though
1685 they are at the head of the list. It's not clear what to do about
1686 this, but it was no better before, when we marked off the needs met
1687 by the previously-spilled registers. With the current code, globals
1688 can be allocated into these registers, but locals cannot. */
1690 if (n_spills)
1692 for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1693 if (potential_reload_regs[i] != -1)
1694 potential_reload_regs[j--] = potential_reload_regs[i];
1696 for (i = 0; i < n_spills; i++)
1698 potential_reload_regs[i] = spill_regs[i];
1699 spill_reg_order[spill_regs[i]] = -1;
1700 CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
1703 n_spills = 0;
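 /* A minimal standalone sketch of the packing step just above, on plain int
    arrays; toy_pack_high is a hypothetical helper, not something that exists
    in GCC, and the #if 0 guard keeps it out of the build.  For example,
    {3, -1, 5, 2} with one previously chosen spill reg 7 becomes {7, 3, 5, 2}:
    the non-negative entries keep their order but slide to the high end, and
    the freed low slots take the old spill regs (assuming, as above, that
    exactly N_OLD entries were -1).  */
 #if 0
 static void
 toy_pack_high (int *v, int n, const int *old_spills, int n_old)
 {
   int i, j = n - 1;

   /* Push every valid (non -1) entry toward the end, preserving order.  */
   for (i = n - 1; i >= 0; i--)
     if (v[i] != -1)
       v[j--] = v[i];

   /* Refill the low end with the registers spilled last time around.  */
   for (i = 0; i < n_old; i++)
     v[i] = old_spills[i];
 }
 #endif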
 1706 /* Now find more reload regs to satisfy the remaining need.
1707 Do it by ascending class number, since otherwise a reg
1708 might be spilled for a big class and might fail to count
1709 for a smaller class even though it belongs to that class.
1711 Count spilled regs in `spills', and add entries to
1712 `spill_regs' and `spill_reg_order'.
1714 ??? Note there is a problem here.
1715 When there is a need for a group in a high-numbered class,
1716 and also need for non-group regs that come from a lower class,
1717 the non-group regs are chosen first. If there aren't many regs,
1718 they might leave no room for a group.
1720 This was happening on the 386. To fix it, we added the code
1721 that calls possible_group_p, so that the lower class won't
1722 break up the last possible group.
1724 Really fixing the problem would require changes above
1725 in counting the regs already spilled, and in choose_reload_regs.
1726 It might be hard to avoid introducing bugs there. */
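 /* A minimal standalone sketch of the hazard described above, assuming a
    plain array of "already taken" flags; toy_pair_left is a hypothetical
    helper, not part of GCC, and the #if 0 guard keeps it out of the build.
    If only regs 2 and 3 are adjacent members of the class that needs a group
    of two, then satisfying a single-register need from reg 2 or 3 first makes
    this predicate false, which is what the possible_group_p check below is
    meant to prevent.  */
 #if 0
 static int
 toy_pair_left (const char *taken, int nregs)
 {
   int i;

   /* Nonzero if some adjacent pair of registers is still free.  */
   for (i = 0; i + 1 < nregs; i++)
     if (! taken[i] && ! taken[i + 1])
       return 1;
   return 0;
 }
 #endif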
1728 CLEAR_HARD_REG_SET (counted_for_groups);
1729 CLEAR_HARD_REG_SET (counted_for_nongroups);
1731 for (class = 0; class < N_REG_CLASSES; class++)
1733 /* First get the groups of registers.
1734 If we got single registers first, we might fragment
1735 possible groups. */
1736 while (max_groups[class] > 0)
1738 /* If any single spilled regs happen to form groups,
1739 count them now. Maybe we don't really need
1740 to spill another group. */
1741 count_possible_groups (group_size, group_mode, max_groups,
1742 class);
1744 if (max_groups[class] <= 0)
1745 break;
1747 /* Groups of size 2 (the only groups used on most machines)
1748 are treated specially. */
1749 if (group_size[class] == 2)
1751 /* First, look for a register that will complete a group. */
1752 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1754 int other;
1756 j = potential_reload_regs[i];
1757 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1759 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1760 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1761 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1762 && HARD_REGNO_MODE_OK (other, group_mode[class])
1763 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1764 other)
1765 /* We don't want one part of another group.
1766 We could get "two groups" that overlap! */
1767 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1769 (j < FIRST_PSEUDO_REGISTER - 1
1770 && (other = j + 1, spill_reg_order[other] >= 0)
1771 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1772 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1773 && HARD_REGNO_MODE_OK (j, group_mode[class])
1774 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1775 other)
1776 && ! TEST_HARD_REG_BIT (counted_for_groups,
1777 other))))
1779 register enum reg_class *p;
1781 /* We have found one that will complete a group,
1782 so count off one group as provided. */
1783 max_groups[class]--;
1784 p = reg_class_superclasses[class];
1785 while (*p != LIM_REG_CLASSES)
1787 if (group_size [(int) *p] <= group_size [class])
1788 max_groups[(int) *p]--;
1789 p++;
1792 /* Indicate both these regs are part of a group. */
1793 SET_HARD_REG_BIT (counted_for_groups, j);
1794 SET_HARD_REG_BIT (counted_for_groups, other);
1795 break;
1798 /* We can't complete a group, so start one. */
1799 #ifdef SMALL_REGISTER_CLASSES
1800 /* Look for a pair neither of which is explicitly used. */
1801 if (SMALL_REGISTER_CLASSES && i == FIRST_PSEUDO_REGISTER)
1802 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1804 int k;
1805 j = potential_reload_regs[i];
1806 /* Verify that J+1 is a potential reload reg. */
1807 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1808 if (potential_reload_regs[k] == j + 1)
1809 break;
1810 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1811 && k < FIRST_PSEUDO_REGISTER
1812 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1813 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1814 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1815 && HARD_REGNO_MODE_OK (j, group_mode[class])
1816 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1817 j + 1)
1818 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1)
1819 /* Reject J at this stage
1820 if J+1 was explicitly used. */
1821 && ! regs_explicitly_used[j + 1])
1822 break;
1824 #endif
1825 /* Now try any group at all
1826 whose registers are not in bad_spill_regs. */
1827 if (i == FIRST_PSEUDO_REGISTER)
1828 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1830 int k;
1831 j = potential_reload_regs[i];
1832 /* Verify that J+1 is a potential reload reg. */
1833 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1834 if (potential_reload_regs[k] == j + 1)
1835 break;
1836 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1837 && k < FIRST_PSEUDO_REGISTER
1838 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1839 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1840 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1841 && HARD_REGNO_MODE_OK (j, group_mode[class])
1842 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1843 j + 1)
1844 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
1845 break;
1848 /* I should be the index in potential_reload_regs
1849 of the new reload reg we have found. */
1851 if (i >= FIRST_PSEUDO_REGISTER)
1853 /* There are no groups left to spill. */
1854 spill_failure (max_groups_insn[class]);
1855 failure = 1;
1856 goto failed;
1858 else
1859 something_changed
1860 |= new_spill_reg (i, class, max_needs, NULL_PTR,
1861 global, dumpfile);
1863 else
1865 /* For groups of more than 2 registers,
1866 look for a sufficient sequence of unspilled registers,
1867 and spill them all at once. */
1868 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1870 int k;
1872 j = potential_reload_regs[i];
1873 if (j >= 0
1874 && j + group_size[class] <= FIRST_PSEUDO_REGISTER
1875 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1877 /* Check each reg in the sequence. */
1878 for (k = 0; k < group_size[class]; k++)
1879 if (! (spill_reg_order[j + k] < 0
1880 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1881 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1882 break;
1883 /* We got a full sequence, so spill them all. */
1884 if (k == group_size[class])
1886 register enum reg_class *p;
1887 for (k = 0; k < group_size[class]; k++)
1889 int idx;
1890 SET_HARD_REG_BIT (counted_for_groups, j + k);
1891 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1892 if (potential_reload_regs[idx] == j + k)
1893 break;
1894 something_changed
1895 |= new_spill_reg (idx, class,
1896 max_needs, NULL_PTR,
1897 global, dumpfile);
1900 /* We have found one that will complete a group,
1901 so count off one group as provided. */
1902 max_groups[class]--;
1903 p = reg_class_superclasses[class];
1904 while (*p != LIM_REG_CLASSES)
1906 if (group_size [(int) *p]
1907 <= group_size [class])
1908 max_groups[(int) *p]--;
1909 p++;
1911 break;
1915 /* We couldn't find any registers for this reload.
1916 Avoid going into an infinite loop. */
1917 if (i >= FIRST_PSEUDO_REGISTER)
1919 /* There are no groups left. */
1920 spill_failure (max_groups_insn[class]);
1921 failure = 1;
1922 goto failed;
1927 /* Now similarly satisfy all need for single registers. */
1929 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1931 /* If we spilled enough regs, but they weren't counted
1932 against the non-group need, see if we can count them now.
1933 If so, we can avoid some actual spilling. */
1934 if (max_needs[class] <= 0 && max_nongroups[class] > 0)
1935 for (i = 0; i < n_spills; i++)
1936 if (TEST_HARD_REG_BIT (reg_class_contents[class],
1937 spill_regs[i])
1938 && !TEST_HARD_REG_BIT (counted_for_groups,
1939 spill_regs[i])
1940 && !TEST_HARD_REG_BIT (counted_for_nongroups,
1941 spill_regs[i])
1942 && max_nongroups[class] > 0)
1944 register enum reg_class *p;
1946 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1947 max_nongroups[class]--;
1948 p = reg_class_superclasses[class];
1949 while (*p != LIM_REG_CLASSES)
1950 max_nongroups[(int) *p++]--;
1952 if (max_needs[class] <= 0 && max_nongroups[class] <= 0)
1953 break;
1955 /* Consider the potential reload regs that aren't
1956 yet in use as reload regs, in order of preference.
1957 Find the most preferred one that's in this class. */
1959 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1960 if (potential_reload_regs[i] >= 0
1961 && TEST_HARD_REG_BIT (reg_class_contents[class],
1962 potential_reload_regs[i])
1963 /* If this reg will not be available for groups,
1964 pick one that does not foreclose possible groups.
1965 This is a kludge, and not very general,
1966 but it should be sufficient to make the 386 work,
1967 and the problem should not occur on machines with
1968 more registers. */
1969 && (max_nongroups[class] == 0
1970 || possible_group_p (potential_reload_regs[i], max_groups)))
1971 break;
1973 /* If we couldn't get a register, try to get one even if we
1974 might foreclose possible groups. This may cause problems
1975 later, but that's better than aborting now, since it is
1976 possible that we will, in fact, be able to form the needed
1977 group even with this allocation. */
1979 if (i >= FIRST_PSEUDO_REGISTER
1980 && (asm_noperands (max_needs[class] > 0
1981 ? max_needs_insn[class]
1982 : max_nongroups_insn[class])
1983 < 0))
1984 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1985 if (potential_reload_regs[i] >= 0
1986 && TEST_HARD_REG_BIT (reg_class_contents[class],
1987 potential_reload_regs[i]))
1988 break;
1990 /* I should be the index in potential_reload_regs
1991 of the new reload reg we have found. */
1993 if (i >= FIRST_PSEUDO_REGISTER)
1995 /* There are no possible registers left to spill. */
1996 spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
1997 : max_nongroups_insn[class]);
1998 failure = 1;
1999 goto failed;
2001 else
2002 something_changed
2003 |= new_spill_reg (i, class, max_needs, max_nongroups,
2004 global, dumpfile);
2009 /* If global-alloc was run, notify it of any register eliminations we have
2010 done. */
2011 if (global)
2012 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2013 if (ep->can_eliminate)
2014 mark_elimination (ep->from, ep->to);
2016 /* Insert code to save and restore call-clobbered hard regs
 2017 around calls. Tell what mode to use so that we will process
2018 those insns in reload_as_needed if we have to. */
2020 if (caller_save_needed)
2021 save_call_clobbered_regs (num_eliminable ? QImode
2022 : caller_save_spill_class != NO_REGS ? HImode
2023 : VOIDmode);
2025 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
2026 If that insn didn't set the register (i.e., it copied the register to
2027 memory), just delete that insn instead of the equivalencing insn plus
2028 anything now dead. If we call delete_dead_insn on that insn, we may
 2029 delete the insn that actually sets the register if the register dies
 2030 there, and that is incorrect. */
2032 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
2033 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
2034 && GET_CODE (reg_equiv_init[i]) != NOTE)
2036 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
2037 delete_dead_insn (reg_equiv_init[i]);
2038 else
2040 PUT_CODE (reg_equiv_init[i], NOTE);
2041 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
2042 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
2046 /* Use the reload registers where necessary
2047 by generating move instructions to move the must-be-register
2048 values into or out of the reload registers. */
2050 if (something_needs_reloads || something_needs_elimination
2051 || (caller_save_needed && num_eliminable)
2052 || caller_save_spill_class != NO_REGS)
2053 reload_as_needed (first, global);
2055 /* If we were able to eliminate the frame pointer, show that it is no
 2056 longer live at the start of any basic block. If it is live by
2057 virtue of being in a pseudo, that pseudo will be marked live
2058 and hence the frame pointer will be known to be live via that
2059 pseudo. */
2061 if (! frame_pointer_needed)
2062 for (i = 0; i < n_basic_blocks; i++)
2063 basic_block_live_at_start[i][HARD_FRAME_POINTER_REGNUM / REGSET_ELT_BITS]
2064 &= ~ ((REGSET_ELT_TYPE) 1 << (HARD_FRAME_POINTER_REGNUM
2065 % REGSET_ELT_BITS));
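 /* A minimal standalone sketch of the bit manipulation above, assuming the
    regset is an array of unsigned long words (and 8-bit chars);
    toy_clear_reg_bit and TOY_BITS_PER_WORD are hypothetical names, not part
    of GCC, and the #if 0 guard keeps the sketch out of the build.  */
 #if 0
 #define TOY_BITS_PER_WORD (sizeof (unsigned long) * 8)

 static void
 toy_clear_reg_bit (unsigned long *set, int regno)
 {
   /* Word REGNO / BITS holds the register's bit; clear bit REGNO % BITS
      within it, just as the REGSET_ELT_TYPE arithmetic above does.  */
   set[regno / TOY_BITS_PER_WORD] &= ~(1UL << (regno % TOY_BITS_PER_WORD));
 }
 #endif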
2067 /* Come here (with failure set nonzero) if we can't get enough spill regs
2068 and we decide not to abort about it. */
2069 failed:
2071 reload_in_progress = 0;
2073 /* Now eliminate all pseudo regs by modifying them into
2074 their equivalent memory references.
2075 The REG-rtx's for the pseudos are modified in place,
2076 so all insns that used to refer to them now refer to memory.
2078 For a reg that has a reg_equiv_address, all those insns
2079 were changed by reloading so that no insns refer to it any longer;
2080 but the DECL_RTL of a variable decl may refer to it,
2081 and if so this causes the debugging info to mention the variable. */
2083 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
2085 rtx addr = 0;
2086 int in_struct = 0;
2087 if (reg_equiv_mem[i])
2089 addr = XEXP (reg_equiv_mem[i], 0);
2090 in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
2092 if (reg_equiv_address[i])
2093 addr = reg_equiv_address[i];
2094 if (addr)
2096 if (reg_renumber[i] < 0)
2098 rtx reg = regno_reg_rtx[i];
2099 XEXP (reg, 0) = addr;
2100 REG_USERVAR_P (reg) = 0;
2101 MEM_IN_STRUCT_P (reg) = in_struct;
2102 PUT_CODE (reg, MEM);
2104 else if (reg_equiv_mem[i])
2105 XEXP (reg_equiv_mem[i], 0) = addr;
2109 #ifdef PRESERVE_DEATH_INFO_REGNO_P
2110 /* Make a pass over all the insns and remove death notes for things that
2111 are no longer registers or no longer die in the insn (e.g., an input
2112 and output pseudo being tied). */
2114 for (insn = first; insn; insn = NEXT_INSN (insn))
2115 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2117 rtx note, next;
2119 for (note = REG_NOTES (insn); note; note = next)
2121 next = XEXP (note, 1);
2122 if (REG_NOTE_KIND (note) == REG_DEAD
2123 && (GET_CODE (XEXP (note, 0)) != REG
2124 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
2125 remove_note (insn, note);
2128 #endif
2130 /* Indicate that we no longer have known memory locations or constants. */
2131 reg_equiv_constant = 0;
2132 reg_equiv_memory_loc = 0;
2134 if (scratch_list)
2135 free (scratch_list);
2136 scratch_list = 0;
2137 if (scratch_block)
2138 free (scratch_block);
2139 scratch_block = 0;
2141 CLEAR_HARD_REG_SET (used_spill_regs);
2142 for (i = 0; i < n_spills; i++)
2143 SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
2145 return failure;
2148 /* Nonzero if, after spilling reg REGNO for non-groups,
2149 it will still be possible to find a group if we still need one. */
2151 static int
2152 possible_group_p (regno, max_groups)
2153 int regno;
2154 int *max_groups;
2156 int i;
2157 int class = (int) NO_REGS;
2159 for (i = 0; i < (int) N_REG_CLASSES; i++)
2160 if (max_groups[i] > 0)
2162 class = i;
2163 break;
2166 if (class == (int) NO_REGS)
2167 return 1;
2169 /* Consider each pair of consecutive registers. */
2170 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2172 /* Ignore pairs that include reg REGNO. */
2173 if (i == regno || i + 1 == regno)
2174 continue;
2176 /* Ignore pairs that are outside the class that needs the group.
2177 ??? Here we fail to handle the case where two different classes
2178 independently need groups. But this never happens with our
2179 current machine descriptions. */
2180 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2181 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2182 continue;
2184 /* A pair of consecutive regs we can still spill does the trick. */
2185 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2186 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2187 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2188 return 1;
2190 /* A pair of one already spilled and one we can spill does it
2191 provided the one already spilled is not otherwise reserved. */
2192 if (spill_reg_order[i] < 0
2193 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2194 && spill_reg_order[i + 1] >= 0
2195 && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
2196 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
2197 return 1;
2198 if (spill_reg_order[i + 1] < 0
2199 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2200 && spill_reg_order[i] >= 0
2201 && ! TEST_HARD_REG_BIT (counted_for_groups, i)
2202 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
2203 return 1;
2206 return 0;
2209 /* Count any groups of CLASS that can be formed from the registers recently
2210 spilled. */
2212 static void
2213 count_possible_groups (group_size, group_mode, max_groups, class)
2214 int *group_size;
2215 enum machine_mode *group_mode;
2216 int *max_groups;
2217 int class;
2219 HARD_REG_SET new;
2220 int i, j;
2222 /* Now find all consecutive groups of spilled registers
2223 and mark each group off against the need for such groups.
2224 But don't count them against ordinary need, yet. */
2226 if (group_size[class] == 0)
2227 return;
2229 CLEAR_HARD_REG_SET (new);
 2231 /* Make a mask of all the regs that are spill regs in class CLASS. */
2232 for (i = 0; i < n_spills; i++)
2233 if (TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
2234 && ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[i])
2235 && ! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
2236 SET_HARD_REG_BIT (new, spill_regs[i]);
2238 /* Find each consecutive group of them. */
2239 for (i = 0; i < FIRST_PSEUDO_REGISTER && max_groups[class] > 0; i++)
2240 if (TEST_HARD_REG_BIT (new, i)
2241 && i + group_size[class] <= FIRST_PSEUDO_REGISTER
2242 && HARD_REGNO_MODE_OK (i, group_mode[class]))
2244 for (j = 1; j < group_size[class]; j++)
2245 if (! TEST_HARD_REG_BIT (new, i + j))
2246 break;
2248 if (j == group_size[class])
2250 /* We found a group. Mark it off against this class's need for
2251 groups, and against each superclass too. */
2252 register enum reg_class *p;
2254 max_groups[class]--;
2255 p = reg_class_superclasses[class];
2256 while (*p != LIM_REG_CLASSES)
2258 if (group_size [(int) *p] <= group_size [class])
2259 max_groups[(int) *p]--;
2260 p++;
2263 /* Don't count these registers again. */
2264 for (j = 0; j < group_size[class]; j++)
2265 SET_HARD_REG_BIT (counted_for_groups, i + j);
2268 /* Skip to the last reg in this group. When i is incremented above,
2269 it will then point to the first reg of the next possible group. */
2270 i += j - 1;
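 /* A minimal standalone sketch of the scan above: counting runs of SIZE
    consecutive available registers in a boolean array, counting each run once
    and then skipping past it.  toy_count_runs is a hypothetical helper, not
    part of GCC, and the #if 0 guard keeps it out of the build.  */
 #if 0
 static int
 toy_count_runs (const char *avail, int nregs, int size)
 {
   int i, j, count = 0;

   for (i = 0; i + size <= nregs; i++)
     {
       for (j = 0; j < size; j++)
	 if (! avail[i + j])
	   break;
       if (j == size)
	 {
	   count++;
	   /* Skip to the last reg of this run, as the code above does.  */
	   i += size - 1;
	 }
     }
   return count;
 }
 #endif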
2274 /* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2275 another mode that needs to be reloaded for the same register class CLASS.
2276 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2277 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2279 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2280 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2281 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2282 causes unnecessary failures on machines requiring alignment of register
2283 groups when the two modes are different sizes, because the larger mode has
2284 more strict alignment rules than the smaller mode. */
2286 static int
2287 modes_equiv_for_class_p (allocate_mode, other_mode, class)
2288 enum machine_mode allocate_mode, other_mode;
2289 enum reg_class class;
2291 register int regno;
2292 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2294 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2295 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2296 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2297 return 0;
2299 return 1;
2302 /* Handle the failure to find a register to spill.
2303 INSN should be one of the insns which needed this particular spill reg. */
2305 static void
2306 spill_failure (insn)
2307 rtx insn;
2309 if (asm_noperands (PATTERN (insn)) >= 0)
2310 error_for_asm (insn, "`asm' needs too many reloads");
2311 else
2312 fatal_insn ("Unable to find a register to spill.", insn);
2315 /* Add a new register to the tables of available spill-registers
2316 (as well as spilling all pseudos allocated to the register).
2317 I is the index of this register in potential_reload_regs.
2318 CLASS is the regclass whose need is being satisfied.
2319 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
2320 so that this register can count off against them.
2321 MAX_NONGROUPS is 0 if this register is part of a group.
2322 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
2324 static int
2325 new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
2326 int i;
2327 int class;
2328 int *max_needs;
2329 int *max_nongroups;
2330 int global;
2331 FILE *dumpfile;
2333 register enum reg_class *p;
2334 int val;
2335 int regno = potential_reload_regs[i];
2337 if (i >= FIRST_PSEUDO_REGISTER)
2338 abort (); /* Caller failed to find any register. */
2340 if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
2341 fatal ("fixed or forbidden register was spilled.\n\
2342 This may be due to a compiler bug or to impossible asm\n\
2343 statements or clauses.");
2345 /* Make reg REGNO an additional reload reg. */
2347 potential_reload_regs[i] = -1;
2348 spill_regs[n_spills] = regno;
2349 spill_reg_order[regno] = n_spills;
2350 if (dumpfile)
2351 fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);
2353 /* Clear off the needs we just satisfied. */
2355 max_needs[class]--;
2356 p = reg_class_superclasses[class];
2357 while (*p != LIM_REG_CLASSES)
2358 max_needs[(int) *p++]--;
2360 if (max_nongroups && max_nongroups[class] > 0)
2362 SET_HARD_REG_BIT (counted_for_nongroups, regno);
2363 max_nongroups[class]--;
2364 p = reg_class_superclasses[class];
2365 while (*p != LIM_REG_CLASSES)
2366 max_nongroups[(int) *p++]--;
2369 /* Spill every pseudo reg that was allocated to this reg
2370 or to something that overlaps this reg. */
2372 val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);
2374 /* If there are some registers still to eliminate and this register
2375 wasn't ever used before, additional stack space may have to be
2376 allocated to store this register. Thus, we may have changed the offset
2377 between the stack and frame pointers, so mark that something has changed.
2378 (If new pseudos were spilled, thus requiring more space, VAL would have
2379 been set non-zero by the call to spill_hard_reg above since additional
 2380 reloads may be needed in that case.)
2382 One might think that we need only set VAL to 1 if this is a call-used
2383 register. However, the set of registers that must be saved by the
2384 prologue is not identical to the call-used set. For example, the
2385 register used by the call insn for the return PC is a call-used register,
2386 but must be saved by the prologue. */
2387 if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
2388 val = 1;
2390 regs_ever_live[spill_regs[n_spills]] = 1;
2391 n_spills++;
2393 return val;
 2396 /* Delete an unneeded INSN and any previous insns whose sole purpose is loading
2397 data that is dead in INSN. */
2399 static void
2400 delete_dead_insn (insn)
2401 rtx insn;
2403 rtx prev = prev_real_insn (insn);
2404 rtx prev_dest;
2406 /* If the previous insn sets a register that dies in our insn, delete it
2407 too. */
2408 if (prev && GET_CODE (PATTERN (prev)) == SET
2409 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2410 && reg_mentioned_p (prev_dest, PATTERN (insn))
2411 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
2412 delete_dead_insn (prev);
2414 PUT_CODE (insn, NOTE);
2415 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2416 NOTE_SOURCE_FILE (insn) = 0;
2419 /* Modify the home of pseudo-reg I.
2420 The new home is present in reg_renumber[I].
2422 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2423 or it may be -1, meaning there is none or it is not relevant.
2424 This is used so that all pseudos spilled from a given hard reg
2425 can share one stack slot. */
2427 static void
2428 alter_reg (i, from_reg)
2429 register int i;
2430 int from_reg;
2432 /* When outputting an inline function, this can happen
2433 for a reg that isn't actually used. */
2434 if (regno_reg_rtx[i] == 0)
2435 return;
2437 /* If the reg got changed to a MEM at rtl-generation time,
2438 ignore it. */
2439 if (GET_CODE (regno_reg_rtx[i]) != REG)
2440 return;
2442 /* Modify the reg-rtx to contain the new hard reg
2443 number or else to contain its pseudo reg number. */
2444 REGNO (regno_reg_rtx[i])
2445 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
2447 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2448 allocate a stack slot for it. */
2450 if (reg_renumber[i] < 0
2451 && reg_n_refs[i] > 0
2452 && reg_equiv_constant[i] == 0
2453 && reg_equiv_memory_loc[i] == 0)
2455 register rtx x;
2456 int inherent_size = PSEUDO_REGNO_BYTES (i);
2457 int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2458 int adjust = 0;
2460 /* Each pseudo reg has an inherent size which comes from its own mode,
2461 and a total size which provides room for paradoxical subregs
2462 which refer to the pseudo reg in wider modes.
2464 We can use a slot already allocated if it provides both
2465 enough inherent space and enough total space.
2466 Otherwise, we allocate a new slot, making sure that it has no less
 2467 inherent space, and no less total space, than the previous slot. */
2468 if (from_reg == -1)
2470 /* No known place to spill from => no slot to reuse. */
2471 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size,
2472 inherent_size == total_size ? 0 : -1);
2473 if (BYTES_BIG_ENDIAN)
2474 /* Cancel the big-endian correction done in assign_stack_local.
2475 Get the address of the beginning of the slot.
2476 This is so we can do a big-endian correction unconditionally
2477 below. */
2478 adjust = inherent_size - total_size;
2480 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2482 /* Reuse a stack slot if possible. */
2483 else if (spill_stack_slot[from_reg] != 0
2484 && spill_stack_slot_width[from_reg] >= total_size
2485 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2486 >= inherent_size))
2487 x = spill_stack_slot[from_reg];
2488 /* Allocate a bigger slot. */
2489 else
2491 /* Compute maximum size needed, both for inherent size
2492 and for total size. */
2493 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2494 rtx stack_slot;
2495 if (spill_stack_slot[from_reg])
2497 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2498 > inherent_size)
2499 mode = GET_MODE (spill_stack_slot[from_reg]);
2500 if (spill_stack_slot_width[from_reg] > total_size)
2501 total_size = spill_stack_slot_width[from_reg];
2503 /* Make a slot with that size. */
2504 x = assign_stack_local (mode, total_size,
2505 inherent_size == total_size ? 0 : -1);
2506 stack_slot = x;
2507 if (BYTES_BIG_ENDIAN)
2509 /* Cancel the big-endian correction done in assign_stack_local.
2510 Get the address of the beginning of the slot.
2511 This is so we can do a big-endian correction unconditionally
2512 below. */
2513 adjust = GET_MODE_SIZE (mode) - total_size;
2514 if (adjust)
2515 stack_slot = gen_rtx (MEM, mode_for_size (total_size
2516 * BITS_PER_UNIT,
2517 MODE_INT, 1),
2518 plus_constant (XEXP (x, 0), adjust));
2520 spill_stack_slot[from_reg] = stack_slot;
2521 spill_stack_slot_width[from_reg] = total_size;
2524 /* On a big endian machine, the "address" of the slot
2525 is the address of the low part that fits its inherent mode. */
2526 if (BYTES_BIG_ENDIAN && inherent_size < total_size)
2527 adjust += (total_size - inherent_size);
2529 /* If we have any adjustment to make, or if the stack slot is the
2530 wrong mode, make a new stack slot. */
2531 if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
2533 x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]),
2534 plus_constant (XEXP (x, 0), adjust));
2535 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2538 /* Save the stack slot for later. */
2539 reg_equiv_memory_loc[i] = x;
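 /* A minimal worked sketch of the slot-reuse test used above, with the sizes
    reduced to plain ints; toy_can_reuse_slot is a hypothetical helper, not
    part of GCC, and the #if 0 guard keeps it out of the build.  For example,
    a 4-byte (SImode) pseudo that is also referenced through an 8-byte
    paradoxical SUBREG has inherent_size == 4 and total_size == 8, so an
    existing slot qualifies only if it is at least 8 bytes wide and its mode
    covers at least 4 bytes.  */
 #if 0
 static int
 toy_can_reuse_slot (int slot_width, int slot_mode_size,
		     int inherent_size, int total_size)
 {
   return slot_width >= total_size && slot_mode_size >= inherent_size;
 }
 #endif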
2543 /* Mark the slots in regs_ever_live for the hard regs
2544 used by pseudo-reg number REGNO. */
2546 void
2547 mark_home_live (regno)
2548 int regno;
2550 register int i, lim;
2551 i = reg_renumber[regno];
2552 if (i < 0)
2553 return;
2554 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2555 while (i < lim)
2556 regs_ever_live[i++] = 1;
2559 /* Mark the registers used in SCRATCH as being live. */
2561 static void
2562 mark_scratch_live (scratch)
2563 rtx scratch;
2565 register int i;
2566 int regno = REGNO (scratch);
2567 int lim = regno + HARD_REGNO_NREGS (regno, GET_MODE (scratch));
2569 for (i = regno; i < lim; i++)
2570 regs_ever_live[i] = 1;
2573 /* This function handles the tracking of elimination offsets around branches.
2575 X is a piece of RTL being scanned.
2577 INSN is the insn that it came from, if any.
2579 INITIAL_P is non-zero if we are to set the offset to be the initial
2580 offset and zero if we are setting the offset of the label to be the
2581 current offset. */
2583 static void
2584 set_label_offsets (x, insn, initial_p)
2585 rtx x;
2586 rtx insn;
2587 int initial_p;
2589 enum rtx_code code = GET_CODE (x);
2590 rtx tem;
2591 int i;
2592 struct elim_table *p;
2594 switch (code)
2596 case LABEL_REF:
2597 if (LABEL_REF_NONLOCAL_P (x))
2598 return;
2600 x = XEXP (x, 0);
2602 /* ... fall through ... */
2604 case CODE_LABEL:
2605 /* If we know nothing about this label, set the desired offsets. Note
2606 that this sets the offset at a label to be the offset before a label
2607 if we don't know anything about the label. This is not correct for
2608 the label after a BARRIER, but is the best guess we can make. If
2609 we guessed wrong, we will suppress an elimination that might have
2610 been possible had we been able to guess correctly. */
2612 if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
2614 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2615 offsets_at[CODE_LABEL_NUMBER (x)][i]
2616 = (initial_p ? reg_eliminate[i].initial_offset
2617 : reg_eliminate[i].offset);
2618 offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
2621 /* Otherwise, if this is the definition of a label and it is
2622 preceded by a BARRIER, set our offsets to the known offset of
2623 that label. */
2625 else if (x == insn
2626 && (tem = prev_nonnote_insn (insn)) != 0
2627 && GET_CODE (tem) == BARRIER)
2629 num_not_at_initial_offset = 0;
2630 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2632 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
2633 = offsets_at[CODE_LABEL_NUMBER (x)][i];
2634 if (reg_eliminate[i].can_eliminate
2635 && (reg_eliminate[i].offset
2636 != reg_eliminate[i].initial_offset))
2637 num_not_at_initial_offset++;
2641 else
2642 /* If neither of the above cases is true, compare each offset
2643 with those previously recorded and suppress any eliminations
2644 where the offsets disagree. */
2646 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2647 if (offsets_at[CODE_LABEL_NUMBER (x)][i]
2648 != (initial_p ? reg_eliminate[i].initial_offset
2649 : reg_eliminate[i].offset))
2650 reg_eliminate[i].can_eliminate = 0;
2652 return;
2654 case JUMP_INSN:
2655 set_label_offsets (PATTERN (insn), insn, initial_p);
2657 /* ... fall through ... */
2659 case INSN:
2660 case CALL_INSN:
2661 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2662 and hence must have all eliminations at their initial offsets. */
2663 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2664 if (REG_NOTE_KIND (tem) == REG_LABEL)
2665 set_label_offsets (XEXP (tem, 0), insn, 1);
2666 return;
2668 case ADDR_VEC:
2669 case ADDR_DIFF_VEC:
2670 /* Each of the labels in the address vector must be at their initial
 2671 offsets. We want the first field for ADDR_VEC and the second
2672 field for ADDR_DIFF_VEC. */
2674 for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2675 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2676 insn, initial_p);
2677 return;
2679 case SET:
2680 /* We only care about setting PC. If the source is not RETURN,
2681 IF_THEN_ELSE, or a label, disable any eliminations not at
2682 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2683 isn't one of those possibilities. For branches to a label,
2684 call ourselves recursively.
2686 Note that this can disable elimination unnecessarily when we have
2687 a non-local goto since it will look like a non-constant jump to
2688 someplace in the current function. This isn't a significant
 2689 problem since such jumps normally happen when all elimination
2690 pairs are back to their initial offsets. */
2692 if (SET_DEST (x) != pc_rtx)
2693 return;
2695 switch (GET_CODE (SET_SRC (x)))
2697 case PC:
2698 case RETURN:
2699 return;
2701 case LABEL_REF:
2702 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
2703 return;
2705 case IF_THEN_ELSE:
2706 tem = XEXP (SET_SRC (x), 1);
2707 if (GET_CODE (tem) == LABEL_REF)
2708 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2709 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2710 break;
2712 tem = XEXP (SET_SRC (x), 2);
2713 if (GET_CODE (tem) == LABEL_REF)
2714 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2715 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2716 break;
2717 return;
2720 /* If we reach here, all eliminations must be at their initial
2721 offset because we are doing a jump to a variable address. */
2722 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2723 if (p->offset != p->initial_offset)
2724 p->can_eliminate = 0;
 2728 /* Used for communication between the next two functions to properly share
2729 the vector for an ASM_OPERANDS. */
2731 static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2733 /* Scan X and replace any eliminable registers (such as fp) with a
2734 replacement (such as sp), plus an offset.
2736 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2737 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2738 MEM, we are allowed to replace a sum of a register and the constant zero
2739 with the register, which we cannot do outside a MEM. In addition, we need
2740 to record the fact that a register is referenced outside a MEM.
2742 If INSN is an insn, it is the insn containing X. If we replace a REG
2743 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2744 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
 2745 the REG is being modified.
2747 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2748 That's used when we eliminate in expressions stored in notes.
2749 This means, do not set ref_outside_mem even if the reference
2750 is outside of MEMs.
2752 If we see a modification to a register we know about, take the
2753 appropriate action (see case SET, below).
 2755 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
2756 replacements done assuming all offsets are at their initial values. If
2757 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2758 encounter, return the actual location so that find_reloads will do
2759 the proper thing. */
2762 eliminate_regs (x, mem_mode, insn, storing)
2763 rtx x;
2764 enum machine_mode mem_mode;
2765 rtx insn;
2766 int storing;
2768 enum rtx_code code = GET_CODE (x);
2769 struct elim_table *ep;
2770 int regno;
2771 rtx new;
2772 int i, j;
2773 char *fmt;
2774 int copied = 0;
2776 switch (code)
2778 case CONST_INT:
2779 case CONST_DOUBLE:
2780 case CONST:
2781 case SYMBOL_REF:
2782 case CODE_LABEL:
2783 case PC:
2784 case CC0:
2785 case ASM_INPUT:
2786 case ADDR_VEC:
2787 case ADDR_DIFF_VEC:
2788 case RETURN:
2789 return x;
2791 case REG:
2792 regno = REGNO (x);
2794 /* First handle the case where we encounter a bare register that
2795 is eliminable. Replace it with a PLUS. */
2796 if (regno < FIRST_PSEUDO_REGISTER)
2798 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2799 ep++)
2800 if (ep->from_rtx == x && ep->can_eliminate)
2802 if (! mem_mode
2803 /* Refs inside notes don't count for this purpose. */
2804 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2805 || GET_CODE (insn) == INSN_LIST)))
2806 ep->ref_outside_mem = 1;
2807 return plus_constant (ep->to_rtx, ep->previous_offset);
2811 else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
2812 && (reg_equiv_address[regno] || num_not_at_initial_offset))
2814 /* In this case, find_reloads would attempt to either use an
2815 incorrect address (if something is not at its initial offset)
 2816 or substitute a replaced address into an insn (which loses
2817 if the offset is changed by some later action). So we simply
2818 return the replaced stack slot (assuming it is changed by
2819 elimination) and ignore the fact that this is actually a
2820 reference to the pseudo. Ensure we make a copy of the
2821 address in case it is shared. */
2822 new = eliminate_regs (reg_equiv_memory_loc[regno],
2823 mem_mode, insn, 0);
2824 if (new != reg_equiv_memory_loc[regno])
2826 cannot_omit_stores[regno] = 1;
2827 return copy_rtx (new);
2830 return x;
2832 case PLUS:
2833 /* If this is the sum of an eliminable register and a constant, rework
2834 the sum. */
2835 if (GET_CODE (XEXP (x, 0)) == REG
2836 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2837 && CONSTANT_P (XEXP (x, 1)))
2839 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2840 ep++)
2841 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2843 if (! mem_mode
2844 /* Refs inside notes don't count for this purpose. */
2845 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2846 || GET_CODE (insn) == INSN_LIST)))
2847 ep->ref_outside_mem = 1;
2849 /* The only time we want to replace a PLUS with a REG (this
2850 occurs when the constant operand of the PLUS is the negative
2851 of the offset) is when we are inside a MEM. We won't want
2852 to do so at other times because that would change the
2853 structure of the insn in a way that reload can't handle.
2854 We special-case the commonest situation in
2855 eliminate_regs_in_insn, so just replace a PLUS with a
2856 PLUS here, unless inside a MEM. */
2857 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
2858 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2859 return ep->to_rtx;
2860 else
2861 return gen_rtx (PLUS, Pmode, ep->to_rtx,
2862 plus_constant (XEXP (x, 1),
2863 ep->previous_offset));
2866 /* If the register is not eliminable, we are done since the other
2867 operand is a constant. */
2868 return x;
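 /* A minimal worked sketch of the offset arithmetic in the case above,
    assuming an fp -> sp elimination whose offset currently in force is 16:
    inside a MEM, (plus fp -16) collapses to sp, and (plus fp 8) becomes
    (plus sp 24).  toy_eliminated_constant is a hypothetical helper, not part
    of GCC, and the #if 0 guard keeps it out of the build.  */
 #if 0
 static long
 toy_eliminated_constant (long orig_const, long previous_offset)
 {
   /* The constant of the PLUS is simply shifted by the elimination offset;
      when the result would be zero (and we are inside a MEM) the code above
      returns the bare replacement register instead.  */
   return orig_const + previous_offset;
 }
 #endif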
2871 /* If this is part of an address, we want to bring any constant to the
2872 outermost PLUS. We will do this by doing register replacement in
2873 our operands and seeing if a constant shows up in one of them.
2875 We assume here this is part of an address (or a "load address" insn)
2876 since an eliminable register is not likely to appear in any other
2877 context.
2879 If we have (plus (eliminable) (reg)), we want to produce
 2880 (plus (plus (replacement) (reg)) (const)). If this was part of a
2881 normal add insn, (plus (replacement) (reg)) will be pushed as a
2882 reload. This is the desired action. */
2885 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn, 0);
2886 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn, 0);
2888 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2890 /* If one side is a PLUS and the other side is a pseudo that
2891 didn't get a hard register but has a reg_equiv_constant,
2892 we must replace the constant here since it may no longer
2893 be in the position of any operand. */
2894 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2895 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2896 && reg_renumber[REGNO (new1)] < 0
2897 && reg_equiv_constant != 0
2898 && reg_equiv_constant[REGNO (new1)] != 0)
2899 new1 = reg_equiv_constant[REGNO (new1)];
2900 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2901 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2902 && reg_renumber[REGNO (new0)] < 0
2903 && reg_equiv_constant[REGNO (new0)] != 0)
2904 new0 = reg_equiv_constant[REGNO (new0)];
2906 new = form_sum (new0, new1);
2908 /* As above, if we are not inside a MEM we do not want to
2909 turn a PLUS into something else. We might try to do so here
2910 for an addition of 0 if we aren't optimizing. */
2911 if (! mem_mode && GET_CODE (new) != PLUS)
2912 return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx);
2913 else
2914 return new;
2917 return x;
2919 case MULT:
2920 /* If this is the product of an eliminable register and a
 2921 constant, apply the distributive law and move the constant out
2922 so that we have (plus (mult ..) ..). This is needed in order
2923 to keep load-address insns valid. This case is pathological.
2924 We ignore the possibility of overflow here. */
2925 if (GET_CODE (XEXP (x, 0)) == REG
2926 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2927 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2928 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2929 ep++)
2930 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2932 if (! mem_mode
2933 /* Refs inside notes don't count for this purpose. */
2934 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2935 || GET_CODE (insn) == INSN_LIST)))
2936 ep->ref_outside_mem = 1;
2938 return
2939 plus_constant (gen_rtx (MULT, Pmode, ep->to_rtx, XEXP (x, 1)),
2940 ep->previous_offset * INTVAL (XEXP (x, 1)));
2943 /* ... fall through ... */
2945 case CALL:
2946 case COMPARE:
2947 case MINUS:
2948 case DIV: case UDIV:
2949 case MOD: case UMOD:
2950 case AND: case IOR: case XOR:
2951 case ROTATERT: case ROTATE:
2952 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2953 case NE: case EQ:
2954 case GE: case GT: case GEU: case GTU:
2955 case LE: case LT: case LEU: case LTU:
2957 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn, 0);
2958 rtx new1
2959 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn, 0) : 0;
2961 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2962 return gen_rtx (code, GET_MODE (x), new0, new1);
2964 return x;
2966 case EXPR_LIST:
2967 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2968 if (XEXP (x, 0))
2970 new = eliminate_regs (XEXP (x, 0), mem_mode, insn, 0);
2971 if (new != XEXP (x, 0))
2972 x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1));
2975 /* ... fall through ... */
2977 case INSN_LIST:
2978 /* Now do eliminations in the rest of the chain. If this was
2979 an EXPR_LIST, this might result in allocating more memory than is
2980 strictly needed, but it simplifies the code. */
2981 if (XEXP (x, 1))
2983 new = eliminate_regs (XEXP (x, 1), mem_mode, insn, 0);
2984 if (new != XEXP (x, 1))
2985 return gen_rtx (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
2987 return x;
2989 case PRE_INC:
2990 case POST_INC:
2991 case PRE_DEC:
2992 case POST_DEC:
2993 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2994 if (ep->to_rtx == XEXP (x, 0))
2996 int size = GET_MODE_SIZE (mem_mode);
2998 /* If more bytes than MEM_MODE are pushed, account for them. */
2999 #ifdef PUSH_ROUNDING
3000 if (ep->to_rtx == stack_pointer_rtx)
3001 size = PUSH_ROUNDING (size);
3002 #endif
3003 if (code == PRE_DEC || code == POST_DEC)
3004 ep->offset += size;
3005 else
3006 ep->offset -= size;
3009 /* Fall through to generic unary operation case. */
3010 case STRICT_LOW_PART:
3011 case NEG: case NOT:
3012 case SIGN_EXTEND: case ZERO_EXTEND:
3013 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3014 case FLOAT: case FIX:
3015 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3016 case ABS:
3017 case SQRT:
3018 case FFS:
3019 new = eliminate_regs (XEXP (x, 0), mem_mode, insn, 0);
3020 if (new != XEXP (x, 0))
3021 return gen_rtx (code, GET_MODE (x), new);
3022 return x;
3024 case SUBREG:
3025 /* Similar to above processing, but preserve SUBREG_WORD.
3026 Convert (subreg (mem)) to (mem) if not paradoxical.
3027 Also, if we have a non-paradoxical (subreg (pseudo)) and the
3028 pseudo didn't get a hard reg, we must replace this with the
3029 eliminated version of the memory location because push_reloads
3030 may do the replacement in certain circumstances. */
3031 if (GET_CODE (SUBREG_REG (x)) == REG
3032 && (GET_MODE_SIZE (GET_MODE (x))
3033 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3034 && reg_equiv_memory_loc != 0
3035 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
3037 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
3038 mem_mode, insn, 0);
3040 /* If we didn't change anything, we must retain the pseudo. */
3041 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
3042 new = SUBREG_REG (x);
3043 else
3045 /* Otherwise, ensure NEW isn't shared in case we have to reload
3046 it. */
3047 new = copy_rtx (new);
3049 /* In this case, we must show that the pseudo is used in this
3050 insn so that delete_output_reload will do the right thing. */
3051 if (insn != 0 && GET_CODE (insn) != EXPR_LIST
3052 && GET_CODE (insn) != INSN_LIST)
3053 emit_insn_before (gen_rtx (USE, VOIDmode, SUBREG_REG (x)),
3054 insn);
3057 else
3058 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn, 0);
3060 if (new != XEXP (x, 0))
3062 int x_size = GET_MODE_SIZE (GET_MODE (x));
3063 int new_size = GET_MODE_SIZE (GET_MODE (new));
3065 /* When asked to spill a partial word subreg, we need to go
3066 ahead and spill the whole thing against the possibility
3067 that we reload the whole reg and find garbage at the top. */
3068 if (storing
3069 && GET_CODE (new) == MEM
3070 && x_size < new_size
3071 && ((x_size + UNITS_PER_WORD-1) / UNITS_PER_WORD
3072 == (new_size + UNITS_PER_WORD-1) / UNITS_PER_WORD))
3073 return new;
3074 else if (GET_CODE (new) == MEM
3075 && x_size <= new_size
3076 #ifdef LOAD_EXTEND_OP
3077 /* On these machines we will be reloading what is
3078 inside the SUBREG if it originally was a pseudo and
3079 the inner and outer modes are both a word or
3080 smaller. So leave the SUBREG then. */
3081 && ! (GET_CODE (SUBREG_REG (x)) == REG
3082 && x_size <= UNITS_PER_WORD
3083 && new_size <= UNITS_PER_WORD
3084 && x_size > new_size
3085 && INTEGRAL_MODE_P (GET_MODE (new))
3086 && LOAD_EXTEND_OP (GET_MODE (new)) != NIL)
3087 #endif
3090 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
3091 enum machine_mode mode = GET_MODE (x);
3093 if (BYTES_BIG_ENDIAN)
3094 offset += (MIN (UNITS_PER_WORD,
3095 GET_MODE_SIZE (GET_MODE (new)))
3096 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
3098 PUT_MODE (new, mode);
3099 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
3100 return new;
3102 else
3103 return gen_rtx (SUBREG, GET_MODE (x), new, SUBREG_WORD (x));
3106 return x;
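 /* A minimal standalone sketch of the byte offset applied to the MEM address
    in the SUBREG case above, assuming plain int sizes;
    toy_subreg_byte_offset is a hypothetical helper, not part of GCC, and the
    #if 0 guard keeps it out of the build.  SUBREG_WORD is scaled to bytes,
    and on a big-endian target the address is then moved forward to the
    low-order part that the narrower outer mode occupies.  */
 #if 0
 static int
 toy_subreg_byte_offset (int subreg_word, int units_per_word,
			 int inner_size, int outer_size, int big_endian)
 {
   int offset = subreg_word * units_per_word;

   if (big_endian)
     offset += ((inner_size < units_per_word ? inner_size : units_per_word)
		- (outer_size < units_per_word ? outer_size : units_per_word));
   return offset;
 }
 #endif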
3108 case USE:
3109 /* If using a register that is the source of an eliminate we still
3110 think can be performed, note it cannot be performed since we don't
3111 know how this register is used. */
3112 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3113 if (ep->from_rtx == XEXP (x, 0))
3114 ep->can_eliminate = 0;
3116 new = eliminate_regs (XEXP (x, 0), mem_mode, insn, 0);
3117 if (new != XEXP (x, 0))
3118 return gen_rtx (code, GET_MODE (x), new);
3119 return x;
3121 case CLOBBER:
3122 /* If clobbering a register that is the replacement register for an
3123 elimination we still think can be performed, note that it cannot
3124 be performed. Otherwise, we need not be concerned about it. */
3125 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3126 if (ep->to_rtx == XEXP (x, 0))
3127 ep->can_eliminate = 0;
3129 new = eliminate_regs (XEXP (x, 0), mem_mode, insn, 0);
3130 if (new != XEXP (x, 0))
3131 return gen_rtx (code, GET_MODE (x), new);
3132 return x;
3134 case ASM_OPERANDS:
3136 rtx *temp_vec;
3137 /* Properly handle sharing input and constraint vectors. */
3138 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
3140 /* When we come to a new vector not seen before,
3141 scan all its elements; keep the old vector if none
3142 of them changes; otherwise, make a copy. */
3143 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
3144 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
3145 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3146 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
3147 mem_mode, insn, 0);
3149 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3150 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
3151 break;
3153 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
3154 new_asm_operands_vec = old_asm_operands_vec;
3155 else
3156 new_asm_operands_vec
3157 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
3160 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
3161 if (new_asm_operands_vec == old_asm_operands_vec)
3162 return x;
3164 new = gen_rtx (ASM_OPERANDS, VOIDmode, ASM_OPERANDS_TEMPLATE (x),
3165 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
3166 ASM_OPERANDS_OUTPUT_IDX (x), new_asm_operands_vec,
3167 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
3168 ASM_OPERANDS_SOURCE_FILE (x),
3169 ASM_OPERANDS_SOURCE_LINE (x));
3170 new->volatil = x->volatil;
3171 return new;
3174 case SET:
3175 /* Check for setting a register that we know about. */
3176 if (GET_CODE (SET_DEST (x)) == REG)
3178 /* See if this is setting the replacement register for an
3179 elimination.
3181 If DEST is the hard frame pointer, we do nothing because we
3182 assume that all assignments to the frame pointer are for
3183 non-local gotos and are being done at a time when they are valid
3184 and do not disturb anything else. Some machines want to
3185 eliminate a fake argument pointer (or even a fake frame pointer)
3186 with either the real frame or the stack pointer. Assignments to
3187 the hard frame pointer must not prevent this elimination. */
3189 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3190 ep++)
3191 if (ep->to_rtx == SET_DEST (x)
3192 && SET_DEST (x) != hard_frame_pointer_rtx)
3194 /* If it is being incremented, adjust the offset. Otherwise,
3195 this elimination can't be done. */
3196 rtx src = SET_SRC (x);
3198 if (GET_CODE (src) == PLUS
3199 && XEXP (src, 0) == SET_DEST (x)
3200 && GET_CODE (XEXP (src, 1)) == CONST_INT)
3201 ep->offset -= INTVAL (XEXP (src, 1));
3202 else
3203 ep->can_eliminate = 0;
 3206 /* Now check to see if we are assigning to a register that can be
3207 eliminated. If so, it must be as part of a PARALLEL, since we
3208 will not have been called if this is a single SET. So indicate
3209 that we can no longer eliminate this reg. */
3210 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3211 ep++)
3212 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
3213 ep->can_eliminate = 0;
3216 /* Now avoid the loop below in this common case. */
3218 rtx new0 = eliminate_regs (SET_DEST (x), 0, insn, 1);
3219 rtx new1 = eliminate_regs (SET_SRC (x), 0, insn, 0);
3221 /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
3222 write a CLOBBER insn. */
3223 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
3224 && insn != 0 && GET_CODE (insn) != EXPR_LIST
3225 && GET_CODE (insn) != INSN_LIST)
3226 emit_insn_after (gen_rtx (CLOBBER, VOIDmode, SET_DEST (x)), insn);
3228 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
3229 return gen_rtx (SET, VOIDmode, new0, new1);
3232 return x;
3234 case MEM:
3235 /* Our only special processing is to pass the mode of the MEM to our
3236 recursive call and copy the flags. While we are here, handle this
3237 case more efficiently. */
3238 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn, 0);
3239 if (new != XEXP (x, 0))
3241 new = gen_rtx (MEM, GET_MODE (x), new);
3242 new->volatil = x->volatil;
3243 new->unchanging = x->unchanging;
3244 new->in_struct = x->in_struct;
3245 return new;
3247 else
3248 return x;
3251 /* Process each of our operands recursively. If any have changed, make a
3252 copy of the rtx. */
3253 fmt = GET_RTX_FORMAT (code);
3254 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3256 if (*fmt == 'e')
3258 new = eliminate_regs (XEXP (x, i), mem_mode, insn, 0);
3259 if (new != XEXP (x, i) && ! copied)
3261 rtx new_x = rtx_alloc (code);
3262 bcopy ((char *) x, (char *) new_x,
3263 (sizeof (*new_x) - sizeof (new_x->fld)
3264 + sizeof (new_x->fld[0]) * GET_RTX_LENGTH (code)));
3265 x = new_x;
3266 copied = 1;
3268 XEXP (x, i) = new;
3270 else if (*fmt == 'E')
3272 int copied_vec = 0;
3273 for (j = 0; j < XVECLEN (x, i); j++)
3275 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn, 0);
3276 if (new != XVECEXP (x, i, j) && ! copied_vec)
3278 rtvec new_v = gen_rtvec_vv (XVECLEN (x, i),
3279 XVEC (x, i)->elem);
3280 if (! copied)
3282 rtx new_x = rtx_alloc (code);
3283 bcopy ((char *) x, (char *) new_x,
3284 (sizeof (*new_x) - sizeof (new_x->fld)
3285 + (sizeof (new_x->fld[0])
3286 * GET_RTX_LENGTH (code))));
3287 x = new_x;
3288 copied = 1;
3290 XVEC (x, i) = new_v;
3291 copied_vec = 1;
3293 XVECEXP (x, i, j) = new;
3298 return x;
3301 /* Scan INSN and eliminate all eliminable registers in it.
3303 If REPLACE is nonzero, do the replacement destructively. Also
3304 delete the insn as dead if it is setting an eliminable register.
3306 If REPLACE is zero, do all our allocations in reload_obstack.
3308 If no eliminations were done and this insn doesn't require any elimination
3309 processing (these are not identical conditions: it might be updating sp,
3310 but not referencing fp; this needs to be seen during reload_as_needed so
3311 that the offset between fp and sp can be taken into consideration), zero
3312 is returned. Otherwise, 1 is returned. */
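/* A sketch of the second case mentioned above (register names are
   symbolic): an insn such as

	(set (reg sp) (plus (reg sp) (const_int -8)))

   references no eliminable register, but it changes the recorded
   offset of every elimination whose target is sp, so the loop near
   the end of this function notices previous_offset != offset and the
   function still returns 1.  */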
3314 static int
3315 eliminate_regs_in_insn (insn, replace)
3316 rtx insn;
3317 int replace;
3319 rtx old_body = PATTERN (insn);
3320 rtx old_set = single_set (insn);
3321 rtx new_body;
3322 int val = 0;
3323 struct elim_table *ep;
3325 if (! replace)
3326 push_obstacks (&reload_obstack, &reload_obstack);
3328 if (old_set != 0 && GET_CODE (SET_DEST (old_set)) == REG
3329 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3331 /* Check for setting an eliminable register. */
3332 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3333 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3335 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3336 /* If this is setting the frame pointer register to the
3337 hardware frame pointer register and this is an elimination
3338 that will be done (tested above), this insn is really
3339 adjusting the frame pointer downward to compensate for
3340 the adjustment done before a nonlocal goto. */
3341 if (ep->from == FRAME_POINTER_REGNUM
3342 && ep->to == HARD_FRAME_POINTER_REGNUM)
3344 rtx src = SET_SRC (old_set);
3345 int offset, ok = 0;
3346 rtx prev_insn, prev_set;
3348 if (src == ep->to_rtx)
3349 offset = 0, ok = 1;
3350 else if (GET_CODE (src) == PLUS
3351 && GET_CODE (XEXP (src, 0)) == CONST_INT)
3352 offset = INTVAL (XEXP (src, 0)), ok = 1;
3353 else if ((prev_insn = prev_nonnote_insn (insn)) != 0
3354 && (prev_set = single_set (prev_insn)) != 0
3355 && rtx_equal_p (SET_DEST (prev_set), src))
3357 src = SET_SRC (prev_set);
3358 if (src == ep->to_rtx)
3359 offset = 0, ok = 1;
3360 else if (GET_CODE (src) == PLUS
3361 && GET_CODE (XEXP (src, 0)) == CONST_INT
3362 && XEXP (src, 1) == ep->to_rtx)
3363 offset = INTVAL (XEXP (src, 0)), ok = 1;
3364 else if (GET_CODE (src) == PLUS
3365 && GET_CODE (XEXP (src, 1)) == CONST_INT
3366 && XEXP (src, 0) == ep->to_rtx)
3367 offset = INTVAL (XEXP (src, 1)), ok = 1;
3370 if (ok)
3372 if (replace)
3374 rtx src
3375 = plus_constant (ep->to_rtx, offset - ep->offset);
3377 /* First see if this insn remains valid when we
3378 make the change. If not, keep the INSN_CODE
3379 the same and let reload fix it up. */
3380 validate_change (insn, &SET_SRC (old_set), src, 1);
3381 validate_change (insn, &SET_DEST (old_set),
3382 ep->to_rtx, 1);
3383 if (! apply_change_group ())
3385 SET_SRC (old_set) = src;
3386 SET_DEST (old_set) = ep->to_rtx;
3390 val = 1;
3391 goto done;
3394 #endif
3396 /* In this case this insn isn't serving a useful purpose. We
3397 will delete it in reload_as_needed once we know that this
3398 elimination is, in fact, being done.
3400 If REPLACE isn't set, we can't delete this insn, but needn't
3401 process it since it won't be used unless something changes. */
3402 if (replace)
3403 delete_dead_insn (insn);
3404 val = 1;
3405 goto done;
3408 /* Check for (set (reg) (plus (reg from) (offset))) where the offset
3409 in the insn is the negative of the offset in FROM. Substitute
3410 (set (reg) (reg to)) for the insn and change its code.
3412 We have to do this here, rather than in eliminate_regs, so that we can
3413 change the insn code. */
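/* For instance (symbolic registers, made-up offset): if the argument
   pointer eliminates into the stack pointer with a current offset of
   16, then

	(set (reg 3) (plus (reg argp) (const_int -16)))

   is replaced right here by

	(set (reg 3) (reg sp))

   and INSN_CODE is reset so the move is re-recognized.  */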
3415 if (GET_CODE (SET_SRC (old_set)) == PLUS
3416 && GET_CODE (XEXP (SET_SRC (old_set), 0)) == REG
3417 && GET_CODE (XEXP (SET_SRC (old_set), 1)) == CONST_INT)
3418 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3419 ep++)
3420 if (ep->from_rtx == XEXP (SET_SRC (old_set), 0)
3421 && ep->can_eliminate)
3423 /* We must stop at the first elimination that will be used.
3424 If this one would replace the PLUS with a REG, do it
3425 now. Otherwise, quit the loop and let eliminate_regs
3426 do its normal replacement. */
3427 if (ep->offset == - INTVAL (XEXP (SET_SRC (old_set), 1)))
3429 /* We assume here that we don't need a PARALLEL of
3430 any CLOBBERs for this assignment. There's not
3431 much we can do if we do need it. */
3432 PATTERN (insn) = gen_rtx (SET, VOIDmode,
3433 SET_DEST (old_set), ep->to_rtx);
3434 INSN_CODE (insn) = -1;
3435 val = 1;
3436 goto done;
3439 break;
3443 old_asm_operands_vec = 0;
3445 /* Replace the body of this insn with a substituted form. If we changed
3446 something, return non-zero.
3448 If we are replacing a body that was a (set X (plus Y Z)), try to
3449 re-recognize the insn. We do this in case we had a simple addition
3450 but now can do this as a load-address. This saves an insn in this
3451 common case. */
3453 new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX, 0);
3454 if (new_body != old_body)
3456 /* If we aren't replacing things permanently and we changed something,
3457 make another copy to ensure that all the RTL is new. Otherwise
3458 things can go wrong if find_reloads swaps commutative operands
3459 and one is inside RTL that has been copied while the other is not. */
3461 /* Don't copy an asm_operands because (1) there's no need and (2)
3462 copy_rtx can't do it properly when there are multiple outputs. */
3463 if (! replace && asm_noperands (old_body) < 0)
3464 new_body = copy_rtx (new_body);
3466 /* If we had a move insn but now we don't, rerecognize it. This will
3467 cause spurious re-recognition if the old move had a PARALLEL since
3468 the new one still will, but we can't call single_set without
3469 having put NEW_BODY into the insn and the re-recognition won't
3470 hurt in this rare case. */
3471 if (old_set != 0
3472 && ((GET_CODE (SET_SRC (old_set)) == REG
3473 && (GET_CODE (new_body) != SET
3474 || GET_CODE (SET_SRC (new_body)) != REG))
3475 /* If this was a load from or store to memory, compare
3476 the MEM in recog_operand to the one in the insn. If they
3477 are not equal, then rerecognize the insn. */
3478 || (old_set != 0
3479 && ((GET_CODE (SET_SRC (old_set)) == MEM
3480 && SET_SRC (old_set) != recog_operand[1])
3481 || (GET_CODE (SET_DEST (old_set)) == MEM
3482 && SET_DEST (old_set) != recog_operand[0])))
3483 /* If this was an add insn before, rerecognize. */
3484 || GET_CODE (SET_SRC (old_set)) == PLUS))
3486 if (! validate_change (insn, &PATTERN (insn), new_body, 0))
3487 /* If recognition fails, store the new body anyway.
3488 It's normal to have recognition failures here
3489 due to bizarre memory addresses; reloading will fix them. */
3490 PATTERN (insn) = new_body;
3492 else
3493 PATTERN (insn) = new_body;
3495 val = 1;
3498 /* Loop through all elimination pairs. See if any have changed and
3499 recalculate the number not at initial offset.
3501 Compute the maximum offset (minimum offset if the stack does not
3502 grow downward) for each elimination pair.
3504 We also detect cases where register elimination cannot be done,
3505 namely, if a register would be both changed and referenced outside a MEM
3506 in the resulting insn since such an insn is often undefined and, even if
3507 not, we cannot know what meaning will be given to it. Note that it is
3508 valid to have a register used in an address in an insn that changes it
3509 (presumably with a pre- or post-increment or decrement).
3511 If anything changes, return nonzero. */
3513 num_not_at_initial_offset = 0;
3514 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3516 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3517 ep->can_eliminate = 0;
3519 ep->ref_outside_mem = 0;
3521 if (ep->previous_offset != ep->offset)
3522 val = 1;
3524 ep->previous_offset = ep->offset;
3525 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3526 num_not_at_initial_offset++;
3528 #ifdef STACK_GROWS_DOWNWARD
3529 ep->max_offset = MAX (ep->max_offset, ep->offset);
3530 #else
3531 ep->max_offset = MIN (ep->max_offset, ep->offset);
3532 #endif
3535 done:
3536 /* If we changed something, perform elimination in REG_NOTES. This is
3537 needed even when REPLACE is zero because a REG_DEAD note might refer
3538 to a register that we eliminate and could cause a different number
3539 of spill registers to be needed in the final reload pass than in
3540 the pre-passes. */
3541 if (val && REG_NOTES (insn) != 0)
3542 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn), 0);
3544 if (! replace)
3545 pop_obstacks ();
3547 return val;
3550 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3551 replacement we currently believe is valid, mark it as not eliminable if X
3552 modifies DEST in any way other than by adding a constant integer to it.
3554 If DEST is the frame pointer, we do nothing because we assume that
3555 all assignments to the hard frame pointer are nonlocal gotos and are being
3556 done at a time when they are valid and do not disturb anything else.
3557 Some machines want to eliminate a fake argument pointer with either the
3558 frame or stack pointer. Assignments to the hard frame pointer must not
3559 prevent this elimination.
3561 Called via note_stores from reload before starting its passes to scan
3562 the insns of the function. */
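/* Two illustrative cases (symbolic registers), assuming eliminations
   into the stack pointer are still believed valid:

	(set (reg sp) (plus (reg sp) (const_int 16)))	keeps them valid;
	(set (reg sp) (reg 2))				disables them,

   because the second insn changes sp by something other than adding a
   constant integer.  */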
3564 static void
3565 mark_not_eliminable (dest, x)
3566 rtx dest;
3567 rtx x;
3569 register int i;
3571 /* A SUBREG of a hard register here is just changing its mode. We should
3572 not see a SUBREG of an eliminable hard register, but check just in
3573 case. */
3574 if (GET_CODE (dest) == SUBREG)
3575 dest = SUBREG_REG (dest);
3577 if (dest == hard_frame_pointer_rtx)
3578 return;
3580 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3581 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3582 && (GET_CODE (x) != SET
3583 || GET_CODE (SET_SRC (x)) != PLUS
3584 || XEXP (SET_SRC (x), 0) != dest
3585 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3587 reg_eliminate[i].can_eliminate_previous
3588 = reg_eliminate[i].can_eliminate = 0;
3589 num_eliminable--;
3593 /* Kick all pseudos out of hard register REGNO.
3594 If GLOBAL is nonzero, try to find someplace else to put them.
3595 If DUMPFILE is nonzero, log actions taken on that file.
3597 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3598 because we found we can't eliminate some register. In that case, no pseudos
3599 are allowed to be in the register, even if they are only in a block that
3600 doesn't require spill registers, unlike the case when we are spilling this
3601 hard reg to produce another spill register.
3603 Return nonzero if any pseudos needed to be kicked out. */
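/* For example (numbers invented): if pseudo 120 was allocated to hard
   reg 8 but occupies hard regs 8 and 9 because of its mode, then
   spill_hard_reg (9, ...) also kicks pseudo 120 out, since the overlap
   test below considers every hard reg the pseudo occupies.  */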
3605 static int
3606 spill_hard_reg (regno, global, dumpfile, cant_eliminate)
3607 register int regno;
3608 int global;
3609 FILE *dumpfile;
3610 int cant_eliminate;
3612 enum reg_class class = REGNO_REG_CLASS (regno);
3613 int something_changed = 0;
3614 register int i;
3616 SET_HARD_REG_BIT (forbidden_regs, regno);
3618 if (cant_eliminate)
3619 regs_ever_live[regno] = 1;
3621 /* Spill every pseudo reg that was allocated to this reg
3622 or to something that overlaps this reg. */
3624 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3625 if (reg_renumber[i] >= 0
3626 && reg_renumber[i] <= regno
3627 && (reg_renumber[i]
3628 + HARD_REGNO_NREGS (reg_renumber[i],
3629 PSEUDO_REGNO_MODE (i))
3630 > regno))
3632 /* If this register belongs solely to a basic block which needed no
3633 spilling of any class that this register is contained in,
3634 leave it be, unless we are spilling this register because
3635 it was a hard register that can't be eliminated. */
3637 if (! cant_eliminate
3638 && basic_block_needs[0]
3639 && reg_basic_block[i] >= 0
3640 && basic_block_needs[(int) class][reg_basic_block[i]] == 0)
3642 enum reg_class *p;
3644 for (p = reg_class_superclasses[(int) class];
3645 *p != LIM_REG_CLASSES; p++)
3646 if (basic_block_needs[(int) *p][reg_basic_block[i]] > 0)
3647 break;
3649 if (*p == LIM_REG_CLASSES)
3650 continue;
3653 /* Mark it as no longer having a hard register home. */
3654 reg_renumber[i] = -1;
3655 /* We will need to scan everything again. */
3656 something_changed = 1;
3657 if (global)
3658 retry_global_alloc (i, forbidden_regs);
3660 alter_reg (i, regno);
3661 if (dumpfile)
3663 if (reg_renumber[i] == -1)
3664 fprintf (dumpfile, " Register %d now on stack.\n\n", i);
3665 else
3666 fprintf (dumpfile, " Register %d now in %d.\n\n",
3667 i, reg_renumber[i]);
3670 for (i = 0; i < scratch_list_length; i++)
3672 if (scratch_list[i] && REGNO (scratch_list[i]) == regno)
3674 if (! cant_eliminate && basic_block_needs[0]
3675 && ! basic_block_needs[(int) class][scratch_block[i]])
3677 enum reg_class *p;
3679 for (p = reg_class_superclasses[(int) class];
3680 *p != LIM_REG_CLASSES; p++)
3681 if (basic_block_needs[(int) *p][scratch_block[i]] > 0)
3682 break;
3684 if (*p == LIM_REG_CLASSES)
3685 continue;
3687 PUT_CODE (scratch_list[i], SCRATCH);
3688 scratch_list[i] = 0;
3689 something_changed = 1;
3690 continue;
3694 return something_changed;
3697 /* Find all paradoxical subregs within X and update reg_max_ref_width.
3698 Also mark any hard registers used to store user variables as
3699 forbidden from being used for spill registers. */
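/* Example (pseudo number invented): (subreg:SI (reg:QI 105) 0) is
   paradoxical because the SImode reference is wider than the QImode
   pseudo; recording GET_MODE_SIZE (SImode) in reg_max_ref_width[105]
   lets later code give pseudo 105 spill treatment and a stack slot
   wide enough for the full SImode access.  */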
3701 static void
3702 scan_paradoxical_subregs (x)
3703 register rtx x;
3705 register int i;
3706 register char *fmt;
3707 register enum rtx_code code = GET_CODE (x);
3709 switch (code)
3711 case REG:
3712 #ifdef SMALL_REGISTER_CLASSES
3713 if (SMALL_REGISTER_CLASSES
3714 && REGNO (x) < FIRST_PSEUDO_REGISTER
3715 && REG_USERVAR_P (x))
3716 SET_HARD_REG_BIT (forbidden_regs, REGNO (x));
3717 #endif
3718 return;
3720 case CONST_INT:
3721 case CONST:
3722 case SYMBOL_REF:
3723 case LABEL_REF:
3724 case CONST_DOUBLE:
3725 case CC0:
3726 case PC:
3727 case USE:
3728 case CLOBBER:
3729 return;
3731 case SUBREG:
3732 if (GET_CODE (SUBREG_REG (x)) == REG
3733 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3734 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3735 = GET_MODE_SIZE (GET_MODE (x));
3736 return;
3739 fmt = GET_RTX_FORMAT (code);
3740 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3742 if (fmt[i] == 'e')
3743 scan_paradoxical_subregs (XEXP (x, i));
3744 else if (fmt[i] == 'E')
3746 register int j;
3747 for (j = XVECLEN (x, i) - 1; j >=0; j--)
3748 scan_paradoxical_subregs (XVECEXP (x, i, j));
3753 static int
3754 hard_reg_use_compare (p1p, p2p)
3755 const GENERIC_PTR p1p;
3756 const GENERIC_PTR p2p;
3758 struct hard_reg_n_uses *p1 = (struct hard_reg_n_uses *)p1p,
3759 *p2 = (struct hard_reg_n_uses *)p2p;
3760 int tem = p1->uses - p2->uses;
3761 if (tem != 0) return tem;
3762 /* If regs are equally good, sort by regno,
3763 so that the results of qsort leave nothing to chance. */
3764 return p1->regno - p2->regno;
3767 /* Choose the order to consider regs for use as reload registers
3768 based on how much trouble would be caused by spilling one.
3769 Store them in order of decreasing preference in potential_reload_regs. */
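/* Rough illustration (assuming REG_ALLOC_ORDER is not defined): a
   call-clobbered hard reg that currently holds no pseudo is placed
   near the front of potential_reload_regs, while fixed regs and the
   hard frame pointer receive an inflated use count below and end up
   at the very end of the list.  */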
3771 static void
3772 order_regs_for_reload (global)
3773 int global;
3775 register int i;
3776 register int o = 0;
3777 int large = 0;
3779 struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];
3781 CLEAR_HARD_REG_SET (bad_spill_regs);
3783 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3784 potential_reload_regs[i] = -1;
3786 /* Count number of uses of each hard reg by pseudo regs allocated to it
3787 and then order them by decreasing use. */
3789 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3791 hard_reg_n_uses[i].uses = 0;
3792 hard_reg_n_uses[i].regno = i;
3795 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3797 int regno = reg_renumber[i];
3798 if (regno >= 0)
3800 int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
3801 while (regno < lim)
3803 /* If allocated by local-alloc, show more uses since
3804 we're not going to be able to reallocate it, but
3805 we might if allocated by global alloc. */
3806 if (global && reg_allocno[i] < 0)
3807 hard_reg_n_uses[regno].uses += (reg_n_refs[i] + 1) / 2;
3809 hard_reg_n_uses[regno++].uses += reg_n_refs[i];
3812 large += reg_n_refs[i];
3815 /* Now fixed registers (which cannot safely be used for reloading)
3816 get a very high use count so they will be considered least desirable.
3817 Registers used explicitly in the rtl code are almost as bad. */
3819 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3821 if (fixed_regs[i])
3823 hard_reg_n_uses[i].uses += 2 * large + 2;
3824 SET_HARD_REG_BIT (bad_spill_regs, i);
3826 else if (regs_explicitly_used[i])
3828 hard_reg_n_uses[i].uses += large + 1;
3829 /* ??? We are doing this here because of the potential that
3830 bad code may be generated if a register explicitly used in
3831 an insn was used as a spill register for that insn. But
3832 not using these as spill registers may lose on some machines.
3833 We'll have to see how this works out. */
3834 #ifdef SMALL_REGISTER_CLASSES
3835 if (! SMALL_REGISTER_CLASSES)
3836 #endif
3837 SET_HARD_REG_BIT (bad_spill_regs, i);
3840 hard_reg_n_uses[HARD_FRAME_POINTER_REGNUM].uses += 2 * large + 2;
3841 SET_HARD_REG_BIT (bad_spill_regs, HARD_FRAME_POINTER_REGNUM);
3843 #ifdef ELIMINABLE_REGS
3844 /* If registers other than the frame pointer are eliminable, mark them as
3845 poor choices. */
3846 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3848 hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
3849 SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
3851 #endif
3853 /* Prefer registers not so far used, for use in temporary loading.
3854 Among them, if REG_ALLOC_ORDER is defined, use that order.
3855 Otherwise, prefer registers not preserved by calls. */
3857 #ifdef REG_ALLOC_ORDER
3858 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3860 int regno = reg_alloc_order[i];
3862 if (hard_reg_n_uses[regno].uses == 0)
3863 potential_reload_regs[o++] = regno;
3865 #else
3866 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3868 if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
3869 potential_reload_regs[o++] = i;
3871 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3873 if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
3874 potential_reload_regs[o++] = i;
3876 #endif
3878 qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
3879 sizeof hard_reg_n_uses[0], hard_reg_use_compare);
3881 /* Now add the regs that are already used,
3882 preferring those used less often. The fixed and otherwise forbidden
3883 registers will be at the end of this list. */
3885 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3886 if (hard_reg_n_uses[i].uses != 0)
3887 potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
3890 /* Used in reload_as_needed to sort the spilled regs. */
3892 static int
3893 compare_spill_regs (r1p, r2p)
3894 const GENERIC_PTR r1p;
3895 const GENERIC_PTR r2p;
3897 short r1 = *(short *)r1p, r2 = *(short *)r2p;
3898 return r1 - r2;
3901 /* Reload pseudo-registers into hard regs around each insn as needed.
3902 Additional register load insns are output before the insn that needs it
3903 and perhaps store insns after insns that modify the reloaded pseudo reg.
3905 reg_last_reload_reg and reg_reloaded_contents keep track of
3906 which registers are already available in reload registers.
3907 We update these for the reloads that we perform,
3908 as the insns are scanned. */
3910 static void
3911 reload_as_needed (first, live_known)
3912 rtx first;
3913 int live_known;
3915 register rtx insn;
3916 register int i;
3917 int this_block = 0;
3918 rtx x;
3919 rtx after_call = 0;
3921 bzero ((char *) spill_reg_rtx, sizeof spill_reg_rtx);
3922 bzero ((char *) spill_reg_store, sizeof spill_reg_store);
3923 reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
3924 bzero ((char *) reg_last_reload_reg, max_regno * sizeof (rtx));
3925 reg_has_output_reload = (char *) alloca (max_regno);
3926 for (i = 0; i < n_spills; i++)
3928 reg_reloaded_contents[i] = -1;
3929 reg_reloaded_insn[i] = 0;
3932 /* Reset all offsets on eliminable registers to their initial values. */
3933 #ifdef ELIMINABLE_REGS
3934 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3936 INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
3937 reg_eliminate[i].initial_offset);
3938 reg_eliminate[i].previous_offset
3939 = reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
3941 #else
3942 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
3943 reg_eliminate[0].previous_offset
3944 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
3945 #endif
3947 num_not_at_initial_offset = 0;
3949 /* Order the spilled regs, so that allocate_reload_regs can guarantee to
3950 pack registers with group needs. */
3951 if (n_spills > 1)
3953 qsort (spill_regs, n_spills, sizeof (short), compare_spill_regs);
3954 for (i = 0; i < n_spills; i++)
3955 spill_reg_order[spill_regs[i]] = i;
3958 for (insn = first; insn;)
3960 register rtx next = NEXT_INSN (insn);
3962 /* Notice when we move to a new basic block. */
3963 if (live_known && this_block + 1 < n_basic_blocks
3964 && insn == basic_block_head[this_block+1])
3965 ++this_block;
3967 /* If we pass a label, copy the offsets from the label information
3968 into the current offsets of each elimination. */
3969 if (GET_CODE (insn) == CODE_LABEL)
3971 num_not_at_initial_offset = 0;
3972 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3974 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
3975 = offsets_at[CODE_LABEL_NUMBER (insn)][i];
3976 if (reg_eliminate[i].can_eliminate
3977 && (reg_eliminate[i].offset
3978 != reg_eliminate[i].initial_offset))
3979 num_not_at_initial_offset++;
3983 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3985 rtx avoid_return_reg = 0;
3986 rtx oldpat = PATTERN (insn);
3988 #ifdef SMALL_REGISTER_CLASSES
3989 /* Set avoid_return_reg if this is an insn
3990 that might use the value of a function call. */
3991 if (SMALL_REGISTER_CLASSES && GET_CODE (insn) == CALL_INSN)
3993 if (GET_CODE (PATTERN (insn)) == SET)
3994 after_call = SET_DEST (PATTERN (insn));
3995 else if (GET_CODE (PATTERN (insn)) == PARALLEL
3996 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
3997 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
3998 else
3999 after_call = 0;
4001 else if (SMALL_REGISTER_CLASSES
4002 && after_call != 0
4003 && !(GET_CODE (PATTERN (insn)) == SET
4004 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
4006 if (reg_referenced_p (after_call, PATTERN (insn)))
4007 avoid_return_reg = after_call;
4008 after_call = 0;
4010 #endif /* SMALL_REGISTER_CLASSES */
4012 /* If this is a USE or CLOBBER of a MEM, ensure that any
4013 references to eliminable registers have been removed. */
4015 if ((GET_CODE (PATTERN (insn)) == USE
4016 || GET_CODE (PATTERN (insn)) == CLOBBER)
4017 && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
4018 XEXP (XEXP (PATTERN (insn), 0), 0)
4019 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
4020 GET_MODE (XEXP (PATTERN (insn), 0)),
4021 NULL_RTX, 0);
4023 /* If we need to do register elimination processing, do so.
4024 This might delete the insn, in which case we are done. */
4025 if (num_eliminable && GET_MODE (insn) == QImode)
4027 eliminate_regs_in_insn (insn, 1);
4028 if (GET_CODE (insn) == NOTE)
4030 insn = next;
4031 continue;
4035 if (GET_MODE (insn) == VOIDmode)
4036 n_reloads = 0;
4037 /* First find the pseudo regs that must be reloaded for this insn.
4038 This info is returned in the tables reload_... (see reload.h).
4039 Also modify the body of INSN by substituting RELOAD
4040 rtx's for those pseudo regs. */
4041 else
4043 bzero (reg_has_output_reload, max_regno);
4044 CLEAR_HARD_REG_SET (reg_is_output_reload);
4046 find_reloads (insn, 1, spill_indirect_levels, live_known,
4047 spill_reg_order);
4050 if (n_reloads > 0)
4052 rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
4053 rtx p;
4054 int class;
4056 /* If this block has not had spilling done for a
4057 particular class and we have any non-optionals that need a
4058 spill reg in that class, abort. */
4060 for (class = 0; class < N_REG_CLASSES; class++)
4061 if (basic_block_needs[class] != 0
4062 && basic_block_needs[class][this_block] == 0)
4063 for (i = 0; i < n_reloads; i++)
4064 if (class == (int) reload_reg_class[i]
4065 && reload_reg_rtx[i] == 0
4066 && ! reload_optional[i]
4067 && (reload_in[i] != 0 || reload_out[i] != 0
4068 || reload_secondary_p[i] != 0))
4069 fatal_insn ("Non-optional registers need a spill register", insn);
4071 /* Now compute which reload regs to reload them into. Perhaps
4072 reusing reload regs from previous insns, or else output
4073 load insns to reload them. Maybe output store insns too.
4074 Record the choices of reload reg in reload_reg_rtx. */
4075 choose_reload_regs (insn, avoid_return_reg);
4077 #ifdef SMALL_REGISTER_CLASSES
4078 /* Merge any reloads that we didn't combine for fear of
4079 increasing the number of spill registers needed but which we now
4080 discover can be safely merged. */
4081 if (SMALL_REGISTER_CLASSES)
4082 merge_assigned_reloads (insn);
4083 #endif
4085 /* Generate the insns to reload operands into or out of
4086 their reload regs. */
4087 emit_reload_insns (insn);
4089 /* Substitute the chosen reload regs from reload_reg_rtx
4090 into the insn's body (or perhaps into the bodies of other
4091 load and store insns that we just made for reloading
4092 and that we moved the structure into). */
4093 subst_reloads ();
4095 /* If this was an ASM, make sure that all the reload insns
4096 we have generated are valid. If not, give an error
4097 and delete them. */
4099 if (asm_noperands (PATTERN (insn)) >= 0)
4100 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
4101 if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
4102 && (recog_memoized (p) < 0
4103 || (insn_extract (p),
4104 ! constrain_operands (INSN_CODE (p), 1))))
4106 error_for_asm (insn,
4107 "`asm' operand requires impossible reload");
4108 PUT_CODE (p, NOTE);
4109 NOTE_SOURCE_FILE (p) = 0;
4110 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
4113 /* Any previously reloaded spilled pseudo reg, stored in this insn,
4114 is no longer validly lying around to save a future reload.
4115 Note that this does not detect pseudos that were reloaded
4116 for this insn in order to be stored in
4117 (obeying register constraints). That is correct; such reload
4118 registers ARE still valid. */
4119 note_stores (oldpat, forget_old_reloads_1);
4121 /* There may have been CLOBBER insns placed after INSN. So scan
4122 between INSN and NEXT and use them to forget old reloads. */
4123 for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
4124 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
4125 note_stores (PATTERN (x), forget_old_reloads_1);
4127 #ifdef AUTO_INC_DEC
4128 /* Likewise for regs altered by auto-increment in this insn.
4129 But note that the reg-notes are not changed by reloading:
4130 they still contain the pseudo-regs, not the spill regs. */
4131 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
4132 if (REG_NOTE_KIND (x) == REG_INC)
4134 /* See if this pseudo reg was reloaded in this insn.
4135 If so, its last-reload info is still valid
4136 because it is based on this insn's reload. */
4137 for (i = 0; i < n_reloads; i++)
4138 if (reload_out[i] == XEXP (x, 0))
4139 break;
4141 if (i == n_reloads)
4142 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
4144 #endif
4146 /* A reload reg's contents are unknown after a label. */
4147 if (GET_CODE (insn) == CODE_LABEL)
4148 for (i = 0; i < n_spills; i++)
4150 reg_reloaded_contents[i] = -1;
4151 reg_reloaded_insn[i] = 0;
4154 /* Don't assume a reload reg is still good after a call insn
4155 if it is a call-used reg. */
4156 else if (GET_CODE (insn) == CALL_INSN)
4157 for (i = 0; i < n_spills; i++)
4158 if (call_used_regs[spill_regs[i]])
4160 reg_reloaded_contents[i] = -1;
4161 reg_reloaded_insn[i] = 0;
4164 /* In case registers overlap, allow certain insns to invalidate
4165 particular hard registers. */
4167 #ifdef INSN_CLOBBERS_REGNO_P
4168 for (i = 0 ; i < n_spills ; i++)
4169 if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
4171 reg_reloaded_contents[i] = -1;
4172 reg_reloaded_insn[i] = 0;
4174 #endif
4176 insn = next;
4178 #ifdef USE_C_ALLOCA
4179 alloca (0);
4180 #endif
4184 /* Discard all record of any value reloaded from X,
4185 or reloaded in X from someplace else;
4186 unless X is an output reload reg of the current insn.
4188 X may be a hard reg (the reload reg)
4189 or it may be a pseudo reg that was reloaded from. */
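/* For example (pseudo and hard register numbers invented): if pseudo
   200 was previously reloaded into hard reg 6, then a store such as
   (set (reg 200) ...) in the current insn clears
   reg_last_reload_reg[200], unless reg 200 has an output reload in
   this insn that re-establishes the copy.  */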
4191 static void
4192 forget_old_reloads_1 (x, ignored)
4193 rtx x;
4194 rtx ignored;
4196 register int regno;
4197 int nr;
4198 int offset = 0;
4200 /* note_stores does give us subregs of hard regs. */
4201 while (GET_CODE (x) == SUBREG)
4203 offset += SUBREG_WORD (x);
4204 x = SUBREG_REG (x);
4207 if (GET_CODE (x) != REG)
4208 return;
4210 regno = REGNO (x) + offset;
4212 if (regno >= FIRST_PSEUDO_REGISTER)
4213 nr = 1;
4214 else
4216 int i;
4217 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
4218 /* Storing into a spilled-reg invalidates its contents.
4219 This can happen if a block-local pseudo is allocated to that reg
4220 and it wasn't spilled because this block's total need is 0.
4221 Then some insn might have an optional reload and use this reg. */
4222 for (i = 0; i < nr; i++)
4223 if (spill_reg_order[regno + i] >= 0
4224 /* But don't do this if the reg actually serves as an output
4225 reload reg in the current instruction. */
4226 && (n_reloads == 0
4227 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
4229 reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
4230 reg_reloaded_insn[spill_reg_order[regno + i]] = 0;
4234 /* Since value of X has changed,
4235 forget any value previously copied from it. */
4237 while (nr-- > 0)
4238 /* But don't forget a copy if this is the output reload
4239 that establishes the copy's validity. */
4240 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
4241 reg_last_reload_reg[regno + nr] = 0;
4244 /* For each reload, the mode of the reload register. */
4245 static enum machine_mode reload_mode[MAX_RELOADS];
4247 /* For each reload, the largest number of registers it will require. */
4248 static int reload_nregs[MAX_RELOADS];
4250 /* Comparison function for qsort to decide which of two reloads
4251 should be handled first. *P1 and *P2 are the reload numbers. */
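/* For example (two hypothetical reloads): a required reload whose
   class contains exactly one hard register is handled before an
   optional GENERAL_REGS reload, since required reloads come first and
   solitary classes leave no freedom of choice.  */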
4253 static int
4254 reload_reg_class_lower (r1p, r2p)
4255 const GENERIC_PTR r1p;
4256 const GENERIC_PTR r2p;
4258 register int r1 = *(short *)r1p, r2 = *(short *)r2p;
4259 register int t;
4261 /* Consider required reloads before optional ones. */
4262 t = reload_optional[r1] - reload_optional[r2];
4263 if (t != 0)
4264 return t;
4266 /* Count all solitary classes before non-solitary ones. */
4267 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
4268 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
4269 if (t != 0)
4270 return t;
4272 /* Aside from solitaires, consider all multi-reg groups first. */
4273 t = reload_nregs[r2] - reload_nregs[r1];
4274 if (t != 0)
4275 return t;
4277 /* Consider reloads in order of increasing reg-class number. */
4278 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
4279 if (t != 0)
4280 return t;
4282 /* If reloads are equally urgent, sort by reload number,
4283 so that the results of qsort leave nothing to chance. */
4284 return r1 - r2;
4287 /* The following HARD_REG_SETs indicate when each hard register is
4288 used for a reload of various parts of the current insn. */
4290 /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4291 static HARD_REG_SET reload_reg_used;
4292 /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4293 static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4294 /* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I. */
4295 static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
4296 /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4297 static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4298 /* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I. */
4299 static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
4300 /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4301 static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4302 /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4303 static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4304 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4305 static HARD_REG_SET reload_reg_used_in_op_addr;
4306 /* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4307 static HARD_REG_SET reload_reg_used_in_op_addr_reload;
4308 /* If reg is in use for a RELOAD_FOR_INSN reload. */
4309 static HARD_REG_SET reload_reg_used_in_insn;
4310 /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4311 static HARD_REG_SET reload_reg_used_in_other_addr;
4313 /* If reg is in use as a reload reg for any sort of reload. */
4314 static HARD_REG_SET reload_reg_used_at_all;
4316 /* If reg is in use as an inherited reload. We just mark the first register
4317 in the group. */
4318 static HARD_REG_SET reload_reg_used_for_inherit;
4320 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4321 TYPE. MODE is used to indicate how many consecutive regs are
4322 actually used. */
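/* Example call (arguments invented), on a target where DImode occupies
   two word-sized hard regs:

	mark_reload_reg_in_use (4, 0, RELOAD_FOR_INPUT, DImode);

   sets hard regs 4 and 5 in reload_reg_used_in_input[0] and in
   reload_reg_used_at_all.  */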
4324 static void
4325 mark_reload_reg_in_use (regno, opnum, type, mode)
4326 int regno;
4327 int opnum;
4328 enum reload_type type;
4329 enum machine_mode mode;
4331 int nregs = HARD_REGNO_NREGS (regno, mode);
4332 int i;
4334 for (i = regno; i < nregs + regno; i++)
4336 switch (type)
4338 case RELOAD_OTHER:
4339 SET_HARD_REG_BIT (reload_reg_used, i);
4340 break;
4342 case RELOAD_FOR_INPUT_ADDRESS:
4343 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4344 break;
4346 case RELOAD_FOR_INPADDR_ADDRESS:
4347 SET_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4348 break;
4350 case RELOAD_FOR_OUTPUT_ADDRESS:
4351 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4352 break;
4354 case RELOAD_FOR_OUTADDR_ADDRESS:
4355 SET_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4356 break;
4358 case RELOAD_FOR_OPERAND_ADDRESS:
4359 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4360 break;
4362 case RELOAD_FOR_OPADDR_ADDR:
4363 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4364 break;
4366 case RELOAD_FOR_OTHER_ADDRESS:
4367 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4368 break;
4370 case RELOAD_FOR_INPUT:
4371 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4372 break;
4374 case RELOAD_FOR_OUTPUT:
4375 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4376 break;
4378 case RELOAD_FOR_INSN:
4379 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
4380 break;
4383 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4387 /* Similarly, but show REGNO is no longer in use for a reload. */
4389 static void
4390 clear_reload_reg_in_use (regno, opnum, type, mode)
4391 int regno;
4392 int opnum;
4393 enum reload_type type;
4394 enum machine_mode mode;
4396 int nregs = HARD_REGNO_NREGS (regno, mode);
4397 int i;
4399 for (i = regno; i < nregs + regno; i++)
4401 switch (type)
4403 case RELOAD_OTHER:
4404 CLEAR_HARD_REG_BIT (reload_reg_used, i);
4405 break;
4407 case RELOAD_FOR_INPUT_ADDRESS:
4408 CLEAR_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4409 break;
4411 case RELOAD_FOR_INPADDR_ADDRESS:
4412 CLEAR_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4413 break;
4415 case RELOAD_FOR_OUTPUT_ADDRESS:
4416 CLEAR_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4417 break;
4419 case RELOAD_FOR_OUTADDR_ADDRESS:
4420 CLEAR_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4421 break;
4423 case RELOAD_FOR_OPERAND_ADDRESS:
4424 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4425 break;
4427 case RELOAD_FOR_OPADDR_ADDR:
4428 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4429 break;
4431 case RELOAD_FOR_OTHER_ADDRESS:
4432 CLEAR_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4433 break;
4435 case RELOAD_FOR_INPUT:
4436 CLEAR_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4437 break;
4439 case RELOAD_FOR_OUTPUT:
4440 CLEAR_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4441 break;
4443 case RELOAD_FOR_INSN:
4444 CLEAR_HARD_REG_BIT (reload_reg_used_in_insn, i);
4445 break;
4450 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
4451 specified by OPNUM and TYPE. */
4453 static int
4454 reload_reg_free_p (regno, opnum, type)
4455 int regno;
4456 int opnum;
4457 enum reload_type type;
4459 int i;
4461 /* In use for a RELOAD_OTHER means it's not available for anything. */
4462 if (TEST_HARD_REG_BIT (reload_reg_used, regno))
4463 return 0;
4465 switch (type)
4467 case RELOAD_OTHER:
4468 /* In use for anything means we can't use it for RELOAD_OTHER. */
4469 if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4470 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4471 || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4472 return 0;
4474 for (i = 0; i < reload_n_operands; i++)
4475 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4476 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4477 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4478 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4479 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4480 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4481 return 0;
4483 return 1;
4485 case RELOAD_FOR_INPUT:
4486 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4487 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4488 return 0;
4490 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4491 return 0;
4493 /* If it is used for some other input, can't use it. */
4494 for (i = 0; i < reload_n_operands; i++)
4495 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4496 return 0;
4498 /* If it is used in a later operand's address, can't use it. */
4499 for (i = opnum + 1; i < reload_n_operands; i++)
4500 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4501 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4502 return 0;
4504 return 1;
4506 case RELOAD_FOR_INPUT_ADDRESS:
4507 /* Can't use a register if it is used for an input address for this
4508 operand or used as an input in an earlier one. */
4509 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
4510 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4511 return 0;
4513 for (i = 0; i < opnum; i++)
4514 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4515 return 0;
4517 return 1;
4519 case RELOAD_FOR_INPADDR_ADDRESS:
4520 /* Can't use a register if it is used for an input address
4521 address for this operand or used as an input in an earlier
4522 one. */
4523 if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4524 return 0;
4526 for (i = 0; i < opnum; i++)
4527 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4528 return 0;
4530 return 1;
4532 case RELOAD_FOR_OUTPUT_ADDRESS:
4533 /* Can't use a register if it is used for an output address for this
4534 operand or used as an output in this or a later operand. */
4535 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4536 return 0;
4538 for (i = opnum; i < reload_n_operands; i++)
4539 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4540 return 0;
4542 return 1;
4544 case RELOAD_FOR_OUTADDR_ADDRESS:
4545 /* Can't use a register if it is used for an output address
4546 address for this operand or used as an output in this or a
4547 later operand. */
4548 if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4549 return 0;
4551 for (i = opnum; i < reload_n_operands; i++)
4552 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4553 return 0;
4555 return 1;
4557 case RELOAD_FOR_OPERAND_ADDRESS:
4558 for (i = 0; i < reload_n_operands; i++)
4559 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4560 return 0;
4562 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4563 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4565 case RELOAD_FOR_OPADDR_ADDR:
4566 for (i = 0; i < reload_n_operands; i++)
4567 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4568 return 0;
4570 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
4572 case RELOAD_FOR_OUTPUT:
4573 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4574 outputs, or an operand address for this or an earlier output. */
4575 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4576 return 0;
4578 for (i = 0; i < reload_n_operands; i++)
4579 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4580 return 0;
4582 for (i = 0; i <= opnum; i++)
4583 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4584 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4585 return 0;
4587 return 1;
4589 case RELOAD_FOR_INSN:
4590 for (i = 0; i < reload_n_operands; i++)
4591 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4592 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4593 return 0;
4595 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4596 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4598 case RELOAD_FOR_OTHER_ADDRESS:
4599 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4601 abort ();
4604 /* Return 1 if the value in reload reg REGNO, as used by a reload
4605 needed for the part of the insn specified by OPNUM and TYPE,
4606 is not in use for a reload in any prior part of the insn.
4608 We can assume that the reload reg was already tested for availability
4609 at the time it is needed, and we should not check this again,
4610 in case the reg has already been marked in use. */
4612 static int
4613 reload_reg_free_before_p (regno, opnum, type)
4614 int regno;
4615 int opnum;
4616 enum reload_type type;
4618 int i;
4620 switch (type)
4622 case RELOAD_FOR_OTHER_ADDRESS:
4623 /* These always come first. */
4624 return 1;
4626 case RELOAD_OTHER:
4627 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4629 /* If this use is for part of the insn,
4630 check the reg is not in use for any prior part. It is tempting
4631 to try to do this by falling through from objects that occur
4632 later in the insn to ones that occur earlier, but that will not
4633 correctly take into account the fact that here we MUST ignore
4634 things that would prevent the register from being allocated in
4635 the first place, since we know that it was allocated. */
4637 case RELOAD_FOR_OUTPUT_ADDRESS:
4638 case RELOAD_FOR_OUTADDR_ADDRESS:
4639 /* Earlier reloads are for earlier outputs or their addresses,
4640 any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
4641 RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
4642 RELOAD_OTHER). */
4643 for (i = 0; i < opnum; i++)
4644 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4645 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4646 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4647 return 0;
4649 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4650 return 0;
4652 for (i = 0; i < reload_n_operands; i++)
4653 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4654 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4655 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4656 return 0;
4658 return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4659 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4660 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4662 case RELOAD_FOR_OUTPUT:
4663 /* This can't be used in the output address for this operand and
4664 anything that can't be used for it, except that we've already
4665 tested for RELOAD_FOR_INSN objects. */
4667 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno)
4668 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4669 return 0;
4671 for (i = 0; i < opnum; i++)
4672 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4673 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4674 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4675 return 0;
4677 for (i = 0; i < reload_n_operands; i++)
4678 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4679 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4680 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4681 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4682 return 0;
4684 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4686 case RELOAD_FOR_OPERAND_ADDRESS:
4687 case RELOAD_FOR_OPADDR_ADDR:
4688 case RELOAD_FOR_INSN:
4689 /* These can't conflict with inputs, or each other, so all we have to
4690 test is input addresses and the addresses of OTHER items. */
4692 for (i = 0; i < reload_n_operands; i++)
4693 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4694 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4695 return 0;
4697 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4699 case RELOAD_FOR_INPUT:
4700 /* The only things earlier are the address for this and
4701 earlier inputs, other inputs (which we know we don't conflict
4702 with), and addresses of RELOAD_OTHER objects. */
4704 for (i = 0; i <= opnum; i++)
4705 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4706 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4707 return 0;
4709 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4711 case RELOAD_FOR_INPUT_ADDRESS:
4712 case RELOAD_FOR_INPADDR_ADDRESS:
4713 /* Similarly, all we have to check is for use in earlier inputs'
4714 addresses. */
4715 for (i = 0; i < opnum; i++)
4716 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4717 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4718 return 0;
4720 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4722 abort ();
4725 /* Return 1 if the value in reload reg REGNO, as used by a reload
4726 needed for the part of the insn specified by OPNUM and TYPE,
4727 is still available in REGNO at the end of the insn.
4729 We can assume that the reload reg was already tested for availability
4730 at the time it is needed, and we should not check this again,
4731 in case the reg has already been marked in use. */
4733 static int
4734 reload_reg_reaches_end_p (regno, opnum, type)
4735 int regno;
4736 int opnum;
4737 enum reload_type type;
4739 int i;
4741 switch (type)
4743 case RELOAD_OTHER:
4744 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4745 its value must reach the end. */
4746 return 1;
4748 /* If this use is for part of the insn,
4749 its value reaches the end if no subsequent part uses the same register.
4750 Just like the above function, don't try to do this with lots
4751 of fallthroughs. */
4753 case RELOAD_FOR_OTHER_ADDRESS:
4754 /* Here we check for everything else, since these don't conflict
4755 with anything else and everything comes later. */
4757 for (i = 0; i < reload_n_operands; i++)
4758 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4759 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4760 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4761 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4762 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4763 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4764 return 0;
4766 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4767 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4768 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4770 case RELOAD_FOR_INPUT_ADDRESS:
4771 case RELOAD_FOR_INPADDR_ADDRESS:
4772 /* Similar, except that we check only for this and subsequent inputs
4773 and the address of only subsequent inputs and we do not need
4774 to check for RELOAD_OTHER objects since they are known not to
4775 conflict. */
4777 for (i = opnum; i < reload_n_operands; i++)
4778 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4779 return 0;
4781 for (i = opnum + 1; i < reload_n_operands; i++)
4782 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4783 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4784 return 0;
4786 for (i = 0; i < reload_n_operands; i++)
4787 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4788 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4789 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4790 return 0;
4792 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4793 return 0;
4795 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4796 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4798 case RELOAD_FOR_INPUT:
4799 /* Similar to input address, except we start at the next operand for
4800 both input and input address and we do not check for
4801 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4802 would conflict. */
4804 for (i = opnum + 1; i < reload_n_operands; i++)
4805 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4806 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4807 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4808 return 0;
4810 /* ... fall through ... */
4812 case RELOAD_FOR_OPERAND_ADDRESS:
4813 /* Check outputs and their addresses. */
4815 for (i = 0; i < reload_n_operands; i++)
4816 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4817 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4818 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4819 return 0;
4821 return 1;
4823 case RELOAD_FOR_OPADDR_ADDR:
4824 for (i = 0; i < reload_n_operands; i++)
4825 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4826 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4827 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4828 return 0;
4830 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4831 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4833 case RELOAD_FOR_INSN:
4834 /* These conflict with other outputs with RELOAD_OTHER. So
4835 we need only check for output addresses. */
4837 opnum = -1;
4839 /* ... fall through ... */
4841 case RELOAD_FOR_OUTPUT:
4842 case RELOAD_FOR_OUTPUT_ADDRESS:
4843 case RELOAD_FOR_OUTADDR_ADDRESS:
4844 /* We already know these can't conflict with a later output. So the
4845 only things to check are later output addresses. */
4846 for (i = opnum + 1; i < reload_n_operands; i++)
4847 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4848 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4849 return 0;
4851 return 1;
4854 abort ();
4857 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
4858 Return 0 otherwise.
4860 This function uses the same algorithm as reload_reg_free_p above. */
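/* For instance (operand numbers illustrative): a RELOAD_FOR_INPUT
   reload for operand 0 conflicts with a RELOAD_FOR_INPUT_ADDRESS
   reload for operand 1, whose address is computed after operand 0 has
   already been loaded, but not with the RELOAD_FOR_INPUT_ADDRESS
   reload for operand 0 itself.  */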
4862 static int
4863 reloads_conflict (r1, r2)
4864 int r1, r2;
4866 enum reload_type r1_type = reload_when_needed[r1];
4867 enum reload_type r2_type = reload_when_needed[r2];
4868 int r1_opnum = reload_opnum[r1];
4869 int r2_opnum = reload_opnum[r2];
4871 /* RELOAD_OTHER conflicts with everything. */
4872 if (r2_type == RELOAD_OTHER)
4873 return 1;
4875 /* Otherwise, check conflicts differently for each type. */
4877 switch (r1_type)
4879 case RELOAD_FOR_INPUT:
4880 return (r2_type == RELOAD_FOR_INSN
4881 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
4882 || r2_type == RELOAD_FOR_OPADDR_ADDR
4883 || r2_type == RELOAD_FOR_INPUT
4884 || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
4885 || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
4886 && r2_opnum > r1_opnum));
4888 case RELOAD_FOR_INPUT_ADDRESS:
4889 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
4890 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4892 case RELOAD_FOR_INPADDR_ADDRESS:
4893 return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
4894 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4896 case RELOAD_FOR_OUTPUT_ADDRESS:
4897 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
4898 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4900 case RELOAD_FOR_OUTADDR_ADDRESS:
4901 return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
4902 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4904 case RELOAD_FOR_OPERAND_ADDRESS:
4905 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
4906 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4908 case RELOAD_FOR_OPADDR_ADDR:
4909 return (r2_type == RELOAD_FOR_INPUT
4910 || r2_type == RELOAD_FOR_OPADDR_ADDR);
4912 case RELOAD_FOR_OUTPUT:
4913 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
4914 || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
4915 || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
4916 && r2_opnum >= r1_opnum));
4918 case RELOAD_FOR_INSN:
4919 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
4920 || r2_type == RELOAD_FOR_INSN
4921 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4923 case RELOAD_FOR_OTHER_ADDRESS:
4924 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
4926 case RELOAD_OTHER:
4927 return 1;
4929 default:
4930 abort ();
4934 /* Vector of reload-numbers showing the order in which the reloads should
4935 be processed. */
4936 short reload_order[MAX_RELOADS];
4938 /* Indexed by reload number, 1 if incoming value
4939 inherited from previous insns. */
4940 char reload_inherited[MAX_RELOADS];
4942 /* For an inherited reload, this is the insn the reload was inherited from,
4943 if we know it. Otherwise, this is 0. */
4944 rtx reload_inheritance_insn[MAX_RELOADS];
4946 /* If non-zero, this is a place to get the value of the reload,
4947 rather than using reload_in. */
4948 rtx reload_override_in[MAX_RELOADS];
4950 /* For each reload, the index in spill_regs of the spill register used,
4951 or -1 if we did not need one of the spill registers for this reload. */
4952 int reload_spill_index[MAX_RELOADS];
4954 /* Find a spill register to use as a reload register for reload R.
4955 LAST_RELOAD is non-zero if this is the last reload for the insn being
4956 processed.
4958 Set reload_reg_rtx[R] to the register allocated.
4960 If NOERROR is nonzero, we return 1 if successful,
4961 or 0 if we couldn't find a spill reg and we didn't change anything. */
4963 static int
4964 allocate_reload_reg (r, insn, last_reload, noerror)
4965 int r;
4966 rtx insn;
4967 int last_reload;
4968 int noerror;
4970 int i;
4971 int pass;
4972 int count;
4973 rtx new;
4974 int regno;
4976 /* If we put this reload ahead, thinking it is a group,
4977 then insist on finding a group. Otherwise we can grab a
4978 reg that some other reload needs.
4979 (That can happen when we have a 68000 DATA_OR_FP_REG
4980 which is a group of data regs or one fp reg.)
4981 We need not be so restrictive if there are no more reloads
4982 for this insn.
4984 ??? Really it would be nicer to have smarter handling
4985 for that kind of reg class, where a problem like this is normal.
4986 Perhaps those classes should be avoided for reloading
4987 by use of more alternatives. */
4989 int force_group = reload_nregs[r] > 1 && ! last_reload;
4991 /* If we want a single register and haven't yet found one,
4992 take any reg in the right class and not in use.
4993 If we want a consecutive group, here is where we look for it.
4995 We use two passes so we can first look for reload regs to
4996 reuse, which are already in use for other reloads in this insn,
4997 and only then use additional registers.
4998 I think that maximizing reuse is needed to make sure we don't
4999 run out of reload regs. Suppose we have three reloads, and
5000 reloads A and B can share regs. These need two regs.
5001 Suppose A and B are given different regs.
5002 That leaves none for C. */
5003 for (pass = 0; pass < 2; pass++)
5005 /* I is the index in spill_regs.
5006 We advance it round-robin between insns to use all spill regs
5007 equally, so that inherited reloads have a chance
5008 of leapfrogging each other. Don't do this, however, when we have
5009 group needs and failure would be fatal; if we only have a relatively
5010 small number of spill registers, and more than one of them has
5011 group needs, then by starting in the middle, we may end up
5012 allocating the first one in such a way that we are not left with
5013 sufficient groups to handle the rest. */
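/* A rough illustration of the round-robin start point below: with, say,
   n_spills == 4 and last_spill_reg == 2 left over from the previous insn,
   the first candidate examined is spill_regs[3], then spill_regs[0], [1],
   and [2]; only when a group is demanded and failure would be fatal does
   the scan restart from spill_regs[0].  (Numbers are illustrative only.)  */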
5015 if (noerror || ! force_group)
5016 i = last_spill_reg;
5017 else
5018 i = -1;
5020 for (count = 0; count < n_spills; count++)
5022 int class = (int) reload_reg_class[r];
5024 i = (i + 1) % n_spills;
5026 if (reload_reg_free_p (spill_regs[i], reload_opnum[r],
5027 reload_when_needed[r])
5028 && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
5029 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
5030 /* Look first for regs to share, then for unshared. But
5031 don't share regs used for inherited reloads; they are
5032 the ones we want to preserve. */
5033 && (pass
5034 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
5035 spill_regs[i])
5036 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
5037 spill_regs[i]))))
5039 int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5040 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
5041 (on 68000) got us two FP regs. If NR is 1,
5042 we would reject both of them. */
5043 if (force_group)
5044 nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
5045 /* If we need only one reg, we have already won. */
5046 if (nr == 1)
5048 /* But reject a single reg if we demand a group. */
5049 if (force_group)
5050 continue;
5051 break;
5053 /* Otherwise check that as many consecutive regs as we need
5054 are available here.
5055 Also, don't use for a group registers that are
5056 needed for nongroups. */
5057 if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
5058 while (nr > 1)
5060 regno = spill_regs[i] + nr - 1;
5061 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
5062 && spill_reg_order[regno] >= 0
5063 && reload_reg_free_p (regno, reload_opnum[r],
5064 reload_when_needed[r])
5065 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
5066 regno)))
5067 break;
5068 nr--;
5070 if (nr == 1)
5071 break;
5075 /* If we found something on pass 1, omit pass 2. */
5076 if (count < n_spills)
5077 break;
5080 /* We should have found a spill register by now. */
5081 if (count == n_spills)
5083 if (noerror)
5084 return 0;
5085 goto failure;
5088 /* I is the index in SPILL_REG_RTX of the reload register we are to
5089 allocate. Get an rtx for it and find its register number. */
5091 new = spill_reg_rtx[i];
5093 if (new == 0 || GET_MODE (new) != reload_mode[r])
5094 spill_reg_rtx[i] = new
5095 = gen_rtx (REG, reload_mode[r], spill_regs[i]);
5097 regno = true_regnum (new);
5099 /* Detect when the reload reg can't hold the reload mode.
5100 This used to be one `if', but the Sequent compiler can't handle that. */
5101 if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5103 enum machine_mode test_mode = VOIDmode;
5104 if (reload_in[r])
5105 test_mode = GET_MODE (reload_in[r]);
5106 /* If reload_in[r] has VOIDmode, it means we will load it
5107 in whatever mode the reload reg has: to wit, reload_mode[r].
5108 We have already tested that for validity. */
5109 /* Aside from that, we need to test that the expressions
5110 to reload from or into have modes which are valid for this
5111 reload register. Otherwise the reload insns would be invalid. */
5112 if (! (reload_in[r] != 0 && test_mode != VOIDmode
5113 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
5114 if (! (reload_out[r] != 0
5115 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
5117 /* The reg is OK. */
5118 last_spill_reg = i;
5120 /* Mark as in use for this insn the reload regs we use
5121 for this. */
5122 mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
5123 reload_when_needed[r], reload_mode[r]);
5125 reload_reg_rtx[r] = new;
5126 reload_spill_index[r] = i;
5127 return 1;
5131 /* The reg is not OK. */
5132 if (noerror)
5133 return 0;
5135 failure:
5136 if (asm_noperands (PATTERN (insn)) < 0)
5137 /* It's the compiler's fault. */
5138 fatal_insn ("Could not find a spill register", insn);
5140 /* It's the user's fault; the operand's mode and constraint
5141 don't match. Disable this reload so we don't crash in final. */
5142 error_for_asm (insn,
5143 "`asm' operand constraint incompatible with operand size");
5144 reload_in[r] = 0;
5145 reload_out[r] = 0;
5146 reload_reg_rtx[r] = 0;
5147 reload_optional[r] = 1;
5148 reload_secondary_p[r] = 1;
5150 return 1;
5153 /* Assign hard reg targets for the pseudo-registers we must reload
5154 into hard regs for this insn.
5155 Also output the instructions to copy them in and out of the hard regs.
5157 For machines with register classes, we are responsible for
5158 finding a reload reg in the proper class. */
5160 static void
5161 choose_reload_regs (insn, avoid_return_reg)
5162 rtx insn;
5163 rtx avoid_return_reg;
5165 register int i, j;
5166 int max_group_size = 1;
5167 enum reg_class group_class = NO_REGS;
5168 int inheritance;
5170 rtx save_reload_reg_rtx[MAX_RELOADS];
5171 char save_reload_inherited[MAX_RELOADS];
5172 rtx save_reload_inheritance_insn[MAX_RELOADS];
5173 rtx save_reload_override_in[MAX_RELOADS];
5174 int save_reload_spill_index[MAX_RELOADS];
5175 HARD_REG_SET save_reload_reg_used;
5176 HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
5177 HARD_REG_SET save_reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
5178 HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
5179 HARD_REG_SET save_reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
5180 HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
5181 HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
5182 HARD_REG_SET save_reload_reg_used_in_op_addr;
5183 HARD_REG_SET save_reload_reg_used_in_op_addr_reload;
5184 HARD_REG_SET save_reload_reg_used_in_insn;
5185 HARD_REG_SET save_reload_reg_used_in_other_addr;
5186 HARD_REG_SET save_reload_reg_used_at_all;
5188 bzero (reload_inherited, MAX_RELOADS);
5189 bzero ((char *) reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
5190 bzero ((char *) reload_override_in, MAX_RELOADS * sizeof (rtx));
5192 CLEAR_HARD_REG_SET (reload_reg_used);
5193 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
5194 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
5195 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
5196 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
5197 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
5199 for (i = 0; i < reload_n_operands; i++)
5201 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
5202 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
5203 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
5204 CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
5205 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
5206 CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
5209 #ifdef SMALL_REGISTER_CLASSES
5210 /* Don't bother with avoiding the return reg
5211 if we have no mandatory reload that could use it. */
5212 if (SMALL_REGISTER_CLASSES && avoid_return_reg)
5214 int do_avoid = 0;
5215 int regno = REGNO (avoid_return_reg);
5216 int nregs
5217 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
5218 int r;
5220 for (r = regno; r < regno + nregs; r++)
5221 if (spill_reg_order[r] >= 0)
5222 for (j = 0; j < n_reloads; j++)
5223 if (!reload_optional[j] && reload_reg_rtx[j] == 0
5224 && (reload_in[j] != 0 || reload_out[j] != 0
5225 || reload_secondary_p[j])
5227 TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
5228 do_avoid = 1;
5229 if (!do_avoid)
5230 avoid_return_reg = 0;
5232 #endif /* SMALL_REGISTER_CLASSES */
5234 #if 0 /* Not needed, now that we can always retry without inheritance. */
5235 /* See if we have more mandatory reloads than spill regs.
5236 If so, then we cannot risk optimizations that could prevent
5237 reloads from sharing one spill register.
5239 Since we will try finding a better register than reload_reg_rtx
5240 unless it is equal to reload_in or reload_out, count such reloads. */
5243 int tem = 0;
5244 #ifdef SMALL_REGISTER_CLASSES
5245 if (SMALL_REGISTER_CLASSES)
5246 tem = (avoid_return_reg != 0);
5247 #endif
5248 for (j = 0; j < n_reloads; j++)
5249 if (! reload_optional[j]
5250 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
5251 && (reload_reg_rtx[j] == 0
5252 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
5253 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
5254 tem++;
5255 if (tem > n_spills)
5256 must_reuse = 1;
5258 #endif
5260 #ifdef SMALL_REGISTER_CLASSES
5261 /* Don't use the subroutine call return reg for a reload
5262 if we are supposed to avoid it. */
5263 if (SMALL_REGISTER_CLASSES && avoid_return_reg)
5265 int regno = REGNO (avoid_return_reg);
5266 int nregs
5267 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
5268 int r;
5270 for (r = regno; r < regno + nregs; r++)
5271 if (spill_reg_order[r] >= 0)
5272 SET_HARD_REG_BIT (reload_reg_used, r);
5274 #endif /* SMALL_REGISTER_CLASSES */
5276 /* In order to be certain of getting the registers we need,
5277 we must sort the reloads into order of increasing register class.
5278 Then our grabbing of reload registers will parallel the process
5279 that provided the reload registers.
5281 Also note whether any of the reloads wants a consecutive group of regs.
5282 If so, record the maximum size of the group desired and what
5283 register class contains all the groups needed by this insn. */
5285 for (j = 0; j < n_reloads; j++)
5287 reload_order[j] = j;
5288 reload_spill_index[j] = -1;
5290 reload_mode[j]
5291 = (reload_inmode[j] == VOIDmode
5292 || (GET_MODE_SIZE (reload_outmode[j])
5293 > GET_MODE_SIZE (reload_inmode[j])))
5294 ? reload_outmode[j] : reload_inmode[j];
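/* Illustrative example: an in-out reload whose input is read in HImode but
   whose output is written in SImode gets reload_mode[j] == SImode, the wider
   of the two, so the register or group chosen later is large enough for both
   directions.  */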
5296 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
5298 if (reload_nregs[j] > 1)
5300 max_group_size = MAX (reload_nregs[j], max_group_size);
5301 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
5304 /* If we have already decided to use a certain register,
5305 don't use it in another way. */
5306 if (reload_reg_rtx[j])
5307 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
5308 reload_when_needed[j], reload_mode[j]);
5311 if (n_reloads > 1)
5312 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
5314 bcopy ((char *) reload_reg_rtx, (char *) save_reload_reg_rtx,
5315 sizeof reload_reg_rtx);
5316 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
5317 bcopy ((char *) reload_inheritance_insn,
5318 (char *) save_reload_inheritance_insn,
5319 sizeof reload_inheritance_insn);
5320 bcopy ((char *) reload_override_in, (char *) save_reload_override_in,
5321 sizeof reload_override_in);
5322 bcopy ((char *) reload_spill_index, (char *) save_reload_spill_index,
5323 sizeof reload_spill_index);
5324 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
5325 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
5326 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
5327 reload_reg_used_in_op_addr);
5329 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr_reload,
5330 reload_reg_used_in_op_addr_reload);
5332 COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
5333 reload_reg_used_in_insn);
5334 COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
5335 reload_reg_used_in_other_addr);
5337 for (i = 0; i < reload_n_operands; i++)
5339 COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
5340 reload_reg_used_in_output[i]);
5341 COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
5342 reload_reg_used_in_input[i]);
5343 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
5344 reload_reg_used_in_input_addr[i]);
5345 COPY_HARD_REG_SET (save_reload_reg_used_in_inpaddr_addr[i],
5346 reload_reg_used_in_inpaddr_addr[i]);
5347 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
5348 reload_reg_used_in_output_addr[i]);
5349 COPY_HARD_REG_SET (save_reload_reg_used_in_outaddr_addr[i],
5350 reload_reg_used_in_outaddr_addr[i]);
5353 /* If -O, try first with inheritance, then turning it off.
5354 If not -O, don't do inheritance.
5355 Using inheritance when not optimizing leads to paradoxes
5356 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5357 because one side of the comparison might be inherited. */
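/* Concretely, the loop below makes at most two attempts: when optimizing it
   first tries with inheritance enabled, and if that attempt fails (control
   reaches the `fail' code further down, which restores the saved reload
   state) it tries once more with inheritance disabled.  When not optimizing,
   only the no-inheritance attempt is made.  */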
5359 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
5361 /* Process the reloads in order of preference just found.
5362 Beyond this point, subregs can be found in reload_reg_rtx.
5364 This used to look for an existing reloaded home for all
5365 of the reloads, and only then perform any new reloads.
5366 But that could lose if the reloads were done out of reg-class order
5367 because a later reload with a looser constraint might have an old
5368 home in a register needed by an earlier reload with a tighter constraint.
5370 To solve this, we make two passes over the reloads, in the order
5371 described above. In the first pass we try to inherit a reload
5372 from a previous insn. If there is a later reload that needs a
5373 class that is a proper subset of the class being processed, we must
5374 also allocate a spill register during the first pass.
5376 Then make a second pass over the reloads to allocate any reloads
5377 that haven't been given registers yet. */
5379 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5381 for (j = 0; j < n_reloads; j++)
5383 register int r = reload_order[j];
5385 /* Ignore reloads that got marked inoperative. */
5386 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5387 continue;
5389 /* If find_reloads chose to use reload_in or reload_out as a reload
5390 register, we don't need to choose one. Otherwise, try even if it found
5391 one, since we might save an insn if we find the value lying around. */
5392 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
5393 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
5394 || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
5395 continue;
5397 #if 0 /* No longer needed for correct operation.
5398 It might give better code, or might not; worth an experiment? */
5399 /* If this is an optional reload, we can't inherit from earlier insns
5400 until we are sure that any non-optional reloads have been allocated.
5401 The following code takes advantage of the fact that optional reloads
5402 are at the end of reload_order. */
5403 if (reload_optional[r] != 0)
5404 for (i = 0; i < j; i++)
5405 if ((reload_out[reload_order[i]] != 0
5406 || reload_in[reload_order[i]] != 0
5407 || reload_secondary_p[reload_order[i]])
5408 && ! reload_optional[reload_order[i]]
5409 && reload_reg_rtx[reload_order[i]] == 0)
5410 allocate_reload_reg (reload_order[i], insn, 0, inheritance);
5411 #endif
5413 /* First see if this pseudo is already available as reloaded
5414 for a previous insn. We cannot try to inherit for reloads
5415 that are smaller than the maximum number of registers needed
5416 for groups unless the register we would allocate cannot be used
5417 for the groups.
5419 We could check here to see if this is a secondary reload for
5420 an object that is already in a register of the desired class.
5421 This would avoid the need for the secondary reload register.
5422 But this is complex because we can't easily determine what
5423 objects might want to be loaded via this reload. So let a register
5424 be allocated here. In `emit_reload_insns' we suppress one of the
5425 loads in the case described above. */
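/* A rough example of the inheritance test that follows (register numbers
   purely illustrative): if a previous insn reloaded pseudo 117 into spill
   reg 8, then reg_last_reload_reg[117] still names hard reg 8 and
   reg_reloaded_contents[spill_reg_order[8]] is still 117, so a later input
   reload of pseudo 117 can reuse hard reg 8 instead of emitting another
   load, provided reg 8 is of the right class, wide enough, and still free
   at the point where this reload needs it.  */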
5427 if (inheritance)
5429 register int regno = -1;
5430 enum machine_mode mode;
5432 if (reload_in[r] == 0)
5434 else if (GET_CODE (reload_in[r]) == REG)
5436 regno = REGNO (reload_in[r]);
5437 mode = GET_MODE (reload_in[r]);
5439 else if (GET_CODE (reload_in_reg[r]) == REG)
5441 regno = REGNO (reload_in_reg[r]);
5442 mode = GET_MODE (reload_in_reg[r]);
5444 #if 0
5445 /* This won't work, since REGNO can be a pseudo reg number.
5446 Also, it takes much more hair to keep track of all the things
5447 that can invalidate an inherited reload of part of a pseudoreg. */
5448 else if (GET_CODE (reload_in[r]) == SUBREG
5449 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
5450 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
5451 #endif
5453 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
5455 i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];
5457 if (reg_reloaded_contents[i] == regno
5458 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
5459 >= GET_MODE_SIZE (mode))
5460 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
5461 && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5462 spill_regs[i])
5463 && (reload_nregs[r] == max_group_size
5464 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
5465 spill_regs[i]))
5466 && reload_reg_free_p (spill_regs[i], reload_opnum[r],
5467 reload_when_needed[r])
5468 && reload_reg_free_before_p (spill_regs[i],
5469 reload_opnum[r],
5470 reload_when_needed[r]))
5472 /* If a group is needed, verify that all the subsequent
5473 registers still have their values intact. */
5474 int nr
5475 = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5476 int k;
5478 for (k = 1; k < nr; k++)
5479 if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
5480 != regno)
5481 break;
5483 if (k == nr)
5485 int i1;
5487 /* We found a register that contains the
5488 value we need. If this register is the
5489 same as an `earlyclobber' operand of the
5490 current insn, just mark it as a place to
5491 reload from since we can't use it as the
5492 reload register itself. */
5494 for (i1 = 0; i1 < n_earlyclobbers; i1++)
5495 if (reg_overlap_mentioned_for_reload_p
5496 (reg_last_reload_reg[regno],
5497 reload_earlyclobbers[i1]))
5498 break;
5500 if (i1 != n_earlyclobbers
5501 /* Don't really use the inherited spill reg
5502 if we need it wider than we've got it. */
5503 || (GET_MODE_SIZE (reload_mode[r])
5504 > GET_MODE_SIZE (mode)))
5505 reload_override_in[r] = reg_last_reload_reg[regno];
5506 else
5508 int k;
5509 /* We can use this as a reload reg. */
5510 /* Mark the register as in use for this part of
5511 the insn. */
5512 mark_reload_reg_in_use (spill_regs[i],
5513 reload_opnum[r],
5514 reload_when_needed[r],
5515 reload_mode[r]);
5516 reload_reg_rtx[r] = reg_last_reload_reg[regno];
5517 reload_inherited[r] = 1;
5518 reload_inheritance_insn[r]
5519 = reg_reloaded_insn[i];
5520 reload_spill_index[r] = i;
5521 for (k = 0; k < nr; k++)
5522 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5523 spill_regs[i + k]);
5530 /* Here's another way to see if the value is already lying around. */
5531 if (inheritance
5532 && reload_in[r] != 0
5533 && ! reload_inherited[r]
5534 && reload_out[r] == 0
5535 && (CONSTANT_P (reload_in[r])
5536 || GET_CODE (reload_in[r]) == PLUS
5537 || GET_CODE (reload_in[r]) == REG
5538 || GET_CODE (reload_in[r]) == MEM)
5539 && (reload_nregs[r] == max_group_size
5540 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
5542 register rtx equiv
5543 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
5544 -1, NULL_PTR, 0, reload_mode[r]);
5545 int regno;
5547 if (equiv != 0)
5549 if (GET_CODE (equiv) == REG)
5550 regno = REGNO (equiv);
5551 else if (GET_CODE (equiv) == SUBREG)
5553 /* This must be a SUBREG of a hard register.
5554 Make a new REG since this might be used in an
5555 address and not all machines support SUBREGs
5556 there. */
5557 regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
5558 equiv = gen_rtx (REG, reload_mode[r], regno);
5560 else
5561 abort ();
5564 /* If we found a spill reg, reject it unless it is free
5565 and of the desired class. */
5566 if (equiv != 0
5567 && ((spill_reg_order[regno] >= 0
5568 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5569 reload_when_needed[r]))
5570 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5571 regno)))
5572 equiv = 0;
5574 if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
5575 equiv = 0;
5577 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5578 equiv = 0;
5580 /* We found a register that contains the value we need.
5581 If this register is the same as an `earlyclobber' operand
5582 of the current insn, just mark it as a place to reload from
5583 since we can't use it as the reload register itself. */
5585 if (equiv != 0)
5586 for (i = 0; i < n_earlyclobbers; i++)
5587 if (reg_overlap_mentioned_for_reload_p (equiv,
5588 reload_earlyclobbers[i]))
5590 reload_override_in[r] = equiv;
5591 equiv = 0;
5592 break;
5595 /* JRV: If the equiv register we have found is
5596 explicitly clobbered in the current insn, mark but
5597 don't use, as above. */
5599 if (equiv != 0 && regno_clobbered_p (regno, insn))
5601 reload_override_in[r] = equiv;
5602 equiv = 0;
5605 /* If we found an equivalent reg, say no code need be generated
5606 to load it, and use it as our reload reg. */
5607 if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
5609 int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);
5610 int k;
5611 reload_reg_rtx[r] = equiv;
5612 reload_inherited[r] = 1;
5614 /* If any of the hard registers in EQUIV are spill
5615 registers, mark them as in use for this insn. */
5616 for (k = 0; k < nr; k++)
5618 i = spill_reg_order[regno + k];
5619 if (i >= 0)
5621 mark_reload_reg_in_use (regno, reload_opnum[r],
5622 reload_when_needed[r],
5623 reload_mode[r]);
5624 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5625 regno + k);
5631 /* If we found a register to use already, or if this is an optional
5632 reload, we are done. */
5633 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
5634 continue;
5636 #if 0 /* No longer needed for correct operation. Might or might not
5637 give better code on the average. Want to experiment? */
5639 /* See if there is a later reload that has a class different from our
5640 class that intersects our class or that requires less register
5641 than our reload. If so, we must allocate a register to this
5642 reload now, since that reload might inherit a previous reload
5643 and take the only available register in our class. Don't do this
5644 for optional reloads since they will force all previous reloads
5645 to be allocated. Also don't do this for reloads that have been
5646 turned off. */
5648 for (i = j + 1; i < n_reloads; i++)
5650 int s = reload_order[i];
5652 if ((reload_in[s] == 0 && reload_out[s] == 0
5653 && ! reload_secondary_p[s])
5654 || reload_optional[s])
5655 continue;
5657 if ((reload_reg_class[s] != reload_reg_class[r]
5658 && reg_classes_intersect_p (reload_reg_class[r],
5659 reload_reg_class[s]))
5660 || reload_nregs[s] < reload_nregs[r])
5661 break;
5664 if (i == n_reloads)
5665 continue;
5667 allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
5668 #endif
5671 /* Now allocate reload registers for anything non-optional that
5672 didn't get one yet. */
5673 for (j = 0; j < n_reloads; j++)
5675 register int r = reload_order[j];
5677 /* Ignore reloads that got marked inoperative. */
5678 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5679 continue;
5681 /* Skip reloads that already have a register allocated or are
5682 optional. */
5683 if (reload_reg_rtx[r] != 0 || reload_optional[r])
5684 continue;
5686 if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
5687 break;
5690 /* If that loop got all the way, we have won. */
5691 if (j == n_reloads)
5692 break;
5694 fail:
5695 /* Loop around and try without any inheritance. */
5696 /* First undo everything done by the failed attempt
5697 to allocate with inheritance. */
5698 bcopy ((char *) save_reload_reg_rtx, (char *) reload_reg_rtx,
5699 sizeof reload_reg_rtx);
5700 bcopy ((char *) save_reload_inherited, (char *) reload_inherited,
5701 sizeof reload_inherited);
5702 bcopy ((char *) save_reload_inheritance_insn,
5703 (char *) reload_inheritance_insn,
5704 sizeof reload_inheritance_insn);
5705 bcopy ((char *) save_reload_override_in, (char *) reload_override_in,
5706 sizeof reload_override_in);
5707 bcopy ((char *) save_reload_spill_index, (char *) reload_spill_index,
5708 sizeof reload_spill_index);
5709 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
5710 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
5711 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
5712 save_reload_reg_used_in_op_addr);
5713 COPY_HARD_REG_SET (reload_reg_used_in_op_addr_reload,
5714 save_reload_reg_used_in_op_addr_reload);
5715 COPY_HARD_REG_SET (reload_reg_used_in_insn,
5716 save_reload_reg_used_in_insn);
5717 COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
5718 save_reload_reg_used_in_other_addr);
5720 for (i = 0; i < reload_n_operands; i++)
5722 COPY_HARD_REG_SET (reload_reg_used_in_input[i],
5723 save_reload_reg_used_in_input[i]);
5724 COPY_HARD_REG_SET (reload_reg_used_in_output[i],
5725 save_reload_reg_used_in_output[i]);
5726 COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
5727 save_reload_reg_used_in_input_addr[i]);
5728 COPY_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i],
5729 save_reload_reg_used_in_inpaddr_addr[i]);
5730 COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
5731 save_reload_reg_used_in_output_addr[i]);
5732 COPY_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i],
5733 save_reload_reg_used_in_outaddr_addr[i]);
5737 /* If we thought we could inherit a reload, because it seemed that
5738 nothing else wanted the same reload register earlier in the insn,
5739 verify that assumption, now that all reloads have been assigned. */
5741 for (j = 0; j < n_reloads; j++)
5743 register int r = reload_order[j];
5745 if (reload_inherited[r] && reload_reg_rtx[r] != 0
5746 && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
5747 reload_opnum[r],
5748 reload_when_needed[r]))
5749 reload_inherited[r] = 0;
5751 /* If we found a better place to reload from,
5752 validate it in the same fashion, if it is a reload reg. */
5753 if (reload_override_in[r]
5754 && (GET_CODE (reload_override_in[r]) == REG
5755 || GET_CODE (reload_override_in[r]) == SUBREG))
5757 int regno = true_regnum (reload_override_in[r]);
5758 if (spill_reg_order[regno] >= 0
5759 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5760 reload_when_needed[r]))
5761 reload_override_in[r] = 0;
5765 /* Now that reload_override_in is known valid,
5766 actually override reload_in. */
5767 for (j = 0; j < n_reloads; j++)
5768 if (reload_override_in[j])
5769 reload_in[j] = reload_override_in[j];
5771 /* If this reload won't be done because it has been cancelled or is
5772 optional and not inherited, clear reload_reg_rtx so other
5773 routines (such as subst_reloads) don't get confused. */
5774 for (j = 0; j < n_reloads; j++)
5775 if (reload_reg_rtx[j] != 0
5776 && ((reload_optional[j] && ! reload_inherited[j])
5777 || (reload_in[j] == 0 && reload_out[j] == 0
5778 && ! reload_secondary_p[j])))
5780 int regno = true_regnum (reload_reg_rtx[j]);
5782 if (spill_reg_order[regno] >= 0)
5783 clear_reload_reg_in_use (regno, reload_opnum[j],
5784 reload_when_needed[j], reload_mode[j]);
5785 reload_reg_rtx[j] = 0;
5788 /* Record which pseudos and which spill regs have output reloads. */
5789 for (j = 0; j < n_reloads; j++)
5791 register int r = reload_order[j];
5793 i = reload_spill_index[r];
5795 /* I is nonneg if this reload used one of the spill regs.
5796 If reload_reg_rtx[r] is 0, this is an optional reload
5797 that we opted to ignore. */
5798 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
5799 && reload_reg_rtx[r] != 0)
5801 register int nregno = REGNO (reload_out[r]);
5802 int nr = 1;
5804 if (nregno < FIRST_PSEUDO_REGISTER)
5805 nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
5807 while (--nr >= 0)
5808 reg_has_output_reload[nregno + nr] = 1;
5810 if (i >= 0)
5812 nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5813 while (--nr >= 0)
5814 SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
5817 if (reload_when_needed[r] != RELOAD_OTHER
5818 && reload_when_needed[r] != RELOAD_FOR_OUTPUT
5819 && reload_when_needed[r] != RELOAD_FOR_INSN)
5820 abort ();
5825 /* If SMALL_REGISTER_CLASSES is defined, we may not have merged two
5826 reloads of the same item for fear that we might not have enough reload
5827 registers. However, normally they will get the same reload register
5828 and hence actually need not be loaded twice.
5830 Here we check for the most common case of this phenomenon: when we have
5831 a number of reloads for the same object, each of which were allocated
5832 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
5833 reload, and is not modified in the insn itself. If we find such,
5834 merge all the reloads and set the resulting reload to RELOAD_OTHER.
5835 This will not increase the number of spill registers needed and will
5836 prevent redundant code. */
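/* A sketch of the situation handled below, with hypothetical operands:
   if find_reloads produced a RELOAD_FOR_INPUT reload of (symbol_ref X) for
   operand 1 and a RELOAD_FOR_INPUT_ADDRESS reload of the same (symbol_ref X)
   for operand 2, and both were assigned the same reload register, which the
   insn itself does not modify, then the two are merged and the surviving
   reload becomes RELOAD_OTHER, so the value is loaded only once before the
   insn.  */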
5838 #ifdef SMALL_REGISTER_CLASSES
5840 static void
5841 merge_assigned_reloads (insn)
5842 rtx insn;
5844 int i, j;
5846 /* Scan all the reloads looking for ones that only load values and
5847 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
5848 assigned and not modified by INSN. */
5850 for (i = 0; i < n_reloads; i++)
5852 if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
5853 || reload_out[i] != 0 || reload_reg_rtx[i] == 0
5854 || reg_set_p (reload_reg_rtx[i], insn))
5855 continue;
5857 /* Look at all other reloads. Ensure that the only use of this
5858 reload_reg_rtx is in a reload that just loads the same value
5859 as we do. Note that any secondary reloads must be of the identical
5860 class since the values, modes, and result registers are the
5861 same, so we need not do anything with any secondary reloads. */
5863 for (j = 0; j < n_reloads; j++)
5865 if (i == j || reload_reg_rtx[j] == 0
5866 || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
5867 reload_reg_rtx[i]))
5868 continue;
5870 /* If the reload regs aren't exactly the same (e.g., different modes)
5871 or if the values are different, we can't merge anything with this
5872 reload register. */
5874 if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
5875 || reload_out[j] != 0 || reload_in[j] == 0
5876 || ! rtx_equal_p (reload_in[i], reload_in[j]))
5877 break;
5880 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
5881 we, in fact, found any matching reloads. */
5883 if (j == n_reloads)
5885 for (j = 0; j < n_reloads; j++)
5886 if (i != j && reload_reg_rtx[j] != 0
5887 && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j]))
5889 reload_when_needed[i] = RELOAD_OTHER;
5890 reload_in[j] = 0;
5891 transfer_replacements (i, j);
5894 /* If this is now RELOAD_OTHER, look for any reloads that load
5895 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
5896 if they were for inputs, RELOAD_OTHER for outputs. Note that
5897 this test is equivalent to looking for reloads for this operand
5898 number. */
5900 if (reload_when_needed[i] == RELOAD_OTHER)
5901 for (j = 0; j < n_reloads; j++)
5902 if (reload_in[j] != 0
5903 && reload_when_needed[j] != RELOAD_OTHER
5904 && reg_overlap_mentioned_for_reload_p (reload_in[j],
5905 reload_in[i]))
5906 reload_when_needed[j]
5907 = ((reload_when_needed[j] == RELOAD_FOR_INPUT_ADDRESS
5908 || reload_when_needed[j] == RELOAD_FOR_INPADDR_ADDRESS)
5909 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER);
5913 #endif /* SMALL_REGISTER_CLASSES */
5915 /* Output insns to reload values in and out of the chosen reload regs. */
5917 static void
5918 emit_reload_insns (insn)
5919 rtx insn;
5921 register int j;
5922 rtx input_reload_insns[MAX_RECOG_OPERANDS];
5923 rtx other_input_address_reload_insns = 0;
5924 rtx other_input_reload_insns = 0;
5925 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
5926 rtx inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
5927 rtx output_reload_insns[MAX_RECOG_OPERANDS];
5928 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
5929 rtx outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
5930 rtx operand_reload_insns = 0;
5931 rtx other_operand_reload_insns = 0;
5932 rtx other_output_reload_insns[MAX_RECOG_OPERANDS];
5933 rtx following_insn = NEXT_INSN (insn);
5934 rtx before_insn = insn;
5935 int special;
5936 /* Values to be put in spill_reg_store are put here first. */
5937 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
5939 for (j = 0; j < reload_n_operands; j++)
5940 input_reload_insns[j] = input_address_reload_insns[j]
5941 = inpaddr_address_reload_insns[j]
5942 = output_reload_insns[j] = output_address_reload_insns[j]
5943 = outaddr_address_reload_insns[j]
5944 = other_output_reload_insns[j] = 0;
5946 /* Now output the instructions to copy the data into and out of the
5947 reload registers. Do these in the order that the reloads were reported,
5948 since reloads of base and index registers precede reloads of operands
5949 and the operands may need the base and index registers reloaded. */
5951 for (j = 0; j < n_reloads; j++)
5953 register rtx old;
5954 rtx oldequiv_reg = 0;
5955 rtx this_reload_insn = 0;
5957 if (reload_spill_index[j] >= 0)
5958 new_spill_reg_store[reload_spill_index[j]] = 0;
5960 old = reload_in[j];
5961 if (old != 0 && ! reload_inherited[j]
5962 && ! rtx_equal_p (reload_reg_rtx[j], old)
5963 && reload_reg_rtx[j] != 0)
5965 register rtx reloadreg = reload_reg_rtx[j];
5966 rtx oldequiv = 0;
5967 enum machine_mode mode;
5968 rtx *where;
5970 /* Determine the mode to reload in.
5971 This is very tricky because we have three to choose from.
5972 There is the mode the insn operand wants (reload_inmode[J]).
5973 There is the mode of the reload register RELOADREG.
5974 There is the intrinsic mode of the operand, which we could find
5975 by stripping some SUBREGs.
5976 It turns out that RELOADREG's mode is irrelevant:
5977 we can change that arbitrarily.
5979 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
5980 then the reload reg may not support QImode moves, so use SImode.
5981 If foo is in memory due to spilling a pseudo reg, this is safe,
5982 because the QImode value is in the least significant part of a
5983 slot big enough for a SImode. If foo is some other sort of
5984 memory reference, then it is impossible to reload this case,
5985 so previous passes had better make sure this never happens.
5987 Then consider a one-word union which has SImode and one of its
5988 members is a float, being fetched as (SUBREG:SF union:SI).
5989 We must fetch that as SFmode because we could be loading into
5990 a float-only register. In this case OLD's mode is correct.
5992 Consider an immediate integer: it has VOIDmode. Here we need
5993 to get a mode from something else.
5995 In some cases, there is a fourth mode, the operand's
5996 containing mode. If the insn specifies a containing mode for
5997 this operand, it overrides all others.
5999 I am not sure whether the algorithm here is always right,
6000 but it does the right things in those cases. */
6002 mode = GET_MODE (old);
6003 if (mode == VOIDmode)
6004 mode = reload_inmode[j];
6006 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6007 /* If we need a secondary register for this operation, see if
6008 the value is already in a register in that class. Don't
6009 do this if the secondary register will be used as a scratch
6010 register. */
6012 if (reload_secondary_in_reload[j] >= 0
6013 && reload_secondary_in_icode[j] == CODE_FOR_nothing
6014 && optimize)
6015 oldequiv
6016 = find_equiv_reg (old, insn,
6017 reload_reg_class[reload_secondary_in_reload[j]],
6018 -1, NULL_PTR, 0, mode);
6019 #endif
6021 /* If reloading from memory, see if there is a register
6022 that already holds the same value. If so, reload from there.
6023 We can pass 0 as the reload_reg_p argument because
6024 any other reload has either already been emitted,
6025 in which case find_equiv_reg will see the reload-insn,
6026 or has yet to be emitted, in which case it doesn't matter
6027 because we will use this equiv reg right away. */
6029 if (oldequiv == 0 && optimize
6030 && (GET_CODE (old) == MEM
6031 || (GET_CODE (old) == REG
6032 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6033 && reg_renumber[REGNO (old)] < 0)))
6034 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
6035 -1, NULL_PTR, 0, mode);
6037 if (oldequiv)
6039 int regno = true_regnum (oldequiv);
6041 /* If OLDEQUIV is a spill register, don't use it for this
6042 if any other reload needs it at an earlier stage of this insn
6043 or at this stage. */
6044 if (spill_reg_order[regno] >= 0
6045 && (! reload_reg_free_p (regno, reload_opnum[j],
6046 reload_when_needed[j])
6047 || ! reload_reg_free_before_p (regno, reload_opnum[j],
6048 reload_when_needed[j])))
6049 oldequiv = 0;
6051 /* If OLDEQUIV is not a spill register,
6052 don't use it if any other reload wants it. */
6053 if (spill_reg_order[regno] < 0)
6055 int k;
6056 for (k = 0; k < n_reloads; k++)
6057 if (reload_reg_rtx[k] != 0 && k != j
6058 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
6059 oldequiv))
6061 oldequiv = 0;
6062 break;
6066 /* If it is no cheaper to copy from OLDEQUIV into the
6067 reload register than it would be to move from memory,
6068 don't use it. Likewise, if we need a secondary register
6069 or memory. */
6071 if (oldequiv != 0
6072 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
6073 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
6074 reload_reg_class[j])
6075 >= MEMORY_MOVE_COST (mode)))
6076 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6077 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
6078 mode, oldequiv)
6079 != NO_REGS)
6080 #endif
6081 #ifdef SECONDARY_MEMORY_NEEDED
6082 || SECONDARY_MEMORY_NEEDED (reload_reg_class[j],
6083 REGNO_REG_CLASS (regno),
6084 mode)
6085 #endif
6087 oldequiv = 0;
6090 if (oldequiv == 0)
6091 oldequiv = old;
6092 else if (GET_CODE (oldequiv) == REG)
6093 oldequiv_reg = oldequiv;
6094 else if (GET_CODE (oldequiv) == SUBREG)
6095 oldequiv_reg = SUBREG_REG (oldequiv);
6097 /* If we are reloading from a register that was recently stored in
6098 with an output-reload, see if we can prove there was
6099 actually no need to store the old value in it. */
6101 if (optimize && GET_CODE (oldequiv) == REG
6102 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6103 && spill_reg_order[REGNO (oldequiv)] >= 0
6104 && spill_reg_store[spill_reg_order[REGNO (oldequiv)]] != 0
6105 && find_reg_note (insn, REG_DEAD, reload_in[j])
6106 /* This is unsafe if operand occurs more than once in current
6107 insn. Perhaps some occurrences weren't reloaded. */
6108 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
6109 delete_output_reload
6110 (insn, j, spill_reg_store[spill_reg_order[REGNO (oldequiv)]]);
6112 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
6113 then load RELOADREG from OLDEQUIV. Note that we cannot use
6114 gen_lowpart_common since it can do the wrong thing when
6115 RELOADREG has a multi-word mode. Note that RELOADREG
6116 must always be a REG here. */
6118 if (GET_MODE (reloadreg) != mode)
6119 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6120 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
6121 oldequiv = SUBREG_REG (oldequiv);
6122 if (GET_MODE (oldequiv) != VOIDmode
6123 && mode != GET_MODE (oldequiv))
6124 oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
6126 /* Switch to the right place to emit the reload insns. */
6127 switch (reload_when_needed[j])
6129 case RELOAD_OTHER:
6130 where = &other_input_reload_insns;
6131 break;
6132 case RELOAD_FOR_INPUT:
6133 where = &input_reload_insns[reload_opnum[j]];
6134 break;
6135 case RELOAD_FOR_INPUT_ADDRESS:
6136 where = &input_address_reload_insns[reload_opnum[j]];
6137 break;
6138 case RELOAD_FOR_INPADDR_ADDRESS:
6139 where = &inpaddr_address_reload_insns[reload_opnum[j]];
6140 break;
6141 case RELOAD_FOR_OUTPUT_ADDRESS:
6142 where = &output_address_reload_insns[reload_opnum[j]];
6143 break;
6144 case RELOAD_FOR_OUTADDR_ADDRESS:
6145 where = &outaddr_address_reload_insns[reload_opnum[j]];
6146 break;
6147 case RELOAD_FOR_OPERAND_ADDRESS:
6148 where = &operand_reload_insns;
6149 break;
6150 case RELOAD_FOR_OPADDR_ADDR:
6151 where = &other_operand_reload_insns;
6152 break;
6153 case RELOAD_FOR_OTHER_ADDRESS:
6154 where = &other_input_address_reload_insns;
6155 break;
6156 default:
6157 abort ();
6160 push_to_sequence (*where);
6161 special = 0;
6163 /* Auto-increment addresses must be reloaded in a special way. */
6164 if (GET_CODE (oldequiv) == POST_INC
6165 || GET_CODE (oldequiv) == POST_DEC
6166 || GET_CODE (oldequiv) == PRE_INC
6167 || GET_CODE (oldequiv) == PRE_DEC)
6169 /* We are not going to bother supporting the case where an
6170 incremented register can't be copied directly from
6171 OLDEQUIV since this seems highly unlikely. */
6172 if (reload_secondary_in_reload[j] >= 0)
6173 abort ();
6174 /* Prevent normal processing of this reload. */
6175 special = 1;
6176 /* Output a special code sequence for this case. */
6177 inc_for_reload (reloadreg, oldequiv, reload_inc[j]);
6180 /* If we are reloading a pseudo-register that was set by the previous
6181 insn, see if we can get rid of that pseudo-register entirely
6182 by redirecting the previous insn into our reload register. */
6184 else if (optimize && GET_CODE (old) == REG
6185 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6186 && dead_or_set_p (insn, old)
6187 /* This is unsafe if some other reload
6188 uses the same reg first. */
6189 && reload_reg_free_before_p (REGNO (reloadreg),
6190 reload_opnum[j],
6191 reload_when_needed[j]))
6193 rtx temp = PREV_INSN (insn);
6194 while (temp && GET_CODE (temp) == NOTE)
6195 temp = PREV_INSN (temp);
6196 if (temp
6197 && GET_CODE (temp) == INSN
6198 && GET_CODE (PATTERN (temp)) == SET
6199 && SET_DEST (PATTERN (temp)) == old
6200 /* Make sure we can access insn_operand_constraint. */
6201 && asm_noperands (PATTERN (temp)) < 0
6202 /* This is unsafe if prev insn rejects our reload reg. */
6203 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
6204 reloadreg)
6205 /* This is unsafe if operand occurs more than once in current
6206 insn. Perhaps some occurrences aren't reloaded. */
6207 && count_occurrences (PATTERN (insn), old) == 1
6208 /* Don't risk splitting a matching pair of operands. */
6209 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
6211 /* Store into the reload register instead of the pseudo. */
6212 SET_DEST (PATTERN (temp)) = reloadreg;
6213 /* If these are the only uses of the pseudo reg,
6214 pretend for GDB it lives in the reload reg we used. */
6215 if (reg_n_deaths[REGNO (old)] == 1
6216 && reg_n_sets[REGNO (old)] == 1)
6218 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
6219 alter_reg (REGNO (old), -1);
6221 special = 1;
6225 /* We can't do that, so output an insn to load RELOADREG. */
6227 if (! special)
6229 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6230 rtx second_reload_reg = 0;
6231 enum insn_code icode;
6233 /* If we have a secondary reload, pick up the secondary register
6234 and icode, if any. If OLDEQUIV and OLD are different or
6235 if this is an in-out reload, recompute whether or not we
6236 still need a secondary register and what the icode should
6237 be. If we still need a secondary register and the class or
6238 icode is different, go back to reloading from OLD if using
6239 OLDEQUIV means that we got the wrong type of register. We
6240 cannot have different class or icode due to an in-out reload
6241 because we don't make such reloads when both the input and
6242 output need secondary reload registers. */
6244 if (reload_secondary_in_reload[j] >= 0)
6246 int secondary_reload = reload_secondary_in_reload[j];
6247 rtx real_oldequiv = oldequiv;
6248 rtx real_old = old;
6250 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
6251 and similarly for OLD.
6252 See comments in get_secondary_reload in reload.c. */
6253 if (GET_CODE (oldequiv) == REG
6254 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
6255 && reg_equiv_mem[REGNO (oldequiv)] != 0)
6256 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
6258 if (GET_CODE (old) == REG
6259 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6260 && reg_equiv_mem[REGNO (old)] != 0)
6261 real_old = reg_equiv_mem[REGNO (old)];
6263 second_reload_reg = reload_reg_rtx[secondary_reload];
6264 icode = reload_secondary_in_icode[j];
6266 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
6267 || (reload_in[j] != 0 && reload_out[j] != 0))
6269 enum reg_class new_class
6270 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
6271 mode, real_oldequiv);
6273 if (new_class == NO_REGS)
6274 second_reload_reg = 0;
6275 else
6277 enum insn_code new_icode;
6278 enum machine_mode new_mode;
6280 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
6281 REGNO (second_reload_reg)))
6282 oldequiv = old, real_oldequiv = real_old;
6283 else
6285 new_icode = reload_in_optab[(int) mode];
6286 if (new_icode != CODE_FOR_nothing
6287 && ((insn_operand_predicate[(int) new_icode][0]
6288 && ! ((*insn_operand_predicate[(int) new_icode][0])
6289 (reloadreg, mode)))
6290 || (insn_operand_predicate[(int) new_icode][1]
6291 && ! ((*insn_operand_predicate[(int) new_icode][1])
6292 (real_oldequiv, mode)))))
6293 new_icode = CODE_FOR_nothing;
6295 if (new_icode == CODE_FOR_nothing)
6296 new_mode = mode;
6297 else
6298 new_mode = insn_operand_mode[(int) new_icode][2];
6300 if (GET_MODE (second_reload_reg) != new_mode)
6302 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
6303 new_mode))
6304 oldequiv = old, real_oldequiv = real_old;
6305 else
6306 second_reload_reg
6307 = gen_rtx (REG, new_mode,
6308 REGNO (second_reload_reg));
6314 /* If we still need a secondary reload register, check
6315 to see if it is being used as a scratch or intermediate
6316 register and generate code appropriately. If we need
6317 a scratch register, use REAL_OLDEQUIV since the form of
6318 the insn may depend on the actual address if it is
6319 a MEM. */
6321 if (second_reload_reg)
6323 if (icode != CODE_FOR_nothing)
6325 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
6326 second_reload_reg));
6327 special = 1;
6329 else
6331 /* See if we need a scratch register to load the
6332 intermediate register (a tertiary reload). */
6333 enum insn_code tertiary_icode
6334 = reload_secondary_in_icode[secondary_reload];
6336 if (tertiary_icode != CODE_FOR_nothing)
6338 rtx third_reload_reg
6339 = reload_reg_rtx[reload_secondary_in_reload[secondary_reload]];
6341 emit_insn ((GEN_FCN (tertiary_icode)
6342 (second_reload_reg, real_oldequiv,
6343 third_reload_reg)));
6345 else
6346 gen_reload (second_reload_reg, oldequiv,
6347 reload_opnum[j],
6348 reload_when_needed[j]);
6350 oldequiv = second_reload_reg;
6354 #endif
6356 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
6357 gen_reload (reloadreg, oldequiv, reload_opnum[j],
6358 reload_when_needed[j]);
6360 #if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
6361 /* We may have to make a REG_DEAD note for the secondary reload
6362 register in the insns we just made. Find the last insn that
6363 mentioned the register. */
6364 if (! special && second_reload_reg
6365 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
6367 rtx prev;
6369 for (prev = get_last_insn (); prev;
6370 prev = PREV_INSN (prev))
6371 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6372 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
6373 PATTERN (prev)))
6375 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
6376 second_reload_reg,
6377 REG_NOTES (prev));
6378 break;
6381 #endif
6384 this_reload_insn = get_last_insn ();
6385 /* End this sequence. */
6386 *where = get_insns ();
6387 end_sequence ();
6390 /* Add a note saying the input reload reg
6391 dies in this insn, if anyone cares. */
6392 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6393 if (old != 0
6394 && reload_reg_rtx[j] != old
6395 && reload_reg_rtx[j] != 0
6396 && reload_out[j] == 0
6397 && ! reload_inherited[j]
6398 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
6400 register rtx reloadreg = reload_reg_rtx[j];
6402 #if 0
6403 /* We can't abort here because we need to support this for sched.c.
6404 It's not terrible to miss a REG_DEAD note, but we should try
6405 to figure out how to do this correctly. */
6406 /* The code below is incorrect for address-only reloads. */
6407 if (reload_when_needed[j] != RELOAD_OTHER
6408 && reload_when_needed[j] != RELOAD_FOR_INPUT)
6409 abort ();
6410 #endif
6412 /* Add a death note to this insn, for an input reload. */
6414 if ((reload_when_needed[j] == RELOAD_OTHER
6415 || reload_when_needed[j] == RELOAD_FOR_INPUT)
6416 && ! dead_or_set_p (insn, reloadreg))
6417 REG_NOTES (insn)
6418 = gen_rtx (EXPR_LIST, REG_DEAD,
6419 reloadreg, REG_NOTES (insn));
6422 /* When we inherit a reload, the last marked death of the reload reg
6423 may no longer really be a death. */
6424 if (reload_reg_rtx[j] != 0
6425 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
6426 && reload_inherited[j])
6428 /* Handle inheriting an output reload.
6429 Remove the death note from the output reload insn. */
6430 if (reload_spill_index[j] >= 0
6431 && GET_CODE (reload_in[j]) == REG
6432 && spill_reg_store[reload_spill_index[j]] != 0
6433 && find_regno_note (spill_reg_store[reload_spill_index[j]],
6434 REG_DEAD, REGNO (reload_reg_rtx[j])))
6435 remove_death (REGNO (reload_reg_rtx[j]),
6436 spill_reg_store[reload_spill_index[j]]);
6437 /* Likewise for input reloads that were inherited. */
6438 else if (reload_spill_index[j] >= 0
6439 && GET_CODE (reload_in[j]) == REG
6440 && spill_reg_store[reload_spill_index[j]] == 0
6441 && reload_inheritance_insn[j] != 0
6442 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
6443 REGNO (reload_reg_rtx[j])))
6444 remove_death (REGNO (reload_reg_rtx[j]),
6445 reload_inheritance_insn[j]);
6446 else
6448 rtx prev;
6450 /* We got this register from find_equiv_reg.
6451 Search back for its last death note and get rid of it.
6452 But don't search back too far.
6453 Don't go past a place where this reg is set,
6454 since a death note before that remains valid. */
6455 for (prev = PREV_INSN (insn);
6456 prev && GET_CODE (prev) != CODE_LABEL;
6457 prev = PREV_INSN (prev))
6458 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6459 && dead_or_set_p (prev, reload_reg_rtx[j]))
6461 if (find_regno_note (prev, REG_DEAD,
6462 REGNO (reload_reg_rtx[j])))
6463 remove_death (REGNO (reload_reg_rtx[j]), prev);
6464 break;
6469 /* We might have used find_equiv_reg above to choose an alternate
6470 place from which to reload. If so, and it died, we need to remove
6471 that death and move it to one of the insns we just made. */
6473 if (oldequiv_reg != 0
6474 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
6476 rtx prev, prev1;
6478 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
6479 prev = PREV_INSN (prev))
6480 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6481 && dead_or_set_p (prev, oldequiv_reg))
6483 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
6485 for (prev1 = this_reload_insn;
6486 prev1; prev1 = PREV_INSN (prev1))
6487 if (GET_RTX_CLASS (GET_CODE (prev1)) == 'i'
6488 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
6489 PATTERN (prev1)))
6491 REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
6492 oldequiv_reg,
6493 REG_NOTES (prev1));
6494 break;
6496 remove_death (REGNO (oldequiv_reg), prev);
6498 break;
6501 #endif
6503 /* If we are reloading a register that was recently stored in with an
6504 output-reload, see if we can prove there was
6505 actually no need to store the old value in it. */
6507 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
6508 && reload_in[j] != 0
6509 && GET_CODE (reload_in[j]) == REG
6510 #if 0
6511 /* There doesn't seem to be any reason to restrict this to pseudos
6512 and doing so loses in the case where we are copying from a
6513 register of the wrong class. */
6514 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
6515 #endif
6516 && spill_reg_store[reload_spill_index[j]] != 0
6517 /* This is unsafe if some other reload uses the same reg first. */
6518 && reload_reg_free_before_p (spill_regs[reload_spill_index[j]],
6519 reload_opnum[j], reload_when_needed[j])
6520 && dead_or_set_p (insn, reload_in[j])
6521 /* This is unsafe if operand occurs more than once in current
6522 insn. Perhaps some occurrences weren't reloaded. */
6523 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
6524 delete_output_reload (insn, j,
6525 spill_reg_store[reload_spill_index[j]]);
6527 /* Input-reloading is done. Now do output-reloading,
6528 storing the value from the reload-register after the main insn
6529 if reload_out[j] is nonzero.
6531 ??? At some point we need to support handling output reloads of
6532 JUMP_INSNs or insns that set cc0. */
6533 old = reload_out[j];
6534 if (old != 0
6535 && reload_reg_rtx[j] != old
6536 && reload_reg_rtx[j] != 0)
6538 register rtx reloadreg = reload_reg_rtx[j];
6539 register rtx second_reloadreg = 0;
6540 rtx note, p;
6541 enum machine_mode mode;
6542 int special = 0;
6544 /* An output operand that dies right away does need a reload,
6545 but need not be copied from it. Show the new location in the
6546 REG_UNUSED note. */
6547 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
6548 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
6550 XEXP (note, 0) = reload_reg_rtx[j];
6551 continue;
6553 /* Likewise for a SUBREG of an operand that dies. */
6554 else if (GET_CODE (old) == SUBREG
6555 && GET_CODE (SUBREG_REG (old)) == REG
6556 && 0 != (note = find_reg_note (insn, REG_UNUSED,
6557 SUBREG_REG (old))))
6559 XEXP (note, 0) = gen_lowpart_common (GET_MODE (old),
6560 reload_reg_rtx[j]);
6561 continue;
6563 else if (GET_CODE (old) == SCRATCH)
6564 /* If we aren't optimizing, there won't be a REG_UNUSED note,
6565 but we don't want to make an output reload. */
6566 continue;
6568 #if 0
6569 /* Strip off of OLD any size-increasing SUBREGs such as
6570 (SUBREG:SI foo:QI 0). */
6572 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
6573 && (GET_MODE_SIZE (GET_MODE (old))
6574 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
6575 old = SUBREG_REG (old);
6576 #endif
6578 /* If this is a JUMP_INSN, we can't support output reloads yet. */
6579 if (GET_CODE (insn) == JUMP_INSN)
6580 abort ();
6582 if (reload_when_needed[j] == RELOAD_OTHER)
6583 start_sequence ();
6584 else
6585 push_to_sequence (output_reload_insns[reload_opnum[j]]);
6587 /* Determine the mode to reload in.
6588 See comments above (for input reloading). */
6590 mode = GET_MODE (old);
6591 if (mode == VOIDmode)
6593 /* VOIDmode should never happen for an output. */
6594 if (asm_noperands (PATTERN (insn)) < 0)
6595 /* It's the compiler's fault. */
6596 fatal_insn ("VOIDmode on an output", insn);
6597 error_for_asm (insn, "output operand is constant in `asm'");
6598 /* Prevent crash--use something we know is valid. */
6599 mode = word_mode;
6600 old = gen_rtx (REG, mode, REGNO (reloadreg));
6603 if (GET_MODE (reloadreg) != mode)
6604 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6606 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6608 /* If we need two reload regs, set RELOADREG to the intermediate
6609 one, since it will be stored into OLD. We might need a secondary
6610 register only for an input reload, so check again here. */
6612 if (reload_secondary_out_reload[j] >= 0)
6614 rtx real_old = old;
6616 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
6617 && reg_equiv_mem[REGNO (old)] != 0)
6618 real_old = reg_equiv_mem[REGNO (old)];
6620 if ((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
6621 mode, real_old)
6622 != NO_REGS))
6624 second_reloadreg = reloadreg;
6625 reloadreg = reload_reg_rtx[reload_secondary_out_reload[j]];
6627 /* See if RELOADREG is to be used as a scratch register
6628 or as an intermediate register. */
6629 if (reload_secondary_out_icode[j] != CODE_FOR_nothing)
6631 emit_insn ((GEN_FCN (reload_secondary_out_icode[j])
6632 (real_old, second_reloadreg, reloadreg)));
6633 special = 1;
6635 else
6637 /* See if we need both a scratch and intermediate reload
6638 register. */
6640 int secondary_reload = reload_secondary_out_reload[j];
6641 enum insn_code tertiary_icode
6642 = reload_secondary_out_icode[secondary_reload];
6644 if (GET_MODE (reloadreg) != mode)
6645 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6647 if (tertiary_icode != CODE_FOR_nothing)
6649 rtx third_reloadreg
6650 = reload_reg_rtx[reload_secondary_out_reload[secondary_reload]];
6651 rtx tem;
6653 /* Copy the primary reload reg to the secondary reload reg
6654 (note that these have been swapped above), then copy the
6655 secondary reload reg to OLD using our insn. */
6657 /* If REAL_OLD is a paradoxical SUBREG, remove it
6658 and try to put the opposite SUBREG on
6659 RELOADREG. */
6660 if (GET_CODE (real_old) == SUBREG
6661 && (GET_MODE_SIZE (GET_MODE (real_old))
6662 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
6663 && 0 != (tem = gen_lowpart_common
6664 (GET_MODE (SUBREG_REG (real_old)),
6665 reloadreg)))
6666 real_old = SUBREG_REG (real_old), reloadreg = tem;
6668 gen_reload (reloadreg, second_reloadreg,
6669 reload_opnum[j], reload_when_needed[j]);
6670 emit_insn ((GEN_FCN (tertiary_icode)
6671 (real_old, reloadreg, third_reloadreg)));
6672 special = 1;
6675 else
6676 /* Copy between the reload regs here and then to
6677 OUT later. */
6679 gen_reload (reloadreg, second_reloadreg,
6680 reload_opnum[j], reload_when_needed[j]);
6684 #endif
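/* Illustrative note (machine assumed, not from this source): if storing into
   OLD needs an intermediate register of the class returned by
   SECONDARY_OUTPUT_RELOAD_CLASS, then after the swap above RELOADREG is that
   intermediate register and SECOND_RELOADREG is the original reload register
   holding the value.  When no special insn code applies, the value is copied
   from SECOND_RELOADREG into RELOADREG inside the block above, and the
   gen_reload call just below then stores RELOADREG into OLD.  */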
6686 /* Output the last reload insn. */
6687 if (! special)
6688 gen_reload (old, reloadreg, reload_opnum[j],
6689 reload_when_needed[j]);
6691 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6692 /* If final will look at death notes for this reg,
6693 put one on the last output-reload insn to use it. Similarly
6694 for any secondary register. */
6695 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
6696 for (p = get_last_insn (); p; p = PREV_INSN (p))
6697 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6698 && reg_overlap_mentioned_for_reload_p (reloadreg,
6699 PATTERN (p)))
6700 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6701 reloadreg, REG_NOTES (p));
6703 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6704 if (! special && second_reloadreg
6705 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
6706 for (p = get_last_insn (); p; p = PREV_INSN (p))
6707 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6708 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
6709 PATTERN (p)))
6710 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6711 second_reloadreg, REG_NOTES (p));
6712 #endif
6713 #endif
6714 /* Look at all insns we emitted, just to be safe. */
6715 for (p = get_insns (); p; p = NEXT_INSN (p))
6716 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
6718 /* If this output reload doesn't come from a spill reg,
6719 clear any memory of reloaded copies of the pseudo reg.
6720 If this output reload comes from a spill reg,
6721 reg_has_output_reload will make this do nothing. */
6722 note_stores (PATTERN (p), forget_old_reloads_1);
6724 if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p))
6725 && reload_spill_index[j] >= 0)
6726 new_spill_reg_store[reload_spill_index[j]] = p;
6729 if (reload_when_needed[j] == RELOAD_OTHER)
6731 emit_insns (other_output_reload_insns[reload_opnum[j]]);
6732 other_output_reload_insns[reload_opnum[j]] = get_insns ();
6734 else
6735 output_reload_insns[reload_opnum[j]] = get_insns ();
6737 end_sequence ();
6741 /* Now write all the insns we made for reloads in the order expected by
6742 the allocation functions. Prior to the insn being reloaded, we write
6743 the following reloads:
6745 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
6747 RELOAD_OTHER reloads.
6749 For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
6750 by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
6751 RELOAD_FOR_INPUT reload for the operand.
6753 RELOAD_FOR_OPADDR_ADDRS reloads.
6755 RELOAD_FOR_OPERAND_ADDRESS reloads.
6757 After the insn being reloaded, we write the following:
6759 For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
6760 by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
6761 RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
6762 reloads for the operand. The RELOAD_OTHER output reloads are
6763 output in descending order by reload number. */
6765 emit_insns_before (other_input_address_reload_insns, before_insn);
6766 emit_insns_before (other_input_reload_insns, before_insn);
6768 for (j = 0; j < reload_n_operands; j++)
6770 emit_insns_before (inpaddr_address_reload_insns[j], before_insn);
6771 emit_insns_before (input_address_reload_insns[j], before_insn);
6772 emit_insns_before (input_reload_insns[j], before_insn);
6775 emit_insns_before (other_operand_reload_insns, before_insn);
6776 emit_insns_before (operand_reload_insns, before_insn);
6778 for (j = 0; j < reload_n_operands; j++)
6780 emit_insns_before (outaddr_address_reload_insns[j], following_insn);
6781 emit_insns_before (output_address_reload_insns[j], following_insn);
6782 emit_insns_before (output_reload_insns[j], following_insn);
6783 emit_insns_before (other_output_reload_insns[j], following_insn);
6786 /* Move death notes from INSN
6787 to output-operand-address and output reload insns. */
6788 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6790 rtx insn1;
6791 /* Loop over those insns, last ones first. */
6792 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
6793 insn1 = PREV_INSN (insn1))
6794 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
6796 rtx source = SET_SRC (PATTERN (insn1));
6797 rtx dest = SET_DEST (PATTERN (insn1));
6799 /* The note we will examine next. */
6800 rtx reg_notes = REG_NOTES (insn);
6801 /* The place that pointed to this note. */
6802 rtx *prev_reg_note = &REG_NOTES (insn);
6804 /* If the note is for something used in the source of this
6805 reload insn, or in the output address, move the note. */
6806 while (reg_notes)
6808 rtx next_reg_notes = XEXP (reg_notes, 1);
6809 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
6810 && GET_CODE (XEXP (reg_notes, 0)) == REG
6811 && ((GET_CODE (dest) != REG
6812 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6813 dest))
6814 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6815 source)))
6817 *prev_reg_note = next_reg_notes;
6818 XEXP (reg_notes, 1) = REG_NOTES (insn1);
6819 REG_NOTES (insn1) = reg_notes;
6821 else
6822 prev_reg_note = &XEXP (reg_notes, 1);
6824 reg_notes = next_reg_notes;
6828 #endif
6830 /* For all the spill regs newly reloaded in this instruction,
6831 record what they were reloaded from, so subsequent instructions
6832 can inherit the reloads.
6834 Update spill_reg_store for the reloads of this insn.
6835 Copy the elements that were updated in the loop above. */
6837 for (j = 0; j < n_reloads; j++)
6839 register int r = reload_order[j];
6840 register int i = reload_spill_index[r];
6842 /* I is nonneg if this reload used one of the spill regs.
6843 If reload_reg_rtx[r] is 0, this is an optional reload
6844 that we opted to ignore. */
6846 if (i >= 0 && reload_reg_rtx[r] != 0)
6848 int nr
6849 = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
6850 int k;
6851 int part_reaches_end = 0;
6852 int all_reaches_end = 1;
6854 /* For a multi register reload, we need to check if all or part
6855 of the value lives to the end. */
6856 for (k = 0; k < nr; k++)
6858 if (reload_reg_reaches_end_p (spill_regs[i] + k, reload_opnum[r],
6859 reload_when_needed[r]))
6860 part_reaches_end = 1;
6861 else
6862 all_reaches_end = 0;
6865 /* Ignore reloads that don't reach the end of the insn in their
6866 entirety. */
6867 if (all_reaches_end)
6869 /* First, clear out memory of what used to be in this spill reg.
6870 If consecutive registers are used, clear them all. */
6872 for (k = 0; k < nr; k++)
6874 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
6875 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
6878 /* Maybe the spill reg contains a copy of reload_out. */
6879 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6881 register int nregno = REGNO (reload_out[r]);
6882 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6883 : HARD_REGNO_NREGS (nregno,
6884 GET_MODE (reload_reg_rtx[r])));
6886 spill_reg_store[i] = new_spill_reg_store[i];
6887 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6889 /* If NREGNO is a hard register, it may occupy more than
6890 one register. If it does, say what is in the
6891 rest of the registers assuming that both registers
6892 agree on how many words the object takes. If not,
6893 invalidate the subsequent registers. */
6895 if (nregno < FIRST_PSEUDO_REGISTER)
6896 for (k = 1; k < nnr; k++)
6897 reg_last_reload_reg[nregno + k]
6898 = (nr == nnr
6899 ? gen_rtx (REG,
6900 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6901 REGNO (reload_reg_rtx[r]) + k)
6902 : 0);
6904 /* Now do the inverse operation: record which register each spill reg now contains a copy of. */
6905 for (k = 0; k < nr; k++)
6907 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6908 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
6909 ? nregno
6910 : nregno + k);
6911 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
6915 /* Maybe the spill reg contains a copy of reload_in. Only do
6916 something if there will not be an output reload for
6917 the register being reloaded. */
6918 else if (reload_out[r] == 0
6919 && reload_in[r] != 0
6920 && ((GET_CODE (reload_in[r]) == REG
6921 && ! reg_has_output_reload[REGNO (reload_in[r])])
6922 || (GET_CODE (reload_in_reg[r]) == REG
6923 && ! reg_has_output_reload[REGNO (reload_in_reg[r])])))
6925 register int nregno;
6926 int nnr;
6928 if (GET_CODE (reload_in[r]) == REG)
6929 nregno = REGNO (reload_in[r]);
6930 else
6931 nregno = REGNO (reload_in_reg[r]);
6933 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6934 : HARD_REGNO_NREGS (nregno,
6935 GET_MODE (reload_reg_rtx[r])));
6937 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6939 if (nregno < FIRST_PSEUDO_REGISTER)
6940 for (k = 1; k < nnr; k++)
6941 reg_last_reload_reg[nregno + k]
6942 = (nr == nnr
6943 ? gen_rtx (REG,
6944 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6945 REGNO (reload_reg_rtx[r]) + k)
6946 : 0);
6948 /* Unless we inherited this reload, show we haven't
6949 recently done a store. */
6950 if (! reload_inherited[r])
6951 spill_reg_store[i] = 0;
6953 for (k = 0; k < nr; k++)
6955 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6956 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
6957 ? nregno
6958 : nregno + k);
6959 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
6960 = insn;
6965 /* However, if part of the reload reaches the end, then we must
6966 invalidate the old info for the part that survives to the end. */
6967 else if (part_reaches_end)
6969 for (k = 0; k < nr; k++)
6970 if (reload_reg_reaches_end_p (spill_regs[i] + k,
6971 reload_opnum[r],
6972 reload_when_needed[r]))
6974 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
6975 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
6980 /* The following if-statement was #if 0'd in 1.34 (or before...).
6981 It's reenabled in 1.35 because supposedly nothing else
6982 deals with this problem. */
6984 /* If a register gets output-reloaded from a non-spill register,
6985 that invalidates any previous reloaded copy of it.
6986 But forget_old_reloads_1 won't get to see it, because
6987 it thinks only about the original insn. So invalidate it here. */
6988 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6990 register int nregno = REGNO (reload_out[r]);
6991 if (nregno >= FIRST_PSEUDO_REGISTER)
6992 reg_last_reload_reg[nregno] = 0;
6993 else
6995 int num_regs = HARD_REGNO_NREGS (nregno, GET_MODE (reload_out[r]));
6997 while (num_regs-- > 0)
6998 reg_last_reload_reg[nregno + num_regs] = 0;
7004 /* Emit code to perform a reload from IN (which may be a reload register) to
7005 OUT (which may also be a reload register). IN or OUT is from operand
7006 OPNUM with reload type TYPE.
7008 Returns first insn emitted. */
7010 static rtx
7011 gen_reload (out, in, opnum, type)
7012 rtx out;
7013 rtx in;
7014 int opnum;
7015 enum reload_type type;
7017 rtx last = get_last_insn ();
7018 rtx tem;
7020 /* If IN is a paradoxical SUBREG, remove it and try to put the
7021 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
7022 if (GET_CODE (in) == SUBREG
7023 && (GET_MODE_SIZE (GET_MODE (in))
7024 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
7025 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
7026 in = SUBREG_REG (in), out = tem;
7027 else if (GET_CODE (out) == SUBREG
7028 && (GET_MODE_SIZE (GET_MODE (out))
7029 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
7030 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
7031 out = SUBREG_REG (out), in = tem;
7033 /* How to do this reload can get quite tricky. Normally, we are being
7034 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
7035 register that didn't get a hard register. In that case we can just
7036 call emit_move_insn.
7038 We can also be asked to reload a PLUS that adds a register or a MEM to
7039 another register, constant or MEM. This can occur during frame pointer
7040 elimination and while reloading addresses. This case is handled by
7041 trying to emit a single insn to perform the add. If it is not valid,
7042 we use a two insn sequence.
7044 Finally, we could be called to handle an 'o' constraint by putting
7045 an address into a register. In that case, we first try to do this
7046 with a named pattern of "reload_load_address". If no such pattern
7047 exists, we just emit a SET insn and hope for the best (it will normally
7048 be valid on machines that use 'o').
7050 This entire process is made complex both because reload will never
7051 process the insns we generate here, so we must ensure that they
7052 will fit their constraints, and because parts of IN might be being
7053 reloaded separately and replaced with spill registers.
7054 Because of this, we are, in some sense, just guessing the right approach
7055 here. The one listed above seems to work.
7057 ??? At some point, this whole thing needs to be rethought. */
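/* A concrete illustration (rtl assumed, not taken from this file): during
   frame pointer elimination IN might be
       (plus:SI (reg:SI fp) (const_int 8))
   and OUT a hard reload register.  The code below first tries the single
   insn (set OUT (plus:SI (reg:SI fp) (const_int 8))); if that insn is not
   recognized or does not satisfy its constraints, the two-insn fallback
   described further down is used instead.  */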
7059 if (GET_CODE (in) == PLUS
7060 && (GET_CODE (XEXP (in, 0)) == REG
7061 || GET_CODE (XEXP (in, 0)) == SUBREG
7062 || GET_CODE (XEXP (in, 0)) == MEM)
7063 && (GET_CODE (XEXP (in, 1)) == REG
7064 || GET_CODE (XEXP (in, 1)) == SUBREG
7065 || CONSTANT_P (XEXP (in, 1))
7066 || GET_CODE (XEXP (in, 1)) == MEM))
7068 /* We need to compute the sum of a register or a MEM and another
7069 register, constant, or MEM, and put it into the reload
7070 register. The best possible way of doing this is if the machine
7071 has a three-operand ADD insn that accepts the required operands.
7073 The simplest approach is to try to generate such an insn and see if it
7074 is recognized and matches its constraints. If so, it can be used.
7076 It might be better not to actually emit the insn unless it is valid,
7077 but we need to pass the insn as an operand to `recog' and
7078 `insn_extract' and it is simpler to emit and then delete the insn if
7079 not valid than to dummy things up. */
7081 rtx op0, op1, tem, insn;
7082 int code;
7084 op0 = find_replacement (&XEXP (in, 0));
7085 op1 = find_replacement (&XEXP (in, 1));
7087 /* Since constraint checking is strict, commutativity won't be
7088 checked, so we need to do that here to avoid spurious failure
7089 if the add instruction is two-address and the second operand
7090 of the add is the same as the reload reg, which is frequently
7091 the case. If the insn would be A = B + A, rearrange it so
7092 it will be A = A + B as constrain_operands expects. */
7094 if (GET_CODE (XEXP (in, 1)) == REG
7095 && REGNO (out) == REGNO (XEXP (in, 1)))
7096 tem = op0, op0 = op1, op1 = tem;
7098 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
7099 in = gen_rtx (PLUS, GET_MODE (in), op0, op1);
7101 insn = emit_insn (gen_rtx (SET, VOIDmode, out, in));
7102 code = recog_memoized (insn);
7104 if (code >= 0)
7106 insn_extract (insn);
7107 /* We want constrain operands to treat this insn strictly in
7108 its validity determination, i.e., the way it would after reload
7109 has completed. */
7110 if (constrain_operands (code, 1))
7111 return insn;
7114 delete_insns_since (last);
7116 /* If that failed, we must use a conservative two-insn sequence.
7117 Use a move to copy the constant, MEM, or pseudo register to the reload
7118 register, since "move" will be able to handle an arbitrary operand,
7119 unlike add, which can't in general. Then add the registers.
7121 If there is another way to do this for a specific machine, a
7122 DEFINE_PEEPHOLE should be specified that recognizes the sequence
7123 we emit below. */
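/* Illustrative example (operands assumed): for IN = (plus:SI (reg:SI fp)
   (const_int 8)) and reload register R, this fallback emits
       (set (reg:SI R) (const_int 8))
       (set (reg:SI R) (plus:SI (reg:SI R) (reg:SI fp)))
   i.e. the constant (or MEM, or pseudo) operand is moved into the reload
   register first and the remaining register is then added in.  */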
7125 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM || GET_CODE (op1) == SUBREG
7126 || (GET_CODE (op1) == REG
7127 && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
7128 tem = op0, op0 = op1, op1 = tem;
7130 gen_reload (out, op0, opnum, type);
7132 /* If OP0 and OP1 are the same, we can use OUT for OP1.
7133 This fixes a problem on the 32K where the stack pointer cannot
7134 be used as an operand of an add insn. */
7136 if (rtx_equal_p (op0, op1))
7137 op1 = out;
7139 insn = emit_insn (gen_add2_insn (out, op1));
7141 /* If that failed, copy the address register to the reload register.
7142 Then add the constant to the reload register. */
7144 code = recog_memoized (insn);
7146 if (code >= 0)
7148 insn_extract (insn);
7149 /* We want constrain operands to treat this insn strictly in
7150 its validity determination, i.e., the way it would after reload
7151 has completed. */
7152 if (constrain_operands (code, 1))
7153 return insn;
7156 delete_insns_since (last);
7158 gen_reload (out, op1, opnum, type);
7159 emit_insn (gen_add2_insn (out, op0));
7162 #ifdef SECONDARY_MEMORY_NEEDED
7163 /* If we need a memory location to do the move, do it that way. */
7164 else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
7165 && GET_CODE (out) == REG && REGNO (out) < FIRST_PSEUDO_REGISTER
7166 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
7167 REGNO_REG_CLASS (REGNO (out)),
7168 GET_MODE (out)))
7170 /* Get the memory to use and rewrite both registers to its mode. */
7171 rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
7173 if (GET_MODE (loc) != GET_MODE (out))
7174 out = gen_rtx (REG, GET_MODE (loc), REGNO (out));
7176 if (GET_MODE (loc) != GET_MODE (in))
7177 in = gen_rtx (REG, GET_MODE (loc), REGNO (in));
7179 gen_reload (loc, in, opnum, type);
7180 gen_reload (out, loc, opnum, type);
7182 #endif
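/* Illustrative example (register classes assumed): on a machine where
   SECONDARY_MEMORY_NEEDED is nonzero for copies between GENERAL_REGS and
   FLOAT_REGS, moving a hard general register into a hard float register is
   done as two gen_reload calls through the memory location returned by
   get_secondary_mem: first IN is stored into that location, then the
   location is loaded into OUT.  */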
7184 /* If IN is a simple operand, use gen_move_insn. */
7185 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
7186 emit_insn (gen_move_insn (out, in));
7188 #ifdef HAVE_reload_load_address
7189 else if (HAVE_reload_load_address)
7190 emit_insn (gen_reload_load_address (out, in));
7191 #endif
7193 /* Otherwise, just write (set OUT IN) and hope for the best. */
7194 else
7195 emit_insn (gen_rtx (SET, VOIDmode, out, in));
7197 /* Return the first insn emitted.
7198 We cannot just return get_last_insn, because there may have
7199 been multiple instructions emitted. Also note that gen_move_insn may
7200 emit more than one insn itself, so we cannot assume that there is one
7201 insn emitted per emit_insn_before call. */
7203 return last ? NEXT_INSN (last) : get_insns ();
7206 /* Delete a previously made output-reload
7207 whose result we now believe is not needed.
7208 First we double-check.
7210 INSN is the insn now being processed.
7211 OUTPUT_RELOAD_INSN is the insn of the output reload.
7212 J is the reload-number for this insn. */
7214 static void
7215 delete_output_reload (insn, j, output_reload_insn)
7216 rtx insn;
7217 int j;
7218 rtx output_reload_insn;
7220 register rtx i1;
7222 /* Get the raw pseudo-register referred to. */
7224 rtx reg = reload_in[j];
7225 while (GET_CODE (reg) == SUBREG)
7226 reg = SUBREG_REG (reg);
7228 /* If the pseudo-reg we are reloading is no longer referenced
7229 anywhere between the store into it and here,
7230 and no jumps or labels intervene, then the value can get
7231 here through the reload reg alone.
7232 Otherwise, give up--return. */
7233 for (i1 = NEXT_INSN (output_reload_insn);
7234 i1 != insn; i1 = NEXT_INSN (i1))
7236 if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
7237 return;
7238 if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
7239 && reg_mentioned_p (reg, PATTERN (i1)))
7240 return;
7243 if (cannot_omit_stores[REGNO (reg)])
7244 return;
7246 /* If this insn will store in the pseudo again,
7247 the previous store can be removed. */
7248 if (reload_out[j] == reload_in[j])
7249 delete_insn (output_reload_insn);
7251 /* See if the pseudo reg has been completely replaced
7252 with reload regs. If so, delete the store insn
7253 and forget we had a stack slot for the pseudo. */
7254 else if (reg_n_deaths[REGNO (reg)] == 1
7255 && reg_basic_block[REGNO (reg)] >= 0
7256 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
7258 rtx i2;
7260 /* We know that it was used only between here
7261 and the beginning of the current basic block.
7262 (We also know that the last use before INSN was
7263 the output reload we are thinking of deleting, but never mind that.)
7264 Search that range; see if any ref remains. */
7265 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7267 rtx set = single_set (i2);
7269 /* Uses which just store in the pseudo don't count,
7270 since if they are the only uses, they are dead. */
7271 if (set != 0 && SET_DEST (set) == reg)
7272 continue;
7273 if (GET_CODE (i2) == CODE_LABEL
7274 || GET_CODE (i2) == JUMP_INSN)
7275 break;
7276 if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
7277 && reg_mentioned_p (reg, PATTERN (i2)))
7278 /* Some other ref remains;
7279 we can't do anything. */
7280 return;
7283 /* Delete the now-dead stores into this pseudo. */
7284 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7286 rtx set = single_set (i2);
7288 if (set != 0 && SET_DEST (set) == reg)
7290 /* This might be a basic block head,
7291 thus don't use delete_insn. */
7292 PUT_CODE (i2, NOTE);
7293 NOTE_SOURCE_FILE (i2) = 0;
7294 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
7296 if (GET_CODE (i2) == CODE_LABEL
7297 || GET_CODE (i2) == JUMP_INSN)
7298 break;
7301 /* For the debugging info,
7302 say the pseudo lives in this reload reg. */
7303 reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
7304 alter_reg (REGNO (reg), -1);
7308 /* Output reload-insns to reload VALUE into RELOADREG.
7309 VALUE is an autoincrement or autodecrement RTX whose operand
7310 is a register or memory location;
7311 so reloading involves incrementing that location.
7313 INC_AMOUNT is the number to increment or decrement by (always positive).
7314 This cannot be deduced from VALUE. */
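/* For example (values assumed): for VALUE = (post_inc:SI (reg:SI sp)) used
   to access a 4-byte SImode object, the caller passes INC_AMOUNT = 4; the
   POST_INC rtx itself carries no increment amount, which is why it must be
   supplied separately.  */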
7316 static void
7317 inc_for_reload (reloadreg, value, inc_amount)
7318 rtx reloadreg;
7319 rtx value;
7320 int inc_amount;
7322 /* REG or MEM to be copied and incremented. */
7323 rtx incloc = XEXP (value, 0);
7324 /* Nonzero if increment after copying. */
7325 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
7326 rtx last;
7327 rtx inc;
7328 rtx add_insn;
7329 int code;
7331 /* No hard register is equivalent to this register after
7332 inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
7333 we could inc/dec that register as well (maybe even using it for
7334 the source), but I'm not sure it's worth worrying about. */
7335 if (GET_CODE (incloc) == REG)
7336 reg_last_reload_reg[REGNO (incloc)] = 0;
7338 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
7339 inc_amount = - inc_amount;
7341 inc = GEN_INT (inc_amount);
7343 /* If this is post-increment, first copy the location to the reload reg. */
7344 if (post)
7345 emit_insn (gen_move_insn (reloadreg, incloc));
7347 /* See if we can directly increment INCLOC. Use a method similar to that
7348 in gen_reload. */
7350 last = get_last_insn ();
7351 add_insn = emit_insn (gen_rtx (SET, VOIDmode, incloc,
7352 gen_rtx (PLUS, GET_MODE (incloc),
7353 incloc, inc)));
7355 code = recog_memoized (add_insn);
7356 if (code >= 0)
7358 insn_extract (add_insn);
7359 if (constrain_operands (code, 1))
7361 /* If this is a pre-increment and we have incremented the value
7362 where it lives, copy the incremented value to RELOADREG to
7363 be used as an address. */
7365 if (! post)
7366 emit_insn (gen_move_insn (reloadreg, incloc));
7368 return;
7372 delete_insns_since (last);
7374 /* If we couldn't do the increment directly, we must increment in
7375 RELOADREG. The way we do this depends on whether this is pre- or
7376 post-increment. For pre-increment, copy INCLOC to the reload register,
7377 increment it there, then save it back. */
7379 if (! post)
7381 emit_insn (gen_move_insn (reloadreg, incloc));
7382 emit_insn (gen_add2_insn (reloadreg, inc));
7383 emit_insn (gen_move_insn (incloc, reloadreg));
7385 else
7387 /* Postincrement.
7388 Because this might be a jump insn or a compare, and because RELOADREG
7389 may not be available after the insn in an input reload, we must do
7390 the incrementation before the insn being reloaded for.
7392 We have already copied INCLOC to RELOADREG. Increment the copy in
7393 RELOADREG, save that back, then decrement RELOADREG so it has
7394 the original value. */
7396 emit_insn (gen_add2_insn (reloadreg, inc));
7397 emit_insn (gen_move_insn (incloc, reloadreg));
7398 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
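/* Illustrative sequence (registers assumed): with INCLOC = (reg:SI sp),
   RELOADREG = (reg:SI r) and INC_AMOUNT = 4, the three insns just emitted
   amount to
       (set (reg:SI r) (plus:SI (reg:SI r) (const_int 4)))
       (set (reg:SI sp) (reg:SI r))
       (set (reg:SI r) (plus:SI (reg:SI r) (const_int -4)))
   leaving RELOADREG holding the original, pre-increment value for use as
   the address.  */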
7401 return;
7404 /* Return 1 if we are certain that the constraint-string STRING allows
7405 the hard register REG. Return 0 if we can't be sure of this. */
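/* For instance (constraint strings assumed): for a hard register belonging
   to GENERAL_REGS, the string "g,r" yields 1, since every alternative
   accepts a general register, while "r,m" yields 0, because the second
   alternative contains neither `g', `r', nor a register-class letter whose
   class contains the register.  */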
7407 static int
7408 constraint_accepts_reg_p (string, reg)
7409 char *string;
7410 rtx reg;
7412 int value = 0;
7413 int regno = true_regnum (reg);
7414 int c;
7416 /* Initialize for first alternative. */
7417 value = 0;
7418 /* Check that each alternative contains `g' or `r'. */
7419 while (1)
7420 switch (c = *string++)
7422 case 0:
7423 /* At the end of the string, VALUE is the result for the last alternative; if that alternative lacked `g' or `r', we lose. */
7424 return value;
7425 case ',':
7426 /* If an alternative lacks `g' or `r', we lose. */
7427 if (value == 0)
7428 return 0;
7429 /* Initialize for next alternative. */
7430 value = 0;
7431 break;
7432 case 'g':
7433 case 'r':
7434 /* Any general reg wins for this alternative. */
7435 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
7436 value = 1;
7437 break;
7438 default:
7439 /* Any reg in specified class wins for this alternative. */
7441 enum reg_class class = REG_CLASS_FROM_LETTER (c);
7443 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
7444 value = 1;
7449 /* Return the number of places FIND appears within X, but don't count
7450 an occurrence if some SET_DEST is FIND. */
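/* For example (rtl assumed, and assuming the usual sharing of register rtx
   so that both occurrences are the same object): with FIND = (reg:SI 5) and
   X = (set (reg:SI 5) (plus:SI (reg:SI 5) (const_int 1)))
   the result is 1; the use inside the PLUS is counted, but the occurrence
   as the SET_DEST is not.  */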
7452 static int
7453 count_occurrences (x, find)
7454 register rtx x, find;
7456 register int i, j;
7457 register enum rtx_code code;
7458 register char *format_ptr;
7459 int count;
7461 if (x == find)
7462 return 1;
7463 if (x == 0)
7464 return 0;
7466 code = GET_CODE (x);
7468 switch (code)
7470 case REG:
7471 case QUEUED:
7472 case CONST_INT:
7473 case CONST_DOUBLE:
7474 case SYMBOL_REF:
7475 case CODE_LABEL:
7476 case PC:
7477 case CC0:
7478 return 0;
7480 case SET:
7481 if (SET_DEST (x) == find)
7482 return count_occurrences (SET_SRC (x), find);
7483 break;
7486 format_ptr = GET_RTX_FORMAT (code);
7487 count = 0;
7489 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7491 switch (*format_ptr++)
7493 case 'e':
7494 count += count_occurrences (XEXP (x, i), find);
7495 break;
7497 case 'E':
7498 if (XVEC (x, i) != NULL)
7500 for (j = 0; j < XVECLEN (x, i); j++)
7501 count += count_occurrences (XVECEXP (x, i, j), find);
7503 break;
7506 return count;