1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 88, 89, 92-6, 1997 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include <stdio.h>
24 #include "rtl.h"
25 #include "obstack.h"
26 #include "insn-config.h"
27 #include "insn-flags.h"
28 #include "insn-codes.h"
29 #include "flags.h"
30 #include "expr.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "reload.h"
34 #include "recog.h"
35 #include "basic-block.h"
36 #include "output.h"
37 #include "real.h"
39 /* This file contains the reload pass of the compiler, which is
40 run after register allocation has been done. It checks that
41 each insn is valid (operands required to be in registers really
42 are in registers of the proper class) and fixes up invalid ones
43 by copying values temporarily into registers for the insns
44 that need them.
46 The results of register allocation are described by the vector
47 reg_renumber; the insns still contain pseudo regs, but reg_renumber
48 can be used to find which hard reg, if any, a pseudo reg is in.
50 The technique we always use is to free up a few hard regs that are
51 called ``reload regs'', and for each place where a pseudo reg
52 must be in a hard reg, copy it temporarily into one of the reload regs.
54 All the pseudos that were formerly allocated to the hard regs that
55 are now in use as reload regs must be ``spilled''. This means
56 that they go to other hard regs, or to stack slots if no other
57 available hard regs can be found. Spilling can invalidate more
58 insns, requiring additional need for reloads, so we must keep checking
59 until the process stabilizes.
61 For machines with different classes of registers, we must keep track
62 of the register class needed for each reload, and make sure that
63 we allocate enough reload registers of each class.
65 The file reload.c contains the code that checks one insn for
66 validity and reports the reloads that it needs. This file
67 is in charge of scanning the entire rtl code, accumulating the
68 reload needs, spilling, assigning reload registers to use for
69 fixing up each insn, and generating the new insns to copy values
70 into the reload registers. */
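/* Purely illustrative sketch of the rewrite described above (register
   numbers, modes and the stack address are made up, not taken from any
   target or dump): suppose pseudo 100 was spilled to a stack slot and
   appears in

	(set (reg:SI 100) (plus:SI (reg:SI 100) (const_int 4)))

   and hard reg 1 has been freed as a reload reg.  Reload emits an input
   reload before the insn, rewrites the insn to use the reload reg, and
   emits an output reload after it:

	(set (reg:SI 1) (mem:SI (plus:SI (reg:SI sp) (const_int -8))))
	(set (reg:SI 1) (plus:SI (reg:SI 1) (const_int 4)))
	(set (mem:SI (plus:SI (reg:SI sp) (const_int -8))) (reg:SI 1))  */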
73 #ifndef REGISTER_MOVE_COST
74 #define REGISTER_MOVE_COST(x, y) 2
75 #endif
77 #ifndef MEMORY_MOVE_COST
78 #define MEMORY_MOVE_COST(x) 4
79 #endif
81 /* During reload_as_needed, element N contains a REG rtx for the hard reg
82 into which reg N has been reloaded (perhaps for a previous insn). */
83 static rtx *reg_last_reload_reg;
85 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
86 for an output reload that stores into reg N. */
87 static char *reg_has_output_reload;
89 /* Indicates which hard regs are reload-registers for an output reload
90 in the current insn. */
91 static HARD_REG_SET reg_is_output_reload;
93 /* Element N is the constant value to which pseudo reg N is equivalent,
94 or zero if pseudo reg N is not equivalent to a constant.
95 find_reloads looks at this in order to replace pseudo reg N
96 with the constant it stands for. */
97 rtx *reg_equiv_constant;
99 /* Element N is a memory location to which pseudo reg N is equivalent,
100 prior to any register elimination (such as frame pointer to stack
101 pointer). Depending on whether or not it is a valid address, this value
102 is transferred to either reg_equiv_address or reg_equiv_mem. */
103 rtx *reg_equiv_memory_loc;
105 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
106 This is used when the address is not valid as a memory address
107 (because its displacement is too big for the machine). */
108 rtx *reg_equiv_address;
110 /* Element N is the memory slot to which pseudo reg N is equivalent,
111 or zero if pseudo reg N is not equivalent to a memory slot. */
112 rtx *reg_equiv_mem;
114 /* Widest width in which each pseudo reg is referred to (via subreg). */
115 static int *reg_max_ref_width;
117 /* Element N is the insn that initialized reg N from its equivalent
118 constant or memory slot. */
119 static rtx *reg_equiv_init;
121 /* During reload_as_needed, element N contains the last pseudo regno
122 reloaded into the Nth reload register. This vector is in parallel
123 with spill_regs. If that pseudo reg occupied more than one register,
124 reg_reloaded_contents points to that pseudo for each spill register in
125 use; all of these must remain set for an inheritance to occur. */
126 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
128 /* During reload_as_needed, element N contains the insn for which
129 the Nth reload register was last used. This vector is in parallel
130 with spill_regs, and its contents are significant only when
131 reg_reloaded_contents is significant. */
132 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
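/* Example of the inheritance these two arrays enable (the numbers are
   hypothetical): if the previous insn left pseudo 117 in the spill reg
   recorded in slot 2 of spill_regs, then reg_reloaded_contents[2] == 117,
   and a later input reload of pseudo 117 may reuse ("inherit") that hard
   reg instead of loading the value from memory again.  */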
134 /* Number of spill-regs so far; number of valid elements of spill_regs. */
135 static int n_spills;
137 /* In parallel with spill_regs, contains REG rtx's for those regs.
138 Holds the last rtx used for any given reg, or 0 if it has never
139 been used for spilling yet. This rtx is reused, provided it has
140 the proper mode. */
141 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
143 /* In parallel with spill_regs, contains nonzero for a spill reg
144 that was stored after the last time it was used.
145 The precise value is the insn generated to do the store. */
146 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
148 /* This table is the inverse mapping of spill_regs:
149 indexed by hard reg number,
150 it contains the position of that reg in spill_regs,
151 or -1 for something that is not in spill_regs. */
152 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
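/* For example, if spill_regs holds {3, 7} and n_spills == 2, then
   spill_reg_order[3] == 0, spill_reg_order[7] == 1, and every other
   element is -1.  */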
154 /* This reg set indicates registers that may not be used for retrying global
155 allocation. The registers that may not be used include all spill registers
156 and the frame pointer (if we are using one). */
157 HARD_REG_SET forbidden_regs;
159 /* This reg set indicates registers that are not good for spill registers.
160 They will not be used to complete groups of spill registers. This includes
161 all fixed registers, registers that may be eliminated, and, if
162 SMALL_REGISTER_CLASSES is zero, registers explicitly used in the rtl.
164 (spill_reg_order prevents these registers from being used to start a
165 group.) */
166 static HARD_REG_SET bad_spill_regs;
168 /* Describes order of use of registers for reloading
169 of spilled pseudo-registers. `n_spills' is the number of
170 elements that are actually valid; new ones are added at the end. */
171 static short spill_regs[FIRST_PSEUDO_REGISTER];
173 /* This reg set indicates those registers that have been used as spill
174 registers. This information is used in reorg.c, to help figure out
175 what registers are live at any point. It is assumed that all spill_regs
176 are dead at every CODE_LABEL. */
178 HARD_REG_SET used_spill_regs;
180 /* Index of last register assigned as a spill register. We allocate in
181 a round-robin fashion. */
183 static int last_spill_reg;
185 /* Describes order of preference for putting regs into spill_regs.
186 Contains the numbers of all the hard regs, in order most preferred first.
187 This order is different for each function.
188 It is set up by order_regs_for_reload.
189 Empty elements at the end contain -1. */
190 static short potential_reload_regs[FIRST_PSEUDO_REGISTER];
192 /* 1 for a hard register that appears explicitly in the rtl
193 (for example, function value registers, special registers
194 used by insns, structure value pointer registers). */
195 static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];
197 /* Indicates if a register was counted against the need for
198 groups. 0 means it can count against max_nongroup instead. */
199 static HARD_REG_SET counted_for_groups;
201 /* Indicates if a register was counted against the need for
202 non-groups. 0 means it can become part of a new group.
203 During choose_reload_regs, 1 here means don't use this reg
204 as part of a group, even if it seems to be otherwise ok. */
205 static HARD_REG_SET counted_for_nongroups;
207 /* Indexed by pseudo reg number N,
208 nonzero if we may not delete stores into the real (memory) home of pseudo N.
209 This is set if we already substituted a memory equivalent in some uses,
210 which happens when we have to eliminate the fp from it. */
211 static char *cannot_omit_stores;
213 /* Nonzero if indirect addressing is supported on the machine; this means
214 that spilling (REG n) does not require reloading it into a register in
215 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
216 value indicates the level of indirect addressing supported, e.g., two
217 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
218 a hard register. */
220 static char spill_indirect_levels;
222 /* Nonzero if indirect addressing is supported when the innermost MEM is
223 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
224 which these are valid is the same as spill_indirect_levels, above. */
226 char indirect_symref_ok;
228 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
230 char double_reg_address_ok;
232 /* Record the stack slot for each spilled hard register. */
234 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
236 /* Width allocated so far for that stack slot. */
238 static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
240 /* Indexed by register class and basic block number, nonzero if there is
241 any need for a spill register of that class in that basic block.
242 The pointer is 0 if we did stupid allocation and don't know
243 the structure of basic blocks. */
245 char *basic_block_needs[N_REG_CLASSES];
247 /* First uid used by insns created by reload in this function.
248 Used in find_equiv_reg. */
249 int reload_first_uid;
251 /* Flag set by local-alloc or global-alloc if anything is live in
252 a call-clobbered reg across calls. */
254 int caller_save_needed;
256 /* The register class to use for a base register when reloading an
257 address. This is normally BASE_REG_CLASS, but it may be different
258 when using SMALL_REGISTER_CLASSES and passing parameters in
259 registers. */
260 enum reg_class reload_address_base_reg_class;
262 /* The register class to use for an index register when reloading an
263 address. This is normally INDEX_REG_CLASS, but it may be different
264 when using SMALL_REGISTER_CLASSES and passing parameters in
265 registers. */
266 enum reg_class reload_address_index_reg_class;
268 /* Set to 1 while reload_as_needed is operating.
269 Required by some machines to handle any generated moves differently. */
271 int reload_in_progress = 0;
273 /* These arrays record the insn_code of insns that may be needed to
274 perform input and output reloads of special objects. They provide a
275 place to pass a scratch register. */
277 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
278 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
280 /* This obstack is used for allocation of rtl during register elimination.
281 The allocated storage can be freed once find_reloads has processed the
282 insn. */
284 struct obstack reload_obstack;
285 char *reload_firstobj;
287 #define obstack_chunk_alloc xmalloc
288 #define obstack_chunk_free free
290 /* List of labels that must never be deleted. */
291 extern rtx forced_labels;
293 /* Allocation number table from global register allocation. */
294 extern int *reg_allocno;
296 /* This structure is used to record information about register eliminations.
297 Each array entry describes one possible way of eliminating a register
298 in favor of another. If there is more than one way of eliminating a
299 particular register, the most preferred should be specified first. */
301 static struct elim_table
303 int from; /* Register number to be eliminated. */
304 int to; /* Register number used as replacement. */
305 int initial_offset; /* Initial difference between values. */
306 int can_eliminate; /* Non-zero if this elimination can be done. */
307 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
308 insns made by reload. */
309 int offset; /* Current offset between the two regs. */
310 int max_offset; /* Maximum offset between the two regs. */
311 int previous_offset; /* Offset at end of previous insn. */
312 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
313 rtx from_rtx; /* REG rtx for the register to be eliminated.
314 We cannot simply compare the number since
315 we might then spuriously replace a hard
316 register corresponding to a pseudo
317 assigned to the reg to be eliminated. */
318 rtx to_rtx; /* REG rtx for the replacement. */
319 } reg_eliminate[] =
321 /* If a set of eliminable registers was specified, define the table from it.
322 Otherwise, default to the normal case of the frame pointer being
323 replaced by the stack pointer. */
325 #ifdef ELIMINABLE_REGS
326 ELIMINABLE_REGS;
327 #else
328 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
329 #endif
331 #define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
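/* For illustration, a target that has an argument pointer as well as a
   frame pointer might define something like the following (hypothetical,
   not taken from any particular machine description):

	#define ELIMINABLE_REGS					\
	{{ ARG_POINTER_REGNUM,	 STACK_POINTER_REGNUM},		\
	 { ARG_POINTER_REGNUM,	 FRAME_POINTER_REGNUM},		\
	 { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}}

   Entries are tried in order, so the most preferred elimination for a
   given "from" register is listed first.  */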
333 /* Record the number of pending eliminations that have an offset not equal
334 to their initial offset. If non-zero, we use a new copy of each
335 replacement result in any insns encountered. */
336 static int num_not_at_initial_offset;
338 /* Count the number of registers that we may be able to eliminate. */
339 static int num_eliminable;
341 /* For each label, we record the offset of each elimination. If we reach
342 a label by more than one path and an offset differs, we cannot do the
343 elimination. This information is indexed by the number of the label.
344 The first table is an array of flags that records whether we have yet
345 encountered a label and the second table is an array of arrays, one
346 entry in the latter array for each elimination. */
348 static char *offsets_known_at;
349 static int (*offsets_at)[NUM_ELIMINABLE_REGS];
351 /* Number of labels in the current function. */
353 static int num_labels;
355 struct hard_reg_n_uses { int regno; int uses; };
357 static int possible_group_p PROTO((int, int *));
358 static void count_possible_groups PROTO((int *, enum machine_mode *,
359 int *, int));
360 static int modes_equiv_for_class_p PROTO((enum machine_mode,
361 enum machine_mode,
362 enum reg_class));
363 static void spill_failure PROTO((rtx));
364 static int new_spill_reg PROTO((int, int, int *, int *, int,
365 FILE *));
366 static void delete_dead_insn PROTO((rtx));
367 static void alter_reg PROTO((int, int));
368 static void mark_scratch_live PROTO((rtx));
369 static void set_label_offsets PROTO((rtx, rtx, int));
370 static int eliminate_regs_in_insn PROTO((rtx, int));
371 static void mark_not_eliminable PROTO((rtx, rtx));
372 static int spill_hard_reg PROTO((int, int, FILE *, int));
373 static void scan_paradoxical_subregs PROTO((rtx));
374 static int hard_reg_use_compare PROTO((const GENERIC_PTR, const GENERIC_PTR));
375 static void order_regs_for_reload PROTO((int));
376 static int compare_spill_regs PROTO((const GENERIC_PTR, const GENERIC_PTR));
377 static void reload_as_needed PROTO((rtx, int));
378 static void forget_old_reloads_1 PROTO((rtx, rtx));
379 static int reload_reg_class_lower PROTO((const GENERIC_PTR, const GENERIC_PTR));
380 static void mark_reload_reg_in_use PROTO((int, int, enum reload_type,
381 enum machine_mode));
382 static void clear_reload_reg_in_use PROTO((int, int, enum reload_type,
383 enum machine_mode));
384 static int reload_reg_free_p PROTO((int, int, enum reload_type));
385 static int reload_reg_free_before_p PROTO((int, int, enum reload_type));
386 static int reload_reg_reaches_end_p PROTO((int, int, enum reload_type));
387 static int reloads_conflict PROTO((int, int));
388 static int allocate_reload_reg PROTO((int, rtx, int, int));
389 static void choose_reload_regs PROTO((rtx, rtx));
390 static void merge_assigned_reloads PROTO((rtx));
391 static void emit_reload_insns PROTO((rtx));
392 static void delete_output_reload PROTO((rtx, int, rtx));
393 static void inc_for_reload PROTO((rtx, rtx, int));
394 static int constraint_accepts_reg_p PROTO((char *, rtx));
395 static int count_occurrences PROTO((rtx, rtx));
396 static void reload_cse_invalidate_regno PROTO((int, enum machine_mode, int));
397 static int reload_cse_mem_conflict_p PROTO((rtx, rtx));
398 static void reload_cse_invalidate_mem PROTO((rtx));
399 static void reload_cse_invalidate_rtx PROTO((rtx, rtx));
400 static int reload_cse_regno_equal_p PROTO((int, rtx, enum machine_mode));
401 static int reload_cse_noop_set_p PROTO((rtx, rtx));
402 static int reload_cse_simplify_set PROTO((rtx, rtx));
403 static int reload_cse_simplify_operands PROTO((rtx));
404 static void reload_cse_check_clobber PROTO((rtx, rtx));
405 static void reload_cse_record_set PROTO((rtx, rtx));
406 static void reload_cse_delete_death_notes PROTO((rtx));
407 static void reload_cse_no_longer_dead PROTO((int, enum machine_mode));
409 /* Initialize the reload pass once per compilation. */
411 void
412 init_reload ()
414 register int i;
416 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
417 Set spill_indirect_levels to the number of levels such addressing is
418 permitted, zero if it is not permitted at all. */
420 register rtx tem
421 = gen_rtx (MEM, Pmode,
422 gen_rtx (PLUS, Pmode,
423 gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
424 GEN_INT (4)));
425 spill_indirect_levels = 0;
427 while (memory_address_p (QImode, tem))
429 spill_indirect_levels++;
430 tem = gen_rtx (MEM, Pmode, tem);
433 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
435 tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
436 indirect_symref_ok = memory_address_p (QImode, tem);
438 /* See if reg+reg is a valid (and offsettable) address. */
440 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
442 tem = gen_rtx (PLUS, Pmode,
443 gen_rtx (REG, Pmode, HARD_FRAME_POINTER_REGNUM),
444 gen_rtx (REG, Pmode, i));
445 /* This way, we make sure that reg+reg is an offsettable address. */
446 tem = plus_constant (tem, 4);
448 if (memory_address_p (QImode, tem))
450 double_reg_address_ok = 1;
451 break;
455 /* Initialize obstack for our rtl allocation. */
456 gcc_obstack_init (&reload_obstack);
457 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
459 /* Decide which register class should be used when reloading
460 addresses. If we are using SMALL_REGISTER_CLASSES, and any
461 parameters are passed in registers, then we do not want to use
462 those registers when reloading an address. Otherwise, if a
463 function argument needs a reload, we may wind up clobbering
464 another argument to the function which was already computed. If
465 we find a subset class which simply avoids those registers, we
466 use it instead. ??? It would be better to only use the
467 restricted class when we actually are loading function arguments,
468 but that is hard to determine. */
469 reload_address_base_reg_class = BASE_REG_CLASS;
470 reload_address_index_reg_class = INDEX_REG_CLASS;
471 if (SMALL_REGISTER_CLASSES)
473 int regno;
474 HARD_REG_SET base, index;
475 enum reg_class *p;
477 COPY_HARD_REG_SET (base, reg_class_contents[BASE_REG_CLASS]);
478 COPY_HARD_REG_SET (index, reg_class_contents[INDEX_REG_CLASS]);
479 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
481 if (FUNCTION_ARG_REGNO_P (regno))
483 CLEAR_HARD_REG_BIT (base, regno);
484 CLEAR_HARD_REG_BIT (index, regno);
488 GO_IF_HARD_REG_EQUAL (base, reg_class_contents[BASE_REG_CLASS],
489 baseok);
490 for (p = reg_class_subclasses[BASE_REG_CLASS];
491 *p != LIM_REG_CLASSES;
492 p++)
494 GO_IF_HARD_REG_EQUAL (base, reg_class_contents[*p], usebase);
495 continue;
496 usebase:
497 reload_address_base_reg_class = *p;
498 break;
500 baseok:;
502 GO_IF_HARD_REG_EQUAL (index, reg_class_contents[INDEX_REG_CLASS],
503 indexok);
504 for (p = reg_class_subclasses[INDEX_REG_CLASS];
505 *p != LIM_REG_CLASSES;
506 p++)
508 GO_IF_HARD_REG_EQUAL (index, reg_class_contents[*p], useindex);
509 continue;
510 useindex:
511 reload_address_index_reg_class = *p;
512 break;
514 indexok:;
518 /* Main entry point for the reload pass.
520 FIRST is the first insn of the function being compiled.
522 GLOBAL nonzero means we were called from global_alloc
523 and should attempt to reallocate any pseudoregs that we
524 displace from hard regs we will use for reloads.
525 If GLOBAL is zero, we do not have enough information to do that,
526 so any pseudo reg that is spilled must go to the stack.
528 DUMPFILE is the global-reg debugging dump file stream, or 0.
529 If it is nonzero, messages are written to it to describe
530 which registers are seized as reload regs, which pseudo regs
531 are spilled from them, and where the pseudo regs are reallocated to.
533 Return value is nonzero if reload failed
534 and we must not do any more for this function. */
537 int reload (first, global, dumpfile)
538 rtx first;
539 int global;
540 FILE *dumpfile;
542 register int class;
543 register int i, j, k;
544 register rtx insn;
545 register struct elim_table *ep;
547 /* The two pointers used to track the true location of the memory used
548 for label offsets. */
549 char *real_known_ptr = NULL_PTR;
550 int (*real_at_ptr)[NUM_ELIMINABLE_REGS];
552 int something_changed;
553 int something_needs_reloads;
554 int something_needs_elimination;
555 int new_basic_block_needs;
556 enum reg_class caller_save_spill_class = NO_REGS;
557 int caller_save_group_size = 1;
559 /* Nonzero means we couldn't get enough spill regs. */
560 int failure = 0;
562 /* The basic block number currently being processed for INSN. */
563 int this_block;
565 /* Make sure even insns with volatile mem refs are recognizable. */
566 init_recog ();
568 /* Enable find_equiv_reg to distinguish insns made by reload. */
569 reload_first_uid = get_max_uid ();
571 for (i = 0; i < N_REG_CLASSES; i++)
572 basic_block_needs[i] = 0;
574 #ifdef SECONDARY_MEMORY_NEEDED
575 /* Initialize the secondary memory table. */
576 clear_secondary_mem ();
577 #endif
579 /* Remember which hard regs appear explicitly
580 before we merge into `regs_ever_live' the ones in which
581 pseudo regs have been allocated. */
582 bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);
584 /* We don't have a stack slot for any spill reg yet. */
585 bzero ((char *) spill_stack_slot, sizeof spill_stack_slot);
586 bzero ((char *) spill_stack_slot_width, sizeof spill_stack_slot_width);
588 /* Initialize the save area information for caller-save, in case some
589 are needed. */
590 init_save_areas ();
592 /* Compute which hard registers are now in use
593 as homes for pseudo registers.
594 This is done here rather than (eg) in global_alloc
595 because this point is reached even if not optimizing. */
596 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
597 mark_home_live (i);
599 /* A function that receives a nonlocal goto must save all call-saved
600 registers. */
601 if (current_function_has_nonlocal_label)
602 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
604 if (! call_used_regs[i] && ! fixed_regs[i])
605 regs_ever_live[i] = 1;
608 for (i = 0; i < scratch_list_length; i++)
609 if (scratch_list[i])
610 mark_scratch_live (scratch_list[i]);
612 /* Make sure that the last insn in the chain
613 is not something that needs reloading. */
614 emit_note (NULL_PTR, NOTE_INSN_DELETED);
616 /* Find all the pseudo registers that didn't get hard regs
617 but do have known equivalent constants or memory slots.
618 These include parameters (known equivalent to parameter slots)
619 and cse'd or loop-moved constant memory addresses.
621 Record constant equivalents in reg_equiv_constant
622 so they will be substituted by find_reloads.
623 Record memory equivalents in reg_equiv_memory_loc so they can
624 be substituted eventually by altering the REG-rtx's. */
626 reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
627 bzero ((char *) reg_equiv_constant, max_regno * sizeof (rtx));
628 reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
629 bzero ((char *) reg_equiv_memory_loc, max_regno * sizeof (rtx));
630 reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
631 bzero ((char *) reg_equiv_mem, max_regno * sizeof (rtx));
632 reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
633 bzero ((char *) reg_equiv_init, max_regno * sizeof (rtx));
634 reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
635 bzero ((char *) reg_equiv_address, max_regno * sizeof (rtx));
636 reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
637 bzero ((char *) reg_max_ref_width, max_regno * sizeof (int));
638 cannot_omit_stores = (char *) alloca (max_regno);
639 bzero (cannot_omit_stores, max_regno);
641 if (SMALL_REGISTER_CLASSES)
642 CLEAR_HARD_REG_SET (forbidden_regs);
644 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
645 Also find all paradoxical subregs and find largest such for each pseudo.
646 On machines with small register classes, record hard registers that
647 are used for user variables. These can never be used for spills.
648 Also look for a "constant" NOTE_INSN_SETJMP. This means that all
649 call-saved registers must be marked live. */
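/* A made-up example of what this loop records (the numbers are
   illustrative only): a pseudo 104 set from an incoming argument slot,
   whose setting insn carries a note
   (REG_EQUIV (mem:SI (plus:SI (reg ap) (const_int 8)))), gets that MEM
   stored in reg_equiv_memory_loc[104], while a pseudo whose note is
   (REG_EQUIV (const_int 5)) typically gets reg_equiv_constant set
   instead.  */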
651 for (insn = first; insn; insn = NEXT_INSN (insn))
653 rtx set = single_set (insn);
655 if (GET_CODE (insn) == NOTE && CONST_CALL_P (insn)
656 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
657 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
658 if (! call_used_regs[i])
659 regs_ever_live[i] = 1;
661 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
663 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
664 if (note
665 #ifdef LEGITIMATE_PIC_OPERAND_P
666 && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
667 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
668 #endif
671 rtx x = XEXP (note, 0);
672 i = REGNO (SET_DEST (set));
673 if (i > LAST_VIRTUAL_REGISTER)
675 if (GET_CODE (x) == MEM)
677 /* If the address of the MEM is a PLUS, the MEM may be shared,
678 so make sure we have an unshared copy here. */
679 if (GET_CODE (XEXP (x, 0)) == PLUS)
680 x = copy_rtx (x);
682 reg_equiv_memory_loc[i] = x;
684 else if (CONSTANT_P (x))
686 if (LEGITIMATE_CONSTANT_P (x))
687 reg_equiv_constant[i] = x;
688 else
689 reg_equiv_memory_loc[i]
690 = force_const_mem (GET_MODE (SET_DEST (set)), x);
692 else
693 continue;
695 /* If this register is being made equivalent to a MEM
696 and the MEM is not SET_SRC, the equivalencing insn
697 is one with the MEM as a SET_DEST and it occurs later.
698 So don't mark this insn now. */
699 if (GET_CODE (x) != MEM
700 || rtx_equal_p (SET_SRC (set), x))
701 reg_equiv_init[i] = insn;
706 /* If this insn is setting a MEM from a register equivalent to it,
707 this is the equivalencing insn. */
708 else if (set && GET_CODE (SET_DEST (set)) == MEM
709 && GET_CODE (SET_SRC (set)) == REG
710 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
711 && rtx_equal_p (SET_DEST (set),
712 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
713 reg_equiv_init[REGNO (SET_SRC (set))] = insn;
715 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
716 scan_paradoxical_subregs (PATTERN (insn));
719 /* Does this function require a frame pointer? */
721 frame_pointer_needed = (! flag_omit_frame_pointer
722 #ifdef EXIT_IGNORE_STACK
723 /* ?? If EXIT_IGNORE_STACK is set, we will not save
724 and restore sp for alloca. So we can't eliminate
725 the frame pointer in that case. At some point,
726 we should improve this by emitting the
727 sp-adjusting insns for this case. */
728 || (current_function_calls_alloca
729 && EXIT_IGNORE_STACK)
730 #endif
731 || FRAME_POINTER_REQUIRED);
733 num_eliminable = 0;
735 /* Initialize the table of registers to eliminate. The way we do this
736 depends on how the eliminable registers were defined. */
737 #ifdef ELIMINABLE_REGS
738 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
740 ep->can_eliminate = ep->can_eliminate_previous
741 = (CAN_ELIMINATE (ep->from, ep->to)
742 && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
744 #else
745 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
746 = ! frame_pointer_needed;
747 #endif
749 /* Count the number of eliminable registers and build the FROM and TO
750 REG rtx's. Note that code in gen_rtx will cause, e.g.,
751 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
752 We depend on this. */
753 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
755 num_eliminable += ep->can_eliminate;
756 ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
757 ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
760 num_labels = max_label_num () - get_first_label_num ();
762 /* Allocate the tables used to store offset information at labels. */
763 /* We used to use alloca here, but the size of what it would try to
764 allocate would occasionally cause it to exceed the stack limit and
765 cause a core dump. */
766 real_known_ptr = xmalloc (num_labels);
767 real_at_ptr
768 = (int (*)[NUM_ELIMINABLE_REGS])
769 xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
771 offsets_known_at = real_known_ptr - get_first_label_num ();
772 offsets_at
773 = (int (*)[NUM_ELIMINABLE_REGS]) (real_at_ptr - get_first_label_num ());
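/* The biasing above lets these arrays be indexed directly by label
   number: for a label number L >= get_first_label_num (), the entry
   offsets_known_at[L] lives at real_known_ptr[L - get_first_label_num ()],
   and similarly for offsets_at.  */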
775 /* Alter each pseudo-reg rtx to contain its hard reg number.
776 Assign stack slots to the pseudos that lack hard regs or equivalents.
777 Do not touch virtual registers. */
779 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
780 alter_reg (i, -1);
782 /* If we have some registers we think can be eliminated, scan all insns to
783 see if there is an insn that sets one of these registers to something
784 other than itself plus a constant. If so, the register cannot be
785 eliminated. Doing this scan here eliminates an extra pass through the
786 main reload loop in the most common case where register elimination
787 cannot be done. */
788 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
789 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
790 || GET_CODE (insn) == CALL_INSN)
791 note_stores (PATTERN (insn), mark_not_eliminable);
793 #ifndef REGISTER_CONSTRAINTS
794 /* If all the pseudo regs have hard regs,
795 except for those that are never referenced,
796 we know that no reloads are needed. */
797 /* But that is not true if there are register constraints, since
798 in that case some pseudos might be in the wrong kind of hard reg. */
800 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
801 if (reg_renumber[i] == -1 && REG_N_REFS (i) != 0)
802 break;
804 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
806 free (real_known_ptr);
807 free (real_at_ptr);
808 return 0;
810 #endif
812 /* Compute the order of preference for hard registers to spill.
813 Store them by decreasing preference in potential_reload_regs. */
815 order_regs_for_reload (global);
817 /* So far, no hard regs have been spilled. */
818 n_spills = 0;
819 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
820 spill_reg_order[i] = -1;
822 /* Initialize to -1, which means take the first spill register. */
823 last_spill_reg = -1;
825 /* On most machines, we can't use any register explicitly used in the
826 rtl as a spill register. But on some, we have to. Those will have
827 taken care to keep the life of hard regs as short as possible. */
829 if (! SMALL_REGISTER_CLASSES)
830 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
832 /* Spill any hard regs that we know we can't eliminate. */
833 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
834 if (! ep->can_eliminate)
835 spill_hard_reg (ep->from, global, dumpfile, 1);
837 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
838 if (frame_pointer_needed)
839 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
840 #endif
842 if (global)
843 for (i = 0; i < N_REG_CLASSES; i++)
845 basic_block_needs[i] = (char *) alloca (n_basic_blocks);
846 bzero (basic_block_needs[i], n_basic_blocks);
849 /* From now on, we need to emit any moves without making new pseudos. */
850 reload_in_progress = 1;
852 /* This loop scans the entire function each go-round
853 and repeats until one repetition spills no additional hard regs. */
855 /* This flag is set when a pseudo reg is spilled,
856 to require another pass. Note that getting an additional reload
857 reg does not necessarily imply any pseudo reg was spilled;
858 sometimes we find a reload reg that no pseudo reg was allocated in. */
859 something_changed = 1;
860 /* This flag is set if there are any insns that require reloading. */
861 something_needs_reloads = 0;
862 /* This flag is set if there are any insns that require register
863 eliminations. */
864 something_needs_elimination = 0;
865 while (something_changed)
867 rtx after_call = 0;
869 /* For each class, number of reload regs needed in that class.
870 This is the maximum over all insns of the needs in that class
871 of the individual insn. */
872 int max_needs[N_REG_CLASSES];
873 /* For each class, size of group of consecutive regs
874 that is needed for the reloads of this class. */
875 int group_size[N_REG_CLASSES];
876 /* For each class, max number of consecutive groups needed.
877 (Each group contains group_size[CLASS] consecutive registers.) */
878 int max_groups[N_REG_CLASSES];
879 /* For each class, max number needed of regs that don't belong
880 to any of the groups. */
881 int max_nongroups[N_REG_CLASSES];
882 /* For each class, the machine mode which requires consecutive
883 groups of regs of that class.
884 If two different modes ever require groups of one class,
885 they must be the same size and equally restrictive for that class,
886 otherwise we can't handle the complexity. */
887 enum machine_mode group_mode[N_REG_CLASSES];
888 /* Record the insn where each maximum need is first found. */
889 rtx max_needs_insn[N_REG_CLASSES];
890 rtx max_groups_insn[N_REG_CLASSES];
891 rtx max_nongroups_insn[N_REG_CLASSES];
892 rtx x;
893 HOST_WIDE_INT starting_frame_size;
894 int previous_frame_pointer_needed = frame_pointer_needed;
895 static char *reg_class_names[] = REG_CLASS_NAMES;
897 something_changed = 0;
898 bzero ((char *) max_needs, sizeof max_needs);
899 bzero ((char *) max_groups, sizeof max_groups);
900 bzero ((char *) max_nongroups, sizeof max_nongroups);
901 bzero ((char *) max_needs_insn, sizeof max_needs_insn);
902 bzero ((char *) max_groups_insn, sizeof max_groups_insn);
903 bzero ((char *) max_nongroups_insn, sizeof max_nongroups_insn);
904 bzero ((char *) group_size, sizeof group_size);
905 for (i = 0; i < N_REG_CLASSES; i++)
906 group_mode[i] = VOIDmode;
908 /* Keep track of which basic blocks are needing the reloads. */
909 this_block = 0;
911 /* Remember whether any element of basic_block_needs
912 changes from 0 to 1 in this pass. */
913 new_basic_block_needs = 0;
915 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done
916 here because the stack size may be a part of the offset computation
917 for register elimination, and there might have been new stack slots
918 created in the last iteration of this loop. */
919 assign_stack_local (BLKmode, 0, 0);
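/* (The zero-sized BLKmode slot requested above is never used; the call is
   made only for its side effect of rounding the frame size up to
   BIGGEST_ALIGNMENT.)  */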
921 starting_frame_size = get_frame_size ();
923 /* Reset all offsets on eliminable registers to their initial values. */
924 #ifdef ELIMINABLE_REGS
925 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
927 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
928 ep->previous_offset = ep->offset
929 = ep->max_offset = ep->initial_offset;
931 #else
932 #ifdef INITIAL_FRAME_POINTER_OFFSET
933 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
934 #else
935 if (!FRAME_POINTER_REQUIRED)
936 abort ();
937 reg_eliminate[0].initial_offset = 0;
938 #endif
939 reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
940 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
941 #endif
943 num_not_at_initial_offset = 0;
945 bzero ((char *) &offsets_known_at[get_first_label_num ()], num_labels);
947 /* Set a known offset for each forced label to be at the initial offset
948 of each elimination. We do this because we assume that all
949 computed jumps occur from a location where each elimination is
950 at its initial offset. */
952 for (x = forced_labels; x; x = XEXP (x, 1))
953 if (XEXP (x, 0))
954 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
956 /* For each pseudo register that has an equivalent location defined,
957 try to eliminate any eliminable registers (such as the frame pointer)
958 assuming initial offsets for the replacement register, which
959 is the normal case.
961 If the resulting location is directly addressable, substitute
962 the MEM we just got directly for the old REG.
964 If it is not addressable but is a constant or the sum of a hard reg
965 and constant, it is probably not addressable because the constant is
966 out of range, in that case record the address; we will generate
967 hairy code to compute the address in a register each time it is
968 needed. Similarly if it is a hard register, but one that is not
969 valid as an address register.
971 If the location is not addressable, but does not have one of the
972 above forms, assign a stack slot. We have to do this to avoid the
973 potential of producing lots of reloads if, e.g., a location involves
974 a pseudo that didn't get a hard register and has an equivalent memory
975 location that also involves a pseudo that didn't get a hard register.
977 Perhaps at some point we will improve reload_when_needed handling
978 so this problem goes away. But that's very hairy. */
980 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
981 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
983 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX, 0);
985 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
986 XEXP (x, 0)))
987 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
988 else if (CONSTANT_P (XEXP (x, 0))
989 || (GET_CODE (XEXP (x, 0)) == REG
990 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
991 || (GET_CODE (XEXP (x, 0)) == PLUS
992 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
993 && (REGNO (XEXP (XEXP (x, 0), 0))
994 < FIRST_PSEUDO_REGISTER)
995 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
996 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
997 else
999 /* Make a new stack slot. Then indicate that something
1000 changed so we go back and recompute offsets for
1001 eliminable registers because the allocation of memory
1002 below might change some offset. reg_equiv_{mem,address}
1003 will be set up for this pseudo on the next pass around
1004 the loop. */
1005 reg_equiv_memory_loc[i] = 0;
1006 reg_equiv_init[i] = 0;
1007 alter_reg (i, -1);
1008 something_changed = 1;
1012 /* If we allocated another pseudo to the stack, redo elimination
1013 bookkeeping. */
1014 if (something_changed)
1015 continue;
1017 /* If caller-saves needs a group, initialize the group to include
1018 the size and mode required for caller-saves. */
1020 if (caller_save_group_size > 1)
1022 group_mode[(int) caller_save_spill_class] = Pmode;
1023 group_size[(int) caller_save_spill_class] = caller_save_group_size;
1026 /* Compute the most additional registers needed by any instruction.
1027 Collect information separately for each class of regs. */
1029 for (insn = first; insn; insn = NEXT_INSN (insn))
1031 if (global && this_block + 1 < n_basic_blocks
1032 && insn == basic_block_head[this_block+1])
1033 ++this_block;
1035 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
1036 might include REG_LABEL), we need to see what effects this
1037 has on the known offsets at labels. */
1039 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
1040 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1041 && REG_NOTES (insn) != 0))
1042 set_label_offsets (insn, insn, 0);
1044 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1046 /* Nonzero means don't use a reload reg that overlaps
1047 the place where a function value can be returned. */
1048 rtx avoid_return_reg = 0;
1050 rtx old_body = PATTERN (insn);
1051 int old_code = INSN_CODE (insn);
1052 rtx old_notes = REG_NOTES (insn);
1053 int did_elimination = 0;
1055 /* To compute the number of reload registers of each class
1056 needed for an insn, we must simulate what choose_reload_regs
1057 can do. We do this by splitting an insn into an "input" and
1058 an "output" part. RELOAD_OTHER reloads are used in both.
1059 The input part uses those reloads, RELOAD_FOR_INPUT reloads,
1060 which must be live over the entire input section of reloads,
1061 and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
1062 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
1063 inputs.
1065 The registers needed for output are RELOAD_OTHER and
1066 RELOAD_FOR_OUTPUT, which are live for the entire output
1067 portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
1068 reloads for each operand.
1070 The total number of registers needed is the maximum of the
1071 inputs and outputs. */
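/* A worked example with made-up counts for a single register class: if an
   insn needs 1 RELOAD_OTHER reload, 2 RELOAD_FOR_INPUT reloads and
   1 RELOAD_FOR_OUTPUT reload (and nothing else), the input part needs
   1 + 2 = 3 registers, the output part needs 1 + 1 = 2, and the insn as a
   whole is charged MAX (3, 2) = 3 registers of that class.  */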
1073 struct needs
1075 /* [0] is normal, [1] is nongroup. */
1076 int regs[2][N_REG_CLASSES];
1077 int groups[N_REG_CLASSES];
1080 /* Each `struct needs' corresponds to one RELOAD_... type. */
1081 struct {
1082 struct needs other;
1083 struct needs input;
1084 struct needs output;
1085 struct needs insn;
1086 struct needs other_addr;
1087 struct needs op_addr;
1088 struct needs op_addr_reload;
1089 struct needs in_addr[MAX_RECOG_OPERANDS];
1090 struct needs in_addr_addr[MAX_RECOG_OPERANDS];
1091 struct needs out_addr[MAX_RECOG_OPERANDS];
1092 struct needs out_addr_addr[MAX_RECOG_OPERANDS];
1093 } insn_needs;
1095 /* If needed, eliminate any eliminable registers. */
1096 if (num_eliminable)
1097 did_elimination = eliminate_regs_in_insn (insn, 0);
1099 /* Set avoid_return_reg if this is an insn
1100 that might use the value of a function call. */
1101 if (SMALL_REGISTER_CLASSES && GET_CODE (insn) == CALL_INSN)
1103 if (GET_CODE (PATTERN (insn)) == SET)
1104 after_call = SET_DEST (PATTERN (insn));
1105 else if (GET_CODE (PATTERN (insn)) == PARALLEL
1106 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1107 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1108 else
1109 after_call = 0;
1111 else if (SMALL_REGISTER_CLASSES && after_call != 0
1112 && !(GET_CODE (PATTERN (insn)) == SET
1113 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
1115 if (reg_referenced_p (after_call, PATTERN (insn)))
1116 avoid_return_reg = after_call;
1117 after_call = 0;
1120 /* Analyze the instruction. */
1121 find_reloads (insn, 0, spill_indirect_levels, global,
1122 spill_reg_order);
1124 /* Remember for later shortcuts which insns had any reloads or
1125 register eliminations.
1127 One might think that it would be worthwhile to mark insns
1128 that need register replacements but not reloads, but this is
1129 not safe because find_reloads may do some manipulation of
1130 the insn (such as swapping commutative operands), which would
1131 be lost when we restore the old pattern after register
1132 replacement. So the actions of find_reloads must be redone in
1133 subsequent passes or in reload_as_needed.
1135 However, it is safe to mark insns that need reloads
1136 but not register replacement. */
1138 PUT_MODE (insn, (did_elimination ? QImode
1139 : n_reloads ? HImode
1140 : GET_MODE (insn) == DImode ? DImode
1141 : VOIDmode));
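/* The insn's mode field is reused here as a cheap per-insn flag: QImode
   marks insns that needed register elimination (whether or not they also
   need reloads), HImode marks insns that need reloads but no elimination,
   an existing DImode marking is left alone, and VOIDmode means neither
   applies.  */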
1143 /* Discard any register replacements done. */
1144 if (did_elimination)
1146 obstack_free (&reload_obstack, reload_firstobj);
1147 PATTERN (insn) = old_body;
1148 INSN_CODE (insn) = old_code;
1149 REG_NOTES (insn) = old_notes;
1150 something_needs_elimination = 1;
1153 /* If this insn has no reloads, we need not do anything except
1154 in the case of a CALL_INSN when we have caller-saves and
1155 caller-save needs reloads. */
1157 if (n_reloads == 0
1158 && ! (GET_CODE (insn) == CALL_INSN
1159 && caller_save_spill_class != NO_REGS))
1160 continue;
1162 something_needs_reloads = 1;
1163 bzero ((char *) &insn_needs, sizeof insn_needs);
1165 /* Count each reload once in every class
1166 containing the reload's own class. */
1168 for (i = 0; i < n_reloads; i++)
1170 register enum reg_class *p;
1171 enum reg_class class = reload_reg_class[i];
1172 int size;
1173 enum machine_mode mode;
1174 int nongroup_need;
1175 struct needs *this_needs;
1177 /* Don't count the dummy reloads, for which one of the
1178 regs mentioned in the insn can be used for reloading.
1179 Don't count optional reloads.
1180 Don't count reloads that got combined with others. */
1181 if (reload_reg_rtx[i] != 0
1182 || reload_optional[i] != 0
1183 || (reload_out[i] == 0 && reload_in[i] == 0
1184 && ! reload_secondary_p[i]))
1185 continue;
1187 /* Show that a reload register of this class is needed
1188 in this basic block. We do not use insn_needs and
1189 insn_groups because they are overly conservative for
1190 this purpose. */
1191 if (global && ! basic_block_needs[(int) class][this_block])
1193 basic_block_needs[(int) class][this_block] = 1;
1194 new_basic_block_needs = 1;
1197 mode = reload_inmode[i];
1198 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1199 mode = reload_outmode[i];
1200 size = CLASS_MAX_NREGS (class, mode);
1202 /* If this class doesn't want a group, determine if we have
1203 a nongroup need or a regular need. We have a nongroup
1204 need if this reload conflicts with a group reload whose
1205 class intersects with this reload's class. */
1207 nongroup_need = 0;
1208 if (size == 1)
1209 for (j = 0; j < n_reloads; j++)
1210 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1211 (GET_MODE_SIZE (reload_outmode[j])
1212 > GET_MODE_SIZE (reload_inmode[j]))
1213 ? reload_outmode[j]
1214 : reload_inmode[j])
1215 > 1)
1216 && (!reload_optional[j])
1217 && (reload_in[j] != 0 || reload_out[j] != 0
1218 || reload_secondary_p[j])
1219 && reloads_conflict (i, j)
1220 && reg_classes_intersect_p (class,
1221 reload_reg_class[j]))
1223 nongroup_need = 1;
1224 break;
1227 /* Decide which time-of-use to count this reload for. */
1228 switch (reload_when_needed[i])
1230 case RELOAD_OTHER:
1231 this_needs = &insn_needs.other;
1232 break;
1233 case RELOAD_FOR_INPUT:
1234 this_needs = &insn_needs.input;
1235 break;
1236 case RELOAD_FOR_OUTPUT:
1237 this_needs = &insn_needs.output;
1238 break;
1239 case RELOAD_FOR_INSN:
1240 this_needs = &insn_needs.insn;
1241 break;
1242 case RELOAD_FOR_OTHER_ADDRESS:
1243 this_needs = &insn_needs.other_addr;
1244 break;
1245 case RELOAD_FOR_INPUT_ADDRESS:
1246 this_needs = &insn_needs.in_addr[reload_opnum[i]];
1247 break;
1248 case RELOAD_FOR_INPADDR_ADDRESS:
1249 this_needs = &insn_needs.in_addr_addr[reload_opnum[i]];
1250 break;
1251 case RELOAD_FOR_OUTPUT_ADDRESS:
1252 this_needs = &insn_needs.out_addr[reload_opnum[i]];
1253 break;
1254 case RELOAD_FOR_OUTADDR_ADDRESS:
1255 this_needs = &insn_needs.out_addr_addr[reload_opnum[i]];
1256 break;
1257 case RELOAD_FOR_OPERAND_ADDRESS:
1258 this_needs = &insn_needs.op_addr;
1259 break;
1260 case RELOAD_FOR_OPADDR_ADDR:
1261 this_needs = &insn_needs.op_addr_reload;
1262 break;
1265 if (size > 1)
1267 enum machine_mode other_mode, allocate_mode;
1269 /* Count number of groups needed separately from
1270 number of individual regs needed. */
1271 this_needs->groups[(int) class]++;
1272 p = reg_class_superclasses[(int) class];
1273 while (*p != LIM_REG_CLASSES)
1274 this_needs->groups[(int) *p++]++;
1276 /* Record size and mode of a group of this class. */
1277 /* If more than one size group is needed,
1278 make all groups the largest needed size. */
1279 if (group_size[(int) class] < size)
1281 other_mode = group_mode[(int) class];
1282 allocate_mode = mode;
1284 group_size[(int) class] = size;
1285 group_mode[(int) class] = mode;
1287 else
1289 other_mode = mode;
1290 allocate_mode = group_mode[(int) class];
1293 /* Crash if two dissimilar machine modes both need
1294 groups of consecutive regs of the same class. */
1296 if (other_mode != VOIDmode && other_mode != allocate_mode
1297 && ! modes_equiv_for_class_p (allocate_mode,
1298 other_mode, class))
1299 fatal_insn ("Two dissimilar machine modes both need groups of consecutive regs of the same class",
1300 insn);
1302 else if (size == 1)
1304 this_needs->regs[nongroup_need][(int) class] += 1;
1305 p = reg_class_superclasses[(int) class];
1306 while (*p != LIM_REG_CLASSES)
1307 this_needs->regs[nongroup_need][(int) *p++] += 1;
1309 else
1310 abort ();
1313 /* All reloads have been counted for this insn;
1314 now merge the various times of use.
1315 This sets insn_needs, etc., to the maximum total number
1316 of registers needed at any point in this insn. */
1318 for (i = 0; i < N_REG_CLASSES; i++)
1320 int in_max, out_max;
1322 /* Compute normal and nongroup needs. */
1323 for (j = 0; j <= 1; j++)
1325 for (in_max = 0, out_max = 0, k = 0;
1326 k < reload_n_operands; k++)
1328 in_max
1329 = MAX (in_max,
1330 (insn_needs.in_addr[k].regs[j][i]
1331 + insn_needs.in_addr_addr[k].regs[j][i]));
1332 out_max
1333 = MAX (out_max, insn_needs.out_addr[k].regs[j][i]);
1334 out_max
1335 = MAX (out_max,
1336 insn_needs.out_addr_addr[k].regs[j][i]);
1339 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1340 and operand addresses but not things used to reload
1341 them. Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
1342 don't conflict with things needed to reload inputs or
1343 outputs. */
1345 in_max = MAX (MAX (insn_needs.op_addr.regs[j][i],
1346 insn_needs.op_addr_reload.regs[j][i]),
1347 in_max);
1349 out_max = MAX (out_max, insn_needs.insn.regs[j][i]);
1351 insn_needs.input.regs[j][i]
1352 = MAX (insn_needs.input.regs[j][i]
1353 + insn_needs.op_addr.regs[j][i]
1354 + insn_needs.insn.regs[j][i],
1355 in_max + insn_needs.input.regs[j][i]);
1357 insn_needs.output.regs[j][i] += out_max;
1358 insn_needs.other.regs[j][i]
1359 += MAX (MAX (insn_needs.input.regs[j][i],
1360 insn_needs.output.regs[j][i]),
1361 insn_needs.other_addr.regs[j][i]);
1365 /* Now compute group needs. */
1366 for (in_max = 0, out_max = 0, j = 0;
1367 j < reload_n_operands; j++)
1369 in_max = MAX (in_max, insn_needs.in_addr[j].groups[i]);
1370 in_max = MAX (in_max,
1371 insn_needs.in_addr_addr[j].groups[i]);
1372 out_max
1373 = MAX (out_max, insn_needs.out_addr[j].groups[i]);
1374 out_max
1375 = MAX (out_max, insn_needs.out_addr_addr[j].groups[i]);
1378 in_max = MAX (MAX (insn_needs.op_addr.groups[i],
1379 insn_needs.op_addr_reload.groups[i]),
1380 in_max);
1381 out_max = MAX (out_max, insn_needs.insn.groups[i]);
1383 insn_needs.input.groups[i]
1384 = MAX (insn_needs.input.groups[i]
1385 + insn_needs.op_addr.groups[i]
1386 + insn_needs.insn.groups[i],
1387 in_max + insn_needs.input.groups[i]);
1389 insn_needs.output.groups[i] += out_max;
1390 insn_needs.other.groups[i]
1391 += MAX (MAX (insn_needs.input.groups[i],
1392 insn_needs.output.groups[i]),
1393 insn_needs.other_addr.groups[i]);
1396 /* If this is a CALL_INSN and caller-saves will need
1397 a spill register, act as if the spill register is
1398 needed for this insn. However, the spill register
1399 can be used by any reload of this insn, so we only
1400 need do something if no need for that class has
1401 been recorded.
1403 The assumption that every CALL_INSN will trigger a
1404 caller-save is highly conservative; however, the number
1405 of cases where caller-saves will need a spill register but
1406 a block containing a CALL_INSN won't need a spill register
1407 of that class should be quite rare.
1409 If a group is needed, the size and mode of the group will
1410 have been set up at the beginning of this loop. */
1412 if (GET_CODE (insn) == CALL_INSN
1413 && caller_save_spill_class != NO_REGS)
1415 /* See if this register would conflict with any reload
1416 that needs a group. */
1417 int nongroup_need = 0;
1418 int *caller_save_needs;
1420 for (j = 0; j < n_reloads; j++)
1421 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1422 (GET_MODE_SIZE (reload_outmode[j])
1423 > GET_MODE_SIZE (reload_inmode[j]))
1424 ? reload_outmode[j]
1425 : reload_inmode[j])
1426 > 1)
1427 && reg_classes_intersect_p (caller_save_spill_class,
1428 reload_reg_class[j]))
1430 nongroup_need = 1;
1431 break;
1434 caller_save_needs
1435 = (caller_save_group_size > 1
1436 ? insn_needs.other.groups
1437 : insn_needs.other.regs[nongroup_need]);
1439 if (caller_save_needs[(int) caller_save_spill_class] == 0)
1441 register enum reg_class *p
1442 = reg_class_superclasses[(int) caller_save_spill_class];
1444 caller_save_needs[(int) caller_save_spill_class]++;
1446 while (*p != LIM_REG_CLASSES)
1447 caller_save_needs[(int) *p++] += 1;
1450 /* Show that this basic block will need a register of
1451 this class. */
1453 if (global
1454 && ! (basic_block_needs[(int) caller_save_spill_class]
1455 [this_block]))
1457 basic_block_needs[(int) caller_save_spill_class]
1458 [this_block] = 1;
1459 new_basic_block_needs = 1;
1463 /* If this insn stores the value of a function call,
1464 and that value is in a register that has been spilled,
1465 and if the insn needs a reload in a class
1466 that might use that register as the reload register,
1467 then add an extra need in that class.
1468 This makes sure we have a register available that does
1469 not overlap the return value. */
1471 if (SMALL_REGISTER_CLASSES && avoid_return_reg)
1473 int regno = REGNO (avoid_return_reg);
1474 int nregs
1475 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
1476 int r;
1477 int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES];
1479 /* First compute the "basic needs", which counts a
1480 need only in the smallest class in which it
1481 is required. */
1483 bcopy ((char *) insn_needs.other.regs[0],
1484 (char *) basic_needs, sizeof basic_needs);
1485 bcopy ((char *) insn_needs.other.groups,
1486 (char *) basic_groups, sizeof basic_groups);
1488 for (i = 0; i < N_REG_CLASSES; i++)
1490 enum reg_class *p;
1492 if (basic_needs[i] >= 0)
1493 for (p = reg_class_superclasses[i];
1494 *p != LIM_REG_CLASSES; p++)
1495 basic_needs[(int) *p] -= basic_needs[i];
1497 if (basic_groups[i] >= 0)
1498 for (p = reg_class_superclasses[i];
1499 *p != LIM_REG_CLASSES; p++)
1500 basic_groups[(int) *p] -= basic_groups[i];
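/* Illustration with made-up counts, assuming BASE_REGS is a subclass of
   GENERAL_REGS: if insn_needs.other.regs[0] recorded 1 for BASE_REGS and
   2 for GENERAL_REGS, the subtraction above leaves basic_needs at 1 for
   BASE_REGS and 1 for GENERAL_REGS, i.e. one reload that truly required
   BASE_REGS and one that only required GENERAL_REGS.  */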
1503 /* Now count extra regs if there might be a conflict with
1504 the return value register. */
1506 for (r = regno; r < regno + nregs; r++)
1507 if (spill_reg_order[r] >= 0)
1508 for (i = 0; i < N_REG_CLASSES; i++)
1509 if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
1511 if (basic_needs[i] > 0)
1513 enum reg_class *p;
1515 insn_needs.other.regs[0][i]++;
1516 p = reg_class_superclasses[i];
1517 while (*p != LIM_REG_CLASSES)
1518 insn_needs.other.regs[0][(int) *p++]++;
1520 if (basic_groups[i] > 0)
1522 enum reg_class *p;
1524 insn_needs.other.groups[i]++;
1525 p = reg_class_superclasses[i];
1526 while (*p != LIM_REG_CLASSES)
1527 insn_needs.other.groups[(int) *p++]++;
1532 /* For each class, collect maximum need of any insn. */
1534 for (i = 0; i < N_REG_CLASSES; i++)
1536 if (max_needs[i] < insn_needs.other.regs[0][i])
1538 max_needs[i] = insn_needs.other.regs[0][i];
1539 max_needs_insn[i] = insn;
1541 if (max_groups[i] < insn_needs.other.groups[i])
1543 max_groups[i] = insn_needs.other.groups[i];
1544 max_groups_insn[i] = insn;
1546 if (max_nongroups[i] < insn_needs.other.regs[1][i])
1548 max_nongroups[i] = insn_needs.other.regs[1][i];
1549 max_nongroups_insn[i] = insn;
1553 /* Note that there is a continue statement above. */
1556 /* If we allocated any new memory locations, make another pass
1557 since it might have changed elimination offsets. */
1558 if (starting_frame_size != get_frame_size ())
1559 something_changed = 1;
1561 if (dumpfile)
1562 for (i = 0; i < N_REG_CLASSES; i++)
1564 if (max_needs[i] > 0)
1565 fprintf (dumpfile,
1566 ";; Need %d reg%s of class %s (for insn %d).\n",
1567 max_needs[i], max_needs[i] == 1 ? "" : "s",
1568 reg_class_names[i], INSN_UID (max_needs_insn[i]));
1569 if (max_nongroups[i] > 0)
1570 fprintf (dumpfile,
1571 ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
1572 max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
1573 reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
1574 if (max_groups[i] > 0)
1575 fprintf (dumpfile,
1576 ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
1577 max_groups[i], max_groups[i] == 1 ? "" : "s",
1578 mode_name[(int) group_mode[i]],
1579 reg_class_names[i], INSN_UID (max_groups_insn[i]));
1582 /* If we have caller-saves, set up the save areas and see if caller-save
1583 will need a spill register. */
1585 if (caller_save_needed)
1587 /* Set the offsets for setup_save_areas. */
1588 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
1589 ep++)
1590 ep->previous_offset = ep->max_offset;
1592 if ( ! setup_save_areas (&something_changed)
1593 && caller_save_spill_class == NO_REGS)
1595 /* The class we will need depends on whether the machine
1596 supports the sum of two registers for an address; see
1597 find_address_reloads for details. */
1599 caller_save_spill_class
1600 = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
1601 caller_save_group_size
1602 = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
1603 something_changed = 1;
1607 /* See if anything that happened changes which eliminations are valid.
1608 For example, on the Sparc, whether or not the frame pointer can
1609 be eliminated can depend on what registers have been used. We need
1610 not check some conditions again (such as flag_omit_frame_pointer)
1611 since they can't have changed. */
1613 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1614 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
1615 #ifdef ELIMINABLE_REGS
1616 || ! CAN_ELIMINATE (ep->from, ep->to)
1617 #endif
1619 ep->can_eliminate = 0;
1621 /* Look for the case where we have discovered that we can't replace
1622 register A with register B and that means that we will now be
1623 trying to replace register A with register C. This means we can
1624 no longer replace register C with register B and we need to disable
1625 such an elimination, if it exists. This occurs often with A == ap,
1626 B == sp, and C == fp. */
1628 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1630 struct elim_table *op;
1631 register int new_to = -1;
1633 if (! ep->can_eliminate && ep->can_eliminate_previous)
1635 /* Find the current elimination for ep->from, if there is a
1636 new one. */
1637 for (op = reg_eliminate;
1638 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1639 if (op->from == ep->from && op->can_eliminate)
1641 new_to = op->to;
1642 break;
1645 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1646 disable it. */
1647 for (op = reg_eliminate;
1648 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1649 if (op->from == new_to && op->to == ep->to)
1650 op->can_eliminate = 0;
1654 /* See if any registers that we thought we could eliminate the previous
1655 time are no longer eliminable. If so, something has changed and we
1656 must spill the register. Also, recompute the number of eliminable
1657 registers and see if the frame pointer is needed; it is if there is
1658 no elimination of the frame pointer that we can perform. */
1660 frame_pointer_needed = 1;
1661 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1663 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
1664 && ep->to != HARD_FRAME_POINTER_REGNUM)
1665 frame_pointer_needed = 0;
1667 if (! ep->can_eliminate && ep->can_eliminate_previous)
1669 ep->can_eliminate_previous = 0;
1670 spill_hard_reg (ep->from, global, dumpfile, 1);
1671 something_changed = 1;
1672 num_eliminable--;
1676 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1677 /* If we didn't need a frame pointer last time, but we do now, spill
1678 the hard frame pointer. */
1679 if (frame_pointer_needed && ! previous_frame_pointer_needed)
1681 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
1682 something_changed = 1;
1684 #endif
1686 /* If all needs are met, we win. */
1688 for (i = 0; i < N_REG_CLASSES; i++)
1689 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1690 break;
1691 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1692 break;
1694 /* Not all needs are met; must spill some hard regs. */
1696 /* Put all registers spilled so far back in potential_reload_regs, but
1697 put them at the front, since we've already spilled most of the
1698 pseudos in them (we might have left some pseudos unspilled if they
1699 were in a block that didn't need any spill registers of a conflicting
1700 class). We used to try to mark off the need for those registers,
1701 but doing so properly is very complex and reallocating them is the
1702 simpler approach. First, "pack" potential_reload_regs by pushing
1703 any nonnegative entries towards the end. That will leave room
1704 for the registers we already spilled.
1706 Also, undo the marking of the spill registers from the last time
1707 around in FORBIDDEN_REGS since we will probably be allocating
1708 them again below.
1710 ??? It is theoretically possible that we might end up not using one
1711 of our previously-spilled registers in this allocation, even though
1712 they are at the head of the list. It's not clear what to do about
1713 this, but it was no better before, when we marked off the needs met
1714 by the previously-spilled registers. With the current code, globals
1715 can be allocated into these registers, but locals cannot. */
1717 if (n_spills)
1719 for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1720 if (potential_reload_regs[i] != -1)
1721 potential_reload_regs[j--] = potential_reload_regs[i];
1723 for (i = 0; i < n_spills; i++)
1725 potential_reload_regs[i] = spill_regs[i];
1726 spill_reg_order[spill_regs[i]] = -1;
1727 CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
1730 n_spills = 0;
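      /* Worked illustration (added commentary, not part of the original
	 source; the register numbers are hypothetical): with
	 FIRST_PSEUDO_REGISTER == 8 and
	 potential_reload_regs == { 2, -1, 5, -1, 7, 1, -1, 3 },
	 the packing loop above scans from the end and pushes every
	 nonnegative entry toward the tail, preserving their relative order,
	 leaving 2, 5, 7, 1, 3 in slots 3..7.  The registers spilled on the
	 previous pass are then written into slots 0..n_spills-1, so they are
	 considered first when spill registers are chosen again below.  */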
1733 /* Now find more reload regs to satisfy the remaining need.
1734 Do it by ascending class number, since otherwise a reg
1735 might be spilled for a big class and might fail to count
1736 for a smaller class even though it belongs to that class.
1738 Count spilled regs in `spills', and add entries to
1739 `spill_regs' and `spill_reg_order'.
1741 ??? Note there is a problem here.
1742 When there is a need for a group in a high-numbered class,
1743 and also need for non-group regs that come from a lower class,
1744 the non-group regs are chosen first. If there aren't many regs,
1745 they might leave no room for a group.
1747 This was happening on the 386. To fix it, we added the code
1748 that calls possible_group_p, so that the lower class won't
1749 break up the last possible group.
1751 Really fixing the problem would require changes above
1752 in counting the regs already spilled, and in choose_reload_regs.
1753 It might be hard to avoid introducing bugs there. */
1755 CLEAR_HARD_REG_SET (counted_for_groups);
1756 CLEAR_HARD_REG_SET (counted_for_nongroups);
1758 for (class = 0; class < N_REG_CLASSES; class++)
1760 /* First get the groups of registers.
1761 If we got single registers first, we might fragment
1762 possible groups. */
1763 while (max_groups[class] > 0)
1765 /* If any single spilled regs happen to form groups,
1766 count them now. Maybe we don't really need
1767 to spill another group. */
1768 count_possible_groups (group_size, group_mode, max_groups,
1769 class);
1771 if (max_groups[class] <= 0)
1772 break;
1774 /* Groups of size 2 (the only groups used on most machines)
1775 are treated specially. */
1776 if (group_size[class] == 2)
1778 /* First, look for a register that will complete a group. */
1779 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1781 int other;
1783 j = potential_reload_regs[i];
1784 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1786 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1787 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1788 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1789 && HARD_REGNO_MODE_OK (other, group_mode[class])
1790 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1791 other)
1792 /* We don't want one part of another group.
1793 We could get "two groups" that overlap! */
1794 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1796 (j < FIRST_PSEUDO_REGISTER - 1
1797 && (other = j + 1, spill_reg_order[other] >= 0)
1798 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1799 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1800 && HARD_REGNO_MODE_OK (j, group_mode[class])
1801 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1802 other)
1803 && ! TEST_HARD_REG_BIT (counted_for_groups,
1804 other))))
1806 register enum reg_class *p;
1808 /* We have found one that will complete a group,
1809 so count off one group as provided. */
1810 max_groups[class]--;
1811 p = reg_class_superclasses[class];
1812 while (*p != LIM_REG_CLASSES)
1814 if (group_size [(int) *p] <= group_size [class])
1815 max_groups[(int) *p]--;
1816 p++;
1819 /* Indicate both these regs are part of a group. */
1820 SET_HARD_REG_BIT (counted_for_groups, j);
1821 SET_HARD_REG_BIT (counted_for_groups, other);
1822 break;
1825 /* We can't complete a group, so start one. */
1826 /* Look for a pair neither of which is explicitly used. */
1827 if (SMALL_REGISTER_CLASSES && i == FIRST_PSEUDO_REGISTER)
1828 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1830 int k;
1831 j = potential_reload_regs[i];
1832 /* Verify that J+1 is a potential reload reg. */
1833 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1834 if (potential_reload_regs[k] == j + 1)
1835 break;
1836 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1837 && k < FIRST_PSEUDO_REGISTER
1838 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1839 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1840 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1841 && HARD_REGNO_MODE_OK (j, group_mode[class])
1842 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1843 j + 1)
1844 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1)
1845 /* Reject J at this stage
1846 if J+1 was explicitly used. */
1847 && ! regs_explicitly_used[j + 1])
1848 break;
1850 /* Now try any group at all
1851 whose registers are not in bad_spill_regs. */
1852 if (i == FIRST_PSEUDO_REGISTER)
1853 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1855 int k;
1856 j = potential_reload_regs[i];
1857 /* Verify that J+1 is a potential reload reg. */
1858 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1859 if (potential_reload_regs[k] == j + 1)
1860 break;
1861 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1862 && k < FIRST_PSEUDO_REGISTER
1863 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1864 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1865 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1866 && HARD_REGNO_MODE_OK (j, group_mode[class])
1867 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1868 j + 1)
1869 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
1870 break;
1873 /* I should be the index in potential_reload_regs
1874 of the new reload reg we have found. */
1876 if (i >= FIRST_PSEUDO_REGISTER)
1878 /* There are no groups left to spill. */
1879 spill_failure (max_groups_insn[class]);
1880 failure = 1;
1881 goto failed;
1883 else
1884 something_changed
1885 |= new_spill_reg (i, class, max_needs, NULL_PTR,
1886 global, dumpfile);
1888 else
1890 /* For groups of more than 2 registers,
1891 look for a sufficient sequence of unspilled registers,
1892 and spill them all at once. */
1893 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1895 int k;
1897 j = potential_reload_regs[i];
1898 if (j >= 0
1899 && j + group_size[class] <= FIRST_PSEUDO_REGISTER
1900 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1902 /* Check each reg in the sequence. */
1903 for (k = 0; k < group_size[class]; k++)
1904 if (! (spill_reg_order[j + k] < 0
1905 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1906 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1907 break;
1908 /* We got a full sequence, so spill them all. */
1909 if (k == group_size[class])
1911 register enum reg_class *p;
1912 for (k = 0; k < group_size[class]; k++)
1914 int idx;
1915 SET_HARD_REG_BIT (counted_for_groups, j + k);
1916 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1917 if (potential_reload_regs[idx] == j + k)
1918 break;
1919 something_changed
1920 |= new_spill_reg (idx, class,
1921 max_needs, NULL_PTR,
1922 global, dumpfile);
1925 /* We have found one that will complete a group,
1926 so count off one group as provided. */
1927 max_groups[class]--;
1928 p = reg_class_superclasses[class];
1929 while (*p != LIM_REG_CLASSES)
1931 if (group_size [(int) *p]
1932 <= group_size [class])
1933 max_groups[(int) *p]--;
1934 p++;
1936 break;
1940 /* We couldn't find any registers for this reload.
1941 Avoid going into an infinite loop. */
1942 if (i >= FIRST_PSEUDO_REGISTER)
1944 /* There are no groups left. */
1945 spill_failure (max_groups_insn[class]);
1946 failure = 1;
1947 goto failed;
1952 /* Now similarly satisfy all need for single registers. */
1954 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1956 /* If we spilled enough regs, but they weren't counted
1957 against the non-group need, see if we can count them now.
1958 If so, we can avoid some actual spilling. */
1959 if (max_needs[class] <= 0 && max_nongroups[class] > 0)
1960 for (i = 0; i < n_spills; i++)
1961 if (TEST_HARD_REG_BIT (reg_class_contents[class],
1962 spill_regs[i])
1963 && !TEST_HARD_REG_BIT (counted_for_groups,
1964 spill_regs[i])
1965 && !TEST_HARD_REG_BIT (counted_for_nongroups,
1966 spill_regs[i])
1967 && max_nongroups[class] > 0)
1969 register enum reg_class *p;
1971 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1972 max_nongroups[class]--;
1973 p = reg_class_superclasses[class];
1974 while (*p != LIM_REG_CLASSES)
1975 max_nongroups[(int) *p++]--;
1977 if (max_needs[class] <= 0 && max_nongroups[class] <= 0)
1978 break;
1980 /* Consider the potential reload regs that aren't
1981 yet in use as reload regs, in order of preference.
1982 Find the most preferred one that's in this class. */
1984 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1985 if (potential_reload_regs[i] >= 0
1986 && TEST_HARD_REG_BIT (reg_class_contents[class],
1987 potential_reload_regs[i])
1988 /* If this reg will not be available for groups,
1989 pick one that does not foreclose possible groups.
1990 This is a kludge, and not very general,
1991 but it should be sufficient to make the 386 work,
1992 and the problem should not occur on machines with
1993 more registers. */
1994 && (max_nongroups[class] == 0
1995 || possible_group_p (potential_reload_regs[i], max_groups)))
1996 break;
1998 /* If we couldn't get a register, try to get one even if we
1999 might foreclose possible groups. This may cause problems
2000 later, but that's better than aborting now, since it is
2001 possible that we will, in fact, be able to form the needed
2002 group even with this allocation. */
2004 if (i >= FIRST_PSEUDO_REGISTER
2005 && (asm_noperands (max_needs[class] > 0
2006 ? max_needs_insn[class]
2007 : max_nongroups_insn[class])
2008 < 0))
2009 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2010 if (potential_reload_regs[i] >= 0
2011 && TEST_HARD_REG_BIT (reg_class_contents[class],
2012 potential_reload_regs[i]))
2013 break;
2015 /* I should be the index in potential_reload_regs
2016 of the new reload reg we have found. */
2018 if (i >= FIRST_PSEUDO_REGISTER)
2020 /* There are no possible registers left to spill. */
2021 spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
2022 : max_nongroups_insn[class]);
2023 failure = 1;
2024 goto failed;
2026 else
2027 something_changed
2028 |= new_spill_reg (i, class, max_needs, max_nongroups,
2029 global, dumpfile);
2034 /* If global-alloc was run, notify it of any register eliminations we have
2035 done. */
2036 if (global)
2037 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2038 if (ep->can_eliminate)
2039 mark_elimination (ep->from, ep->to);
2041 /* Insert code to save and restore call-clobbered hard regs
2042 around calls. Tell what mode to use so that we will process
2043 those insns in reload_as_needed if we have to. */
2045 if (caller_save_needed)
2046 save_call_clobbered_regs (num_eliminable ? QImode
2047 : caller_save_spill_class != NO_REGS ? HImode
2048 : VOIDmode);
2050 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
2051 If that insn didn't set the register (i.e., it copied the register to
2052 memory), just delete that insn instead of the equivalencing insn plus
2053 anything now dead. If we call delete_dead_insn on that insn, we may
2054 delete the insn that actually sets the register if the register dies
2055 there and that is incorrect. */
2057 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
2058 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
2059 && GET_CODE (reg_equiv_init[i]) != NOTE)
2061 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
2062 delete_dead_insn (reg_equiv_init[i]);
2063 else
2065 PUT_CODE (reg_equiv_init[i], NOTE);
2066 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
2067 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
2071 /* Use the reload registers where necessary
2072 by generating move instructions to move the must-be-register
2073 values into or out of the reload registers. */
2075 if (something_needs_reloads || something_needs_elimination
2076 || (caller_save_needed && num_eliminable)
2077 || caller_save_spill_class != NO_REGS)
2078 reload_as_needed (first, global);
2080 /* If we were able to eliminate the frame pointer, show that it is no
2081 longer live at the start of any basic block. If it is live by
2082 virtue of being in a pseudo, that pseudo will be marked live
2083 and hence the frame pointer will be known to be live via that
2084 pseudo. */
2086 if (! frame_pointer_needed)
2087 for (i = 0; i < n_basic_blocks; i++)
2088 CLEAR_REGNO_REG_SET (basic_block_live_at_start[i],
2089 HARD_FRAME_POINTER_REGNUM);
2091 /* Come here (with failure set nonzero) if we can't get enough spill regs
2092 and we decide not to abort about it. */
2093 failed:
2095 reload_in_progress = 0;
2097 /* Now eliminate all pseudo regs by modifying them into
2098 their equivalent memory references.
2099 The REG-rtx's for the pseudos are modified in place,
2100 so all insns that used to refer to them now refer to memory.
2102 For a reg that has a reg_equiv_address, all those insns
2103 were changed by reloading so that no insns refer to it any longer;
2104 but the DECL_RTL of a variable decl may refer to it,
2105 and if so this causes the debugging info to mention the variable. */
2107 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
2109 rtx addr = 0;
2110 int in_struct = 0;
2111 if (reg_equiv_mem[i])
2113 addr = XEXP (reg_equiv_mem[i], 0);
2114 in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
2116 if (reg_equiv_address[i])
2117 addr = reg_equiv_address[i];
2118 if (addr)
2120 if (reg_renumber[i] < 0)
2122 rtx reg = regno_reg_rtx[i];
2123 XEXP (reg, 0) = addr;
2124 REG_USERVAR_P (reg) = 0;
2125 MEM_IN_STRUCT_P (reg) = in_struct;
2126 PUT_CODE (reg, MEM);
2128 else if (reg_equiv_mem[i])
2129 XEXP (reg_equiv_mem[i], 0) = addr;
2133 #ifdef PRESERVE_DEATH_INFO_REGNO_P
2134 /* Make a pass over all the insns and remove death notes for things that
2135 are no longer registers or no longer die in the insn (e.g., an input
2136 and output pseudo being tied). */
2138 for (insn = first; insn; insn = NEXT_INSN (insn))
2139 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2141 rtx note, next;
2143 for (note = REG_NOTES (insn); note; note = next)
2145 next = XEXP (note, 1);
2146 if (REG_NOTE_KIND (note) == REG_DEAD
2147 && (GET_CODE (XEXP (note, 0)) != REG
2148 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
2149 remove_note (insn, note);
2152 #endif
2154 /* If we are doing stack checking, give a warning if this function's
2155 frame size is larger than we expect. */
2156 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
2158 HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
2160 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2161 if (regs_ever_live[i] && ! fixed_regs[i] && call_used_regs[i])
2162 size += UNITS_PER_WORD;
2164 if (size > STACK_CHECK_MAX_FRAME_SIZE)
2165 warning ("frame size too large for reliable stack checking");
2168 /* Indicate that we no longer have known memory locations or constants. */
2169 reg_equiv_constant = 0;
2170 reg_equiv_memory_loc = 0;
2172 if (real_known_ptr)
2173 free (real_known_ptr);
2174 if (real_at_ptr)
2175 free (real_at_ptr);
2177 if (scratch_list)
2178 free (scratch_list);
2179 scratch_list = 0;
2180 if (scratch_block)
2181 free (scratch_block);
2182 scratch_block = 0;
2184 CLEAR_HARD_REG_SET (used_spill_regs);
2185 for (i = 0; i < n_spills; i++)
2186 SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
2188 return failure;
2191 /* Nonzero if, after spilling reg REGNO for non-groups,
2192 it will still be possible to find a group if we still need one. */
2194 static int
2195 possible_group_p (regno, max_groups)
2196 int regno;
2197 int *max_groups;
2199 int i;
2200 int class = (int) NO_REGS;
2202 for (i = 0; i < (int) N_REG_CLASSES; i++)
2203 if (max_groups[i] > 0)
2205 class = i;
2206 break;
2209 if (class == (int) NO_REGS)
2210 return 1;
2212 /* Consider each pair of consecutive registers. */
2213 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2215 /* Ignore pairs that include reg REGNO. */
2216 if (i == regno || i + 1 == regno)
2217 continue;
2219 /* Ignore pairs that are outside the class that needs the group.
2220 ??? Here we fail to handle the case where two different classes
2221 independently need groups. But this never happens with our
2222 current machine descriptions. */
2223 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2224 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2225 continue;
2227 /* A pair of consecutive regs we can still spill does the trick. */
2228 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2229 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2230 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2231 return 1;
2233 /* A pair of one already spilled and one we can spill does it
2234 provided the one already spilled is not otherwise reserved. */
2235 if (spill_reg_order[i] < 0
2236 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2237 && spill_reg_order[i + 1] >= 0
2238 && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
2239 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
2240 return 1;
2241 if (spill_reg_order[i + 1] < 0
2242 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2243 && spill_reg_order[i] >= 0
2244 && ! TEST_HARD_REG_BIT (counted_for_groups, i)
2245 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
2246 return 1;
2249 return 0;
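/* Example (added for clarity; the register numbers are hypothetical): with
   group_size[CLASS] == 2, suppose hard regs 4 and 5 are both in CLASS, neither
   is in bad_spill_regs, and neither has yet been chosen as a spill reg
   (spill_reg_order < 0).  Then possible_group_p (3, max_groups) returns 1,
   because taking reg 3 for a non-group need still leaves the pair 4/5
   available to satisfy the group need later.  possible_group_p (4, max_groups)
   could still return 1, but only if some other viable pair exists, since pairs
   containing REGNO itself are skipped above.  */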
2252 /* Count any groups of CLASS that can be formed from the registers recently
2253 spilled. */
2255 static void
2256 count_possible_groups (group_size, group_mode, max_groups, class)
2257 int *group_size;
2258 enum machine_mode *group_mode;
2259 int *max_groups;
2260 int class;
2262 HARD_REG_SET new;
2263 int i, j;
2265 /* Now find all consecutive groups of spilled registers
2266 and mark each group off against the need for such groups.
2267 But don't count them against ordinary need, yet. */
2269 if (group_size[class] == 0)
2270 return;
2272 CLEAR_HARD_REG_SET (new);
2274 /* Make a mask of all the regs that are spill regs in class CLASS. */
2275 for (i = 0; i < n_spills; i++)
2276 if (TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
2277 && ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[i])
2278 && ! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
2279 SET_HARD_REG_BIT (new, spill_regs[i]);
2281 /* Find each consecutive group of them. */
2282 for (i = 0; i < FIRST_PSEUDO_REGISTER && max_groups[class] > 0; i++)
2283 if (TEST_HARD_REG_BIT (new, i)
2284 && i + group_size[class] <= FIRST_PSEUDO_REGISTER
2285 && HARD_REGNO_MODE_OK (i, group_mode[class]))
2287 for (j = 1; j < group_size[class]; j++)
2288 if (! TEST_HARD_REG_BIT (new, i + j))
2289 break;
2291 if (j == group_size[class])
2293 /* We found a group. Mark it off against this class's need for
2294 groups, and against each superclass too. */
2295 register enum reg_class *p;
2297 max_groups[class]--;
2298 p = reg_class_superclasses[class];
2299 while (*p != LIM_REG_CLASSES)
2301 if (group_size [(int) *p] <= group_size [class])
2302 max_groups[(int) *p]--;
2303 p++;
2306 /* Don't count these registers again. */
2307 for (j = 0; j < group_size[class]; j++)
2308 SET_HARD_REG_BIT (counted_for_groups, i + j);
2311 /* Skip to the last reg in this group. When i is incremented above,
2312 it will then point to the first reg of the next possible group. */
2313 i += j - 1;
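/* Worked example (added commentary; the register numbers are hypothetical):
   suppose group_size[CLASS] == 2 and the spill regs in CLASS not yet counted
   for anything are 3, 4 and 7.  The mask NEW then holds {3, 4, 7}; the scan
   finds the consecutive pair 3/4 (provided HARD_REGNO_MODE_OK accepts reg 3
   in group_mode[CLASS]), decrements max_groups for CLASS and for every
   superclass whose group size is no larger, and marks 3 and 4 in
   counted_for_groups.  Reg 7 has no neighbor in the mask, so it remains
   available for single-register needs.  */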
2317 /* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2318 another mode that needs to be reloaded for the same register class CLASS.
2319 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2320 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2322 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2323 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2324 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2325 causes unnecessary failures on machines requiring alignment of register
2326 groups when the two modes are different sizes, because the larger mode has
2327 more strict alignment rules than the smaller mode. */
2329 static int
2330 modes_equiv_for_class_p (allocate_mode, other_mode, class)
2331 enum machine_mode allocate_mode, other_mode;
2332 enum reg_class class;
2334 register int regno;
2335 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2337 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2338 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2339 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2340 return 0;
2342 return 1;
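/* Example (added for clarity; the modes are hypothetical): if CLASS needs
   both DImode groups and SImode groups, ALLOCATE_MODE is DImode and OTHER_MODE
   is SImode.  The check fails only when some reg in CLASS can hold DImode but
   not SImode; a reg that holds SImode but not DImode is harmless, since a
   DImode-sized allocation is never placed in such a reg.  */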
2345 /* Handle the failure to find a register to spill.
2346 INSN should be one of the insns which needed this particular spill reg. */
2348 static void
2349 spill_failure (insn)
2350 rtx insn;
2352 if (asm_noperands (PATTERN (insn)) >= 0)
2353 error_for_asm (insn, "`asm' needs too many reloads");
2354 else
2355 fatal_insn ("Unable to find a register to spill.", insn);
2358 /* Add a new register to the tables of available spill-registers
2359 (as well as spilling all pseudos allocated to the register).
2360 I is the index of this register in potential_reload_regs.
2361 CLASS is the regclass whose need is being satisfied.
2362 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
2363 so that this register can count off against them.
2364 MAX_NONGROUPS is 0 if this register is part of a group.
2365 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
2367 static int
2368 new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
2369 int i;
2370 int class;
2371 int *max_needs;
2372 int *max_nongroups;
2373 int global;
2374 FILE *dumpfile;
2376 register enum reg_class *p;
2377 int val;
2378 int regno = potential_reload_regs[i];
2380 if (i >= FIRST_PSEUDO_REGISTER)
2381 abort (); /* Caller failed to find any register. */
2383 if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
2384 fatal ("fixed or forbidden register was spilled.\n\
2385 This may be due to a compiler bug or to impossible asm\n\
2386 statements or clauses.");
2388 /* Make reg REGNO an additional reload reg. */
2390 potential_reload_regs[i] = -1;
2391 spill_regs[n_spills] = regno;
2392 spill_reg_order[regno] = n_spills;
2393 if (dumpfile)
2394 fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);
2396 /* Clear off the needs we just satisfied. */
2398 max_needs[class]--;
2399 p = reg_class_superclasses[class];
2400 while (*p != LIM_REG_CLASSES)
2401 max_needs[(int) *p++]--;
2403 if (max_nongroups && max_nongroups[class] > 0)
2405 SET_HARD_REG_BIT (counted_for_nongroups, regno);
2406 max_nongroups[class]--;
2407 p = reg_class_superclasses[class];
2408 while (*p != LIM_REG_CLASSES)
2409 max_nongroups[(int) *p++]--;
2412 /* Spill every pseudo reg that was allocated to this reg
2413 or to something that overlaps this reg. */
2415 val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);
2417 /* If there are some registers still to eliminate and this register
2418 wasn't ever used before, additional stack space may have to be
2419 allocated to store this register. Thus, we may have changed the offset
2420 between the stack and frame pointers, so mark that something has changed.
2421 (If new pseudos were spilled, thus requiring more space, VAL would have
2422 been set non-zero by the call to spill_hard_reg above since additional
2423 reloads may be needed in that case.)
2425 One might think that we need only set VAL to 1 if this is a call-used
2426 register. However, the set of registers that must be saved by the
2427 prologue is not identical to the call-used set. For example, the
2428 register used by the call insn for the return PC is a call-used register,
2429 but must be saved by the prologue. */
2430 if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
2431 val = 1;
2433 regs_ever_live[spill_regs[n_spills]] = 1;
2434 n_spills++;
2436 return val;
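/* Illustration (added commentary; the class names are hypothetical): a
   spilled register is counted off against its class and every containing
   class.  If BASE_REGS is a subclass of GENERAL_REGS and a reg is spilled for
   a BASE_REGS need, both max_needs[BASE_REGS] and max_needs[GENERAL_REGS]
   drop by one, since the same hard reg can also serve any reload that merely
   wants GENERAL_REGS.  A sketch of the idiom used above and elsewhere,
   for illustration only:  */
#if 0
static void
count_off_superclasses (class, max_needs)
     int class;
     int *max_needs;
{
  register enum reg_class *p;

  max_needs[class]--;
  for (p = reg_class_superclasses[class]; *p != LIM_REG_CLASSES; p++)
    max_needs[(int) *p]--;
}
#endif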
2439 /* Delete an unneeded INSN and any previous insns whose sole purpose is loading
2440 data that is dead in INSN. */
2442 static void
2443 delete_dead_insn (insn)
2444 rtx insn;
2446 rtx prev = prev_real_insn (insn);
2447 rtx prev_dest;
2449 /* If the previous insn sets a register that dies in our insn, delete it
2450 too. */
2451 if (prev && GET_CODE (PATTERN (prev)) == SET
2452 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2453 && reg_mentioned_p (prev_dest, PATTERN (insn))
2454 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
2455 delete_dead_insn (prev);
2457 PUT_CODE (insn, NOTE);
2458 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2459 NOTE_SOURCE_FILE (insn) = 0;
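/* Example (added for clarity; the insns are hypothetical):
     (set (reg 100) (mem ADDR))
     (set (mem ADDR2) (reg 100))   ; reg 100 dies here
   If the second insn is passed to delete_dead_insn and reg 100 dies there,
   the first insn, whose only purpose was to load the now-dead value, is
   deleted recursively, and both insns become NOTE_INSN_DELETED notes.  */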
2462 /* Modify the home of pseudo-reg I.
2463 The new home is present in reg_renumber[I].
2465 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2466 or it may be -1, meaning there is none or it is not relevant.
2467 This is used so that all pseudos spilled from a given hard reg
2468 can share one stack slot. */
2470 static void
2471 alter_reg (i, from_reg)
2472 register int i;
2473 int from_reg;
2475 /* When outputting an inline function, this can happen
2476 for a reg that isn't actually used. */
2477 if (regno_reg_rtx[i] == 0)
2478 return;
2480 /* If the reg got changed to a MEM at rtl-generation time,
2481 ignore it. */
2482 if (GET_CODE (regno_reg_rtx[i]) != REG)
2483 return;
2485 /* Modify the reg-rtx to contain the new hard reg
2486 number or else to contain its pseudo reg number. */
2487 REGNO (regno_reg_rtx[i])
2488 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
2490 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2491 allocate a stack slot for it. */
2493 if (reg_renumber[i] < 0
2494 && REG_N_REFS (i) > 0
2495 && reg_equiv_constant[i] == 0
2496 && reg_equiv_memory_loc[i] == 0)
2498 register rtx x;
2499 int inherent_size = PSEUDO_REGNO_BYTES (i);
2500 int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2501 int adjust = 0;
2503 /* Each pseudo reg has an inherent size which comes from its own mode,
2504 and a total size which provides room for paradoxical subregs
2505 which refer to the pseudo reg in wider modes.
2507 We can use a slot already allocated if it provides both
2508 enough inherent space and enough total space.
2509 Otherwise, we allocate a new slot, making sure that it has no less
2510 inherent space, and no less total space, than the previous slot. */
2511 if (from_reg == -1)
2513 /* No known place to spill from => no slot to reuse. */
2514 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size,
2515 inherent_size == total_size ? 0 : -1);
2516 if (BYTES_BIG_ENDIAN)
2517 /* Cancel the big-endian correction done in assign_stack_local.
2518 Get the address of the beginning of the slot.
2519 This is so we can do a big-endian correction unconditionally
2520 below. */
2521 adjust = inherent_size - total_size;
2523 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2525 /* Reuse a stack slot if possible. */
2526 else if (spill_stack_slot[from_reg] != 0
2527 && spill_stack_slot_width[from_reg] >= total_size
2528 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2529 >= inherent_size))
2530 x = spill_stack_slot[from_reg];
2531 /* Allocate a bigger slot. */
2532 else
2534 /* Compute maximum size needed, both for inherent size
2535 and for total size. */
2536 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2537 rtx stack_slot;
2538 if (spill_stack_slot[from_reg])
2540 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2541 > inherent_size)
2542 mode = GET_MODE (spill_stack_slot[from_reg]);
2543 if (spill_stack_slot_width[from_reg] > total_size)
2544 total_size = spill_stack_slot_width[from_reg];
2546 /* Make a slot with that size. */
2547 x = assign_stack_local (mode, total_size,
2548 inherent_size == total_size ? 0 : -1);
2549 stack_slot = x;
2550 if (BYTES_BIG_ENDIAN)
2552 /* Cancel the big-endian correction done in assign_stack_local.
2553 Get the address of the beginning of the slot.
2554 This is so we can do a big-endian correction unconditionally
2555 below. */
2556 adjust = GET_MODE_SIZE (mode) - total_size;
2557 if (adjust)
2558 stack_slot = gen_rtx (MEM, mode_for_size (total_size
2559 * BITS_PER_UNIT,
2560 MODE_INT, 1),
2561 plus_constant (XEXP (x, 0), adjust));
2563 spill_stack_slot[from_reg] = stack_slot;
2564 spill_stack_slot_width[from_reg] = total_size;
2567 /* On a big endian machine, the "address" of the slot
2568 is the address of the low part that fits its inherent mode. */
2569 if (BYTES_BIG_ENDIAN && inherent_size < total_size)
2570 adjust += (total_size - inherent_size);
2572 /* If we have any adjustment to make, or if the stack slot is the
2573 wrong mode, make a new stack slot. */
2574 if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
2576 x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]),
2577 plus_constant (XEXP (x, 0), adjust));
2578 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2581 /* Save the stack slot for later. */
2582 reg_equiv_memory_loc[i] = x;
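      /* Worked example (added commentary; sizes assume 32-bit words and are
	 hypothetical): an SImode pseudo whose widest reference is a
	 paradoxical DImode subreg has inherent_size == 4 and total_size == 8,
	 so its slot must be 8 bytes.  On a big-endian target the adjustment
	 above makes the recorded address point at the last 4 bytes of the
	 slot, which is where the low-order SImode part of the value lives; on
	 a little-endian target no adjustment is needed.  */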
2586 /* Mark the slots in regs_ever_live for the hard regs
2587 used by pseudo-reg number REGNO. */
2589 void
2590 mark_home_live (regno)
2591 int regno;
2593 register int i, lim;
2594 i = reg_renumber[regno];
2595 if (i < 0)
2596 return;
2597 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2598 while (i < lim)
2599 regs_ever_live[i++] = 1;
2602 /* Mark the registers used in SCRATCH as being live. */
2604 static void
2605 mark_scratch_live (scratch)
2606 rtx scratch;
2608 register int i;
2609 int regno = REGNO (scratch);
2610 int lim = regno + HARD_REGNO_NREGS (regno, GET_MODE (scratch));
2612 for (i = regno; i < lim; i++)
2613 regs_ever_live[i] = 1;
2616 /* This function handles the tracking of elimination offsets around branches.
2618 X is a piece of RTL being scanned.
2620 INSN is the insn that it came from, if any.
2622 INITIAL_P is non-zero if we are to set the offset to be the initial
2623 offset and zero if we are setting the offset of the label to be the
2624 current offset. */
2626 static void
2627 set_label_offsets (x, insn, initial_p)
2628 rtx x;
2629 rtx insn;
2630 int initial_p;
2632 enum rtx_code code = GET_CODE (x);
2633 rtx tem;
2634 int i;
2635 struct elim_table *p;
2637 switch (code)
2639 case LABEL_REF:
2640 if (LABEL_REF_NONLOCAL_P (x))
2641 return;
2643 x = XEXP (x, 0);
2645 /* ... fall through ... */
2647 case CODE_LABEL:
2648 /* If we know nothing about this label, set the desired offsets. Note
2649 that this sets the offset at a label to be the offset before a label
2650 if we don't know anything about the label. This is not correct for
2651 the label after a BARRIER, but is the best guess we can make. If
2652 we guessed wrong, we will suppress an elimination that might have
2653 been possible had we been able to guess correctly. */
2655 if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
2657 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2658 offsets_at[CODE_LABEL_NUMBER (x)][i]
2659 = (initial_p ? reg_eliminate[i].initial_offset
2660 : reg_eliminate[i].offset);
2661 offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
2664 /* Otherwise, if this is the definition of a label and it is
2665 preceded by a BARRIER, set our offsets to the known offset of
2666 that label. */
2668 else if (x == insn
2669 && (tem = prev_nonnote_insn (insn)) != 0
2670 && GET_CODE (tem) == BARRIER)
2672 num_not_at_initial_offset = 0;
2673 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2675 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
2676 = offsets_at[CODE_LABEL_NUMBER (x)][i];
2677 if (reg_eliminate[i].can_eliminate
2678 && (reg_eliminate[i].offset
2679 != reg_eliminate[i].initial_offset))
2680 num_not_at_initial_offset++;
2684 else
2685 /* If neither of the above cases is true, compare each offset
2686 with those previously recorded and suppress any eliminations
2687 where the offsets disagree. */
2689 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2690 if (offsets_at[CODE_LABEL_NUMBER (x)][i]
2691 != (initial_p ? reg_eliminate[i].initial_offset
2692 : reg_eliminate[i].offset))
2693 reg_eliminate[i].can_eliminate = 0;
2695 return;
2697 case JUMP_INSN:
2698 set_label_offsets (PATTERN (insn), insn, initial_p);
2700 /* ... fall through ... */
2702 case INSN:
2703 case CALL_INSN:
2704 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2705 and hence must have all eliminations at their initial offsets. */
2706 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2707 if (REG_NOTE_KIND (tem) == REG_LABEL)
2708 set_label_offsets (XEXP (tem, 0), insn, 1);
2709 return;
2711 case ADDR_VEC:
2712 case ADDR_DIFF_VEC:
2713 /* Each of the labels in the address vector must be at their initial
2714 offsets. We want the first field for ADDR_VEC and the second
2715 field for ADDR_DIFF_VEC. */
2717 for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2718 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2719 insn, initial_p);
2720 return;
2722 case SET:
2723 /* We only care about setting PC. If the source is not RETURN,
2724 IF_THEN_ELSE, or a label, disable any eliminations not at
2725 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2726 isn't one of those possibilities. For branches to a label,
2727 call ourselves recursively.
2729 Note that this can disable elimination unnecessarily when we have
2730 a non-local goto since it will look like a non-constant jump to
2731 someplace in the current function. This isn't a significant
2732 problem since such jumps will normally be when all elimination
2733 pairs are back to their initial offsets. */
2735 if (SET_DEST (x) != pc_rtx)
2736 return;
2738 switch (GET_CODE (SET_SRC (x)))
2740 case PC:
2741 case RETURN:
2742 return;
2744 case LABEL_REF:
2745 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
2746 return;
2748 case IF_THEN_ELSE:
2749 tem = XEXP (SET_SRC (x), 1);
2750 if (GET_CODE (tem) == LABEL_REF)
2751 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2752 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2753 break;
2755 tem = XEXP (SET_SRC (x), 2);
2756 if (GET_CODE (tem) == LABEL_REF)
2757 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2758 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2759 break;
2760 return;
2762 default:
2763 break;
2766 /* If we reach here, all eliminations must be at their initial
2767 offset because we are doing a jump to a variable address. */
2768 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2769 if (p->offset != p->initial_offset)
2770 p->can_eliminate = 0;
2771 break;
2773 default:
2774 break;
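/* Scenario (added for clarity; hypothetical): one arm of a branch pushes an
   argument with (pre_dec (reg sp)), changing the sp-vs-fp offset, and both
   arms meet at the same label.  The first path to reach the label records its
   offsets in offsets_at[]; when the other path reaches that label with a
   different offset, the comparison above disagrees and the corresponding
   elimination is disabled rather than risk using a wrong offset.  */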
2778 /* Used for communication between the next two functions to properly share
2779 the vector for an ASM_OPERANDS. */
2781 static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2783 /* Scan X and replace any eliminable registers (such as fp) with a
2784 replacement (such as sp), plus an offset.
2786 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2787 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2788 MEM, we are allowed to replace a sum of a register and the constant zero
2789 with the register, which we cannot do outside a MEM. In addition, we need
2790 to record the fact that a register is referenced outside a MEM.
2792 If INSN is an insn, it is the insn containing X. If we replace a REG
2793 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2794 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2795 the REG is being modified.
2797 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2798 That's used when we eliminate in expressions stored in notes.
2799 This means, do not set ref_outside_mem even if the reference
2800 is outside of MEMs.
2802 If we see a modification to a register we know about, take the
2803 appropriate action (see case SET, below).
2805 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
2806 replacements done assuming all offsets are at their initial values. If
2807 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2808 encounter, return the actual location so that find_reloads will do
2809 the proper thing. */
2812 eliminate_regs (x, mem_mode, insn, storing)
2813 rtx x;
2814 enum machine_mode mem_mode;
2815 rtx insn;
2816 int storing;
2818 enum rtx_code code = GET_CODE (x);
2819 struct elim_table *ep;
2820 int regno;
2821 rtx new;
2822 int i, j;
2823 char *fmt;
2824 int copied = 0;
2826 switch (code)
2828 case CONST_INT:
2829 case CONST_DOUBLE:
2830 case CONST:
2831 case SYMBOL_REF:
2832 case CODE_LABEL:
2833 case PC:
2834 case CC0:
2835 case ASM_INPUT:
2836 case ADDR_VEC:
2837 case ADDR_DIFF_VEC:
2838 case RETURN:
2839 return x;
2841 case ADDRESSOF:
2842 /* This is only for the benefit of the debugging backends, which call
2843 eliminate_regs on DECL_RTL; any ADDRESSOFs in the actual insns are
2844 removed after CSE. */
2845 new = eliminate_regs (XEXP (x, 0), 0, insn, 0);
2846 if (GET_CODE (new) == MEM)
2847 return XEXP (new, 0);
2848 return x;
2850 case REG:
2851 regno = REGNO (x);
2853 /* First handle the case where we encounter a bare register that
2854 is eliminable. Replace it with a PLUS. */
2855 if (regno < FIRST_PSEUDO_REGISTER)
2857 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2858 ep++)
2859 if (ep->from_rtx == x && ep->can_eliminate)
2861 if (! mem_mode
2862 /* Refs inside notes don't count for this purpose. */
2863 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2864 || GET_CODE (insn) == INSN_LIST)))
2865 ep->ref_outside_mem = 1;
2866 return plus_constant (ep->to_rtx, ep->previous_offset);
2870 else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
2871 && (reg_equiv_address[regno] || num_not_at_initial_offset))
2873 /* In this case, find_reloads would attempt to either use an
2874 incorrect address (if something is not at its initial offset)
2875 or substitute a replaced address into an insn (which loses
2876 if the offset is changed by some later action). So we simply
2877 return the replaced stack slot (assuming it is changed by
2878 elimination) and ignore the fact that this is actually a
2879 reference to the pseudo. Ensure we make a copy of the
2880 address in case it is shared. */
2881 new = eliminate_regs (reg_equiv_memory_loc[regno],
2882 mem_mode, insn, 0);
2883 if (new != reg_equiv_memory_loc[regno])
2885 cannot_omit_stores[regno] = 1;
2886 return copy_rtx (new);
2889 return x;
2891 case PLUS:
2892 /* If this is the sum of an eliminable register and a constant, rework
2893 the sum. */
2894 if (GET_CODE (XEXP (x, 0)) == REG
2895 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2896 && CONSTANT_P (XEXP (x, 1)))
2898 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2899 ep++)
2900 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2902 if (! mem_mode
2903 /* Refs inside notes don't count for this purpose. */
2904 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2905 || GET_CODE (insn) == INSN_LIST)))
2906 ep->ref_outside_mem = 1;
2908 /* The only time we want to replace a PLUS with a REG (this
2909 occurs when the constant operand of the PLUS is the negative
2910 of the offset) is when we are inside a MEM. We won't want
2911 to do so at other times because that would change the
2912 structure of the insn in a way that reload can't handle.
2913 We special-case the commonest situation in
2914 eliminate_regs_in_insn, so just replace a PLUS with a
2915 PLUS here, unless inside a MEM. */
2916 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
2917 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2918 return ep->to_rtx;
2919 else
2920 return gen_rtx (PLUS, Pmode, ep->to_rtx,
2921 plus_constant (XEXP (x, 1),
2922 ep->previous_offset));
2925 /* If the register is not eliminable, we are done since the other
2926 operand is a constant. */
2927 return x;
2930 /* If this is part of an address, we want to bring any constant to the
2931 outermost PLUS. We will do this by doing register replacement in
2932 our operands and seeing if a constant shows up in one of them.
2934 We assume here this is part of an address (or a "load address" insn)
2935 since an eliminable register is not likely to appear in any other
2936 context.
2938 If we have (plus (eliminable) (reg)), we want to produce
2939 (plus (plus (replacement) (reg)) (const)). If this was part of a
2940 normal add insn, (plus (replacement) (reg)) will be pushed as a
2941 reload. This is the desired action. */
2944 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn, 0);
2945 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn, 0);
2947 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2949 /* If one side is a PLUS and the other side is a pseudo that
2950 didn't get a hard register but has a reg_equiv_constant,
2951 we must replace the constant here since it may no longer
2952 be in the position of any operand. */
2953 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2954 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2955 && reg_renumber[REGNO (new1)] < 0
2956 && reg_equiv_constant != 0
2957 && reg_equiv_constant[REGNO (new1)] != 0)
2958 new1 = reg_equiv_constant[REGNO (new1)];
2959 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2960 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2961 && reg_renumber[REGNO (new0)] < 0
2962 && reg_equiv_constant[REGNO (new0)] != 0)
2963 new0 = reg_equiv_constant[REGNO (new0)];
2965 new = form_sum (new0, new1);
2967 /* As above, if we are not inside a MEM we do not want to
2968 turn a PLUS into something else. We might try to do so here
2969 for an addition of 0 if we aren't optimizing. */
2970 if (! mem_mode && GET_CODE (new) != PLUS)
2971 return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx);
2972 else
2973 return new;
2976 return x;
2978 case MULT:
2979 /* If this is the product of an eliminable register and a
2980 constant, apply the distributive law and move the constant out
2981 so that we have (plus (mult ..) ..). This is needed in order
2982 to keep load-address insns valid. This case is pathological.
2983 We ignore the possibility of overflow here. */
2984 if (GET_CODE (XEXP (x, 0)) == REG
2985 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2986 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2987 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2988 ep++)
2989 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2991 if (! mem_mode
2992 /* Refs inside notes don't count for this purpose. */
2993 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2994 || GET_CODE (insn) == INSN_LIST)))
2995 ep->ref_outside_mem = 1;
2997 return
2998 plus_constant (gen_rtx (MULT, Pmode, ep->to_rtx, XEXP (x, 1)),
2999 ep->previous_offset * INTVAL (XEXP (x, 1)));
3002 /* ... fall through ... */
3004 case CALL:
3005 case COMPARE:
3006 case MINUS:
3007 case DIV: case UDIV:
3008 case MOD: case UMOD:
3009 case AND: case IOR: case XOR:
3010 case ROTATERT: case ROTATE:
3011 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3012 case NE: case EQ:
3013 case GE: case GT: case GEU: case GTU:
3014 case LE: case LT: case LEU: case LTU:
3016 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn, 0);
3017 rtx new1
3018 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn, 0) : 0;
3020 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
3021 return gen_rtx (code, GET_MODE (x), new0, new1);
3023 return x;
3025 case EXPR_LIST:
3026 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
3027 if (XEXP (x, 0))
3029 new = eliminate_regs (XEXP (x, 0), mem_mode, insn, 0);
3030 if (new != XEXP (x, 0))
3031 x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1));
3034 /* ... fall through ... */
3036 case INSN_LIST:
3037 /* Now do eliminations in the rest of the chain. If this was
3038 an EXPR_LIST, this might result in allocating more memory than is
3039 strictly needed, but it simplifies the code. */
3040 if (XEXP (x, 1))
3042 new = eliminate_regs (XEXP (x, 1), mem_mode, insn, 0);
3043 if (new != XEXP (x, 1))
3044 return gen_rtx (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
3046 return x;
3048 case PRE_INC:
3049 case POST_INC:
3050 case PRE_DEC:
3051 case POST_DEC:
3052 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3053 if (ep->to_rtx == XEXP (x, 0))
3055 int size = GET_MODE_SIZE (mem_mode);
3057 /* If more bytes than MEM_MODE are pushed, account for them. */
3058 #ifdef PUSH_ROUNDING
3059 if (ep->to_rtx == stack_pointer_rtx)
3060 size = PUSH_ROUNDING (size);
3061 #endif
3062 if (code == PRE_DEC || code == POST_DEC)
3063 ep->offset += size;
3064 else
3065 ep->offset -= size;
3068 /* Fall through to generic unary operation case. */
3069 case STRICT_LOW_PART:
3070 case NEG: case NOT:
3071 case SIGN_EXTEND: case ZERO_EXTEND:
3072 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3073 case FLOAT: case FIX:
3074 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3075 case ABS:
3076 case SQRT:
3077 case FFS:
3078 new = eliminate_regs (XEXP (x, 0), mem_mode, insn, 0);
3079 if (new != XEXP (x, 0))
3080 return gen_rtx (code, GET_MODE (x), new);
3081 return x;
3083 case SUBREG:
3084 /* Similar to above processing, but preserve SUBREG_WORD.
3085 Convert (subreg (mem)) to (mem) if not paradoxical.
3086 Also, if we have a non-paradoxical (subreg (pseudo)) and the
3087 pseudo didn't get a hard reg, we must replace this with the
3088 eliminated version of the memory location because push_reloads
3089 may do the replacement in certain circumstances. */
3090 if (GET_CODE (SUBREG_REG (x)) == REG
3091 && (GET_MODE_SIZE (GET_MODE (x))
3092 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3093 && reg_equiv_memory_loc != 0
3094 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
3096 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
3097 mem_mode, insn, 0);
3099 /* If we didn't change anything, we must retain the pseudo. */
3100 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
3101 new = SUBREG_REG (x);
3102 else
3104 /* Otherwise, ensure NEW isn't shared in case we have to reload
3105 it. */
3106 new = copy_rtx (new);
3108 /* In this case, we must show that the pseudo is used in this
3109 insn so that delete_output_reload will do the right thing. */
3110 if (insn != 0 && GET_CODE (insn) != EXPR_LIST
3111 && GET_CODE (insn) != INSN_LIST)
3112 emit_insn_before (gen_rtx (USE, VOIDmode, SUBREG_REG (x)),
3113 insn);
3116 else
3117 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn, 0);
3119 if (new != XEXP (x, 0))
3121 int x_size = GET_MODE_SIZE (GET_MODE (x));
3122 int new_size = GET_MODE_SIZE (GET_MODE (new));
3124 /* When asked to spill a partial word subreg, we need to go
3125 ahead and spill the whole thing against the possibility
3126 that we reload the whole reg and find garbage at the top. */
3127 if (storing
3128 && GET_CODE (new) == MEM
3129 && x_size < new_size
3130 && ((x_size + UNITS_PER_WORD-1) / UNITS_PER_WORD
3131 == (new_size + UNITS_PER_WORD-1) / UNITS_PER_WORD))
3132 return new;
3133 else if (GET_CODE (new) == MEM
3134 && x_size <= new_size
3135 #ifdef LOAD_EXTEND_OP
3136 /* On these machines we will be reloading what is
3137 inside the SUBREG if it originally was a pseudo and
3138 the inner and outer modes are both a word or
3139 smaller. So leave the SUBREG then. */
3140 && ! (GET_CODE (SUBREG_REG (x)) == REG
3141 && x_size <= UNITS_PER_WORD
3142 && new_size <= UNITS_PER_WORD
3143 && x_size > new_size
3144 && INTEGRAL_MODE_P (GET_MODE (new))
3145 && LOAD_EXTEND_OP (GET_MODE (new)) != NIL)
3146 #endif
3149 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
3150 enum machine_mode mode = GET_MODE (x);
3152 if (BYTES_BIG_ENDIAN)
3153 offset += (MIN (UNITS_PER_WORD,
3154 GET_MODE_SIZE (GET_MODE (new)))
3155 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
3157 PUT_MODE (new, mode);
3158 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
3159 return new;
3161 else
3162 return gen_rtx (SUBREG, GET_MODE (x), new, SUBREG_WORD (x));
3165 return x;
3167 case USE:
3168 /* If using a register that is the source of an elimination we still
3169 think can be performed, note it cannot be performed since we don't
3170 know how this register is used. */
3171 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3172 if (ep->from_rtx == XEXP (x, 0))
3173 ep->can_eliminate = 0;
3175 new = eliminate_regs (XEXP (x, 0), mem_mode, insn, 0);
3176 if (new != XEXP (x, 0))
3177 return gen_rtx (code, GET_MODE (x), new);
3178 return x;
3180 case CLOBBER:
3181 /* If clobbering a register that is the replacement register for an
3182 elimination we still think can be performed, note that it cannot
3183 be performed. Otherwise, we need not be concerned about it. */
3184 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3185 if (ep->to_rtx == XEXP (x, 0))
3186 ep->can_eliminate = 0;
3188 new = eliminate_regs (XEXP (x, 0), mem_mode, insn, 0);
3189 if (new != XEXP (x, 0))
3190 return gen_rtx (code, GET_MODE (x), new);
3191 return x;
3193 case ASM_OPERANDS:
3195 rtx *temp_vec;
3196 /* Properly handle sharing input and constraint vectors. */
3197 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
3199 /* When we come to a new vector not seen before,
3200 scan all its elements; keep the old vector if none
3201 of them changes; otherwise, make a copy. */
3202 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
3203 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
3204 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3205 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
3206 mem_mode, insn, 0);
3208 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3209 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
3210 break;
3212 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
3213 new_asm_operands_vec = old_asm_operands_vec;
3214 else
3215 new_asm_operands_vec
3216 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
3219 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
3220 if (new_asm_operands_vec == old_asm_operands_vec)
3221 return x;
3223 new = gen_rtx (ASM_OPERANDS, VOIDmode, ASM_OPERANDS_TEMPLATE (x),
3224 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
3225 ASM_OPERANDS_OUTPUT_IDX (x), new_asm_operands_vec,
3226 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
3227 ASM_OPERANDS_SOURCE_FILE (x),
3228 ASM_OPERANDS_SOURCE_LINE (x));
3229 new->volatil = x->volatil;
3230 return new;
3233 case SET:
3234 /* Check for setting a register that we know about. */
3235 if (GET_CODE (SET_DEST (x)) == REG)
3237 /* See if this is setting the replacement register for an
3238 elimination.
3240 If DEST is the hard frame pointer, we do nothing because we
3241 assume that all assignments to the frame pointer are for
3242 non-local gotos and are being done at a time when they are valid
3243 and do not disturb anything else. Some machines want to
3244 eliminate a fake argument pointer (or even a fake frame pointer)
3245 with either the real frame or the stack pointer. Assignments to
3246 the hard frame pointer must not prevent this elimination. */
3248 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3249 ep++)
3250 if (ep->to_rtx == SET_DEST (x)
3251 && SET_DEST (x) != hard_frame_pointer_rtx)
3253 /* If it is being incremented, adjust the offset. Otherwise,
3254 this elimination can't be done. */
3255 rtx src = SET_SRC (x);
3257 if (GET_CODE (src) == PLUS
3258 && XEXP (src, 0) == SET_DEST (x)
3259 && GET_CODE (XEXP (src, 1)) == CONST_INT)
3260 ep->offset -= INTVAL (XEXP (src, 1));
3261 else
3262 ep->can_eliminate = 0;
3265 /* Now check to see if we are assigning to a register that can be
3266 eliminated. If so, it must be as part of a PARALLEL, since we
3267 will not have been called if this is a single SET. So indicate
3268 that we can no longer eliminate this reg. */
3269 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3270 ep++)
3271 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
3272 ep->can_eliminate = 0;
3275 /* Now avoid the loop below in this common case. */
3277 rtx new0 = eliminate_regs (SET_DEST (x), 0, insn, 1);
3278 rtx new1 = eliminate_regs (SET_SRC (x), 0, insn, 0);
3280 /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
3281 write a CLOBBER insn. */
3282 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
3283 && insn != 0 && GET_CODE (insn) != EXPR_LIST
3284 && GET_CODE (insn) != INSN_LIST)
3285 emit_insn_after (gen_rtx (CLOBBER, VOIDmode, SET_DEST (x)), insn);
3287 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
3288 return gen_rtx (SET, VOIDmode, new0, new1);
3291 return x;
3293 case MEM:
3294 /* This is only for the benefit of the debugging backends, which call
3295 eliminate_regs on DECL_RTL; any ADDRESSOFs in the actual insns are
3296 removed after CSE. */
3297 if (GET_CODE (XEXP (x, 0)) == ADDRESSOF)
3298 return eliminate_regs (XEXP (XEXP (x, 0), 0), 0, insn, 0);
3300 /* Our only special processing is to pass the mode of the MEM to our
3301 recursive call and copy the flags. While we are here, handle this
3302 case more efficiently. */
3303 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn, 0);
3304 if (new != XEXP (x, 0))
3306 new = gen_rtx (MEM, GET_MODE (x), new);
3307 new->volatil = x->volatil;
3308 new->unchanging = x->unchanging;
3309 new->in_struct = x->in_struct;
3310 return new;
3312 else
3313 return x;
3315 default:
3316 break;
3319 /* Process each of our operands recursively. If any have changed, make a
3320 copy of the rtx. */
3321 fmt = GET_RTX_FORMAT (code);
3322 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3324 if (*fmt == 'e')
3326 new = eliminate_regs (XEXP (x, i), mem_mode, insn, 0);
3327 if (new != XEXP (x, i) && ! copied)
3329 rtx new_x = rtx_alloc (code);
3330 bcopy ((char *) x, (char *) new_x,
3331 (sizeof (*new_x) - sizeof (new_x->fld)
3332 + sizeof (new_x->fld[0]) * GET_RTX_LENGTH (code)));
3333 x = new_x;
3334 copied = 1;
3336 XEXP (x, i) = new;
3338 else if (*fmt == 'E')
3340 int copied_vec = 0;
3341 for (j = 0; j < XVECLEN (x, i); j++)
3343 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn, 0);
3344 if (new != XVECEXP (x, i, j) && ! copied_vec)
3346 rtvec new_v = gen_rtvec_vv (XVECLEN (x, i),
3347 XVEC (x, i)->elem);
3348 if (! copied)
3350 rtx new_x = rtx_alloc (code);
3351 bcopy ((char *) x, (char *) new_x,
3352 (sizeof (*new_x) - sizeof (new_x->fld)
3353 + (sizeof (new_x->fld[0])
3354 * GET_RTX_LENGTH (code))));
3355 x = new_x;
3356 copied = 1;
3358 XVEC (x, i) = new_v;
3359 copied_vec = 1;
3361 XVECEXP (x, i, j) = new;
3366 return x;
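 /* Editor's note: the loop above copies X lazily -- a fresh rtx is allocated
    only the first time one of its operands actually changes, so unchanged
    expressions continue to be shared.  The following stand-alone sketch of
    that copy-on-write pattern is hypothetical, is not part of this file, and
    is wrapped in "#if 0" so it is never compiled.  */
 #if 0
 #include <stdlib.h>
 #include <string.h>

 struct node { int value; struct node *kid[2]; };

 /* Return a version of X in which every VALUE equal to FROM is replaced by
    TO, copying a node only when it (or something below it) changes.  */
 static struct node *
 subst_value (x, from, to)
      struct node *x;
      int from, to;
 {
   struct node *copy = x;
   int i;

   if (x == 0)
     return 0;

   if (x->value == from)
     {
       copy = (struct node *) malloc (sizeof *copy);
       memcpy (copy, x, sizeof *copy);
       copy->value = to;
     }

   for (i = 0; i < 2; i++)
     {
       struct node *new = subst_value (x->kid[i], from, to);

       if (new != x->kid[i] && copy == x)
 	{
 	  /* First change below this node: now we must copy it.  */
 	  copy = (struct node *) malloc (sizeof *copy);
 	  memcpy (copy, x, sizeof *copy);
 	}
       copy->kid[i] = new;
     }

   return copy;			/* == X when nothing changed.  */
 }
 #endif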
3369 /* Scan INSN and eliminate all eliminable registers in it.
3371 If REPLACE is nonzero, do the replacement destructively. Also
3372 delete the insn as dead if it is setting an eliminable register.
3374 If REPLACE is zero, do all our allocations in reload_obstack.
3376 If no eliminations were done and this insn doesn't require any elimination
3377 processing (these are not identical conditions: it might be updating sp,
3378 but not referencing fp; this needs to be seen during reload_as_needed so
3379 that the offset between fp and sp can be taken into consideration), zero
3380 is returned. Otherwise, 1 is returned. */
3382 static int
3383 eliminate_regs_in_insn (insn, replace)
3384 rtx insn;
3385 int replace;
3387 rtx old_body = PATTERN (insn);
3388 rtx old_set = single_set (insn);
3389 rtx new_body;
3390 int val = 0;
3391 struct elim_table *ep;
3393 if (! replace)
3394 push_obstacks (&reload_obstack, &reload_obstack);
3396 if (old_set != 0 && GET_CODE (SET_DEST (old_set)) == REG
3397 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3399 /* Check for setting an eliminable register. */
3400 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3401 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3403 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3404 /* If this is setting the frame pointer register to the
3405 hardware frame pointer register and this is an elimination
3406 that will be done (tested above), this insn is really
3407 adjusting the frame pointer downward to compensate for
3408 the adjustment done before a nonlocal goto. */
3409 if (ep->from == FRAME_POINTER_REGNUM
3410 && ep->to == HARD_FRAME_POINTER_REGNUM)
3412 rtx src = SET_SRC (old_set);
3413 int offset, ok = 0;
3414 rtx prev_insn, prev_set;
3416 if (src == ep->to_rtx)
3417 offset = 0, ok = 1;
3418 else if (GET_CODE (src) == PLUS
3419 && GET_CODE (XEXP (src, 0)) == CONST_INT)
3420 offset = INTVAL (XEXP (src, 0)), ok = 1;
3421 else if ((prev_insn = prev_nonnote_insn (insn)) != 0
3422 && (prev_set = single_set (prev_insn)) != 0
3423 && rtx_equal_p (SET_DEST (prev_set), src))
3425 src = SET_SRC (prev_set);
3426 if (src == ep->to_rtx)
3427 offset = 0, ok = 1;
3428 else if (GET_CODE (src) == PLUS
3429 && GET_CODE (XEXP (src, 0)) == CONST_INT
3430 && XEXP (src, 1) == ep->to_rtx)
3431 offset = INTVAL (XEXP (src, 0)), ok = 1;
3432 else if (GET_CODE (src) == PLUS
3433 && GET_CODE (XEXP (src, 1)) == CONST_INT
3434 && XEXP (src, 0) == ep->to_rtx)
3435 offset = INTVAL (XEXP (src, 1)), ok = 1;
3438 if (ok)
3440 if (replace)
3442 rtx src
3443 = plus_constant (ep->to_rtx, offset - ep->offset);
3445 /* First see if this insn remains valid when we
3446 make the change. If not, keep the INSN_CODE
3447 the same and let reload fix it up. */
3448 validate_change (insn, &SET_SRC (old_set), src, 1);
3449 validate_change (insn, &SET_DEST (old_set),
3450 ep->to_rtx, 1);
3451 if (! apply_change_group ())
3453 SET_SRC (old_set) = src;
3454 SET_DEST (old_set) = ep->to_rtx;
3458 val = 1;
3459 goto done;
3462 #endif
3464 /* In this case this insn isn't serving a useful purpose. We
3465 will delete it in reload_as_needed once we know that this
3466 elimination is, in fact, being done.
3468 If REPLACE isn't set, we can't delete this insn, but needn't
3469 process it since it won't be used unless something changes. */
3470 if (replace)
3471 delete_dead_insn (insn);
3472 val = 1;
3473 goto done;
3476 /* Check for (set (reg) (plus (reg from) (offset))) where the offset
3477 in the insn is the negative of the offset in FROM. Substitute
3478 (set (reg) (reg to)) for the insn and change its code.
3480 We have to do this here, rather than in eliminate_regs, so that we can
3481 change the insn code. */
3483 if (GET_CODE (SET_SRC (old_set)) == PLUS
3484 && GET_CODE (XEXP (SET_SRC (old_set), 0)) == REG
3485 && GET_CODE (XEXP (SET_SRC (old_set), 1)) == CONST_INT)
3486 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3487 ep++)
3488 if (ep->from_rtx == XEXP (SET_SRC (old_set), 0)
3489 && ep->can_eliminate)
3491 /* We must stop at the first elimination that will be used.
3492 If this one would replace the PLUS with a REG, do it
3493 now. Otherwise, quit the loop and let eliminate_regs
3494 do its normal replacement. */
3495 if (ep->offset == - INTVAL (XEXP (SET_SRC (old_set), 1)))
3497 /* We assume here that we don't need a PARALLEL of
3498 any CLOBBERs for this assignment. There's not
3499 much we can do if we do need it. */
3500 PATTERN (insn) = gen_rtx (SET, VOIDmode,
3501 SET_DEST (old_set), ep->to_rtx);
3502 INSN_CODE (insn) = -1;
3503 val = 1;
3504 goto done;
3507 break;
3511 old_asm_operands_vec = 0;
3513 /* Replace the body of this insn with a substituted form. If we changed
3514 something, return non-zero.
3516 If we are replacing a body that was a (set X (plus Y Z)), try to
3517 re-recognize the insn. We do this in case we had a simple addition
3518 but now can do this as a load-address. This saves an insn in this
3519 common case. */
3521 new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX, 0);
3522 if (new_body != old_body)
3524 /* If we aren't replacing things permanently and we changed something,
3525 make another copy to ensure that all the RTL is new. Otherwise
3526 things can go wrong if find_reloads swaps commutative operands
3527 and one is inside RTL that has been copied while the other is not. */
3529 /* Don't copy an asm_operands because (1) there's no need and (2)
3530 copy_rtx can't do it properly when there are multiple outputs. */
3531 if (! replace && asm_noperands (old_body) < 0)
3532 new_body = copy_rtx (new_body);
3534 /* If we had a move insn but now we don't, rerecognize it. This will
3535 cause spurious re-recognition if the old move had a PARALLEL since
3536 the new one still will, but we can't call single_set without
3537 having put NEW_BODY into the insn and the re-recognition won't
3538 hurt in this rare case. */
3539 if (old_set != 0
3540 && ((GET_CODE (SET_SRC (old_set)) == REG
3541 && (GET_CODE (new_body) != SET
3542 || GET_CODE (SET_SRC (new_body)) != REG))
3543 /* If this was a load from or store to memory, compare
3544 the MEM in recog_operand to the one in the insn. If they
3545 are not equal, then rerecognize the insn. */
3546 || (old_set != 0
3547 && ((GET_CODE (SET_SRC (old_set)) == MEM
3548 && SET_SRC (old_set) != recog_operand[1])
3549 || (GET_CODE (SET_DEST (old_set)) == MEM
3550 && SET_DEST (old_set) != recog_operand[0])))
3551 /* If this was an add insn before, rerecognize. */
3552 || GET_CODE (SET_SRC (old_set)) == PLUS))
3554 if (! validate_change (insn, &PATTERN (insn), new_body, 0))
3555 /* If recognition fails, store the new body anyway.
3556 It's normal to have recognition failures here
3557 due to bizarre memory addresses; reloading will fix them. */
3558 PATTERN (insn) = new_body;
3560 else
3561 PATTERN (insn) = new_body;
3563 val = 1;
3566 /* Loop through all elimination pairs. See if any have changed and
3567 recalculate the number not at initial offset.
3569 Compute the maximum offset (minimum offset if the stack does not
3570 grow downward) for each elimination pair.
3572 We also detect cases where register elimination cannot be done,
3573 namely, if a register would be both changed and referenced outside a MEM
3574 in the resulting insn since such an insn is often undefined and, even if
3575 not, we cannot know what meaning will be given to it. Note that it is
3576 valid to have a register used in an address in an insn that changes it
3577 (presumably with a pre- or post-increment or decrement).
3579 If anything changes, return nonzero. */
3581 num_not_at_initial_offset = 0;
3582 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3584 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3585 ep->can_eliminate = 0;
3587 ep->ref_outside_mem = 0;
3589 if (ep->previous_offset != ep->offset)
3590 val = 1;
3592 ep->previous_offset = ep->offset;
3593 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3594 num_not_at_initial_offset++;
3596 #ifdef STACK_GROWS_DOWNWARD
3597 ep->max_offset = MAX (ep->max_offset, ep->offset);
3598 #else
3599 ep->max_offset = MIN (ep->max_offset, ep->offset);
3600 #endif
3603 done:
3604 /* If we changed something, perform elimination in REG_NOTES. This is
3605 needed even when REPLACE is zero because a REG_DEAD note might refer
3606 to a register that we eliminate and could cause a different number
3607 of spill registers to be needed in the final reload pass than in
3608 the pre-passes. */
3609 if (val && REG_NOTES (insn) != 0)
3610 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn), 0);
3612 if (! replace)
3613 pop_obstacks ();
3615 return val;
3618 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3619 replacement we currently believe is valid, mark it as not eliminable if X
3620 modifies DEST in any way other than by adding a constant integer to it.
3622 If DEST is the hard frame pointer, we do nothing because we assume that
3623 all assignments to the hard frame pointer are for nonlocal gotos and are being
3624 done at a time when they are valid and do not disturb anything else.
3625 Some machines want to eliminate a fake argument pointer with either the
3626 frame or stack pointer. Assignments to the hard frame pointer must not
3627 prevent this elimination.
3629 Called via note_stores from reload before starting its passes to scan
3630 the insns of the function. */
3632 static void
3633 mark_not_eliminable (dest, x)
3634 rtx dest;
3635 rtx x;
3637 register int i;
3639 /* A SUBREG of a hard register here is just changing its mode. We should
3640 not see a SUBREG of an eliminable hard register, but check just in
3641 case. */
3642 if (GET_CODE (dest) == SUBREG)
3643 dest = SUBREG_REG (dest);
3645 if (dest == hard_frame_pointer_rtx)
3646 return;
3648 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3649 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3650 && (GET_CODE (x) != SET
3651 || GET_CODE (SET_SRC (x)) != PLUS
3652 || XEXP (SET_SRC (x), 0) != dest
3653 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3655 reg_eliminate[i].can_eliminate_previous
3656 = reg_eliminate[i].can_eliminate = 0;
3657 num_eliminable--;
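 /* Editor's note: a hypothetical, stand-alone restatement (not part of this
    file) of the test applied above.  An elimination stays valid only while
    every store into the replacement register has the shape
    (set REG (plus REG (const_int N))), because only then does the register
    differ from its initial value by a compile-time-known offset.  Wrapped in
    "#if 0" so it is never compiled.  */
 #if 0
 static int
 store_keeps_offset_known_p (x, dest)
      rtx x;
      rtx dest;
 {
   return (GET_CODE (x) == SET
 	  && SET_DEST (x) == dest
 	  && GET_CODE (SET_SRC (x)) == PLUS
 	  && XEXP (SET_SRC (x), 0) == dest
 	  && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT);
 }
 #endif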
3661 /* Kick all pseudos out of hard register REGNO.
3662 If GLOBAL is nonzero, try to find someplace else to put them.
3663 If DUMPFILE is nonzero, log actions taken on that file.
3665 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3666 because we found we can't eliminate some register. In that case, no pseudos
3667 are allowed to be in the register, even if they are only in a block that
3668 doesn't require spill registers, unlike the case when we are spilling this
3669 hard reg to produce another spill register.
3671 Return nonzero if any pseudos needed to be kicked out. */
3673 static int
3674 spill_hard_reg (regno, global, dumpfile, cant_eliminate)
3675 register int regno;
3676 int global;
3677 FILE *dumpfile;
3678 int cant_eliminate;
3680 enum reg_class class = REGNO_REG_CLASS (regno);
3681 int something_changed = 0;
3682 register int i;
3684 SET_HARD_REG_BIT (forbidden_regs, regno);
3686 if (cant_eliminate)
3687 regs_ever_live[regno] = 1;
3689 /* Spill every pseudo reg that was allocated to this reg
3690 or to something that overlaps this reg. */
3692 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3693 if (reg_renumber[i] >= 0
3694 && reg_renumber[i] <= regno
3695 && (reg_renumber[i]
3696 + HARD_REGNO_NREGS (reg_renumber[i],
3697 PSEUDO_REGNO_MODE (i))
3698 > regno))
3700 /* If this register belongs solely to a basic block which needed no
3701 spilling of any class that this register is contained in,
3702 leave it be, unless we are spilling this register because
3703 it was a hard register that can't be eliminated. */
3705 if (! cant_eliminate
3706 && basic_block_needs[0]
3707 && REG_BASIC_BLOCK (i) >= 0
3708 && basic_block_needs[(int) class][REG_BASIC_BLOCK (i)] == 0)
3710 enum reg_class *p;
3712 for (p = reg_class_superclasses[(int) class];
3713 *p != LIM_REG_CLASSES; p++)
3714 if (basic_block_needs[(int) *p][REG_BASIC_BLOCK (i)] > 0)
3715 break;
3717 if (*p == LIM_REG_CLASSES)
3718 continue;
3721 /* Mark it as no longer having a hard register home. */
3722 reg_renumber[i] = -1;
3723 /* We will need to scan everything again. */
3724 something_changed = 1;
3725 if (global)
3726 retry_global_alloc (i, forbidden_regs);
3728 alter_reg (i, regno);
3729 if (dumpfile)
3731 if (reg_renumber[i] == -1)
3732 fprintf (dumpfile, " Register %d now on stack.\n\n", i);
3733 else
3734 fprintf (dumpfile, " Register %d now in %d.\n\n",
3735 i, reg_renumber[i]);
3738 for (i = 0; i < scratch_list_length; i++)
3740 if (scratch_list[i] && REGNO (scratch_list[i]) == regno)
3742 if (! cant_eliminate && basic_block_needs[0]
3743 && ! basic_block_needs[(int) class][scratch_block[i]])
3745 enum reg_class *p;
3747 for (p = reg_class_superclasses[(int) class];
3748 *p != LIM_REG_CLASSES; p++)
3749 if (basic_block_needs[(int) *p][scratch_block[i]] > 0)
3750 break;
3752 if (*p == LIM_REG_CLASSES)
3753 continue;
3755 PUT_CODE (scratch_list[i], SCRATCH);
3756 scratch_list[i] = 0;
3757 something_changed = 1;
3758 continue;
3762 return something_changed;
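 /* Editor's note: a hypothetical helper (not part of this file) spelling out
    the overlap test used in the loop above.  A pseudo whose home is hard
    register HOME and which occupies NREGS consecutive hard registers
    overlaps hard register REGNO exactly when HOME <= REGNO < HOME + NREGS.
    Wrapped in "#if 0" so it is never compiled.  */
 #if 0
 static int
 pseudo_overlaps_hard_reg_p (home, nregs, regno)
      int home, nregs, regno;
 {
   return home >= 0 && home <= regno && home + nregs > regno;
 }
 #endif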
3765 /* Find all paradoxical subregs within X and update reg_max_ref_width.
3766 Also mark any hard registers used to store user variables as
3767 forbidden from being used for spill registers. */
3769 static void
3770 scan_paradoxical_subregs (x)
3771 register rtx x;
3773 register int i;
3774 register char *fmt;
3775 register enum rtx_code code = GET_CODE (x);
3777 switch (code)
3779 case REG:
3780 if (SMALL_REGISTER_CLASSES && REGNO (x) < FIRST_PSEUDO_REGISTER
3781 && REG_USERVAR_P (x))
3782 SET_HARD_REG_BIT (forbidden_regs, REGNO (x));
3783 return;
3785 case CONST_INT:
3786 case CONST:
3787 case SYMBOL_REF:
3788 case LABEL_REF:
3789 case CONST_DOUBLE:
3790 case CC0:
3791 case PC:
3792 case USE:
3793 case CLOBBER:
3794 return;
3796 case SUBREG:
3797 if (GET_CODE (SUBREG_REG (x)) == REG
3798 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3799 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3800 = GET_MODE_SIZE (GET_MODE (x));
3801 return;
3803 default:
3804 break;
3807 fmt = GET_RTX_FORMAT (code);
3808 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3810 if (fmt[i] == 'e')
3811 scan_paradoxical_subregs (XEXP (x, i));
3812 else if (fmt[i] == 'E')
3814 register int j;
3815 for (j = XVECLEN (x, i) - 1; j >=0; j--)
3816 scan_paradoxical_subregs (XVECEXP (x, i, j));
3821 static int
3822 hard_reg_use_compare (p1p, p2p)
3823 const GENERIC_PTR p1p;
3824 const GENERIC_PTR p2p;
3826 struct hard_reg_n_uses *p1 = (struct hard_reg_n_uses *)p1p,
3827 *p2 = (struct hard_reg_n_uses *)p2p;
3828 int tem = p1->uses - p2->uses;
3829 if (tem != 0) return tem;
3830 /* If regs are equally good, sort by regno,
3831 so that the results of qsort leave nothing to chance. */
3832 return p1->regno - p2->regno;
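 /* Editor's note: a hypothetical, self-contained illustration (not part of
    this file) of the comparator pattern used above: compare on the primary
    key, then break ties on a key that is unique (here the register number),
    so that qsort -- which is not a stable sort -- always yields the same
    ordering.  Wrapped in "#if 0" so it is never compiled.  */
 #if 0
 #include <stdlib.h>

 struct count { int uses, regno; };

 static int
 count_compare (p1p, p2p)
      const void *p1p, *p2p;
 {
   const struct count *p1 = (const struct count *) p1p;
   const struct count *p2 = (const struct count *) p2p;

   if (p1->uses != p2->uses)
     return p1->uses - p2->uses;	/* least-used first */
   return p1->regno - p2->regno;	/* unique tie-break */
 }

 static void
 count_compare_example ()
 {
   struct count v[4] = {{3, 2}, {0, 7}, {3, 1}, {0, 5}};

   qsort (v, 4, sizeof v[0], count_compare);
   /* v is now {0,5} {0,7} {3,1} {3,2}, identically on every run.  */
 }
 #endif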
3835 /* Choose the order to consider regs for use as reload registers
3836 based on how much trouble would be caused by spilling one.
3837 Store them in order of decreasing preference in potential_reload_regs. */
3839 static void
3840 order_regs_for_reload (global)
3841 int global;
3843 register int i;
3844 register int o = 0;
3845 int large = 0;
3847 struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];
3849 CLEAR_HARD_REG_SET (bad_spill_regs);
3851 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3852 potential_reload_regs[i] = -1;
3854 /* Count number of uses of each hard reg by pseudo regs allocated to it
3855 and then order them by decreasing use. */
3857 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3859 hard_reg_n_uses[i].uses = 0;
3860 hard_reg_n_uses[i].regno = i;
3863 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3865 int regno = reg_renumber[i];
3866 if (regno >= 0)
3868 int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
3869 while (regno < lim)
3871 /* If allocated by local-alloc, show more uses since
3872 we're not going to be able to reallocate it, but
3873 we might if allocated by global alloc. */
3874 if (global && reg_allocno[i] < 0)
3875 hard_reg_n_uses[regno].uses += (REG_N_REFS (i) + 1) / 2;
3877 hard_reg_n_uses[regno++].uses += REG_N_REFS (i);
3880 large += REG_N_REFS (i);
3883 /* Now fixed registers (which cannot safely be used for reloading)
3884 get a very high use count so they will be considered least desirable.
3885 Registers used explicitly in the rtl code are almost as bad. */
3887 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3889 if (fixed_regs[i])
3891 hard_reg_n_uses[i].uses += 2 * large + 2;
3892 SET_HARD_REG_BIT (bad_spill_regs, i);
3894 else if (regs_explicitly_used[i])
3896 hard_reg_n_uses[i].uses += large + 1;
3897 if (! SMALL_REGISTER_CLASSES)
3898 /* ??? We are doing this here because of the potential
3899 that bad code may be generated if a register explicitly
3900 used in an insn was used as a spill register for that
3901 insn. But not using these as spill registers may lose
3902 on some machine. We'll have to see how this works out. */
3903 SET_HARD_REG_BIT (bad_spill_regs, i);
3906 hard_reg_n_uses[HARD_FRAME_POINTER_REGNUM].uses += 2 * large + 2;
3907 SET_HARD_REG_BIT (bad_spill_regs, HARD_FRAME_POINTER_REGNUM);
3909 #ifdef ELIMINABLE_REGS
3910 /* If registers other than the frame pointer are eliminable, mark them as
3911 poor choices. */
3912 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3914 hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
3915 SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
3917 #endif
3919 /* Prefer registers not so far used, for use in temporary loading.
3920 Among them, if REG_ALLOC_ORDER is defined, use that order.
3921 Otherwise, prefer registers not preserved by calls. */
3923 #ifdef REG_ALLOC_ORDER
3924 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3926 int regno = reg_alloc_order[i];
3928 if (hard_reg_n_uses[regno].uses == 0)
3929 potential_reload_regs[o++] = regno;
3931 #else
3932 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3934 if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
3935 potential_reload_regs[o++] = i;
3937 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3939 if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
3940 potential_reload_regs[o++] = i;
3942 #endif
3944 qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
3945 sizeof hard_reg_n_uses[0], hard_reg_use_compare);
3947 /* Now add the regs that are already used,
3948 preferring those used less often. The fixed and otherwise forbidden
3949 registers will be at the end of this list. */
3951 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3952 if (hard_reg_n_uses[i].uses != 0)
3953 potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
3956 /* Used in reload_as_needed to sort the spilled regs. */
3958 static int
3959 compare_spill_regs (r1p, r2p)
3960 const GENERIC_PTR r1p;
3961 const GENERIC_PTR r2p;
3963 short r1 = *(short *)r1p, r2 = *(short *)r2p;
3964 return r1 - r2;
3967 /* Reload pseudo-registers into hard regs around each insn as needed.
3968 Additional register load insns are output before the insn that needs it
3969 and perhaps store insns after insns that modify the reloaded pseudo reg.
3971 reg_last_reload_reg and reg_reloaded_contents keep track of
3972 which registers are already available in reload registers.
3973 We update these for the reloads that we perform,
3974 as the insns are scanned. */
3976 static void
3977 reload_as_needed (first, live_known)
3978 rtx first;
3979 int live_known;
3981 register rtx insn;
3982 register int i;
3983 int this_block = 0;
3984 rtx x;
3985 rtx after_call = 0;
3987 bzero ((char *) spill_reg_rtx, sizeof spill_reg_rtx);
3988 bzero ((char *) spill_reg_store, sizeof spill_reg_store);
3989 reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
3990 bzero ((char *) reg_last_reload_reg, max_regno * sizeof (rtx));
3991 reg_has_output_reload = (char *) alloca (max_regno);
3992 for (i = 0; i < n_spills; i++)
3994 reg_reloaded_contents[i] = -1;
3995 reg_reloaded_insn[i] = 0;
3998 /* Reset all offsets on eliminable registers to their initial values. */
3999 #ifdef ELIMINABLE_REGS
4000 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
4002 INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
4003 reg_eliminate[i].initial_offset);
4004 reg_eliminate[i].previous_offset
4005 = reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
4007 #else
4008 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
4009 reg_eliminate[0].previous_offset
4010 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
4011 #endif
4013 num_not_at_initial_offset = 0;
4015 /* Order the spilled regs, so that allocate_reload_regs can guarantee to
4016 pack registers with group needs. */
4017 if (n_spills > 1)
4019 qsort (spill_regs, n_spills, sizeof (short), compare_spill_regs);
4020 for (i = 0; i < n_spills; i++)
4021 spill_reg_order[spill_regs[i]] = i;
4024 for (insn = first; insn;)
4026 register rtx next = NEXT_INSN (insn);
4028 /* Notice when we move to a new basic block. */
4029 if (live_known && this_block + 1 < n_basic_blocks
4030 && insn == basic_block_head[this_block+1])
4031 ++this_block;
4033 /* If we pass a label, copy the offsets from the label information
4034 into the current offsets of each elimination. */
4035 if (GET_CODE (insn) == CODE_LABEL)
4037 num_not_at_initial_offset = 0;
4038 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
4040 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
4041 = offsets_at[CODE_LABEL_NUMBER (insn)][i];
4042 if (reg_eliminate[i].can_eliminate
4043 && (reg_eliminate[i].offset
4044 != reg_eliminate[i].initial_offset))
4045 num_not_at_initial_offset++;
4049 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
4051 rtx avoid_return_reg = 0;
4052 rtx oldpat = PATTERN (insn);
4054 /* Set avoid_return_reg if this is an insn
4055 that might use the value of a function call. */
4056 if (SMALL_REGISTER_CLASSES && GET_CODE (insn) == CALL_INSN)
4058 if (GET_CODE (PATTERN (insn)) == SET)
4059 after_call = SET_DEST (PATTERN (insn));
4060 else if (GET_CODE (PATTERN (insn)) == PARALLEL
4061 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
4062 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
4063 else
4064 after_call = 0;
4066 else if (SMALL_REGISTER_CLASSES && after_call != 0
4067 && !(GET_CODE (PATTERN (insn)) == SET
4068 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
4070 if (reg_referenced_p (after_call, PATTERN (insn)))
4071 avoid_return_reg = after_call;
4072 after_call = 0;
4075 /* If this is a USE or CLOBBER of a MEM, ensure that any
4076 references to eliminable registers have been removed. */
4078 if ((GET_CODE (PATTERN (insn)) == USE
4079 || GET_CODE (PATTERN (insn)) == CLOBBER)
4080 && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
4081 XEXP (XEXP (PATTERN (insn), 0), 0)
4082 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
4083 GET_MODE (XEXP (PATTERN (insn), 0)),
4084 NULL_RTX, 0);
4086 /* If we need to do register elimination processing, do so.
4087 This might delete the insn, in which case we are done. */
4088 if (num_eliminable && GET_MODE (insn) == QImode)
4090 eliminate_regs_in_insn (insn, 1);
4091 if (GET_CODE (insn) == NOTE)
4093 insn = next;
4094 continue;
4098 if (GET_MODE (insn) == VOIDmode)
4099 n_reloads = 0;
4100 /* First find the pseudo regs that must be reloaded for this insn.
4101 This info is returned in the tables reload_... (see reload.h).
4102 Also modify the body of INSN by substituting RELOAD
4103 rtx's for those pseudo regs. */
4104 else
4106 bzero (reg_has_output_reload, max_regno);
4107 CLEAR_HARD_REG_SET (reg_is_output_reload);
4109 find_reloads (insn, 1, spill_indirect_levels, live_known,
4110 spill_reg_order);
4113 if (n_reloads > 0)
4115 rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
4116 rtx p;
4117 int class;
4119 /* If this block has not had spilling done for a
4120 particular class and we have any non-optionals that need a
4121 spill reg in that class, abort. */
4123 for (class = 0; class < N_REG_CLASSES; class++)
4124 if (basic_block_needs[class] != 0
4125 && basic_block_needs[class][this_block] == 0)
4126 for (i = 0; i < n_reloads; i++)
4127 if (class == (int) reload_reg_class[i]
4128 && reload_reg_rtx[i] == 0
4129 && ! reload_optional[i]
4130 && (reload_in[i] != 0 || reload_out[i] != 0
4131 || reload_secondary_p[i] != 0))
4132 fatal_insn ("Non-optional registers need a spill register", insn);
4134 /* Now compute which reload regs to reload them into. Perhaps
4135 reusing reload regs from previous insns, or else output
4136 load insns to reload them. Maybe output store insns too.
4137 Record the choices of reload reg in reload_reg_rtx. */
4138 choose_reload_regs (insn, avoid_return_reg);
4140 /* Merge any reloads that we didn't combine for fear of
4141 increasing the number of spill registers needed but now
4142 discover can be safely merged. */
4143 if (SMALL_REGISTER_CLASSES)
4144 merge_assigned_reloads (insn);
4146 /* Generate the insns to reload operands into or out of
4147 their reload regs. */
4148 emit_reload_insns (insn);
4150 /* Substitute the chosen reload regs from reload_reg_rtx
4151 into the insn's body (or perhaps into the bodies of other
4152 load and store insns that we just made for reloading
4153 and that we moved the structure into). */
4154 subst_reloads ();
4156 /* If this was an ASM, make sure that all the reload insns
4157 we have generated are valid. If not, give an error
4158 and delete them. */
4160 if (asm_noperands (PATTERN (insn)) >= 0)
4161 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
4162 if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
4163 && (recog_memoized (p) < 0
4164 || (insn_extract (p),
4165 ! constrain_operands (INSN_CODE (p), 1))))
4167 error_for_asm (insn,
4168 "`asm' operand requires impossible reload");
4169 PUT_CODE (p, NOTE);
4170 NOTE_SOURCE_FILE (p) = 0;
4171 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
4174 /* Any previously reloaded spilled pseudo reg, stored in this insn,
4175 is no longer validly lying around to save a future reload.
4176 Note that this does not detect pseudos that were reloaded
4177 for this insn in order to be stored in
4178 (obeying register constraints). That is correct; such reload
4179 registers ARE still valid. */
4180 note_stores (oldpat, forget_old_reloads_1);
4182 /* There may have been CLOBBER insns placed after INSN. So scan
4183 between INSN and NEXT and use them to forget old reloads. */
4184 for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
4185 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
4186 note_stores (PATTERN (x), forget_old_reloads_1);
4188 #ifdef AUTO_INC_DEC
4189 /* Likewise for regs altered by auto-increment in this insn.
4190 But note that the reg-notes are not changed by reloading:
4191 they still contain the pseudo-regs, not the spill regs. */
4192 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
4193 if (REG_NOTE_KIND (x) == REG_INC)
4195 /* See if this pseudo reg was reloaded in this insn.
4196 If so, its last-reload info is still valid
4197 because it is based on this insn's reload. */
4198 for (i = 0; i < n_reloads; i++)
4199 if (reload_out[i] == XEXP (x, 0))
4200 break;
4202 if (i == n_reloads)
4203 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
4205 #endif
4207 /* A reload reg's contents are unknown after a label. */
4208 if (GET_CODE (insn) == CODE_LABEL)
4209 for (i = 0; i < n_spills; i++)
4211 reg_reloaded_contents[i] = -1;
4212 reg_reloaded_insn[i] = 0;
4215 /* Don't assume a reload reg is still good after a call insn
4216 if it is a call-used reg. */
4217 else if (GET_CODE (insn) == CALL_INSN)
4218 for (i = 0; i < n_spills; i++)
4219 if (call_used_regs[spill_regs[i]])
4221 reg_reloaded_contents[i] = -1;
4222 reg_reloaded_insn[i] = 0;
4225 /* In case registers overlap, allow certain insns to invalidate
4226 particular hard registers. */
4228 #ifdef INSN_CLOBBERS_REGNO_P
4229 for (i = 0 ; i < n_spills ; i++)
4230 if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
4232 reg_reloaded_contents[i] = -1;
4233 reg_reloaded_insn[i] = 0;
4235 #endif
4237 insn = next;
4239 #ifdef USE_C_ALLOCA
4240 alloca (0);
4241 #endif
4245 /* Discard all record of any value reloaded from X,
4246 or reloaded in X from someplace else;
4247 unless X is an output reload reg of the current insn.
4249 X may be a hard reg (the reload reg)
4250 or it may be a pseudo reg that was reloaded from. */
4252 static void
4253 forget_old_reloads_1 (x, ignored)
4254 rtx x;
4255 rtx ignored;
4257 register int regno;
4258 int nr;
4259 int offset = 0;
4261 /* note_stores does give us subregs of hard regs. */
4262 while (GET_CODE (x) == SUBREG)
4264 offset += SUBREG_WORD (x);
4265 x = SUBREG_REG (x);
4268 if (GET_CODE (x) != REG)
4269 return;
4271 regno = REGNO (x) + offset;
4273 if (regno >= FIRST_PSEUDO_REGISTER)
4274 nr = 1;
4275 else
4277 int i;
4278 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
4279 /* Storing into a spilled-reg invalidates its contents.
4280 This can happen if a block-local pseudo is allocated to that reg
4281 and it wasn't spilled because this block's total need is 0.
4282 Then some insn might have an optional reload and use this reg. */
4283 for (i = 0; i < nr; i++)
4284 if (spill_reg_order[regno + i] >= 0
4285 /* But don't do this if the reg actually serves as an output
4286 reload reg in the current instruction. */
4287 && (n_reloads == 0
4288 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
4290 reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
4291 reg_reloaded_insn[spill_reg_order[regno + i]] = 0;
4295 /* Since value of X has changed,
4296 forget any value previously copied from it. */
4298 while (nr-- > 0)
4299 /* But don't forget a copy if this is the output reload
4300 that establishes the copy's validity. */
4301 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
4302 reg_last_reload_reg[regno + nr] = 0;
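 /* Editor's note: a hypothetical sketch (not part of this file) of the
    bookkeeping idea above.  Two tables cache "hard reg H currently holds a
    copy of pseudo P"; a store into either H or P invalidates the pairing,
    unless the store is the very output reload that establishes the copy.
    The array sizes are arbitrary.  Wrapped in "#if 0" so it is never
    compiled.  */
 #if 0
 #define SKETCH_NHARD   16
 #define SKETCH_NPSEUDO 128

 static int copy_of_pseudo[SKETCH_NPSEUDO]; /* pseudo -> hard reg, or -1 */
 static int held_pseudo[SKETCH_NHARD];	    /* hard reg -> pseudo, or -1 */

 /* Pseudo P was stored into (and the store is not its own output reload):
    no hard register still holds a valid copy of P.  */
 static void
 forget_copies_of_pseudo (p)
      int p;
 {
   if (copy_of_pseudo[p] >= 0)
     held_pseudo[copy_of_pseudo[p]] = -1;
   copy_of_pseudo[p] = -1;
 }

 /* Hard register H was stored into: it no longer holds whatever it held.  */
 static void
 forget_contents_of_hard_reg (h)
      int h;
 {
   if (held_pseudo[h] >= 0)
     copy_of_pseudo[held_pseudo[h]] = -1;
   held_pseudo[h] = -1;
 }
 #endif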
4305 /* For each reload, the mode of the reload register. */
4306 static enum machine_mode reload_mode[MAX_RELOADS];
4308 /* For each reload, the largest number of registers it will require. */
4309 static int reload_nregs[MAX_RELOADS];
4311 /* Comparison function for qsort to decide which of two reloads
4312 should be handled first. *P1 and *P2 are the reload numbers. */
4314 static int
4315 reload_reg_class_lower (r1p, r2p)
4316 const GENERIC_PTR r1p;
4317 const GENERIC_PTR r2p;
4319 register int r1 = *(short *)r1p, r2 = *(short *)r2p;
4320 register int t;
4322 /* Consider required reloads before optional ones. */
4323 t = reload_optional[r1] - reload_optional[r2];
4324 if (t != 0)
4325 return t;
4327 /* Count all solitary classes before non-solitary ones. */
4328 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
4329 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
4330 if (t != 0)
4331 return t;
4333 /* Aside from solitaires, consider all multi-reg groups first. */
4334 t = reload_nregs[r2] - reload_nregs[r1];
4335 if (t != 0)
4336 return t;
4338 /* Consider reloads in order of increasing reg-class number. */
4339 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
4340 if (t != 0)
4341 return t;
4343 /* If reloads are equally urgent, sort by reload number,
4344 so that the results of qsort leave nothing to chance. */
4345 return r1 - r2;
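 /* Editor's note: a hypothetical, stand-alone illustration (not part of this
    file) of sorting an index vector, as is done with reload_order[] and the
    comparator above: the records themselves stay put and only the small
    array of indices is permuted, which is convenient when many parallel
    arrays are indexed by the same number.  Wrapped in "#if 0" so it is never
    compiled.  */
 #if 0
 #include <stdlib.h>

 #define SKETCH_N 8

 static int sketch_key[SKETCH_N];	/* stands in for the per-index keys */

 static int
 index_compare (p1p, p2p)
      const void *p1p, *p2p;
 {
   short i = *(const short *) p1p, j = *(const short *) p2p;

   if (sketch_key[i] != sketch_key[j])
     return sketch_key[i] - sketch_key[j];
   return i - j;			/* deterministic tie-break */
 }

 static void
 index_sort_example ()
 {
   short order[SKETCH_N];
   int i;

   for (i = 0; i < SKETCH_N; i++)
     order[i] = i;
   qsort (order, SKETCH_N, sizeof (short), index_compare);
   /* order[0] is now the index with the smallest key.  */
 }
 #endif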
4348 /* The following HARD_REG_SETs indicate when each hard register is
4349 used for a reload of various parts of the current insn. */
4351 /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4352 static HARD_REG_SET reload_reg_used;
4353 /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4354 static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4355 /* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I. */
4356 static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
4357 /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4358 static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4359 /* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I. */
4360 static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
4361 /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4362 static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4363 /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4364 static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4365 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4366 static HARD_REG_SET reload_reg_used_in_op_addr;
4367 /* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4368 static HARD_REG_SET reload_reg_used_in_op_addr_reload;
4369 /* If reg is in use for a RELOAD_FOR_INSN reload. */
4370 static HARD_REG_SET reload_reg_used_in_insn;
4371 /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4372 static HARD_REG_SET reload_reg_used_in_other_addr;
4374 /* If reg is in use as a reload reg for any sort of reload. */
4375 static HARD_REG_SET reload_reg_used_at_all;
4377 /* If reg is in use as an inherited reload. We just mark the first register
4378 in the group. */
4379 static HARD_REG_SET reload_reg_used_for_inherit;
4381 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4382 TYPE. MODE is used to indicate how many consecutive regs are
4383 actually used. */
4385 static void
4386 mark_reload_reg_in_use (regno, opnum, type, mode)
4387 int regno;
4388 int opnum;
4389 enum reload_type type;
4390 enum machine_mode mode;
4392 int nregs = HARD_REGNO_NREGS (regno, mode);
4393 int i;
4395 for (i = regno; i < nregs + regno; i++)
4397 switch (type)
4399 case RELOAD_OTHER:
4400 SET_HARD_REG_BIT (reload_reg_used, i);
4401 break;
4403 case RELOAD_FOR_INPUT_ADDRESS:
4404 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4405 break;
4407 case RELOAD_FOR_INPADDR_ADDRESS:
4408 SET_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4409 break;
4411 case RELOAD_FOR_OUTPUT_ADDRESS:
4412 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4413 break;
4415 case RELOAD_FOR_OUTADDR_ADDRESS:
4416 SET_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4417 break;
4419 case RELOAD_FOR_OPERAND_ADDRESS:
4420 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4421 break;
4423 case RELOAD_FOR_OPADDR_ADDR:
4424 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4425 break;
4427 case RELOAD_FOR_OTHER_ADDRESS:
4428 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4429 break;
4431 case RELOAD_FOR_INPUT:
4432 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4433 break;
4435 case RELOAD_FOR_OUTPUT:
4436 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4437 break;
4439 case RELOAD_FOR_INSN:
4440 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
4441 break;
4444 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
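 /* Editor's note: a hypothetical, self-contained sketch (not part of this
    file) of the marking scheme above.  A value in a wide mode occupies
    HARD_REGNO_NREGS consecutive hard registers, so the reload claims every
    register in [REGNO, REGNO + NREGS) and conflicts are then detected per
    hard register.  Wrapped in "#if 0" so it is never compiled.  */
 #if 0
 #define SKETCH_NREGS 64

 static char sketch_in_use[SKETCH_NREGS];

 static void
 sketch_mark_range (regno, nregs)
      int regno, nregs;
 {
   int i;

   for (i = regno; i < regno + nregs; i++)
     sketch_in_use[i] = 1;
 }

 static int
 sketch_range_free_p (regno, nregs)
      int regno, nregs;
 {
   int i;

   for (i = regno; i < regno + nregs; i++)
     if (sketch_in_use[i])
       return 0;
   return 1;
 }
 #endif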
4448 /* Similarly, but show REGNO is no longer in use for a reload. */
4450 static void
4451 clear_reload_reg_in_use (regno, opnum, type, mode)
4452 int regno;
4453 int opnum;
4454 enum reload_type type;
4455 enum machine_mode mode;
4457 int nregs = HARD_REGNO_NREGS (regno, mode);
4458 int i;
4460 for (i = regno; i < nregs + regno; i++)
4462 switch (type)
4464 case RELOAD_OTHER:
4465 CLEAR_HARD_REG_BIT (reload_reg_used, i);
4466 break;
4468 case RELOAD_FOR_INPUT_ADDRESS:
4469 CLEAR_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4470 break;
4472 case RELOAD_FOR_INPADDR_ADDRESS:
4473 CLEAR_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4474 break;
4476 case RELOAD_FOR_OUTPUT_ADDRESS:
4477 CLEAR_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4478 break;
4480 case RELOAD_FOR_OUTADDR_ADDRESS:
4481 CLEAR_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4482 break;
4484 case RELOAD_FOR_OPERAND_ADDRESS:
4485 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4486 break;
4488 case RELOAD_FOR_OPADDR_ADDR:
4489 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4490 break;
4492 case RELOAD_FOR_OTHER_ADDRESS:
4493 CLEAR_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4494 break;
4496 case RELOAD_FOR_INPUT:
4497 CLEAR_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4498 break;
4500 case RELOAD_FOR_OUTPUT:
4501 CLEAR_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4502 break;
4504 case RELOAD_FOR_INSN:
4505 CLEAR_HARD_REG_BIT (reload_reg_used_in_insn, i);
4506 break;
4511 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
4512 specified by OPNUM and TYPE. */
4514 static int
4515 reload_reg_free_p (regno, opnum, type)
4516 int regno;
4517 int opnum;
4518 enum reload_type type;
4520 int i;
4522 /* In use for a RELOAD_OTHER means it's not available for anything. */
4523 if (TEST_HARD_REG_BIT (reload_reg_used, regno))
4524 return 0;
4526 switch (type)
4528 case RELOAD_OTHER:
4529 /* In use for anything means we can't use it for RELOAD_OTHER. */
4530 if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4531 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4532 || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4533 return 0;
4535 for (i = 0; i < reload_n_operands; i++)
4536 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4537 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4538 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4539 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4540 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4541 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4542 return 0;
4544 return 1;
4546 case RELOAD_FOR_INPUT:
4547 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4548 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4549 return 0;
4551 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4552 return 0;
4554 /* If it is used for some other input, can't use it. */
4555 for (i = 0; i < reload_n_operands; i++)
4556 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4557 return 0;
4559 /* If it is used in a later operand's address, can't use it. */
4560 for (i = opnum + 1; i < reload_n_operands; i++)
4561 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4562 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4563 return 0;
4565 return 1;
4567 case RELOAD_FOR_INPUT_ADDRESS:
4568 /* Can't use a register if it is used for an input address for this
4569 operand or used as an input in an earlier one. */
4570 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
4571 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4572 return 0;
4574 for (i = 0; i < opnum; i++)
4575 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4576 return 0;
4578 return 1;
4580 case RELOAD_FOR_INPADDR_ADDRESS:
4581 /* Can't use a register if it is used for an input address
4582 address for this operand or used as an input in an earlier
4583 one. */
4584 if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4585 return 0;
4587 for (i = 0; i < opnum; i++)
4588 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4589 return 0;
4591 return 1;
4593 case RELOAD_FOR_OUTPUT_ADDRESS:
4594 /* Can't use a register if it is used for an output address for this
4595 operand or used as an output in this or a later operand. */
4596 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4597 return 0;
4599 for (i = opnum; i < reload_n_operands; i++)
4600 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4601 return 0;
4603 return 1;
4605 case RELOAD_FOR_OUTADDR_ADDRESS:
4606 /* Can't use a register if it is used for an output address
4607 address for this operand or used as an output in this or a
4608 later operand. */
4609 if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4610 return 0;
4612 for (i = opnum; i < reload_n_operands; i++)
4613 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4614 return 0;
4616 return 1;
4618 case RELOAD_FOR_OPERAND_ADDRESS:
4619 for (i = 0; i < reload_n_operands; i++)
4620 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4621 return 0;
4623 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4624 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4626 case RELOAD_FOR_OPADDR_ADDR:
4627 for (i = 0; i < reload_n_operands; i++)
4628 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4629 return 0;
4631 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
4633 case RELOAD_FOR_OUTPUT:
4634 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4635 outputs, or an operand address for this or an earlier output. */
4636 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4637 return 0;
4639 for (i = 0; i < reload_n_operands; i++)
4640 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4641 return 0;
4643 for (i = 0; i <= opnum; i++)
4644 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4645 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4646 return 0;
4648 return 1;
4650 case RELOAD_FOR_INSN:
4651 for (i = 0; i < reload_n_operands; i++)
4652 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4653 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4654 return 0;
4656 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4657 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4659 case RELOAD_FOR_OTHER_ADDRESS:
4660 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4662 abort ();
4665 /* Return 1 if the value in reload reg REGNO, as used by a reload
4666 needed for the part of the insn specified by OPNUM and TYPE,
4667 is not in use for a reload in any prior part of the insn.
4669 We can assume that the reload reg was already tested for availability
4670 at the time it is needed, and we should not check this again,
4671 in case the reg has already been marked in use. */
4673 static int
4674 reload_reg_free_before_p (regno, opnum, type)
4675 int regno;
4676 int opnum;
4677 enum reload_type type;
4679 int i;
4681 switch (type)
4683 case RELOAD_FOR_OTHER_ADDRESS:
4684 /* These always come first. */
4685 return 1;
4687 case RELOAD_OTHER:
4688 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4690 /* If this use is for part of the insn,
4691 check that the reg is not in use for any prior part. It is tempting
4692 to try to do this by falling through from objects that occur
4693 later in the insn to ones that occur earlier, but that will not
4694 correctly take into account the fact that here we MUST ignore
4695 things that would prevent the register from being allocated in
4696 the first place, since we know that it was allocated. */
4698 case RELOAD_FOR_OUTPUT_ADDRESS:
4699 case RELOAD_FOR_OUTADDR_ADDRESS:
4700 /* Earlier reloads are for earlier outputs or their addresses,
4701 any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
4702 RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
4703 RELOAD_OTHER). */
4704 for (i = 0; i < opnum; i++)
4705 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4706 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4707 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4708 return 0;
4710 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4711 return 0;
4713 for (i = 0; i < reload_n_operands; i++)
4714 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4715 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4716 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4717 return 0;
4719 return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4720 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4721 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4723 case RELOAD_FOR_OUTPUT:
4724 /* This can't be used in the output address for this operand and
4725 anything that can't be used for it, except that we've already
4726 tested for RELOAD_FOR_INSN objects. */
4728 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno)
4729 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4730 return 0;
4732 for (i = 0; i < opnum; i++)
4733 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4734 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4735 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4736 return 0;
4738 for (i = 0; i < reload_n_operands; i++)
4739 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4740 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4741 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4742 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4743 return 0;
4745 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4747 case RELOAD_FOR_OPERAND_ADDRESS:
4748 /* Earlier reloads include RELOAD_FOR_OPADDR_ADDR reloads. */
4749 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4750 return 0;
4752 /* ... fall through ... */
4754 case RELOAD_FOR_OPADDR_ADDR:
4755 case RELOAD_FOR_INSN:
4756 /* These can't conflict with inputs, or each other, so all we have to
4757 test is input addresses and the addresses of OTHER items. */
4759 for (i = 0; i < reload_n_operands; i++)
4760 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4761 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4762 return 0;
4764 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4766 case RELOAD_FOR_INPUT:
4767 /* The only things earlier are the address for this and
4768 earlier inputs, other inputs (which we know we don't conflict
4769 with), and addresses of RELOAD_OTHER objects. */
4771 for (i = 0; i <= opnum; i++)
4772 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4773 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4774 return 0;
4776 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4778 case RELOAD_FOR_INPUT_ADDRESS:
4779 case RELOAD_FOR_INPADDR_ADDRESS:
4780 /* Similarly, all we have to check is for use in earlier inputs'
4781 addresses. */
4782 for (i = 0; i < opnum; i++)
4783 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4784 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4785 return 0;
4787 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4789 abort ();
4792 /* Return 1 if the value in reload reg REGNO, as used by a reload
4793 needed for the part of the insn specified by OPNUM and TYPE,
4794 is still available in REGNO at the end of the insn.
4796 We can assume that the reload reg was already tested for availability
4797 at the time it is needed, and we should not check this again,
4798 in case the reg has already been marked in use. */
4800 static int
4801 reload_reg_reaches_end_p (regno, opnum, type)
4802 int regno;
4803 int opnum;
4804 enum reload_type type;
4806 int i;
4808 switch (type)
4810 case RELOAD_OTHER:
4811 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4812 its value must reach the end. */
4813 return 1;
4815 /* If this use is for part of the insn,
4816 its value reaches the end if no subsequent part uses the same register.
4817 Just like the above function, don't try to do this with lots
4818 of fallthroughs. */
4820 case RELOAD_FOR_OTHER_ADDRESS:
4821 /* Here we check for everything else, since these don't conflict
4822 with anything else and everything comes later. */
4824 for (i = 0; i < reload_n_operands; i++)
4825 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4826 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4827 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4828 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4829 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4830 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4831 return 0;
4833 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4834 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4835 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4837 case RELOAD_FOR_INPUT_ADDRESS:
4838 case RELOAD_FOR_INPADDR_ADDRESS:
4839 /* Similar, except that we check only for this and subsequent inputs
4840 and the address of only subsequent inputs and we do not need
4841 to check for RELOAD_OTHER objects since they are known not to
4842 conflict. */
4844 for (i = opnum; i < reload_n_operands; i++)
4845 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4846 return 0;
4848 for (i = opnum + 1; i < reload_n_operands; i++)
4849 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4850 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4851 return 0;
4853 for (i = 0; i < reload_n_operands; i++)
4854 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4855 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4856 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4857 return 0;
4859 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4860 return 0;
4862 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4863 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4865 case RELOAD_FOR_INPUT:
4866 /* Similar to input address, except we start at the next operand for
4867 both input and input address and we do not check for
4868 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4869 would conflict. */
4871 for (i = opnum + 1; i < reload_n_operands; i++)
4872 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4873 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4874 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4875 return 0;
4877 /* ... fall through ... */
4879 case RELOAD_FOR_OPERAND_ADDRESS:
4880 /* Check outputs and their addresses. */
4882 for (i = 0; i < reload_n_operands; i++)
4883 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4884 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4885 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4886 return 0;
4888 return 1;
4890 case RELOAD_FOR_OPADDR_ADDR:
4891 for (i = 0; i < reload_n_operands; i++)
4892 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4893 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4894 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4895 return 0;
4897 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4898 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4900 case RELOAD_FOR_INSN:
4901 /* These conflict with other outputs with RELOAD_OTHER. So
4902 we need only check for output addresses. */
4904 opnum = -1;
4906 /* ... fall through ... */
4908 case RELOAD_FOR_OUTPUT:
4909 case RELOAD_FOR_OUTPUT_ADDRESS:
4910 case RELOAD_FOR_OUTADDR_ADDRESS:
4911 /* We already know these can't conflict with a later output. So the
4912 only things to check are later output addresses. */
4913 for (i = opnum + 1; i < reload_n_operands; i++)
4914 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4915 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4916 return 0;
4918 return 1;
4921 abort ();
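 /* Editor's note: a hypothetical, stand-alone illustration (not part of this
    file) of the fall-through trick used in the RELOAD_FOR_INSN case above:
    setting OPNUM to -1 before falling into the next case makes the shared
    loop "for (i = opnum + 1; ...)" scan every operand.  Wrapped in "#if 0"
    so it is never compiled.  */
 #if 0
 static int
 any_used_after_p (used, n, opnum, check_all)
      const char *used;
      int n, opnum, check_all;
 {
   int i;

   if (check_all)
     opnum = -1;			/* sentinel: start the scan at operand 0 */

   for (i = opnum + 1; i < n; i++)
     if (used[i])
       return 1;
   return 0;
 }
 #endif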
4924 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
4925 Return 0 otherwise.
4927 This function uses the same algorithm as reload_reg_free_p above. */
4929 static int
4930 reloads_conflict (r1, r2)
4931 int r1, r2;
4933 enum reload_type r1_type = reload_when_needed[r1];
4934 enum reload_type r2_type = reload_when_needed[r2];
4935 int r1_opnum = reload_opnum[r1];
4936 int r2_opnum = reload_opnum[r2];
4938 /* RELOAD_OTHER conflicts with everything. */
4939 if (r2_type == RELOAD_OTHER)
4940 return 1;
4942 /* Otherwise, check conflicts differently for each type. */
4944 switch (r1_type)
4946 case RELOAD_FOR_INPUT:
4947 return (r2_type == RELOAD_FOR_INSN
4948 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
4949 || r2_type == RELOAD_FOR_OPADDR_ADDR
4950 || r2_type == RELOAD_FOR_INPUT
4951 || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
4952 || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
4953 && r2_opnum > r1_opnum));
4955 case RELOAD_FOR_INPUT_ADDRESS:
4956 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
4957 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4959 case RELOAD_FOR_INPADDR_ADDRESS:
4960 return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
4961 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4963 case RELOAD_FOR_OUTPUT_ADDRESS:
4964 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
4965 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4967 case RELOAD_FOR_OUTADDR_ADDRESS:
4968 return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
4969 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4971 case RELOAD_FOR_OPERAND_ADDRESS:
4972 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
4973 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4975 case RELOAD_FOR_OPADDR_ADDR:
4976 return (r2_type == RELOAD_FOR_INPUT
4977 || r2_type == RELOAD_FOR_OPADDR_ADDR);
4979 case RELOAD_FOR_OUTPUT:
4980 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
4981 || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
4982 || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
4983 && r2_opnum >= r1_opnum));
4985 case RELOAD_FOR_INSN:
4986 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
4987 || r2_type == RELOAD_FOR_INSN
4988 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4990 case RELOAD_FOR_OTHER_ADDRESS:
4991 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
4993 case RELOAD_OTHER:
4994 return 1;
4996 default:
4997 abort ();
5001 /* Vector of reload-numbers showing the order in which the reloads should
5002 be processed. */
5003 short reload_order[MAX_RELOADS];
5005 /* Indexed by reload number, 1 if incoming value
5006 inherited from previous insns. */
5007 char reload_inherited[MAX_RELOADS];
5009 /* For an inherited reload, this is the insn the reload was inherited from,
5010 if we know it. Otherwise, this is 0. */
5011 rtx reload_inheritance_insn[MAX_RELOADS];
5013 /* If non-zero, this is a place to get the value of the reload,
5014 rather than using reload_in. */
5015 rtx reload_override_in[MAX_RELOADS];
5017 /* For each reload, the index in spill_regs of the spill register used,
5018 or -1 if we did not need one of the spill registers for this reload. */
5019 int reload_spill_index[MAX_RELOADS];
5021 /* Find a spill register to use as a reload register for reload R.
5022 LAST_RELOAD is non-zero if this is the last reload for the insn being
5023 processed.
5025 Set reload_reg_rtx[R] to the register allocated.
5027 If NOERROR is nonzero, we return 1 if successful,
5028 or 0 if we couldn't find a spill reg and we didn't change anything. */
5030 static int
5031 allocate_reload_reg (r, insn, last_reload, noerror)
5032 int r;
5033 rtx insn;
5034 int last_reload;
5035 int noerror;
5037 int i;
5038 int pass;
5039 int count;
5040 rtx new;
5041 int regno;
5043 /* If we put this reload ahead, thinking it is a group,
5044 then insist on finding a group. Otherwise we can grab a
5045 reg that some other reload needs.
5046 (That can happen when we have a 68000 DATA_OR_FP_REG
5047 which is a group of data regs or one fp reg.)
5048 We need not be so restrictive if there are no more reloads
5049 for this insn.
5051 ??? Really it would be nicer to have smarter handling
5052 for that kind of reg class, where a problem like this is normal.
5053 Perhaps those classes should be avoided for reloading
5054 by use of more alternatives. */
5056 int force_group = reload_nregs[r] > 1 && ! last_reload;
5058 /* If we want a single register and haven't yet found one,
5059 take any reg in the right class and not in use.
5060 If we want a consecutive group, here is where we look for it.
5062 We use two passes so we can first look for reload regs to
5063 reuse, which are already in use for other reloads in this insn,
5064 and only then use additional registers.
5065 I think that maximizing reuse is needed to make sure we don't
5066 run out of reload regs. Suppose we have three reloads, and
5067 reloads A and B can share regs. These need two regs.
5068 Suppose A and B are given different regs.
5069 That leaves none for C. */
5070 for (pass = 0; pass < 2; pass++)
5072 /* I is the index in spill_regs.
5073 We advance it round-robin between insns to use all spill regs
5074 equally, so that inherited reloads have a chance
5075 of leapfrogging each other. Don't do this, however, when we have
5076 group needs and failure would be fatal; if we only have a relatively
5077 small number of spill registers, and more than one of them has
5078 group needs, then by starting in the middle, we may end up
5079 allocating the first one in such a way that we are not left with
5080 sufficient groups to handle the rest. */
5082 if (noerror || ! force_group)
5083 i = last_spill_reg;
5084 else
5085 i = -1;
5087 for (count = 0; count < n_spills; count++)
5089 int class = (int) reload_reg_class[r];
5091 i = (i + 1) % n_spills;
5093 if (reload_reg_free_p (spill_regs[i], reload_opnum[r],
5094 reload_when_needed[r])
5095 && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
5096 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
5097 /* Look first for regs to share, then for unshared. But
5098 don't share regs used for inherited reloads; they are
5099 the ones we want to preserve. */
5100 && (pass
5101 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
5102 spill_regs[i])
5103 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
5104 spill_regs[i]))))
5106 int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5107 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
5108 (on 68000) got us two FP regs. If NR is 1,
5109 we would reject both of them. */
5110 if (force_group)
5111 nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
5112 /* If we need only one reg, we have already won. */
5113 if (nr == 1)
5115 /* But reject a single reg if we demand a group. */
5116 if (force_group)
5117 continue;
5118 break;
5120 /* Otherwise check that as many consecutive regs as we need
5121 are available here.
5122 Also, don't use for a group registers that are
5123 needed for nongroups. */
5124 if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
5125 while (nr > 1)
5127 regno = spill_regs[i] + nr - 1;
5128 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
5129 && spill_reg_order[regno] >= 0
5130 && reload_reg_free_p (regno, reload_opnum[r],
5131 reload_when_needed[r])
5132 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
5133 regno)))
5134 break;
5135 nr--;
5137 if (nr == 1)
5138 break;
5142 /* If we found something on pass 1, omit pass 2. */
5143 if (count < n_spills)
5144 break;
5147 /* We should have found a spill register by now. */
5148 if (count == n_spills)
5150 if (noerror)
5151 return 0;
5152 goto failure;
5155 /* I is the index in SPILL_REG_RTX of the reload register we are to
5156 allocate. Get an rtx for it and find its register number. */
5158 new = spill_reg_rtx[i];
5160 if (new == 0 || GET_MODE (new) != reload_mode[r])
5161 spill_reg_rtx[i] = new
5162 = gen_rtx (REG, reload_mode[r], spill_regs[i]);
5164 regno = true_regnum (new);
5166 /* Detect when the reload reg can't hold the reload mode.
5167 This used to be one `if', but the Sequent compiler can't handle that. */
5168 if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5170 enum machine_mode test_mode = VOIDmode;
5171 if (reload_in[r])
5172 test_mode = GET_MODE (reload_in[r]);
5173 /* If reload_in[r] has VOIDmode, it means we will load it
5174 in whatever mode the reload reg has: to wit, reload_mode[r].
5175 We have already tested that for validity. */
5176 /* Aside from that, we need to test that the expressions
5177 to reload from or into have modes which are valid for this
5178 reload register. Otherwise the reload insns would be invalid. */
5179 if (! (reload_in[r] != 0 && test_mode != VOIDmode
5180 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
5181 if (! (reload_out[r] != 0
5182 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
5184 /* The reg is OK. */
5185 last_spill_reg = i;
5187 /* Mark as in use for this insn the reload regs we use
5188 for this. */
5189 mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
5190 reload_when_needed[r], reload_mode[r]);
5192 reload_reg_rtx[r] = new;
5193 reload_spill_index[r] = i;
5194 return 1;
5198 /* The reg is not OK. */
5199 if (noerror)
5200 return 0;
5202 failure:
5203 if (asm_noperands (PATTERN (insn)) < 0)
5204 /* It's the compiler's fault. */
5205 fatal_insn ("Could not find a spill register", insn);
5207 /* It's the user's fault; the operand's mode and constraint
5208 don't match. Disable this reload so we don't crash in final. */
5209 error_for_asm (insn,
5210 "`asm' operand constraint incompatible with operand size");
5211 reload_in[r] = 0;
5212 reload_out[r] = 0;
5213 reload_reg_rtx[r] = 0;
5214 reload_optional[r] = 1;
5215 reload_secondary_p[r] = 1;
5217 return 1;
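/* Editorial sketch (not part of the original file): choose_reload_regs
   below drives allocate_reload_reg roughly as

     for (j = 0; j < n_reloads; j++)
       if (! allocate_reload_reg (reload_order[j], insn,
                                  j == n_reloads - 1, inheritance))
         break;

   so NOERROR is nonzero while an inheritance pass is still being tried and
   a failure can simply be retried without inheritance.  */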
5220 /* Assign hard reg targets for the pseudo-registers we must reload
5221 into hard regs for this insn.
5222 Also output the instructions to copy them in and out of the hard regs.
5224 For machines with register classes, we are responsible for
5225 finding a reload reg in the proper class. */
5227 static void
5228 choose_reload_regs (insn, avoid_return_reg)
5229 rtx insn;
5230 rtx avoid_return_reg;
5232 register int i, j;
5233 int max_group_size = 1;
5234 enum reg_class group_class = NO_REGS;
5235 int inheritance;
5237 rtx save_reload_reg_rtx[MAX_RELOADS];
5238 char save_reload_inherited[MAX_RELOADS];
5239 rtx save_reload_inheritance_insn[MAX_RELOADS];
5240 rtx save_reload_override_in[MAX_RELOADS];
5241 int save_reload_spill_index[MAX_RELOADS];
5242 HARD_REG_SET save_reload_reg_used;
5243 HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
5244 HARD_REG_SET save_reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
5245 HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
5246 HARD_REG_SET save_reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
5247 HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
5248 HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
5249 HARD_REG_SET save_reload_reg_used_in_op_addr;
5250 HARD_REG_SET save_reload_reg_used_in_op_addr_reload;
5251 HARD_REG_SET save_reload_reg_used_in_insn;
5252 HARD_REG_SET save_reload_reg_used_in_other_addr;
5253 HARD_REG_SET save_reload_reg_used_at_all;
5255 bzero (reload_inherited, MAX_RELOADS);
5256 bzero ((char *) reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
5257 bzero ((char *) reload_override_in, MAX_RELOADS * sizeof (rtx));
5259 CLEAR_HARD_REG_SET (reload_reg_used);
5260 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
5261 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
5262 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
5263 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
5264 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
5266 for (i = 0; i < reload_n_operands; i++)
5268 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
5269 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
5270 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
5271 CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
5272 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
5273 CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
5276 /* Don't bother with avoiding the return reg
5277 if we have no mandatory reload that could use it. */
5278 if (SMALL_REGISTER_CLASSES && avoid_return_reg)
5280 int do_avoid = 0;
5281 int regno = REGNO (avoid_return_reg);
5282 int nregs
5283 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
5284 int r;
5286 for (r = regno; r < regno + nregs; r++)
5287 if (spill_reg_order[r] >= 0)
5288 for (j = 0; j < n_reloads; j++)
5289 if (!reload_optional[j] && reload_reg_rtx[j] == 0
5290 && (reload_in[j] != 0 || reload_out[j] != 0
5291 || reload_secondary_p[j])
5292 &&
5293 TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
5294 do_avoid = 1;
5295 if (!do_avoid)
5296 avoid_return_reg = 0;
5299 #if 0 /* Not needed, now that we can always retry without inheritance. */
5300 /* See if we have more mandatory reloads than spill regs.
5301 If so, then we cannot risk optimizations that could prevent
5302 reloads from sharing one spill register.
5304 Since we will try finding a better register than reload_reg_rtx
5305 unless it is equal to reload_in or reload_out, count such reloads. */
5308 int tem = SMALL_REGISTER_CLASSES? (avoid_return_reg != 0): 0;
5309 for (j = 0; j < n_reloads; j++)
5310 if (! reload_optional[j]
5311 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
5312 && (reload_reg_rtx[j] == 0
5313 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
5314 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
5315 tem++;
5316 if (tem > n_spills)
5317 must_reuse = 1;
5319 #endif
5321 /* Don't use the subroutine call return reg for a reload
5322 if we are supposed to avoid it. */
5323 if (SMALL_REGISTER_CLASSES && avoid_return_reg)
5325 int regno = REGNO (avoid_return_reg);
5326 int nregs
5327 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
5328 int r;
5330 for (r = regno; r < regno + nregs; r++)
5331 if (spill_reg_order[r] >= 0)
5332 SET_HARD_REG_BIT (reload_reg_used, r);
5335 /* In order to be certain of getting the registers we need,
5336 we must sort the reloads into order of increasing register class.
5337 Then our grabbing of reload registers will parallel the process
5338 that provided the reload registers.
5340 Also note whether any of the reloads wants a consecutive group of regs.
5341 If so, record the maximum size of the group desired and what
5342 register class contains all the groups needed by this insn. */
5344 for (j = 0; j < n_reloads; j++)
5346 reload_order[j] = j;
5347 reload_spill_index[j] = -1;
5349 reload_mode[j]
5350 = (reload_inmode[j] == VOIDmode
5351 || (GET_MODE_SIZE (reload_outmode[j])
5352 > GET_MODE_SIZE (reload_inmode[j])))
5353 ? reload_outmode[j] : reload_inmode[j];
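/* Editorial note: reload_mode[j] is thus the wider of the input and output
   modes -- e.g. an SImode input combined with a DImode output reloads in
   DImode -- and a VOIDmode input (such as a constant) simply defers to the
   output mode.  */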
5355 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
5357 if (reload_nregs[j] > 1)
5359 max_group_size = MAX (reload_nregs[j], max_group_size);
5360 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
5363 /* If we have already decided to use a certain register,
5364 don't use it in another way. */
5365 if (reload_reg_rtx[j])
5366 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
5367 reload_when_needed[j], reload_mode[j]);
5370 if (n_reloads > 1)
5371 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
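/* Editorial note: reload_reg_class_lower (defined elsewhere in this file)
   is the comparison function that realizes the ordering described above,
   so the most constrained reloads claim their registers first.  */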
5373 bcopy ((char *) reload_reg_rtx, (char *) save_reload_reg_rtx,
5374 sizeof reload_reg_rtx);
5375 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
5376 bcopy ((char *) reload_inheritance_insn,
5377 (char *) save_reload_inheritance_insn,
5378 sizeof reload_inheritance_insn);
5379 bcopy ((char *) reload_override_in, (char *) save_reload_override_in,
5380 sizeof reload_override_in);
5381 bcopy ((char *) reload_spill_index, (char *) save_reload_spill_index,
5382 sizeof reload_spill_index);
5383 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
5384 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
5385 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
5386 reload_reg_used_in_op_addr);
5388 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr_reload,
5389 reload_reg_used_in_op_addr_reload);
5391 COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
5392 reload_reg_used_in_insn);
5393 COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
5394 reload_reg_used_in_other_addr);
5396 for (i = 0; i < reload_n_operands; i++)
5398 COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
5399 reload_reg_used_in_output[i]);
5400 COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
5401 reload_reg_used_in_input[i]);
5402 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
5403 reload_reg_used_in_input_addr[i]);
5404 COPY_HARD_REG_SET (save_reload_reg_used_in_inpaddr_addr[i],
5405 reload_reg_used_in_inpaddr_addr[i]);
5406 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
5407 reload_reg_used_in_output_addr[i]);
5408 COPY_HARD_REG_SET (save_reload_reg_used_in_outaddr_addr[i],
5409 reload_reg_used_in_outaddr_addr[i]);
5412 /* If -O, try first with inheritance, then turning it off.
5413 If not -O, don't do inheritance.
5414 Using inheritance when not optimizing leads to paradoxes
5415 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5416 because one side of the comparison might be inherited. */
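/* Editorial note: the loop below first runs with inheritance enabled (when
   optimizing); if allocation fails, the `fail:' code further down restores
   the register sets saved above and the loop repeats with inheritance
   turned off.  */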
5418 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
5420 /* Process the reloads in order of preference just found.
5421 Beyond this point, subregs can be found in reload_reg_rtx.
5423 This used to look for an existing reloaded home for all
5424 of the reloads, and only then perform any new reloads.
5425 But that could lose if the reloads were done out of reg-class order
5426 because a later reload with a looser constraint might have an old
5427 home in a register needed by an earlier reload with a tighter constraint.
5429 To solve this, we make two passes over the reloads, in the order
5430 described above. In the first pass we try to inherit a reload
5431 from a previous insn. If there is a later reload that needs a
5432 class that is a proper subset of the class being processed, we must
5433 also allocate a spill register during the first pass.
5435 Then make a second pass over the reloads to allocate any reloads
5436 that haven't been given registers yet. */
5438 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5440 for (j = 0; j < n_reloads; j++)
5442 register int r = reload_order[j];
5444 /* Ignore reloads that got marked inoperative. */
5445 if (reload_out[r] == 0 && reload_in[r] == 0
5446 && ! reload_secondary_p[r])
5447 continue;
5449 /* If find_reloads chose to use reload_in or reload_out as a reload
5450 register, we don't need to choose one. Otherwise, try even if it
5451 found one since we might save an insn if we find the value lying
5452 around. */
5453 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
5454 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
5455 || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
5456 continue;
5458 #if 0 /* No longer needed for correct operation.
5459 It might give better code, or might not; worth an experiment? */
5460 /* If this is an optional reload, we can't inherit from earlier insns
5461 until we are sure that any non-optional reloads have been allocated.
5462 The following code takes advantage of the fact that optional reloads
5463 are at the end of reload_order. */
5464 if (reload_optional[r] != 0)
5465 for (i = 0; i < j; i++)
5466 if ((reload_out[reload_order[i]] != 0
5467 || reload_in[reload_order[i]] != 0
5468 || reload_secondary_p[reload_order[i]])
5469 && ! reload_optional[reload_order[i]]
5470 && reload_reg_rtx[reload_order[i]] == 0)
5471 allocate_reload_reg (reload_order[i], insn, 0, inheritance);
5472 #endif
5474 /* First see if this pseudo is already available as reloaded
5475 for a previous insn. We cannot try to inherit for reloads
5476 that are smaller than the maximum number of registers needed
5477 for groups unless the register we would allocate cannot be used
5478 for the groups.
5480 We could check here to see if this is a secondary reload for
5481 an object that is already in a register of the desired class.
5482 This would avoid the need for the secondary reload register.
5483 But this is complex because we can't easily determine what
5484 objects might want to be loaded via this reload. So let a
5485 register be allocated here. In `emit_reload_insns' we suppress
5486 one of the loads in the case described above. */
5488 if (inheritance)
5490 register int regno = -1;
5491 enum machine_mode mode;
5493 if (reload_in[r] == 0)
5495 else if (GET_CODE (reload_in[r]) == REG)
5497 regno = REGNO (reload_in[r]);
5498 mode = GET_MODE (reload_in[r]);
5500 else if (GET_CODE (reload_in_reg[r]) == REG)
5502 regno = REGNO (reload_in_reg[r]);
5503 mode = GET_MODE (reload_in_reg[r]);
5505 #if 0
5506 /* This won't work, since REGNO can be a pseudo reg number.
5507 Also, it takes much more hair to keep track of all the things
5508 that can invalidate an inherited reload of part of a pseudoreg. */
5509 else if (GET_CODE (reload_in[r]) == SUBREG
5510 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
5511 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
5512 #endif
5514 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
5516 i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];
5518 if (reg_reloaded_contents[i] == regno
5519 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
5520 >= GET_MODE_SIZE (mode))
5521 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
5522 && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5523 spill_regs[i])
5524 && (reload_nregs[r] == max_group_size
5525 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
5526 spill_regs[i]))
5527 && reload_reg_free_p (spill_regs[i], reload_opnum[r],
5528 reload_when_needed[r])
5529 && reload_reg_free_before_p (spill_regs[i],
5530 reload_opnum[r],
5531 reload_when_needed[r]))
5533 /* If a group is needed, verify that all the subsequent
5534 registers still have their values intact. */
5535 int nr
5536 = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5537 int k;
5539 for (k = 1; k < nr; k++)
5540 if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
5541 != regno)
5542 break;
5544 if (k == nr)
5546 int i1;
5548 /* We found a register that contains the
5549 value we need. If this register is the
5550 same as an `earlyclobber' operand of the
5551 current insn, just mark it as a place to
5552 reload from since we can't use it as the
5553 reload register itself. */
5555 for (i1 = 0; i1 < n_earlyclobbers; i1++)
5556 if (reg_overlap_mentioned_for_reload_p
5557 (reg_last_reload_reg[regno],
5558 reload_earlyclobbers[i1]))
5559 break;
5561 if (i1 != n_earlyclobbers
5562 /* Don't really use the inherited spill reg
5563 if we need it wider than we've got it. */
5564 || (GET_MODE_SIZE (reload_mode[r])
5565 > GET_MODE_SIZE (mode)))
5566 reload_override_in[r] = reg_last_reload_reg[regno];
5567 else
5569 int k;
5570 /* We can use this as a reload reg. */
5571 /* Mark the register as in use for this part of
5572 the insn. */
5573 mark_reload_reg_in_use (spill_regs[i],
5574 reload_opnum[r],
5575 reload_when_needed[r],
5576 reload_mode[r]);
5577 reload_reg_rtx[r] = reg_last_reload_reg[regno];
5578 reload_inherited[r] = 1;
5579 reload_inheritance_insn[r]
5580 = reg_reloaded_insn[i];
5581 reload_spill_index[r] = i;
5582 for (k = 0; k < nr; k++)
5583 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5584 spill_regs[i + k]);
5591 /* Here's another way to see if the value is already lying around. */
5592 if (inheritance
5593 && reload_in[r] != 0
5594 && ! reload_inherited[r]
5595 && reload_out[r] == 0
5596 && (CONSTANT_P (reload_in[r])
5597 || GET_CODE (reload_in[r]) == PLUS
5598 || GET_CODE (reload_in[r]) == REG
5599 || GET_CODE (reload_in[r]) == MEM)
5600 && (reload_nregs[r] == max_group_size
5601 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
5603 register rtx equiv
5604 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
5605 -1, NULL_PTR, 0, reload_mode[r]);
5606 int regno;
5608 if (equiv != 0)
5610 if (GET_CODE (equiv) == REG)
5611 regno = REGNO (equiv);
5612 else if (GET_CODE (equiv) == SUBREG)
5614 /* This must be a SUBREG of a hard register.
5615 Make a new REG since this might be used in an
5616 address and not all machines support SUBREGs
5617 there. */
5618 regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
5619 equiv = gen_rtx (REG, reload_mode[r], regno);
5621 else
5622 abort ();
5625 /* If we found a spill reg, reject it unless it is free
5626 and of the desired class. */
5627 if (equiv != 0
5628 && ((spill_reg_order[regno] >= 0
5629 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5630 reload_when_needed[r]))
5631 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5632 regno)))
5633 equiv = 0;
5635 if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
5636 equiv = 0;
5638 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5639 equiv = 0;
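/* Editorial summary: at this point EQUIV survives only if it is a hard
   register (or SUBREG of one) of the desired class, not already claimed as
   a reload register in this insn, able to hold reload_mode[r], and -- if it
   is one of the spill registers -- still free at this stage of the insn.  */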
5641 /* We found a register that contains the value we need.
5642 If this register is the same as an `earlyclobber' operand
5643 of the current insn, just mark it as a place to reload from
5644 since we can't use it as the reload register itself. */
5646 if (equiv != 0)
5647 for (i = 0; i < n_earlyclobbers; i++)
5648 if (reg_overlap_mentioned_for_reload_p (equiv,
5649 reload_earlyclobbers[i]))
5651 reload_override_in[r] = equiv;
5652 equiv = 0;
5653 break;
5656 /* JRV: If the equiv register we have found is
5657 explicitly clobbered in the current insn, mark but
5658 don't use, as above. */
5660 if (equiv != 0 && regno_clobbered_p (regno, insn))
5662 reload_override_in[r] = equiv;
5663 equiv = 0;
5666 /* If we found an equivalent reg, say no code need be generated
5667 to load it, and use it as our reload reg. */
5668 if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
5670 int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);
5671 int k;
5672 reload_reg_rtx[r] = equiv;
5673 reload_inherited[r] = 1;
5675 /* If any of the hard registers in EQUIV are spill
5676 registers, mark them as in use for this insn. */
5677 for (k = 0; k < nr; k++)
5679 i = spill_reg_order[regno + k];
5680 if (i >= 0)
5682 mark_reload_reg_in_use (regno, reload_opnum[r],
5683 reload_when_needed[r],
5684 reload_mode[r]);
5685 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5686 regno + k);
5692 /* If we found a register to use already, or if this is an optional
5693 reload, we are done. */
5694 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
5695 continue;
5697 #if 0 /* No longer needed for correct operation. Might or might not
5698 give better code on the average. Want to experiment? */
5700 /* See if there is a later reload that has a class different from our
5701 class that intersects our class or that requires less register
5702 than our reload. If so, we must allocate a register to this
5703 reload now, since that reload might inherit a previous reload
5704 and take the only available register in our class. Don't do this
5705 for optional reloads since they will force all previous reloads
5706 to be allocated. Also don't do this for reloads that have been
5707 turned off. */
5709 for (i = j + 1; i < n_reloads; i++)
5711 int s = reload_order[i];
5713 if ((reload_in[s] == 0 && reload_out[s] == 0
5714 && ! reload_secondary_p[s])
5715 || reload_optional[s])
5716 continue;
5718 if ((reload_reg_class[s] != reload_reg_class[r]
5719 && reg_classes_intersect_p (reload_reg_class[r],
5720 reload_reg_class[s]))
5721 || reload_nregs[s] < reload_nregs[r])
5722 break;
5725 if (i == n_reloads)
5726 continue;
5728 allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
5729 #endif
5732 /* Now allocate reload registers for anything non-optional that
5733 didn't get one yet. */
5734 for (j = 0; j < n_reloads; j++)
5736 register int r = reload_order[j];
5738 /* Ignore reloads that got marked inoperative. */
5739 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5740 continue;
5742 /* Skip reloads that already have a register allocated or are
5743 optional. */
5744 if (reload_reg_rtx[r] != 0 || reload_optional[r])
5745 continue;
5747 if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
5748 break;
5751 /* If that loop got all the way, we have won. */
5752 if (j == n_reloads)
5753 break;
5755 fail:
5756 /* Loop around and try without any inheritance. */
5757 /* First undo everything done by the failed attempt
5758 to allocate with inheritance. */
5759 bcopy ((char *) save_reload_reg_rtx, (char *) reload_reg_rtx,
5760 sizeof reload_reg_rtx);
5761 bcopy ((char *) save_reload_inherited, (char *) reload_inherited,
5762 sizeof reload_inherited);
5763 bcopy ((char *) save_reload_inheritance_insn,
5764 (char *) reload_inheritance_insn,
5765 sizeof reload_inheritance_insn);
5766 bcopy ((char *) save_reload_override_in, (char *) reload_override_in,
5767 sizeof reload_override_in);
5768 bcopy ((char *) save_reload_spill_index, (char *) reload_spill_index,
5769 sizeof reload_spill_index);
5770 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
5771 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
5772 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
5773 save_reload_reg_used_in_op_addr);
5774 COPY_HARD_REG_SET (reload_reg_used_in_op_addr_reload,
5775 save_reload_reg_used_in_op_addr_reload);
5776 COPY_HARD_REG_SET (reload_reg_used_in_insn,
5777 save_reload_reg_used_in_insn);
5778 COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
5779 save_reload_reg_used_in_other_addr);
5781 for (i = 0; i < reload_n_operands; i++)
5783 COPY_HARD_REG_SET (reload_reg_used_in_input[i],
5784 save_reload_reg_used_in_input[i]);
5785 COPY_HARD_REG_SET (reload_reg_used_in_output[i],
5786 save_reload_reg_used_in_output[i]);
5787 COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
5788 save_reload_reg_used_in_input_addr[i]);
5789 COPY_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i],
5790 save_reload_reg_used_in_inpaddr_addr[i]);
5791 COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
5792 save_reload_reg_used_in_output_addr[i]);
5793 COPY_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i],
5794 save_reload_reg_used_in_outaddr_addr[i]);
5798 /* If we thought we could inherit a reload, because it seemed that
5799 nothing else wanted the same reload register earlier in the insn,
5800 verify that assumption, now that all reloads have been assigned. */
5802 for (j = 0; j < n_reloads; j++)
5804 register int r = reload_order[j];
5806 if (reload_inherited[r] && reload_reg_rtx[r] != 0
5807 && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
5808 reload_opnum[r],
5809 reload_when_needed[r]))
5810 reload_inherited[r] = 0;
5812 /* If we found a better place to reload from,
5813 validate it in the same fashion, if it is a reload reg. */
5814 if (reload_override_in[r]
5815 && (GET_CODE (reload_override_in[r]) == REG
5816 || GET_CODE (reload_override_in[r]) == SUBREG))
5818 int regno = true_regnum (reload_override_in[r]);
5819 if (spill_reg_order[regno] >= 0
5820 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5821 reload_when_needed[r]))
5822 reload_override_in[r] = 0;
5826 /* Now that reload_override_in is known valid,
5827 actually override reload_in. */
5828 for (j = 0; j < n_reloads; j++)
5829 if (reload_override_in[j])
5830 reload_in[j] = reload_override_in[j];
5832 /* If this reload won't be done because it has been cancelled or is
5833 optional and not inherited, clear reload_reg_rtx so other
5834 routines (such as subst_reloads) don't get confused. */
5835 for (j = 0; j < n_reloads; j++)
5836 if (reload_reg_rtx[j] != 0
5837 && ((reload_optional[j] && ! reload_inherited[j])
5838 || (reload_in[j] == 0 && reload_out[j] == 0
5839 && ! reload_secondary_p[j])))
5841 int regno = true_regnum (reload_reg_rtx[j]);
5843 if (spill_reg_order[regno] >= 0)
5844 clear_reload_reg_in_use (regno, reload_opnum[j],
5845 reload_when_needed[j], reload_mode[j]);
5846 reload_reg_rtx[j] = 0;
5849 /* Record which pseudos and which spill regs have output reloads. */
5850 for (j = 0; j < n_reloads; j++)
5852 register int r = reload_order[j];
5854 i = reload_spill_index[r];
5856 /* I is nonneg if this reload used one of the spill regs.
5857 If reload_reg_rtx[r] is 0, this is an optional reload
5858 that we opted to ignore. */
5859 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
5860 && reload_reg_rtx[r] != 0)
5862 register int nregno = REGNO (reload_out[r]);
5863 int nr = 1;
5865 if (nregno < FIRST_PSEUDO_REGISTER)
5866 nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
5868 while (--nr >= 0)
5869 reg_has_output_reload[nregno + nr] = 1;
5871 if (i >= 0)
5873 nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5874 while (--nr >= 0)
5875 SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
5878 if (reload_when_needed[r] != RELOAD_OTHER
5879 && reload_when_needed[r] != RELOAD_FOR_OUTPUT
5880 && reload_when_needed[r] != RELOAD_FOR_INSN)
5881 abort ();
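/* Editorial note: the abort above is a consistency check -- an output
   reload is expected to be classified RELOAD_OTHER, RELOAD_FOR_OUTPUT or
   RELOAD_FOR_INSN, presumably because any other type would not keep the
   register reserved through the store emitted after the insn.  */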
5886 /* If SMALL_REGISTER_CLASSES is non-zero, we may not have merged two
5887 reloads of the same item for fear that we might not have enough reload
5888 registers. However, normally they will get the same reload register
5889 and hence actually need not be loaded twice.
5891 Here we check for the most common case of this phenomenon: when we have
5892 a number of reloads for the same object, each of which were allocated
5893 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
5894 reload, and is not modified in the insn itself. If we find such,
5895 merge all the reloads and set the resulting reload to RELOAD_OTHER.
5896 This will not increase the number of spill registers needed and will
5897 prevent redundant code. */
5899 static void
5900 merge_assigned_reloads (insn)
5901 rtx insn;
5903 int i, j;
5905 /* Scan all the reloads looking for ones that only load values and
5906 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
5907 assigned and not modified by INSN. */
5909 for (i = 0; i < n_reloads; i++)
5911 if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
5912 || reload_out[i] != 0 || reload_reg_rtx[i] == 0
5913 || reg_set_p (reload_reg_rtx[i], insn))
5914 continue;
5916 /* Look at all other reloads. Ensure that the only use of this
5917 reload_reg_rtx is in a reload that just loads the same value
5918 as we do. Note that any secondary reloads must be of the identical
5919 class since the values, modes, and result registers are the
5920 same, so we need not do anything with any secondary reloads. */
5922 for (j = 0; j < n_reloads; j++)
5924 if (i == j || reload_reg_rtx[j] == 0
5925 || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
5926 reload_reg_rtx[i]))
5927 continue;
5929 /* If the reload regs aren't exactly the same (e.g, different modes)
5930 or if the values are different, we can't merge anything with this
5931 reload register. */
5933 if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
5934 || reload_out[j] != 0 || reload_in[j] == 0
5935 || ! rtx_equal_p (reload_in[i], reload_in[j]))
5936 break;
5939 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
5940 we, in fact, found any matching reloads. */
5942 if (j == n_reloads)
5944 for (j = 0; j < n_reloads; j++)
5945 if (i != j && reload_reg_rtx[j] != 0
5946 && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j]))
5948 reload_when_needed[i] = RELOAD_OTHER;
5949 reload_in[j] = 0;
5950 transfer_replacements (i, j);
5953 /* If this is now RELOAD_OTHER, look for any reloads that load
5954 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
5955 if they were for inputs, RELOAD_OTHER for outputs. Note that
5956 this test is equivalent to looking for reloads for this operand
5957 number. */
5959 if (reload_when_needed[i] == RELOAD_OTHER)
5960 for (j = 0; j < n_reloads; j++)
5961 if (reload_in[j] != 0
5962 && reload_when_needed[j] != RELOAD_OTHER
5963 && reg_overlap_mentioned_for_reload_p (reload_in[j],
5964 reload_in[i]))
5965 reload_when_needed[j]
5966 = ((reload_when_needed[j] == RELOAD_FOR_INPUT_ADDRESS
5967 || reload_when_needed[j] == RELOAD_FOR_INPADDR_ADDRESS)
5968 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER);
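/* Editorial example (not from the original source): if two input reloads
   of the same pseudo -- say one RELOAD_FOR_INPUT for operand 0 and one for
   operand 1 -- were both assigned the same reload register and that
   register is not otherwise set in the insn, the code above collapses them
   into a single RELOAD_OTHER reload, so the value is loaded once instead
   of twice.  */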
5974 /* Output insns to reload values in and out of the chosen reload regs. */
5976 static void
5977 emit_reload_insns (insn)
5978 rtx insn;
5980 register int j;
5981 rtx input_reload_insns[MAX_RECOG_OPERANDS];
5982 rtx other_input_address_reload_insns = 0;
5983 rtx other_input_reload_insns = 0;
5984 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
5985 rtx inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
5986 rtx output_reload_insns[MAX_RECOG_OPERANDS];
5987 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
5988 rtx outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
5989 rtx operand_reload_insns = 0;
5990 rtx other_operand_reload_insns = 0;
5991 rtx other_output_reload_insns[MAX_RECOG_OPERANDS];
5992 rtx following_insn = NEXT_INSN (insn);
5993 rtx before_insn = insn;
5994 int special;
5995 /* Values to be put in spill_reg_store are put here first. */
5996 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
5998 for (j = 0; j < reload_n_operands; j++)
5999 input_reload_insns[j] = input_address_reload_insns[j]
6000 = inpaddr_address_reload_insns[j]
6001 = output_reload_insns[j] = output_address_reload_insns[j]
6002 = outaddr_address_reload_insns[j]
6003 = other_output_reload_insns[j] = 0;
6005 /* Now output the instructions to copy the data into and out of the
6006 reload registers. Do these in the order that the reloads were reported,
6007 since reloads of base and index registers precede reloads of operands
6008 and the operands may need the base and index registers reloaded. */
6010 for (j = 0; j < n_reloads; j++)
6012 register rtx old;
6013 rtx oldequiv_reg = 0;
6014 rtx this_reload_insn = 0;
6016 if (reload_spill_index[j] >= 0)
6017 new_spill_reg_store[reload_spill_index[j]] = 0;
6019 old = reload_in[j];
6020 if (old != 0 && ! reload_inherited[j]
6021 && ! rtx_equal_p (reload_reg_rtx[j], old)
6022 && reload_reg_rtx[j] != 0)
6024 register rtx reloadreg = reload_reg_rtx[j];
6025 rtx oldequiv = 0;
6026 enum machine_mode mode;
6027 rtx *where;
6029 /* Determine the mode to reload in.
6030 This is very tricky because we have three to choose from.
6031 There is the mode the insn operand wants (reload_inmode[J]).
6032 There is the mode of the reload register RELOADREG.
6033 There is the intrinsic mode of the operand, which we could find
6034 by stripping some SUBREGs.
6035 It turns out that RELOADREG's mode is irrelevant:
6036 we can change that arbitrarily.
6038 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
6039 then the reload reg may not support QImode moves, so use SImode.
6040 If foo is in memory due to spilling a pseudo reg, this is safe,
6041 because the QImode value is in the least significant part of a
6042 slot big enough for a SImode. If foo is some other sort of
6043 memory reference, then it is impossible to reload this case,
6044 so previous passes had better make sure this never happens.
6046 Then consider a one-word union which has SImode and one of its
6047 members is a float, being fetched as (SUBREG:SF union:SI).
6048 We must fetch that as SFmode because we could be loading into
6049 a float-only register. In this case OLD's mode is correct.
6051 Consider an immediate integer: it has VOIDmode. Here we need
6052 to get a mode from something else.
6054 In some cases, there is a fourth mode, the operand's
6055 containing mode. If the insn specifies a containing mode for
6056 this operand, it overrides all others.
6058 I am not sure whether the algorithm here is always right,
6059 but it does the right things in those cases. */
6061 mode = GET_MODE (old);
6062 if (mode == VOIDmode)
6063 mode = reload_inmode[j];
6065 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6066 /* If we need a secondary register for this operation, see if
6067 the value is already in a register in that class. Don't
6068 do this if the secondary register will be used as a scratch
6069 register. */
6071 if (reload_secondary_in_reload[j] >= 0
6072 && reload_secondary_in_icode[j] == CODE_FOR_nothing
6073 && optimize)
6074 oldequiv
6075 = find_equiv_reg (old, insn,
6076 reload_reg_class[reload_secondary_in_reload[j]],
6077 -1, NULL_PTR, 0, mode);
6078 #endif
6080 /* If reloading from memory, see if there is a register
6081 that already holds the same value. If so, reload from there.
6082 We can pass 0 as the reload_reg_p argument because
6083 any other reload has either already been emitted,
6084 in which case find_equiv_reg will see the reload-insn,
6085 or has yet to be emitted, in which case it doesn't matter
6086 because we will use this equiv reg right away. */
6088 if (oldequiv == 0 && optimize
6089 && (GET_CODE (old) == MEM
6090 || (GET_CODE (old) == REG
6091 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6092 && reg_renumber[REGNO (old)] < 0)))
6093 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
6094 -1, NULL_PTR, 0, mode);
6096 if (oldequiv)
6098 int regno = true_regnum (oldequiv);
6100 /* If OLDEQUIV is a spill register, don't use it for this
6101 if any other reload needs it at an earlier stage of this insn
6102 or at this stage. */
6103 if (spill_reg_order[regno] >= 0
6104 && (! reload_reg_free_p (regno, reload_opnum[j],
6105 reload_when_needed[j])
6106 || ! reload_reg_free_before_p (regno, reload_opnum[j],
6107 reload_when_needed[j])))
6108 oldequiv = 0;
6110 /* If OLDEQUIV is not a spill register,
6111 don't use it if any other reload wants it. */
6112 if (spill_reg_order[regno] < 0)
6114 int k;
6115 for (k = 0; k < n_reloads; k++)
6116 if (reload_reg_rtx[k] != 0 && k != j
6117 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
6118 oldequiv))
6120 oldequiv = 0;
6121 break;
6125 /* If it is no cheaper to copy from OLDEQUIV into the
6126 reload register than it would be to move from memory,
6127 don't use it. Likewise, if we need a secondary register
6128 or memory. */
6130 if (oldequiv != 0
6131 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
6132 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
6133 reload_reg_class[j])
6134 >= MEMORY_MOVE_COST (mode)))
6135 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6136 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
6137 mode, oldequiv)
6138 != NO_REGS)
6139 #endif
6140 #ifdef SECONDARY_MEMORY_NEEDED
6141 || SECONDARY_MEMORY_NEEDED (reload_reg_class[j],
6142 REGNO_REG_CLASS (regno),
6143 mode)
6144 #endif
6146 oldequiv = 0;
6149 if (oldequiv == 0)
6150 oldequiv = old;
6151 else if (GET_CODE (oldequiv) == REG)
6152 oldequiv_reg = oldequiv;
6153 else if (GET_CODE (oldequiv) == SUBREG)
6154 oldequiv_reg = SUBREG_REG (oldequiv);
6156 /* If we are reloading from a register that was recently stored in
6157 with an output-reload, see if we can prove there was
6158 actually no need to store the old value in it. */
6160 if (optimize && GET_CODE (oldequiv) == REG
6161 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6162 && spill_reg_order[REGNO (oldequiv)] >= 0
6163 && spill_reg_store[spill_reg_order[REGNO (oldequiv)]] != 0
6164 && find_reg_note (insn, REG_DEAD, reload_in[j])
6165 /* This is unsafe if operand occurs more than once in current
6166 insn. Perhaps some occurrences weren't reloaded. */
6167 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
6168 delete_output_reload
6169 (insn, j, spill_reg_store[spill_reg_order[REGNO (oldequiv)]]);
6171 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
6172 then load RELOADREG from OLDEQUIV. Note that we cannot use
6173 gen_lowpart_common since it can do the wrong thing when
6174 RELOADREG has a multi-word mode. Note that RELOADREG
6175 must always be a REG here. */
6177 if (GET_MODE (reloadreg) != mode)
6178 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6179 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
6180 oldequiv = SUBREG_REG (oldequiv);
6181 if (GET_MODE (oldequiv) != VOIDmode
6182 && mode != GET_MODE (oldequiv))
6183 oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
6185 /* Switch to the right place to emit the reload insns. */
6186 switch (reload_when_needed[j])
6188 case RELOAD_OTHER:
6189 where = &other_input_reload_insns;
6190 break;
6191 case RELOAD_FOR_INPUT:
6192 where = &input_reload_insns[reload_opnum[j]];
6193 break;
6194 case RELOAD_FOR_INPUT_ADDRESS:
6195 where = &input_address_reload_insns[reload_opnum[j]];
6196 break;
6197 case RELOAD_FOR_INPADDR_ADDRESS:
6198 where = &inpaddr_address_reload_insns[reload_opnum[j]];
6199 break;
6200 case RELOAD_FOR_OUTPUT_ADDRESS:
6201 where = &output_address_reload_insns[reload_opnum[j]];
6202 break;
6203 case RELOAD_FOR_OUTADDR_ADDRESS:
6204 where = &outaddr_address_reload_insns[reload_opnum[j]];
6205 break;
6206 case RELOAD_FOR_OPERAND_ADDRESS:
6207 where = &operand_reload_insns;
6208 break;
6209 case RELOAD_FOR_OPADDR_ADDR:
6210 where = &other_operand_reload_insns;
6211 break;
6212 case RELOAD_FOR_OTHER_ADDRESS:
6213 where = &other_input_address_reload_insns;
6214 break;
6215 default:
6216 abort ();
6219 push_to_sequence (*where);
6220 special = 0;
6222 /* Auto-increment addresses must be reloaded in a special way. */
6223 if (GET_CODE (oldequiv) == POST_INC
6224 || GET_CODE (oldequiv) == POST_DEC
6225 || GET_CODE (oldequiv) == PRE_INC
6226 || GET_CODE (oldequiv) == PRE_DEC)
6228 /* We are not going to bother supporting the case where an
6229 incremented register can't be copied directly from
6230 OLDEQUIV, since this seems highly unlikely. */
6231 if (reload_secondary_in_reload[j] >= 0)
6232 abort ();
6233 /* Prevent normal processing of this reload. */
6234 special = 1;
6235 /* Output a special code sequence for this case. */
6236 inc_for_reload (reloadreg, oldequiv, reload_inc[j]);
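/* Editorial note (an assumption about the helper, not stated here): for an
   address such as (post_inc:SI (reg X)), inc_for_reload is expected to emit
   the increment of X explicitly and leave the needed value in RELOADREG,
   since the side effect cannot simply be copied into a reload register.  */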
6239 /* If we are reloading a pseudo-register that was set by the previous
6240 insn, see if we can get rid of that pseudo-register entirely
6241 by redirecting the previous insn into our reload register. */
6243 else if (optimize && GET_CODE (old) == REG
6244 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6245 && dead_or_set_p (insn, old)
6246 /* This is unsafe if some other reload
6247 uses the same reg first. */
6248 && reload_reg_free_before_p (REGNO (reloadreg),
6249 reload_opnum[j],
6250 reload_when_needed[j]))
6252 rtx temp = PREV_INSN (insn);
6253 while (temp && GET_CODE (temp) == NOTE)
6254 temp = PREV_INSN (temp);
6255 if (temp
6256 && GET_CODE (temp) == INSN
6257 && GET_CODE (PATTERN (temp)) == SET
6258 && SET_DEST (PATTERN (temp)) == old
6259 /* Make sure we can access insn_operand_constraint. */
6260 && asm_noperands (PATTERN (temp)) < 0
6261 /* This is unsafe if prev insn rejects our reload reg. */
6262 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
6263 reloadreg)
6264 /* This is unsafe if operand occurs more than once in current
6265 insn. Perhaps some occurrences aren't reloaded. */
6266 && count_occurrences (PATTERN (insn), old) == 1
6267 /* Don't risk splitting a matching pair of operands. */
6268 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
6270 /* Store into the reload register instead of the pseudo. */
6271 SET_DEST (PATTERN (temp)) = reloadreg;
6272 /* If these are the only uses of the pseudo reg,
6273 pretend for GDB it lives in the reload reg we used. */
6274 if (REG_N_DEATHS (REGNO (old)) == 1
6275 && REG_N_SETS (REGNO (old)) == 1)
6277 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
6278 alter_reg (REGNO (old), -1);
6280 special = 1;
6284 /* We can't do that, so output an insn to load RELOADREG. */
6286 if (! special)
6288 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6289 rtx second_reload_reg = 0;
6290 enum insn_code icode;
6292 /* If we have a secondary reload, pick up the secondary register
6293 and icode, if any. If OLDEQUIV and OLD are different or
6294 if this is an in-out reload, recompute whether or not we
6295 still need a secondary register and what the icode should
6296 be. If we still need a secondary register and the class or
6297 icode is different, go back to reloading from OLD if using
6298 OLDEQUIV means that we got the wrong type of register. We
6299 cannot have different class or icode due to an in-out reload
6300 because we don't make such reloads when both the input and
6301 output need secondary reload registers. */
6303 if (reload_secondary_in_reload[j] >= 0)
6305 int secondary_reload = reload_secondary_in_reload[j];
6306 rtx real_oldequiv = oldequiv;
6307 rtx real_old = old;
6309 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
6310 and similarly for OLD.
6311 See comments in get_secondary_reload in reload.c. */
6312 if (GET_CODE (oldequiv) == REG
6313 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
6314 && reg_equiv_mem[REGNO (oldequiv)] != 0)
6315 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
6317 if (GET_CODE (old) == REG
6318 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6319 && reg_equiv_mem[REGNO (old)] != 0)
6320 real_old = reg_equiv_mem[REGNO (old)];
6322 second_reload_reg = reload_reg_rtx[secondary_reload];
6323 icode = reload_secondary_in_icode[j];
6325 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
6326 || (reload_in[j] != 0 && reload_out[j] != 0))
6328 enum reg_class new_class
6329 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
6330 mode, real_oldequiv);
6332 if (new_class == NO_REGS)
6333 second_reload_reg = 0;
6334 else
6336 enum insn_code new_icode;
6337 enum machine_mode new_mode;
6339 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
6340 REGNO (second_reload_reg)))
6341 oldequiv = old, real_oldequiv = real_old;
6342 else
6344 new_icode = reload_in_optab[(int) mode];
6345 if (new_icode != CODE_FOR_nothing
6346 && ((insn_operand_predicate[(int) new_icode][0]
6347 && ! ((*insn_operand_predicate[(int) new_icode][0])
6348 (reloadreg, mode)))
6349 || (insn_operand_predicate[(int) new_icode][1]
6350 && ! ((*insn_operand_predicate[(int) new_icode][1])
6351 (real_oldequiv, mode)))))
6352 new_icode = CODE_FOR_nothing;
6354 if (new_icode == CODE_FOR_nothing)
6355 new_mode = mode;
6356 else
6357 new_mode = insn_operand_mode[(int) new_icode][2];
6359 if (GET_MODE (second_reload_reg) != new_mode)
6361 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
6362 new_mode))
6363 oldequiv = old, real_oldequiv = real_old;
6364 else
6365 second_reload_reg
6366 = gen_rtx (REG, new_mode,
6367 REGNO (second_reload_reg));
6373 /* If we still need a secondary reload register, check
6374 to see if it is being used as a scratch or intermediate
6375 register and generate code appropriately. If we need
6376 a scratch register, use REAL_OLDEQUIV since the form of
6377 the insn may depend on the actual address if it is
6378 a MEM. */
6380 if (second_reload_reg)
6382 if (icode != CODE_FOR_nothing)
6384 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
6385 second_reload_reg));
6386 special = 1;
6388 else
6390 /* See if we need a scratch register to load the
6391 intermediate register (a tertiary reload). */
6392 enum insn_code tertiary_icode
6393 = reload_secondary_in_icode[secondary_reload];
6395 if (tertiary_icode != CODE_FOR_nothing)
6397 rtx third_reload_reg
6398 = reload_reg_rtx[reload_secondary_in_reload[secondary_reload]];
6400 emit_insn ((GEN_FCN (tertiary_icode)
6401 (second_reload_reg, real_oldequiv,
6402 third_reload_reg)));
6404 else
6405 gen_reload (second_reload_reg, oldequiv,
6406 reload_opnum[j],
6407 reload_when_needed[j]);
6409 oldequiv = second_reload_reg;
6413 #endif
6415 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
6416 gen_reload (reloadreg, oldequiv, reload_opnum[j],
6417 reload_when_needed[j]);
6419 #if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
6420 /* We may have to make a REG_DEAD note for the secondary reload
6421 register in the insns we just made. Find the last insn that
6422 mentioned the register. */
6423 if (! special && second_reload_reg
6424 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
6426 rtx prev;
6428 for (prev = get_last_insn (); prev;
6429 prev = PREV_INSN (prev))
6430 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6431 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
6432 PATTERN (prev)))
6434 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
6435 second_reload_reg,
6436 REG_NOTES (prev));
6437 break;
6440 #endif
6443 this_reload_insn = get_last_insn ();
6444 /* End this sequence. */
6445 *where = get_insns ();
6446 end_sequence ();
6449 /* Add a note saying the input reload reg
6450 dies in this insn, if anyone cares. */
6451 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6452 if (old != 0
6453 && reload_reg_rtx[j] != old
6454 && reload_reg_rtx[j] != 0
6455 && reload_out[j] == 0
6456 && ! reload_inherited[j]
6457 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
6459 register rtx reloadreg = reload_reg_rtx[j];
6461 #if 0
6462 /* We can't abort here because we need to support this for sched.c.
6463 It's not terrible to miss a REG_DEAD note, but we should try
6464 to figure out how to do this correctly. */
6465 /* The code below is incorrect for address-only reloads. */
6466 if (reload_when_needed[j] != RELOAD_OTHER
6467 && reload_when_needed[j] != RELOAD_FOR_INPUT)
6468 abort ();
6469 #endif
6471 /* Add a death note to this insn, for an input reload. */
6473 if ((reload_when_needed[j] == RELOAD_OTHER
6474 || reload_when_needed[j] == RELOAD_FOR_INPUT)
6475 && ! dead_or_set_p (insn, reloadreg))
6476 REG_NOTES (insn)
6477 = gen_rtx (EXPR_LIST, REG_DEAD,
6478 reloadreg, REG_NOTES (insn));
6481 /* When we inherit a reload, the last marked death of the reload reg
6482 may no longer really be a death. */
6483 if (reload_reg_rtx[j] != 0
6484 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
6485 && reload_inherited[j])
6487 /* Handle inheriting an output reload.
6488 Remove the death note from the output reload insn. */
6489 if (reload_spill_index[j] >= 0
6490 && GET_CODE (reload_in[j]) == REG
6491 && spill_reg_store[reload_spill_index[j]] != 0
6492 && find_regno_note (spill_reg_store[reload_spill_index[j]],
6493 REG_DEAD, REGNO (reload_reg_rtx[j])))
6494 remove_death (REGNO (reload_reg_rtx[j]),
6495 spill_reg_store[reload_spill_index[j]]);
6496 /* Likewise for input reloads that were inherited. */
6497 else if (reload_spill_index[j] >= 0
6498 && GET_CODE (reload_in[j]) == REG
6499 && spill_reg_store[reload_spill_index[j]] == 0
6500 && reload_inheritance_insn[j] != 0
6501 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
6502 REGNO (reload_reg_rtx[j])))
6503 remove_death (REGNO (reload_reg_rtx[j]),
6504 reload_inheritance_insn[j]);
6505 else
6507 rtx prev;
6509 /* We got this register from find_equiv_reg.
6510 Search back for its last death note and get rid of it.
6511 But don't search back too far.
6512 Don't go past a place where this reg is set,
6513 since a death note before that remains valid. */
6514 for (prev = PREV_INSN (insn);
6515 prev && GET_CODE (prev) != CODE_LABEL;
6516 prev = PREV_INSN (prev))
6517 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6518 && dead_or_set_p (prev, reload_reg_rtx[j]))
6520 if (find_regno_note (prev, REG_DEAD,
6521 REGNO (reload_reg_rtx[j])))
6522 remove_death (REGNO (reload_reg_rtx[j]), prev);
6523 break;
6528 /* We might have used find_equiv_reg above to choose an alternate
6529 place from which to reload. If so, and it died, we need to remove
6530 that death and move it to one of the insns we just made. */
6532 if (oldequiv_reg != 0
6533 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
6535 rtx prev, prev1;
6537 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
6538 prev = PREV_INSN (prev))
6539 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6540 && dead_or_set_p (prev, oldequiv_reg))
6542 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
6544 for (prev1 = this_reload_insn;
6545 prev1; prev1 = PREV_INSN (prev1))
6546 if (GET_RTX_CLASS (GET_CODE (prev1)) == 'i'
6547 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
6548 PATTERN (prev1)))
6550 REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
6551 oldequiv_reg,
6552 REG_NOTES (prev1));
6553 break;
6555 remove_death (REGNO (oldequiv_reg), prev);
6557 break;
6560 #endif
6562 /* If we are reloading a register that was recently stored in with an
6563 output-reload, see if we can prove there was
6564 actually no need to store the old value in it. */
6566 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
6567 && reload_in[j] != 0
6568 && GET_CODE (reload_in[j]) == REG
6569 #if 0
6570 /* There doesn't seem to be any reason to restrict this to pseudos
6571 and doing so loses in the case where we are copying from a
6572 register of the wrong class. */
6573 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
6574 #endif
6575 && spill_reg_store[reload_spill_index[j]] != 0
6576 /* This is unsafe if some other reload uses the same reg first. */
6577 && reload_reg_free_before_p (spill_regs[reload_spill_index[j]],
6578 reload_opnum[j], reload_when_needed[j])
6579 && dead_or_set_p (insn, reload_in[j])
6580 /* This is unsafe if operand occurs more than once in current
6581 insn. Perhaps some occurrences weren't reloaded. */
6582 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
6583 delete_output_reload (insn, j,
6584 spill_reg_store[reload_spill_index[j]]);
6586 /* Input-reloading is done. Now do output-reloading,
6587 storing the value from the reload-register after the main insn
6588 if reload_out[j] is nonzero.
6590 ??? At some point we need to support handling output reloads of
6591 JUMP_INSNs or insns that set cc0. */
6592 old = reload_out[j];
6593 if (old != 0
6594 && reload_reg_rtx[j] != old
6595 && reload_reg_rtx[j] != 0)
6597 register rtx reloadreg = reload_reg_rtx[j];
6598 register rtx second_reloadreg = 0;
6599 rtx note, p;
6600 enum machine_mode mode;
6601 int special = 0;
6603 /* An output operand that dies right away does need a reload,
6604 but nothing need be copied back into it from the reload reg. Show the new location in the
6605 REG_UNUSED note. */
6606 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
6607 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
6609 XEXP (note, 0) = reload_reg_rtx[j];
6610 continue;
6612 /* Likewise for a SUBREG of an operand that dies. */
6613 else if (GET_CODE (old) == SUBREG
6614 && GET_CODE (SUBREG_REG (old)) == REG
6615 && 0 != (note = find_reg_note (insn, REG_UNUSED,
6616 SUBREG_REG (old))))
6618 XEXP (note, 0) = gen_lowpart_common (GET_MODE (old),
6619 reload_reg_rtx[j]);
6620 continue;
6622 else if (GET_CODE (old) == SCRATCH)
6623 /* If we aren't optimizing, there won't be a REG_UNUSED note,
6624 but we don't want to make an output reload. */
6625 continue;
6627 #if 0
6628 /* Strip off of OLD any size-increasing SUBREGs such as
6629 (SUBREG:SI foo:QI 0). */
6631 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
6632 && (GET_MODE_SIZE (GET_MODE (old))
6633 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
6634 old = SUBREG_REG (old);
6635 #endif
6637 /* If this is a JUMP_INSN, we can't support output reloads yet. */
6638 if (GET_CODE (insn) == JUMP_INSN)
6639 abort ();
6641 if (reload_when_needed[j] == RELOAD_OTHER)
6642 start_sequence ();
6643 else
6644 push_to_sequence (output_reload_insns[reload_opnum[j]]);
6646 /* Determine the mode to reload in.
6647 See comments above (for input reloading). */
6649 mode = GET_MODE (old);
6650 if (mode == VOIDmode)
6652 /* VOIDmode should never happen for an output. */
6653 if (asm_noperands (PATTERN (insn)) < 0)
6654 /* It's the compiler's fault. */
6655 fatal_insn ("VOIDmode on an output", insn);
6656 error_for_asm (insn, "output operand is constant in `asm'");
6657 /* Prevent crash--use something we know is valid. */
6658 mode = word_mode;
6659 old = gen_rtx (REG, mode, REGNO (reloadreg));
6662 if (GET_MODE (reloadreg) != mode)
6663 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6665 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6667 /* If we need two reload regs, set RELOADREG to the intermediate
6668 one, since it will be stored into OLD. We might need a secondary
6669 register only for an input reload, so check again here. */
6671 if (reload_secondary_out_reload[j] >= 0)
6673 rtx real_old = old;
6675 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
6676 && reg_equiv_mem[REGNO (old)] != 0)
6677 real_old = reg_equiv_mem[REGNO (old)];
6679 if ((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
6680 mode, real_old)
6681 != NO_REGS))
6683 second_reloadreg = reloadreg;
6684 reloadreg = reload_reg_rtx[reload_secondary_out_reload[j]];
6686 /* See if RELOADREG is to be used as a scratch register
6687 or as an intermediate register. */
6688 if (reload_secondary_out_icode[j] != CODE_FOR_nothing)
6690 emit_insn ((GEN_FCN (reload_secondary_out_icode[j])
6691 (real_old, second_reloadreg, reloadreg)));
6692 special = 1;
6694 else
6696 /* See if we need both a scratch and intermediate reload
6697 register. */
6699 int secondary_reload = reload_secondary_out_reload[j];
6700 enum insn_code tertiary_icode
6701 = reload_secondary_out_icode[secondary_reload];
6703 if (GET_MODE (reloadreg) != mode)
6704 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6706 if (tertiary_icode != CODE_FOR_nothing)
6708 rtx third_reloadreg
6709 = reload_reg_rtx[reload_secondary_out_reload[secondary_reload]];
6710 rtx tem;
6712 /* Copy the primary reload reg to the secondary reload reg
6713 (note that these have been swapped above), then copy the
6714 secondary reload reg to OLD using our insn. */
6716 /* If REAL_OLD is a paradoxical SUBREG, remove it
6717 and try to put the opposite SUBREG on
6718 RELOADREG. */
6719 if (GET_CODE (real_old) == SUBREG
6720 && (GET_MODE_SIZE (GET_MODE (real_old))
6721 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
6722 && 0 != (tem = gen_lowpart_common
6723 (GET_MODE (SUBREG_REG (real_old)),
6724 reloadreg)))
6725 real_old = SUBREG_REG (real_old), reloadreg = tem;
6727 gen_reload (reloadreg, second_reloadreg,
6728 reload_opnum[j], reload_when_needed[j]);
6729 emit_insn ((GEN_FCN (tertiary_icode)
6730 (real_old, reloadreg, third_reloadreg)));
6731 special = 1;
6734 else
6735 /* Copy between the reload regs here and then to
6736 OUT later. */
6738 gen_reload (reloadreg, second_reloadreg,
6739 reload_opnum[j], reload_when_needed[j]);
6743 #endif
6745 /* Output the last reload insn. */
6746 if (! special)
6747 gen_reload (old, reloadreg, reload_opnum[j],
6748 reload_when_needed[j]);
6750 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6751 /* If final will look at death notes for this reg,
6752 put one on the last output-reload insn to use it. Similarly
6753 for any secondary register. */
6754 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
6755 for (p = get_last_insn (); p; p = PREV_INSN (p))
6756 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6757 && reg_overlap_mentioned_for_reload_p (reloadreg,
6758 PATTERN (p)))
6759 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6760 reloadreg, REG_NOTES (p));
6762 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6763 if (! special && second_reloadreg
6764 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
6765 for (p = get_last_insn (); p; p = PREV_INSN (p))
6766 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6767 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
6768 PATTERN (p)))
6769 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6770 second_reloadreg, REG_NOTES (p));
6771 #endif
6772 #endif
6773 /* Look at all insns we emitted, just to be safe. */
6774 for (p = get_insns (); p; p = NEXT_INSN (p))
6775 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
6777 /* If this output reload doesn't come from a spill reg,
6778 clear any memory of reloaded copies of the pseudo reg.
6779 If this output reload comes from a spill reg,
6780 reg_has_output_reload will make this do nothing. */
6781 note_stores (PATTERN (p), forget_old_reloads_1);
6783 if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p))
6784 && reload_spill_index[j] >= 0)
6785 new_spill_reg_store[reload_spill_index[j]] = p;
6788 if (reload_when_needed[j] == RELOAD_OTHER)
6790 emit_insns (other_output_reload_insns[reload_opnum[j]]);
6791 other_output_reload_insns[reload_opnum[j]] = get_insns ();
6793 else
6794 output_reload_insns[reload_opnum[j]] = get_insns ();
6796 end_sequence ();
6800 /* Now write all the insns we made for reloads in the order expected by
6801 the allocation functions. Prior to the insn being reloaded, we write
6802 the following reloads:
6804 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
6806 RELOAD_OTHER reloads.
6808 For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
6809 by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
6810 RELOAD_FOR_INPUT reload for the operand.
6812 RELOAD_FOR_OPADDR_ADDRS reloads.
6814 RELOAD_FOR_OPERAND_ADDRESS reloads.
6816 After the insn being reloaded, we write the following:
6818 For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
6819 by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
6820 RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
6821 reloads for the operand. The RELOAD_OTHER output reloads are
6822 output in descending order by reload number. */
6824 emit_insns_before (other_input_address_reload_insns, before_insn);
6825 emit_insns_before (other_input_reload_insns, before_insn);
6827 for (j = 0; j < reload_n_operands; j++)
6829 emit_insns_before (inpaddr_address_reload_insns[j], before_insn);
6830 emit_insns_before (input_address_reload_insns[j], before_insn);
6831 emit_insns_before (input_reload_insns[j], before_insn);
6834 emit_insns_before (other_operand_reload_insns, before_insn);
6835 emit_insns_before (operand_reload_insns, before_insn);
6837 for (j = 0; j < reload_n_operands; j++)
6839 emit_insns_before (outaddr_address_reload_insns[j], following_insn);
6840 emit_insns_before (output_address_reload_insns[j], following_insn);
6841 emit_insns_before (output_reload_insns[j], following_insn);
6842 emit_insns_before (other_output_reload_insns[j], following_insn);
6845 /* Move death notes from INSN
6846 to output-operand-address and output reload insns. */
6847 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6849 rtx insn1;
6850 /* Loop over those insns, last ones first. */
6851 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
6852 insn1 = PREV_INSN (insn1))
6853 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
6855 rtx source = SET_SRC (PATTERN (insn1));
6856 rtx dest = SET_DEST (PATTERN (insn1));
6858 /* The note we will examine next. */
6859 rtx reg_notes = REG_NOTES (insn);
6860 /* The place that pointed to this note. */
6861 rtx *prev_reg_note = &REG_NOTES (insn);
6863 /* If the note is for something used in the source of this
6864 reload insn, or in the output address, move the note. */
6865 while (reg_notes)
6867 rtx next_reg_notes = XEXP (reg_notes, 1);
6868 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
6869 && GET_CODE (XEXP (reg_notes, 0)) == REG
6870 && ((GET_CODE (dest) != REG
6871 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6872 dest))
6873 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6874 source)))
6876 *prev_reg_note = next_reg_notes;
6877 XEXP (reg_notes, 1) = REG_NOTES (insn1);
6878 REG_NOTES (insn1) = reg_notes;
6880 else
6881 prev_reg_note = &XEXP (reg_notes, 1);
6883 reg_notes = next_reg_notes;
6887 #endif
6889 /* For all the spill regs newly reloaded in this instruction,
6890 record what they were reloaded from, so subsequent instructions
6891 can inherit the reloads.
6893 Update spill_reg_store for the reloads of this insn.
6894 Copy the elements that were updated in the loop above. */
6896 for (j = 0; j < n_reloads; j++)
6898 register int r = reload_order[j];
6899 register int i = reload_spill_index[r];
6901 /* I is nonneg if this reload used one of the spill regs.
6902 If reload_reg_rtx[r] is 0, this is an optional reload
6903 that we opted to ignore. */
6905 if (i >= 0 && reload_reg_rtx[r] != 0)
6907 int nr
6908 = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
6909 int k;
6910 int part_reaches_end = 0;
6911 int all_reaches_end = 1;
6913 /* For a multi register reload, we need to check if all or part
6914 of the value lives to the end. */
6915 for (k = 0; k < nr; k++)
6917 if (reload_reg_reaches_end_p (spill_regs[i] + k, reload_opnum[r],
6918 reload_when_needed[r]))
6919 part_reaches_end = 1;
6920 else
6921 all_reaches_end = 0;
6924 /* Ignore reloads that don't reach the end of the insn in their
6925 entirety. */
6926 if (all_reaches_end)
6928 /* First, clear out memory of what used to be in this spill reg.
6929 If consecutive registers are used, clear them all. */
6931 for (k = 0; k < nr; k++)
6933 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
6934 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
6937 /* Maybe the spill reg contains a copy of reload_out. */
6938 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6940 register int nregno = REGNO (reload_out[r]);
6941 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6942 : HARD_REGNO_NREGS (nregno,
6943 GET_MODE (reload_reg_rtx[r])));
6945 spill_reg_store[i] = new_spill_reg_store[i];
6946 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6948 /* If NREGNO is a hard register, it may occupy more than
6949 one register. If it does, say what is in the
6950 rest of the registers assuming that both registers
6951 agree on how many words the object takes. If not,
6952 invalidate the subsequent registers. */
6954 if (nregno < FIRST_PSEUDO_REGISTER)
6955 for (k = 1; k < nnr; k++)
6956 reg_last_reload_reg[nregno + k]
6957 = (nr == nnr
6958 ? gen_rtx (REG,
6959 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6960 REGNO (reload_reg_rtx[r]) + k)
6961 : 0);
6963 /* Now do the inverse operation. */
6964 for (k = 0; k < nr; k++)
6966 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6967 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
6968 ? nregno
6969 : nregno + k);
6970 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
6974 /* Maybe the spill reg contains a copy of reload_in. Only do
6975 something if there will not be an output reload for
6976 the register being reloaded. */
6977 else if (reload_out[r] == 0
6978 && reload_in[r] != 0
6979 && ((GET_CODE (reload_in[r]) == REG
6980 && ! reg_has_output_reload[REGNO (reload_in[r])])
6981 || (GET_CODE (reload_in_reg[r]) == REG
6982 && ! reg_has_output_reload[REGNO (reload_in_reg[r])])))
6984 register int nregno;
6985 int nnr;
6987 if (GET_CODE (reload_in[r]) == REG)
6988 nregno = REGNO (reload_in[r]);
6989 else
6990 nregno = REGNO (reload_in_reg[r]);
6992 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6993 : HARD_REGNO_NREGS (nregno,
6994 GET_MODE (reload_reg_rtx[r])));
6996 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6998 if (nregno < FIRST_PSEUDO_REGISTER)
6999 for (k = 1; k < nnr; k++)
7000 reg_last_reload_reg[nregno + k]
7001 = (nr == nnr
7002 ? gen_rtx (REG,
7003 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
7004 REGNO (reload_reg_rtx[r]) + k)
7005 : 0);
7007 /* Unless we inherited this reload, show we haven't
7008 recently done a store. */
7009 if (! reload_inherited[r])
7010 spill_reg_store[i] = 0;
7012 for (k = 0; k < nr; k++)
7014 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
7015 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
7016 ? nregno
7017 : nregno + k);
7018 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
7019 = insn;
7024 /* However, if part of the reload reaches the end, then we must
7025 invalidate the old info for the part that survives to the end. */
7026 else if (part_reaches_end)
7028 for (k = 0; k < nr; k++)
7029 if (reload_reg_reaches_end_p (spill_regs[i] + k,
7030 reload_opnum[r],
7031 reload_when_needed[r]))
7033 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
7034 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
7039 /* The following if-statement was #if 0'd in 1.34 (or before...).
7040 It's reenabled in 1.35 because supposedly nothing else
7041 deals with this problem. */
7043 /* If a register gets output-reloaded from a non-spill register,
7044 that invalidates any previous reloaded copy of it.
7045 But forget_old_reloads_1 won't get to see it, because
7046 it thinks only about the original insn. So invalidate it here. */
7047 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
7049 register int nregno = REGNO (reload_out[r]);
7050 if (nregno >= FIRST_PSEUDO_REGISTER)
7051 reg_last_reload_reg[nregno] = 0;
7052 else
7054 int num_regs = HARD_REGNO_NREGS (nregno, GET_MODE (reload_out[r]));
7056 while (num_regs-- > 0)
7057 reg_last_reload_reg[nregno + num_regs] = 0;
7063 /* Emit code to perform a reload from IN (which may be a reload register) to
7064 OUT (which may also be a reload register). IN or OUT is from operand
7065 OPNUM with reload type TYPE.
7067 Returns first insn emitted. */
7070 gen_reload (out, in, opnum, type)
7071 rtx out;
7072 rtx in;
7073 int opnum;
7074 enum reload_type type;
7076 rtx last = get_last_insn ();
7077 rtx tem;
7079 /* If IN is a paradoxical SUBREG, remove it and try to put the
7080 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
7081 if (GET_CODE (in) == SUBREG
7082 && (GET_MODE_SIZE (GET_MODE (in))
7083 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
7084 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
7085 in = SUBREG_REG (in), out = tem;
7086 else if (GET_CODE (out) == SUBREG
7087 && (GET_MODE_SIZE (GET_MODE (out))
7088 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
7089 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
7090 out = SUBREG_REG (out), in = tem;
7092 /* How to do this reload can get quite tricky. Normally, we are being
7093 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
7094 register that didn't get a hard register. In that case we can just
7095 call emit_move_insn.
7097 We can also be asked to reload a PLUS that adds a register or a MEM to
7098 another register, constant or MEM. This can occur during frame pointer
7099 elimination and while reloading addresses. This case is handled by
7100 trying to emit a single insn to perform the add. If it is not valid,
7101 we use a two insn sequence.
7103 Finally, we could be called to handle an 'o' constraint by putting
7104 an address into a register. In that case, we first try to do this
7105 with a named pattern of "reload_load_address". If no such pattern
7106 exists, we just emit a SET insn and hope for the best (it will normally
7107 be valid on machines that use 'o').
7109 This entire process is made complex by the fact that reload will never
7110 process the insns we generate here (so we must ensure that they will
7111 fit their constraints), and by the fact that parts of IN might be
7112 being reloaded separately and replaced with spill registers.
7113 Because of this, we are, in some sense, just guessing the right approach
7114 here. The one listed above seems to work.
7116 ??? At some point, this whole thing needs to be rethought. */
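/* For instance (purely as an illustration), frame pointer elimination can
ask us to reload something like (plus:SI (reg:SI fp) (const_int 8)) into
the reload register. The code below first tries the single insn
(set OUT (plus FP 8)); if that is not recognized or fails its constraints,
it falls back to copying one operand into OUT and then adding the other. */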
7118 if (GET_CODE (in) == PLUS
7119 && (GET_CODE (XEXP (in, 0)) == REG
7120 || GET_CODE (XEXP (in, 0)) == SUBREG
7121 || GET_CODE (XEXP (in, 0)) == MEM)
7122 && (GET_CODE (XEXP (in, 1)) == REG
7123 || GET_CODE (XEXP (in, 1)) == SUBREG
7124 || CONSTANT_P (XEXP (in, 1))
7125 || GET_CODE (XEXP (in, 1)) == MEM))
7127 /* We need to compute the sum of a register or a MEM and another
7128 register, constant, or MEM, and put it into the reload
7129 register. The best possible way of doing this is if the machine
7130 has a three-operand ADD insn that accepts the required operands.
7132 The simplest approach is to try to generate such an insn and see if it
7133 is recognized and matches its constraints. If so, it can be used.
7135 It might be better not to actually emit the insn unless it is valid,
7136 but we need to pass the insn as an operand to `recog' and
7137 `insn_extract' and it is simpler to emit and then delete the insn if
7138 not valid than to dummy things up. */
7140 rtx op0, op1, tem, insn;
7141 int code;
7143 op0 = find_replacement (&XEXP (in, 0));
7144 op1 = find_replacement (&XEXP (in, 1));
7146 /* Since constraint checking is strict, commutativity won't be
7147 checked, so we need to do that here to avoid spurious failure
7148 if the add instruction is two-address and the second operand
7149 of the add is the same as the reload reg, which is frequently
7150 the case. If the insn would be A = B + A, rearrange it so
7151 it will be A = A + B as constrain_operands expects. */
7153 if (GET_CODE (XEXP (in, 1)) == REG
7154 && REGNO (out) == REGNO (XEXP (in, 1)))
7155 tem = op0, op0 = op1, op1 = tem;
7157 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
7158 in = gen_rtx (PLUS, GET_MODE (in), op0, op1);
7160 insn = emit_insn (gen_rtx (SET, VOIDmode, out, in));
7161 code = recog_memoized (insn);
7163 if (code >= 0)
7165 insn_extract (insn);
7166 /* We want constrain operands to treat this insn strictly in
7167 its validity determination, i.e., the way it would after reload
7168 has completed. */
7169 if (constrain_operands (code, 1))
7170 return insn;
7173 delete_insns_since (last);
7175 /* If that failed, we must use a conservative two-insn sequence.
7176 Use move to copy constant, MEM, or pseudo register to the reload
7177 register since "move" will be able to handle an arbitrary operand,
7178 unlike add which can't, in general. Then add the registers.
7180 If there is another way to do this for a specific machine, a
7181 DEFINE_PEEPHOLE should be specified that recognizes the sequence
7182 we emit below. */
7184 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM || GET_CODE (op1) == SUBREG
7185 || (GET_CODE (op1) == REG
7186 && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
7187 tem = op0, op0 = op1, op1 = tem;
7189 gen_reload (out, op0, opnum, type);
7191 /* If OP0 and OP1 are the same, we can use OUT for OP1.
7192 This fixes a problem on the 32K where the stack pointer cannot
7193 be used as an operand of an add insn. */
7195 if (rtx_equal_p (op0, op1))
7196 op1 = out;
7198 insn = emit_insn (gen_add2_insn (out, op1));
7200 /* If that failed, copy the address register to the reload register.
7201 Then add the constant to the reload register. */
7203 code = recog_memoized (insn);
7205 if (code >= 0)
7207 insn_extract (insn);
7208 /* We want constrain operands to treat this insn strictly in
7209 its validity determination, i.e., the way it would after reload
7210 has completed. */
7211 if (constrain_operands (code, 1))
7212 return insn;
7215 delete_insns_since (last);
7217 gen_reload (out, op1, opnum, type);
7218 emit_insn (gen_add2_insn (out, op0));
7221 #ifdef SECONDARY_MEMORY_NEEDED
7222 /* If we need a memory location to do the move, do it that way. */
7223 else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
7224 && GET_CODE (out) == REG && REGNO (out) < FIRST_PSEUDO_REGISTER
7225 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
7226 REGNO_REG_CLASS (REGNO (out)),
7227 GET_MODE (out)))
7229 /* Get the memory to use and rewrite both registers to its mode. */
7230 rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
7232 if (GET_MODE (loc) != GET_MODE (out))
7233 out = gen_rtx (REG, GET_MODE (loc), REGNO (out));
7235 if (GET_MODE (loc) != GET_MODE (in))
7236 in = gen_rtx (REG, GET_MODE (loc), REGNO (in));
7238 gen_reload (loc, in, opnum, type);
7239 gen_reload (out, loc, opnum, type);
7241 #endif
7243 /* If IN is a simple operand, use gen_move_insn. */
7244 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
7245 emit_insn (gen_move_insn (out, in));
7247 #ifdef HAVE_reload_load_address
7248 else if (HAVE_reload_load_address)
7249 emit_insn (gen_reload_load_address (out, in));
7250 #endif
7252 /* Otherwise, just write (set OUT IN) and hope for the best. */
7253 else
7254 emit_insn (gen_rtx (SET, VOIDmode, out, in));
7256 /* Return the first insn emitted.
7257 We can not just return get_last_insn, because there may have
7258 been multiple instructions emitted. Also note that gen_move_insn may
7259 emit more than one insn itself, so we can not assume that there is one
7260 insn emitted per emit_insn_before call. */
7262 return last ? NEXT_INSN (last) : get_insns ();
7265 /* Delete a previously made output-reload
7266 whose result we now believe is not needed.
7267 First we double-check.
7269 INSN is the insn now being processed.
7270 OUTPUT_RELOAD_INSN is the insn of the output reload.
7271 J is the reload-number for this insn. */
7273 static void
7274 delete_output_reload (insn, j, output_reload_insn)
7275 rtx insn;
7276 int j;
7277 rtx output_reload_insn;
7279 register rtx i1;
7281 /* Get the raw pseudo-register referred to. */
7283 rtx reg = reload_in[j];
7284 while (GET_CODE (reg) == SUBREG)
7285 reg = SUBREG_REG (reg);
7287 /* If the pseudo-reg we are reloading is no longer referenced
7288 anywhere between the store into it and here,
7289 and no jumps or labels intervene, then the value can get
7290 here through the reload reg alone.
7291 Otherwise, give up--return. */
7292 for (i1 = NEXT_INSN (output_reload_insn);
7293 i1 != insn; i1 = NEXT_INSN (i1))
7295 if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
7296 return;
7297 if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
7298 && reg_mentioned_p (reg, PATTERN (i1)))
7299 return;
7302 if (cannot_omit_stores[REGNO (reg)])
7303 return;
7305 /* If this insn will store in the pseudo again,
7306 the previous store can be removed. */
7307 if (reload_out[j] == reload_in[j])
7308 delete_insn (output_reload_insn);
7310 /* See if the pseudo reg has been completely replaced
7311 with reload regs. If so, delete the store insn
7312 and forget we had a stack slot for the pseudo. */
7313 else if (REG_N_DEATHS (REGNO (reg)) == 1
7314 && REG_BASIC_BLOCK (REGNO (reg)) >= 0
7315 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
7317 rtx i2;
7319 /* We know that it was used only between here
7320 and the beginning of the current basic block.
7321 (We also know that the last use before INSN was
7322 the output reload we are thinking of deleting, but never mind that.)
7323 Search that range; see if any ref remains. */
7324 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7326 rtx set = single_set (i2);
7328 /* Uses which just store in the pseudo don't count,
7329 since if they are the only uses, they are dead. */
7330 if (set != 0 && SET_DEST (set) == reg)
7331 continue;
7332 if (GET_CODE (i2) == CODE_LABEL
7333 || GET_CODE (i2) == JUMP_INSN)
7334 break;
7335 if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
7336 && reg_mentioned_p (reg, PATTERN (i2)))
7337 /* Some other ref remains;
7338 we can't do anything. */
7339 return;
7342 /* Delete the now-dead stores into this pseudo. */
7343 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7345 rtx set = single_set (i2);
7347 if (set != 0 && SET_DEST (set) == reg)
7349 /* This might be a basic block head,
7350 thus don't use delete_insn. */
7351 PUT_CODE (i2, NOTE);
7352 NOTE_SOURCE_FILE (i2) = 0;
7353 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
7355 if (GET_CODE (i2) == CODE_LABEL
7356 || GET_CODE (i2) == JUMP_INSN)
7357 break;
7360 /* For the debugging info,
7361 say the pseudo lives in this reload reg. */
7362 reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
7363 alter_reg (REGNO (reg), -1);
7367 /* Output reload-insns to reload VALUE into RELOADREG.
7368 VALUE is an autoincrement or autodecrement RTX whose operand
7369 is a register or memory location;
7370 so reloading involves incrementing that location.
7372 INC_AMOUNT is the number to increment or decrement by (always positive).
7373 This cannot be deduced from VALUE. */
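/* For example (illustrative only), VALUE might be (post_inc:SI (reg:SI sp))
with INC_AMOUNT 4: we first copy the stack pointer into RELOADREG and then
arrange for the location itself to be incremented by 4. */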
7375 static void
7376 inc_for_reload (reloadreg, value, inc_amount)
7377 rtx reloadreg;
7378 rtx value;
7379 int inc_amount;
7381 /* REG or MEM to be copied and incremented. */
7382 rtx incloc = XEXP (value, 0);
7383 /* Nonzero if increment after copying. */
7384 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
7385 rtx last;
7386 rtx inc;
7387 rtx add_insn;
7388 int code;
7390 /* No hard register is equivalent to this register after
7391 inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
7392 we could inc/dec that register as well (maybe even using it for
7393 the source), but I'm not sure it's worth worrying about. */
7394 if (GET_CODE (incloc) == REG)
7395 reg_last_reload_reg[REGNO (incloc)] = 0;
7397 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
7398 inc_amount = - inc_amount;
7400 inc = GEN_INT (inc_amount);
7402 /* If this is post-increment, first copy the location to the reload reg. */
7403 if (post)
7404 emit_insn (gen_move_insn (reloadreg, incloc));
7406 /* See if we can directly increment INCLOC. Use a method similar to that
7407 in gen_reload. */
7409 last = get_last_insn ();
7410 add_insn = emit_insn (gen_rtx (SET, VOIDmode, incloc,
7411 gen_rtx (PLUS, GET_MODE (incloc),
7412 incloc, inc)));
7414 code = recog_memoized (add_insn);
7415 if (code >= 0)
7417 insn_extract (add_insn);
7418 if (constrain_operands (code, 1))
7420 /* If this is a pre-increment and we have incremented the value
7421 where it lives, copy the incremented value to RELOADREG to
7422 be used as an address. */
7424 if (! post)
7425 emit_insn (gen_move_insn (reloadreg, incloc));
7427 return;
7431 delete_insns_since (last);
7433 /* If we couldn't do the increment directly, we must increment in RELOADREG.
7434 The way we do this depends on whether this is pre- or post-increment.
7435 For pre-increment, copy INCLOC to the reload register, increment it
7436 there, then save back. */
7438 if (! post)
7440 emit_insn (gen_move_insn (reloadreg, incloc));
7441 emit_insn (gen_add2_insn (reloadreg, inc));
7442 emit_insn (gen_move_insn (incloc, reloadreg));
7444 else
7446 /* Postincrement.
7447 Because this might be a jump insn or a compare, and because RELOADREG
7448 may not be available after the insn in an input reload, we must do
7449 the incrementation before the insn being reloaded for.
7451 We have already copied INCLOC to RELOADREG. Increment the copy in
7452 RELOADREG, save that back, then decrement RELOADREG so it has
7453 the original value. */
7455 emit_insn (gen_add2_insn (reloadreg, inc));
7456 emit_insn (gen_move_insn (incloc, reloadreg));
7457 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
7460 return;
7463 /* Return 1 if we are certain that the constraint-string STRING allows
7464 the hard register REG. Return 0 if we can't be sure of this. */
7466 static int
7467 constraint_accepts_reg_p (string, reg)
7468 char *string;
7469 rtx reg;
7471 int value = 0;
7472 int regno = true_regnum (reg);
7473 int c;
7475 /* Initialize for first alternative. */
7476 value = 0;
7477 /* Check that each alternative contains `g' or `r'. */
7478 while (1)
7479 switch (c = *string++)
7481 case 0:
7482 /* If an alternative lacks `g' or `r', we lose. */
7483 return value;
7484 case ',':
7485 /* If an alternative lacks `g' or `r', we lose. */
7486 if (value == 0)
7487 return 0;
7488 /* Initialize for next alternative. */
7489 value = 0;
7490 break;
7491 case 'g':
7492 case 'r':
7493 /* Any general reg wins for this alternative. */
7494 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
7495 value = 1;
7496 break;
7497 default:
7498 /* Any reg in specified class wins for this alternative. */
7500 enum reg_class class = REG_CLASS_FROM_LETTER (c);
7502 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
7503 value = 1;
7508 /* Return the number of places FIND appears within X, but don't count
7509 an occurrence if some SET_DEST is FIND. */
7511 static int
7512 count_occurrences (x, find)
7513 register rtx x, find;
7515 register int i, j;
7516 register enum rtx_code code;
7517 register char *format_ptr;
7518 int count;
7520 if (x == find)
7521 return 1;
7522 if (x == 0)
7523 return 0;
7525 code = GET_CODE (x);
7527 switch (code)
7529 case REG:
7530 case QUEUED:
7531 case CONST_INT:
7532 case CONST_DOUBLE:
7533 case SYMBOL_REF:
7534 case CODE_LABEL:
7535 case PC:
7536 case CC0:
7537 return 0;
7539 case SET:
7540 if (SET_DEST (x) == find)
7541 return count_occurrences (SET_SRC (x), find);
7542 break;
7544 default:
7545 break;
7548 format_ptr = GET_RTX_FORMAT (code);
7549 count = 0;
7551 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7553 switch (*format_ptr++)
7555 case 'e':
7556 count += count_occurrences (XEXP (x, i), find);
7557 break;
7559 case 'E':
7560 if (XVEC (x, i) != NULL)
7562 for (j = 0; j < XVECLEN (x, i); j++)
7563 count += count_occurrences (XVECEXP (x, i, j), find);
7565 break;
7568 return count;
7571 /* This array holds values which are equivalent to a hard register
7572 during reload_cse_regs. Each array element is an EXPR_LIST of
7573 values. Each time a hard register is set, we set the corresponding
7574 array element to the value. Each time a hard register is copied
7575 into memory, we add the memory location to the corresponding array
7576 element. We don't store values or memory addresses with side
7577 effects in this array.
7579 If the value is a CONST_INT, then the mode of the containing
7580 EXPR_LIST is the mode in which that CONST_INT was referenced.
7582 We sometimes clobber a specific entry in a list. In that case, we
7583 just set XEXP (list-entry, 0) to 0. */
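/* As an illustration: after (set (reg:SI 1) (const_int 5)) followed by
(set (mem:SI (reg:SI 2)) (reg:SI 1)), reg_values[1] would list both
(const_int 5) and that MEM as values equivalent to hard register 1. */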
7585 static rtx *reg_values;
7587 /* This is a preallocated REG rtx which we use as a temporary in
7588 reload_cse_invalidate_regno, so that we don't need to allocate a
7589 new one each time through a loop in that function. */
7591 static rtx invalidate_regno_rtx;
7593 /* This is a set of registers for which we must remove REG_DEAD notes in
7594 previous insns, because our modifications made them invalid. That can
7595 happen if we introduced the register into the current insn, or we deleted
7596 the current insn which used to set the register. */
7598 static HARD_REG_SET no_longer_dead_regs;
7600 /* Invalidate any entries in reg_values which depend on REGNO,
7601 including those for REGNO itself. This is called if REGNO is
7602 changing. If CLOBBER is true, then always forget anything we
7603 currently know about REGNO. MODE is the mode of the assignment to
7604 REGNO, which is used to determine how many hard registers are being
7605 changed. If MODE is VOIDmode, then only REGNO is being changed;
7606 this is used when invalidating call clobbered registers across a
7607 call. */
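/* Note that invalidation works in two directions: any entry in reg_values
whose recorded value mentions one of the registers being changed is
cleared, and any earlier hard register whose own multi-word value extends
over REGNO is invalidated by the final loop below. */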
7609 static void
7610 reload_cse_invalidate_regno (regno, mode, clobber)
7611 int regno;
7612 enum machine_mode mode;
7613 int clobber;
7615 int endregno;
7616 register int i;
7618 /* Our callers don't always go through true_regnum; we may see a
7619 pseudo-register here from a CLOBBER or the like. We probably
7620 won't ever see a pseudo-register that has a real register number,
7621 but we check anyhow for safety. */
7622 if (regno >= FIRST_PSEUDO_REGISTER)
7623 regno = reg_renumber[regno];
7624 if (regno < 0)
7625 return;
7627 if (mode == VOIDmode)
7628 endregno = regno + 1;
7629 else
7630 endregno = regno + HARD_REGNO_NREGS (regno, mode);
7632 if (clobber)
7633 for (i = regno; i < endregno; i++)
7634 reg_values[i] = 0;
7636 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7638 rtx x;
7640 for (x = reg_values[i]; x; x = XEXP (x, 1))
7642 if (XEXP (x, 0) != 0
7643 && refers_to_regno_p (regno, endregno, XEXP (x, 0), NULL_PTR))
7645 /* If this is the only entry on the list, clear
7646 reg_values[i]. Otherwise, just clear this entry on
7647 the list. */
7648 if (XEXP (x, 1) == 0 && x == reg_values[i])
7650 reg_values[i] = 0;
7651 break;
7653 XEXP (x, 0) = 0;
7658 /* We must look at earlier registers, in case REGNO is part of a
7659 multi word value but is not the first register. If an earlier
7660 register has a value in a mode which overlaps REGNO, then we must
7661 invalidate that earlier register. Note that we do not need to
7662 check REGNO or later registers (we must not check REGNO itself,
7663 because we would incorrectly conclude that there was a conflict). */
7665 for (i = 0; i < regno; i++)
7667 rtx x;
7669 for (x = reg_values[i]; x; x = XEXP (x, 1))
7671 if (XEXP (x, 0) != 0)
7673 PUT_MODE (invalidate_regno_rtx, GET_MODE (x));
7674 REGNO (invalidate_regno_rtx) = i;
7675 if (refers_to_regno_p (regno, endregno, invalidate_regno_rtx,
7676 NULL_PTR))
7678 reload_cse_invalidate_regno (i, VOIDmode, 1);
7679 break;
7686 /* The memory at address MEM_BASE is being changed.
7687 Return whether this change will invalidate VAL. */
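/* BLKmode references are treated conservatively: a BLKmode MEM_BASE (as
used for a non-const call) is assumed to conflict with any MEM in VAL. */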
7689 static int
7690 reload_cse_mem_conflict_p (mem_base, val)
7691 rtx mem_base;
7692 rtx val;
7694 enum rtx_code code;
7695 char *fmt;
7696 int i;
7698 code = GET_CODE (val);
7699 switch (code)
7701 /* Get rid of a few simple cases quickly. */
7702 case REG:
7703 case PC:
7704 case CC0:
7705 case SCRATCH:
7706 case CONST:
7707 case CONST_INT:
7708 case CONST_DOUBLE:
7709 case SYMBOL_REF:
7710 case LABEL_REF:
7711 return 0;
7713 case MEM:
7714 if (GET_MODE (mem_base) == BLKmode
7715 || GET_MODE (val) == BLKmode)
7716 return 1;
7717 if (anti_dependence (val, mem_base))
7718 return 1;
7719 /* The address may contain nested MEMs. */
7720 break;
7722 default:
7723 break;
7726 fmt = GET_RTX_FORMAT (code);
7728 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7730 if (fmt[i] == 'e')
7732 if (reload_cse_mem_conflict_p (mem_base, XEXP (val, i)))
7733 return 1;
7735 else if (fmt[i] == 'E')
7737 int j;
7739 for (j = 0; j < XVECLEN (val, i); j++)
7740 if (reload_cse_mem_conflict_p (mem_base, XVECEXP (val, i, j)))
7741 return 1;
7745 return 0;
7748 /* Invalidate any entries in reg_values which are changed because of a
7749 store to MEM_RTX. If this is called because of a non-const call
7750 instruction, MEM_RTX is (mem:BLK const0_rtx). */
7752 static void
7753 reload_cse_invalidate_mem (mem_rtx)
7754 rtx mem_rtx;
7756 register int i;
7758 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7760 rtx x;
7762 for (x = reg_values[i]; x; x = XEXP (x, 1))
7764 if (XEXP (x, 0) != 0
7765 && reload_cse_mem_conflict_p (mem_rtx, XEXP (x, 0)))
7767 /* If this is the only entry on the list, clear
7768 reg_values[i]. Otherwise, just clear this entry on
7769 the list. */
7770 if (XEXP (x, 1) == 0 && x == reg_values[i])
7772 reg_values[i] = 0;
7773 break;
7775 XEXP (x, 0) = 0;
7781 /* Invalidate DEST, which is being assigned to or clobbered. The
7782 second parameter exists so that this function can be passed to
7783 note_stores; it is ignored. */
7785 static void
7786 reload_cse_invalidate_rtx (dest, ignore)
7787 rtx dest;
7788 rtx ignore;
7790 while (GET_CODE (dest) == STRICT_LOW_PART
7791 || GET_CODE (dest) == SIGN_EXTRACT
7792 || GET_CODE (dest) == ZERO_EXTRACT
7793 || GET_CODE (dest) == SUBREG)
7794 dest = XEXP (dest, 0);
7796 if (GET_CODE (dest) == REG)
7797 reload_cse_invalidate_regno (REGNO (dest), GET_MODE (dest), 1);
7798 else if (GET_CODE (dest) == MEM)
7799 reload_cse_invalidate_mem (dest);
7802 /* Possibly delete death notes on the insns before INSN if modifying INSN
7803 extended the lifespan of the registers. */
7805 static void
7806 reload_cse_delete_death_notes (insn)
7807 rtx insn;
7809 int dreg;
7811 for (dreg = 0; dreg < FIRST_PSEUDO_REGISTER; dreg++)
7813 rtx trial;
7815 if (! TEST_HARD_REG_BIT (no_longer_dead_regs, dreg))
7816 continue;
7818 for (trial = prev_nonnote_insn (insn);
7819 (trial
7820 && GET_CODE (trial) != CODE_LABEL
7821 && GET_CODE (trial) != BARRIER);
7822 trial = prev_nonnote_insn (trial))
7824 if (find_regno_note (trial, REG_DEAD, dreg))
7826 remove_death (dreg, trial);
7827 break;
7833 /* Record that the current insn uses hard reg REGNO in mode MODE. This
7834 will be used in reload_cse_delete_death_notes to delete prior REG_DEAD
7835 notes for this register. */
7837 static void
7838 reload_cse_no_longer_dead (regno, mode)
7839 int regno;
7840 enum machine_mode mode;
7842 int nregs = HARD_REGNO_NREGS (regno, mode);
7843 while (nregs-- > 0)
7845 SET_HARD_REG_BIT (no_longer_dead_regs, regno);
7846 regno++;
7851 /* Do a very simple CSE pass over the hard registers.
7853 This function detects no-op moves where we happened to assign two
7854 different pseudo-registers to the same hard register, and then
7855 copied one to the other. Reload will generate a useless
7856 instruction copying a register to itself.
7858 This function also detects cases where we load a value from memory
7859 into two different registers, and (if memory is more expensive than
7860 registers) changes it to simply copy the first register into the
7861 second register.
7863 Another optimization is performed that scans the operands of each
7864 instruction to see whether the value is already available in a
7865 hard register. It then replaces the operand with the hard register
7866 if possible, much like an optional reload would. */
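/* For example (illustrative only): if two pseudos were assigned to hard
register 3 and one was then copied to the other, reload leaves behind
(set (reg:SI 3) (reg:SI 3)); the pass below deletes such insns. */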
7868 void
7869 reload_cse_regs (first)
7870 rtx first;
7872 char *firstobj;
7873 rtx callmem;
7874 register int i;
7875 rtx insn;
7877 init_alias_analysis ();
7879 reg_values = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
7880 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7881 reg_values[i] = 0;
7883 /* Create our EXPR_LIST structures on reload_obstack, so that we can
7884 free them when we are done. */
7885 push_obstacks (&reload_obstack, &reload_obstack);
7886 firstobj = (char *) obstack_alloc (&reload_obstack, 0);
7888 /* We pass this to reload_cse_invalidate_mem to invalidate all of
7889 memory for a non-const call instruction. */
7890 callmem = gen_rtx (MEM, BLKmode, const0_rtx);
7892 /* This is used in reload_cse_invalidate_regno to avoid consing a
7893 new REG in a loop in that function. */
7894 invalidate_regno_rtx = gen_rtx (REG, VOIDmode, 0);
7896 for (insn = first; insn; insn = NEXT_INSN (insn))
7898 rtx body;
7900 if (GET_CODE (insn) == CODE_LABEL)
7902 /* Forget all the register values at a code label. We don't
7903 try to do anything clever around jumps. */
7904 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7905 reg_values[i] = 0;
7907 continue;
7910 #ifdef NON_SAVING_SETJMP
7911 if (NON_SAVING_SETJMP && GET_CODE (insn) == NOTE
7912 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
7914 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7915 reg_values[i] = 0;
7917 continue;
7919 #endif
7921 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
7922 continue;
7924 CLEAR_HARD_REG_SET (no_longer_dead_regs);
7926 /* If this is a call instruction, forget anything stored in a
7927 call clobbered register, or, if this is not a const call, in
7928 memory. */
7929 if (GET_CODE (insn) == CALL_INSN)
7931 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7932 if (call_used_regs[i])
7933 reload_cse_invalidate_regno (i, VOIDmode, 1);
7935 if (! CONST_CALL_P (insn))
7936 reload_cse_invalidate_mem (callmem);
7939 body = PATTERN (insn);
7940 if (GET_CODE (body) == SET)
7942 int count = 0;
7943 if (reload_cse_noop_set_p (body, insn))
7945 PUT_CODE (insn, NOTE);
7946 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
7947 NOTE_SOURCE_FILE (insn) = 0;
7948 reload_cse_delete_death_notes (insn);
7950 /* We're done with this insn. */
7951 continue;
7954 /* It's not a no-op, but we can try to simplify it. */
7955 CLEAR_HARD_REG_SET (no_longer_dead_regs);
7956 count += reload_cse_simplify_set (body, insn);
7958 if (count > 0 && apply_change_group ())
7959 reload_cse_delete_death_notes (insn);
7960 else if (reload_cse_simplify_operands (insn))
7961 reload_cse_delete_death_notes (insn);
7963 reload_cse_record_set (body, body);
7965 else if (GET_CODE (body) == PARALLEL)
7967 int count = 0;
7969 /* If every action in a PARALLEL is a noop, we can delete
7970 the entire PARALLEL. */
7971 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
7972 if ((GET_CODE (XVECEXP (body, 0, i)) != SET
7973 || ! reload_cse_noop_set_p (XVECEXP (body, 0, i), insn))
7974 && GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
7975 break;
7976 if (i < 0)
7978 PUT_CODE (insn, NOTE);
7979 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
7980 NOTE_SOURCE_FILE (insn) = 0;
7981 reload_cse_delete_death_notes (insn);
7983 /* We're done with this insn. */
7984 continue;
7987 /* It's not a no-op, but we can try to simplify it. */
7988 CLEAR_HARD_REG_SET (no_longer_dead_regs);
7989 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
7990 if (GET_CODE (XVECEXP (body, 0, i)) == SET)
7991 count += reload_cse_simplify_set (XVECEXP (body, 0, i), insn);
7993 if (count > 0 && apply_change_group ())
7994 reload_cse_delete_death_notes (insn);
7995 else if (reload_cse_simplify_operands (insn))
7996 reload_cse_delete_death_notes (insn);
7998 /* Look through the PARALLEL and record the values being
7999 set, if possible. Also handle any CLOBBERs. */
8000 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
8002 rtx x = XVECEXP (body, 0, i);
8004 if (GET_CODE (x) == SET)
8005 reload_cse_record_set (x, body);
8006 else
8007 note_stores (x, reload_cse_invalidate_rtx);
8010 else
8011 note_stores (body, reload_cse_invalidate_rtx);
8013 #ifdef AUTO_INC_DEC
8014 /* Clobber any registers which appear in REG_INC notes. We
8015 could keep track of the changes to their values, but it is
8016 unlikely to help. */
8018 rtx x;
8020 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
8021 if (REG_NOTE_KIND (x) == REG_INC)
8022 reload_cse_invalidate_rtx (XEXP (x, 0), NULL_RTX);
8024 #endif
8026 /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
8027 after we have processed the insn. */
8028 if (GET_CODE (insn) == CALL_INSN)
8030 rtx x;
8032 for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
8033 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
8034 reload_cse_invalidate_rtx (XEXP (XEXP (x, 0), 0), NULL_RTX);
8038 /* Free all the temporary structures we created, and go back to the
8039 regular obstacks. */
8040 obstack_free (&reload_obstack, firstobj);
8041 pop_obstacks ();
8044 /* Return whether one of the values known for REGNO is equal to VAL. MODE
8045 is the mode of the object that VAL is being copied to; this matters
8046 if VAL is a CONST_INT. */
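/* The mode matters for a CONST_INT because CONST_INTs carry no mode of
their own; the EXPR_LIST entry records the mode in which the constant was
referenced, and a narrower reference is acceptable only when truncation
is a no-op (see the TRULY_NOOP_TRUNCATION test below). */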
8048 static int
8049 reload_cse_regno_equal_p (regno, val, mode)
8050 int regno;
8051 rtx val;
8052 enum machine_mode mode;
8054 rtx x;
8056 if (val == 0)
8057 return 0;
8059 for (x = reg_values[regno]; x; x = XEXP (x, 1))
8060 if (XEXP (x, 0) != 0
8061 && rtx_equal_p (XEXP (x, 0), val)
8062 && (GET_CODE (val) != CONST_INT
8063 || mode == GET_MODE (x)
8064 || (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x))
8065 /* On a big endian machine if the value spans more than
8066 one register then this register holds the high part of
8067 it and we can't use it.
8069 ??? We should also compare with the high part of the
8070 value. */
8071 && !(WORDS_BIG_ENDIAN
8072 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
8073 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
8074 GET_MODE_BITSIZE (GET_MODE (x))))))
8075 return 1;
8077 return 0;
8080 /* See whether a single set is a noop. SET is the set instruction we
8081 should check, and INSN is the instruction from which it came. */
8083 static int
8084 reload_cse_noop_set_p (set, insn)
8085 rtx set;
8086 rtx insn;
8088 rtx src, dest;
8089 enum machine_mode dest_mode;
8090 int dreg, sreg;
8091 int ret;
8093 src = SET_SRC (set);
8094 dest = SET_DEST (set);
8095 dest_mode = GET_MODE (dest);
8097 if (side_effects_p (src))
8098 return 0;
8100 dreg = true_regnum (dest);
8101 sreg = true_regnum (src);
8103 /* Check for setting a register to itself. In this case, we don't
8104 have to worry about REG_DEAD notes. */
8105 if (dreg >= 0 && dreg == sreg)
8106 return 1;
8108 ret = 0;
8109 if (dreg >= 0)
8111 /* Check for setting a register to itself. */
8112 if (dreg == sreg)
8113 ret = 1;
8115 /* Check for setting a register to a value which we already know
8116 is in the register. */
8117 else if (reload_cse_regno_equal_p (dreg, src, dest_mode))
8118 ret = 1;
8120 /* Check for setting a register DREG to another register SREG
8121 where SREG is equal to a value which is already in DREG. */
8122 else if (sreg >= 0)
8124 rtx x;
8126 for (x = reg_values[sreg]; x; x = XEXP (x, 1))
8128 rtx tmp;
8130 if (XEXP (x, 0) == 0)
8131 continue;
8133 if (dest_mode == GET_MODE (x))
8134 tmp = XEXP (x, 0);
8135 else if (GET_MODE_BITSIZE (dest_mode)
8136 < GET_MODE_BITSIZE (GET_MODE (x)))
8137 tmp = gen_lowpart_common (dest_mode, XEXP (x, 0));
8138 else
8139 continue;
8141 if (tmp
8142 && reload_cse_regno_equal_p (dreg, tmp, dest_mode))
8144 ret = 1;
8145 break;
8150 else if (GET_CODE (dest) == MEM)
8152 /* Check for storing a register to memory when we know that the
8153 register is equivalent to the memory location. */
8154 if (sreg >= 0
8155 && reload_cse_regno_equal_p (sreg, dest, dest_mode)
8156 && ! side_effects_p (dest))
8157 ret = 1;
8160 /* If we can delete this SET, then we need to look for an earlier
8161 REG_DEAD note on DREG, and remove it if it exists. */
8162 if (ret && dreg >= 0)
8164 if (! find_regno_note (insn, REG_UNUSED, dreg))
8165 reload_cse_no_longer_dead (dreg, dest_mode);
8168 return ret;
8171 /* Try to simplify a single SET instruction. SET is the set pattern.
8172 INSN is the instruction it came from.
8173 This function only handles one case: if we set a register to a value
8174 which is not a register, we try to find that value in some other register
8175 and change the set into a register copy. */
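/* For instance (illustrative only): given (set (reg:SI 0) (mem:SI X)),
if some other hard register is already known to hold the value of that
MEM and a register-register copy is cheap enough, the source is replaced
by a reference to that register. */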
8177 static int
8178 reload_cse_simplify_set (set, insn)
8179 rtx set;
8180 rtx insn;
8182 int dreg;
8183 rtx src;
8184 enum machine_mode dest_mode;
8185 enum reg_class dclass;
8186 register int i;
8188 dreg = true_regnum (SET_DEST (set));
8189 if (dreg < 0)
8190 return 0;
8192 src = SET_SRC (set);
8193 if (side_effects_p (src) || true_regnum (src) >= 0)
8194 return 0;
8196 /* If memory loads are cheaper than register copies, don't change
8197 them. */
8198 if (GET_CODE (src) == MEM && MEMORY_MOVE_COST (GET_MODE (src)) < 2)
8199 return 0;
8201 dest_mode = GET_MODE (SET_DEST (set));
8202 dclass = REGNO_REG_CLASS (dreg);
8203 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8205 if (i != dreg
8206 && REGISTER_MOVE_COST (REGNO_REG_CLASS (i), dclass) == 2
8207 && reload_cse_regno_equal_p (i, src, dest_mode))
8209 int validated;
8211 /* Pop back to the real obstacks while changing the insn. */
8212 pop_obstacks ();
8214 validated = validate_change (insn, &SET_SRC (set),
8215 gen_rtx (REG, dest_mode, i), 1);
8217 /* Go back to the obstack we are using for temporary
8218 storage. */
8219 push_obstacks (&reload_obstack, &reload_obstack);
8221 if (validated && ! find_regno_note (insn, REG_UNUSED, i))
8223 reload_cse_no_longer_dead (i, dest_mode);
8224 return 1;
8228 return 0;
8231 /* Try to replace operands in INSN with equivalent values that are already
8232 in registers. This can be viewed as optional reloading.
8234 For each non-register operand in the insn, see if any hard regs are
8235 known to be equivalent to that operand. Record the alternatives which
8236 can accept these hard registers. Among all alternatives, select the
8237 ones which are better or equal to the one currently matching, where
8238 "better" is in terms of '?' and '!' constraints. Among the remaining
8239 alternatives, select the one which replaces most operands with
8240 hard registers. */
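/* In outline, the code below computes a reject weight and a
register-substitution count for each alternative, discards alternatives
worse than the one that currently matches, sorts the survivors, and then
substitutes hard registers according to the best remaining alternative. */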
8242 static int
8243 reload_cse_simplify_operands (insn)
8244 rtx insn;
8246 #ifdef REGISTER_CONSTRAINTS
8247 int insn_code_number, n_operands, n_alternatives;
8248 int i, j;
8250 char *constraints[MAX_RECOG_OPERANDS];
8252 /* Vector recording how bad an alternative is. */
8253 int *alternative_reject;
8254 /* Vector recording how many registers can be introduced by choosing
8255 this alternative. */
8256 int *alternative_nregs;
8257 /* Array of vectors recording, for each operand and each alternative,
8258 which hard register to substitute, or -1 if the operand should be
8259 left as it is. */
8260 int *op_alt_regno[MAX_RECOG_OPERANDS];
8261 /* Array of alternatives, sorted in order of decreasing desirability. */
8262 int *alternative_order;
8264 /* Find out some information about this insn. */
8265 insn_code_number = recog_memoized (insn);
8266 /* We don't modify asm instructions. */
8267 if (insn_code_number < 0)
8268 return 0;
8270 n_operands = insn_n_operands[insn_code_number];
8271 n_alternatives = insn_n_alternatives[insn_code_number];
8273 if (n_alternatives == 0 || n_operands == 0)
8274 return 0;
8275 insn_extract (insn);
8277 /* Figure out which alternative currently matches. */
8278 if (! constrain_operands (insn_code_number, 1))
8279 abort ();
8281 alternative_reject = (int *) alloca (n_alternatives * sizeof (int));
8282 alternative_nregs = (int *) alloca (n_alternatives * sizeof (int));
8283 alternative_order = (int *) alloca (n_alternatives * sizeof (int));
8284 bzero ((char *)alternative_reject, n_alternatives * sizeof (int));
8285 bzero ((char *)alternative_nregs, n_alternatives * sizeof (int));
8287 for (i = 0; i < n_operands; i++)
8289 enum machine_mode mode;
8290 int regno;
8291 char *p;
8293 op_alt_regno[i] = (int *) alloca (n_alternatives * sizeof (int));
8294 for (j = 0; j < n_alternatives; j++)
8295 op_alt_regno[i][j] = -1;
8297 p = constraints[i] = insn_operand_constraint[insn_code_number][i];
8298 mode = insn_operand_mode[insn_code_number][i];
8300 /* Add the reject values for each alternative given by the constraints
8301 for this operand. */
8302 j = 0;
8303 while (*p != '\0')
8305 char c = *p++;
8306 if (c == ',')
8307 j++;
8308 else if (c == '?')
8309 alternative_reject[j] += 3;
8310 else if (c == '!')
8311 alternative_reject[j] += 300;
8314 /* We won't change operands which are already registers. We
8315 also don't want to modify output operands. */
8316 regno = true_regnum (recog_operand[i]);
8317 if (regno >= 0
8318 || constraints[i][0] == '='
8319 || constraints[i][0] == '+')
8320 continue;
8322 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8324 int class = (int) NO_REGS;
8326 if (! reload_cse_regno_equal_p (regno, recog_operand[i], mode))
8327 continue;
8329 /* We found a register equal to this operand. Now look for all
8330 alternatives that can accept this register and have not been
8331 assigned a register they can use yet. */
8332 j = 0;
8333 p = constraints[i];
8334 for (;;)
8336 char c = *p++;
8338 switch (c)
8340 case '=': case '+': case '?':
8341 case '#': case '&': case '!':
8342 case '*': case '%':
8343 case '0': case '1': case '2': case '3': case '4':
8344 case 'm': case '<': case '>': case 'V': case 'o':
8345 case 'E': case 'F': case 'G': case 'H':
8346 case 's': case 'i': case 'n':
8347 case 'I': case 'J': case 'K': case 'L':
8348 case 'M': case 'N': case 'O': case 'P':
8349 #ifdef EXTRA_CONSTRAINT
8350 case 'Q': case 'R': case 'S': case 'T': case 'U':
8351 #endif
8352 case 'p': case 'X':
8353 /* These don't say anything we care about. */
8354 break;
8356 case 'g': case 'r':
8357 class = reg_class_subunion[(int) class][(int) GENERAL_REGS];
8358 break;
8360 default:
8361 class
8362 = reg_class_subunion[(int) class][(int) REG_CLASS_FROM_LETTER (c)];
8363 break;
8365 case ',': case '\0':
8366 /* See if REGNO fits this alternative, and set it up as the
8367 replacement register if we don't have one for this
8368 alternative yet. */
8369 if (op_alt_regno[i][j] == -1
8370 && reg_fits_class_p (gen_rtx (REG, mode, regno), class,
8371 0, mode))
8373 alternative_nregs[j]++;
8374 op_alt_regno[i][j] = regno;
8376 j++;
8377 break;
8380 if (c == '\0')
8381 break;
8386 /* Record all alternatives which are better or equal to the currently
8387 matching one in the alternative_order array. */
8388 for (i = j = 0; i < n_alternatives; i++)
8389 if (alternative_reject[i] <= alternative_reject[which_alternative])
8390 alternative_order[j++] = i;
8391 n_alternatives = j;
8393 /* Sort it. Given a small number of alternatives, a dumb algorithm
8394 won't hurt too much. */
8395 for (i = 0; i < n_alternatives - 1; i++)
8397 int best = i;
8398 int best_reject = alternative_reject[alternative_order[i]];
8399 int best_nregs = alternative_nregs[alternative_order[i]];
8400 int tmp;
8402 for (j = i + 1; j < n_alternatives; j++)
8404 int this_reject = alternative_reject[alternative_order[j]];
8405 int this_nregs = alternative_nregs[alternative_order[j]];
8407 if (this_reject < best_reject
8408 || (this_reject == best_reject && this_nregs > best_nregs))
8410 best = j;
8411 best_reject = this_reject;
8412 best_nregs = this_nregs;
8416 tmp = alternative_order[best];
8417 alternative_order[best] = alternative_order[i];
8418 alternative_order[i] = tmp;
8421 /* Substitute the operands as determined by op_alt_regno for the best
8422 alternative. */
8423 j = alternative_order[0];
8424 CLEAR_HARD_REG_SET (no_longer_dead_regs);
8426 /* Pop back to the real obstacks while changing the insn. */
8427 pop_obstacks ();
8429 for (i = 0; i < n_operands; i++)
8431 enum machine_mode mode = insn_operand_mode[insn_code_number][i];
8432 if (op_alt_regno[i][j] == -1)
8433 continue;
8435 reload_cse_no_longer_dead (op_alt_regno[i][j], mode);
8436 validate_change (insn, recog_operand_loc[i],
8437 gen_rtx (REG, mode, op_alt_regno[i][j]), 1);
8440 for (i = insn_n_dups[insn_code_number] - 1; i >= 0; i--)
8442 int op = recog_dup_num[i];
8443 enum machine_mode mode = insn_operand_mode[insn_code_number][op];
8445 if (op_alt_regno[op][j] == -1)
8446 continue;
8448 reload_cse_no_longer_dead (op_alt_regno[op][j], mode);
8449 validate_change (insn, recog_dup_loc[i],
8450 gen_rtx (REG, mode, op_alt_regno[op][j]), 1);
8453 /* Go back to the obstack we are using for temporary
8454 storage. */
8455 push_obstacks (&reload_obstack, &reload_obstack);
8457 return apply_change_group ();
8458 #else
8459 return 0;
8460 #endif
8463 /* These two variables are used to pass information from
8464 reload_cse_record_set to reload_cse_check_clobber. */
8466 static int reload_cse_check_clobbered;
8467 static rtx reload_cse_check_src;
8469 /* See if DEST overlaps with RELOAD_CSE_CHECK_SRC. If it does, set
8470 RELOAD_CSE_CHECK_CLOBBERED. This is called via note_stores. The
8471 second argument, which is passed by note_stores, is ignored. */
8473 static void
8474 reload_cse_check_clobber (dest, ignore)
8475 rtx dest;
8476 rtx ignore;
8478 if (reg_overlap_mentioned_p (dest, reload_cse_check_src))
8479 reload_cse_check_clobbered = 1;
8482 /* Record the result of a SET instruction. SET is the set pattern.
8483 BODY is the pattern of the insn that it came from. */
8485 static void
8486 reload_cse_record_set (set, body)
8487 rtx set;
8488 rtx body;
8490 rtx dest, src, x;
8491 int dreg, sreg;
8492 enum machine_mode dest_mode;
8494 dest = SET_DEST (set);
8495 src = SET_SRC (set);
8496 dreg = true_regnum (dest);
8497 sreg = true_regnum (src);
8498 dest_mode = GET_MODE (dest);
8500 /* Some machines don't define AUTO_INC_DEC, but they still use push
8501 instructions. We need to catch that case here in order to
8502 invalidate the stack pointer correctly. Note that invalidating
8503 the stack pointer is different from invalidating DEST. */
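/* E.g. a push such as (set (mem:SI (pre_dec:SI (reg:SI sp))) (reg:SI 0))
changes the stack pointer even when no REG_INC note records the side
effect, so both the stack pointer and DEST are invalidated below. */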
8504 x = dest;
8505 while (GET_CODE (x) == SUBREG
8506 || GET_CODE (x) == ZERO_EXTRACT
8507 || GET_CODE (x) == SIGN_EXTRACT
8508 || GET_CODE (x) == STRICT_LOW_PART)
8509 x = XEXP (x, 0);
8510 if (push_operand (x, GET_MODE (x)))
8512 reload_cse_invalidate_rtx (stack_pointer_rtx, NULL_RTX);
8513 reload_cse_invalidate_rtx (dest, NULL_RTX);
8514 return;
8517 /* We can only handle an assignment to a register, or a store of a
8518 register to a memory location. For other cases, we just clobber
8519 the destination. We also have to just clobber if there are side
8520 effects in SRC or DEST. */
8521 if ((dreg < 0 && GET_CODE (dest) != MEM)
8522 || side_effects_p (src)
8523 || side_effects_p (dest))
8525 reload_cse_invalidate_rtx (dest, NULL_RTX);
8526 return;
8529 #ifdef HAVE_cc0
8530 /* We don't try to handle values involving CC, because it's a pain
8531 to keep track of when they have to be invalidated. */
8532 if (reg_mentioned_p (cc0_rtx, src)
8533 || reg_mentioned_p (cc0_rtx, dest))
8535 reload_cse_invalidate_rtx (dest, NULL_RTX);
8536 return;
8538 #endif
8540 /* If BODY is a PARALLEL, then we need to see whether the source of
8541 SET is clobbered by some other instruction in the PARALLEL. */
8542 if (GET_CODE (body) == PARALLEL)
8544 int i;
8546 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
8548 rtx x;
8550 x = XVECEXP (body, 0, i);
8551 if (x == set)
8552 continue;
8554 reload_cse_check_clobbered = 0;
8555 reload_cse_check_src = src;
8556 note_stores (x, reload_cse_check_clobber);
8557 if (reload_cse_check_clobbered)
8559 reload_cse_invalidate_rtx (dest, NULL_RTX);
8560 return;
8565 if (dreg >= 0)
8567 int i;
8569 /* This is an assignment to a register. Update the value we
8570 have stored for the register. */
8571 if (sreg >= 0)
8573 rtx x;
8575 /* This is a copy from one register to another. Any values
8576 which were valid for SREG are now valid for DREG. If the
8577 mode changes, we use gen_lowpart_common to extract only
8578 the part of the value that is copied. */
8579 reg_values[dreg] = 0;
8580 for (x = reg_values[sreg]; x; x = XEXP (x, 1))
8582 rtx tmp;
8584 if (XEXP (x, 0) == 0)
8585 continue;
8586 if (dest_mode == GET_MODE (XEXP (x, 0)))
8587 tmp = XEXP (x, 0);
8588 else if (GET_MODE_BITSIZE (dest_mode)
8589 > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
8590 continue;
8591 else
8592 tmp = gen_lowpart_common (dest_mode, XEXP (x, 0));
8593 if (tmp)
8594 reg_values[dreg] = gen_rtx (EXPR_LIST, dest_mode, tmp,
8595 reg_values[dreg]);
8598 else
8599 reg_values[dreg] = gen_rtx (EXPR_LIST, dest_mode, src, NULL_RTX);
8601 /* We've changed DREG, so invalidate any values held by other
8602 registers that depend upon it. */
8603 reload_cse_invalidate_regno (dreg, dest_mode, 0);
8605 /* If this assignment changes more than one hard register,
8606 forget anything we know about the others. */
8607 for (i = 1; i < HARD_REGNO_NREGS (dreg, dest_mode); i++)
8608 reg_values[dreg + i] = 0;
8610 else if (GET_CODE (dest) == MEM)
8612 /* Invalidate conflicting memory locations. */
8613 reload_cse_invalidate_mem (dest);
8615 /* If we're storing a register to memory, add DEST to the list
8616 in REG_VALUES. */
8617 if (sreg >= 0 && ! side_effects_p (dest))
8618 reg_values[sreg] = gen_rtx (EXPR_LIST, dest_mode, dest,
8619 reg_values[sreg]);
8621 else
8623 /* We should have bailed out earlier. */
8624 abort ();