/* Reload pseudo regs into hard regs for insns that require hard regs.
   Copyright (C) 1987, 88, 89, 92-5, 1996 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "insn-config.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "hard-reg-set.h"
#include "basic-block.h"
/* This file contains the reload pass of the compiler, which is
   run after register allocation has been done.  It checks that
   each insn is valid (operands required to be in registers really
   are in registers of the proper class) and fixes up invalid ones
   by copying values temporarily into registers for the insns
   that need them.

   The results of register allocation are described by the vector
   reg_renumber; the insns still contain pseudo regs, but reg_renumber
   can be used to find which hard reg, if any, a pseudo reg is in.

   The technique we always use is to free up a few hard regs that are
   called ``reload regs'', and for each place where a pseudo reg
   must be in a hard reg, copy it temporarily into one of the reload regs.

   All the pseudos that were formerly allocated to the hard regs that
   are now in use as reload regs must be ``spilled''.  This means
   that they go to other hard regs, or to stack slots if no other
   available hard regs can be found.  Spilling can invalidate more
   insns, requiring additional reloads, so we must keep checking
   until the process stabilizes.

   For machines with different classes of registers, we must keep track
   of the register class needed for each reload, and make sure that
   we allocate enough reload registers of each class.

   The file reload.c contains the code that checks one insn for
   validity and reports the reloads that it needs.  This file
   is in charge of scanning the entire rtl code, accumulating the
   reload needs, spilling, assigning reload registers to use for
   fixing up each insn, and generating the new insns to copy values
   into the reload registers.  */
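
/* The following is an illustrative sketch only, not part of the pass: a
   simplified view of the fixed-point iteration described above.  The helpers
   collect_insn_needs and spill_more_hard_regs are hypothetical and do not
   exist in GNU CC; the real driver is the reload function below.  */
#if 0
static void
reload_sketch (first)
     rtx first;
{
  int something_changed = 1;

  while (something_changed)
    {
      rtx insn;

      something_changed = 0;
      for (insn = first; insn; insn = NEXT_INSN (insn))
	collect_insn_needs (insn);	/* like find_reloads below */
      if (spill_more_hard_regs ())	/* may displace more pseudos, */
	something_changed = 1;		/* so scan everything again */
    }
}
#endif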
#ifndef REGISTER_MOVE_COST
#define REGISTER_MOVE_COST(x, y) 2
#endif

#ifndef MEMORY_MOVE_COST
#define MEMORY_MOVE_COST(x) 4
#endif
/* During reload_as_needed, element N contains a REG rtx for the hard reg
   into which reg N has been reloaded (perhaps for a previous insn).  */
static rtx *reg_last_reload_reg;

/* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
   for an output reload that stores into reg N.  */
static char *reg_has_output_reload;

/* Indicates which hard regs are reload-registers for an output reload
   in the current insn.  */
static HARD_REG_SET reg_is_output_reload;

/* Element N is the constant value to which pseudo reg N is equivalent,
   or zero if pseudo reg N is not equivalent to a constant.
   find_reloads looks at this in order to replace pseudo reg N
   with the constant it stands for.  */
rtx *reg_equiv_constant;

/* Element N is a memory location to which pseudo reg N is equivalent,
   prior to any register elimination (such as frame pointer to stack
   pointer).  Depending on whether or not it is a valid address, this value
   is transferred to either reg_equiv_address or reg_equiv_mem.  */
rtx *reg_equiv_memory_loc;

/* Element N is the address of stack slot to which pseudo reg N is equivalent.
   This is used when the address is not valid as a memory address
   (because its displacement is too big for the machine).  */
rtx *reg_equiv_address;
/* Element N is the memory slot to which pseudo reg N is equivalent,
   or zero if pseudo reg N is not equivalent to a memory slot.  */
rtx *reg_equiv_mem;

/* Widest width in which each pseudo reg is referred to (via subreg).  */
static int *reg_max_ref_width;

/* Element N is the insn that initialized reg N from its equivalent
   constant or memory slot.  */
static rtx *reg_equiv_init;

/* During reload_as_needed, element N contains the last pseudo regno
   reloaded into the Nth reload register.  This vector is in parallel
   with spill_regs.  If that pseudo reg occupied more than one register,
   reg_reloaded_contents points to that pseudo for each spill register in
   use; all of these must remain set for an inheritance to occur.  */
static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];

/* During reload_as_needed, element N contains the insn for which
   the Nth reload register was last used.  This vector is in parallel
   with spill_regs, and its contents are significant only when
   reg_reloaded_contents is significant.  */
static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];

/* Number of spill-regs so far; number of valid elements of spill_regs.  */
static int n_spills;

/* In parallel with spill_regs, contains REG rtx's for those regs.
   Holds the last rtx used for any given reg, or 0 if it has never
   been used for spilling yet.  This rtx is reused, provided it has
   the proper mode.  */
static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];

/* In parallel with spill_regs, contains nonzero for a spill reg
   that was stored after the last time it was used.
   The precise value is the insn generated to do the store.  */
static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
/* This table is the inverse mapping of spill_regs:
   indexed by hard reg number,
   it contains the position of that reg in spill_regs,
   or -1 for something that is not in spill_regs.  */
static short spill_reg_order[FIRST_PSEUDO_REGISTER];

/* This reg set indicates registers that may not be used for retrying global
   allocation.  The registers that may not be used include all spill registers
   and the frame pointer (if we are using one).  */
HARD_REG_SET forbidden_regs;

/* This reg set indicates registers that are not good for spill registers.
   They will not be used to complete groups of spill registers.  This includes
   all fixed registers, registers that may be eliminated, and, if
   SMALL_REGISTER_CLASSES is not defined, registers explicitly used in the rtl.

   (spill_reg_order prevents these registers from being used to start a
   group.)  */
static HARD_REG_SET bad_spill_regs;
/* Describes order of use of registers for reloading
   of spilled pseudo-registers.  `n_spills' is the number of
   elements that are actually valid; new ones are added at the end.  */
static short spill_regs[FIRST_PSEUDO_REGISTER];

/* This reg set indicates those registers that have been used as spill
   registers.  This information is used in reorg.c, to help figure out
   what registers are live at any point.  It is assumed that all spill_regs
   are dead at every CODE_LABEL.  */
HARD_REG_SET used_spill_regs;

/* Index of last register assigned as a spill register.  We allocate in
   a round-robin fashion.  */
static int last_spill_reg;
/* Describes order of preference for putting regs into spill_regs.
   Contains the numbers of all the hard regs, in order most preferred first.
   This order is different for each function.
   It is set up by order_regs_for_reload.
   Empty elements at the end contain -1.  */
static short potential_reload_regs[FIRST_PSEUDO_REGISTER];

/* 1 for a hard register that appears explicitly in the rtl
   (for example, function value registers, special registers
   used by insns, structure value pointer registers).  */
static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];

/* Indicates if a register was counted against the need for
   groups.  0 means it can count against max_nongroup instead.  */
static HARD_REG_SET counted_for_groups;

/* Indicates if a register was counted against the need for
   non-groups.  0 means it can become part of a new group.
   During choose_reload_regs, 1 here means don't use this reg
   as part of a group, even if it seems to be otherwise ok.  */
static HARD_REG_SET counted_for_nongroups;

/* Indexed by pseudo reg number N, nonzero if we may not delete stores
   into the real (memory) home of pseudo N.  This is set if we already
   substituted a memory equivalent in some uses, which happens when we
   have to eliminate the frame pointer from it.  */
static char *cannot_omit_stores;

/* Nonzero if indirect addressing is supported on the machine; this means
   that spilling (REG n) does not require reloading it into a register in
   order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))).  The
   value indicates the level of indirect addressing supported, e.g., two
   means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
   a hard register.  */
static char spill_indirect_levels;

/* Nonzero if indirect addressing is supported when the innermost MEM is
   of the form (MEM (SYMBOL_REF sym)).  It is assumed that the level to
   which these are valid is the same as spill_indirect_levels, above.  */
char indirect_symref_ok;

/* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid.  */
char double_reg_address_ok;
/* Record the stack slot for each spilled hard register.  */
static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];

/* Width allocated so far for that stack slot.  */
static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];

/* Indexed by register class and basic block number, nonzero if there is
   any need for a spill register of that class in that basic block.
   The pointer is 0 if we did stupid allocation and don't know
   the structure of basic blocks.  */
char *basic_block_needs[N_REG_CLASSES];

/* First uid used by insns created by reload in this function.
   Used in find_equiv_reg.  */
int reload_first_uid;

/* Flag set by local-alloc or global-alloc if anything is live in
   a call-clobbered reg across calls.  */
int caller_save_needed;

/* Set to 1 while reload_as_needed is operating.
   Required by some machines to handle any generated moves differently.  */
int reload_in_progress = 0;

/* These arrays record the insn_code of insns that may be needed to
   perform input and output reloads of special objects.  They provide a
   place to pass a scratch register.  */
enum insn_code reload_in_optab[NUM_MACHINE_MODES];
enum insn_code reload_out_optab[NUM_MACHINE_MODES];
/* This obstack is used for allocation of rtl during register elimination.
   The allocated storage can be freed once find_reloads has processed the
   insn.  */
struct obstack reload_obstack;
char *reload_firstobj;

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
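
/* Illustrative sketch only, not part of the pass: the allocation pattern used
   with reload_obstack elsewhere in this file.  Everything allocated after
   reload_firstobj is released in one shot once an insn has been processed.  */
#if 0
  gcc_obstack_init (&reload_obstack);
  reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
  /* ... allocate temporary rtl on reload_obstack during elimination ... */
  obstack_free (&reload_obstack, reload_firstobj);
#endif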
/* List of labels that must never be deleted.  */
extern rtx forced_labels;

/* Allocation number table from global register allocation.  */
extern int *reg_allocno;
/* This structure is used to record information about register eliminations.
   Each array entry describes one possible way of eliminating a register
   in favor of another.  If there is more than one way of eliminating a
   particular register, the most preferred should be specified first.  */

static struct elim_table
{
  int from;			/* Register number to be eliminated.  */
  int to;			/* Register number used as replacement.  */
  int initial_offset;		/* Initial difference between values.  */
  int can_eliminate;		/* Non-zero if this elimination can be done.  */
  int can_eliminate_previous;	/* Value of CAN_ELIMINATE in previous scan over
				   insns made by reload.  */
  int offset;			/* Current offset between the two regs.  */
  int max_offset;		/* Maximum offset between the two regs.  */
  int previous_offset;		/* Offset at end of previous insn.  */
  int ref_outside_mem;		/* "to" has been referenced outside a MEM.  */
  rtx from_rtx;			/* REG rtx for the register to be eliminated.
				   We cannot simply compare the number since
				   we might then spuriously replace a hard
				   register corresponding to a pseudo
				   assigned to the reg to be eliminated.  */
  rtx to_rtx;			/* REG rtx for the replacement.  */
} reg_eliminate[] =

/* If a set of eliminable registers was specified, define the table from it.
   Otherwise, default to the normal case of the frame pointer being
   replaced by the stack pointer.  */

#ifdef ELIMINABLE_REGS
  ELIMINABLE_REGS;
#else
  {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
#endif

#define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
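
/* Illustrative sketch only: a target with a soft argument pointer and a soft
   frame pointer might define ELIMINABLE_REGS in its machine description
   header roughly as follows.  This is a hypothetical example, not taken from
   any particular target:

   #define ELIMINABLE_REGS					\
   {{ ARG_POINTER_REGNUM,   STACK_POINTER_REGNUM },		\
    { ARG_POINTER_REGNUM,   HARD_FRAME_POINTER_REGNUM },	\
    { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM },		\
    { FRAME_POINTER_REGNUM, HARD_FRAME_POINTER_REGNUM }}

   The pairs are tried in order; CAN_ELIMINATE then decides which of them is
   actually usable for the current function.  */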
/* Record the number of pending eliminations that have an offset not equal
   to their initial offset.  If non-zero, we use a new copy of each
   replacement result in any insns encountered.  */
static int num_not_at_initial_offset;

/* Count the number of registers that we may be able to eliminate.  */
static int num_eliminable;
/* For each label, we record the offset of each elimination.  If we reach
   a label by more than one path and an offset differs, we cannot do the
   elimination.  This information is indexed by the number of the label.
   The first table is an array of flags that records whether we have yet
   encountered a label and the second table is an array of arrays, one
   entry in the latter array for each elimination.  */

static char *offsets_known_at;
static int (*offsets_at)[NUM_ELIMINABLE_REGS];
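
/* Illustrative sketch only (hypothetical code, not part of the pass): how the
   two tables above are meant to be consulted for a label L and elimination
   number E, assuming L's label number is in range.  */
#if 0
  if (offsets_known_at[CODE_LABEL_NUMBER (L)]
      && offsets_at[CODE_LABEL_NUMBER (L)][E] != reg_eliminate[E].offset)
    /* L is reached by paths with different offsets: elimination E fails.  */
    reg_eliminate[E].can_eliminate = 0;
#endif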
/* Number of labels in the current function.  */
static int num_labels;

struct hard_reg_n_uses { int regno; int uses; };
static int possible_group_p		PROTO((int, int *));
static void count_possible_groups	PROTO((int *, enum machine_mode *,
					       int *, int));
static int modes_equiv_for_class_p	PROTO((enum machine_mode,
					       enum machine_mode,
					       enum reg_class));
static void spill_failure		PROTO((rtx));
static int new_spill_reg		PROTO((int, int, int *, int *, int,
					       FILE *, int));
static void delete_dead_insn		PROTO((rtx));
static void alter_reg			PROTO((int, int));
static void mark_scratch_live		PROTO((rtx));
static void set_label_offsets		PROTO((rtx, rtx, int));
static int eliminate_regs_in_insn	PROTO((rtx, int));
static void mark_not_eliminable		PROTO((rtx, rtx));
static int spill_hard_reg		PROTO((int, int, FILE *, int));
static void scan_paradoxical_subregs	PROTO((rtx));
static int hard_reg_use_compare		PROTO((struct hard_reg_n_uses *,
					       struct hard_reg_n_uses *));
static void order_regs_for_reload	PROTO((int));
static int compare_spill_regs		PROTO((short *, short *));
static void reload_as_needed		PROTO((rtx, int));
static void forget_old_reloads_1	PROTO((rtx, rtx));
static int reload_reg_class_lower	PROTO((short *, short *));
static void mark_reload_reg_in_use	PROTO((int, int, enum reload_type,
					       enum machine_mode));
static void clear_reload_reg_in_use	PROTO((int, int, enum reload_type,
					       enum machine_mode));
static int reload_reg_free_p		PROTO((int, int, enum reload_type));
static int reload_reg_free_before_p	PROTO((int, int, enum reload_type));
static int reload_reg_reaches_end_p	PROTO((int, int, enum reload_type));
static int reloads_conflict		PROTO((int, int));
static int allocate_reload_reg		PROTO((int, rtx, int, int));
static void choose_reload_regs		PROTO((rtx, rtx));
static void merge_assigned_reloads	PROTO((rtx));
static void emit_reload_insns		PROTO((rtx));
static void delete_output_reload	PROTO((rtx, int, rtx));
static void inc_for_reload		PROTO((rtx, rtx, int));
static int constraint_accepts_reg_p	PROTO((char *, rtx));
static int count_occurrences		PROTO((rtx, rtx));
/* Initialize the reload pass once per compilation.  */

void
init_reload ()
{
  register int i;

  /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
     Set spill_indirect_levels to the number of levels such addressing is
     permitted, zero if it is not permitted at all.  */

  register rtx tem
    = gen_rtx (MEM, Pmode,
	       gen_rtx (PLUS, Pmode,
			gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
			GEN_INT (4)));
  spill_indirect_levels = 0;

  while (memory_address_p (QImode, tem))
    {
      spill_indirect_levels++;
      tem = gen_rtx (MEM, Pmode, tem);
    }

  /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)).  */

  tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
  indirect_symref_ok = memory_address_p (QImode, tem);

  /* See if reg+reg is a valid (and offsettable) address.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      tem = gen_rtx (PLUS, Pmode,
		     gen_rtx (REG, Pmode, HARD_FRAME_POINTER_REGNUM),
		     gen_rtx (REG, Pmode, i));
      /* This way, we make sure that reg+reg is an offsettable address.  */
      tem = plus_constant (tem, 4);

      if (memory_address_p (QImode, tem))
	double_reg_address_ok = 1;
    }

  /* Initialize obstack for our rtl allocation.  */
  gcc_obstack_init (&reload_obstack);
  reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
}
/* Main entry point for the reload pass.

   FIRST is the first insn of the function being compiled.

   GLOBAL nonzero means we were called from global_alloc
   and should attempt to reallocate any pseudoregs that we
   displace from hard regs we will use for reloads.
   If GLOBAL is zero, we do not have enough information to do that,
   so any pseudo reg that is spilled must go to the stack.

   DUMPFILE is the global-reg debugging dump file stream, or 0.
   If it is nonzero, messages are written to it to describe
   which registers are seized as reload regs, which pseudo regs
   are spilled from them, and where the pseudo regs are reallocated to.

   Return value is nonzero if reload failed
   and we must not do any more for this function.  */
int
reload (first, global, dumpfile)
     rtx first;
     int global;
     FILE *dumpfile;
{
  register int i, j, k;
  register int class;
  register rtx insn;
  register struct elim_table *ep;

  int something_changed;
  int something_needs_reloads;
  int something_needs_elimination;
  int new_basic_block_needs;
  enum reg_class caller_save_spill_class = NO_REGS;
  int caller_save_group_size = 1;

  /* Nonzero means we couldn't get enough spill regs.  */
  int failure = 0;

  /* The basic block number currently being processed for INSN.  */
  int this_block;

  /* Make sure even insns with volatile mem refs are recognizable.  */
  init_recog ();

  /* Enable find_equiv_reg to distinguish insns made by reload.  */
  reload_first_uid = get_max_uid ();

  for (i = 0; i < N_REG_CLASSES; i++)
    basic_block_needs[i] = 0;

#ifdef SECONDARY_MEMORY_NEEDED
  /* Initialize the secondary memory table.  */
  clear_secondary_mem ();
#endif

  /* Remember which hard regs appear explicitly
     before we merge into `regs_ever_live' the ones in which
     pseudo regs have been allocated.  */
  bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);

  /* We don't have a stack slot for any spill reg yet.  */
  bzero ((char *) spill_stack_slot, sizeof spill_stack_slot);
  bzero ((char *) spill_stack_slot_width, sizeof spill_stack_slot_width);
  /* Initialize the save area information for caller-save, in case some
     are needed.  */
  init_save_areas ();

  /* Compute which hard registers are now in use
     as homes for pseudo registers.
     This is done here rather than (eg) in global_alloc
     because this point is reached even if not optimizing.  */

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    mark_home_live (i);

  for (i = 0; i < scratch_list_length; i++)
    if (scratch_list[i])
      mark_scratch_live (scratch_list[i]);

  /* Make sure that the last insn in the chain
     is not something that needs reloading.  */
  emit_note (NULL_PTR, NOTE_INSN_DELETED);
  /* Find all the pseudo registers that didn't get hard regs
     but do have known equivalent constants or memory slots.
     These include parameters (known equivalent to parameter slots)
     and cse'd or loop-moved constant memory addresses.

     Record constant equivalents in reg_equiv_constant
     so they will be substituted by find_reloads.
     Record memory equivalents in reg_equiv_memory_loc so they can
     be substituted eventually by altering the REG-rtx's.  */

  reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero ((char *) reg_equiv_constant, max_regno * sizeof (rtx));
  reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero ((char *) reg_equiv_memory_loc, max_regno * sizeof (rtx));
  reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero ((char *) reg_equiv_mem, max_regno * sizeof (rtx));
  reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero ((char *) reg_equiv_init, max_regno * sizeof (rtx));
  reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero ((char *) reg_equiv_address, max_regno * sizeof (rtx));
  reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
  bzero ((char *) reg_max_ref_width, max_regno * sizeof (int));
  cannot_omit_stores = (char *) alloca (max_regno);
  bzero (cannot_omit_stores, max_regno);

#ifdef SMALL_REGISTER_CLASSES
  CLEAR_HARD_REG_SET (forbidden_regs);
#endif
  /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
     Also find all paradoxical subregs and find largest such for each pseudo.
     On machines with small register classes, record hard registers that
     are used for user variables.  These can never be used for spills.
     Also look for a "constant" NOTE_INSN_SETJMP.  This means that all
     caller-saved registers must be marked live.  */

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      rtx set = single_set (insn);

      if (GET_CODE (insn) == NOTE && CONST_CALL_P (insn)
	  && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
	for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	  if (! call_used_regs[i])
	    regs_ever_live[i] = 1;

      if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
	{
	  rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
	  if (note
#ifdef LEGITIMATE_PIC_OPERAND_P
	      && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
		  || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
#endif
	      )
	    {
	      rtx x = XEXP (note, 0);
	      i = REGNO (SET_DEST (set));
	      if (i > LAST_VIRTUAL_REGISTER)
		{
		  if (GET_CODE (x) == MEM)
		    reg_equiv_memory_loc[i] = x;
		  else if (CONSTANT_P (x))
		    {
		      if (LEGITIMATE_CONSTANT_P (x))
			reg_equiv_constant[i] = x;
		      else
			reg_equiv_memory_loc[i]
			  = force_const_mem (GET_MODE (SET_DEST (set)), x);
		    }

		  /* If this register is being made equivalent to a MEM
		     and the MEM is not SET_SRC, the equivalencing insn
		     is one with the MEM as a SET_DEST and it occurs later.
		     So don't mark this insn now.  */
		  if (GET_CODE (x) != MEM
		      || rtx_equal_p (SET_SRC (set), x))
		    reg_equiv_init[i] = insn;
		}
	    }
	}

      /* If this insn is setting a MEM from a register equivalent to it,
	 this is the equivalencing insn.  */
      else if (set && GET_CODE (SET_DEST (set)) == MEM
	       && GET_CODE (SET_SRC (set)) == REG
	       && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
	       && rtx_equal_p (SET_DEST (set),
			       reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
	reg_equiv_init[REGNO (SET_SRC (set))] = insn;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	scan_paradoxical_subregs (PATTERN (insn));
    }
  /* Does this function require a frame pointer?  */

  frame_pointer_needed = (! flag_omit_frame_pointer
#ifdef EXIT_IGNORE_STACK
			  /* ?? If EXIT_IGNORE_STACK is set, we will not save
			     and restore sp for alloca.  So we can't eliminate
			     the frame pointer in that case.  At some point,
			     we should improve this by emitting the
			     sp-adjusting insns for this case.  */
			  || (current_function_calls_alloca
			      && EXIT_IGNORE_STACK)
#endif
			  || FRAME_POINTER_REQUIRED);

  num_eliminable = 0;

  /* Initialize the table of registers to eliminate.  The way we do this
     depends on how the eliminable registers were defined.  */
#ifdef ELIMINABLE_REGS
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    ep->can_eliminate = ep->can_eliminate_previous
      = (CAN_ELIMINATE (ep->from, ep->to)
	 && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
#else
  reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
    = ! frame_pointer_needed;
#endif
  /* Count the number of eliminable registers and build the FROM and TO
     REG rtx's.  Note that code in gen_rtx will cause, e.g.,
     gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
     We depend on this.  */
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      num_eliminable += ep->can_eliminate;
      ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
      ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
    }

  num_labels = max_label_num () - get_first_label_num ();

  /* Allocate the tables used to store offset information at labels.  */
  offsets_known_at = (char *) alloca (num_labels);
  offsets_at
    = (int (*)[NUM_ELIMINABLE_REGS])
      alloca (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));

  offsets_known_at -= get_first_label_num ();
  offsets_at -= get_first_label_num ();
  /* Alter each pseudo-reg rtx to contain its hard reg number.
     Assign stack slots to the pseudos that lack hard regs or equivalents.
     Do not touch virtual registers.  */

  for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
    alter_reg (i, -1);

  /* Round size of stack frame to BIGGEST_ALIGNMENT.  This must be done here
     because the stack size may be a part of the offset computation for
     register elimination.  */
  assign_stack_local (BLKmode, 0, 0);

  /* If we have some registers we think can be eliminated, scan all insns to
     see if there is an insn that sets one of these registers to something
     other than itself plus a constant.  If so, the register cannot be
     eliminated.  Doing this scan here eliminates an extra pass through the
     main reload loop in the most common case where register elimination
     cannot be done.  */
  for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
	|| GET_CODE (insn) == CALL_INSN)
      note_stores (PATTERN (insn), mark_not_eliminable);

#ifndef REGISTER_CONSTRAINTS
  /* If all the pseudo regs have hard regs,
     except for those that are never referenced,
     we know that no reloads are needed.  */
  /* But that is not true if there are register constraints, since
     in that case some pseudos might be in the wrong kind of hard reg.  */

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    if (reg_renumber[i] == -1 && reg_n_refs[i] != 0)
      break;

  if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
    return 0;
#endif
  /* Compute the order of preference for hard registers to spill.
     Store them by decreasing preference in potential_reload_regs.  */

  order_regs_for_reload (global);

  /* So far, no hard regs have been spilled.  */
  n_spills = 0;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    spill_reg_order[i] = -1;

  /* Initialize to -1, which means take the first spill register.  */
  last_spill_reg = -1;

  /* On most machines, we can't use any register explicitly used in the
     rtl as a spill register.  But on some, we have to.  Those will have
     taken care to keep the life of hard regs as short as possible.  */

#ifndef SMALL_REGISTER_CLASSES
  COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
#endif

  /* Spill any hard regs that we know we can't eliminate.  */
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    if (! ep->can_eliminate)
      spill_hard_reg (ep->from, global, dumpfile, 1);

#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
  if (frame_pointer_needed)
    spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
#endif
  if (global)
    for (i = 0; i < N_REG_CLASSES; i++)
      {
	basic_block_needs[i] = (char *) alloca (n_basic_blocks);
	bzero (basic_block_needs[i], n_basic_blocks);
      }

  /* From now on, we need to emit any moves without making new pseudos.  */
  reload_in_progress = 1;

  /* This loop scans the entire function each go-round
     and repeats until one repetition spills no additional hard regs.  */

  /* This flag is set when a pseudo reg is spilled,
     to require another pass.  Note that getting an additional reload
     reg does not necessarily imply any pseudo reg was spilled;
     sometimes we find a reload reg that no pseudo reg was allocated in.  */
  something_changed = 1;
  /* This flag is set if there are any insns that require reloading.  */
  something_needs_reloads = 0;
  /* This flag is set if there are any insns that require register
     eliminations.  */
  something_needs_elimination = 0;
  while (something_changed)
    {
      rtx after_call = 0;
      /* For each class, number of reload regs needed in that class.
	 This is the maximum over all insns of the needs in that class
	 of the individual insn.  */
      int max_needs[N_REG_CLASSES];
      /* For each class, size of group of consecutive regs
	 that is needed for the reloads of this class.  */
      int group_size[N_REG_CLASSES];
      /* For each class, max number of consecutive groups needed.
	 (Each group contains group_size[CLASS] consecutive registers.)  */
      int max_groups[N_REG_CLASSES];
      /* For each class, max number needed of regs that don't belong
	 to any of the groups.  */
      int max_nongroups[N_REG_CLASSES];
      /* For each class, the machine mode which requires consecutive
	 groups of regs of that class.
	 If two different modes ever require groups of one class,
	 they must be the same size and equally restrictive for that class,
	 otherwise we can't handle the complexity.  */
      enum machine_mode group_mode[N_REG_CLASSES];
      /* Record the insn where each maximum need is first found.  */
      rtx max_needs_insn[N_REG_CLASSES];
      rtx max_groups_insn[N_REG_CLASSES];
      rtx max_nongroups_insn[N_REG_CLASSES];
      rtx x;
      int starting_frame_size = get_frame_size ();
      int previous_frame_pointer_needed = frame_pointer_needed;
      static char *reg_class_names[] = REG_CLASS_NAMES;

      something_changed = 0;
      bzero ((char *) max_needs, sizeof max_needs);
      bzero ((char *) max_groups, sizeof max_groups);
      bzero ((char *) max_nongroups, sizeof max_nongroups);
      bzero ((char *) max_needs_insn, sizeof max_needs_insn);
      bzero ((char *) max_groups_insn, sizeof max_groups_insn);
      bzero ((char *) max_nongroups_insn, sizeof max_nongroups_insn);
      bzero ((char *) group_size, sizeof group_size);
      for (i = 0; i < N_REG_CLASSES; i++)
	group_mode[i] = VOIDmode;

      /* Keep track of which basic blocks are needing the reloads.  */
      this_block = 0;

      /* Remember whether any element of basic_block_needs
	 changes from 0 to 1 in this pass.  */
      new_basic_block_needs = 0;
      /* Reset all offsets on eliminable registers to their initial values.  */
#ifdef ELIMINABLE_REGS
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	{
	  INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
	  ep->previous_offset = ep->offset
	    = ep->max_offset = ep->initial_offset;
	}
#else
#ifdef INITIAL_FRAME_POINTER_OFFSET
      INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
#else
      if (!FRAME_POINTER_REQUIRED)
	abort ();
      reg_eliminate[0].initial_offset = 0;
#endif
      reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
	= reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
#endif

      num_not_at_initial_offset = 0;

      bzero ((char *) &offsets_known_at[get_first_label_num ()], num_labels);
      /* Set a known offset for each forced label to be at the initial offset
	 of each elimination.  We do this because we assume that all
	 computed jumps occur from a location where each elimination is
	 at its initial offset.  */

      for (x = forced_labels; x; x = XEXP (x, 1))
	if (XEXP (x, 0))
	  set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
      /* For each pseudo register that has an equivalent location defined,
	 try to eliminate any eliminable registers (such as the frame pointer)
	 assuming initial offsets for the replacement register, which
	 is the normal case.

	 If the resulting location is directly addressable, substitute
	 the MEM we just got directly for the old REG.

	 If it is not addressable but is a constant or the sum of a hard reg
	 and constant, it is probably not addressable because the constant is
	 out of range; in that case record the address.  We will generate
	 hairy code to compute the address in a register each time it is
	 needed.  Similarly if it is a hard register, but one that is not
	 valid as an address register.

	 If the location is not addressable, but does not have one of the
	 above forms, assign a stack slot.  We have to do this to avoid the
	 potential of producing lots of reloads if, e.g., a location involves
	 a pseudo that didn't get a hard register and has an equivalent memory
	 location that also involves a pseudo that didn't get a hard register.

	 Perhaps at some point we will improve reload_when_needed handling
	 so this problem goes away.  But that's very hairy.  */
      for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
	if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
	  {
	    rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);

	    if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
					 XEXP (x, 0)))
	      reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
	    else if (CONSTANT_P (XEXP (x, 0))
		     || (GET_CODE (XEXP (x, 0)) == REG
			 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
		     || (GET_CODE (XEXP (x, 0)) == PLUS
			 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
			 && (REGNO (XEXP (XEXP (x, 0), 0))
			     < FIRST_PSEUDO_REGISTER)
			 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
	      reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
	    else
	      {
		/* Make a new stack slot.  Then indicate that something
		   changed so we go back and recompute offsets for
		   eliminable registers because the allocation of memory
		   below might change some offset.  reg_equiv_{mem,address}
		   will be set up for this pseudo on the next pass around
		   the loop.  */
		reg_equiv_memory_loc[i] = 0;
		reg_equiv_init[i] = 0;
		alter_reg (i, -1);
		something_changed = 1;
	      }
	  }
      /* If we allocated another pseudo to the stack, redo elimination
	 bookkeeping.  */
      if (something_changed)
	continue;

      /* If caller-saves needs a group, initialize the group to include
	 the size and mode required for caller-saves.  */

      if (caller_save_group_size > 1)
	{
	  group_mode[(int) caller_save_spill_class] = Pmode;
	  group_size[(int) caller_save_spill_class] = caller_save_group_size;
	}

      /* Compute the most additional registers needed by any instruction.
	 Collect information separately for each class of regs.  */
      for (insn = first; insn; insn = NEXT_INSN (insn))
	{
	  if (global && this_block + 1 < n_basic_blocks
	      && insn == basic_block_head[this_block+1])
	    ++this_block;

	  /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
	     might include REG_LABEL), we need to see what effects this
	     has on the known offsets at labels.  */

	  if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
	      || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
		  && REG_NOTES (insn) != 0))
	    set_label_offsets (insn, insn, 0);

	  if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	    {
	      /* Nonzero means don't use a reload reg that overlaps
		 the place where a function value can be returned.  */
	      rtx avoid_return_reg = 0;
	      rtx old_body = PATTERN (insn);
	      int old_code = INSN_CODE (insn);
	      rtx old_notes = REG_NOTES (insn);
	      int did_elimination = 0;

	      /* To compute the number of reload registers of each class
		 needed for an insn, we must simulate what choose_reload_regs
		 can do.  We do this by splitting an insn into an "input" and
		 an "output" part.  RELOAD_OTHER reloads are used in both.
		 The input part uses those reloads, RELOAD_FOR_INPUT reloads,
		 which must be live over the entire input section of reloads,
		 and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
		 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
		 inputs.

		 The registers needed for output are RELOAD_OTHER and
		 RELOAD_FOR_OUTPUT, which are live for the entire output
		 portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
		 reloads for each operand.

		 The total number of registers needed is the maximum of the
		 inputs and outputs.  */
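
	      /* Illustrative example only (hypothetical numbers): if an insn
		 has, in some class, 2 RELOAD_FOR_INPUT reloads, 1
		 RELOAD_FOR_OPERAND_ADDRESS reload and 3 RELOAD_FOR_OUTPUT
		 reloads, the input part needs 2 + 1 = 3 registers, the output
		 part needs 3, and the insn as a whole needs MAX (3, 3) = 3,
		 not 2 + 1 + 3 = 6, because the input-side registers are dead
		 again by the time the output reloads are emitted.  */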
	      struct needs
		{
		  /* [0] is normal, [1] is nongroup.  */
		  int regs[2][N_REG_CLASSES];
		  int groups[N_REG_CLASSES];
		};

	      /* Each `struct needs' corresponds to one RELOAD_... type.  */
	      struct
		{
		  struct needs other;
		  struct needs input;
		  struct needs output;
		  struct needs insn;
		  struct needs other_addr;
		  struct needs op_addr;
		  struct needs op_addr_reload;
		  struct needs in_addr[MAX_RECOG_OPERANDS];
		  struct needs out_addr[MAX_RECOG_OPERANDS];
		} insn_needs;

	      /* If needed, eliminate any eliminable registers.  */
	      if (num_eliminable)
		did_elimination = eliminate_regs_in_insn (insn, 0);
#ifdef SMALL_REGISTER_CLASSES
	      /* Set avoid_return_reg if this is an insn
		 that might use the value of a function call.  */
	      if (GET_CODE (insn) == CALL_INSN)
		{
		  if (GET_CODE (PATTERN (insn)) == SET)
		    after_call = SET_DEST (PATTERN (insn));
		  else if (GET_CODE (PATTERN (insn)) == PARALLEL
			   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
		    after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
		}
	      else if (after_call != 0
		       && !(GET_CODE (PATTERN (insn)) == SET
			    && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
		{
		  if (reg_referenced_p (after_call, PATTERN (insn)))
		    avoid_return_reg = after_call;
		}
#endif /* SMALL_REGISTER_CLASSES */
	      /* Analyze the instruction.  */
	      find_reloads (insn, 0, spill_indirect_levels, global,
			    spill_reg_order);

	      /* Remember for later shortcuts which insns had any reloads or
		 register eliminations.

		 One might think that it would be worthwhile to mark insns
		 that need register replacements but not reloads, but this is
		 not safe because find_reloads may do some manipulation of
		 the insn (such as swapping commutative operands), which would
		 be lost when we restore the old pattern after register
		 replacement.  So the actions of find_reloads must be redone in
		 subsequent passes or in reload_as_needed.

		 However, it is safe to mark insns that need reloads
		 but not register replacement.  */

	      PUT_MODE (insn, (did_elimination ? QImode
			       : n_reloads ? HImode
			       : GET_MODE (insn) == DImode ? DImode
			       : VOIDmode));

	      /* Discard any register replacements done.  */
	      if (did_elimination)
		{
		  obstack_free (&reload_obstack, reload_firstobj);
		  PATTERN (insn) = old_body;
		  INSN_CODE (insn) = old_code;
		  REG_NOTES (insn) = old_notes;
		  something_needs_elimination = 1;
		}

	      /* If this insn has no reloads, we need not do anything except
		 in the case of a CALL_INSN when we have caller-saves and
		 caller-save needs reloads.  */

	      if (n_reloads == 0
		  && ! (GET_CODE (insn) == CALL_INSN
			&& caller_save_spill_class != NO_REGS))
		continue;

	      something_needs_reloads = 1;
	      bzero ((char *) &insn_needs, sizeof insn_needs);
	      /* Count each reload once in every class
		 containing the reload's own class.  */

	      for (i = 0; i < n_reloads; i++)
		{
		  register enum reg_class *p;
		  enum reg_class class = reload_reg_class[i];
		  int size;
		  enum machine_mode mode;
		  int nongroup_need;
		  struct needs *this_needs;

		  /* Don't count the dummy reloads, for which one of the
		     regs mentioned in the insn can be used for reloading.
		     Don't count optional reloads.
		     Don't count reloads that got combined with others.  */
		  if (reload_reg_rtx[i] != 0
		      || reload_optional[i] != 0
		      || (reload_out[i] == 0 && reload_in[i] == 0
			  && ! reload_secondary_p[i]))
		    continue;

		  /* Show that a reload register of this class is needed
		     in this basic block.  We do not use insn_needs and
		     insn_groups because they are overly conservative for
		     this purpose.  */
		  if (global && ! basic_block_needs[(int) class][this_block])
		    {
		      basic_block_needs[(int) class][this_block] = 1;
		      new_basic_block_needs = 1;
		    }

		  mode = reload_inmode[i];
		  if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
		    mode = reload_outmode[i];
		  size = CLASS_MAX_NREGS (class, mode);
		  /* If this class doesn't want a group, determine if we have
		     a nongroup need or a regular need.  We have a nongroup
		     need if this reload conflicts with a group reload whose
		     class intersects with this reload's class.  */

		  nongroup_need = 0;
		  if (size == 1)
		    for (j = 0; j < n_reloads; j++)
		      if ((CLASS_MAX_NREGS (reload_reg_class[j],
					    (GET_MODE_SIZE (reload_outmode[j])
					     > GET_MODE_SIZE (reload_inmode[j]))
					    ? reload_outmode[j]
					    : reload_inmode[j])
			   > 1)
			  && (!reload_optional[j])
			  && (reload_in[j] != 0 || reload_out[j] != 0
			      || reload_secondary_p[j])
			  && reloads_conflict (i, j)
			  && reg_classes_intersect_p (class,
						      reload_reg_class[j]))
			{
			  nongroup_need = 1;
			  break;
			}
		  /* Decide which time-of-use to count this reload for.  */
		  switch (reload_when_needed[i])
		    {
		    case RELOAD_OTHER:
		      this_needs = &insn_needs.other;
		      break;
		    case RELOAD_FOR_INPUT:
		      this_needs = &insn_needs.input;
		      break;
		    case RELOAD_FOR_OUTPUT:
		      this_needs = &insn_needs.output;
		      break;
		    case RELOAD_FOR_INSN:
		      this_needs = &insn_needs.insn;
		      break;
		    case RELOAD_FOR_OTHER_ADDRESS:
		      this_needs = &insn_needs.other_addr;
		      break;
		    case RELOAD_FOR_INPUT_ADDRESS:
		      this_needs = &insn_needs.in_addr[reload_opnum[i]];
		      break;
		    case RELOAD_FOR_OUTPUT_ADDRESS:
		      this_needs = &insn_needs.out_addr[reload_opnum[i]];
		      break;
		    case RELOAD_FOR_OPERAND_ADDRESS:
		      this_needs = &insn_needs.op_addr;
		      break;
		    case RELOAD_FOR_OPADDR_ADDR:
		      this_needs = &insn_needs.op_addr_reload;
		      break;
		    }
		  if (size > 1)
		    {
		      enum machine_mode other_mode, allocate_mode;

		      /* Count number of groups needed separately from
			 number of individual regs needed.  */
		      this_needs->groups[(int) class]++;
		      p = reg_class_superclasses[(int) class];
		      while (*p != LIM_REG_CLASSES)
			this_needs->groups[(int) *p++]++;

		      /* Record size and mode of a group of this class.  */
		      /* If more than one size group is needed,
			 make all groups the largest needed size.  */
		      if (group_size[(int) class] < size)
			{
			  other_mode = group_mode[(int) class];
			  allocate_mode = mode;

			  group_size[(int) class] = size;
			  group_mode[(int) class] = mode;
			}
		      else
			{
			  other_mode = mode;
			  allocate_mode = group_mode[(int) class];
			}

		      /* Crash if two dissimilar machine modes both need
			 groups of consecutive regs of the same class.  */

		      if (other_mode != VOIDmode && other_mode != allocate_mode
			  && ! modes_equiv_for_class_p (allocate_mode,
							other_mode, class))
			fatal_insn ("Two dissimilar machine modes both need groups of consecutive regs of the same class",
				    insn);
		    }
		  else if (size == 1)
		    {
		      this_needs->regs[nongroup_need][(int) class] += 1;
		      p = reg_class_superclasses[(int) class];
		      while (*p != LIM_REG_CLASSES)
			this_needs->regs[nongroup_need][(int) *p++] += 1;
		    }
		  else
		    abort ();
		}
	      /* All reloads have been counted for this insn;
		 now merge the various times of use.
		 This sets insn_needs, etc., to the maximum total number
		 of registers needed at any point in this insn.  */

	      for (i = 0; i < N_REG_CLASSES; i++)
		{
		  int in_max, out_max;

		  /* Compute normal and nongroup needs.  */
		  for (j = 0; j <= 1; j++)
		    {
		      for (in_max = 0, out_max = 0, k = 0;
			   k < reload_n_operands; k++)
			{
			  in_max
			    = MAX (in_max, insn_needs.in_addr[k].regs[j][i]);
			  out_max
			    = MAX (out_max, insn_needs.out_addr[k].regs[j][i]);
			}

		      /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
			 and operand addresses but not things used to reload
			 them.  Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
			 don't conflict with things needed to reload inputs or
			 outputs.  */

		      in_max = MAX (MAX (insn_needs.op_addr.regs[j][i],
					 insn_needs.op_addr_reload.regs[j][i]),
				    in_max);
		      out_max = MAX (out_max, insn_needs.insn.regs[j][i]);

		      insn_needs.input.regs[j][i]
			= MAX (insn_needs.input.regs[j][i]
			       + insn_needs.op_addr.regs[j][i]
			       + insn_needs.insn.regs[j][i],
			       in_max + insn_needs.input.regs[j][i]);

		      insn_needs.output.regs[j][i] += out_max;
		      insn_needs.other.regs[j][i]
			+= MAX (MAX (insn_needs.input.regs[j][i],
				     insn_needs.output.regs[j][i]),
				insn_needs.other_addr.regs[j][i]);
		    }
		  /* Now compute group needs.  */
		  for (in_max = 0, out_max = 0, j = 0;
		       j < reload_n_operands; j++)
		    {
		      in_max = MAX (in_max, insn_needs.in_addr[j].groups[i]);
		      out_max
			= MAX (out_max, insn_needs.out_addr[j].groups[i]);
		    }

		  in_max = MAX (MAX (insn_needs.op_addr.groups[i],
				     insn_needs.op_addr_reload.groups[i]),
				in_max);
		  out_max = MAX (out_max, insn_needs.insn.groups[i]);

		  insn_needs.input.groups[i]
		    = MAX (insn_needs.input.groups[i]
			   + insn_needs.op_addr.groups[i]
			   + insn_needs.insn.groups[i],
			   in_max + insn_needs.input.groups[i]);

		  insn_needs.output.groups[i] += out_max;
		  insn_needs.other.groups[i]
		    += MAX (MAX (insn_needs.input.groups[i],
				 insn_needs.output.groups[i]),
			    insn_needs.other_addr.groups[i]);
		}
	      /* If this is a CALL_INSN and caller-saves will need
		 a spill register, act as if the spill register is
		 needed for this insn.  However, the spill register
		 can be used by any reload of this insn, so we only
		 need do something if no need for that class has
		 been found.

		 The assumption that every CALL_INSN will trigger a
		 caller-save is highly conservative; however, the number
		 of cases where caller-saves will need a spill register but
		 a block containing a CALL_INSN won't need a spill register
		 of that class should be quite rare.

		 If a group is needed, the size and mode of the group will
		 have been set up at the beginning of this loop.  */

	      if (GET_CODE (insn) == CALL_INSN
		  && caller_save_spill_class != NO_REGS)
		{
		  /* See if this register would conflict with any reload
		     that needs a group.  */
		  int nongroup_need = 0;
		  int *caller_save_needs;

		  for (j = 0; j < n_reloads; j++)
		    if ((CLASS_MAX_NREGS (reload_reg_class[j],
					  (GET_MODE_SIZE (reload_outmode[j])
					   > GET_MODE_SIZE (reload_inmode[j]))
					  ? reload_outmode[j]
					  : reload_inmode[j])
			 > 1)
			&& reg_classes_intersect_p (caller_save_spill_class,
						    reload_reg_class[j]))
		      {
			nongroup_need = 1;
			break;
		      }

		  caller_save_needs
		    = (caller_save_group_size > 1
		       ? insn_needs.other.groups
		       : insn_needs.other.regs[nongroup_need]);

		  if (caller_save_needs[(int) caller_save_spill_class] == 0)
		    {
		      register enum reg_class *p
			= reg_class_superclasses[(int) caller_save_spill_class];

		      caller_save_needs[(int) caller_save_spill_class]++;

		      while (*p != LIM_REG_CLASSES)
			caller_save_needs[(int) *p++] += 1;
		    }

		  /* Show that this basic block will need a register of
		     this class.  */

		  if (global
		      && ! (basic_block_needs[(int) caller_save_spill_class]
			    [this_block]))
		    {
		      basic_block_needs[(int) caller_save_spill_class]
			[this_block] = 1;
		      new_basic_block_needs = 1;
		    }
		}
#ifdef SMALL_REGISTER_CLASSES
	      /* If this insn stores the value of a function call,
		 and that value is in a register that has been spilled,
		 and if the insn needs a reload in a class
		 that might use that register as the reload register,
		 then add an extra need in that class.
		 This makes sure we have a register available that does
		 not overlap the return value.  */

	      if (avoid_return_reg)
		{
		  int regno = REGNO (avoid_return_reg);
		  int nregs
		    = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
		  int r;
		  enum reg_class *p;
		  int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES];

		  /* First compute the "basic needs", which counts a
		     need only in the smallest class in which it
		     is required.  */

		  bcopy ((char *) insn_needs.other.regs[0],
			 (char *) basic_needs, sizeof basic_needs);
		  bcopy ((char *) insn_needs.other.groups,
			 (char *) basic_groups, sizeof basic_groups);

		  for (i = 0; i < N_REG_CLASSES; i++)
		    {
		      if (basic_needs[i] >= 0)
			for (p = reg_class_superclasses[i];
			     *p != LIM_REG_CLASSES; p++)
			  basic_needs[(int) *p] -= basic_needs[i];

		      if (basic_groups[i] >= 0)
			for (p = reg_class_superclasses[i];
			     *p != LIM_REG_CLASSES; p++)
			  basic_groups[(int) *p] -= basic_groups[i];
		    }

		  /* Now count extra regs if there might be a conflict with
		     the return value register.  */

		  for (r = regno; r < regno + nregs; r++)
		    if (spill_reg_order[r] >= 0)
		      for (i = 0; i < N_REG_CLASSES; i++)
			if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
			  {
			    if (basic_needs[i] > 0)
			      {
				insn_needs.other.regs[0][i]++;
				p = reg_class_superclasses[i];
				while (*p != LIM_REG_CLASSES)
				  insn_needs.other.regs[0][(int) *p++]++;
			      }
			    if (basic_groups[i] > 0)
			      {
				insn_needs.other.groups[i]++;
				p = reg_class_superclasses[i];
				while (*p != LIM_REG_CLASSES)
				  insn_needs.other.groups[(int) *p++]++;
			      }
			  }
		}
#endif /* SMALL_REGISTER_CLASSES */
	      /* For each class, collect maximum need of any insn.  */

	      for (i = 0; i < N_REG_CLASSES; i++)
		{
		  if (max_needs[i] < insn_needs.other.regs[0][i])
		    {
		      max_needs[i] = insn_needs.other.regs[0][i];
		      max_needs_insn[i] = insn;
		    }
		  if (max_groups[i] < insn_needs.other.groups[i])
		    {
		      max_groups[i] = insn_needs.other.groups[i];
		      max_groups_insn[i] = insn;
		    }
		  if (max_nongroups[i] < insn_needs.other.regs[1][i])
		    {
		      max_nongroups[i] = insn_needs.other.regs[1][i];
		      max_nongroups_insn[i] = insn;
		    }
		}
	    }
	  /* Note that there is a continue statement above.  */
	}
      /* If we allocated any new memory locations, make another pass
	 since it might have changed elimination offsets.  */
      if (starting_frame_size != get_frame_size ())
	something_changed = 1;

      if (dumpfile)
	for (i = 0; i < N_REG_CLASSES; i++)
	  {
	    if (max_needs[i] > 0)
	      fprintf (dumpfile,
		       ";; Need %d reg%s of class %s (for insn %d).\n",
		       max_needs[i], max_needs[i] == 1 ? "" : "s",
		       reg_class_names[i], INSN_UID (max_needs_insn[i]));
	    if (max_nongroups[i] > 0)
	      fprintf (dumpfile,
		       ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
		       max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
		       reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
	    if (max_groups[i] > 0)
	      fprintf (dumpfile,
		       ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
		       max_groups[i], max_groups[i] == 1 ? "" : "s",
		       mode_name[(int) group_mode[i]],
		       reg_class_names[i], INSN_UID (max_groups_insn[i]));
	  }
      /* If we have caller-saves, set up the save areas and see if caller-save
	 will need a spill register.  */

      if (caller_save_needed
	  && ! setup_save_areas (&something_changed)
	  && caller_save_spill_class == NO_REGS)
	{
	  /* The class we will need depends on whether the machine
	     supports the sum of two registers for an address; see
	     find_address_reloads for details.  */

	  caller_save_spill_class
	    = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
	  caller_save_group_size
	    = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
	  something_changed = 1;
	}
      /* See if anything that happened changes which eliminations are valid.
	 For example, on the Sparc, whether or not the frame pointer can
	 be eliminated can depend on what registers have been used.  We need
	 not check some conditions again (such as flag_omit_frame_pointer)
	 since they can't have changed.  */

      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
#ifdef ELIMINABLE_REGS
	    || ! CAN_ELIMINATE (ep->from, ep->to)
#endif
	    )
	  ep->can_eliminate = 0;
      /* Look for the case where we have discovered that we can't replace
	 register A with register B and that means that we will now be
	 trying to replace register A with register C.  This means we can
	 no longer replace register C with register B and we need to disable
	 such an elimination, if it exists.  This occurs often with A == ap,
	 B == sp, and C == fp.  */

      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	{
	  struct elim_table *op;
	  register int new_to = -1;

	  if (! ep->can_eliminate && ep->can_eliminate_previous)
	    {
	      /* Find the current elimination for ep->from, if there is a
		 different one.  */
	      for (op = reg_eliminate;
		   op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
		if (op->from == ep->from && op->can_eliminate)
		  {
		    new_to = op->to;
		    break;
		  }

	      /* See if there is an elimination of NEW_TO -> EP->TO.  If so,
		 disable it.  */
	      for (op = reg_eliminate;
		   op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
		if (op->from == new_to && op->to == ep->to)
		  op->can_eliminate = 0;
	    }
	}
      /* See if any registers that we thought we could eliminate the previous
	 time are no longer eliminable.  If so, something has changed and we
	 must spill the register.  Also, recompute the number of eliminable
	 registers and see if the frame pointer is needed; it is if there is
	 no elimination of the frame pointer that we can perform.  */

      frame_pointer_needed = 1;
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	{
	  if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
	      && ep->to != HARD_FRAME_POINTER_REGNUM)
	    frame_pointer_needed = 0;

	  if (! ep->can_eliminate && ep->can_eliminate_previous)
	    {
	      ep->can_eliminate_previous = 0;
	      spill_hard_reg (ep->from, global, dumpfile, 1);
	      something_changed = 1;
	      num_eliminable--;
	    }
	}
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
      /* If we didn't need a frame pointer last time, but we do now, spill
	 the hard frame pointer.  */
      if (frame_pointer_needed && ! previous_frame_pointer_needed)
	{
	  spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
	  something_changed = 1;
	}
#endif
      /* If all needs are met, we win.  */

      for (i = 0; i < N_REG_CLASSES; i++)
	if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
	  break;
      if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
	break;

      /* Not all needs are met; must spill some hard regs.  */

      /* Put all registers spilled so far back in potential_reload_regs, but
	 put them at the front, since we've already spilled most of the
	 pseudos in them (we might have left some pseudos unspilled if they
	 were in a block that didn't need any spill registers of a conflicting
	 class).  We used to try to mark off the need for those registers,
	 but doing so properly is very complex and reallocating them is the
	 simpler approach.  First, "pack" potential_reload_regs by pushing
	 any nonnegative entries towards the end.  That will leave room
	 for the registers we already spilled.

	 Also, undo the marking of the spill registers from the last time
	 around in FORBIDDEN_REGS since we will probably be allocating
	 them again below.

	 ??? It is theoretically possible that we might end up not using one
	 of our previously-spilled registers in this allocation, even though
	 they are at the head of the list.  It's not clear what to do about
	 this, but it was no better before, when we marked off the needs met
	 by the previously-spilled registers.  With the current code, globals
	 can be allocated into these registers, but locals cannot.  */
      for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
	if (potential_reload_regs[i] != -1)
	  potential_reload_regs[j--] = potential_reload_regs[i];

      for (i = 0; i < n_spills; i++)
	{
	  potential_reload_regs[i] = spill_regs[i];
	  spill_reg_order[spill_regs[i]] = -1;
	  CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
	}

      n_spills = 0;
      /* Now find more reload regs to satisfy the remaining need.
	 Do it by ascending class number, since otherwise a reg
	 might be spilled for a big class and might fail to count
	 for a smaller class even though it belongs to that class.

	 Count spilled regs in `n_spills', and add entries to
	 `spill_regs' and `spill_reg_order'.

	 ??? Note there is a problem here.
	 When there is a need for a group in a high-numbered class,
	 and also need for non-group regs that come from a lower class,
	 the non-group regs are chosen first.  If there aren't many regs,
	 they might leave no room for a group.

	 This was happening on the 386.  To fix it, we added the code
	 that calls possible_group_p, so that the lower class won't
	 break up the last possible group.

	 Really fixing the problem would require changes above
	 in counting the regs already spilled, and in choose_reload_regs.
	 It might be hard to avoid introducing bugs there.  */

      CLEAR_HARD_REG_SET (counted_for_groups);
      CLEAR_HARD_REG_SET (counted_for_nongroups);
1629 for (class = 0; class < N_REG_CLASSES
; class++)
	  /* First get the groups of registers.
	     If we got single registers first, we might fragment
	     potential groups.  */
	  while (max_groups[class] > 0)
	    {
	      /* If any single spilled regs happen to form groups,
		 count them now.  Maybe we don't really need
		 to spill another group.  */
	      count_possible_groups (group_size, group_mode, max_groups,
				     class);

	      if (max_groups[class] <= 0)
		break;
	      /* Groups of size 2 (the only groups used on most machines)
		 are treated specially.  */
	      if (group_size[class] == 2)
		{
		  /* First, look for a register that will complete a group.  */
		  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		    {
		      j = potential_reload_regs[i];
		      if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
			  &&
			  ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
			    && TEST_HARD_REG_BIT (reg_class_contents[class], j)
			    && TEST_HARD_REG_BIT (reg_class_contents[class], other)
			    && HARD_REGNO_MODE_OK (other, group_mode[class])
			    && ! TEST_HARD_REG_BIT (counted_for_nongroups,
						    other)
			    /* We don't want one part of another group.
			       We could get "two groups" that overlap!  */
			    && ! TEST_HARD_REG_BIT (counted_for_groups, other))
			   ||
			   (j < FIRST_PSEUDO_REGISTER - 1
			    && (other = j + 1, spill_reg_order[other] >= 0)
			    && TEST_HARD_REG_BIT (reg_class_contents[class], j)
			    && TEST_HARD_REG_BIT (reg_class_contents[class], other)
			    && HARD_REGNO_MODE_OK (j, group_mode[class])
			    && ! TEST_HARD_REG_BIT (counted_for_nongroups,
						    other)
			    && ! TEST_HARD_REG_BIT (counted_for_groups,
						    other))))
			{
			  register enum reg_class *p;

			  /* We have found one that will complete a group,
			     so count off one group as provided.  */
			  max_groups[class]--;
			  p = reg_class_superclasses[class];
			  while (*p != LIM_REG_CLASSES)
			    {
			      if (group_size[(int) *p] <= group_size[class])
				max_groups[(int) *p]--;
			      p++;
			    }

			  /* Indicate both these regs are part of a group.  */
			  SET_HARD_REG_BIT (counted_for_groups, j);
			  SET_HARD_REG_BIT (counted_for_groups, other);
			  break;
			}
		    }
1696 /* We can't complete a group, so start one. */
1697 #ifdef SMALL_REGISTER_CLASSES
1698 /* Look for a pair neither of which is explicitly used. */
1699 if (i
== FIRST_PSEUDO_REGISTER
)
1700 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1703 j
= potential_reload_regs
[i
];
1704 /* Verify that J+1 is a potential reload reg. */
1705 for (k
= 0; k
< FIRST_PSEUDO_REGISTER
; k
++)
1706 if (potential_reload_regs
[k
] == j
+ 1)
1708 if (j
>= 0 && j
+ 1 < FIRST_PSEUDO_REGISTER
1709 && k
< FIRST_PSEUDO_REGISTER
1710 && spill_reg_order
[j
] < 0 && spill_reg_order
[j
+ 1] < 0
1711 && TEST_HARD_REG_BIT (reg_class_contents
[class], j
)
1712 && TEST_HARD_REG_BIT (reg_class_contents
[class], j
+ 1)
1713 && HARD_REGNO_MODE_OK (j
, group_mode
[class])
1714 && ! TEST_HARD_REG_BIT (counted_for_nongroups
,
1716 && ! TEST_HARD_REG_BIT (bad_spill_regs
, j
+ 1)
1717 /* Reject J at this stage
1718 if J+1 was explicitly used. */
1719 && ! regs_explicitly_used
[j
+ 1])
1723 /* Now try any group at all
1724 whose registers are not in bad_spill_regs. */
1725 if (i
== FIRST_PSEUDO_REGISTER
)
1726 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1729 j
= potential_reload_regs
[i
];
1730 /* Verify that J+1 is a potential reload reg. */
1731 for (k
= 0; k
< FIRST_PSEUDO_REGISTER
; k
++)
1732 if (potential_reload_regs
[k
] == j
+ 1)
1734 if (j
>= 0 && j
+ 1 < FIRST_PSEUDO_REGISTER
1735 && k
< FIRST_PSEUDO_REGISTER
1736 && spill_reg_order
[j
] < 0 && spill_reg_order
[j
+ 1] < 0
1737 && TEST_HARD_REG_BIT (reg_class_contents
[class], j
)
1738 && TEST_HARD_REG_BIT (reg_class_contents
[class], j
+ 1)
1739 && HARD_REGNO_MODE_OK (j
, group_mode
[class])
1740 && ! TEST_HARD_REG_BIT (counted_for_nongroups
,
1742 && ! TEST_HARD_REG_BIT (bad_spill_regs
, j
+ 1))
		  /* I should be the index in potential_reload_regs
		     of the new reload reg we have found.  */

		  if (i >= FIRST_PSEUDO_REGISTER)
		    /* There are no groups left to spill.  */
		    spill_failure (max_groups_insn[class]);

		  something_changed
		    |= new_spill_reg (i, class, max_needs, NULL_PTR,
				      global, dumpfile);
1763 /* For groups of more than 2 registers,
1764 look for a sufficient sequence of unspilled registers,
1765 and spill them all at once. */
1766 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1770 j
= potential_reload_regs
[i
];
1772 && j
+ group_size
[class] <= FIRST_PSEUDO_REGISTER
1773 && HARD_REGNO_MODE_OK (j
, group_mode
[class]))
1775 /* Check each reg in the sequence. */
1776 for (k
= 0; k
< group_size
[class]; k
++)
1777 if (! (spill_reg_order
[j
+ k
] < 0
1778 && ! TEST_HARD_REG_BIT (bad_spill_regs
, j
+ k
)
1779 && TEST_HARD_REG_BIT (reg_class_contents
[class], j
+ k
)))
1781 /* We got a full sequence, so spill them all. */
1782 if (k
== group_size
[class])
1784 register enum reg_class
*p
;
1785 for (k
= 0; k
< group_size
[class]; k
++)
1788 SET_HARD_REG_BIT (counted_for_groups
, j
+ k
);
1789 for (idx
= 0; idx
< FIRST_PSEUDO_REGISTER
; idx
++)
1790 if (potential_reload_regs
[idx
] == j
+ k
)
1793 |= new_spill_reg (idx
, class,
1794 max_needs
, NULL_PTR
,
1798 /* We have found one that will complete a group,
1799 so count off one group as provided. */
1800 max_groups
[class]--;
1801 p
= reg_class_superclasses
[class];
1802 while (*p
!= LIM_REG_CLASSES
)
1804 if (group_size
[(int) *p
]
1805 <= group_size
[class])
1806 max_groups
[(int) *p
]--;
1813 /* We couldn't find any registers for this reload.
1814 Avoid going into an infinite loop. */
1815 if (i
>= FIRST_PSEUDO_REGISTER
)
1817 /* There are no groups left. */
1818 spill_failure (max_groups_insn
[class]);
	  /* Now similarly satisfy all need for single registers.  */

	  while (max_needs[class] > 0 || max_nongroups[class] > 0)
	    {
	      /* If we spilled enough regs, but they weren't counted
		 against the non-group need, see if we can count them now.
		 If so, we can avoid some actual spilling.  */
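
	      /* For instance (an illustrative scenario, not target-specific):
		 if regs 4 and 5 were spilled earlier to satisfy max_needs and
		 a non-group need for this class remains, the loop below can
		 mark reg 4 in counted_for_nongroups and decrement
		 max_nongroups[class] instead of spilling yet another reg.  */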
	      if (max_needs[class] <= 0 && max_nongroups[class] > 0)
		for (i = 0; i < n_spills; i++)
		  if (TEST_HARD_REG_BIT (reg_class_contents[class],
					 spill_regs[i])
		      && !TEST_HARD_REG_BIT (counted_for_groups,
					     spill_regs[i])
		      && !TEST_HARD_REG_BIT (counted_for_nongroups,
					     spill_regs[i])
		      && max_nongroups[class] > 0)
		    {
		      register enum reg_class *p;

		      SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
		      max_nongroups[class]--;
		      p = reg_class_superclasses[class];
		      while (*p != LIM_REG_CLASSES)
			max_nongroups[(int) *p++]--;
		    }

	      if (max_needs[class] <= 0 && max_nongroups[class] <= 0)
		break;
1853 /* Consider the potential reload regs that aren't
1854 yet in use as reload regs, in order of preference.
1855 Find the most preferred one that's in this class. */
1857 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1858 if (potential_reload_regs
[i
] >= 0
1859 && TEST_HARD_REG_BIT (reg_class_contents
[class],
1860 potential_reload_regs
[i
])
1861 /* If this reg will not be available for groups,
1862 pick one that does not foreclose possible groups.
1863 This is a kludge, and not very general,
1864 but it should be sufficient to make the 386 work,
1865 and the problem should not occur on machines with
1867 && (max_nongroups
[class] == 0
1868 || possible_group_p (potential_reload_regs
[i
], max_groups
)))
1871 /* If we couldn't get a register, try to get one even if we
1872 might foreclose possible groups. This may cause problems
1873 later, but that's better than aborting now, since it is
1874 possible that we will, in fact, be able to form the needed
1875 group even with this allocation. */
1877 if (i
>= FIRST_PSEUDO_REGISTER
1878 && (asm_noperands (max_needs
[class] > 0
1879 ? max_needs_insn
[class]
1880 : max_nongroups_insn
[class])
1882 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1883 if (potential_reload_regs
[i
] >= 0
1884 && TEST_HARD_REG_BIT (reg_class_contents
[class],
1885 potential_reload_regs
[i
]))
	      /* I should be the index in potential_reload_regs
		 of the new reload reg we have found.  */

	      if (i >= FIRST_PSEUDO_REGISTER)
		/* There are no possible registers left to spill.  */
		spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
			       : max_nongroups_insn[class]);

	      something_changed
		|= new_spill_reg (i, class, max_needs, max_nongroups,
				  global, dumpfile);
  /* If global-alloc was run, notify it of any register eliminations we have
     done.  */
  if (global)
    for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
      if (ep->can_eliminate)
	mark_elimination (ep->from, ep->to);

  /* Insert code to save and restore call-clobbered hard regs
     around calls.  Tell it what mode to use so that we will process
     those insns in reload_as_needed if we have to.  */

  if (caller_save_needed)
    save_call_clobbered_regs (num_eliminable ? QImode
			      : caller_save_spill_class != NO_REGS ? HImode
			      : VOIDmode);
  /* If a pseudo has no hard reg, delete the insns that made the equivalence.
     If that insn didn't set the register (i.e., it copied the register to
     memory), just delete that insn instead of the equivalencing insn plus
     anything now dead.  If we call delete_dead_insn on that insn, we may
     delete the insn that actually sets the register if the register dies
     there, and that is incorrect.  */

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
	&& GET_CODE (reg_equiv_init[i]) != NOTE)
      {
	if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
	  delete_dead_insn (reg_equiv_init[i]);
	else
	  {
	    PUT_CODE (reg_equiv_init[i], NOTE);
	    NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
	    NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
	  }
      }
  /* Use the reload registers where necessary
     by generating move instructions to move the must-be-register
     values into or out of the reload registers.  */

  if (something_needs_reloads || something_needs_elimination
      || (caller_save_needed && num_eliminable)
      || caller_save_spill_class != NO_REGS)
    reload_as_needed (first, global);
  /* If we were able to eliminate the frame pointer, show that it is no
     longer live at the start of any basic block.  If it is live by
     virtue of being in a pseudo, that pseudo will be marked live
     and hence the frame pointer will be known to be live via that
     pseudo.  */

  if (! frame_pointer_needed)
    for (i = 0; i < n_basic_blocks; i++)
      basic_block_live_at_start[i][HARD_FRAME_POINTER_REGNUM / REGSET_ELT_BITS]
	&= ~ ((REGSET_ELT_TYPE) 1 << (HARD_FRAME_POINTER_REGNUM
				      % REGSET_ELT_BITS));
  /* Come here (with failure set nonzero) if we can't get enough spill regs
     and we decide not to abort about it.  */
 failed:

  reload_in_progress = 0;
  /* Now eliminate all pseudo regs by modifying them into
     their equivalent memory references.
     The REG-rtx's for the pseudos are modified in place,
     so all insns that used to refer to them now refer to memory.

     For a reg that has a reg_equiv_address, all those insns
     were changed by reloading so that no insns refer to it any longer;
     but the DECL_RTL of a variable decl may refer to it,
     and if so this causes the debugging info to mention the variable.  */
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      rtx addr = 0;
      int in_struct = 0;

      if (reg_equiv_mem[i])
	{
	  addr = XEXP (reg_equiv_mem[i], 0);
	  in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
	}

      if (reg_equiv_address[i])
	addr = reg_equiv_address[i];

      if (addr)
	{
	  if (reg_renumber[i] < 0)
	    {
	      rtx reg = regno_reg_rtx[i];
	      XEXP (reg, 0) = addr;
	      REG_USERVAR_P (reg) = 0;
	      MEM_IN_STRUCT_P (reg) = in_struct;
	      PUT_CODE (reg, MEM);
	    }
	  else if (reg_equiv_mem[i])
	    XEXP (reg_equiv_mem[i], 0) = addr;
	}
    }
#ifdef PRESERVE_DEATH_INFO_REGNO_P
  /* Make a pass over all the insns and remove death notes for things that
     are no longer registers or no longer die in the insn (e.g., an input
     and output pseudo being tied).  */

  for (insn = first; insn; insn = NEXT_INSN (insn))
    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
      {
	rtx note, next;

	for (note = REG_NOTES (insn); note; note = next)
	  {
	    next = XEXP (note, 1);
	    if (REG_NOTE_KIND (note) == REG_DEAD
		&& (GET_CODE (XEXP (note, 0)) != REG
		    || reg_set_p (XEXP (note, 0), PATTERN (insn))))
	      remove_note (insn, note);
	  }
      }
#endif
  /* Indicate that we no longer have known memory locations or constants.  */
  reg_equiv_constant = 0;
  reg_equiv_memory_loc = 0;

  if (scratch_list)
    free (scratch_list);
  scratch_list = 0;
  if (scratch_block)
    free (scratch_block);
  scratch_block = 0;

  CLEAR_HARD_REG_SET (used_spill_regs);
  for (i = 0; i < n_spills; i++)
    SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
/* Nonzero if, after spilling reg REGNO for non-groups,
   it will still be possible to find a group if we still need one.  */
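
/* Sketch of the check performed below (illustrative, with made-up register
   numbers): if the only class still needing a group is CLASS and REGNO is 5,
   the function looks for some other adjacent pair, say (2,3) or (6,7), that
   is either still spillable or already spilled but not reserved; finding one
   means that spilling reg 5 for a non-group need cannot foreclose the last
   possible group.  */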
static int
possible_group_p (regno, max_groups)
     int regno;
     int *max_groups;
{
  int i;
  int class = (int) NO_REGS;

  for (i = 0; i < (int) N_REG_CLASSES; i++)
    if (max_groups[i] > 0)
      {
	class = i;
	break;
      }

  if (class == (int) NO_REGS)
    return 1;

  /* Consider each pair of consecutive registers.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
    {
      /* Ignore pairs that include reg REGNO.  */
      if (i == regno || i + 1 == regno)
	continue;

      /* Ignore pairs that are outside the class that needs the group.
	 ??? Here we fail to handle the case where two different classes
	 independently need groups.  But this never happens with our
	 current machine descriptions.  */
      if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
	     && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
	continue;

      /* A pair of consecutive regs we can still spill does the trick.  */
      if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
	  && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
	  && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
	return 1;

      /* A pair of one already spilled and one we can spill does it
	 provided the one already spilled is not otherwise reserved.  */
      if (spill_reg_order[i] < 0
	  && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
	  && spill_reg_order[i + 1] >= 0
	  && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
	  && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
	return 1;
      if (spill_reg_order[i + 1] < 0
	  && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
	  && spill_reg_order[i] >= 0
	  && ! TEST_HARD_REG_BIT (counted_for_groups, i)
	  && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
	return 1;
    }

  return 0;
}
2107 /* Count any groups of CLASS that can be formed from the registers recently
2111 count_possible_groups (group_size
, group_mode
, max_groups
, class)
2113 enum machine_mode
*group_mode
;
2120 /* Now find all consecutive groups of spilled registers
2121 and mark each group off against the need for such groups.
2122 But don't count them against ordinary need, yet. */
2124 if (group_size
[class] == 0)
2127 CLEAR_HARD_REG_SET (new);
2129 /* Make a mask of all the regs that are spill regs in class I. */
2130 for (i
= 0; i
< n_spills
; i
++)
2131 if (TEST_HARD_REG_BIT (reg_class_contents
[class], spill_regs
[i
])
2132 && ! TEST_HARD_REG_BIT (counted_for_groups
, spill_regs
[i
])
2133 && ! TEST_HARD_REG_BIT (counted_for_nongroups
, spill_regs
[i
]))
2134 SET_HARD_REG_BIT (new, spill_regs
[i
]);
2136 /* Find each consecutive group of them. */
2137 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
&& max_groups
[class] > 0; i
++)
2138 if (TEST_HARD_REG_BIT (new, i
)
2139 && i
+ group_size
[class] <= FIRST_PSEUDO_REGISTER
2140 && HARD_REGNO_MODE_OK (i
, group_mode
[class]))
2142 for (j
= 1; j
< group_size
[class]; j
++)
2143 if (! TEST_HARD_REG_BIT (new, i
+ j
))
2146 if (j
== group_size
[class])
2148 /* We found a group. Mark it off against this class's need for
2149 groups, and against each superclass too. */
2150 register enum reg_class
*p
;
2152 max_groups
[class]--;
2153 p
= reg_class_superclasses
[class];
2154 while (*p
!= LIM_REG_CLASSES
)
2156 if (group_size
[(int) *p
] <= group_size
[class])
2157 max_groups
[(int) *p
]--;
2161 /* Don't count these registers again. */
2162 for (j
= 0; j
< group_size
[class]; j
++)
2163 SET_HARD_REG_BIT (counted_for_groups
, i
+ j
);
2166 /* Skip to the last reg in this group. When i is incremented above,
2167 it will then point to the first reg of the next possible group. */
/* ALLOCATE_MODE is a register mode that needs to be reloaded.  OTHER_MODE is
   another mode that needs to be reloaded for the same register class CLASS.
   If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
   ALLOCATE_MODE will never be smaller than OTHER_MODE.

   This code used to also fail if any reg in CLASS allows OTHER_MODE but not
   ALLOCATE_MODE.  This test is unnecessary, because we will never try to put
   something of mode ALLOCATE_MODE into an OTHER_MODE register.  Testing this
   causes unnecessary failures on machines requiring alignment of register
   groups when the two modes are different sizes, because the larger mode has
   more strict alignment rules than the smaller mode.  */
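
/* Illustrative example (register numbers and modes made up): if CLASS
   contains regs 0..7, ALLOCATE_MODE is DImode and OTHER_MODE is SImode,
   and reg 7 accepts DImode but not SImode, the function below returns 0,
   so the caller cannot treat the two modes as equivalent when counting
   its reload needs for this class.  */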
static int
modes_equiv_for_class_p (allocate_mode, other_mode, class)
     enum machine_mode allocate_mode, other_mode;
     enum reg_class class;
{
  register int regno;

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    {
      if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
	  && HARD_REGNO_MODE_OK (regno, allocate_mode)
	  && ! HARD_REGNO_MODE_OK (regno, other_mode))
	return 0;
    }
  return 1;
}
/* Handle the failure to find a register to spill.
   INSN should be one of the insns which needed this particular spill reg.  */

static void
spill_failure (insn)
     rtx insn;
{
  if (asm_noperands (PATTERN (insn)) >= 0)
    error_for_asm (insn, "`asm' needs too many reloads");
  else
    fatal_insn ("Unable to find a register to spill.", insn);
}
/* Add a new register to the tables of available spill-registers
   (as well as spilling all pseudos allocated to the register).
   I is the index of this register in potential_reload_regs.
   CLASS is the regclass whose need is being satisfied.
   MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
   so that this register can count off against them.
   MAX_NONGROUPS is 0 if this register is part of a group.
   GLOBAL and DUMPFILE are the same as the args that `reload' got.  */
2223 new_spill_reg (i
, class, max_needs
, max_nongroups
, global
, dumpfile
)
2231 register enum reg_class
*p
;
2233 int regno
= potential_reload_regs
[i
];
2235 if (i
>= FIRST_PSEUDO_REGISTER
)
2236 abort (); /* Caller failed to find any register. */
2238 if (fixed_regs
[regno
] || TEST_HARD_REG_BIT (forbidden_regs
, regno
))
2239 fatal ("fixed or forbidden register was spilled.\n\
2240 This may be due to a compiler bug or to impossible asm\n\
2241 statements or clauses.");
2243 /* Make reg REGNO an additional reload reg. */
2245 potential_reload_regs
[i
] = -1;
2246 spill_regs
[n_spills
] = regno
;
2247 spill_reg_order
[regno
] = n_spills
;
2249 fprintf (dumpfile
, "Spilling reg %d.\n", spill_regs
[n_spills
]);
2251 /* Clear off the needs we just satisfied. */
2254 p
= reg_class_superclasses
[class];
2255 while (*p
!= LIM_REG_CLASSES
)
2256 max_needs
[(int) *p
++]--;
2258 if (max_nongroups
&& max_nongroups
[class] > 0)
2260 SET_HARD_REG_BIT (counted_for_nongroups
, regno
);
2261 max_nongroups
[class]--;
2262 p
= reg_class_superclasses
[class];
2263 while (*p
!= LIM_REG_CLASSES
)
2264 max_nongroups
[(int) *p
++]--;
2267 /* Spill every pseudo reg that was allocated to this reg
2268 or to something that overlaps this reg. */
2270 val
= spill_hard_reg (spill_regs
[n_spills
], global
, dumpfile
, 0);
  /* If there are some registers still to eliminate and this register
     wasn't ever used before, additional stack space may have to be
     allocated to store this register.  Thus, we may have changed the offset
     between the stack and frame pointers, so mark that something has changed.
     (If new pseudos were spilled, thus requiring more space, VAL would have
     been set non-zero by the call to spill_hard_reg above, since additional
     reloads may be needed in that case.)

     One might think that we need only set VAL to 1 if this is a call-used
     register.  However, the set of registers that must be saved by the
     prologue is not identical to the call-used set.  For example, the
     register used by the call insn for the return PC is a call-used register,
     but must be saved by the prologue.  */
  if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
    val = 1;

  regs_ever_live[spill_regs[n_spills]] = 1;

  return val;
}
/* Delete an unneeded INSN and any previous insns whose sole purpose is
   loading data that is dead in INSN.  */

static void
delete_dead_insn (insn)
     rtx insn;
{
  rtx prev = prev_real_insn (insn);
  rtx prev_dest;

  /* If the previous insn sets a register that dies in our insn, delete it
     too.  */
  if (prev && GET_CODE (PATTERN (prev)) == SET
      && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
      && reg_mentioned_p (prev_dest, PATTERN (insn))
      && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
    delete_dead_insn (prev);

  PUT_CODE (insn, NOTE);
  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
  NOTE_SOURCE_FILE (insn) = 0;
}
/* Modify the home of pseudo-reg I.
   The new home is present in reg_renumber[I].

   FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
   or it may be -1, meaning there is none or it is not relevant.
   This is used so that all pseudos spilled from a given hard reg
   can share one stack slot.  */
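
/* For example (a hypothetical scenario): if pseudos 40 and 41 were both
   allocated to hard reg 3 and reg 3 is then taken as a spill reg, both
   pseudos arrive here with FROM_REG == 3, so the second call can reuse
   the stack slot recorded in spill_stack_slot[3] instead of allocating
   a new one, provided the recorded slot is wide enough.  */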
static void
alter_reg (i, from_reg)
     register int i;
     int from_reg;
{
2330 /* When outputting an inline function, this can happen
2331 for a reg that isn't actually used. */
2332 if (regno_reg_rtx
[i
] == 0)
2335 /* If the reg got changed to a MEM at rtl-generation time,
2337 if (GET_CODE (regno_reg_rtx
[i
]) != REG
)
2340 /* Modify the reg-rtx to contain the new hard reg
2341 number or else to contain its pseudo reg number. */
2342 REGNO (regno_reg_rtx
[i
])
2343 = reg_renumber
[i
] >= 0 ? reg_renumber
[i
] : i
;
2345 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2346 allocate a stack slot for it. */
2348 if (reg_renumber
[i
] < 0
2349 && reg_n_refs
[i
] > 0
2350 && reg_equiv_constant
[i
] == 0
2351 && reg_equiv_memory_loc
[i
] == 0)
2354 int inherent_size
= PSEUDO_REGNO_BYTES (i
);
2355 int total_size
= MAX (inherent_size
, reg_max_ref_width
[i
]);
      /* Each pseudo reg has an inherent size which comes from its own mode,
	 and a total size which provides room for paradoxical subregs
	 which refer to the pseudo reg in wider modes.

	 We can use a slot already allocated if it provides both
	 enough inherent space and enough total space.
	 Otherwise, we allocate a new slot, making sure that it has no less
	 inherent space, and no less total space, than the previous slot.  */
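
      /* Numeric illustration (made-up values): a pseudo in SImode has an
	 inherent size of 4 bytes; if some insn refers to it through a
	 paradoxical (subreg:DI (reg:SI ...)), reg_max_ref_width records 8,
	 so total_size is 8 and any reused or newly allocated slot must
	 offer at least 4 inherent and 8 total bytes.  */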
2368 /* No known place to spill from => no slot to reuse. */
2369 x
= assign_stack_local (GET_MODE (regno_reg_rtx
[i
]), total_size
, -1);
2370 if (BYTES_BIG_ENDIAN
)
2371 /* Cancel the big-endian correction done in assign_stack_local.
2372 Get the address of the beginning of the slot.
2373 This is so we can do a big-endian correction unconditionally
2375 adjust
= inherent_size
- total_size
;
2377 RTX_UNCHANGING_P (x
) = RTX_UNCHANGING_P (regno_reg_rtx
[i
]);
2379 /* Reuse a stack slot if possible. */
2380 else if (spill_stack_slot
[from_reg
] != 0
2381 && spill_stack_slot_width
[from_reg
] >= total_size
2382 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot
[from_reg
]))
2384 x
= spill_stack_slot
[from_reg
];
2385 /* Allocate a bigger slot. */
2388 /* Compute maximum size needed, both for inherent size
2389 and for total size. */
2390 enum machine_mode mode
= GET_MODE (regno_reg_rtx
[i
]);
2392 if (spill_stack_slot
[from_reg
])
2394 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot
[from_reg
]))
2396 mode
= GET_MODE (spill_stack_slot
[from_reg
]);
2397 if (spill_stack_slot_width
[from_reg
] > total_size
)
2398 total_size
= spill_stack_slot_width
[from_reg
];
2400 /* Make a slot with that size. */
2401 x
= assign_stack_local (mode
, total_size
, -1);
2403 if (BYTES_BIG_ENDIAN
)
2405 /* Cancel the big-endian correction done in assign_stack_local.
2406 Get the address of the beginning of the slot.
2407 This is so we can do a big-endian correction unconditionally
2409 adjust
= GET_MODE_SIZE (mode
) - total_size
;
2411 stack_slot
= gen_rtx (MEM
, mode_for_size (total_size
2414 plus_constant (XEXP (x
, 0), adjust
));
2416 spill_stack_slot
[from_reg
] = stack_slot
;
2417 spill_stack_slot_width
[from_reg
] = total_size
;
2420 /* On a big endian machine, the "address" of the slot
2421 is the address of the low part that fits its inherent mode. */
2422 if (BYTES_BIG_ENDIAN
&& inherent_size
< total_size
)
2423 adjust
+= (total_size
- inherent_size
);
2425 /* If we have any adjustment to make, or if the stack slot is the
2426 wrong mode, make a new stack slot. */
2427 if (adjust
!= 0 || GET_MODE (x
) != GET_MODE (regno_reg_rtx
[i
]))
2429 x
= gen_rtx (MEM
, GET_MODE (regno_reg_rtx
[i
]),
2430 plus_constant (XEXP (x
, 0), adjust
));
2431 RTX_UNCHANGING_P (x
) = RTX_UNCHANGING_P (regno_reg_rtx
[i
]);
2434 /* Save the stack slot for later. */
2435 reg_equiv_memory_loc
[i
] = x
;
/* Mark the slots in regs_ever_live for the hard regs
   used by pseudo-reg number REGNO.  */

void
mark_home_live (regno)
     int regno;
{
  register int i, lim;
  i = reg_renumber[regno];
  if (i < 0)
    return;
  lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
  while (i < lim)
    regs_ever_live[i++] = 1;
}

/* Mark the registers used in SCRATCH as being live.  */

static void
mark_scratch_live (scratch)
     rtx scratch;
{
  register int i;
  int regno = REGNO (scratch);
  int lim = regno + HARD_REGNO_NREGS (regno, GET_MODE (scratch));

  for (i = regno; i < lim; i++)
    regs_ever_live[i] = 1;
}
/* This function handles the tracking of elimination offsets around branches.

   X is a piece of RTL being scanned.

   INSN is the insn that it came from, if any.

   INITIAL_P is non-zero if we are to set the offset to be the initial
   offset and zero if we are setting the offset of the label to be the
   current offset.  */
2480 set_label_offsets (x
, insn
, initial_p
)
2485 enum rtx_code code
= GET_CODE (x
);
2488 struct elim_table
*p
;
2493 if (LABEL_REF_NONLOCAL_P (x
))
2498 /* ... fall through ... */
2501 /* If we know nothing about this label, set the desired offsets. Note
2502 that this sets the offset at a label to be the offset before a label
2503 if we don't know anything about the label. This is not correct for
2504 the label after a BARRIER, but is the best guess we can make. If
2505 we guessed wrong, we will suppress an elimination that might have
2506 been possible had we been able to guess correctly. */
2508 if (! offsets_known_at
[CODE_LABEL_NUMBER (x
)])
2510 for (i
= 0; i
< NUM_ELIMINABLE_REGS
; i
++)
2511 offsets_at
[CODE_LABEL_NUMBER (x
)][i
]
2512 = (initial_p
? reg_eliminate
[i
].initial_offset
2513 : reg_eliminate
[i
].offset
);
2514 offsets_known_at
[CODE_LABEL_NUMBER (x
)] = 1;
2517 /* Otherwise, if this is the definition of a label and it is
2518 preceded by a BARRIER, set our offsets to the known offset of
2522 && (tem
= prev_nonnote_insn (insn
)) != 0
2523 && GET_CODE (tem
) == BARRIER
)
2525 num_not_at_initial_offset
= 0;
2526 for (i
= 0; i
< NUM_ELIMINABLE_REGS
; i
++)
2528 reg_eliminate
[i
].offset
= reg_eliminate
[i
].previous_offset
2529 = offsets_at
[CODE_LABEL_NUMBER (x
)][i
];
2530 if (reg_eliminate
[i
].can_eliminate
2531 && (reg_eliminate
[i
].offset
2532 != reg_eliminate
[i
].initial_offset
))
2533 num_not_at_initial_offset
++;
2538 /* If neither of the above cases is true, compare each offset
2539 with those previously recorded and suppress any eliminations
2540 where the offsets disagree. */
2542 for (i
= 0; i
< NUM_ELIMINABLE_REGS
; i
++)
2543 if (offsets_at
[CODE_LABEL_NUMBER (x
)][i
]
2544 != (initial_p
? reg_eliminate
[i
].initial_offset
2545 : reg_eliminate
[i
].offset
))
2546 reg_eliminate
[i
].can_eliminate
= 0;
2551 set_label_offsets (PATTERN (insn
), insn
, initial_p
);
2553 /* ... fall through ... */
2557 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2558 and hence must have all eliminations at their initial offsets. */
2559 for (tem
= REG_NOTES (x
); tem
; tem
= XEXP (tem
, 1))
2560 if (REG_NOTE_KIND (tem
) == REG_LABEL
)
2561 set_label_offsets (XEXP (tem
, 0), insn
, 1);
      /* Each of the labels in the address vector must be at their initial
	 offsets.  We want the first field for ADDR_VEC and the second
	 field for ADDR_DIFF_VEC.  */
2570 for (i
= 0; i
< XVECLEN (x
, code
== ADDR_DIFF_VEC
); i
++)
2571 set_label_offsets (XVECEXP (x
, code
== ADDR_DIFF_VEC
, i
),
2576 /* We only care about setting PC. If the source is not RETURN,
2577 IF_THEN_ELSE, or a label, disable any eliminations not at
2578 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2579 isn't one of those possibilities. For branches to a label,
2580 call ourselves recursively.
2582 Note that this can disable elimination unnecessarily when we have
2583 a non-local goto since it will look like a non-constant jump to
2584 someplace in the current function. This isn't a significant
2585 problem since such jumps will normally be when all elimination
2586 pairs are back to their initial offsets. */
2588 if (SET_DEST (x
) != pc_rtx
)
2591 switch (GET_CODE (SET_SRC (x
)))
2598 set_label_offsets (XEXP (SET_SRC (x
), 0), insn
, initial_p
);
2602 tem
= XEXP (SET_SRC (x
), 1);
2603 if (GET_CODE (tem
) == LABEL_REF
)
2604 set_label_offsets (XEXP (tem
, 0), insn
, initial_p
);
2605 else if (GET_CODE (tem
) != PC
&& GET_CODE (tem
) != RETURN
)
2608 tem
= XEXP (SET_SRC (x
), 2);
2609 if (GET_CODE (tem
) == LABEL_REF
)
2610 set_label_offsets (XEXP (tem
, 0), insn
, initial_p
);
2611 else if (GET_CODE (tem
) != PC
&& GET_CODE (tem
) != RETURN
)
2616 /* If we reach here, all eliminations must be at their initial
2617 offset because we are doing a jump to a variable address. */
2618 for (p
= reg_eliminate
; p
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; p
++)
2619 if (p
->offset
!= p
->initial_offset
)
2620 p
->can_eliminate
= 0;
2624 /* Used for communication between the next two function to properly share
2625 the vector for an ASM_OPERANDS. */
2627 static struct rtvec_def
*old_asm_operands_vec
, *new_asm_operands_vec
;
/* Scan X and replace any eliminable registers (such as fp) with a
   replacement (such as sp), plus an offset.

   MEM_MODE is the mode of an enclosing MEM.  We need this to know how
   much to adjust a register for, e.g., PRE_DEC.  Also, if we are inside a
   MEM, we are allowed to replace a sum of a register and the constant zero
   with the register, which we cannot do outside a MEM.  In addition, we need
   to record the fact that a register is referenced outside a MEM.

   If INSN is an insn, it is the insn containing X.  If we replace a REG
   in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
   CLOBBER of the pseudo after INSN so find_equiv_regs will know that
   the REG is being modified.

   Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
   That's used when we eliminate in expressions stored in notes.
   This means, do not set ref_outside_mem even if the reference
   is outside of MEMs.

   If we see a modification to a register we know about, take the
   appropriate action (see case SET, below).

   REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
   replacements done assuming all offsets are at their initial values.  If
   they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
   encounter, return the actual location so that find_reloads will do
   the proper thing.  */
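
/* Worked example (frame layout numbers invented for illustration): if the
   frame pointer is being eliminated in favor of the stack pointer and the
   current offset between them is 16, then a reference to
   (plus (reg fp) (const_int 4)) inside a MEM is rewritten below as
   (plus (reg sp) (const_int 20)), and a bare (reg fp) becomes
   (plus (reg sp) (const_int 16)).  */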
2658 eliminate_regs (x
, mem_mode
, insn
)
2660 enum machine_mode mem_mode
;
2663 enum rtx_code code
= GET_CODE (x
);
2664 struct elim_table
*ep
;
2689 /* First handle the case where we encounter a bare register that
2690 is eliminable. Replace it with a PLUS. */
2691 if (regno
< FIRST_PSEUDO_REGISTER
)
2693 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
];
2695 if (ep
->from_rtx
== x
&& ep
->can_eliminate
)
2698 /* Refs inside notes don't count for this purpose. */
2699 && ! (insn
!= 0 && (GET_CODE (insn
) == EXPR_LIST
2700 || GET_CODE (insn
) == INSN_LIST
)))
2701 ep
->ref_outside_mem
= 1;
2702 return plus_constant (ep
->to_rtx
, ep
->previous_offset
);
2706 else if (reg_equiv_memory_loc
&& reg_equiv_memory_loc
[regno
]
2707 && (reg_equiv_address
[regno
] || num_not_at_initial_offset
))
	  /* In this case, find_reloads would attempt to either use an
	     incorrect address (if something is not at its initial offset)
	     or substitute a replaced address into an insn (which loses
	     if the offset is changed by some later action).  So we simply
	     return the replaced stack slot (assuming it is changed by
	     elimination) and ignore the fact that this is actually a
	     reference to the pseudo.  Ensure we make a copy of the
	     address in case it is shared.  */
2717 new = eliminate_regs (reg_equiv_memory_loc
[regno
],
2719 if (new != reg_equiv_memory_loc
[regno
])
2721 cannot_omit_stores
[regno
] = 1;
2722 return copy_rtx (new);
2728 /* If this is the sum of an eliminable register and a constant, rework
2730 if (GET_CODE (XEXP (x
, 0)) == REG
2731 && REGNO (XEXP (x
, 0)) < FIRST_PSEUDO_REGISTER
2732 && CONSTANT_P (XEXP (x
, 1)))
2734 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
];
2736 if (ep
->from_rtx
== XEXP (x
, 0) && ep
->can_eliminate
)
2739 /* Refs inside notes don't count for this purpose. */
2740 && ! (insn
!= 0 && (GET_CODE (insn
) == EXPR_LIST
2741 || GET_CODE (insn
) == INSN_LIST
)))
2742 ep
->ref_outside_mem
= 1;
2744 /* The only time we want to replace a PLUS with a REG (this
2745 occurs when the constant operand of the PLUS is the negative
2746 of the offset) is when we are inside a MEM. We won't want
2747 to do so at other times because that would change the
2748 structure of the insn in a way that reload can't handle.
2749 We special-case the commonest situation in
2750 eliminate_regs_in_insn, so just replace a PLUS with a
2751 PLUS here, unless inside a MEM. */
2752 if (mem_mode
!= 0 && GET_CODE (XEXP (x
, 1)) == CONST_INT
2753 && INTVAL (XEXP (x
, 1)) == - ep
->previous_offset
)
2756 return gen_rtx (PLUS
, Pmode
, ep
->to_rtx
,
2757 plus_constant (XEXP (x
, 1),
2758 ep
->previous_offset
));
2761 /* If the register is not eliminable, we are done since the other
2762 operand is a constant. */
2766 /* If this is part of an address, we want to bring any constant to the
2767 outermost PLUS. We will do this by doing register replacement in
2768 our operands and seeing if a constant shows up in one of them.
2770 We assume here this is part of an address (or a "load address" insn)
2771 since an eliminable register is not likely to appear in any other
2774 If we have (plus (eliminable) (reg)), we want to produce
2775 (plus (plus (replacement) (reg) (const))). If this was part of a
2776 normal add insn, (plus (replacement) (reg)) will be pushed as a
2777 reload. This is the desired action. */
2780 rtx new0
= eliminate_regs (XEXP (x
, 0), mem_mode
, insn
);
2781 rtx new1
= eliminate_regs (XEXP (x
, 1), mem_mode
, insn
);
2783 if (new0
!= XEXP (x
, 0) || new1
!= XEXP (x
, 1))
2785 /* If one side is a PLUS and the other side is a pseudo that
2786 didn't get a hard register but has a reg_equiv_constant,
2787 we must replace the constant here since it may no longer
2788 be in the position of any operand. */
2789 if (GET_CODE (new0
) == PLUS
&& GET_CODE (new1
) == REG
2790 && REGNO (new1
) >= FIRST_PSEUDO_REGISTER
2791 && reg_renumber
[REGNO (new1
)] < 0
2792 && reg_equiv_constant
!= 0
2793 && reg_equiv_constant
[REGNO (new1
)] != 0)
2794 new1
= reg_equiv_constant
[REGNO (new1
)];
2795 else if (GET_CODE (new1
) == PLUS
&& GET_CODE (new0
) == REG
2796 && REGNO (new0
) >= FIRST_PSEUDO_REGISTER
2797 && reg_renumber
[REGNO (new0
)] < 0
2798 && reg_equiv_constant
[REGNO (new0
)] != 0)
2799 new0
= reg_equiv_constant
[REGNO (new0
)];
2801 new = form_sum (new0
, new1
);
2803 /* As above, if we are not inside a MEM we do not want to
2804 turn a PLUS into something else. We might try to do so here
2805 for an addition of 0 if we aren't optimizing. */
2806 if (! mem_mode
&& GET_CODE (new) != PLUS
)
2807 return gen_rtx (PLUS
, GET_MODE (x
), new, const0_rtx
);
      /* If this is the product of an eliminable register and a
	 constant, apply the distributive law and move the constant out
	 so that we have (plus (mult ..) ..).  This is needed in order
	 to keep load-address insns valid.  This case is pathological.
	 We ignore the possibility of overflow here.  */
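
      /* Sketch of the transformation (offsets invented): with fp = sp + 16
	 being eliminated, (mult (reg fp) (const_int 4)) becomes
	 (plus (mult (reg sp) (const_int 4)) (const_int 64)), i.e. the
	 replacement register is multiplied and the scaled offset is moved
	 to the outermost PLUS.  */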
2820 if (GET_CODE (XEXP (x
, 0)) == REG
2821 && REGNO (XEXP (x
, 0)) < FIRST_PSEUDO_REGISTER
2822 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
2823 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
];
2825 if (ep
->from_rtx
== XEXP (x
, 0) && ep
->can_eliminate
)
2828 /* Refs inside notes don't count for this purpose. */
2829 && ! (insn
!= 0 && (GET_CODE (insn
) == EXPR_LIST
2830 || GET_CODE (insn
) == INSN_LIST
)))
2831 ep
->ref_outside_mem
= 1;
2834 plus_constant (gen_rtx (MULT
, Pmode
, ep
->to_rtx
, XEXP (x
, 1)),
2835 ep
->previous_offset
* INTVAL (XEXP (x
, 1)));
2838 /* ... fall through ... */
2843 case DIV
: case UDIV
:
2844 case MOD
: case UMOD
:
2845 case AND
: case IOR
: case XOR
:
2846 case ROTATERT
: case ROTATE
:
2847 case ASHIFTRT
: case LSHIFTRT
: case ASHIFT
:
2849 case GE
: case GT
: case GEU
: case GTU
:
2850 case LE
: case LT
: case LEU
: case LTU
:
2852 rtx new0
= eliminate_regs (XEXP (x
, 0), mem_mode
, insn
);
2854 = XEXP (x
, 1) ? eliminate_regs (XEXP (x
, 1), mem_mode
, insn
) : 0;
2856 if (new0
!= XEXP (x
, 0) || new1
!= XEXP (x
, 1))
2857 return gen_rtx (code
, GET_MODE (x
), new0
, new1
);
2862 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2865 new = eliminate_regs (XEXP (x
, 0), mem_mode
, insn
);
2866 if (new != XEXP (x
, 0))
2867 x
= gen_rtx (EXPR_LIST
, REG_NOTE_KIND (x
), new, XEXP (x
, 1));
2870 /* ... fall through ... */
2873 /* Now do eliminations in the rest of the chain. If this was
2874 an EXPR_LIST, this might result in allocating more memory than is
2875 strictly needed, but it simplifies the code. */
2878 new = eliminate_regs (XEXP (x
, 1), mem_mode
, insn
);
2879 if (new != XEXP (x
, 1))
2880 return gen_rtx (GET_CODE (x
), GET_MODE (x
), XEXP (x
, 0), new);
2888 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
2889 if (ep
->to_rtx
== XEXP (x
, 0))
2891 int size
= GET_MODE_SIZE (mem_mode
);
2893 /* If more bytes than MEM_MODE are pushed, account for them. */
2894 #ifdef PUSH_ROUNDING
2895 if (ep
->to_rtx
== stack_pointer_rtx
)
2896 size
= PUSH_ROUNDING (size
);
2898 if (code
== PRE_DEC
|| code
== POST_DEC
)
2904 /* Fall through to generic unary operation case. */
2905 case STRICT_LOW_PART
:
2907 case SIGN_EXTEND
: case ZERO_EXTEND
:
2908 case TRUNCATE
: case FLOAT_EXTEND
: case FLOAT_TRUNCATE
:
2909 case FLOAT
: case FIX
:
2910 case UNSIGNED_FIX
: case UNSIGNED_FLOAT
:
2914 new = eliminate_regs (XEXP (x
, 0), mem_mode
, insn
);
2915 if (new != XEXP (x
, 0))
2916 return gen_rtx (code
, GET_MODE (x
), new);
2920 /* Similar to above processing, but preserve SUBREG_WORD.
2921 Convert (subreg (mem)) to (mem) if not paradoxical.
2922 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2923 pseudo didn't get a hard reg, we must replace this with the
2924 eliminated version of the memory location because push_reloads
2925 may do the replacement in certain circumstances. */
2926 if (GET_CODE (SUBREG_REG (x
)) == REG
2927 && (GET_MODE_SIZE (GET_MODE (x
))
2928 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))))
2929 && reg_equiv_memory_loc
!= 0
2930 && reg_equiv_memory_loc
[REGNO (SUBREG_REG (x
))] != 0)
2932 new = eliminate_regs (reg_equiv_memory_loc
[REGNO (SUBREG_REG (x
))],
2935 /* If we didn't change anything, we must retain the pseudo. */
2936 if (new == reg_equiv_memory_loc
[REGNO (SUBREG_REG (x
))])
2937 new = SUBREG_REG (x
);
2940 /* Otherwise, ensure NEW isn't shared in case we have to reload
2942 new = copy_rtx (new);
2944 /* In this case, we must show that the pseudo is used in this
2945 insn so that delete_output_reload will do the right thing. */
2946 if (insn
!= 0 && GET_CODE (insn
) != EXPR_LIST
2947 && GET_CODE (insn
) != INSN_LIST
)
2948 emit_insn_before (gen_rtx (USE
, VOIDmode
, SUBREG_REG (x
)),
2953 new = eliminate_regs (SUBREG_REG (x
), mem_mode
, insn
);
2955 if (new != XEXP (x
, 0))
2957 if (GET_CODE (new) == MEM
2958 && (GET_MODE_SIZE (GET_MODE (x
))
2959 <= GET_MODE_SIZE (GET_MODE (new)))
2960 #ifdef LOAD_EXTEND_OP
2961 /* On these machines we will be reloading what is
2962 inside the SUBREG if it originally was a pseudo and
2963 the inner and outer modes are both a word or
2964 smaller. So leave the SUBREG then. */
2965 && ! (GET_CODE (SUBREG_REG (x
)) == REG
2966 && GET_MODE_SIZE (GET_MODE (x
)) <= UNITS_PER_WORD
2967 && GET_MODE_SIZE (GET_MODE (new)) <= UNITS_PER_WORD
2968 && (GET_MODE_SIZE (GET_MODE (x
))
2969 > GET_MODE_SIZE (GET_MODE (new)))
2970 && INTEGRAL_MODE_P (GET_MODE (new))
2971 && LOAD_EXTEND_OP (GET_MODE (new)) != NIL
)
2975 int offset
= SUBREG_WORD (x
) * UNITS_PER_WORD
;
2976 enum machine_mode mode
= GET_MODE (x
);
2978 if (BYTES_BIG_ENDIAN
)
2979 offset
+= (MIN (UNITS_PER_WORD
,
2980 GET_MODE_SIZE (GET_MODE (new)))
2981 - MIN (UNITS_PER_WORD
, GET_MODE_SIZE (mode
)));
2983 PUT_MODE (new, mode
);
2984 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset
);
2988 return gen_rtx (SUBREG
, GET_MODE (x
), new, SUBREG_WORD (x
));
2994 /* If using a register that is the source of an eliminate we still
2995 think can be performed, note it cannot be performed since we don't
2996 know how this register is used. */
2997 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
2998 if (ep
->from_rtx
== XEXP (x
, 0))
2999 ep
->can_eliminate
= 0;
3001 new = eliminate_regs (XEXP (x
, 0), mem_mode
, insn
);
3002 if (new != XEXP (x
, 0))
3003 return gen_rtx (code
, GET_MODE (x
), new);
3007 /* If clobbering a register that is the replacement register for an
3008 elimination we still think can be performed, note that it cannot
3009 be performed. Otherwise, we need not be concerned about it. */
3010 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3011 if (ep
->to_rtx
== XEXP (x
, 0))
3012 ep
->can_eliminate
= 0;
3014 new = eliminate_regs (XEXP (x
, 0), mem_mode
, insn
);
3015 if (new != XEXP (x
, 0))
3016 return gen_rtx (code
, GET_MODE (x
), new);
3022 /* Properly handle sharing input and constraint vectors. */
3023 if (ASM_OPERANDS_INPUT_VEC (x
) != old_asm_operands_vec
)
3025 /* When we come to a new vector not seen before,
3026 scan all its elements; keep the old vector if none
3027 of them changes; otherwise, make a copy. */
3028 old_asm_operands_vec
= ASM_OPERANDS_INPUT_VEC (x
);
3029 temp_vec
= (rtx
*) alloca (XVECLEN (x
, 3) * sizeof (rtx
));
3030 for (i
= 0; i
< ASM_OPERANDS_INPUT_LENGTH (x
); i
++)
3031 temp_vec
[i
] = eliminate_regs (ASM_OPERANDS_INPUT (x
, i
),
3034 for (i
= 0; i
< ASM_OPERANDS_INPUT_LENGTH (x
); i
++)
3035 if (temp_vec
[i
] != ASM_OPERANDS_INPUT (x
, i
))
3038 if (i
== ASM_OPERANDS_INPUT_LENGTH (x
))
3039 new_asm_operands_vec
= old_asm_operands_vec
;
3041 new_asm_operands_vec
3042 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x
), temp_vec
);
3045 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
3046 if (new_asm_operands_vec
== old_asm_operands_vec
)
3049 new = gen_rtx (ASM_OPERANDS
, VOIDmode
, ASM_OPERANDS_TEMPLATE (x
),
3050 ASM_OPERANDS_OUTPUT_CONSTRAINT (x
),
3051 ASM_OPERANDS_OUTPUT_IDX (x
), new_asm_operands_vec
,
3052 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x
),
3053 ASM_OPERANDS_SOURCE_FILE (x
),
3054 ASM_OPERANDS_SOURCE_LINE (x
));
3055 new->volatil
= x
->volatil
;
3060 /* Check for setting a register that we know about. */
3061 if (GET_CODE (SET_DEST (x
)) == REG
)
3063 /* See if this is setting the replacement register for an
3066 If DEST is the hard frame pointer, we do nothing because we
3067 assume that all assignments to the frame pointer are for
3068 non-local gotos and are being done at a time when they are valid
3069 and do not disturb anything else. Some machines want to
3070 eliminate a fake argument pointer (or even a fake frame pointer)
3071 with either the real frame or the stack pointer. Assignments to
3072 the hard frame pointer must not prevent this elimination. */
3074 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
];
3076 if (ep
->to_rtx
== SET_DEST (x
)
3077 && SET_DEST (x
) != hard_frame_pointer_rtx
)
3079 /* If it is being incremented, adjust the offset. Otherwise,
3080 this elimination can't be done. */
3081 rtx src
= SET_SRC (x
);
3083 if (GET_CODE (src
) == PLUS
3084 && XEXP (src
, 0) == SET_DEST (x
)
3085 && GET_CODE (XEXP (src
, 1)) == CONST_INT
)
3086 ep
->offset
-= INTVAL (XEXP (src
, 1));
3088 ep
->can_eliminate
= 0;
3091 /* Now check to see we are assigning to a register that can be
3092 eliminated. If so, it must be as part of a PARALLEL, since we
3093 will not have been called if this is a single SET. So indicate
3094 that we can no longer eliminate this reg. */
3095 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
];
3097 if (ep
->from_rtx
== SET_DEST (x
) && ep
->can_eliminate
)
3098 ep
->can_eliminate
= 0;
3101 /* Now avoid the loop below in this common case. */
3103 rtx new0
= eliminate_regs (SET_DEST (x
), 0, insn
);
3104 rtx new1
= eliminate_regs (SET_SRC (x
), 0, insn
);
3106 /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
3107 write a CLOBBER insn. */
3108 if (GET_CODE (SET_DEST (x
)) == REG
&& GET_CODE (new0
) == MEM
3109 && insn
!= 0 && GET_CODE (insn
) != EXPR_LIST
3110 && GET_CODE (insn
) != INSN_LIST
)
3111 emit_insn_after (gen_rtx (CLOBBER
, VOIDmode
, SET_DEST (x
)), insn
);
3113 if (new0
!= SET_DEST (x
) || new1
!= SET_SRC (x
))
3114 return gen_rtx (SET
, VOIDmode
, new0
, new1
);
3120 /* Our only special processing is to pass the mode of the MEM to our
3121 recursive call and copy the flags. While we are here, handle this
3122 case more efficiently. */
3123 new = eliminate_regs (XEXP (x
, 0), GET_MODE (x
), insn
);
3124 if (new != XEXP (x
, 0))
3126 new = gen_rtx (MEM
, GET_MODE (x
), new);
3127 new->volatil
= x
->volatil
;
3128 new->unchanging
= x
->unchanging
;
3129 new->in_struct
= x
->in_struct
;
3136 /* Process each of our operands recursively. If any have changed, make a
3138 fmt
= GET_RTX_FORMAT (code
);
3139 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++, fmt
++)
3143 new = eliminate_regs (XEXP (x
, i
), mem_mode
, insn
);
3144 if (new != XEXP (x
, i
) && ! copied
)
3146 rtx new_x
= rtx_alloc (code
);
3147 bcopy ((char *) x
, (char *) new_x
,
3148 (sizeof (*new_x
) - sizeof (new_x
->fld
)
3149 + sizeof (new_x
->fld
[0]) * GET_RTX_LENGTH (code
)));
3155 else if (*fmt
== 'E')
3158 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
3160 new = eliminate_regs (XVECEXP (x
, i
, j
), mem_mode
, insn
);
3161 if (new != XVECEXP (x
, i
, j
) && ! copied_vec
)
3163 rtvec new_v
= gen_rtvec_v (XVECLEN (x
, i
),
3164 &XVECEXP (x
, i
, 0));
3167 rtx new_x
= rtx_alloc (code
);
3168 bcopy ((char *) x
, (char *) new_x
,
3169 (sizeof (*new_x
) - sizeof (new_x
->fld
)
3170 + (sizeof (new_x
->fld
[0])
3171 * GET_RTX_LENGTH (code
))));
3175 XVEC (x
, i
) = new_v
;
3178 XVECEXP (x
, i
, j
) = new;
/* Scan INSN and eliminate all eliminable registers in it.

   If REPLACE is nonzero, do the replacement destructively.  Also
   delete the insn as dead if it is setting an eliminable register.

   If REPLACE is zero, do all our allocations in reload_obstack.

   If no eliminations were done and this insn doesn't require any elimination
   processing (these are not identical conditions: it might be updating sp,
   but not referencing fp; this needs to be seen during reload_as_needed so
   that the offset between fp and sp can be taken into consideration), zero
   is returned.  Otherwise, 1 is returned.  */
3200 eliminate_regs_in_insn (insn
, replace
)
3204 rtx old_body
= PATTERN (insn
);
3205 rtx old_set
= single_set (insn
);
3208 struct elim_table
*ep
;
3211 push_obstacks (&reload_obstack
, &reload_obstack
);
3213 if (old_set
!= 0 && GET_CODE (SET_DEST (old_set
)) == REG
3214 && REGNO (SET_DEST (old_set
)) < FIRST_PSEUDO_REGISTER
)
3216 /* Check for setting an eliminable register. */
3217 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3218 if (ep
->from_rtx
== SET_DEST (old_set
) && ep
->can_eliminate
)
3220 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3221 /* If this is setting the frame pointer register to the
3222 hardware frame pointer register and this is an elimination
3223 that will be done (tested above), this insn is really
3224 adjusting the frame pointer downward to compensate for
3225 the adjustment done before a nonlocal goto. */
3226 if (ep
->from
== FRAME_POINTER_REGNUM
3227 && ep
->to
== HARD_FRAME_POINTER_REGNUM
)
3229 rtx src
= SET_SRC (old_set
);
3232 if (src
== ep
->to_rtx
)
3234 else if (GET_CODE (src
) == PLUS
3235 && GET_CODE (XEXP (src
, 0)) == CONST_INT
)
3236 offset
= INTVAL (XEXP (src
, 0)), ok
= 1;
3243 = plus_constant (ep
->to_rtx
, offset
- ep
->offset
);
3245 /* First see if this insn remains valid when we
3246 make the change. If not, keep the INSN_CODE
3247 the same and let reload fit it up. */
3248 validate_change (insn
, &SET_SRC (old_set
), src
, 1);
3249 validate_change (insn
, &SET_DEST (old_set
),
3251 if (! apply_change_group ())
3253 SET_SRC (old_set
) = src
;
3254 SET_DEST (old_set
) = ep
->to_rtx
;
3264 /* In this case this insn isn't serving a useful purpose. We
3265 will delete it in reload_as_needed once we know that this
3266 elimination is, in fact, being done.
3268 If REPLACE isn't set, we can't delete this insn, but needn't
3269 process it since it won't be used unless something changes. */
3271 delete_dead_insn (insn
);
  /* Check for (set (reg) (plus (reg from) (offset))) where the offset
     in the insn is the negative of the offset in FROM.  Substitute
     (set (reg) (reg to)) for the insn and change its code.

     We have to do this here, rather than in eliminate_regs, so that we can
     change the insn code.  */
3283 if (GET_CODE (SET_SRC (old_set
)) == PLUS
3284 && GET_CODE (XEXP (SET_SRC (old_set
), 0)) == REG
3285 && GET_CODE (XEXP (SET_SRC (old_set
), 1)) == CONST_INT
)
3286 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
];
3288 if (ep
->from_rtx
== XEXP (SET_SRC (old_set
), 0)
3289 && ep
->can_eliminate
)
3291 /* We must stop at the first elimination that will be used.
3292 If this one would replace the PLUS with a REG, do it
3293 now. Otherwise, quit the loop and let eliminate_regs
3294 do its normal replacement. */
3295 if (ep
->offset
== - INTVAL (XEXP (SET_SRC (old_set
), 1)))
3297 /* We assume here that we don't need a PARALLEL of
3298 any CLOBBERs for this assignment. There's not
3299 much we can do if we do need it. */
3300 PATTERN (insn
) = gen_rtx (SET
, VOIDmode
,
3301 SET_DEST (old_set
), ep
->to_rtx
);
3302 INSN_CODE (insn
) = -1;
3311 old_asm_operands_vec
= 0;
3313 /* Replace the body of this insn with a substituted form. If we changed
3314 something, return non-zero.
3316 If we are replacing a body that was a (set X (plus Y Z)), try to
3317 re-recognize the insn. We do this in case we had a simple addition
3318 but now can do this as a load-address. This saves an insn in this
3321 new_body
= eliminate_regs (old_body
, 0, replace
? insn
: NULL_RTX
);
3322 if (new_body
!= old_body
)
3324 /* If we aren't replacing things permanently and we changed something,
3325 make another copy to ensure that all the RTL is new. Otherwise
3326 things can go wrong if find_reload swaps commutative operands
3327 and one is inside RTL that has been copied while the other is not. */
3329 /* Don't copy an asm_operands because (1) there's no need and (2)
3330 copy_rtx can't do it properly when there are multiple outputs. */
3331 if (! replace
&& asm_noperands (old_body
) < 0)
3332 new_body
= copy_rtx (new_body
);
3334 /* If we had a move insn but now we don't, rerecognize it. This will
3335 cause spurious re-recognition if the old move had a PARALLEL since
3336 the new one still will, but we can't call single_set without
3337 having put NEW_BODY into the insn and the re-recognition won't
3338 hurt in this rare case. */
3340 && ((GET_CODE (SET_SRC (old_set
)) == REG
3341 && (GET_CODE (new_body
) != SET
3342 || GET_CODE (SET_SRC (new_body
)) != REG
))
3343 /* If this was a load from or store to memory, compare
3344 the MEM in recog_operand to the one in the insn. If they
3345 are not equal, then rerecognize the insn. */
3347 && ((GET_CODE (SET_SRC (old_set
)) == MEM
3348 && SET_SRC (old_set
) != recog_operand
[1])
3349 || (GET_CODE (SET_DEST (old_set
)) == MEM
3350 && SET_DEST (old_set
) != recog_operand
[0])))
3351 /* If this was an add insn before, rerecognize. */
3352 || GET_CODE (SET_SRC (old_set
)) == PLUS
))
3354 if (! validate_change (insn
, &PATTERN (insn
), new_body
, 0))
3355 /* If recognition fails, store the new body anyway.
3356 It's normal to have recognition failures here
3357 due to bizarre memory addresses; reloading will fix them. */
3358 PATTERN (insn
) = new_body
;
3361 PATTERN (insn
) = new_body
;
  /* Loop through all elimination pairs.  See if any have changed and
     recalculate the number not at initial offset.

     Compute the maximum offset (minimum offset if the stack does not
     grow downward) for each elimination pair.

     We also detect cases where register elimination cannot be done,
     namely, if a register would be both changed and referenced outside a MEM
     in the resulting insn, since such an insn is often undefined and, even if
     not, we cannot know what meaning will be given to it.  Note that it is
     valid to have a register used in an address in an insn that changes it
     (presumably with a pre- or post-increment or decrement).

     If anything changes, return nonzero.  */
3381 num_not_at_initial_offset
= 0;
3382 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3384 if (ep
->previous_offset
!= ep
->offset
&& ep
->ref_outside_mem
)
3385 ep
->can_eliminate
= 0;
3387 ep
->ref_outside_mem
= 0;
3389 if (ep
->previous_offset
!= ep
->offset
)
3392 ep
->previous_offset
= ep
->offset
;
3393 if (ep
->can_eliminate
&& ep
->offset
!= ep
->initial_offset
)
3394 num_not_at_initial_offset
++;
3396 #ifdef STACK_GROWS_DOWNWARD
3397 ep
->max_offset
= MAX (ep
->max_offset
, ep
->offset
);
3399 ep
->max_offset
= MIN (ep
->max_offset
, ep
->offset
);
3404 /* If we changed something, perform elimination in REG_NOTES. This is
3405 needed even when REPLACE is zero because a REG_DEAD note might refer
3406 to a register that we eliminate and could cause a different number
3407 of spill registers to be needed in the final reload pass than in
3409 if (val
&& REG_NOTES (insn
) != 0)
3410 REG_NOTES (insn
) = eliminate_regs (REG_NOTES (insn
), 0, REG_NOTES (insn
));
/* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
   replacement we currently believe is valid, mark it as not eliminable if X
   modifies DEST in any way other than by adding a constant integer to it.

   If DEST is the frame pointer, we do nothing because we assume that
   all assignments to the hard frame pointer are nonlocal gotos and are being
   done at a time when they are valid and do not disturb anything else.
   Some machines want to eliminate a fake argument pointer with either the
   frame or stack pointer.  Assignments to the hard frame pointer must not
   prevent this elimination.

   Called via note_stores from reload before starting its passes to scan
   the insns of the function.  */
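
/* Example of what this catches (purely illustrative): an insn like
   (set (reg sp) (reg r3)) copies an arbitrary value into the stack
   pointer, so any elimination whose replacement register is sp can no
   longer be trusted and is marked not eliminable; by contrast,
   (set (reg sp) (plus (reg sp) (const_int -8))) merely adjusts sp by a
   constant and leaves the elimination valid.  */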
3433 mark_not_eliminable (dest
, x
)
3439 /* A SUBREG of a hard register here is just changing its mode. We should
3440 not see a SUBREG of an eliminable hard register, but check just in
3442 if (GET_CODE (dest
) == SUBREG
)
3443 dest
= SUBREG_REG (dest
);
3445 if (dest
== hard_frame_pointer_rtx
)
3448 for (i
= 0; i
< NUM_ELIMINABLE_REGS
; i
++)
3449 if (reg_eliminate
[i
].can_eliminate
&& dest
== reg_eliminate
[i
].to_rtx
3450 && (GET_CODE (x
) != SET
3451 || GET_CODE (SET_SRC (x
)) != PLUS
3452 || XEXP (SET_SRC (x
), 0) != dest
3453 || GET_CODE (XEXP (SET_SRC (x
), 1)) != CONST_INT
))
3455 reg_eliminate
[i
].can_eliminate_previous
3456 = reg_eliminate
[i
].can_eliminate
= 0;
/* Kick all pseudos out of hard register REGNO.
   If GLOBAL is nonzero, try to find someplace else to put them.
   If DUMPFILE is nonzero, log actions taken on that file.

   If CANT_ELIMINATE is nonzero, it means that we are doing this spill
   because we found we can't eliminate some register.  In that case, no
   pseudos are allowed to be in the register, even if they are only in a
   block that doesn't require spill registers, unlike the case when we are
   spilling this hard reg to produce another spill register.

   Return nonzero if any pseudos needed to be kicked out.  */
3474 spill_hard_reg (regno
, global
, dumpfile
, cant_eliminate
)
3480 enum reg_class
class = REGNO_REG_CLASS (regno
);
3481 int something_changed
= 0;
3484 SET_HARD_REG_BIT (forbidden_regs
, regno
);
3487 regs_ever_live
[regno
] = 1;
3489 /* Spill every pseudo reg that was allocated to this reg
3490 or to something that overlaps this reg. */
3492 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
3493 if (reg_renumber
[i
] >= 0
3494 && reg_renumber
[i
] <= regno
3496 + HARD_REGNO_NREGS (reg_renumber
[i
],
3497 PSEUDO_REGNO_MODE (i
))
3500 /* If this register belongs solely to a basic block which needed no
3501 spilling of any class that this register is contained in,
3502 leave it be, unless we are spilling this register because
3503 it was a hard register that can't be eliminated. */
3505 if (! cant_eliminate
3506 && basic_block_needs
[0]
3507 && reg_basic_block
[i
] >= 0
3508 && basic_block_needs
[(int) class][reg_basic_block
[i
]] == 0)
3512 for (p
= reg_class_superclasses
[(int) class];
3513 *p
!= LIM_REG_CLASSES
; p
++)
3514 if (basic_block_needs
[(int) *p
][reg_basic_block
[i
]] > 0)
3517 if (*p
== LIM_REG_CLASSES
)
3521 /* Mark it as no longer having a hard register home. */
3522 reg_renumber
[i
] = -1;
3523 /* We will need to scan everything again. */
3524 something_changed
= 1;
3526 retry_global_alloc (i
, forbidden_regs
);
3528 alter_reg (i
, regno
);
3531 if (reg_renumber
[i
] == -1)
3532 fprintf (dumpfile
, " Register %d now on stack.\n\n", i
);
3534 fprintf (dumpfile
, " Register %d now in %d.\n\n",
3535 i
, reg_renumber
[i
]);
3538 for (i
= 0; i
< scratch_list_length
; i
++)
3540 if (scratch_list
[i
] && REGNO (scratch_list
[i
]) == regno
)
3542 if (! cant_eliminate
&& basic_block_needs
[0]
3543 && ! basic_block_needs
[(int) class][scratch_block
[i
]])
3547 for (p
= reg_class_superclasses
[(int) class];
3548 *p
!= LIM_REG_CLASSES
; p
++)
3549 if (basic_block_needs
[(int) *p
][scratch_block
[i
]] > 0)
3552 if (*p
== LIM_REG_CLASSES
)
3555 PUT_CODE (scratch_list
[i
], SCRATCH
);
3556 scratch_list
[i
] = 0;
3557 something_changed
= 1;
3562 return something_changed
;
3565 /* Find all paradoxical subregs within X and update reg_max_ref_width.
3566 Also mark any hard registers used to store user variables as
3567 forbidden from being used for spill registers. */
3570 scan_paradoxical_subregs (x
)
3575 register enum rtx_code code
= GET_CODE (x
);
3580 #ifdef SMALL_REGISTER_CLASSES
3581 if (REGNO (x
) < FIRST_PSEUDO_REGISTER
&& REG_USERVAR_P (x
))
3582 SET_HARD_REG_BIT (forbidden_regs
, REGNO (x
));
3598 if (GET_CODE (SUBREG_REG (x
)) == REG
3599 && GET_MODE_SIZE (GET_MODE (x
)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))))
3600 reg_max_ref_width
[REGNO (SUBREG_REG (x
))]
3601 = GET_MODE_SIZE (GET_MODE (x
));
3605 fmt
= GET_RTX_FORMAT (code
);
3606 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
3609 scan_paradoxical_subregs (XEXP (x
, i
));
3610 else if (fmt
[i
] == 'E')
3613 for (j
= XVECLEN (x
, i
) - 1; j
>=0; j
--)
3614 scan_paradoxical_subregs (XVECEXP (x
, i
, j
));
3620 hard_reg_use_compare (p1
, p2
)
3621 struct hard_reg_n_uses
*p1
, *p2
;
3623 int tem
= p1
->uses
- p2
->uses
;
3624 if (tem
!= 0) return tem
;
3625 /* If regs are equally good, sort by regno,
3626 so that the results of qsort leave nothing to chance. */
3627 return p1
->regno
- p2
->regno
;
3630 /* Choose the order to consider regs for use as reload registers
3631 based on how much trouble would be caused by spilling one.
3632 Store them in order of decreasing preference in potential_reload_regs. */
3635 order_regs_for_reload (global
)
3642 struct hard_reg_n_uses hard_reg_n_uses
[FIRST_PSEUDO_REGISTER
];
3644 CLEAR_HARD_REG_SET (bad_spill_regs
);
3646 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3647 potential_reload_regs
[i
] = -1;
3649 /* Count number of uses of each hard reg by pseudo regs allocated to it
3650 and then order them by decreasing use. */
3652 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3654 hard_reg_n_uses
[i
].uses
= 0;
3655 hard_reg_n_uses
[i
].regno
= i
;
3658 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
3660 int regno
= reg_renumber
[i
];
3663 int lim
= regno
+ HARD_REGNO_NREGS (regno
, PSEUDO_REGNO_MODE (i
));
3666 /* If allocated by local-alloc, show more uses since
3667 we're not going to be able to reallocate it, but
3668 we might if allocated by global alloc. */
3669 if (global
&& reg_allocno
[i
] < 0)
3670 hard_reg_n_uses
[regno
].uses
+= (reg_n_refs
[i
] + 1) / 2;
3672 hard_reg_n_uses
[regno
++].uses
+= reg_n_refs
[i
];
3675 large
+= reg_n_refs
[i
];
3678 /* Now fixed registers (which cannot safely be used for reloading)
3679 get a very high use count so they will be considered least desirable.
3680 Registers used explicitly in the rtl code are almost as bad. */
3682 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3686 hard_reg_n_uses
[i
].uses
+= 2 * large
+ 2;
3687 SET_HARD_REG_BIT (bad_spill_regs
, i
);
3689 else if (regs_explicitly_used
[i
])
3691 hard_reg_n_uses
[i
].uses
+= large
+ 1;
3692 #ifndef SMALL_REGISTER_CLASSES
3693 /* ??? We are doing this here because of the potential that
3694 bad code may be generated if a register explicitly used in
3695 an insn was used as a spill register for that insn. But
3696 not using these are spill registers may lose on some machine.
3697 We'll have to see how this works out. */
3698 SET_HARD_REG_BIT (bad_spill_regs
, i
);
3702 hard_reg_n_uses
[HARD_FRAME_POINTER_REGNUM
].uses
+= 2 * large
+ 2;
3703 SET_HARD_REG_BIT (bad_spill_regs
, HARD_FRAME_POINTER_REGNUM
);
3705 #ifdef ELIMINABLE_REGS
3706 /* If registers other than the frame pointer are eliminable, mark them as
3708 for (i
= 0; i
< NUM_ELIMINABLE_REGS
; i
++)
3710 hard_reg_n_uses
[reg_eliminate
[i
].from
].uses
+= 2 * large
+ 2;
3711 SET_HARD_REG_BIT (bad_spill_regs
, reg_eliminate
[i
].from
);
3715 /* Prefer registers not so far used, for use in temporary loading.
3716 Among them, if REG_ALLOC_ORDER is defined, use that order.
3717 Otherwise, prefer registers not preserved by calls. */
3719 #ifdef REG_ALLOC_ORDER
3720 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3722 int regno
= reg_alloc_order
[i
];
3724 if (hard_reg_n_uses
[regno
].uses
== 0)
3725 potential_reload_regs
[o
++] = regno
;
3728 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3730 if (hard_reg_n_uses
[i
].uses
== 0 && call_used_regs
[i
])
3731 potential_reload_regs
[o
++] = i
;
3733 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3735 if (hard_reg_n_uses
[i
].uses
== 0 && ! call_used_regs
[i
])
3736 potential_reload_regs
[o
++] = i
;
3740 qsort (hard_reg_n_uses
, FIRST_PSEUDO_REGISTER
,
3741 sizeof hard_reg_n_uses
[0], hard_reg_use_compare
);
3743 /* Now add the regs that are already used,
3744 preferring those used less often. The fixed and otherwise forbidden
3745 registers will be at the end of this list. */
3747 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3748 if (hard_reg_n_uses
[i
].uses
!= 0)
3749 potential_reload_regs
[o
++] = hard_reg_n_uses
[i
].regno
;
3752 /* Used in reload_as_needed to sort the spilled regs. */
3755 compare_spill_regs (r1
, r2
)
3761 /* Reload pseudo-registers into hard regs around each insn as needed.
3762 Additional register load insns are output before the insn that needs it
3763 and perhaps store insns after insns that modify the reloaded pseudo reg.
3765 reg_last_reload_reg and reg_reloaded_contents keep track of
3766 which registers are already available in reload registers.
3767 We update these for the reloads that we perform,
3768 as the insns are scanned. */
3771 reload_as_needed (first
, live_known
)
3781 bzero ((char *) spill_reg_rtx
, sizeof spill_reg_rtx
);
3782 bzero ((char *) spill_reg_store
, sizeof spill_reg_store
);
3783 reg_last_reload_reg
= (rtx
*) alloca (max_regno
* sizeof (rtx
));
3784 bzero ((char *) reg_last_reload_reg
, max_regno
* sizeof (rtx
));
3785 reg_has_output_reload
= (char *) alloca (max_regno
);
3786 for (i
= 0; i
< n_spills
; i
++)
3788 reg_reloaded_contents
[i
] = -1;
3789 reg_reloaded_insn
[i
] = 0;
3792 /* Reset all offsets on eliminable registers to their initial values. */
3793 #ifdef ELIMINABLE_REGS
3794 for (i
= 0; i
< NUM_ELIMINABLE_REGS
; i
++)
3796 INITIAL_ELIMINATION_OFFSET (reg_eliminate
[i
].from
, reg_eliminate
[i
].to
,
3797 reg_eliminate
[i
].initial_offset
);
3798 reg_eliminate
[i
].previous_offset
3799 = reg_eliminate
[i
].offset
= reg_eliminate
[i
].initial_offset
;
3802 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate
[0].initial_offset
);
3803 reg_eliminate
[0].previous_offset
3804 = reg_eliminate
[0].offset
= reg_eliminate
[0].initial_offset
;
3807 num_not_at_initial_offset
= 0;
3809 /* Order the spilled regs, so that allocate_reload_regs can guarantee to
3810 pack registers with group needs. */
3813 qsort (spill_regs
, n_spills
, sizeof (short), compare_spill_regs
);
3814 for (i
= 0; i
< n_spills
; i
++)
3815 spill_reg_order
[spill_regs
[i
]] = i
;
3818 for (insn
= first
; insn
;)
3820 register rtx next
= NEXT_INSN (insn
);
3822 /* Notice when we move to a new basic block. */
3823 if (live_known
&& this_block
+ 1 < n_basic_blocks
3824 && insn
== basic_block_head
[this_block
+1])
3827 /* If we pass a label, copy the offsets from the label information
3828 into the current offsets of each elimination. */
3829 if (GET_CODE (insn
) == CODE_LABEL
)
3831 num_not_at_initial_offset
= 0;
3832 for (i
= 0; i
< NUM_ELIMINABLE_REGS
; i
++)
3834 reg_eliminate
[i
].offset
= reg_eliminate
[i
].previous_offset
3835 = offsets_at
[CODE_LABEL_NUMBER (insn
)][i
];
3836 if (reg_eliminate
[i
].can_eliminate
3837 && (reg_eliminate
[i
].offset
3838 != reg_eliminate
[i
].initial_offset
))
3839 num_not_at_initial_offset
++;
3843 else if (GET_RTX_CLASS (GET_CODE (insn
)) == 'i')
3845 rtx avoid_return_reg
= 0;
3846 rtx oldpat
= PATTERN (insn
);
3848 #ifdef SMALL_REGISTER_CLASSES
3849 /* Set avoid_return_reg if this is an insn
3850 that might use the value of a function call. */
3851 if (GET_CODE (insn
) == CALL_INSN
)
3853 if (GET_CODE (PATTERN (insn
)) == SET
)
3854 after_call
= SET_DEST (PATTERN (insn
));
3855 else if (GET_CODE (PATTERN (insn
)) == PARALLEL
3856 && GET_CODE (XVECEXP (PATTERN (insn
), 0, 0)) == SET
)
3857 after_call
= SET_DEST (XVECEXP (PATTERN (insn
), 0, 0));
3861 else if (after_call
!= 0
3862 && !(GET_CODE (PATTERN (insn
)) == SET
3863 && SET_DEST (PATTERN (insn
)) == stack_pointer_rtx
))
3865 if (reg_referenced_p (after_call
, PATTERN (insn
)))
3866 avoid_return_reg
= after_call
;
3869 #endif /* SMALL_REGISTER_CLASSES */
3871 /* If this is a USE and CLOBBER of a MEM, ensure that any
3872 references to eliminable registers have been removed. */
3874 if ((GET_CODE (PATTERN (insn
)) == USE
3875 || GET_CODE (PATTERN (insn
)) == CLOBBER
)
3876 && GET_CODE (XEXP (PATTERN (insn
), 0)) == MEM
)
3877 XEXP (XEXP (PATTERN (insn
), 0), 0)
3878 = eliminate_regs (XEXP (XEXP (PATTERN (insn
), 0), 0),
3879 GET_MODE (XEXP (PATTERN (insn
), 0)), NULL_RTX
);
3881 /* If we need to do register elimination processing, do so.
3882 This might delete the insn, in which case we are done. */
3883 if (num_eliminable
&& GET_MODE (insn
) == QImode
)
3885 eliminate_regs_in_insn (insn
, 1);
3886 if (GET_CODE (insn
) == NOTE
)
3893 if (GET_MODE (insn
) == VOIDmode
)
3895 /* First find the pseudo regs that must be reloaded for this insn.
3896 This info is returned in the tables reload_... (see reload.h).
3897 Also modify the body of INSN by substituting RELOAD
3898 rtx's for those pseudo regs. */
3901 bzero (reg_has_output_reload
, max_regno
);
3902 CLEAR_HARD_REG_SET (reg_is_output_reload
);
3904 find_reloads (insn
, 1, spill_indirect_levels
, live_known
,
3910 rtx prev
= PREV_INSN (insn
), next
= NEXT_INSN (insn
);
3914 /* If this block has not had spilling done for a
3915 particular clas and we have any non-optionals that need a
3916 spill reg in that class, abort. */
3918 for (class = 0; class < N_REG_CLASSES
; class++)
3919 if (basic_block_needs
[class] != 0
3920 && basic_block_needs
[class][this_block
] == 0)
3921 for (i
= 0; i
< n_reloads
; i
++)
3922 if (class == (int) reload_reg_class
[i
]
3923 && reload_reg_rtx
[i
] == 0
3924 && ! reload_optional
[i
]
3925 && (reload_in
[i
] != 0 || reload_out
[i
] != 0
3926 || reload_secondary_p
[i
] != 0))
3927 fatal_insn ("Non-optional registers need a spill register", insn
);
3929 /* Now compute which reload regs to reload them into. Perhaps
3930 reusing reload regs from previous insns, or else output
3931 load insns to reload them. Maybe output store insns too.
3932 Record the choices of reload reg in reload_reg_rtx. */
3933 choose_reload_regs (insn
, avoid_return_reg
);
3935 #ifdef SMALL_REGISTER_CLASSES
3936 /* Merge any reloads that we didn't combine for fear of
3937 increasing the number of spill registers needed but now
3938 discover can be safely merged. */
3939 merge_assigned_reloads (insn
);
3942 /* Generate the insns to reload operands into or out of
3943 their reload regs. */
3944 emit_reload_insns (insn
);
3946 /* Substitute the chosen reload regs from reload_reg_rtx
3947 into the insn's body (or perhaps into the bodies of other
3948 load and store insn that we just made for reloading
3949 and that we moved the structure into). */
3952 /* If this was an ASM, make sure that all the reload insns
3953 we have generated are valid. If not, give an error
3956 if (asm_noperands (PATTERN (insn
)) >= 0)
3957 for (p
= NEXT_INSN (prev
); p
!= next
; p
= NEXT_INSN (p
))
3958 if (p
!= insn
&& GET_RTX_CLASS (GET_CODE (p
)) == 'i'
3959 && (recog_memoized (p
) < 0
3960 || (insn_extract (p
),
3961 ! constrain_operands (INSN_CODE (p
), 1))))
3963 error_for_asm (insn
,
3964 "`asm' operand requires impossible reload");
3966 NOTE_SOURCE_FILE (p
) = 0;
3967 NOTE_LINE_NUMBER (p
) = NOTE_INSN_DELETED
;
3970 /* Any previously reloaded spilled pseudo reg, stored in this insn,
3971 is no longer validly lying around to save a future reload.
3972 Note that this does not detect pseudos that were reloaded
3973 for this insn in order to be stored in
3974 (obeying register constraints). That is correct; such reload
3975 registers ARE still valid. */
3976 note_stores (oldpat
, forget_old_reloads_1
);
3978 /* There may have been CLOBBER insns placed after INSN. So scan
3979 between INSN and NEXT and use them to forget old reloads. */
3980 for (x
= NEXT_INSN (insn
); x
!= next
; x
= NEXT_INSN (x
))
3981 if (GET_CODE (x
) == INSN
&& GET_CODE (PATTERN (x
)) == CLOBBER
)
3982 note_stores (PATTERN (x
), forget_old_reloads_1
);
3985 /* Likewise for regs altered by auto-increment in this insn.
3986 But note that the reg-notes are not changed by reloading:
3987 they still contain the pseudo-regs, not the spill regs. */
3988 for (x
= REG_NOTES (insn
); x
; x
= XEXP (x
, 1))
3989 if (REG_NOTE_KIND (x
) == REG_INC
)
3991 /* See if this pseudo reg was reloaded in this insn.
3992 If so, its last-reload info is still valid
3993 because it is based on this insn's reload. */
3994 for (i
= 0; i
< n_reloads
; i
++)
3995 if (reload_out
[i
] == XEXP (x
, 0))
3999 forget_old_reloads_1 (XEXP (x
, 0), NULL_RTX
);
4003 /* A reload reg's contents are unknown after a label. */
4004 if (GET_CODE (insn
) == CODE_LABEL
)
4005 for (i
= 0; i
< n_spills
; i
++)
4007 reg_reloaded_contents
[i
] = -1;
4008 reg_reloaded_insn
[i
] = 0;
4011 /* Don't assume a reload reg is still good after a call insn
4012 if it is a call-used reg. */
4013 else if (GET_CODE (insn
) == CALL_INSN
)
4014 for (i
= 0; i
< n_spills
; i
++)
4015 if (call_used_regs
[spill_regs
[i
]])
4017 reg_reloaded_contents
[i
] = -1;
4018 reg_reloaded_insn
[i
] = 0;
4021 /* In case registers overlap, allow certain insns to invalidate
4022 particular hard registers. */
4024 #ifdef INSN_CLOBBERS_REGNO_P
4025 for (i
= 0 ; i
< n_spills
; i
++)
4026 if (INSN_CLOBBERS_REGNO_P (insn
, spill_regs
[i
]))
4028 reg_reloaded_contents
[i
] = -1;
4029 reg_reloaded_insn
[i
] = 0;
4041 /* Discard all record of any value reloaded from X,
4042 or reloaded in X from someplace else;
4043 unless X is an output reload reg of the current insn.
4045 X may be a hard reg (the reload reg)
4046 or it may be a pseudo reg that was reloaded from. */
4049 forget_old_reloads_1 (x
, ignored
)
4057 /* note_stores does give us subregs of hard regs. */
4058 while (GET_CODE (x
) == SUBREG
)
4060 offset
+= SUBREG_WORD (x
);
4064 if (GET_CODE (x
) != REG
)
4067 regno
= REGNO (x
) + offset
;
4069 if (regno
>= FIRST_PSEUDO_REGISTER
)
4074 nr
= HARD_REGNO_NREGS (regno
, GET_MODE (x
));
4075 /* Storing into a spilled-reg invalidates its contents.
4076 This can happen if a block-local pseudo is allocated to that reg
4077 and it wasn't spilled because this block's total need is 0.
4078 Then some insn might have an optional reload and use this reg. */
4079 for (i
= 0; i
< nr
; i
++)
4080 if (spill_reg_order
[regno
+ i
] >= 0
4081 /* But don't do this if the reg actually serves as an output
4082 reload reg in the current instruction. */
4084 || ! TEST_HARD_REG_BIT (reg_is_output_reload
, regno
+ i
)))
4086 reg_reloaded_contents
[spill_reg_order
[regno
+ i
]] = -1;
4087 reg_reloaded_insn
[spill_reg_order
[regno
+ i
]] = 0;
4091 /* Since value of X has changed,
4092 forget any value previously copied from it. */
4095 /* But don't forget a copy if this is the output reload
4096 that establishes the copy's validity. */
4097 if (n_reloads
== 0 || reg_has_output_reload
[regno
+ nr
] == 0)
4098 reg_last_reload_reg
[regno
+ nr
] = 0;
4101 /* For each reload, the mode of the reload register. */
4102 static enum machine_mode reload_mode
[MAX_RELOADS
];
4104 /* For each reload, the largest number of registers it will require. */
4105 static int reload_nregs
[MAX_RELOADS
];
4107 /* Comparison function for qsort to decide which of two reloads
4108 should be handled first. *P1 and *P2 are the reload numbers. */
4111 reload_reg_class_lower (p1
, p2
)
4114 register int r1
= *p1
, r2
= *p2
;
4117 /* Consider required reloads before optional ones. */
4118 t
= reload_optional
[r1
] - reload_optional
[r2
];
4122 /* Count all solitary classes before non-solitary ones. */
4123 t
= ((reg_class_size
[(int) reload_reg_class
[r2
]] == 1)
4124 - (reg_class_size
[(int) reload_reg_class
[r1
]] == 1));
4128 /* Aside from solitaires, consider all multi-reg groups first. */
4129 t
= reload_nregs
[r2
] - reload_nregs
[r1
];
4133 /* Consider reloads in order of increasing reg-class number. */
4134 t
= (int) reload_reg_class
[r1
] - (int) reload_reg_class
[r2
];
4138 /* If reloads are equally urgent, sort by reload number,
4139 so that the results of qsort leave nothing to chance. */
4143 /* The following HARD_REG_SETs indicate when each hard register is
4144 used for a reload of various parts of the current insn. */
4146 /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4147 static HARD_REG_SET reload_reg_used
;
4148 /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4149 static HARD_REG_SET reload_reg_used_in_input_addr
[MAX_RECOG_OPERANDS
];
4150 /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4151 static HARD_REG_SET reload_reg_used_in_output_addr
[MAX_RECOG_OPERANDS
];
4152 /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4153 static HARD_REG_SET reload_reg_used_in_input
[MAX_RECOG_OPERANDS
];
4154 /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4155 static HARD_REG_SET reload_reg_used_in_output
[MAX_RECOG_OPERANDS
];
4156 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4157 static HARD_REG_SET reload_reg_used_in_op_addr
;
4158 /* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4159 static HARD_REG_SET reload_reg_used_in_op_addr_reload
;
4160 /* If reg is in use for a RELOAD_FOR_INSN reload. */
4161 static HARD_REG_SET reload_reg_used_in_insn
;
4162 /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4163 static HARD_REG_SET reload_reg_used_in_other_addr
;
4165 /* If reg is in use as a reload reg for any sort of reload. */
4166 static HARD_REG_SET reload_reg_used_at_all
;
4168 /* If reg is use as an inherited reload. We just mark the first register
4170 static HARD_REG_SET reload_reg_used_for_inherit
;
4172 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4173 TYPE. MODE is used to indicate how many consecutive regs are
4177 mark_reload_reg_in_use (regno
, opnum
, type
, mode
)
4180 enum reload_type type
;
4181 enum machine_mode mode
;
4183 int nregs
= HARD_REGNO_NREGS (regno
, mode
);
4186 for (i
= regno
; i
< nregs
+ regno
; i
++)
4191 SET_HARD_REG_BIT (reload_reg_used
, i
);
4194 case RELOAD_FOR_INPUT_ADDRESS
:
4195 SET_HARD_REG_BIT (reload_reg_used_in_input_addr
[opnum
], i
);
4198 case RELOAD_FOR_OUTPUT_ADDRESS
:
4199 SET_HARD_REG_BIT (reload_reg_used_in_output_addr
[opnum
], i
);
4202 case RELOAD_FOR_OPERAND_ADDRESS
:
4203 SET_HARD_REG_BIT (reload_reg_used_in_op_addr
, i
);
4206 case RELOAD_FOR_OPADDR_ADDR
:
4207 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload
, i
);
4210 case RELOAD_FOR_OTHER_ADDRESS
:
4211 SET_HARD_REG_BIT (reload_reg_used_in_other_addr
, i
);
4214 case RELOAD_FOR_INPUT
:
4215 SET_HARD_REG_BIT (reload_reg_used_in_input
[opnum
], i
);
4218 case RELOAD_FOR_OUTPUT
:
4219 SET_HARD_REG_BIT (reload_reg_used_in_output
[opnum
], i
);
4222 case RELOAD_FOR_INSN
:
4223 SET_HARD_REG_BIT (reload_reg_used_in_insn
, i
);
4227 SET_HARD_REG_BIT (reload_reg_used_at_all
, i
);
4231 /* Similarly, but show REGNO is no longer in use for a reload. */
4234 clear_reload_reg_in_use (regno
, opnum
, type
, mode
)
4237 enum reload_type type
;
4238 enum machine_mode mode
;
4240 int nregs
= HARD_REGNO_NREGS (regno
, mode
);
4243 for (i
= regno
; i
< nregs
+ regno
; i
++)
4248 CLEAR_HARD_REG_BIT (reload_reg_used
, i
);
4251 case RELOAD_FOR_INPUT_ADDRESS
:
4252 CLEAR_HARD_REG_BIT (reload_reg_used_in_input_addr
[opnum
], i
);
4255 case RELOAD_FOR_OUTPUT_ADDRESS
:
4256 CLEAR_HARD_REG_BIT (reload_reg_used_in_output_addr
[opnum
], i
);
4259 case RELOAD_FOR_OPERAND_ADDRESS
:
4260 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr
, i
);
4263 case RELOAD_FOR_OPADDR_ADDR
:
4264 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr_reload
, i
);
4267 case RELOAD_FOR_OTHER_ADDRESS
:
4268 CLEAR_HARD_REG_BIT (reload_reg_used_in_other_addr
, i
);
4271 case RELOAD_FOR_INPUT
:
4272 CLEAR_HARD_REG_BIT (reload_reg_used_in_input
[opnum
], i
);
4275 case RELOAD_FOR_OUTPUT
:
4276 CLEAR_HARD_REG_BIT (reload_reg_used_in_output
[opnum
], i
);
4279 case RELOAD_FOR_INSN
:
4280 CLEAR_HARD_REG_BIT (reload_reg_used_in_insn
, i
);
4286 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
4287 specified by OPNUM and TYPE. */
4290 reload_reg_free_p (regno
, opnum
, type
)
4293 enum reload_type type
;
4297 /* In use for a RELOAD_OTHER means it's not available for anything except
4298 RELOAD_FOR_OTHER_ADDRESS. Recall that RELOAD_FOR_OTHER_ADDRESS is known
4299 to be used only for inputs. */
4301 if (type
!= RELOAD_FOR_OTHER_ADDRESS
4302 && TEST_HARD_REG_BIT (reload_reg_used
, regno
))
4308 /* In use for anything except RELOAD_FOR_OTHER_ADDRESS means
4309 we can't use it for RELOAD_OTHER. */
4310 if (TEST_HARD_REG_BIT (reload_reg_used
, regno
)
4311 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr
, regno
)
4312 || TEST_HARD_REG_BIT (reload_reg_used_in_insn
, regno
))
4315 for (i
= 0; i
< reload_n_operands
; i
++)
4316 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr
[i
], regno
)
4317 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr
[i
], regno
)
4318 || TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
)
4319 || TEST_HARD_REG_BIT (reload_reg_used_in_output
[i
], regno
))
4324 case RELOAD_FOR_INPUT
:
4325 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn
, regno
)
4326 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr
, regno
))
4329 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload
, regno
))
4332 /* If it is used for some other input, can't use it. */
4333 for (i
= 0; i
< reload_n_operands
; i
++)
4334 if (TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
))
4337 /* If it is used in a later operand's address, can't use it. */
4338 for (i
= opnum
+ 1; i
< reload_n_operands
; i
++)
4339 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr
[i
], regno
))
4344 case RELOAD_FOR_INPUT_ADDRESS
:
4345 /* Can't use a register if it is used for an input address for this
4346 operand or used as an input in an earlier one. */
4347 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr
[opnum
], regno
))
4350 for (i
= 0; i
< opnum
; i
++)
4351 if (TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
))
4356 case RELOAD_FOR_OUTPUT_ADDRESS
:
4357 /* Can't use a register if it is used for an output address for this
4358 operand or used as an output in this or a later operand. */
4359 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr
[opnum
], regno
))
4362 for (i
= opnum
; i
< reload_n_operands
; i
++)
4363 if (TEST_HARD_REG_BIT (reload_reg_used_in_output
[i
], regno
))
4368 case RELOAD_FOR_OPERAND_ADDRESS
:
4369 for (i
= 0; i
< reload_n_operands
; i
++)
4370 if (TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
))
4373 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn
, regno
)
4374 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr
, regno
));
4376 case RELOAD_FOR_OPADDR_ADDR
:
4377 for (i
= 0; i
< reload_n_operands
; i
++)
4378 if (TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
))
4381 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload
, regno
));
4383 case RELOAD_FOR_OUTPUT
:
4384 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4385 outputs, or an operand address for this or an earlier output. */
4386 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn
, regno
))
4389 for (i
= 0; i
< reload_n_operands
; i
++)
4390 if (TEST_HARD_REG_BIT (reload_reg_used_in_output
[i
], regno
))
4393 for (i
= 0; i
<= opnum
; i
++)
4394 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr
[i
], regno
))
4399 case RELOAD_FOR_INSN
:
4400 for (i
= 0; i
< reload_n_operands
; i
++)
4401 if (TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
)
4402 || TEST_HARD_REG_BIT (reload_reg_used_in_output
[i
], regno
))
4405 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn
, regno
)
4406 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr
, regno
));
4408 case RELOAD_FOR_OTHER_ADDRESS
:
4409 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr
, regno
);
4414 /* Return 1 if the value in reload reg REGNO, as used by a reload
4415 needed for the part of the insn specified by OPNUM and TYPE,
4416 is not in use for a reload in any prior part of the insn.
4418 We can assume that the reload reg was already tested for availability
4419 at the time it is needed, and we should not check this again,
4420 in case the reg has already been marked in use. */
4423 reload_reg_free_before_p (regno
, opnum
, type
)
4426 enum reload_type type
;
4432 case RELOAD_FOR_OTHER_ADDRESS
:
4433 /* These always come first. */
4437 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr
, regno
);
4439 /* If this use is for part of the insn,
4440 check the reg is not in use for any prior part. It is tempting
4441 to try to do this by falling through from objecs that occur
4442 later in the insn to ones that occur earlier, but that will not
4443 correctly take into account the fact that here we MUST ignore
4444 things that would prevent the register from being allocated in
4445 the first place, since we know that it was allocated. */
4447 case RELOAD_FOR_OUTPUT_ADDRESS
:
4448 /* Earlier reloads are for earlier outputs or their addresses,
4449 any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
4450 RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
4452 for (i
= 0; i
< opnum
; i
++)
4453 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr
[i
], regno
)
4454 || TEST_HARD_REG_BIT (reload_reg_used_in_output
[i
], regno
))
4457 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn
, regno
))
4460 for (i
= 0; i
< reload_n_operands
; i
++)
4461 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr
[i
], regno
)
4462 || TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
))
4465 return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr
, regno
)
4466 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn
, regno
)
4467 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr
, regno
));
4469 case RELOAD_FOR_OUTPUT
:
4470 /* This can't be used in the output address for this operand and
4471 anything that can't be used for it, except that we've already
4472 tested for RELOAD_FOR_INSN objects. */
4474 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr
[opnum
], regno
))
4477 for (i
= 0; i
< opnum
; i
++)
4478 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr
[i
], regno
)
4479 || TEST_HARD_REG_BIT (reload_reg_used_in_output
[i
], regno
))
4482 for (i
= 0; i
< reload_n_operands
; i
++)
4483 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr
[i
], regno
)
4484 || TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
)
4485 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr
, regno
))
4488 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr
, regno
);
4490 case RELOAD_FOR_OPERAND_ADDRESS
:
4491 case RELOAD_FOR_OPADDR_ADDR
:
4492 case RELOAD_FOR_INSN
:
4493 /* These can't conflict with inputs, or each other, so all we have to
4494 test is input addresses and the addresses of OTHER items. */
4496 for (i
= 0; i
< reload_n_operands
; i
++)
4497 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr
[i
], regno
))
4500 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr
, regno
);
4502 case RELOAD_FOR_INPUT
:
4503 /* The only things earlier are the address for this and
4504 earlier inputs, other inputs (which we know we don't conflict
4505 with), and addresses of RELOAD_OTHER objects. */
4507 for (i
= 0; i
<= opnum
; i
++)
4508 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr
[i
], regno
))
4511 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr
, regno
);
4513 case RELOAD_FOR_INPUT_ADDRESS
:
4514 /* Similarly, all we have to check is for use in earlier inputs'
4516 for (i
= 0; i
< opnum
; i
++)
4517 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr
[i
], regno
))
4520 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr
, regno
);
4525 /* Return 1 if the value in reload reg REGNO, as used by a reload
4526 needed for the part of the insn specified by OPNUM and TYPE,
4527 is still available in REGNO at the end of the insn.
4529 We can assume that the reload reg was already tested for availability
4530 at the time it is needed, and we should not check this again,
4531 in case the reg has already been marked in use. */
4534 reload_reg_reaches_end_p (regno
, opnum
, type
)
4537 enum reload_type type
;
4544 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4545 its value must reach the end. */
4548 /* If this use is for part of the insn,
4549 its value reaches if no subsequent part uses the same register.
4550 Just like the above function, don't try to do this with lots
4553 case RELOAD_FOR_OTHER_ADDRESS
:
4554 /* Here we check for everything else, since these don't conflict
4555 with anything else and everything comes later. */
4557 for (i
= 0; i
< reload_n_operands
; i
++)
4558 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr
[i
], regno
)
4559 || TEST_HARD_REG_BIT (reload_reg_used_in_output
[i
], regno
)
4560 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr
[i
], regno
)
4561 || TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
))
4564 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr
, regno
)
4565 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn
, regno
)
4566 && ! TEST_HARD_REG_BIT (reload_reg_used
, regno
));
4568 case RELOAD_FOR_INPUT_ADDRESS
:
4569 /* Similar, except that we check only for this and subsequent inputs
4570 and the address of only subsequent inputs and we do not need
4571 to check for RELOAD_OTHER objects since they are known not to
4574 for (i
= opnum
; i
< reload_n_operands
; i
++)
4575 if (TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
))
4578 for (i
= opnum
+ 1; i
< reload_n_operands
; i
++)
4579 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr
[i
], regno
))
4582 for (i
= 0; i
< reload_n_operands
; i
++)
4583 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr
[i
], regno
)
4584 || TEST_HARD_REG_BIT (reload_reg_used_in_output
[i
], regno
))
4587 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload
, regno
))
4590 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr
, regno
)
4591 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn
, regno
));
4593 case RELOAD_FOR_INPUT
:
4594 /* Similar to input address, except we start at the next operand for
4595 both input and input address and we do not check for
4596 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4599 for (i
= opnum
+ 1; i
< reload_n_operands
; i
++)
4600 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr
[i
], regno
)
4601 || TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
))
4604 /* ... fall through ... */
4606 case RELOAD_FOR_OPERAND_ADDRESS
:
4607 /* Check outputs and their addresses. */
4609 for (i
= 0; i
< reload_n_operands
; i
++)
4610 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr
[i
], regno
)
4611 || TEST_HARD_REG_BIT (reload_reg_used_in_output
[i
], regno
))
4616 case RELOAD_FOR_OPADDR_ADDR
:
4617 for (i
= 0; i
< reload_n_operands
; i
++)
4618 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr
[i
], regno
)
4619 || TEST_HARD_REG_BIT (reload_reg_used_in_output
[i
], regno
))
4622 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr
, regno
)
4623 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn
, regno
));
4625 case RELOAD_FOR_INSN
:
4626 /* These conflict with other outputs with RELOAD_OTHER. So
4627 we need only check for output addresses. */
4631 /* ... fall through ... */
4633 case RELOAD_FOR_OUTPUT
:
4634 case RELOAD_FOR_OUTPUT_ADDRESS
:
4635 /* We already know these can't conflict with a later output. So the
4636 only thing to check are later output addresses. */
4637 for (i
= opnum
+ 1; i
< reload_n_operands
; i
++)
4638 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr
[i
], regno
))
4647 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
4650 This function uses the same algorithm as reload_reg_free_p above. */
4653 reloads_conflict (r1
, r2
)
4656 enum reload_type r1_type
= reload_when_needed
[r1
];
4657 enum reload_type r2_type
= reload_when_needed
[r2
];
4658 int r1_opnum
= reload_opnum
[r1
];
4659 int r2_opnum
= reload_opnum
[r2
];
4661 /* RELOAD_OTHER conflicts with everything except RELOAD_FOR_OTHER_ADDRESS. */
4663 if (r2_type
== RELOAD_OTHER
&& r1_type
!= RELOAD_FOR_OTHER_ADDRESS
)
4666 /* Otherwise, check conflicts differently for each type. */
4670 case RELOAD_FOR_INPUT
:
4671 return (r2_type
== RELOAD_FOR_INSN
4672 || r2_type
== RELOAD_FOR_OPERAND_ADDRESS
4673 || r2_type
== RELOAD_FOR_OPADDR_ADDR
4674 || r2_type
== RELOAD_FOR_INPUT
4675 || (r2_type
== RELOAD_FOR_INPUT_ADDRESS
&& r2_opnum
> r1_opnum
));
4677 case RELOAD_FOR_INPUT_ADDRESS
:
4678 return ((r2_type
== RELOAD_FOR_INPUT_ADDRESS
&& r1_opnum
== r2_opnum
)
4679 || (r2_type
== RELOAD_FOR_INPUT
&& r2_opnum
< r1_opnum
));
4681 case RELOAD_FOR_OUTPUT_ADDRESS
:
4682 return ((r2_type
== RELOAD_FOR_OUTPUT_ADDRESS
&& r2_opnum
== r1_opnum
)
4683 || (r2_type
== RELOAD_FOR_OUTPUT
&& r2_opnum
>= r1_opnum
));
4685 case RELOAD_FOR_OPERAND_ADDRESS
:
4686 return (r2_type
== RELOAD_FOR_INPUT
|| r2_type
== RELOAD_FOR_INSN
4687 || r2_type
== RELOAD_FOR_OPERAND_ADDRESS
);
4689 case RELOAD_FOR_OPADDR_ADDR
:
4690 return (r2_type
== RELOAD_FOR_INPUT
4691 || r2_type
== RELOAD_FOR_OPADDR_ADDR
);
4693 case RELOAD_FOR_OUTPUT
:
4694 return (r2_type
== RELOAD_FOR_INSN
|| r2_type
== RELOAD_FOR_OUTPUT
4695 || (r2_type
== RELOAD_FOR_OUTPUT_ADDRESS
4696 && r2_opnum
>= r1_opnum
));
4698 case RELOAD_FOR_INSN
:
4699 return (r2_type
== RELOAD_FOR_INPUT
|| r2_type
== RELOAD_FOR_OUTPUT
4700 || r2_type
== RELOAD_FOR_INSN
4701 || r2_type
== RELOAD_FOR_OPERAND_ADDRESS
);
4703 case RELOAD_FOR_OTHER_ADDRESS
:
4704 return r2_type
== RELOAD_FOR_OTHER_ADDRESS
;
4707 return r2_type
!= RELOAD_FOR_OTHER_ADDRESS
;
4714 /* Vector of reload-numbers showing the order in which the reloads should
4716 short reload_order
[MAX_RELOADS
];
4718 /* Indexed by reload number, 1 if incoming value
4719 inherited from previous insns. */
4720 char reload_inherited
[MAX_RELOADS
];
4722 /* For an inherited reload, this is the insn the reload was inherited from,
4723 if we know it. Otherwise, this is 0. */
4724 rtx reload_inheritance_insn
[MAX_RELOADS
];
4726 /* If non-zero, this is a place to get the value of the reload,
4727 rather than using reload_in. */
4728 rtx reload_override_in
[MAX_RELOADS
];
4730 /* For each reload, the index in spill_regs of the spill register used,
4731 or -1 if we did not need one of the spill registers for this reload. */
4732 int reload_spill_index
[MAX_RELOADS
];
4734 /* Find a spill register to use as a reload register for reload R.
4735 LAST_RELOAD is non-zero if this is the last reload for the insn being
4738 Set reload_reg_rtx[R] to the register allocated.
4740 If NOERROR is nonzero, we return 1 if successful,
4741 or 0 if we couldn't find a spill reg and we didn't change anything. */
4744 allocate_reload_reg (r
, insn
, last_reload
, noerror
)
4756 /* If we put this reload ahead, thinking it is a group,
4757 then insist on finding a group. Otherwise we can grab a
4758 reg that some other reload needs.
4759 (That can happen when we have a 68000 DATA_OR_FP_REG
4760 which is a group of data regs or one fp reg.)
4761 We need not be so restrictive if there are no more reloads
4764 ??? Really it would be nicer to have smarter handling
4765 for that kind of reg class, where a problem like this is normal.
4766 Perhaps those classes should be avoided for reloading
4767 by use of more alternatives. */
4769 int force_group
= reload_nregs
[r
] > 1 && ! last_reload
;
4771 /* If we want a single register and haven't yet found one,
4772 take any reg in the right class and not in use.
4773 If we want a consecutive group, here is where we look for it.
4775 We use two passes so we can first look for reload regs to
4776 reuse, which are already in use for other reloads in this insn,
4777 and only then use additional registers.
4778 I think that maximizing reuse is needed to make sure we don't
4779 run out of reload regs. Suppose we have three reloads, and
4780 reloads A and B can share regs. These need two regs.
4781 Suppose A and B are given different regs.
4782 That leaves none for C. */
4783 for (pass
= 0; pass
< 2; pass
++)
4785 /* I is the index in spill_regs.
4786 We advance it round-robin between insns to use all spill regs
4787 equally, so that inherited reloads have a chance
4788 of leapfrogging each other. Don't do this, however, when we have
4789 group needs and failure would be fatal; if we only have a relatively
4790 small number of spill registers, and more than one of them has
4791 group needs, then by starting in the middle, we may end up
4792 allocating the first one in such a way that we are not left with
4793 sufficient groups to handle the rest. */
4795 if (noerror
|| ! force_group
)
4800 for (count
= 0; count
< n_spills
; count
++)
4802 int class = (int) reload_reg_class
[r
];
4804 i
= (i
+ 1) % n_spills
;
4806 if (reload_reg_free_p (spill_regs
[i
], reload_opnum
[r
],
4807 reload_when_needed
[r
])
4808 && TEST_HARD_REG_BIT (reg_class_contents
[class], spill_regs
[i
])
4809 && HARD_REGNO_MODE_OK (spill_regs
[i
], reload_mode
[r
])
4810 /* Look first for regs to share, then for unshared. But
4811 don't share regs used for inherited reloads; they are
4812 the ones we want to preserve. */
4814 || (TEST_HARD_REG_BIT (reload_reg_used_at_all
,
4816 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit
,
4819 int nr
= HARD_REGNO_NREGS (spill_regs
[i
], reload_mode
[r
]);
4820 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
4821 (on 68000) got us two FP regs. If NR is 1,
4822 we would reject both of them. */
4824 nr
= CLASS_MAX_NREGS (reload_reg_class
[r
], reload_mode
[r
]);
4825 /* If we need only one reg, we have already won. */
4828 /* But reject a single reg if we demand a group. */
4833 /* Otherwise check that as many consecutive regs as we need
4835 Also, don't use for a group registers that are
4836 needed for nongroups. */
4837 if (! TEST_HARD_REG_BIT (counted_for_nongroups
, spill_regs
[i
]))
4840 regno
= spill_regs
[i
] + nr
- 1;
4841 if (!(TEST_HARD_REG_BIT (reg_class_contents
[class], regno
)
4842 && spill_reg_order
[regno
] >= 0
4843 && reload_reg_free_p (regno
, reload_opnum
[r
],
4844 reload_when_needed
[r
])
4845 && ! TEST_HARD_REG_BIT (counted_for_nongroups
,
4855 /* If we found something on pass 1, omit pass 2. */
4856 if (count
< n_spills
)
4860 /* We should have found a spill register by now. */
4861 if (count
== n_spills
)
4868 /* I is the index in SPILL_REG_RTX of the reload register we are to
4869 allocate. Get an rtx for it and find its register number. */
4871 new = spill_reg_rtx
[i
];
4873 if (new == 0 || GET_MODE (new) != reload_mode
[r
])
4874 spill_reg_rtx
[i
] = new
4875 = gen_rtx (REG
, reload_mode
[r
], spill_regs
[i
]);
4877 regno
= true_regnum (new);
4879 /* Detect when the reload reg can't hold the reload mode.
4880 This used to be one `if', but Sequent compiler can't handle that. */
4881 if (HARD_REGNO_MODE_OK (regno
, reload_mode
[r
]))
4883 enum machine_mode test_mode
= VOIDmode
;
4885 test_mode
= GET_MODE (reload_in
[r
]);
4886 /* If reload_in[r] has VOIDmode, it means we will load it
4887 in whatever mode the reload reg has: to wit, reload_mode[r].
4888 We have already tested that for validity. */
4889 /* Aside from that, we need to test that the expressions
4890 to reload from or into have modes which are valid for this
4891 reload register. Otherwise the reload insns would be invalid. */
4892 if (! (reload_in
[r
] != 0 && test_mode
!= VOIDmode
4893 && ! HARD_REGNO_MODE_OK (regno
, test_mode
)))
4894 if (! (reload_out
[r
] != 0
4895 && ! HARD_REGNO_MODE_OK (regno
, GET_MODE (reload_out
[r
]))))
4897 /* The reg is OK. */
4900 /* Mark as in use for this insn the reload regs we use
4902 mark_reload_reg_in_use (spill_regs
[i
], reload_opnum
[r
],
4903 reload_when_needed
[r
], reload_mode
[r
]);
4905 reload_reg_rtx
[r
] = new;
4906 reload_spill_index
[r
] = i
;
4911 /* The reg is not OK. */
4916 if (asm_noperands (PATTERN (insn
)) < 0)
4917 /* It's the compiler's fault. */
4918 fatal_insn ("Could not find a spill register", insn
);
4920 /* It's the user's fault; the operand's mode and constraint
4921 don't match. Disable this reload so we don't crash in final. */
4922 error_for_asm (insn
,
4923 "`asm' operand constraint incompatible with operand size");
4926 reload_reg_rtx
[r
] = 0;
4927 reload_optional
[r
] = 1;
4928 reload_secondary_p
[r
] = 1;
4933 /* Assign hard reg targets for the pseudo-registers we must reload
4934 into hard regs for this insn.
4935 Also output the instructions to copy them in and out of the hard regs.
4937 For machines with register classes, we are responsible for
4938 finding a reload reg in the proper class. */
4941 choose_reload_regs (insn
, avoid_return_reg
)
4943 rtx avoid_return_reg
;
4946 int max_group_size
= 1;
4947 enum reg_class group_class
= NO_REGS
;
4950 rtx save_reload_reg_rtx
[MAX_RELOADS
];
4951 char save_reload_inherited
[MAX_RELOADS
];
4952 rtx save_reload_inheritance_insn
[MAX_RELOADS
];
4953 rtx save_reload_override_in
[MAX_RELOADS
];
4954 int save_reload_spill_index
[MAX_RELOADS
];
4955 HARD_REG_SET save_reload_reg_used
;
4956 HARD_REG_SET save_reload_reg_used_in_input_addr
[MAX_RECOG_OPERANDS
];
4957 HARD_REG_SET save_reload_reg_used_in_output_addr
[MAX_RECOG_OPERANDS
];
4958 HARD_REG_SET save_reload_reg_used_in_input
[MAX_RECOG_OPERANDS
];
4959 HARD_REG_SET save_reload_reg_used_in_output
[MAX_RECOG_OPERANDS
];
4960 HARD_REG_SET save_reload_reg_used_in_op_addr
;
4961 HARD_REG_SET save_reload_reg_used_in_op_addr_reload
;
4962 HARD_REG_SET save_reload_reg_used_in_insn
;
4963 HARD_REG_SET save_reload_reg_used_in_other_addr
;
4964 HARD_REG_SET save_reload_reg_used_at_all
;
4966 bzero (reload_inherited
, MAX_RELOADS
);
4967 bzero ((char *) reload_inheritance_insn
, MAX_RELOADS
* sizeof (rtx
));
4968 bzero ((char *) reload_override_in
, MAX_RELOADS
* sizeof (rtx
));
4970 CLEAR_HARD_REG_SET (reload_reg_used
);
4971 CLEAR_HARD_REG_SET (reload_reg_used_at_all
);
4972 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr
);
4973 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload
);
4974 CLEAR_HARD_REG_SET (reload_reg_used_in_insn
);
4975 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr
);
4977 for (i
= 0; i
< reload_n_operands
; i
++)
4979 CLEAR_HARD_REG_SET (reload_reg_used_in_output
[i
]);
4980 CLEAR_HARD_REG_SET (reload_reg_used_in_input
[i
]);
4981 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr
[i
]);
4982 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr
[i
]);
4985 #ifdef SMALL_REGISTER_CLASSES
4986 /* Don't bother with avoiding the return reg
4987 if we have no mandatory reload that could use it. */
4988 if (avoid_return_reg
)
4991 int regno
= REGNO (avoid_return_reg
);
4993 = HARD_REGNO_NREGS (regno
, GET_MODE (avoid_return_reg
));
4996 for (r
= regno
; r
< regno
+ nregs
; r
++)
4997 if (spill_reg_order
[r
] >= 0)
4998 for (j
= 0; j
< n_reloads
; j
++)
4999 if (!reload_optional
[j
] && reload_reg_rtx
[j
] == 0
5000 && (reload_in
[j
] != 0 || reload_out
[j
] != 0
5001 || reload_secondary_p
[j
])
5003 TEST_HARD_REG_BIT (reg_class_contents
[(int) reload_reg_class
[j
]], r
))
5006 avoid_return_reg
= 0;
5008 #endif /* SMALL_REGISTER_CLASSES */
5010 #if 0 /* Not needed, now that we can always retry without inheritance. */
5011 /* See if we have more mandatory reloads than spill regs.
5012 If so, then we cannot risk optimizations that could prevent
5013 reloads from sharing one spill register.
5015 Since we will try finding a better register than reload_reg_rtx
5016 unless it is equal to reload_in or reload_out, count such reloads. */
5020 #ifdef SMALL_REGISTER_CLASSES
5021 int tem
= (avoid_return_reg
!= 0);
5023 for (j
= 0; j
< n_reloads
; j
++)
5024 if (! reload_optional
[j
]
5025 && (reload_in
[j
] != 0 || reload_out
[j
] != 0 || reload_secondary_p
[j
])
5026 && (reload_reg_rtx
[j
] == 0
5027 || (! rtx_equal_p (reload_reg_rtx
[j
], reload_in
[j
])
5028 && ! rtx_equal_p (reload_reg_rtx
[j
], reload_out
[j
]))))
5035 #ifdef SMALL_REGISTER_CLASSES
5036 /* Don't use the subroutine call return reg for a reload
5037 if we are supposed to avoid it. */
5038 if (avoid_return_reg
)
5040 int regno
= REGNO (avoid_return_reg
);
5042 = HARD_REGNO_NREGS (regno
, GET_MODE (avoid_return_reg
));
5045 for (r
= regno
; r
< regno
+ nregs
; r
++)
5046 if (spill_reg_order
[r
] >= 0)
5047 SET_HARD_REG_BIT (reload_reg_used
, r
);
5049 #endif /* SMALL_REGISTER_CLASSES */
5051 /* In order to be certain of getting the registers we need,
5052 we must sort the reloads into order of increasing register class.
5053 Then our grabbing of reload registers will parallel the process
5054 that provided the reload registers.
5056 Also note whether any of the reloads wants a consecutive group of regs.
5057 If so, record the maximum size of the group desired and what
5058 register class contains all the groups needed by this insn. */
5060 for (j
= 0; j
< n_reloads
; j
++)
5062 reload_order
[j
] = j
;
5063 reload_spill_index
[j
] = -1;
5066 = (reload_inmode
[j
] == VOIDmode
5067 || (GET_MODE_SIZE (reload_outmode
[j
])
5068 > GET_MODE_SIZE (reload_inmode
[j
])))
5069 ? reload_outmode
[j
] : reload_inmode
[j
];
5071 reload_nregs
[j
] = CLASS_MAX_NREGS (reload_reg_class
[j
], reload_mode
[j
]);
5073 if (reload_nregs
[j
] > 1)
5075 max_group_size
= MAX (reload_nregs
[j
], max_group_size
);
5076 group_class
= reg_class_superunion
[(int)reload_reg_class
[j
]][(int)group_class
];
5079 /* If we have already decided to use a certain register,
5080 don't use it in another way. */
5081 if (reload_reg_rtx
[j
])
5082 mark_reload_reg_in_use (REGNO (reload_reg_rtx
[j
]), reload_opnum
[j
],
5083 reload_when_needed
[j
], reload_mode
[j
]);
5087 qsort (reload_order
, n_reloads
, sizeof (short), reload_reg_class_lower
);
5089 bcopy ((char *) reload_reg_rtx
, (char *) save_reload_reg_rtx
,
5090 sizeof reload_reg_rtx
);
5091 bcopy (reload_inherited
, save_reload_inherited
, sizeof reload_inherited
);
5092 bcopy ((char *) reload_inheritance_insn
,
5093 (char *) save_reload_inheritance_insn
,
5094 sizeof reload_inheritance_insn
);
5095 bcopy ((char *) reload_override_in
, (char *) save_reload_override_in
,
5096 sizeof reload_override_in
);
5097 bcopy ((char *) reload_spill_index
, (char *) save_reload_spill_index
,
5098 sizeof reload_spill_index
);
5099 COPY_HARD_REG_SET (save_reload_reg_used
, reload_reg_used
);
5100 COPY_HARD_REG_SET (save_reload_reg_used_at_all
, reload_reg_used_at_all
);
5101 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr
,
5102 reload_reg_used_in_op_addr
);
5104 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr_reload
,
5105 reload_reg_used_in_op_addr_reload
);
5107 COPY_HARD_REG_SET (save_reload_reg_used_in_insn
,
5108 reload_reg_used_in_insn
);
5109 COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr
,
5110 reload_reg_used_in_other_addr
);
5112 for (i
= 0; i
< reload_n_operands
; i
++)
5114 COPY_HARD_REG_SET (save_reload_reg_used_in_output
[i
],
5115 reload_reg_used_in_output
[i
]);
5116 COPY_HARD_REG_SET (save_reload_reg_used_in_input
[i
],
5117 reload_reg_used_in_input
[i
]);
5118 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr
[i
],
5119 reload_reg_used_in_input_addr
[i
]);
5120 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr
[i
],
5121 reload_reg_used_in_output_addr
[i
]);
5124 /* If -O, try first with inheritance, then turning it off.
5125 If not -O, don't do inheritance.
5126 Using inheritance when not optimizing leads to paradoxes
5127 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5128 because one side of the comparison might be inherited. */
5130 for (inheritance
= optimize
> 0; inheritance
>= 0; inheritance
--)
5132 /* Process the reloads in order of preference just found.
5133 Beyond this point, subregs can be found in reload_reg_rtx.
5135 This used to look for an existing reloaded home for all
5136 of the reloads, and only then perform any new reloads.
5137 But that could lose if the reloads were done out of reg-class order
5138 because a later reload with a looser constraint might have an old
5139 home in a register needed by an earlier reload with a tighter constraint.
5141 To solve this, we make two passes over the reloads, in the order
5142 described above. In the first pass we try to inherit a reload
5143 from a previous insn. If there is a later reload that needs a
5144 class that is a proper subset of the class being processed, we must
5145 also allocate a spill register during the first pass.
5147 Then make a second pass over the reloads to allocate any reloads
5148 that haven't been given registers yet. */
5150 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit
);
5152 for (j
= 0; j
< n_reloads
; j
++)
5154 register int r
= reload_order
[j
];
5156 /* Ignore reloads that got marked inoperative. */
5157 if (reload_out
[r
] == 0 && reload_in
[r
] == 0 && ! reload_secondary_p
[r
])
5160 /* If find_reloads chose a to use reload_in or reload_out as a reload
5161 register, we don't need to chose one. Otherwise, try even if it found
5162 one since we might save an insn if we find the value lying around. */
5163 if (reload_in
[r
] != 0 && reload_reg_rtx
[r
] != 0
5164 && (rtx_equal_p (reload_in
[r
], reload_reg_rtx
[r
])
5165 || rtx_equal_p (reload_out
[r
], reload_reg_rtx
[r
])))
5168 #if 0 /* No longer needed for correct operation.
5169 It might give better code, or might not; worth an experiment? */
5170 /* If this is an optional reload, we can't inherit from earlier insns
5171 until we are sure that any non-optional reloads have been allocated.
5172 The following code takes advantage of the fact that optional reloads
5173 are at the end of reload_order. */
5174 if (reload_optional
[r
] != 0)
5175 for (i
= 0; i
< j
; i
++)
5176 if ((reload_out
[reload_order
[i
]] != 0
5177 || reload_in
[reload_order
[i
]] != 0
5178 || reload_secondary_p
[reload_order
[i
]])
5179 && ! reload_optional
[reload_order
[i
]]
5180 && reload_reg_rtx
[reload_order
[i
]] == 0)
5181 allocate_reload_reg (reload_order
[i
], insn
, 0, inheritance
);
5184 /* First see if this pseudo is already available as reloaded
5185 for a previous insn. We cannot try to inherit for reloads
5186 that are smaller than the maximum number of registers needed
5187 for groups unless the register we would allocate cannot be used
5190 We could check here to see if this is a secondary reload for
5191 an object that is already in a register of the desired class.
5192 This would avoid the need for the secondary reload register.
5193 But this is complex because we can't easily determine what
5194 objects might want to be loaded via this reload. So let a register
5195 be allocated here. In `emit_reload_insns' we suppress one of the
5196 loads in the case described above. */
5200 register int regno
= -1;
5201 enum machine_mode mode
;
5203 if (reload_in
[r
] == 0)
5205 else if (GET_CODE (reload_in
[r
]) == REG
)
5207 regno
= REGNO (reload_in
[r
]);
5208 mode
= GET_MODE (reload_in
[r
]);
5210 else if (GET_CODE (reload_in_reg
[r
]) == REG
)
5212 regno
= REGNO (reload_in_reg
[r
]);
5213 mode
= GET_MODE (reload_in_reg
[r
]);
5216 /* This won't work, since REGNO can be a pseudo reg number.
5217 Also, it takes much more hair to keep track of all the things
5218 that can invalidate an inherited reload of part of a pseudoreg. */
5219 else if (GET_CODE (reload_in
[r
]) == SUBREG
5220 && GET_CODE (SUBREG_REG (reload_in
[r
])) == REG
)
5221 regno
= REGNO (SUBREG_REG (reload_in
[r
])) + SUBREG_WORD (reload_in
[r
]);
5224 if (regno
>= 0 && reg_last_reload_reg
[regno
] != 0)
5226 i
= spill_reg_order
[REGNO (reg_last_reload_reg
[regno
])];
5228 if (reg_reloaded_contents
[i
] == regno
5229 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg
[regno
]))
5230 >= GET_MODE_SIZE (mode
))
5231 && HARD_REGNO_MODE_OK (spill_regs
[i
], reload_mode
[r
])
5232 && TEST_HARD_REG_BIT (reg_class_contents
[(int) reload_reg_class
[r
]],
5234 && (reload_nregs
[r
] == max_group_size
5235 || ! TEST_HARD_REG_BIT (reg_class_contents
[(int) group_class
],
5237 && reload_reg_free_p (spill_regs
[i
], reload_opnum
[r
],
5238 reload_when_needed
[r
])
5239 && reload_reg_free_before_p (spill_regs
[i
],
5241 reload_when_needed
[r
]))
5243 /* If a group is needed, verify that all the subsequent
5244 registers still have their values intact. */
5246 = HARD_REGNO_NREGS (spill_regs
[i
], reload_mode
[r
]);
5249 for (k
= 1; k
< nr
; k
++)
5250 if (reg_reloaded_contents
[spill_reg_order
[spill_regs
[i
] + k
]]
5258 /* We found a register that contains the
5259 value we need. If this register is the
5260 same as an `earlyclobber' operand of the
5261 current insn, just mark it as a place to
5262 reload from since we can't use it as the
5263 reload register itself. */
5265 for (i1
= 0; i1
< n_earlyclobbers
; i1
++)
5266 if (reg_overlap_mentioned_for_reload_p
5267 (reg_last_reload_reg
[regno
],
5268 reload_earlyclobbers
[i1
]))
5271 if (i1
!= n_earlyclobbers
5272 /* Don't really use the inherited spill reg
5273 if we need it wider than we've got it. */
5274 || (GET_MODE_SIZE (reload_mode
[r
])
5275 > GET_MODE_SIZE (mode
)))
5276 reload_override_in
[r
] = reg_last_reload_reg
[regno
];
5280 /* We can use this as a reload reg. */
5281 /* Mark the register as in use for this part of
5283 mark_reload_reg_in_use (spill_regs
[i
],
5285 reload_when_needed
[r
],
5287 reload_reg_rtx
[r
] = reg_last_reload_reg
[regno
];
5288 reload_inherited
[r
] = 1;
5289 reload_inheritance_insn
[r
]
5290 = reg_reloaded_insn
[i
];
5291 reload_spill_index
[r
] = i
;
5292 for (k
= 0; k
< nr
; k
++)
5293 SET_HARD_REG_BIT (reload_reg_used_for_inherit
,
	  /* Here's another way to see if the value is already lying around.  */
	  if (inheritance
	      && reload_in[r] != 0
	      && ! reload_inherited[r]
	      && reload_out[r] == 0
	      && (CONSTANT_P (reload_in[r])
		  || GET_CODE (reload_in[r]) == PLUS
		  || GET_CODE (reload_in[r]) == REG
		  || GET_CODE (reload_in[r]) == MEM)
	      && (reload_nregs[r] == max_group_size
		  || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
	    {
	      register rtx equiv
		= find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
				  -1, NULL_PTR, 0, reload_mode[r]);

	      if (equiv != 0)
		{
		  if (GET_CODE (equiv) == REG)
		    regno = REGNO (equiv);
		  else if (GET_CODE (equiv) == SUBREG)
		    {
		      /* This must be a SUBREG of a hard register.
			 Make a new REG since this might be used in an
			 address and not all machines support SUBREGs
			 there.  */
		      regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
		      equiv = gen_rtx (REG, reload_mode[r], regno);
		    }
		}

	      /* If we found a spill reg, reject it unless it is free
		 and of the desired class.  */
	      if (equiv != 0
		  && ((spill_reg_order[regno] >= 0
		       && ! reload_reg_free_before_p (regno, reload_opnum[r],
						      reload_when_needed[r]))
		      || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
					      regno)))
		equiv = 0;

	      if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
		equiv = 0;

	      if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
		equiv = 0;

	      /* We found a register that contains the value we need.
		 If this register is the same as an `earlyclobber' operand
		 of the current insn, just mark it as a place to reload from
		 since we can't use it as the reload register itself.  */

	      if (equiv != 0)
		for (i = 0; i < n_earlyclobbers; i++)
		  if (reg_overlap_mentioned_for_reload_p (equiv,
							  reload_earlyclobbers[i]))
		    {
		      reload_override_in[r] = equiv;
		      equiv = 0;
		      break;
		    }

	      /* JRV: If the equiv register we have found is explicitly
		 clobbered in the current insn, mark but don't use, as above.  */

	      if (equiv != 0 && regno_clobbered_p (regno, insn))
		{
		  reload_override_in[r] = equiv;
		  equiv = 0;
		}

	      /* If we found an equivalent reg, say no code need be generated
		 to load it, and use it as our reload reg.  */
	      if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
		{
		  reload_reg_rtx[r] = equiv;
		  reload_inherited[r] = 1;
		  /* If it is a spill reg,
		     mark the spill reg as in use for this insn.  */
		  i = spill_reg_order[regno];
		  if (i >= 0)
		    {
		      int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);
		      mark_reload_reg_in_use (regno, reload_opnum[r],
					      reload_when_needed[r],
					      reload_mode[r]);
		      for (k = 0; k < nr; k++)
			SET_HARD_REG_BIT (reload_reg_used_for_inherit, regno + k);
		    }
		}
	    }
	  /* If we found a register to use already, or if this is an optional
	     reload, we are done.  */
	  if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
	    continue;

#if 0 /* No longer needed for correct operation.  Might or might not
	 give better code on the average.  Want to experiment?  */

	  /* See if there is a later reload that has a class different from our
	     class that intersects our class or that requires less register
	     than our reload.  If so, we must allocate a register to this
	     reload now, since that reload might inherit a previous reload
	     and take the only available register in our class.  Don't do this
	     for optional reloads since they will force all previous reloads
	     to be allocated.  Also don't do this for reloads that have been
	     turned off.  */

	  for (i = j + 1; i < n_reloads; i++)
	    {
	      int s = reload_order[i];

	      if ((reload_in[s] == 0 && reload_out[s] == 0
		   && ! reload_secondary_p[s])
		  || reload_optional[s])
		continue;

	      if ((reload_reg_class[s] != reload_reg_class[r]
		   && reg_classes_intersect_p (reload_reg_class[r],
					       reload_reg_class[s]))
		  || reload_nregs[s] < reload_nregs[r])
		break;
	    }

	  if (i == n_reloads)
	    continue;
#endif

	  allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
	}
      /* Now allocate reload registers for anything non-optional that
	 didn't get one yet.  */
      for (j = 0; j < n_reloads; j++)
	{
	  register int r = reload_order[j];

	  /* Ignore reloads that got marked inoperative.  */
	  if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
	    continue;

	  /* Skip reloads that already have a register allocated or are
	     optional.  */
	  if (reload_reg_rtx[r] != 0 || reload_optional[r])
	    continue;

	  if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
	    break;
	}

      /* If that loop got all the way, we have won.  */
      if (j == n_reloads)
	break;
      /* Loop around and try without any inheritance.  */
      /* First undo everything done by the failed attempt
	 to allocate with inheritance.  */
      bcopy ((char *) save_reload_reg_rtx, (char *) reload_reg_rtx,
	     sizeof reload_reg_rtx);
      bcopy ((char *) save_reload_inherited, (char *) reload_inherited,
	     sizeof reload_inherited);
      bcopy ((char *) save_reload_inheritance_insn,
	     (char *) reload_inheritance_insn,
	     sizeof reload_inheritance_insn);
      bcopy ((char *) save_reload_override_in, (char *) reload_override_in,
	     sizeof reload_override_in);
      bcopy ((char *) save_reload_spill_index, (char *) reload_spill_index,
	     sizeof reload_spill_index);
      COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
      COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
      COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
			 save_reload_reg_used_in_op_addr);
      COPY_HARD_REG_SET (reload_reg_used_in_op_addr_reload,
			 save_reload_reg_used_in_op_addr_reload);
      COPY_HARD_REG_SET (reload_reg_used_in_insn,
			 save_reload_reg_used_in_insn);
      COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
			 save_reload_reg_used_in_other_addr);

      for (i = 0; i < reload_n_operands; i++)
	{
	  COPY_HARD_REG_SET (reload_reg_used_in_input[i],
			     save_reload_reg_used_in_input[i]);
	  COPY_HARD_REG_SET (reload_reg_used_in_output[i],
			     save_reload_reg_used_in_output[i]);
	  COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
			     save_reload_reg_used_in_input_addr[i]);
	  COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
			     save_reload_reg_used_in_output_addr[i]);
	}
    }
  /* If we thought we could inherit a reload, because it seemed that
     nothing else wanted the same reload register earlier in the insn,
     verify that assumption, now that all reloads have been assigned.  */

  for (j = 0; j < n_reloads; j++)
    {
      register int r = reload_order[j];

      if (reload_inherited[r] && reload_reg_rtx[r] != 0
	  && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
					 reload_opnum[r],
					 reload_when_needed[r]))
	reload_inherited[r] = 0;

      /* If we found a better place to reload from,
	 validate it in the same fashion, if it is a reload reg.  */
      if (reload_override_in[r]
	  && (GET_CODE (reload_override_in[r]) == REG
	      || GET_CODE (reload_override_in[r]) == SUBREG))
	{
	  int regno = true_regnum (reload_override_in[r]);
	  if (spill_reg_order[regno] >= 0
	      && ! reload_reg_free_before_p (regno, reload_opnum[r],
					     reload_when_needed[r]))
	    reload_override_in[r] = 0;
	}
    }
  /* Now that reload_override_in is known valid,
     actually override reload_in.  */
  for (j = 0; j < n_reloads; j++)
    if (reload_override_in[j])
      reload_in[j] = reload_override_in[j];
  /* If this reload won't be done because it has been cancelled or is
     optional and not inherited, clear reload_reg_rtx so other
     routines (such as subst_reloads) don't get confused.  */
  for (j = 0; j < n_reloads; j++)
    if (reload_reg_rtx[j] != 0
	&& ((reload_optional[j] && ! reload_inherited[j])
	    || (reload_in[j] == 0 && reload_out[j] == 0
		&& ! reload_secondary_p[j])))
      {
	int regno = true_regnum (reload_reg_rtx[j]);

	if (spill_reg_order[regno] >= 0)
	  clear_reload_reg_in_use (regno, reload_opnum[j],
				   reload_when_needed[j], reload_mode[j]);
	reload_reg_rtx[j] = 0;
      }
  /* Record which pseudos and which spill regs have output reloads.  */
  for (j = 0; j < n_reloads; j++)
    {
      register int r = reload_order[j];

      i = reload_spill_index[r];

      /* I is nonneg if this reload used one of the spill regs.
	 If reload_reg_rtx[r] is 0, this is an optional reload
	 that we opted to ignore.  */
      if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
	  && reload_reg_rtx[r] != 0)
	{
	  register int nregno = REGNO (reload_out[r]);
	  int nr = 1;

	  if (nregno < FIRST_PSEUDO_REGISTER)
	    nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);

	  while (--nr >= 0)
	    reg_has_output_reload[nregno + nr] = 1;

	  if (i >= 0)
	    {
	      nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
	      while (--nr >= 0)
		SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
	    }

	  if (reload_when_needed[r] != RELOAD_OTHER
	      && reload_when_needed[r] != RELOAD_FOR_OUTPUT
	      && reload_when_needed[r] != RELOAD_FOR_INSN)
	    abort ();
	}
    }
}
/* If SMALL_REGISTER_CLASSES is defined, we may not have merged two
   reloads of the same item for fear that we might not have enough reload
   registers.  However, normally they will get the same reload register
   and hence actually need not be loaded twice.

   Here we check for the most common case of this phenomenon: when we have
   a number of reloads for the same object, each of which were allocated
   the same reload_reg_rtx, that reload_reg_rtx is not used for any other
   reload, and is not modified in the insn itself.  If we find such,
   merge all the reloads and set the resulting reload to RELOAD_OTHER.
   This will not increase the number of spill registers needed and will
   prevent redundant code.  */
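
/* Illustrative sketch (added for exposition, not part of the algorithm):
   suppose an insn has two input reloads of the same pseudo, say reload 0
   and reload 2, both of which were assigned the same hard register
   (reload_reg_rtx[0] == reload_reg_rtx[2]), that register is used by no
   other reload, and the insn does not set it.  Then the two loads of the
   pseudo into that register are redundant; merging them into one
   RELOAD_OTHER reload emits the copy only once, before the insn.  The
   reload numbers above are hypothetical.  */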
#ifdef SMALL_REGISTER_CLASSES

static void
merge_assigned_reloads (insn)
     rtx insn;
{
  int i, j;

  /* Scan all the reloads looking for ones that only load values and
     are not already RELOAD_OTHER and ones whose reload_reg_rtx are
     assigned and not modified by INSN.  */

  for (i = 0; i < n_reloads; i++)
    {
      if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
	  || reload_out[i] != 0 || reload_reg_rtx[i] == 0
	  || reg_set_p (reload_reg_rtx[i], insn))
	continue;

      /* Look at all other reloads.  Ensure that the only use of this
	 reload_reg_rtx is in a reload that just loads the same value
	 as we do.  Note that any secondary reloads must be of the identical
	 class since the values, modes, and result registers are the
	 same, so we need not do anything with any secondary reloads.  */
      for (j = 0; j < n_reloads; j++)
	{
	  if (i == j || reload_reg_rtx[j] == 0
	      || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
					    reload_reg_rtx[i]))
	    continue;

	  /* If the reload regs aren't exactly the same (e.g., different modes)
	     or if the values are different, we can't merge anything with this
	     reload register.  */

	  if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
	      || reload_out[j] != 0 || reload_in[j] == 0
	      || ! rtx_equal_p (reload_in[i], reload_in[j]))
	    break;
	}
      /* If all is OK, merge the reloads.  Only set this to RELOAD_OTHER if
	 we, in fact, found any matching reloads.  */

      if (j == n_reloads)
	{
	  for (j = 0; j < n_reloads; j++)
	    if (i != j && reload_reg_rtx[j] != 0
		&& rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j]))
	      {
		reload_when_needed[i] = RELOAD_OTHER;
		reload_in[j] = 0;
		transfer_replacements (i, j);
	      }

	  /* If this is now RELOAD_OTHER, look for any reloads that load
	     parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
	     if they were for inputs, RELOAD_OTHER for outputs.  Note that
	     this test is equivalent to looking for reloads for this operand
	     number.  */

	  if (reload_when_needed[i] == RELOAD_OTHER)
	    for (j = 0; j < n_reloads; j++)
	      if (reload_in[j] != 0
		  && reload_when_needed[i] != RELOAD_OTHER
		  && reg_overlap_mentioned_for_reload_p (reload_in[j],
							 reload_in[i]))
		reload_when_needed[j]
		  = reload_when_needed[i] == RELOAD_FOR_INPUT_ADDRESS
		    ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER;
	}
    }
}
#endif /* SMALL_REGISTER_CLASSES */
/* Output insns to reload values in and out of the chosen reload regs.  */

static void
emit_reload_insns (insn)
     rtx insn;
{
  register int j;
  rtx input_reload_insns[MAX_RECOG_OPERANDS];
  rtx other_input_address_reload_insns = 0;
  rtx other_input_reload_insns = 0;
  rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
  rtx output_reload_insns[MAX_RECOG_OPERANDS];
  rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
  rtx operand_reload_insns = 0;
  rtx other_operand_reload_insns = 0;
  rtx other_output_reload_insns = 0;
  rtx following_insn = NEXT_INSN (insn);
  rtx before_insn = insn;

  /* Values to be put in spill_reg_store are put here first.  */
  rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];

  for (j = 0; j < reload_n_operands; j++)
    input_reload_insns[j] = input_address_reload_insns[j]
      = output_reload_insns[j] = output_address_reload_insns[j] = 0;
  /* Now output the instructions to copy the data into and out of the
     reload registers.  Do these in the order that the reloads were reported,
     since reloads of base and index registers precede reloads of operands
     and the operands may need the base and index registers reloaded.  */

  for (j = 0; j < n_reloads; j++)
    {
      register rtx old;
      rtx oldequiv_reg = 0;
      rtx this_reload_insn = 0;

      if (reload_spill_index[j] >= 0)
	new_spill_reg_store[reload_spill_index[j]] = 0;

      old = reload_in[j];
      if (old != 0 && ! reload_inherited[j]
	  && ! rtx_equal_p (reload_reg_rtx[j], old)
	  && reload_reg_rtx[j] != 0)
	{
	  register rtx reloadreg = reload_reg_rtx[j];
	  rtx oldequiv = 0;
	  enum machine_mode mode;
	  rtx *where;
	  int special = 0;
	  /* Determine the mode to reload in.
	     This is very tricky because we have three to choose from.
	     There is the mode the insn operand wants (reload_inmode[J]).
	     There is the mode of the reload register RELOADREG.
	     There is the intrinsic mode of the operand, which we could find
	     by stripping some SUBREGs.
	     It turns out that RELOADREG's mode is irrelevant:
	     we can change that arbitrarily.

	     Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
	     then the reload reg may not support QImode moves, so use SImode.
	     If foo is in memory due to spilling a pseudo reg, this is safe,
	     because the QImode value is in the least significant part of a
	     slot big enough for a SImode.  If foo is some other sort of
	     memory reference, then it is impossible to reload this case,
	     so previous passes had better make sure this never happens.

	     Then consider a one-word union which has SImode and one of its
	     members is a float, being fetched as (SUBREG:SF union:SI).
	     We must fetch that as SFmode because we could be loading into
	     a float-only register.  In this case OLD's mode is correct.

	     Consider an immediate integer: it has VOIDmode.  Here we need
	     to get a mode from something else.

	     In some cases, there is a fourth mode, the operand's
	     containing mode.  If the insn specifies a containing mode for
	     this operand, it overrides all others.

	     I am not sure whether the algorithm here is always right,
	     but it does the right things in those cases.  */
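
	  /* Worked illustration (exposition only): for
	     (subreg:SI (reg:QI 70) 0) used where SImode is wanted, the
	     chosen mode is SImode; for (subreg:SF (reg:SI 71) 0) fetched
	     into a float-only register, the chosen mode is SFmode (OLD's
	     own mode); for (const_int 5), whose mode is VOIDmode, the mode
	     falls back to reload_inmode[j] below.  Register numbers here
	     are hypothetical.  */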
	  mode = GET_MODE (old);
	  if (mode == VOIDmode)
	    mode = reload_inmode[j];
#ifdef SECONDARY_INPUT_RELOAD_CLASS
	  /* If we need a secondary register for this operation, see if
	     the value is already in a register in that class.  Don't
	     do this if the secondary register will be used as a scratch
	     register.  */

	  if (reload_secondary_in_reload[j] >= 0
	      && reload_secondary_in_icode[j] == CODE_FOR_nothing
	      && optimize)
	    oldequiv
	      = find_equiv_reg (old, insn,
				reload_reg_class[reload_secondary_in_reload[j]],
				-1, NULL_PTR, 0, mode);
#endif

	  /* If reloading from memory, see if there is a register
	     that already holds the same value.  If so, reload from there.
	     We can pass 0 as the reload_reg_p argument because
	     any other reload has either already been emitted,
	     in which case find_equiv_reg will see the reload-insn,
	     or has yet to be emitted, in which case it doesn't matter
	     because we will use this equiv reg right away.  */
	  if (oldequiv == 0 && optimize
	      && (GET_CODE (old) == MEM
		  || (GET_CODE (old) == REG
		      && REGNO (old) >= FIRST_PSEUDO_REGISTER
		      && reg_renumber[REGNO (old)] < 0)))
	    oldequiv = find_equiv_reg (old, insn, ALL_REGS,
				       -1, NULL_PTR, 0, mode);
	  if (oldequiv)
	    {
	      int regno = true_regnum (oldequiv);

	      /* If OLDEQUIV is a spill register, don't use it for this
		 if any other reload needs it at an earlier stage of this insn
		 or at this stage.  */
	      if (spill_reg_order[regno] >= 0
		  && (! reload_reg_free_p (regno, reload_opnum[j],
					   reload_when_needed[j])
		      || ! reload_reg_free_before_p (regno, reload_opnum[j],
						     reload_when_needed[j])))
		oldequiv = 0;

	      /* If OLDEQUIV is not a spill register,
		 don't use it if any other reload wants it.  */
	      if (spill_reg_order[regno] < 0)
		{
		  int k;

		  for (k = 0; k < n_reloads; k++)
		    if (reload_reg_rtx[k] != 0 && k != j
			&& reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
							       oldequiv))
		      {
			oldequiv = 0;
			break;
		      }
		}

	      /* If it is no cheaper to copy from OLDEQUIV into the
		 reload register than it would be to move from memory,
		 don't use it.  Likewise, if we need a secondary register
		 or memory.  */

	      if (oldequiv != 0
		  && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
		       && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
					       reload_reg_class[j])
			   >= MEMORY_MOVE_COST (mode)))
#ifdef SECONDARY_INPUT_RELOAD_CLASS
		      || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
							mode, oldequiv)
			  != NO_REGS)
#endif
#ifdef SECONDARY_MEMORY_NEEDED
		      || SECONDARY_MEMORY_NEEDED (reload_reg_class[j],
						  REGNO_REG_CLASS (regno),
						  mode)
#endif
		      ))
		oldequiv = 0;
	    }

	  if (oldequiv == 0)
	    oldequiv = old;
	  else if (GET_CODE (oldequiv) == REG)
	    oldequiv_reg = oldequiv;
	  else if (GET_CODE (oldequiv) == SUBREG)
	    oldequiv_reg = SUBREG_REG (oldequiv);
	  /* If we are reloading from a register that was recently stored in
	     with an output-reload, see if we can prove there was
	     actually no need to store the old value in it.  */

	  if (optimize && GET_CODE (oldequiv) == REG
	      && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
	      && spill_reg_order[REGNO (oldequiv)] >= 0
	      && spill_reg_store[spill_reg_order[REGNO (oldequiv)]] != 0
	      && find_reg_note (insn, REG_DEAD, reload_in[j])
	      /* This is unsafe if operand occurs more than once in current
		 insn.  Perhaps some occurrences weren't reloaded.  */
	      && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
	    delete_output_reload
	      (insn, j, spill_reg_store[spill_reg_order[REGNO (oldequiv)]]);

	  /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
	     then load RELOADREG from OLDEQUIV.  Note that we cannot use
	     gen_lowpart_common since it can do the wrong thing when
	     RELOADREG has a multi-word mode.  Note that RELOADREG
	     must always be a REG here.  */
	  if (GET_MODE (reloadreg) != mode)
	    reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
	  while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
	    oldequiv = SUBREG_REG (oldequiv);
	  if (GET_MODE (oldequiv) != VOIDmode
	      && mode != GET_MODE (oldequiv))
	    oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
	  /* Switch to the right place to emit the reload insns.  */
	  switch (reload_when_needed[j])
	    {
	    case RELOAD_OTHER:
	      where = &other_input_reload_insns;
	      break;
	    case RELOAD_FOR_INPUT:
	      where = &input_reload_insns[reload_opnum[j]];
	      break;
	    case RELOAD_FOR_INPUT_ADDRESS:
	      where = &input_address_reload_insns[reload_opnum[j]];
	      break;
	    case RELOAD_FOR_OUTPUT_ADDRESS:
	      where = &output_address_reload_insns[reload_opnum[j]];
	      break;
	    case RELOAD_FOR_OPERAND_ADDRESS:
	      where = &operand_reload_insns;
	      break;
	    case RELOAD_FOR_OPADDR_ADDR:
	      where = &other_operand_reload_insns;
	      break;
	    case RELOAD_FOR_OTHER_ADDRESS:
	      where = &other_input_address_reload_insns;
	      break;
	    default:
	      abort ();
	    }

	  push_to_sequence (*where);
	  /* Auto-increment addresses must be reloaded in a special way.  */
	  if (GET_CODE (oldequiv) == POST_INC
	      || GET_CODE (oldequiv) == POST_DEC
	      || GET_CODE (oldequiv) == PRE_INC
	      || GET_CODE (oldequiv) == PRE_DEC)
	    {
	      /* We are not going to bother supporting the case where an
		 incremented register can't be copied directly from
		 OLDEQUIV since this seems highly unlikely.  */
	      if (reload_secondary_in_reload[j] >= 0)
		abort ();
	      /* Prevent normal processing of this reload.  */
	      special = 1;
	      /* Output a special code sequence for this case.  */
	      inc_for_reload (reloadreg, oldequiv, reload_inc[j]);
	    }

	  /* If we are reloading a pseudo-register that was set by the previous
	     insn, see if we can get rid of that pseudo-register entirely
	     by redirecting the previous insn into our reload register.  */
	  else if (optimize && GET_CODE (old) == REG
		   && REGNO (old) >= FIRST_PSEUDO_REGISTER
		   && dead_or_set_p (insn, old)
		   /* This is unsafe if some other reload
		      uses the same reg first.  */
		   && reload_reg_free_before_p (REGNO (reloadreg),
						reload_opnum[j],
						reload_when_needed[j]))
	    {
	      rtx temp = PREV_INSN (insn);
	      while (temp && GET_CODE (temp) == NOTE)
		temp = PREV_INSN (temp);
	      if (temp
		  && GET_CODE (temp) == INSN
		  && GET_CODE (PATTERN (temp)) == SET
		  && SET_DEST (PATTERN (temp)) == old
		  /* Make sure we can access insn_operand_constraint.  */
		  && asm_noperands (PATTERN (temp)) < 0
		  /* This is unsafe if prev insn rejects our reload reg.  */
		  && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
					       reloadreg)
		  /* This is unsafe if operand occurs more than once in current
		     insn.  Perhaps some occurrences aren't reloaded.  */
		  && count_occurrences (PATTERN (insn), old) == 1
		  /* Don't risk splitting a matching pair of operands.  */
		  && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
		{
		  /* Store into the reload register instead of the pseudo.  */
		  SET_DEST (PATTERN (temp)) = reloadreg;
		  /* If these are the only uses of the pseudo reg,
		     pretend for GDB it lives in the reload reg we used.  */
		  if (reg_n_deaths[REGNO (old)] == 1
		      && reg_n_sets[REGNO (old)] == 1)
		    {
		      reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
		      alter_reg (REGNO (old), -1);
		    }
		  special = 1;
		}
	    }
	  /* We can't do that, so output an insn to load RELOADREG.  */

	  if (! special)
	    {
#ifdef SECONDARY_INPUT_RELOAD_CLASS
	      rtx second_reload_reg = 0;
	      enum insn_code icode;

	      /* If we have a secondary reload, pick up the secondary register
		 and icode, if any.  If OLDEQUIV and OLD are different or
		 if this is an in-out reload, recompute whether or not we
		 still need a secondary register and what the icode should
		 be.  If we still need a secondary register and the class or
		 icode is different, go back to reloading from OLD if using
		 OLDEQUIV means that we got the wrong type of register.  We
		 cannot have different class or icode due to an in-out reload
		 because we don't make such reloads when both the input and
		 output need secondary reload registers.  */
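
	      /* Exposition-only example: if reload J needs its input in a
		 floating-point class but OLDEQUIV turned out to live in a
		 general register, SECONDARY_INPUT_RELOAD_CLASS may now
		 report a different class (or NO_REGS) than it did for OLD;
		 in that case we fall back to reloading from OLD rather than
		 risk a secondary register of the wrong class.  The class
		 names here are hypothetical and target-dependent.  */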
	      if (reload_secondary_in_reload[j] >= 0)
		{
		  int secondary_reload = reload_secondary_in_reload[j];
		  rtx real_oldequiv = oldequiv;
		  rtx real_old = old;

		  /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
		     and similarly for OLD.
		     See comments in get_secondary_reload in reload.c.  */
		  if (GET_CODE (oldequiv) == REG
		      && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
		      && reg_equiv_mem[REGNO (oldequiv)] != 0)
		    real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];

		  if (GET_CODE (old) == REG
		      && REGNO (old) >= FIRST_PSEUDO_REGISTER
		      && reg_equiv_mem[REGNO (old)] != 0)
		    real_old = reg_equiv_mem[REGNO (old)];

		  second_reload_reg = reload_reg_rtx[secondary_reload];
		  icode = reload_secondary_in_icode[j];

		  if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
		      || (reload_in[j] != 0 && reload_out[j] != 0))
		    {
		      enum reg_class new_class
			= SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
							mode, real_oldequiv);

		      if (new_class == NO_REGS)
			second_reload_reg = 0;
		      else
			{
			  enum insn_code new_icode;
			  enum machine_mode new_mode;

			  if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
						   REGNO (second_reload_reg)))
			    oldequiv = old, real_oldequiv = real_old;
			  else
			    {
			      new_icode = reload_in_optab[(int) mode];
			      if (new_icode != CODE_FOR_nothing
				  && ((insn_operand_predicate[(int) new_icode][0]
				       && ! ((*insn_operand_predicate[(int) new_icode][0])
					     (reloadreg, mode)))
				      || (insn_operand_predicate[(int) new_icode][1]
					  && ! ((*insn_operand_predicate[(int) new_icode][1])
						(real_oldequiv, mode)))))
				new_icode = CODE_FOR_nothing;

			      if (new_icode == CODE_FOR_nothing)
				new_mode = mode;
			      else
				new_mode = insn_operand_mode[(int) new_icode][2];

			      if (GET_MODE (second_reload_reg) != new_mode)
				{
				  if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
							   new_mode))
				    oldequiv = old, real_oldequiv = real_old;
				  else
				    second_reload_reg
				      = gen_rtx (REG, new_mode,
						 REGNO (second_reload_reg));
				}
			    }
			}
		    }
		  /* If we still need a secondary reload register, check
		     to see if it is being used as a scratch or intermediate
		     register and generate code appropriately.  If we need
		     a scratch register, use REAL_OLDEQUIV since the form of
		     the insn may depend on the actual address if it is
		     a MEM.  */

		  if (second_reload_reg)
		    {
		      if (icode != CODE_FOR_nothing)
			{
			  emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
						      second_reload_reg));
			  special = 1;
			}
		      else
			{
			  /* See if we need a scratch register to load the
			     intermediate register (a tertiary reload).  */
			  enum insn_code tertiary_icode
			    = reload_secondary_in_icode[secondary_reload];

			  if (tertiary_icode != CODE_FOR_nothing)
			    {
			      rtx third_reload_reg
				= reload_reg_rtx[reload_secondary_in_reload[secondary_reload]];

			      emit_insn ((GEN_FCN (tertiary_icode)
					  (second_reload_reg, real_oldequiv,
					   third_reload_reg)));
			    }
			  else
			    gen_reload (second_reload_reg, oldequiv,
					reload_opnum[j],
					reload_when_needed[j]);

			  oldequiv = second_reload_reg;
			}
		    }
		}
#endif

	      if (! special && ! rtx_equal_p (reloadreg, oldequiv))
		gen_reload (reloadreg, oldequiv, reload_opnum[j],
			    reload_when_needed[j]);
#if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
	      /* We may have to make a REG_DEAD note for the secondary reload
		 register in the insns we just made.  Find the last insn that
		 mentioned the register.  */
	      if (! special && second_reload_reg
		  && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
		{
		  rtx prev;

		  for (prev = get_last_insn (); prev;
		       prev = PREV_INSN (prev))
		    if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
			&& reg_overlap_mentioned_for_reload_p (second_reload_reg,
							       PATTERN (prev)))
		      {
			REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
						    second_reload_reg,
						    REG_NOTES (prev));
			break;
		      }
		}
#endif
	    }

	  this_reload_insn = get_last_insn ();
	  /* End this sequence.  */
	  *where = get_insns ();
	  end_sequence ();
	}
      /* Add a note saying the input reload reg
	 dies in this insn, if anyone cares.  */
#ifdef PRESERVE_DEATH_INFO_REGNO_P
      if (old != 0
	  && reload_reg_rtx[j] != old
	  && reload_reg_rtx[j] != 0
	  && reload_out[j] == 0
	  && ! reload_inherited[j]
	  && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
	{
	  register rtx reloadreg = reload_reg_rtx[j];

#if 0
	  /* We can't abort here because we need to support this for sched.c.
	     It's not terrible to miss a REG_DEAD note, but we should try
	     to figure out how to do this correctly.  */
	  /* The code below is incorrect for address-only reloads.  */
	  if (reload_when_needed[j] != RELOAD_OTHER
	      && reload_when_needed[j] != RELOAD_FOR_INPUT)
	    abort ();
#endif

	  /* Add a death note to this insn, for an input reload.  */

	  if ((reload_when_needed[j] == RELOAD_OTHER
	       || reload_when_needed[j] == RELOAD_FOR_INPUT)
	      && ! dead_or_set_p (insn, reloadreg))
	    REG_NOTES (insn)
	      = gen_rtx (EXPR_LIST, REG_DEAD,
			 reloadreg, REG_NOTES (insn));
	}
      /* When we inherit a reload, the last marked death of the reload reg
	 may no longer really be a death.  */
      if (reload_reg_rtx[j] != 0
	  && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
	  && reload_inherited[j])
	{
	  /* Handle inheriting an output reload.
	     Remove the death note from the output reload insn.  */
	  if (reload_spill_index[j] >= 0
	      && GET_CODE (reload_in[j]) == REG
	      && spill_reg_store[reload_spill_index[j]] != 0
	      && find_regno_note (spill_reg_store[reload_spill_index[j]],
				  REG_DEAD, REGNO (reload_reg_rtx[j])))
	    remove_death (REGNO (reload_reg_rtx[j]),
			  spill_reg_store[reload_spill_index[j]]);
	  /* Likewise for input reloads that were inherited.  */
	  else if (reload_spill_index[j] >= 0
		   && GET_CODE (reload_in[j]) == REG
		   && spill_reg_store[reload_spill_index[j]] == 0
		   && reload_inheritance_insn[j] != 0
		   && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
				       REGNO (reload_reg_rtx[j])))
	    remove_death (REGNO (reload_reg_rtx[j]),
			  reload_inheritance_insn[j]);
	  else
	    {
	      rtx prev;

	      /* We got this register from find_equiv_reg.
		 Search back for its last death note and get rid of it.
		 But don't search back too far.
		 Don't go past a place where this reg is set,
		 since a death note before that remains valid.  */
	      for (prev = PREV_INSN (insn);
		   prev && GET_CODE (prev) != CODE_LABEL;
		   prev = PREV_INSN (prev))
		if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
		    && dead_or_set_p (prev, reload_reg_rtx[j]))
		  {
		    if (find_regno_note (prev, REG_DEAD,
					 REGNO (reload_reg_rtx[j])))
		      remove_death (REGNO (reload_reg_rtx[j]), prev);
		    break;
		  }
	    }
	}
      /* We might have used find_equiv_reg above to choose an alternate
	 place from which to reload.  If so, and it died, we need to remove
	 that death and move it to one of the insns we just made.  */

      if (oldequiv_reg != 0
	  && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
	{
	  rtx prev, prev1;

	  for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
	       prev = PREV_INSN (prev))
	    if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
		&& dead_or_set_p (prev, oldequiv_reg))
	      {
		if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
		  {
		    for (prev1 = this_reload_insn;
			 prev1; prev1 = PREV_INSN (prev1))
		      if (GET_RTX_CLASS (GET_CODE (prev1)) == 'i'
			  && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
								 PATTERN (prev1)))
			{
			  REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
						       oldequiv_reg,
						       REG_NOTES (prev1));
			  break;
			}
		    remove_death (REGNO (oldequiv_reg), prev);
		  }
		break;
	      }
	}
#endif
      /* If we are reloading a register that was recently stored in with an
	 output-reload, see if we can prove there was
	 actually no need to store the old value in it.  */

      if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
	  && reload_in[j] != 0
	  && GET_CODE (reload_in[j]) == REG
#if 0
	  /* There doesn't seem to be any reason to restrict this to pseudos
	     and doing so loses in the case where we are copying from a
	     register of the wrong class.  */
	  && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
#endif
	  && spill_reg_store[reload_spill_index[j]] != 0
	  /* This is unsafe if some other reload uses the same reg first.  */
	  && reload_reg_free_before_p (spill_regs[reload_spill_index[j]],
				       reload_opnum[j], reload_when_needed[j])
	  && dead_or_set_p (insn, reload_in[j])
	  /* This is unsafe if operand occurs more than once in current
	     insn.  Perhaps some occurrences weren't reloaded.  */
	  && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
	delete_output_reload (insn, j,
			      spill_reg_store[reload_spill_index[j]]);
      /* Input-reloading is done.  Now do output-reloading,
	 storing the value from the reload-register after the main insn
	 if reload_out[j] is nonzero.

	 ??? At some point we need to support handling output reloads of
	 JUMP_INSNs or insns that set cc0.  */
      old = reload_out[j];
      if (old != 0
	  && reload_reg_rtx[j] != old
	  && reload_reg_rtx[j] != 0)
	{
	  register rtx reloadreg = reload_reg_rtx[j];
	  register rtx second_reloadreg = 0;
	  rtx note, p;
	  enum machine_mode mode;
	  int special = 0;

	  /* An output operand that dies right away does need a reload,
	     but need not be copied from it.  Show the new location in the
	     REG_UNUSED note.  */
	  if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
	      && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
	    {
	      XEXP (note, 0) = reload_reg_rtx[j];
	      continue;
	    }
	  /* Likewise for a SUBREG of an operand that dies.  */
	  else if (GET_CODE (old) == SUBREG
		   && GET_CODE (SUBREG_REG (old)) == REG
		   && 0 != (note = find_reg_note (insn, REG_UNUSED,
						  SUBREG_REG (old))))
	    {
	      XEXP (note, 0) = gen_lowpart_common (GET_MODE (old),
						   reload_reg_rtx[j]);
	      continue;
	    }
	  else if (GET_CODE (old) == SCRATCH)
	    /* If we aren't optimizing, there won't be a REG_UNUSED note,
	       but we don't want to make an output reload.  */
	    continue;

	  /* Strip off of OLD any size-increasing SUBREGs such as
	     (SUBREG:SI foo:QI 0).  */

	  while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
		 && (GET_MODE_SIZE (GET_MODE (old))
		     > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
	    old = SUBREG_REG (old);
6327 if (GET_CODE (insn
) == JUMP_INSN
)
6330 if (reload_when_needed
[j
] == RELOAD_OTHER
)
6333 push_to_sequence (output_reload_insns
[reload_opnum
[j
]]);
6335 /* Determine the mode to reload in.
6336 See comments above (for input reloading). */
6338 mode
= GET_MODE (old
);
6339 if (mode
== VOIDmode
)
6341 /* VOIDmode should never happen for an output. */
6342 if (asm_noperands (PATTERN (insn
)) < 0)
6343 /* It's the compiler's fault. */
6344 fatal_insn ("VOIDmode on an output", insn
);
6345 error_for_asm (insn
, "output operand is constant in `asm'");
6346 /* Prevent crash--use something we know is valid. */
6348 old
= gen_rtx (REG
, mode
, REGNO (reloadreg
));
6351 if (GET_MODE (reloadreg
) != mode
)
6352 reloadreg
= gen_rtx (REG
, mode
, REGNO (reloadreg
));
#ifdef SECONDARY_OUTPUT_RELOAD_CLASS

	  /* If we need two reload regs, set RELOADREG to the intermediate
	     one, since it will be stored into OLD.  We might need a secondary
	     register only for an input reload, so check again here.  */

	  if (reload_secondary_out_reload[j] >= 0)
	    {
	      rtx real_old = old;

	      if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
		  && reg_equiv_mem[REGNO (old)] != 0)
		real_old = reg_equiv_mem[REGNO (old)];

	      if ((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
						  mode, real_old)
		   != NO_REGS))
		{
		  second_reloadreg = reloadreg;
		  reloadreg = reload_reg_rtx[reload_secondary_out_reload[j]];

		  /* See if RELOADREG is to be used as a scratch register
		     or as an intermediate register.  */
		  if (reload_secondary_out_icode[j] != CODE_FOR_nothing)
		    {
		      emit_insn ((GEN_FCN (reload_secondary_out_icode[j])
				  (real_old, second_reloadreg, reloadreg)));
		      special = 1;
		    }
		  else
		    {
		      /* See if we need both a scratch and intermediate reload
			 register.  */
		      int secondary_reload = reload_secondary_out_reload[j];
		      enum insn_code tertiary_icode
			= reload_secondary_out_icode[secondary_reload];
		      rtx tem;

		      if (GET_MODE (reloadreg) != mode)
			reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));

		      if (tertiary_icode != CODE_FOR_nothing)
			{
			  rtx third_reloadreg
			    = reload_reg_rtx[reload_secondary_out_reload[secondary_reload]];

			  /* Copy primary reload reg to secondary reload reg.
			     (Note that these have been swapped above, then
			     secondary reload reg to OLD using our insn.)  */

			  /* If REAL_OLD is a paradoxical SUBREG, remove it
			     and try to put the opposite SUBREG on
			     RELOADREG.  */
			  if (GET_CODE (real_old) == SUBREG
			      && (GET_MODE_SIZE (GET_MODE (real_old))
				  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
			      && 0 != (tem = gen_lowpart_common
				       (GET_MODE (SUBREG_REG (real_old)),
					reloadreg)))
			    real_old = SUBREG_REG (real_old), reloadreg = tem;

			  gen_reload (reloadreg, second_reloadreg,
				      reload_opnum[j], reload_when_needed[j]);
			  emit_insn ((GEN_FCN (tertiary_icode)
				      (real_old, reloadreg, third_reloadreg)));
			  special = 1;
			}
		      else
			/* Copy between the reload regs here and then to
			   OLD below.  */
			gen_reload (reloadreg, second_reloadreg,
				    reload_opnum[j], reload_when_needed[j]);
		    }
		}
	    }
#endif
	  /* Output the last reload insn.  */
	  if (! special)
	    gen_reload (old, reloadreg, reload_opnum[j],
			reload_when_needed[j]);
6440 /* If final will look at death notes for this reg,
6441 put one on the last output-reload insn to use it. Similarly
6442 for any secondary register. */
6443 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg
)))
6444 for (p
= get_last_insn (); p
; p
= PREV_INSN (p
))
6445 if (GET_RTX_CLASS (GET_CODE (p
)) == 'i'
6446 && reg_overlap_mentioned_for_reload_p (reloadreg
,
6448 REG_NOTES (p
) = gen_rtx (EXPR_LIST
, REG_DEAD
,
6449 reloadreg
, REG_NOTES (p
));
6451 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6453 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg
)))
6454 for (p
= get_last_insn (); p
; p
= PREV_INSN (p
))
6455 if (GET_RTX_CLASS (GET_CODE (p
)) == 'i'
6456 && reg_overlap_mentioned_for_reload_p (second_reloadreg
,
6458 REG_NOTES (p
) = gen_rtx (EXPR_LIST
, REG_DEAD
,
6459 second_reloadreg
, REG_NOTES (p
));
	  /* Look at all insns we emitted, just to be safe.  */
	  for (p = get_insns (); p; p = NEXT_INSN (p))
	    if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
	      {
		/* If this output reload doesn't come from a spill reg,
		   clear any memory of reloaded copies of the pseudo reg.
		   If this output reload comes from a spill reg,
		   reg_has_output_reload will make this do nothing.  */
		note_stores (PATTERN (p), forget_old_reloads_1);

		if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p))
		    && reload_spill_index[j] >= 0)
		  new_spill_reg_store[reload_spill_index[j]] = p;
	      }

	  if (reload_when_needed[j] == RELOAD_OTHER)
	    {
	      if (other_output_reload_insns)
		emit_insns (other_output_reload_insns);
	      other_output_reload_insns = get_insns ();
	    }
	  else
	    output_reload_insns[reload_opnum[j]] = get_insns ();

	  end_sequence ();
	}
    }
  /* Now write all the insns we made for reloads in the order expected by
     the allocation functions.  Prior to the insn being reloaded, we write
     the following reloads:

     RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.

     RELOAD_OTHER reloads, output in ascending order by reload number.

     For each operand, any RELOAD_FOR_INPUT_ADDRESS reloads followed by
     the RELOAD_FOR_INPUT reload for the operand.

     RELOAD_FOR_OPADDR_ADDRS reloads.

     RELOAD_FOR_OPERAND_ADDRESS reloads.

     After the insn being reloaded, we write the following:

     For each operand, any RELOAD_FOR_OUTPUT_ADDRESS reload followed by
     the RELOAD_FOR_OUTPUT reload for that operand.

     Any RELOAD_OTHER output reloads, output in descending order by
     reload number.  */
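
  /* Exposition-only illustration of the resulting layout for a hypothetical
     two-operand insn I with one reloaded input (operand 0) and one reloaded
     output (operand 1):

	 RELOAD_FOR_OTHER_ADDRESS insns
	 RELOAD_OTHER insns
	 RELOAD_FOR_INPUT_ADDRESS insns for operand 0
	 RELOAD_FOR_INPUT insn for operand 0
	 RELOAD_FOR_OPADDR_ADDR insns
	 RELOAD_FOR_OPERAND_ADDRESS insns
	 I (the insn being reloaded)
	 RELOAD_FOR_OUTPUT_ADDRESS insns for operand 1
	 RELOAD_FOR_OUTPUT insn for operand 1
	 RELOAD_OTHER output insns  */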
  emit_insns_before (other_input_address_reload_insns, before_insn);
  emit_insns_before (other_input_reload_insns, before_insn);

  for (j = 0; j < reload_n_operands; j++)
    {
      emit_insns_before (input_address_reload_insns[j], before_insn);
      emit_insns_before (input_reload_insns[j], before_insn);
    }

  emit_insns_before (other_operand_reload_insns, before_insn);
  emit_insns_before (operand_reload_insns, before_insn);

  for (j = 0; j < reload_n_operands; j++)
    {
      emit_insns_before (output_address_reload_insns[j], following_insn);
      emit_insns_before (output_reload_insns[j], following_insn);
    }

  emit_insns_before (other_output_reload_insns, following_insn);
  /* Move death notes from INSN
     to output-operand-address and output reload insns.  */
#ifdef PRESERVE_DEATH_INFO_REGNO_P
  {
    rtx insn1;

    /* Loop over those insns, last ones first.  */
    for (insn1 = PREV_INSN (following_insn); insn1 != insn;
	 insn1 = PREV_INSN (insn1))
      if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
	{
	  rtx source = SET_SRC (PATTERN (insn1));
	  rtx dest = SET_DEST (PATTERN (insn1));

	  /* The note we will examine next.  */
	  rtx reg_notes = REG_NOTES (insn);
	  /* The place that pointed to this note.  */
	  rtx *prev_reg_note = &REG_NOTES (insn);

	  /* If the note is for something used in the source of this
	     reload insn, or in the output address, move the note.  */
	  while (reg_notes)
	    {
	      rtx next_reg_notes = XEXP (reg_notes, 1);
	      if (REG_NOTE_KIND (reg_notes) == REG_DEAD
		  && GET_CODE (XEXP (reg_notes, 0)) == REG
		  && ((GET_CODE (dest) != REG
		       && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
							      dest))
		      || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
							     source)))
		{
		  *prev_reg_note = next_reg_notes;
		  XEXP (reg_notes, 1) = REG_NOTES (insn1);
		  REG_NOTES (insn1) = reg_notes;
		}
	      else
		prev_reg_note = &XEXP (reg_notes, 1);

	      reg_notes = next_reg_notes;
	    }
	}
  }
#endif
  /* For all the spill regs newly reloaded in this instruction,
     record what they were reloaded from, so subsequent instructions
     can inherit the reloads.

     Update spill_reg_store for the reloads of this insn.
     Copy the elements that were updated in the loop above.  */

  for (j = 0; j < n_reloads; j++)
    {
      register int r = reload_order[j];
      register int i = reload_spill_index[r];

      /* I is nonneg if this reload used one of the spill regs.
	 If reload_reg_rtx[r] is 0, this is an optional reload
	 that we opted to ignore.

	 Also ignore reloads that don't reach the end of the insn,
	 since we will eventually see the one that does.  */

      if (i >= 0 && reload_reg_rtx[r] != 0
	  && reload_reg_reaches_end_p (spill_regs[i], reload_opnum[r],
				       reload_when_needed[r]))
	{
	  /* First, clear out memory of what used to be in this spill reg.
	     If consecutive registers are used, clear them all.  */

	  int nr
	    = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
	  int k;
= 0; k
< nr
; k
++)
6608 reg_reloaded_contents
[spill_reg_order
[spill_regs
[i
] + k
]] = -1;
6609 reg_reloaded_insn
[spill_reg_order
[spill_regs
[i
] + k
]] = 0;
6612 /* Maybe the spill reg contains a copy of reload_out. */
6613 if (reload_out
[r
] != 0 && GET_CODE (reload_out
[r
]) == REG
)
6615 register int nregno
= REGNO (reload_out
[r
]);
6616 int nnr
= (nregno
>= FIRST_PSEUDO_REGISTER
? 1
6617 : HARD_REGNO_NREGS (nregno
,
6618 GET_MODE (reload_reg_rtx
[r
])));
6620 spill_reg_store
[i
] = new_spill_reg_store
[i
];
6621 reg_last_reload_reg
[nregno
] = reload_reg_rtx
[r
];
6623 /* If NREGNO is a hard register, it may occupy more than
6624 one register. If it does, say what is in the
6625 rest of the registers assuming that both registers
6626 agree on how many words the object takes. If not,
6627 invalidate the subsequent registers. */
6629 if (nregno
< FIRST_PSEUDO_REGISTER
)
6630 for (k
= 1; k
< nnr
; k
++)
6631 reg_last_reload_reg
[nregno
+ k
]
6632 = (nr
== nnr
? gen_rtx (REG
,
6633 reg_raw_mode
[REGNO (reload_reg_rtx
[r
]) + k
],
6634 REGNO (reload_reg_rtx
[r
]) + k
)
6637 /* Now do the inverse operation. */
6638 for (k
= 0; k
< nr
; k
++)
6640 reg_reloaded_contents
[spill_reg_order
[spill_regs
[i
] + k
]]
6641 = (nregno
>= FIRST_PSEUDO_REGISTER
|| nr
!= nnr
? nregno
6643 reg_reloaded_insn
[spill_reg_order
[spill_regs
[i
] + k
]] = insn
;
	  /* Maybe the spill reg contains a copy of reload_in.  Only do
	     something if there will not be an output reload for
	     the register being reloaded.  */
	  else if (reload_out[r] == 0
		   && reload_in[r] != 0
		   && ((GET_CODE (reload_in[r]) == REG
			&& ! reg_has_output_reload[REGNO (reload_in[r])])
		       || (GET_CODE (reload_in_reg[r]) == REG
			   && ! reg_has_output_reload[REGNO (reload_in_reg[r])])))
	    {
	      register int nregno;
	      int nnr;

	      if (GET_CODE (reload_in[r]) == REG)
		nregno = REGNO (reload_in[r]);
	      else
		nregno = REGNO (reload_in_reg[r]);

	      nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
		     : HARD_REGNO_NREGS (nregno,
					 GET_MODE (reload_reg_rtx[r])));

	      reg_last_reload_reg[nregno] = reload_reg_rtx[r];

	      if (nregno < FIRST_PSEUDO_REGISTER)
		for (k = 1; k < nnr; k++)
		  reg_last_reload_reg[nregno + k]
		    = (nr == nnr
		       ? gen_rtx (REG,
				  reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
				  REGNO (reload_reg_rtx[r]) + k)
		       : 0);

	      /* Unless we inherited this reload, show we haven't
		 recently done a store.  */
	      if (! reload_inherited[r])
		spill_reg_store[i] = 0;

	      for (k = 0; k < nr; k++)
		{
		  reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
		    = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
		       : nregno + k);
		  reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
		    = insn;
		}
	    }
	}
      /* The following if-statement was #if 0'd in 1.34 (or before...).
	 It's reenabled in 1.35 because supposedly nothing else
	 deals with this problem.  */

      /* If a register gets output-reloaded from a non-spill register,
	 that invalidates any previous reloaded copy of it.
	 But forget_old_reloads_1 won't get to see it, because
	 it thinks only about the original insn.  So invalidate it here.  */
      if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
	{
	  register int nregno = REGNO (reload_out[r]);
	  if (nregno >= FIRST_PSEUDO_REGISTER)
	    reg_last_reload_reg[nregno] = 0;
	  else
	    {
	      int num_regs = HARD_REGNO_NREGS (nregno, GET_MODE (reload_out[r]));

	      while (num_regs-- > 0)
		reg_last_reload_reg[nregno + num_regs] = 0;
	    }
	}
    }
}
/* Emit code to perform a reload from IN (which may be a reload register) to
   OUT (which may also be a reload register).  IN or OUT is from operand
   OPNUM with reload type TYPE.

   Returns first insn emitted.  */

static rtx
gen_reload (out, in, opnum, type)
     rtx out;
     rtx in;
     int opnum;
     enum reload_type type;
{
  rtx last = get_last_insn ();
  rtx tem;
  /* If IN is a paradoxical SUBREG, remove it and try to put the
     opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.  */
  if (GET_CODE (in) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (in))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
      && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
    in = SUBREG_REG (in), out = tem;
  else if (GET_CODE (out) == SUBREG
	   && (GET_MODE_SIZE (GET_MODE (out))
	       > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
	   && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
    out = SUBREG_REG (out), in = tem;
  /* How to do this reload can get quite tricky.  Normally, we are being
     asked to reload a simple operand, such as a MEM, a constant, or a pseudo
     register that didn't get a hard register.  In that case we can just
     call emit_move_insn.

     We can also be asked to reload a PLUS that adds a register or a MEM to
     another register, constant or MEM.  This can occur during frame pointer
     elimination and while reloading addresses.  This case is handled by
     trying to emit a single insn to perform the add.  If it is not valid,
     we use a two insn sequence.

     Finally, we could be called to handle an 'o' constraint by putting
     an address into a register.  In that case, we first try to do this
     with a named pattern of "reload_load_address".  If no such pattern
     exists, we just emit a SET insn and hope for the best (it will normally
     be valid on machines that use 'o').

     This entire process is made complex because reload will never
     process the insns we generate here and so we must ensure that
     they will fit their constraints and also by the fact that parts of
     IN might be being reloaded separately and replaced with spill registers.
     Because of this, we are, in some sense, just guessing the right approach
     here.  The one listed above seems to work.

     ??? At some point, this whole thing needs to be rethought.  */
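
  /* Exposition-only example: after frame pointer elimination a reload may
     ask for something like

	 (set (reg:SI 3) (plus:SI (reg:SI sp) (const_int 24)))

     (register numbers hypothetical).  The code below first tries that
     three-operand add as a single insn; if the target rejects it, it falls
     back to a move of one addend into the reload register followed by a
     two-operand add.  */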
  if (GET_CODE (in) == PLUS
      && (GET_CODE (XEXP (in, 0)) == REG
	  || GET_CODE (XEXP (in, 0)) == MEM)
      && (GET_CODE (XEXP (in, 1)) == REG
	  || CONSTANT_P (XEXP (in, 1))
	  || GET_CODE (XEXP (in, 1)) == MEM))
    {
      /* We need to compute the sum of a register or a MEM and another
	 register, constant, or MEM, and put it into the reload
	 register.  The best possible way of doing this is if the machine
	 has a three-operand ADD insn that accepts the required operands.

	 The simplest approach is to try to generate such an insn and see if it
	 is recognized and matches its constraints.  If so, it can be used.

	 It might be better not to actually emit the insn unless it is valid,
	 but we need to pass the insn as an operand to `recog' and
	 `insn_extract' and it is simpler to emit and then delete the insn if
	 not valid than to dummy things up.  */

      rtx op0, op1, tem, insn;
      int code;

      op0 = find_replacement (&XEXP (in, 0));
      op1 = find_replacement (&XEXP (in, 1));

      /* Since constraint checking is strict, commutativity won't be
	 checked, so we need to do that here to avoid spurious failure
	 if the add instruction is two-address and the second operand
	 of the add is the same as the reload reg, which is frequently
	 the case.  If the insn would be A = B + A, rearrange it so
	 it will be A = A + B as constrain_operands expects.  */

      if (GET_CODE (XEXP (in, 1)) == REG
	  && REGNO (out) == REGNO (XEXP (in, 1)))
	tem = op0, op0 = op1, op1 = tem;

      if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
	in = gen_rtx (PLUS, GET_MODE (in), op0, op1);

      insn = emit_insn (gen_rtx (SET, VOIDmode, out, in));
      code = recog_memoized (insn);

      if (code >= 0)
	{
	  insn_extract (insn);
	  /* We want constrain operands to treat this insn strictly in
	     its validity determination, i.e., the way it would after reload
	     has completed.  */
	  if (constrain_operands (code, 1))
	    return insn;
	}

      delete_insns_since (last);
      /* If that failed, we must use a conservative two-insn sequence.
	 Use move to copy the constant, MEM, or pseudo register to the reload
	 register since "move" will be able to handle an arbitrary operand,
	 unlike add which can't, in general.  Then add the registers.

	 If there is another way to do this for a specific machine, a
	 DEFINE_PEEPHOLE should be specified that recognizes the sequence
	 we emit below.  */

      if (CONSTANT_P (op1) || GET_CODE (op1) == MEM
	  || (GET_CODE (op1) == REG
	      && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
	tem = op0, op0 = op1, op1 = tem;

      emit_insn (gen_move_insn (out, op0));

      /* If OP0 and OP1 are the same, we can use OUT for OP1.
	 This fixes a problem on the 32K where the stack pointer cannot
	 be used as an operand of an add insn.  */

      if (rtx_equal_p (op0, op1))
	op1 = out;

      insn = emit_insn (gen_add2_insn (out, op1));

      /* If that failed, copy the address register to the reload register.
	 Then add the constant to the reload register.  */

      code = recog_memoized (insn);

      if (code >= 0)
	{
	  insn_extract (insn);
	  /* We want constrain operands to treat this insn strictly in
	     its validity determination, i.e., the way it would after reload
	     has completed.  */
	  if (constrain_operands (code, 1))
	    return insn;
	}

      delete_insns_since (last);

      emit_insn (gen_move_insn (out, op1));
      emit_insn (gen_add2_insn (out, op0));
    }
#ifdef SECONDARY_MEMORY_NEEDED
  /* If we need a memory location to do the move, do it that way.  */
  else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
	   && GET_CODE (out) == REG && REGNO (out) < FIRST_PSEUDO_REGISTER
	   && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
				       REGNO_REG_CLASS (REGNO (out)),
				       GET_MODE (out)))
    {
      /* Get the memory to use and rewrite both registers to its mode.  */
      rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);

      if (GET_MODE (loc) != GET_MODE (out))
	out = gen_rtx (REG, GET_MODE (loc), REGNO (out));

      if (GET_MODE (loc) != GET_MODE (in))
	in = gen_rtx (REG, GET_MODE (loc), REGNO (in));

      emit_insn (gen_move_insn (loc, in));
      emit_insn (gen_move_insn (out, loc));
    }
#endif

  /* If IN is a simple operand, use gen_move_insn.  */
  else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
    emit_insn (gen_move_insn (out, in));
#ifdef HAVE_reload_load_address
  else if (HAVE_reload_load_address)
    emit_insn (gen_reload_load_address (out, in));
#endif

  /* Otherwise, just write (set OUT IN) and hope for the best.  */
  else
    emit_insn (gen_rtx (SET, VOIDmode, out, in));

  /* Return the first insn emitted.
     We can not just return get_last_insn, because there may have
     been multiple instructions emitted.  Also note that gen_move_insn may
     emit more than one insn itself, so we can not assume that there is one
     insn emitted per emit_insn_before call.  */

  return last ? NEXT_INSN (last) : get_insns ();
}
/* Delete a previously made output-reload
   whose result we now believe is not needed.
   First we double-check.

   INSN is the insn now being processed.
   OUTPUT_RELOAD_INSN is the insn of the output reload.
   J is the reload-number for this insn.  */
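
/* Exposition-only example: if an earlier insn stored pseudo 70 from a
   reload register (the output reload), and the only use of pseudo 70
   between that store and the current insn is the current insn's own input
   reload from that same reload register, then the store into pseudo 70 was
   pointless and can be deleted, provided the checks below succeed.  The
   pseudo number is hypothetical.  */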
static void
delete_output_reload (insn, j, output_reload_insn)
     rtx insn;
     int j;
     rtx output_reload_insn;
{
  register rtx i1;

  /* Get the raw pseudo-register referred to.  */

  rtx reg = reload_in[j];
  while (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  /* If the pseudo-reg we are reloading is no longer referenced
     anywhere between the store into it and here,
     and no jumps or labels intervene, then the value can get
     here through the reload reg alone.
     Otherwise, give up--return.  */
  for (i1 = NEXT_INSN (output_reload_insn);
       i1 != insn; i1 = NEXT_INSN (i1))
    {
      if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
	return;
      if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
	  && reg_mentioned_p (reg, PATTERN (i1)))
	return;
    }

  if (cannot_omit_stores[REGNO (reg)])
    return;

  /* If this insn will store in the pseudo again,
     the previous store can be removed.  */
  if (reload_out[j] == reload_in[j])
    delete_insn (output_reload_insn);

  /* See if the pseudo reg has been completely replaced
     with reload regs.  If so, delete the store insn
     and forget we had a stack slot for the pseudo.  */
  else if (reg_n_deaths[REGNO (reg)] == 1
	   && reg_basic_block[REGNO (reg)] >= 0
	   && find_regno_note (insn, REG_DEAD, REGNO (reg)))
    {
      rtx i2;

      /* We know that it was used only between here
	 and the beginning of the current basic block.
	 (We also know that the last use before INSN was
	 the output reload we are thinking of deleting, but never mind that.)
	 Search that range; see if any ref remains.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  /* Uses which just store in the pseudo don't count,
	     since if they are the only uses, they are dead.  */
	  if (set != 0 && SET_DEST (set) == reg)
	    continue;
	  if (GET_CODE (i2) == CODE_LABEL
	      || GET_CODE (i2) == JUMP_INSN)
	    break;
	  if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
	      && reg_mentioned_p (reg, PATTERN (i2)))
	    /* Some other ref remains;
	       we can't do anything.  */
	    return;
	}

      /* Delete the now-dead stores into this pseudo.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  if (set != 0 && SET_DEST (set) == reg)
	    delete_insn (i2);
	  if (GET_CODE (i2) == CODE_LABEL
	      || GET_CODE (i2) == JUMP_INSN)
	    break;
	}

      /* For the debugging info,
	 say the pseudo lives in this reload reg.  */
      reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
      alter_reg (REGNO (reg), -1);
    }
}
/* Output reload-insns to reload VALUE into RELOADREG.
   VALUE is an autoincrement or autodecrement RTX whose operand
   is a register or memory location;
   so reloading involves incrementing that location.

   INC_AMOUNT is the number to increment or decrement by (always positive).
   This cannot be deduced from VALUE.  */
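
/* Exposition-only example: reloading (post_inc:SI (reg:SI 2)) with
   INC_AMOUNT 4 must leave RELOADREG holding the pre-increment address and
   leave reg 2 incremented by 4; reloading (pre_dec:SI (reg:SI 2)) with
   INC_AMOUNT 4 must first decrement reg 2 and then give RELOADREG the
   decremented value.  Register numbers are hypothetical.  */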
static void
inc_for_reload (reloadreg, value, inc_amount)
     rtx reloadreg;
     rtx value;
     int inc_amount;
{
  /* REG or MEM to be copied and incremented.  */
  rtx incloc = XEXP (value, 0);
  /* Nonzero if increment after copying.  */
  int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
  rtx last;
  rtx inc;
  rtx add_insn;
  int code;

  /* No hard register is equivalent to this register after
     inc/dec operation.  If REG_LAST_RELOAD_REG were non-zero,
     we could inc/dec that register as well (maybe even using it for
     the source), but I'm not sure it's worth worrying about.  */
  if (GET_CODE (incloc) == REG)
    reg_last_reload_reg[REGNO (incloc)] = 0;

  if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
    inc_amount = - inc_amount;

  inc = GEN_INT (inc_amount);

  /* If this is post-increment, first copy the location to the reload reg.  */
  if (post)
    emit_insn (gen_move_insn (reloadreg, incloc));

  /* See if we can directly increment INCLOC.  Use a method similar to that
     in gen_reload.  */

  last = get_last_insn ();
  add_insn = emit_insn (gen_rtx (SET, VOIDmode, incloc,
				 gen_rtx (PLUS, GET_MODE (incloc),
					  incloc, inc)));

  code = recog_memoized (add_insn);
  if (code >= 0)
    {
      insn_extract (add_insn);
      if (constrain_operands (code, 1))
	{
	  /* If this is a pre-increment and we have incremented the value
	     where it lives, copy the incremented value to RELOADREG to
	     be used as an address.  */

	  if (! post)
	    emit_insn (gen_move_insn (reloadreg, incloc));

	  return;
	}
    }

  delete_insns_since (last);

  /* If couldn't do the increment directly, must increment in RELOADREG.
     The way we do this depends on whether this is pre- or post-increment.
     For pre-increment, copy INCLOC to the reload register, increment it
     there, then save back.  */

  if (! post)
    {
      emit_insn (gen_move_insn (reloadreg, incloc));
      emit_insn (gen_add2_insn (reloadreg, inc));
      emit_insn (gen_move_insn (incloc, reloadreg));
    }
  else
    {
      /* Postincrement.
	 Because this might be a jump insn or a compare, and because RELOADREG
	 may not be available after the insn in an input reload, we must do
	 the incrementation before the insn being reloaded for.

	 We have already copied INCLOC to RELOADREG.  Increment the copy in
	 RELOADREG, save that back, then decrement RELOADREG so it has
	 the original value.  */
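
      /* Exposition-only sketch of the three insns emitted below, for a
	 hypothetical RELOADREG r and INCLOC m with INC 4:
	     r := r + 4     (increment the copy)
	     m := r         (store the incremented value back)
	     r := r - 4     (restore the pre-increment address)  */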
      emit_insn (gen_add2_insn (reloadreg, inc));
      emit_insn (gen_move_insn (incloc, reloadreg));
      emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
    }
}
/* Return 1 if we are certain that the constraint-string STRING allows
   the hard register REG.  Return 0 if we can't be sure of this.  */
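
/* Exposition-only example: for the constraint string "r,m" and a hard
   general register, the first alternative accepts the register but the
   second ("m") does not, so the function returns 0; for "g,r" it returns 1.
   The constraint strings here are hypothetical.  */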
static int
constraint_accepts_reg_p (string, reg)
     char *string;
     rtx reg;
{
  int value = 0;
  int regno = true_regnum (reg);
  int c;

  /* Initialize for first alternative.  */
  value = 0;
  /* Check that each alternative contains `g' or `r'.  */
  while (1)
    switch (c = *string++)
      {
      case 0:
	/* If an alternative lacks `g' or `r', we lose.  */
	return value;
      case ',':
	/* If an alternative lacks `g' or `r', we lose.  */
	if (value == 0)
	  return 0;
	/* Initialize for next alternative.  */
	value = 0;
	break;
      case 'g':
      case 'r':
	/* Any general reg wins for this alternative.  */
	if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
	  value = 1;
	break;
      default:
	/* Any reg in specified class wins for this alternative.  */
	{
	  enum reg_class class = REG_CLASS_FROM_LETTER (c);

	  if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
	    value = 1;
	}
      }
}
/* Return the number of places FIND appears within X, but don't count
   an occurrence if some SET_DEST is FIND.  */
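
/* Exposition-only example: counting (reg:SI 3) in
   (set (reg:SI 3) (plus:SI (reg:SI 3) (const_int 1))) yields 1, because the
   occurrence as SET_DEST is deliberately not counted.  The register number
   is hypothetical.  */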
static int
count_occurrences (x, find)
     register rtx x, find;
{
  register int i, j;
  register enum rtx_code code;
  register char *format_ptr;
  int count;

  if (x == find)
    return 1;
  if (x == 0)
    return 0;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
    case QUEUED:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return 0;

    case SET:
      if (SET_DEST (x) == find)
	return count_occurrences (SET_SRC (x), find);
      break;
    }

  format_ptr = GET_RTX_FORMAT (code);
  count = 0;

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  count += count_occurrences (XEXP (x, i), find);
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL)
	    {
	      for (j = 0; j < XVECLEN (x, i); j++)
		count += count_occurrences (XVECEXP (x, i, j), find);